├── .cfnlintrc ├── .cspell.json ├── .ecrc.json ├── .editorconfig ├── .github ├── ISSUE_TEMPLATE │ ├── bug-template.yaml │ ├── doc-template.yaml │ └── feature-template.yaml ├── PULL_REQUEST_TEMPLATE.md └── workflows │ ├── adf.yml │ └── mega-linter.yml ├── .gitignore ├── .markdown-link-check.json ├── .markdownlint.json ├── .mega-linter.yml ├── .nvmrc ├── .prettierrc.js ├── .pylintrc ├── .yamllint.yml ├── CHANGELOG.md ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE.txt ├── Makefile ├── Makefile.tox ├── NOTICE.md ├── README.md ├── docs ├── admin-guide.md ├── images │ ├── TechnicalGuide-AccountManagementStateMachine.drawio.png │ ├── TechnicalGuide-BootstrapRepo.drawio.png │ ├── adf-pipeline-high-level.png │ ├── app-deploying.png │ ├── approval.png │ ├── aws-multi-org-1.png │ ├── aws-multi-org-2.png │ ├── aws-multi-org-adf-config-multi-organization.png │ ├── cc-repos.png │ ├── cfn-output.png │ ├── cp-building.png │ ├── create-account.png │ ├── create-ou.png │ ├── ecr-pipeline-running.png │ ├── new-ous.png │ ├── run-state-machine.png │ ├── stack-complete.png │ ├── step-func-complete.png │ └── vpc-pipeline.png ├── installation-guide.md ├── multi-organization-guide.md ├── pipeline-types-guide.md ├── providers-guide.md ├── samples-guide.md ├── technical-guide.md └── user-guide.md ├── linters └── custom-adf-dict.txt ├── requirements-dev.txt ├── requirements.txt ├── resources └── OrganizationAccountAccessRole.yaml ├── samples ├── sample-cdk-app │ ├── .gitignore │ ├── README.md │ ├── buildspec.yml │ ├── cdk.json │ ├── handler.py │ ├── index.ts │ ├── package.json │ └── tsconfig.json ├── sample-cdk-bootstrap │ ├── README.md │ ├── buildspec.yml │ └── params │ │ └── global.yml ├── sample-codebuild-vpc │ ├── README.md │ ├── buildspec.yml │ ├── params │ │ └── global.yml │ ├── template.yml │ └── testspec.yml ├── sample-ec2-java-app-codedeploy │ ├── .mvn │ │ └── wrapper │ │ │ ├── maven-wrapper.jar │ │ │ └── maven-wrapper.properties │ ├── README.md │ ├── appspec.yml │ ├── buildspec.yml │ ├── pom.xml │ ├── scripts │ │ ├── start.sh │ │ ├── stop.sh │ │ └── validate.sh │ └── src │ │ ├── main │ │ └── java │ │ │ └── hello │ │ │ └── Application.java │ │ └── resources │ │ └── application.yml ├── sample-ec2-with-codedeploy │ ├── README.md │ ├── buildspec.yml │ ├── params │ │ └── global.yml │ ├── scripts │ │ ├── install-codedeploy.sh │ │ └── install-deps.sh │ └── template.yml ├── sample-ecr-repository │ ├── README.md │ ├── buildspec.yml │ ├── params │ │ └── global.yml │ └── template.yml ├── sample-ecs-cluster │ ├── README.md │ ├── buildspec.yml │ ├── params │ │ ├── banking-production.yml │ │ └── global.yml │ └── template.yml ├── sample-etl-pipeline │ ├── README.md │ ├── big_data.txt │ └── scripts │ │ └── some_etl_script.sh ├── sample-expunge-vpc │ ├── README.md │ ├── build-lambda.sh │ ├── buildspec.yml │ ├── src │ │ └── lambda_vpc │ │ │ ├── lambda_function.py │ │ │ └── requirements.txt │ └── template.yml ├── sample-fargate-node-app │ ├── Dockerfile │ ├── README.md │ ├── build │ │ ├── docker.sh │ │ └── generate_parameters.sh │ ├── buildspec.yml │ ├── index.js │ ├── package-lock.json │ ├── package.json │ ├── params │ │ ├── banking-production.yml │ │ ├── global.yml │ │ └── global_eu-west-1.json │ ├── public │ │ └── main.css │ ├── template.yml │ └── views │ │ └── index.ejs ├── sample-iam │ ├── README.md │ ├── buildspec.yml │ ├── params │ │ └── global.yml │ └── template.yml ├── sample-mono-repo │ ├── README.md │ └── apps │ │ ├── alpha │ │ ├── README.md │ │ ├── buildspec.yml │ │ ├── params │ │ │ └── global.yml │ │ └── 
template.yml │ │ └── beta │ │ ├── README.md │ │ ├── buildspec.yml │ │ ├── params │ │ └── global.yml │ │ └── template.yml ├── sample-serverless-app │ ├── README.md │ ├── build │ │ └── generate_parameters.sh │ ├── buildspec.yml │ ├── handler.py │ ├── params │ │ └── global.yml │ ├── requirements.txt │ └── template.yml ├── sample-service-catalog-product │ ├── README.md │ ├── buildspec.yml │ ├── params │ │ └── global.yml │ ├── productX │ │ └── template.yml │ └── template.yml ├── sample-terraform │ ├── README.md │ ├── buildspec.yml │ ├── params │ │ └── global.yml │ ├── tf │ │ ├── backend.tf │ │ ├── main.tf │ │ ├── s3.tf │ │ └── variables.tf │ ├── tf_apply.yml │ ├── tf_destroy.yml │ ├── tf_plan.yml │ ├── tf_scan.yml │ └── tfvars │ │ └── global.auto.tfvars └── sample-vpc │ ├── README.md │ ├── buildspec.yml │ ├── params │ ├── banking-production.yml │ └── global.yml │ └── template.yml ├── src ├── account_bootstrapping_jump_role.yml ├── lambda_codebase │ ├── LICENSE.txt │ ├── __init__.py │ ├── account │ │ ├── handler.py │ │ ├── main.py │ │ ├── pytest.ini │ │ ├── requirements.txt │ │ └── tests │ │ │ ├── __init__.py │ │ │ └── test_main.py │ ├── account_bootstrap.py │ ├── account_processing │ │ ├── __init__.py │ │ ├── configure_account_alias.py │ │ ├── configure_account_ou.py │ │ ├── configure_account_regions.py │ │ ├── configure_account_tags.py │ │ ├── create_account.py │ │ ├── delete_default_vpc.py │ │ ├── get_account_regions.py │ │ ├── process_account_files.py │ │ ├── pytest.ini │ │ ├── register_account_for_support.py │ │ ├── requirements.txt │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_account_alias.py │ │ │ ├── test_account_creation.py │ │ │ ├── test_account_file_processing.py │ │ │ ├── test_account_tags.py │ │ │ ├── test_configure_account_regions.py │ │ │ ├── test_delete_default_vpc.py │ │ │ └── test_get_default_regions.py │ ├── cleanup_legacy_stacks │ │ ├── cleanup_legacy_stacks.py │ │ ├── handler.py │ │ └── requirements.txt │ ├── cross_region_bucket │ │ ├── handler.py │ │ ├── main.py │ │ └── requirements.txt │ ├── determine_event.py │ ├── event.py │ ├── generic_account_config.py │ ├── initial_commit │ │ ├── adf.yml.j2 │ │ ├── adfconfig.yml.j2 │ │ ├── bootstrap_repository │ │ │ ├── .gitignore │ │ │ ├── README.md │ │ │ ├── adf-accounts │ │ │ │ └── README.md │ │ │ ├── adf-bootstrap │ │ │ │ ├── deployment │ │ │ │ │ ├── example-global-iam.yml │ │ │ │ │ ├── global.yml │ │ │ │ │ ├── lambda_codebase │ │ │ │ │ │ ├── LICENSE.txt │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ ├── determine_default_branch │ │ │ │ │ │ │ ├── determine_default_branch.py │ │ │ │ │ │ │ ├── handler.py │ │ │ │ │ │ │ ├── pytest.ini │ │ │ │ │ │ │ ├── requirements.txt │ │ │ │ │ │ │ └── tests │ │ │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ │ │ └── test_determine_default_branch.py │ │ │ │ │ │ ├── enable_cross_account_access.py │ │ │ │ │ │ ├── iam_cfn_deploy_role_policy.py │ │ │ │ │ │ ├── initial_commit │ │ │ │ │ │ │ ├── handler.py │ │ │ │ │ │ │ ├── initial_commit.py │ │ │ │ │ │ │ ├── pipelines_repository │ │ │ │ │ │ │ │ ├── .gitignore │ │ │ │ │ │ │ │ ├── README.md │ │ │ │ │ │ │ │ └── example-deployment_map.yml │ │ │ │ │ │ │ ├── pytest.ini │ │ │ │ │ │ │ ├── requirements.txt │ │ │ │ │ │ │ └── tests │ │ │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ │ │ └── test_initial_commit.py │ │ │ │ │ │ ├── pipeline_management │ │ │ │ │ │ │ ├── create_or_update_rule.py │ │ │ │ │ │ │ ├── create_repository.py │ │ │ │ │ │ │ ├── generate_pipeline_inputs.py │ │ │ │ │ │ │ ├── identify_out_of_date_pipelines.py │ │ │ │ │ │ │ ├── process_deployment_map.py │ │ │ │ │ │ │ 
├── requirements.txt │ │ │ │ │ │ │ └── store_pipeline_definition.py │ │ │ │ │ │ ├── pytest.ini │ │ │ │ │ │ ├── requirements.txt │ │ │ │ │ │ ├── slack.py │ │ │ │ │ │ ├── tests │ │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ │ ├── stubs │ │ │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ │ │ ├── slack.py │ │ │ │ │ │ │ │ └── stub_iam.py │ │ │ │ │ │ │ ├── test_iam_cfn_deploy_role_policy.py │ │ │ │ │ │ │ └── test_slack.py │ │ │ │ │ │ └── update_pipelines.py │ │ │ │ │ ├── pipeline_management.yml │ │ │ │ │ └── regional.yml │ │ │ │ ├── example-global-iam.yml │ │ │ │ ├── example-scp.json │ │ │ │ ├── example-tagging-policy.json │ │ │ │ └── global.yml │ │ │ ├── adf-build │ │ │ │ ├── LICENSE.txt │ │ │ │ ├── __init__.py │ │ │ │ ├── config.py │ │ │ │ ├── main.py │ │ │ │ ├── organization_policy.py │ │ │ │ ├── provisioner │ │ │ │ │ └── src │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ ├── account.py │ │ │ │ │ │ ├── configparser.py │ │ │ │ │ │ ├── support.py │ │ │ │ │ │ └── vpc.py │ │ │ │ ├── requirements-dev.txt │ │ │ │ ├── requirements.txt │ │ │ │ ├── shared │ │ │ │ │ ├── LICENSE.txt │ │ │ │ │ ├── __init__.py │ │ │ │ │ ├── base_resolver.py │ │ │ │ │ ├── cdk │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ ├── cdk_constructs │ │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ │ ├── adf_chatbot.py │ │ │ │ │ │ │ ├── adf_cloudformation.py │ │ │ │ │ │ │ ├── adf_codebuild.py │ │ │ │ │ │ │ ├── adf_codecommit.py │ │ │ │ │ │ │ ├── adf_codeconnections.py │ │ │ │ │ │ │ ├── adf_codepipeline.py │ │ │ │ │ │ │ ├── adf_events.py │ │ │ │ │ │ │ ├── adf_github.py │ │ │ │ │ │ │ ├── adf_jenkins.py │ │ │ │ │ │ │ ├── adf_notifications.py │ │ │ │ │ │ │ ├── adf_s3.py │ │ │ │ │ │ │ └── tests │ │ │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ │ │ ├── adf_codepipeline_test_constants.py │ │ │ │ │ │ │ │ ├── test_adf_codebuild_buildspec.py │ │ │ │ │ │ │ │ ├── test_adf_codebuild_determine_build_image.py │ │ │ │ │ │ │ │ ├── test_adf_codepipeline_generate.py │ │ │ │ │ │ │ │ ├── test_adf_codepipeline_input_artifacts.py │ │ │ │ │ │ │ │ └── test_adf_codepipeline_output_artifacts.py │ │ │ │ │ │ ├── cdk_stacks │ │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ │ ├── adf_default_pipeline.py │ │ │ │ │ │ │ ├── main.py │ │ │ │ │ │ │ └── tests │ │ │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ │ │ ├── test_default_pipeline_type.py │ │ │ │ │ │ │ │ └── test_pipeline_creation.py │ │ │ │ │ │ ├── clean_pipelines.py │ │ │ │ │ │ ├── execute_pipeline_stacks.py │ │ │ │ │ │ ├── generate_pipeline_stacks.py │ │ │ │ │ │ └── pytest.ini │ │ │ │ │ ├── generate_params.py │ │ │ │ │ ├── helpers │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ ├── describe_codepipeline_trigger.py │ │ │ │ │ │ ├── package_transform.sh │ │ │ │ │ │ ├── pytest.ini │ │ │ │ │ │ ├── requirements-dev.txt │ │ │ │ │ │ ├── requirements.txt │ │ │ │ │ │ ├── retrieve_organization_accounts.py │ │ │ │ │ │ ├── sts.sh │ │ │ │ │ │ ├── sync_to_s3.py │ │ │ │ │ │ ├── terraform │ │ │ │ │ │ │ ├── adf_terraform.sh │ │ │ │ │ │ │ ├── get_accounts.py │ │ │ │ │ │ │ └── install_terraform.sh │ │ │ │ │ │ └── tests │ │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ │ └── test_sync_to_s3.py │ │ │ │ │ ├── pytest.ini │ │ │ │ │ ├── python │ │ │ │ │ │ ├── LICENSE.txt │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ ├── cache.py │ │ │ │ │ │ ├── cloudformation.py │ │ │ │ │ │ ├── cloudwatch.py │ │ │ │ │ │ ├── codepipeline.py │ │ │ │ │ │ ├── deployment_map.py │ │ │ │ │ │ ├── errors.py │ │ │ │ │ │ ├── list_utils.py │ │ │ │ │ │ ├── logger.py │ │ │ │ │ │ ├── organizations.py │ │ │ │ │ │ ├── paginator.py │ │ │ │ │ │ ├── parameter_store.py │ │ │ │ │ │ ├── partition.py │ │ │ │ │ │ ├── pipeline.py │ │ │ │ │ │ ├── 
pytest.ini │ │ │ │ │ │ ├── repo.py │ │ │ │ │ │ ├── requirements.txt │ │ │ │ │ │ ├── rule.py │ │ │ │ │ │ ├── s3.py │ │ │ │ │ │ ├── schema_validation.py │ │ │ │ │ │ ├── stepfunctions.py │ │ │ │ │ │ ├── sts.py │ │ │ │ │ │ ├── target.py │ │ │ │ │ │ ├── tests │ │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ │ ├── stubs │ │ │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ │ │ ├── stub_cloudformation.py │ │ │ │ │ │ │ │ ├── stub_codepipeline.py │ │ │ │ │ │ │ │ ├── stub_deployment_map.yml │ │ │ │ │ │ │ │ ├── stub_event.py │ │ │ │ │ │ │ │ ├── stub_kms.py │ │ │ │ │ │ │ │ ├── stub_organizations.py │ │ │ │ │ │ │ │ ├── stub_parameter_store.py │ │ │ │ │ │ │ │ ├── stub_s3.py │ │ │ │ │ │ │ │ ├── stub_step_functions.py │ │ │ │ │ │ │ │ └── stub_target.py │ │ │ │ │ │ │ ├── test_cache.py │ │ │ │ │ │ │ ├── test_cloudformation.py │ │ │ │ │ │ │ ├── test_codepipeline.py │ │ │ │ │ │ │ ├── test_deployment_map.py │ │ │ │ │ │ │ ├── test_list_utils.py │ │ │ │ │ │ │ ├── test_organizations.py │ │ │ │ │ │ │ ├── test_parameter_store.py │ │ │ │ │ │ │ ├── test_partition.py │ │ │ │ │ │ │ ├── test_pipeline.py │ │ │ │ │ │ │ ├── test_s3.py │ │ │ │ │ │ │ ├── test_schema_validation.py │ │ │ │ │ │ │ ├── test_step_functions.py │ │ │ │ │ │ │ ├── test_sts.py │ │ │ │ │ │ │ └── test_target.py │ │ │ │ │ │ └── thread.py │ │ │ │ │ ├── requirements-dev.txt │ │ │ │ │ ├── requirements.txt │ │ │ │ │ ├── resolver.py │ │ │ │ │ ├── resolver_param_store.py │ │ │ │ │ ├── resolver_stack_output.py │ │ │ │ │ ├── resolver_upload.py │ │ │ │ │ ├── templates │ │ │ │ │ │ ├── codecommit.yml │ │ │ │ │ │ └── events.yml │ │ │ │ │ └── tests │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ ├── stubs │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ ├── parameter_environment_acceptance_tag_project_a.yml │ │ │ │ │ │ ├── parameter_environment_prod.json │ │ │ │ │ │ ├── parameter_extra_one_only.json │ │ │ │ │ │ ├── stub_cfn_global.json │ │ │ │ │ │ ├── stub_cfn_global.yml │ │ │ │ │ │ ├── tag_cost_center_free_only.json │ │ │ │ │ │ ├── tag_cost_center_nonfree_only.json │ │ │ │ │ │ ├── tag_department_alpha_only.json │ │ │ │ │ │ └── tag_geo_eu_only.json │ │ │ │ │ │ └── test_generate_params.py │ │ │ │ ├── store_config.py │ │ │ │ └── tests │ │ │ │ │ ├── __init__.py │ │ │ │ │ ├── stubs │ │ │ │ │ └── stub_adfconfig.yml │ │ │ │ │ ├── test_config.py │ │ │ │ │ └── test_main.py │ │ │ ├── example-adfconfig.yml │ │ │ ├── pytest.ini │ │ │ ├── requirements-dev.txt │ │ │ └── tox.ini │ │ ├── handler.py │ │ ├── initial_commit.py │ │ ├── pytest.ini │ │ ├── requirements.txt │ │ └── tests │ │ │ ├── __init__.py │ │ │ └── test_initial_commit.py │ ├── jump_role_manager │ │ ├── main.py │ │ ├── pytest.ini │ │ ├── requirements.txt │ │ └── tests │ │ │ ├── __init__.py │ │ │ └── test_main.py │ ├── moved_to_root.py │ ├── organization │ │ ├── handler.py │ │ ├── main.py │ │ └── requirements.txt │ ├── organization_unit │ │ ├── handler.py │ │ ├── main.py │ │ └── requirements.txt │ ├── requirements.txt │ └── wait_until_complete.py └── template.yml └── tox.ini /.cfnlintrc: -------------------------------------------------------------------------------- 1 | templates: 2 | - ./**/template.yml 3 | - ./resources/*.yaml 4 | - ./**/example-global-iam.yml 5 | - ./**/global.yml 6 | - ./**/regional.yml 7 | ignore_templates: 8 | - ./**/appspec.yml 9 | - ./**/appspec.yaml 10 | - ./**/buildspec.yml 11 | - ./**/buildspec.yaml 12 | - ./**/params/*.yml 13 | - ./**/params/*.yaml 14 | include_checks: 15 | - I 16 | ignore_checks: 17 | - W3002 18 | - W3045 19 | -------------------------------------------------------------------------------- /.cspell.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2", 3 | "language": "en", 4 | "dictionaries": [ 5 | "custom-adf", 6 | "bash", 7 | "python", 8 | "en-US", 9 | "corp-terms", 10 | "softwareTerms", 11 | "typescript", 12 | "node", 13 | "npm" 14 | ], 15 | "ignorePaths": [ 16 | ".pylintrc", 17 | "CHANGELOG.md", 18 | "requirements.txt", 19 | "requirements-dev.txt", 20 | "maven-wrapper.jar", 21 | ".cspell.json", 22 | ".gitignore", 23 | "custom-adf-dict.txt" 24 | ], 25 | "allowCompoundWords": true, 26 | "dictionaryDefinitions": [ 27 | { 28 | "name": "custom-adf", 29 | "path": "./linters/custom-adf-dict.txt", 30 | "addWords": true 31 | } 32 | ] 33 | } 34 | -------------------------------------------------------------------------------- /.ecrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "Verbose": false, 3 | "Debug": false, 4 | "IgnoreDefaults": false, 5 | "SpacesAftertabs": false, 6 | "NoColor": false, 7 | "Exclude": ["LICENSE.txt"], 8 | "AllowedContentTypes": [], 9 | "PassedFiles": [], 10 | "Disable": { 11 | "EndOfLine": false, 12 | "Indentation": false, 13 | "IndentSize": false, 14 | "InsertFinalNewline": false, 15 | "TrimTrailingWhitespace": false, 16 | "MaxLineLength": true 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | # Includes all file types included in the ADF repo: 5 | # - .ini 6 | # - .java 7 | # - .js 8 | # - .json 9 | # - .md 10 | # - .py 11 | # - .rb 12 | # - .sh 13 | # - .ts 14 | # - .txt 15 | # - .xml 16 | # - .yml 17 | # - .yml.j2 18 | # - Makefile 19 | charset = utf-8 20 | indent_style = space 21 | indent_size = 2 22 | end_of_line = lf 23 | insert_final_newline = true 24 | trim_trailing_whitespace = true 25 | max_line_length = 80 26 | 27 | [*.py] 28 | indent_size = 4 29 | max_line_length = 100 30 | 31 | [*.{json,xml,html,ejs}] 32 | indent_size = 4 33 | 34 | [*.{json,yml,yml.j2}] 35 | max_line_length = 140 36 | 37 | [{package.json,package-lock.json}] 38 | indent_size = 2 39 | 40 | [*.java] 41 | indent_size = 4 42 | 43 | [*.md] 44 | indent_size = unset 45 | 46 | [*.txt] 47 | indent_size = 4 48 | 49 | [*.ini] 50 | indent_size = 4 51 | 52 | [tox.ini] 53 | max_line_length = 120 54 | 55 | [Makefile*] 56 | indent_style = tab 57 | indent_size = 4 58 | tab_width = 4 59 | 60 | [*.sh] 61 | space_redirects = true 62 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/doc-template.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | name: "📕 Documentation Issue" 3 | description: Report an issue in the documentation 4 | title: "[Docs]: " 5 | labels: [docs] 6 | assignees: [] 7 | body: 8 | - type: textarea 9 | id: description 10 | attributes: 11 | label: Describe the documentation issue 12 | description: A clear and concise description of the documentation issue. 13 | validations: 14 | required: true 15 | - type: textarea 16 | id: links 17 | attributes: 18 | label: Links 19 | description: | 20 | Include links to affected documentation page(s). 21 | validations: 22 | required: true 23 | - type: checkboxes 24 | id: ack 25 | attributes: 26 | label: Acknowledgements 27 | options: 28 | - label: I may be able to submit a pull-request to fix this issue.
29 | required: false 30 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature-template.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | name: 🚀 Feature Request 3 | description: Suggest an idea for this project 4 | title: "[Feat]: " 5 | labels: [feature] 6 | assignees: [] 7 | body: 8 | - type: textarea 9 | id: description 10 | attributes: 11 | label: Describe the feature 12 | description: | 13 | A clear and concise description of the feature you are proposing. 14 | validations: 15 | required: true 16 | - type: textarea 17 | id: use-case 18 | attributes: 19 | label: Use Case 20 | description: | 21 | Why do you need this feature? For example: "I'm always frustrated 22 | when...", "I have a customer that needs..." 23 | validations: 24 | required: true 25 | - type: textarea 26 | id: solution 27 | attributes: 28 | label: Proposed Solution 29 | description: | 30 | Suggest how to implement the addition or change. Please include 31 | prototype/workaround/sketch/reference implementation. 32 | validations: 33 | required: false 34 | - type: checkboxes 35 | id: ack 36 | attributes: 37 | label: Acknowledgements 38 | options: 39 | - label: I may be able to implement this feature request 40 | required: false 41 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | # Why? 2 | 3 | Describe why you are proposing these changes 4 | 5 | *Issue #, if available:* 6 | 7 | ## What? 8 | 9 | Description of changes: 10 | 11 | - List 12 | - What 13 | - You 14 | - Changed 15 | 16 | --- 17 | 18 | By submitting this pull request, I confirm that you can use, modify, copy, and 19 | redistribute this contribution, under the terms of your choice. 20 | -------------------------------------------------------------------------------- /.github/workflows/adf.yml: -------------------------------------------------------------------------------- 1 | name: ADF CI 2 | 3 | on: [push, pull_request] 4 | 5 | env: 6 | CI_BUILD: 1 7 | 8 | jobs: 9 | build: 10 | runs-on: ubuntu-latest 11 | strategy: 12 | matrix: 13 | python-version: ["3.12"] 14 | 15 | steps: 16 | - name: Checkout Repo 17 | uses: actions/checkout@v4 18 | with: 19 | fetch-depth: 150 20 | fetch-tags: true 21 | - name: Set up Python ${{ matrix.python-version }} 22 | uses: actions/setup-python@v5 23 | with: 24 | python-version: ${{ matrix.python-version }} 25 | - name: Set the correct Node version using nvm 26 | shell: bash -l {0} 27 | run: nvm install 28 | - name: Test compatibility of source dependencies 29 | run: | 30 | make clean src_deps 31 | echo "Source dependencies are compatible!" 
32 | - name: Run tox 33 | run: | 34 | make clean tox 35 | - name: Build fully 36 | run: | 37 | make clean build 38 | -------------------------------------------------------------------------------- /.github/workflows/mega-linter.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # MegaLinter GitHub Action configuration file 3 | # More info at https://megalinter.io 4 | name: MegaLinter 5 | 6 | on: [push, pull_request] 7 | 8 | env: 9 | APPLY_FIXES: none 10 | APPLY_FIXES_EVENT: pull_request 11 | APPLY_FIXES_MODE: pull_request 12 | 13 | concurrency: 14 | group: ${{ github.ref }}-${{ github.workflow }} 15 | cancel-in-progress: true 16 | 17 | jobs: 18 | build: 19 | name: MegaLinter 20 | runs-on: ubuntu-latest 21 | steps: 22 | - name: Checkout Code 23 | uses: actions/checkout@v4 24 | with: 25 | token: ${{ secrets.PAT || secrets.GITHUB_TOKEN }} 26 | fetch-depth: 0 27 | 28 | - name: MegaLinter 29 | id: ml 30 | uses: oxsecurity/megalinter@v8.4.2 31 | env: 32 | # All available variables are described in documentation 33 | # https://megalinter.io/configuration/ 34 | VALIDATE_ALL_CODEBASE: true 35 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 36 | 37 | - name: Archive production artifacts 38 | if: ${{ success() }} || ${{ failure() }} 39 | uses: actions/upload-artifact@v4 40 | with: 41 | name: MegaLinter reports 42 | path: | 43 | megalinter-reports 44 | mega-linter.log 45 | 46 | - name: Upload MegaLinter scan results to GitHub Security tab 47 | if: ${{ success() }} || ${{ failure() }} 48 | uses: github/codeql-action/upload-sarif@v3 49 | with: 50 | sarif_file: 'megalinter-reports/megalinter-report.sarif' 51 | -------------------------------------------------------------------------------- /.markdown-link-check.json: -------------------------------------------------------------------------------- 1 | { 2 | "httpHeaders": [ 3 | { 4 | "urls": [ 5 | "https://docs.github.com/", 6 | "https://help.github.com/" 7 | ], 8 | "headers": { 9 | "Accept-Encoding": "zstd, br, gzip, deflate" 10 | } 11 | } 12 | ] 13 | } 14 | -------------------------------------------------------------------------------- /.markdownlint.json: -------------------------------------------------------------------------------- 1 | { 2 | "emphasis-style": { 3 | "style": "consistent" 4 | }, 5 | "no-duplicate-heading": { 6 | "siblings_only": true 7 | }, 8 | "heading-style": { 9 | "style": "atx" 10 | }, 11 | "ul-style": { 12 | "style": "dash" 13 | }, 14 | "ul-indent": { 15 | "indent": 2 16 | }, 17 | "line-length": { 18 | "tables": false, 19 | "code_blocks": true, 20 | "headings": true, 21 | "line_length": 80, 22 | "heading_line_length": 80, 23 | "code_block_line_length": 80 24 | }, 25 | "no-trailing-punctuation": { 26 | "punctuation": ".,;:!。,;:" 27 | }, 28 | "ol-prefix": { 29 | "style": "one_or_ordered" 30 | }, 31 | "no-inline-html": { 32 | "allowed_elements": [] 33 | }, 34 | "no-emphasis-as-heading": { 35 | "punctuation": ".,;:!。,;:" 36 | }, 37 | "single-trailing-newline": true 38 | } 39 | -------------------------------------------------------------------------------- /.mega-linter.yml: -------------------------------------------------------------------------------- 1 | # Configuration file for MegaLinter 2 | # See all available variables at https://megalinter.io/configuration/ and in linters documentation 3 | 4 | # all, none, or list of linter keys 5 | APPLY_FIXES: none 6 | 7 | # If you use ENABLE variable, all other languages/formats/tooling-formats will be disabled by default 8 | # ENABLE: 9 | 10 | 
# If you use ENABLE_LINTERS variable, all other linters will be disabled by default 11 | ENABLE_LINTERS: 12 | - BASH_EXEC 13 | - BASH_SHFMT 14 | - CLOUDFORMATION_CFN_LINT 15 | - DOCKERFILE_HADOLINT 16 | - EDITORCONFIG_EDITORCONFIG_CHECKER 17 | - JSON_JSONLINT 18 | - JSON_PRETTIER 19 | - JSON_V8R 20 | - JAVASCRIPT_STANDARD 21 | - MARKDOWN_MARKDOWN_LINK_CHECK 22 | - MARKDOWN_MARKDOWNLINT 23 | - MARKDOWN_MARKDOWN_TABLE_FORMATTER 24 | - SPELL_CSPELL 25 | - TERRAFORM_TFLINT 26 | - YAML_YAMLLINT 27 | 28 | SARIF_REPORTER: true 29 | SHOW_ELAPSED_TIME: true 30 | FILEIO_REPORTER: false 31 | 32 | # Install plugin for list handling. 33 | JSON_PRETTIER_PRE_COMMANDS: 34 | - command: "npm install prettier-plugin-multiline-arrays@3.0.6" 35 | cwd: "workspace" 36 | 37 | CLOUDFORMATION_CFN_LINT_CONFIG_FILE: '.cfnlintrc' 38 | CLOUDFORMATION_CFN_LINT_FILE_EXTENSIONS: [".yml", ".yaml"] 39 | 40 | EDITORCONFIG_EDITORCONFIG_CHECKER_CONFIG_FILE: '.ecrc.json' 41 | 42 | MARKDOWN_MARKDOWN_LINK_CHECK_ARGUMENTS: '-q' 43 | MARKDOWN_MARKDOWNLINT_DISABLE_ERRORS: false 44 | 45 | SPELL_CSPELL_ARGUMENTS: '--gitignore --no-progress --show-suggestions' 46 | SPELL_CSPELL_FILE_EXTENSIONS: ["*"] 47 | 48 | TERRAFORM_TFLINT_UNSECURED_ENV_VARIABLES: 49 | - GITHUB_TOKEN 50 | 51 | GITHUB_STATUS_REPORTER: true 52 | GITHUB_COMMENT_REPORTER: true 53 | -------------------------------------------------------------------------------- /.nvmrc: -------------------------------------------------------------------------------- 1 | 12 2 | -------------------------------------------------------------------------------- /.prettierrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | plugins: [ 3 | 'prettier-plugin-multiline-arrays' 4 | ], 5 | trailingComma: 'es5', 6 | semi: false, 7 | singleQuote: true, 8 | }; 9 | -------------------------------------------------------------------------------- /.yamllint.yml: -------------------------------------------------------------------------------- 1 | --- 2 | yaml-files: 3 | - '*.yaml' 4 | - '*.yml' 5 | 6 | rules: 7 | braces: 8 | forbid: non-empty 9 | min-spaces-inside-empty: 0 10 | max-spaces-inside-empty: 0 11 | brackets: enable 12 | colons: enable 13 | commas: enable 14 | comments: 15 | level: warning 16 | comments-indentation: 17 | level: warning 18 | document-end: disable 19 | document-start: disable 20 | empty-lines: enable 21 | empty-values: disable 22 | float-values: 23 | forbid-inf: true 24 | forbid-nan: true 25 | forbid-scientific-notation: true 26 | require-numeral-before-decimal: true 27 | hyphens: enable 28 | indentation: enable 29 | key-duplicates: enable 30 | key-ordering: disable 31 | line-length: disable 32 | new-line-at-end-of-file: disable 33 | new-lines: enable 34 | octal-values: enable 35 | quoted-strings: disable 36 | trailing-spaces: enable 37 | truthy: 38 | level: error 39 | check-keys: false 40 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Code of Conduct 2 | 3 | This project has adopted the 4 | [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 5 | 6 | For more information see the 7 | [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 8 | [opensource-codeofconduct@amazon.com](mailto:opensource-codeofconduct@amazon.com) 9 | with any additional questions or comments. 
10 | -------------------------------------------------------------------------------- /Makefile.tox: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | # Files to work with 5 | SRC_DIR := ./src 6 | TEST_CONFIGS := $(shell find $(SRC_DIR) -name 'pytest.ini') 7 | 8 | all: test lint 9 | 10 | .PHONY: all test lint 11 | 12 | test: 13 | # Run unit tests 14 | @ $(foreach config,$(TEST_CONFIGS), \ 15 | pytest $$(dirname $(config)) -vvv -s -c $(config) || exit 1; \ 16 | ) 17 | 18 | lint: 19 | # Linter performs static analysis to catch latent bugs 20 | find $(SRC_DIR) -iname "*.py" | xargs pylint --verbose --rcfile .pylintrc 21 | find $(SRC_DIR) -iname "*.yml" -o -iname "*.yaml" | xargs yamllint -c .yamllint.yml 22 | cfn-lint 23 | -------------------------------------------------------------------------------- /NOTICE.md: -------------------------------------------------------------------------------- 1 | # Notice 2 | 3 | AWS Deployment Framework 4 | 5 | Copyright Amazon.com Inc. or its affiliates. 6 | -------------------------------------------------------------------------------- /docs/images/TechnicalGuide-AccountManagementStateMachine.drawio.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/aws-deployment-framework/325ae13fb83aad600d81494b8694362aae8f7e13/docs/images/TechnicalGuide-AccountManagementStateMachine.drawio.png -------------------------------------------------------------------------------- /docs/images/TechnicalGuide-BootstrapRepo.drawio.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/aws-deployment-framework/325ae13fb83aad600d81494b8694362aae8f7e13/docs/images/TechnicalGuide-BootstrapRepo.drawio.png -------------------------------------------------------------------------------- /docs/images/adf-pipeline-high-level.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/aws-deployment-framework/325ae13fb83aad600d81494b8694362aae8f7e13/docs/images/adf-pipeline-high-level.png -------------------------------------------------------------------------------- /docs/images/app-deploying.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/aws-deployment-framework/325ae13fb83aad600d81494b8694362aae8f7e13/docs/images/app-deploying.png -------------------------------------------------------------------------------- /docs/images/approval.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/aws-deployment-framework/325ae13fb83aad600d81494b8694362aae8f7e13/docs/images/approval.png -------------------------------------------------------------------------------- /docs/images/aws-multi-org-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/aws-deployment-framework/325ae13fb83aad600d81494b8694362aae8f7e13/docs/images/aws-multi-org-1.png -------------------------------------------------------------------------------- /docs/images/aws-multi-org-2.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/awslabs/aws-deployment-framework/325ae13fb83aad600d81494b8694362aae8f7e13/docs/images/aws-multi-org-2.png -------------------------------------------------------------------------------- /docs/images/aws-multi-org-adf-config-multi-organization.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/aws-deployment-framework/325ae13fb83aad600d81494b8694362aae8f7e13/docs/images/aws-multi-org-adf-config-multi-organization.png -------------------------------------------------------------------------------- /docs/images/cc-repos.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/aws-deployment-framework/325ae13fb83aad600d81494b8694362aae8f7e13/docs/images/cc-repos.png -------------------------------------------------------------------------------- /docs/images/cfn-output.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/aws-deployment-framework/325ae13fb83aad600d81494b8694362aae8f7e13/docs/images/cfn-output.png -------------------------------------------------------------------------------- /docs/images/cp-building.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/aws-deployment-framework/325ae13fb83aad600d81494b8694362aae8f7e13/docs/images/cp-building.png -------------------------------------------------------------------------------- /docs/images/create-account.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/aws-deployment-framework/325ae13fb83aad600d81494b8694362aae8f7e13/docs/images/create-account.png -------------------------------------------------------------------------------- /docs/images/create-ou.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/aws-deployment-framework/325ae13fb83aad600d81494b8694362aae8f7e13/docs/images/create-ou.png -------------------------------------------------------------------------------- /docs/images/ecr-pipeline-running.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/aws-deployment-framework/325ae13fb83aad600d81494b8694362aae8f7e13/docs/images/ecr-pipeline-running.png -------------------------------------------------------------------------------- /docs/images/new-ous.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/aws-deployment-framework/325ae13fb83aad600d81494b8694362aae8f7e13/docs/images/new-ous.png -------------------------------------------------------------------------------- /docs/images/run-state-machine.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/aws-deployment-framework/325ae13fb83aad600d81494b8694362aae8f7e13/docs/images/run-state-machine.png -------------------------------------------------------------------------------- /docs/images/stack-complete.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/aws-deployment-framework/325ae13fb83aad600d81494b8694362aae8f7e13/docs/images/stack-complete.png 
-------------------------------------------------------------------------------- /docs/images/step-func-complete.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/aws-deployment-framework/325ae13fb83aad600d81494b8694362aae8f7e13/docs/images/step-func-complete.png -------------------------------------------------------------------------------- /docs/images/vpc-pipeline.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/aws-deployment-framework/325ae13fb83aad600d81494b8694362aae8f7e13/docs/images/vpc-pipeline.png -------------------------------------------------------------------------------- /docs/technical-guide.md: -------------------------------------------------------------------------------- 1 | # Technical Guide 2 | 3 | ## Introduction 4 | 5 | This document is intended to give insight into how the AWS Deployment Framework 6 | works under the hood. 7 | 8 | ## High Level Overview - AWS Deployment Framework Bootstrap Repository 9 | 10 | The AWS Deployment Framework Bootstrap Repository aka "Bootstrap Repo" is where 11 | the source code used by ADF lives. The bootstrap repo is also where your 12 | accounts, OU layout, and base templates are defined. 13 | The flow below is a high level overview of what happens when a change is 14 | committed to this repository. 15 | 16 | ![bootstrap-repo-overview](images/TechnicalGuide-BootstrapRepo.drawio.png) 17 | 18 | ### Account Management State Machine 19 | 20 | The Account Management State Machine is triggered by S3 PUT events to the ADF 21 | Accounts bucket. Below is a diagram detailing the components of the standard 22 | state machine. This state machine is defined in `src/account_processing.yml` and 23 | the Lambda function code is located in 24 | `src/lambda_codebase/account_processing`. 25 | 26 | ![account-management-state-machine](images/TechnicalGuide-AccountManagementStateMachine.drawio.png) 27 | 28 | ## High Level Overview - AWS Deployment Framework Pipeline Repository 29 | 30 | The AWS Deployment Framework Pipeline Repository aka "Pipeline Repo" is where 31 | the deployment map definitions live. It typically exists in CodeCommit within 32 | your Deployment Account(s). The diagram below details what happens when a commit 33 | is pushed to this repository. 34 | 35 | ![pipeline-repo-overview](images/adf-pipeline-high-level.png) 36 | 37 | ### Pipeline Management State Machine 38 | 39 | The Pipeline Management State Machine is triggered by S3 PUT events to the ADF 40 | Pipelines bucket. This state machine is responsible for expanding the deployment 41 | map, resolving the targets, creating pipeline definitions (JSON objects that 42 | detail the source(s), the stages involved, and the targets), and then generating 43 | CDK stacks from those definitions. 44 | 45 | It additionally covers the deletion of stale pipelines. A stale pipeline is any 46 | pipeline that was created by ADF initially, but is no longer defined in a 47 | deployment map.
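For reference, below is a minimal sketch of the kind of deployment map entry this state machine expands. It follows the same format as the samples in this repository; the pipeline name, account ID, and OU paths are illustrative only:

```yaml
- name: sample-app  # Name of the pipeline and, by default, of its source repository.
  default_providers:
    source:
      provider: codecommit
      properties:
        account_id: 111111111111  # Illustrative source account ID.
    build:
      provider: codebuild
  targets:  # OU paths are resolved to the accounts within those OUs.
    - /banking/testing
    - /banking/production
```

Each such entry is expanded into a pipeline definition and, from there, into a CDK stack that deploys the corresponding AWS CodePipeline resources.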
48 | -------------------------------------------------------------------------------- /linters/custom-adf-dict.txt: -------------------------------------------------------------------------------- 1 | !blacklist 2 | !whitelist 3 | adf 4 | adfconfig 5 | apogorielov 6 | awscli 7 | backoff 8 | benbridts 9 | binfmt 10 | bitnami 11 | boto 12 | boto3 13 | botocore 14 | bundyfx 15 | cfnlintrc 16 | chattr 17 | chkconfig 18 | chsh 19 | cicd 20 | codepipelinecodeartifactpipelinetriggermytestrepoall 21 | codepipelinecodeartifactpipelinetriggermytestrepomytestpackage 22 | codeql 23 | corretto 24 | crhelper 25 | datacls 26 | deregistration 27 | devsecops 28 | drawio 29 | dserver 30 | dsudduth 31 | ecrc 32 | epel 33 | fargate 34 | hadolint 35 | iname 36 | infinidash 37 | javydekoning 38 | mhdaehnert 39 | msvs 40 | mymodule 41 | mypackage 42 | norecursedirs 43 | ntwobike 44 | ouid 45 | oxsecurity 46 | pipelinenoti 47 | pozeus 48 | pygtk 49 | pylintrc 50 | rcfile 51 | releasever 52 | rexec 53 | rickardl 54 | runas 55 | sarif 56 | sbkok 57 | scps 58 | sdkman 59 | skycolangelom 60 | srabidoux 61 | SSEKMS 62 | stefanzweifel 63 | stubber 64 | tfapply 65 | tfdestroy 66 | tfinit 67 | tflint 68 | tflocktable 69 | tfplandestroy 70 | tfrun 71 | tfstate 72 | tfvars 73 | toxinidir 74 | tylergohl 75 | unconfigured 76 | urlize 77 | vpcid 78 | zstd 79 | -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | cfn-lint~=0.86.2 2 | isort==5.13.2 3 | mock==5.1.0 4 | pylint==3.1.0 5 | pytest~=8.1.1 6 | pytest-cov==5.0.0 7 | tox==4.14.2 8 | yamllint==1.35.1 9 | yq==3.2.3 10 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | astroid==3.1.0 2 | aws-sam-cli==1.114.0 3 | boto3==1.34.80 4 | botocore==1.34.80 5 | pyyaml~=6.0.1 6 | schema==0.7.5 7 | -------------------------------------------------------------------------------- /resources/OrganizationAccountAccessRole.yaml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | AWSTemplateFormatVersion: "2010-09-09" 5 | Description: >- 6 | Organizational Account Access Role for Cross-Account automation 7 | 8 | Parameters: 9 | RoleName: 10 | Type: String 11 | Description: >- 12 | The name of the Cross-Account role 13 | Default: OrganizationAccountAccessRole 14 | 15 | AdministratorAccountId: 16 | Type: String 17 | Description: >- 18 | AWS Account Id of the administrator account 19 | (the account in which StackSets will be created). 
20 | MaxLength: 12 21 | MinLength: 12 22 | 23 | Resources: 24 | OrganizationAccountAccessRole: 25 | Type: AWS::IAM::Role 26 | Properties: 27 | RoleName: !Ref RoleName 28 | AssumeRolePolicyDocument: 29 | Version: 2012-10-17 30 | Statement: 31 | - Effect: Allow 32 | Principal: 33 | AWS: 34 | - !Ref AdministratorAccountId 35 | Action: 36 | - sts:AssumeRole 37 | Path: / 38 | ManagedPolicyArns: 39 | - !Sub arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess 40 | 41 | Outputs: 42 | RoleArn: 43 | Description: The ARN of the Organization Account Access Role 44 | Value: !GetAtt OrganizationAccountAccessRole.Arn 45 | Export: 46 | Name: !Sub "${AWS::StackName}-RoleArn" 47 | -------------------------------------------------------------------------------- /samples/sample-cdk-app/.gitignore: -------------------------------------------------------------------------------- 1 | cdk.out/ 2 | node_modules/ 3 | package-lock.json 4 | -------------------------------------------------------------------------------- /samples/sample-cdk-app/README.md: -------------------------------------------------------------------------------- 1 | # Sample CDK Application to showcase ADF Pipelines 2 | 3 | This pipeline is expecting *(in the example case)* an AWS CodeCommit repository 4 | on the account `111111111111` in your main deployment region named 5 | *sample-cdk-application*. 6 | 7 | ## Deployment Map example 8 | 9 | ```yaml 10 | - name: sample-cdk-application 11 | default_providers: 12 | source: 13 | provider: codecommit 14 | properties: 15 | account_id: 111111111111 16 | build: 17 | provider: codebuild 18 | properties: 19 | image: "STANDARD_7_0" 20 | targets: 21 | - /banking/testing 22 | - /banking/production 23 | ``` 24 | -------------------------------------------------------------------------------- /samples/sample-cdk-app/buildspec.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | version: 0.2 5 | 6 | phases: 7 | install: 8 | runtime-versions: 9 | python: 3.12 10 | nodejs: 20 11 | commands: 12 | - aws s3 cp s3://$S3_BUCKET_NAME/adf-build/ adf-build/ --recursive --only-show-errors 13 | - pip install -r adf-build/requirements.txt -q 14 | - python adf-build/generate_params.py 15 | 16 | build: 17 | commands: 18 | - npm install aws-cdk -g 19 | - npm install 20 | - npm run build 21 | - cdk synth > template.yml 22 | 23 | artifacts: 24 | files: 25 | - 'template.yml' 26 | - 'params/*.json' 27 | - 'params/*.yml' 28 | -------------------------------------------------------------------------------- /samples/sample-cdk-app/cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "npx ts-node --prefer-ts-exts index.ts" 3 | } 4 | -------------------------------------------------------------------------------- /samples/sample-cdk-app/handler.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | def main(event, context): 5 | print("I'm running!") 6 | -------------------------------------------------------------------------------- /samples/sample-cdk-app/index.ts: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com Inc. or its affiliates. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | import { App, Stack, Duration } from 'aws-cdk-lib'; 5 | import { aws_lambda as lambda, aws_events as events, aws_events_targets as targets } from 'aws-cdk-lib'; 6 | 7 | import fs = require('fs'); 8 | 9 | export class LambdaCronStack extends Stack { 10 | constructor(app: App, id: string) { 11 | super(app, id); 12 | 13 | const lambdaFn = new lambda.Function(this, 'Singleton', { 14 | code: new lambda.InlineCode( 15 | fs.readFileSync('handler.py', { encoding: 'utf-8' }), 16 | ), 17 | handler: 'index.main', 18 | timeout: Duration.seconds(300), 19 | runtime: lambda.Runtime.PYTHON_3_12 20 | }); 21 | // Run every weekday (MON-FRI) at 6PM UTC 22 | // See https://docs.aws.amazon.com/lambda/latest/dg/tutorial-scheduled-events-schedule-expressions.html 23 | const rule = new events.Rule(this, 'Rule', { 24 | schedule: events.Schedule.expression('cron(0 18 ? * MON-FRI *)') 25 | }); 26 | rule.addTarget(new targets.LambdaFunction(lambdaFn)); 27 | } 28 | } 29 | 30 | const app = new App(); 31 | new LambdaCronStack(app, 'LambdaCronExample'); 32 | app.synth(); 33 | -------------------------------------------------------------------------------- /samples/sample-cdk-app/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "lambda-cron", 3 | "version": "0.24.0", 4 | "description": "Running a Lambda on a schedule", 5 | "private": true, 6 | "scripts": { 7 | "build": "tsc", 8 | "watch": "tsc -w", 9 | "cdk": "cdk" 10 | }, 11 | "author": { 12 | "name": "Amazon Web Services", 13 | "url": "https://aws.amazon.com", 14 | "organization": true 15 | }, 16 | "license": "Apache-2.0", 17 | "devDependencies": { 18 | "@types/node": "^20.0.0", 19 | "typescript": "^5.4.4" 20 | }, 21 | "dependencies": { 22 | "aws-cdk-lib": "^2.135.0", 23 | "constructs": "^10.3.0" 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /samples/sample-cdk-app/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2018", 4 | "module": "commonjs", 5 | "lib": [ 6 | "es2016", 7 | "es2017.object", 8 | "es2017.string" 9 | ], 10 | "strict": true, 11 | "noImplicitAny": true, 12 | "strictNullChecks": true, 13 | "noImplicitThis": true, 14 | "alwaysStrict": true, 15 | "noUnusedLocals": true, 16 | "noUnusedParameters": true, 17 | "noImplicitReturns": true, 18 | "noFallthroughCasesInSwitch": false, 19 | "inlineSourceMap": true, 20 | "inlineSources": true, 21 | "experimentalDecorators": true, 22 | "strictPropertyInitialization": false 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /samples/sample-cdk-bootstrap/README.md: -------------------------------------------------------------------------------- 1 | # Sample CDK Bootstrap pipeline 2 | 3 | This pipeline is expecting *(in the example case)* an AWS CodeCommit repository 4 | on the account `111111111111` in your main deployment region named 5 | *sample-cdk-bootstrap*.
6 | 7 | ## Deployment Map example 8 | 9 | ```yaml 10 | - name: sample-cdk-bootstrap 11 | default_providers: 12 | source: 13 | provider: codecommit 14 | properties: 15 | account_id: 111111111111 16 | build: 17 | provider: codebuild 18 | properties: 19 | image: "STANDARD_7_0" 20 | targets: 21 | - /banking/testing 22 | - /banking/production 23 | ``` 24 | -------------------------------------------------------------------------------- /samples/sample-cdk-bootstrap/buildspec.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | version: 0.2 5 | 6 | phases: 7 | install: 8 | runtime-versions: 9 | python: 3.12 10 | nodejs: 20 11 | commands: 12 | - aws s3 cp s3://$S3_BUCKET_NAME/adf-build/ adf-build/ --recursive --quiet 13 | - pip install -r adf-build/requirements.txt -q 14 | - python adf-build/generate_params.py 15 | 16 | build: 17 | commands: 18 | - npm install aws-cdk -g 19 | - cdk bootstrap --show-template > template.yml 20 | 21 | artifacts: 22 | files: '**/*' 23 | -------------------------------------------------------------------------------- /samples/sample-codebuild-vpc/README.md: -------------------------------------------------------------------------------- 1 | # Sample CodeBuild VPC usage showcasing ADF Pipelines 2 | 3 | This pipeline demonstrates how to set up CodeBuild to use a specific VPC. 4 | 5 | **Please note**: Before you can deploy CodeBuild in a VPC, you need to follow 6 | the instructions as described in the CodeBuild provider documentation at: 7 | [docs/providers-guide.md](../../docs/providers-guide.md#setup-permissions-for-codebuild-vpc-usage). 8 | This is only required once to allow the CodeBuild service to locate and create 9 | the required resources. Once configured, the permissions allow any pipeline to 10 | make use of VPCs when running CodeBuild steps. 11 | 12 | Back to the sample: The pipeline deploys a simple S3 bucket without granting 13 | any permissions. The point of this sample is to demonstrate how different 14 | build and deployment stages can use CodeBuild in a VPC to connect to internal 15 | resources. 16 | 17 | Create a new repository that will host the files that are contained inside 18 | this sample folder. 19 | 20 | Update the `vpc_id`, `subnet_ids`, and `security_group_ids` attributes to match 21 | your own VPC and subnets that are operational in the deployment account.
22 | 23 | ## Deployment Map example 24 | 25 | ```yaml 26 | - name: sample-codebuild-vpc 27 | default_providers: 28 | source: 29 | provider: codecommit 30 | properties: 31 | account_id: 111111111111 32 | build: 33 | provider: codebuild 34 | properties: 35 | image: "STANDARD_7_0" 36 | vpc_id: vpc-01234567890abcdef 37 | subnet_ids: 38 | - subnet-1234567890abcdef1 39 | - subnet-bcdef01234567890a 40 | deploy: 41 | provider: cloudformation 42 | targets: 43 | - /banking/testing 44 | - name: integration-tests 45 | provider: codebuild 46 | properties: 47 | image: "STANDARD_7_0" 48 | spec_filename: testspec.yml 49 | vpc_id: vpc-01234567890abcdef 50 | subnet_ids: 51 | - subnet-1234567890abcdef1 52 | - subnet-bcdef01234567890a 53 | security_group_ids: 54 | - sg-234567890abcdef01 55 | - sg-cdef01234567890ab 56 | - /banking/production 57 | ``` 58 | -------------------------------------------------------------------------------- /samples/sample-codebuild-vpc/buildspec.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | version: 0.2 5 | 6 | phases: 7 | install: 8 | runtime-versions: 9 | python: 3.12 10 | commands: 11 | # CodeBuild will connect through the VPC to fetch all the resources. 12 | # Make sure the subnets and security groups are configured such that 13 | # it is able to connect to S3 and fetch the requirements using pip. 14 | # 15 | # If you want to restrict public access, you can create a local copy 16 | # of the required pip packages and use AWS PrivateLink for S3. 17 | - aws s3 cp s3://$S3_BUCKET_NAME/adf-build/ adf-build/ --recursive --only-show-errors 18 | - pip install -r adf-build/requirements.txt -q 19 | 20 | build: 21 | commands: 22 | - python adf-build/generate_params.py 23 | 24 | artifacts: 25 | files: 26 | - 'template.yml' 27 | - 'params/*.json' 28 | - 'params/*.yml' 29 | -------------------------------------------------------------------------------- /samples/sample-codebuild-vpc/params/global.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | Tags: 5 | Repository: sample-codebuild-vpc-repo 6 | App: Sample CodeBuild VPC application 7 | -------------------------------------------------------------------------------- /samples/sample-codebuild-vpc/template.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | AWSTemplateFormatVersion: '2010-09-09' 5 | Description: ADF CloudFormation Sample Template 6 | Metadata: 7 | License: Apache-2.0 8 | Resources: 9 | Bucket: 10 | Type: AWS::S3::Bucket 11 | Properties: 12 | BucketEncryption: 13 | ServerSideEncryptionConfiguration: 14 | - ServerSideEncryptionByDefault: 15 | SSEAlgorithm: AES256 16 | VersioningConfiguration: 17 | Status: Enabled 18 | PublicAccessBlockConfiguration: 19 | BlockPublicAcls: true 20 | BlockPublicPolicy: true 21 | IgnorePublicAcls: true 22 | RestrictPublicBuckets: true 23 | -------------------------------------------------------------------------------- /samples/sample-codebuild-vpc/testspec.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates.
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | version: 0.2 5 | 6 | phases: 7 | install: 8 | runtime-versions: 9 | python: 3.12 10 | 11 | build: 12 | commands: 13 | # A sample API call to an internal-only service to perform the 14 | # integration tests. 15 | - curl https://integration-test-url.internal/test 16 | -------------------------------------------------------------------------------- /samples/sample-ec2-java-app-codedeploy/.mvn/wrapper/maven-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/aws-deployment-framework/325ae13fb83aad600d81494b8694362aae8f7e13/samples/sample-ec2-java-app-codedeploy/.mvn/wrapper/maven-wrapper.jar -------------------------------------------------------------------------------- /samples/sample-ec2-java-app-codedeploy/.mvn/wrapper/maven-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.3.9/apache-maven-3.3.9-bin.zip 2 | -------------------------------------------------------------------------------- /samples/sample-ec2-java-app-codedeploy/README.md: -------------------------------------------------------------------------------- 1 | # Sample Spring Boot Java application Running on EC2 deployed via CodePipeline 2 | 3 | This example is coupled with the `sample-ec2-with-codedeploy` repository and 4 | is aimed at showcasing how to deploy a basic Java Spring Boot application with 5 | [AWS CodeDeploy](https://docs.aws.amazon.com/codedeploy/latest/userguide/welcome.html) 6 | via ADF. 7 | 8 | ## Deployment Map example 9 | 10 | ```yaml 11 | - name: sample-ec2-java-app-codedeploy 12 | # ^ A CodeCommit repo would be created automatically on the source account 13 | # if it did not exist with this name, provided you are using CodeCommit as 14 | # a source below. 15 | default_providers: 16 | source: 17 | provider: codecommit 18 | properties: 19 | account_id: 111111111111 20 | build: 21 | provider: codebuild 22 | properties: 23 | image: "STANDARD_5_0" 24 | # ^ Since we're building a Java application here we want to use 25 | # STANDARD_5_0 (Ubuntu) as our base CodeBuild image. That way we can 26 | # tell it to have Java ready for us so we can build, compile and test 27 | # our application. 28 | deploy: 29 | provider: codedeploy 30 | # ^ We will deploy our application with AWS CodeDeploy. 31 | targets: 32 | - path: 9999999999 33 | # ^ In this example we only want to deploy to a single AWS Account, 34 | # so we include its account ID here. 35 | properties: 36 | # These are parameters for this specific stage in the pipeline. 37 | # CodeDeploy needs to know which application and deployment group 38 | # it should use to deploy. These resources would typically be deployed 39 | # in a different stack as they are more part of the infrastructure to 40 | # support the application as opposed to the application itself. 41 | application_name: sample 42 | deployment_group_name: testing-sample 43 | # ^ See https://docs.aws.amazon.com/codedeploy/latest/userguide/deployment-groups.html 44 | ``` 45 | -------------------------------------------------------------------------------- /samples/sample-ec2-java-app-codedeploy/appspec.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates.
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | version: 0.0 5 | os: linux 6 | 7 | files: 8 | - source: / 9 | destination: /home/ec2-user/server 10 | 11 | permissions: 12 | - object: / 13 | pattern: "**" 14 | owner: ec2-user 15 | group: ec2-user 16 | 17 | hooks: 18 | ApplicationStop: 19 | - location: stop.sh 20 | timeout: 20 21 | runas: ec2-user 22 | ApplicationStart: 23 | - location: start.sh 24 | timeout: 20 25 | runas: ec2-user 26 | ValidateService: 27 | - location: validate.sh 28 | timeout: 120 29 | runas: ec2-user 30 | -------------------------------------------------------------------------------- /samples/sample-ec2-java-app-codedeploy/buildspec.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | version: 0.2 5 | 6 | phases: 7 | install: 8 | runtime-versions: 9 | java: corretto8 10 | 11 | build: 12 | commands: 13 | - mvn clean package --quiet 14 | 15 | artifacts: 16 | discard-paths: yes # yamllint disable-line rule:truthy 17 | files: 18 | - target/* 19 | - scripts/* 20 | - appspec.yml 21 | -------------------------------------------------------------------------------- /samples/sample-ec2-java-app-codedeploy/scripts/start.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Copyright Amazon.com Inc. or its affiliates. 4 | # SPDX-License-Identifier: Apache-2.0 5 | 6 | cd /home/ec2-user/server 7 | sudo /usr/bin/java -jar -Dserver.port=80 \ 8 | *.jar > /dev/null 2> /dev/null < /dev/null & 9 | -------------------------------------------------------------------------------- /samples/sample-ec2-java-app-codedeploy/scripts/stop.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Copyright Amazon.com Inc. or its affiliates. 4 | # SPDX-License-Identifier: Apache-2.0 5 | 6 | sudo killall java 7 | exit 0 8 | -------------------------------------------------------------------------------- /samples/sample-ec2-java-app-codedeploy/scripts/validate.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Copyright Amazon.com Inc. or its affiliates. 4 | # SPDX-License-Identifier: Apache-2.0 5 | 6 | echo "Waiting for 15 seconds before checking health..." 7 | sleep 15 8 | 9 | status_code=$(curl --write-out %{http_code} --silent --output /dev/null http://localhost:80) 10 | if [[ "$status_code" -ne 200 ]]; then 11 | echo "App is not healthy - $status_code" 12 | exit 1 13 | else 14 | echo "App is responding with $status_code" 15 | exit 0 16 | fi 17 | -------------------------------------------------------------------------------- /samples/sample-ec2-java-app-codedeploy/src/main/java/hello/Application.java: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com Inc. or its affiliates.
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | package hello; 5 | 6 | import org.springframework.boot.SpringApplication; 7 | import org.springframework.boot.autoconfigure.SpringBootApplication; 8 | import org.springframework.web.bind.annotation.RequestMapping; 9 | import org.springframework.web.bind.annotation.RestController; 10 | 11 | @SpringBootApplication 12 | @RestController 13 | public class Application { 14 | 15 | @RequestMapping("/") 16 | public String home() { 17 | return "Hello from Spring Boot!"; 18 | } 19 | 20 | public static void main(String[] args) { 21 | SpringApplication.run(Application.class, args); 22 | } 23 | 24 | } 25 | -------------------------------------------------------------------------------- /samples/sample-ec2-java-app-codedeploy/src/resources/application.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | server: 5 | port: 8080 6 | 7 | spring: 8 | application: 9 | name: testLatticeApp 10 | 11 | ribbon: 12 | ServerListRefreshInterval: 1000 13 | 14 | endpoints: 15 | health: 16 | sensitive: false 17 | restart: 18 | enabled: true 19 | shutdown: 20 | enabled: true 21 | -------------------------------------------------------------------------------- /samples/sample-ec2-with-codedeploy/README.md: -------------------------------------------------------------------------------- 1 | # Sample EC2 Application Stack with CodeDeploy Components 2 | 3 | This example is coupled with the `sample-ec2-java-app-codedeploy` repository and 4 | is aimed at showcasing how to deploy a basic Spring Boot application with 5 | [AWS CodeDeploy](https://docs.aws.amazon.com/codedeploy/latest/userguide/welcome.html) 6 | via ADF. 7 | 8 | This stack is a generic stack for applications that run on Amazon EC2. 9 | This stack could be extended and used as a base for all line-of-business type 10 | applications that run on Amazon EC2. 11 | 12 | This stack also requires `sample-vpc` and `sample-iam` to be deployed, as it 13 | imports resources directly from both of them. 14 | 15 | ## Prerequisites 16 | 17 | This sample stack depends on resources in `sample-iam` and `sample-vpc`. 18 | 19 | ## Deployment Map example 20 | 21 | ```yaml 22 | - name: sample-ec2-app-codedeploy 23 | default_providers: 24 | source: 25 | provider: codecommit 26 | properties: 27 | account_id: 111111111111 28 | build: 29 | provider: codebuild 30 | properties: 31 | image: "STANDARD_7_0" # So we can specify which Python version we need 32 | targets: 33 | - /banking/testing 34 | - /banking/production 35 | ``` 36 | -------------------------------------------------------------------------------- /samples/sample-ec2-with-codedeploy/buildspec.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates.
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | version: 0.2 5 | 6 | phases: 7 | install: 8 | runtime-versions: 9 | python: 3.12 10 | commands: 11 | - aws s3 cp s3://$S3_BUCKET_NAME/adf-build/ adf-build/ --recursive --only-show-errors 12 | - pip install -r adf-build/requirements.txt -q 13 | - python adf-build/generate_params.py 14 | 15 | artifacts: 16 | files: 17 | - 'template.yml' 18 | - 'params/*.json' 19 | - 'params/*.yml' 20 | -------------------------------------------------------------------------------- /samples/sample-ec2-with-codedeploy/params/global.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | Parameters: 5 | Environment: "testing" 6 | ApplicationName: "sample" 7 | InstanceMaxSize: "3" 8 | InstanceMinSize: "1" 9 | ImageId: "resolve:/aws/service/ami-amazon-linux-latest/al2023-ami-kernel-default-x86_64" 10 | InstanceType: "t3.micro" 11 | CodeDeployAgentInstallScript: "upload:path:scripts/install-codedeploy.sh" 12 | JavaInstallScript: "upload:path:scripts/install-deps.sh" 13 | -------------------------------------------------------------------------------- /samples/sample-ec2-with-codedeploy/scripts/install-codedeploy.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Copyright Amazon.com Inc. or its affiliates. 4 | # SPDX-License-Identifier: Apache-2.0 5 | 6 | set -xe 7 | 8 | ## Code Deploy Agent Bootstrap Script ## 9 | 10 | exec > >(sudo tee /var/log/user-data.log | logger -t user-data -s 2> /dev/console) 2>&1 11 | AUTOUPDATE=false 12 | 13 | function installdep() { 14 | echo "Installing dependencies..." 15 | if [ "${PLAT}" = "ubuntu" ]; then 16 | apt-get -y update 17 | # This also covers older Ubuntu versions. 18 | apt-get -y install jq awscli ruby2.0 || apt-get -y install jq awscli ruby 19 | elif [ "${PLAT}" = "amz" ]; then 20 | yum -y update 21 | yum install -y aws-cli ruby jq 22 | fi 23 | echo "Done installing dependencies." 24 | } 25 | 26 | function platformize() { 27 | # Linux OS detection 28 | if hash lsb_release; then 29 | echo "Ubuntu server OS detected" 30 | export PLAT="ubuntu" 31 | elif hash yum; then 32 | echo "Amazon Linux detected" 33 | export PLAT="amz" 34 | else 35 | echo "Unsupported release" 36 | exit 1 37 | fi 38 | } 39 | 40 | function execute() { 41 | if [[ "${PLAT}" = "ubuntu" ]] || [[ "${PLAT}" = "amz" ]]; then 42 | echo "Downloading CodeDeploy Agent..." 43 | cd /tmp/ 44 | wget https://aws-codedeploy-${REGION}.s3.${REGION}.amazonaws.com/latest/install 45 | chmod +x ./install 46 | 47 | echo "Installing CodeDeploy Agent..."
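# Note: the 'auto' argument passed to the AWS-provided install script below
# asks it to detect the platform and install the latest CodeDeploy agent release.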
48 | if ./install auto; then 49 | echo "Installation completed" 50 | exit 0 51 | else 52 | echo "Installation script failed, please investigate" 53 | rm -f /tmp/install 54 | exit 1 55 | fi 56 | 57 | else 58 | echo "Unsupported platform '${PLAT}'"; exit 1 59 | fi 60 | } 61 | 62 | platformize 63 | installdep 64 | export TOKEN=$(curl -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 21600") 65 | export REGION=$(curl -H "X-aws-ec2-metadata-token: ${TOKEN}" -s http://169.254.169.254/latest/dynamic/instance-identity/document | jq -r ".region") 66 | execute 67 | -------------------------------------------------------------------------------- /samples/sample-ec2-with-codedeploy/scripts/install-deps.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Copyright Amazon.com Inc. or its affiliates. 4 | # SPDX-License-Identifier: Apache-2.0 5 | 6 | set -xe 7 | 8 | # install apache httpd 9 | sudo yum install httpd -y 10 | 11 | # install SDKMAN (used below to install the Spring Boot CLI) 12 | curl -s "https://get.sdkman.io" | bash 13 | source "$HOME/.sdkman/bin/sdkman-init.sh" 14 | 15 | # install Java 16 | sudo yum install -y java-17-amazon-corretto-headless 17 | 18 | # install Maven 19 | yum -y update 20 | sudo yum install -y maven 21 | 22 | # print the installed versions 23 | java -version 24 | mvn --version 25 | 26 | # install the Spring Boot CLI 27 | sdk install springboot 28 | 29 | # create a springboot user to run the app as a service 30 | sudo useradd springboot 31 | # springboot login shell disabled 32 | sudo usermod --shell /sbin/nologin springboot 33 | 34 | # forward port 80 to 8080 35 | echo " 36 | <VirtualHost *:80> 37 | ProxyRequests Off 38 | ProxyPass / http://localhost:8080/ 39 | ProxyPassReverse / http://localhost:8080/ 40 | </VirtualHost> 41 | " | sudo tee -a /etc/httpd/conf/httpd.conf > /dev/null 42 | 43 | # start the httpd service now and stop it again; it will start at boot 44 | sudo systemctl start httpd 45 | sudo systemctl stop httpd 46 | 47 | # ensure httpd starts automatically on boot 48 | sudo systemctl enable httpd 49 | -------------------------------------------------------------------------------- /samples/sample-ecr-repository/README.md: -------------------------------------------------------------------------------- 1 | # Sample ECR Repository to showcase ADF Pipelines 2 | 3 | ## Deployment Map example 4 | 5 | ```yaml 6 | - name: sample-ecr-repository 7 | default_providers: 8 | source: 9 | provider: codecommit 10 | properties: 11 | account_id: 111111111111 12 | build: 13 | provider: codebuild 14 | properties: 15 | image: "STANDARD_7_0" 16 | targets: 17 | - /deployment 18 | ``` 19 | -------------------------------------------------------------------------------- /samples/sample-ecr-repository/buildspec.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | version: 0.2 5 | 6 | phases: 7 | install: 8 | runtime-versions: 9 | python: 3.12 10 | commands: 11 | - aws s3 cp s3://$S3_BUCKET_NAME/adf-build/ adf-build/ --recursive --only-show-errors 12 | - pip install -r adf-build/requirements.txt -q 13 | - python adf-build/generate_params.py 14 | 15 | artifacts: 16 | files: 17 | - 'template.yml' 18 | - 'params/*.json' 19 | - 'params/*.yml' 20 | -------------------------------------------------------------------------------- /samples/sample-ecr-repository/params/global.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc.
or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | Parameters: 5 | TestingAccountId: "111111111111" 6 | ProductionAccountId: "999999999999" 7 | 8 | Tags: 9 | TagKey: "TagValue" 10 | MyKey: "MyValue" 11 | -------------------------------------------------------------------------------- /samples/sample-ecs-cluster/README.md: -------------------------------------------------------------------------------- 1 | # Sample ECS Cluster to showcase ADF Pipelines 2 | 3 | ## Prerequisites 4 | 5 | Please make sure you deploy the `sample-vpc` example before you deploy 6 | this sample. The VPC should be deployed to the same target accounts and region. 7 | 8 | ## Deployment Map example 9 | 10 | ```yaml 11 | - name: sample-ecs-cluster 12 | default_providers: 13 | source: 14 | provider: codecommit 15 | properties: 16 | account_id: 111111111111 17 | build: 18 | provider: codebuild 19 | properties: 20 | image: "STANDARD_7_0" # So we can specify which Python version we need 21 | targets: 22 | - 222222222222 23 | - path: 333333333333 24 | regions: eu-west-1 25 | name: production 26 | ``` 27 | -------------------------------------------------------------------------------- /samples/sample-ecs-cluster/buildspec.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | version: 0.2 5 | 6 | phases: 7 | install: 8 | runtime-versions: 9 | python: 3.12 10 | commands: 11 | - aws s3 cp s3://$S3_BUCKET_NAME/adf-build/ adf-build/ --recursive --only-show-errors 12 | - pip install -r adf-build/requirements.txt -q 13 | - python adf-build/generate_params.py 14 | 15 | artifacts: 16 | files: 17 | - 'template.yml' 18 | - 'params/*.json' 19 | - 'params/*.yml' 20 | -------------------------------------------------------------------------------- /samples/sample-ecs-cluster/params/banking-production.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | Parameters: 5 | Environment: "production" 6 | -------------------------------------------------------------------------------- /samples/sample-ecs-cluster/params/global.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates.
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | Parameters: 5 | Environment: "testing" 6 | -------------------------------------------------------------------------------- /samples/sample-etl-pipeline/README.md: -------------------------------------------------------------------------------- 1 | # Sample ETL type Pipeline 2 | 3 | ## Deployment Map example 4 | 5 | ```yaml 6 | - name: sample-etl-pipeline 7 | default_providers: 8 | source: 9 | provider: s3 10 | properties: 11 | account_id: 111111111111 12 | bucket_name: banking-etl-bucket-source 13 | object_key: input.zip 14 | build: 15 | enabled: False 16 | deploy: 17 | provider: s3 18 | targets: 19 | - path: 222222222222 20 | regions: eu-west-1 21 | properties: 22 | bucket_name: account-blah-bucket-etl 23 | object_key: some_path/output.zip 24 | - path: 333333333333 25 | properties: 26 | bucket_name: business-unit-bucket-etl 27 | object_key: another/path/output.zip 28 | ``` 29 | -------------------------------------------------------------------------------- /samples/sample-etl-pipeline/big_data.txt: -------------------------------------------------------------------------------- 1 | Lots of data... 2 | -------------------------------------------------------------------------------- /samples/sample-etl-pipeline/scripts/some_etl_script.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Copyright Amazon.com Inc. or its affiliates. 4 | # SPDX-License-Identifier: Apache-2.0 5 | 6 | set -e 7 | 8 | echo "Doing some ETL tasks... This could also be done with a custom CodeBuild Image..." 9 | 10 | cat big_data.txt 11 | 12 | echo "You can optionally bundle the buildspec.yml in the source zip and have the commands executed that way..." 13 | echo "Don't forget to enable the build stage to support this." 14 | -------------------------------------------------------------------------------- /samples/sample-expunge-vpc/README.md: -------------------------------------------------------------------------------- 1 | # Expunge VPC 2 | 3 | This template uses a custom Lambda-backed resource to expunge the default VPC 4 | in all regions. 5 | 6 | Upon stack deletion, the default VPCs will be recreated. 7 | 8 | ## Deployment Map Example 9 | 10 | ```yaml 11 | - name: expunge-vpc 12 | default_providers: 13 | source: 14 | provider: codecommit 15 | properties: 16 | account_id: 111111111111 17 | build: 18 | provider: codebuild 19 | properties: 20 | image: "STANDARD_7_0" # So we can specify which Python version we need 21 | environment_variables: 22 | CONTAINS_TRANSFORM: true 23 | # ^ Required for templates that contain transforms (e.g. SAM templates) 24 | 25 | params: 26 | restart_execution_on_update: true 27 | targets: 28 | - path: /test 29 | name: test-deployments 30 | ``` 31 | -------------------------------------------------------------------------------- /samples/sample-expunge-vpc/build-lambda.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Copyright Amazon.com Inc. or its affiliates. 4 | # SPDX-License-Identifier: Apache-2.0 5 | 6 | set -e 7 | 8 | cd src/lambda_vpc 9 | 10 | pip install crhelper -t . 11 | 12 | cd - 13 | -------------------------------------------------------------------------------- /samples/sample-expunge-vpc/buildspec.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates.
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | version: 0.2 5 | 6 | phases: 7 | install: 8 | runtime-versions: 9 | python: 3.12 10 | commands: 11 | - aws s3 cp s3://$S3_BUCKET_NAME/adf-build/ adf-build/ --recursive --only-show-errors 12 | - pip install -r adf-build/requirements.txt -q 13 | - python adf-build/generate_params.py 14 | 15 | build: 16 | commands: 17 | - ./build-lambda.sh 18 | - bash adf-build/helpers/package_transform.sh 19 | 20 | artifacts: 21 | files: "**/*" 22 | -------------------------------------------------------------------------------- /samples/sample-expunge-vpc/src/lambda_vpc/requirements.txt: -------------------------------------------------------------------------------- 1 | crhelper 2 | -------------------------------------------------------------------------------- /samples/sample-expunge-vpc/template.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | AWSTemplateFormatVersion: "2010-09-09" 5 | Transform: AWS::Serverless-2016-10-31 6 | Description: Deploys the Custom Resource for deleting the default VPC in all regions 7 | 8 | Resources: 9 | LambdaVPCPolicyRole: 10 | Type: "AWS::IAM::Role" 11 | Properties: 12 | AssumeRolePolicyDocument: 13 | Version: "2012-10-17" 14 | Statement: 15 | - Effect: Allow 16 | Principal: 17 | Service: "lambda.amazonaws.com" 18 | Action: 19 | - "sts:AssumeRole" 20 | Path: "/" 21 | ManagedPolicyArns: 22 | - !Sub "arn:${AWS::Partition}:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole" 23 | - !Sub "arn:${AWS::Partition}:iam::aws:policy/AmazonVPCFullAccess" 24 | Policies: 25 | - PolicyName: ec2 26 | PolicyDocument: 27 | Statement: 28 | - Effect: Allow 29 | Action: 30 | - "ec2:DescribeRegions" 31 | Resource: "*" 32 | 33 | DeleteVPCLambda: 34 | Type: AWS::Serverless::Function 35 | Properties: 36 | CodeUri: ./src/lambda_vpc 37 | Handler: lambda_function.lambda_handler 38 | MemorySize: 128 39 | Role: !GetAtt LambdaVPCPolicyRole.Arn 40 | Runtime: python3.12 41 | Timeout: 600 42 | Environment: 43 | Variables: 44 | region_name: !Ref "AWS::Region" 45 | 46 | DeleteVPCCustom: 47 | Type: Custom::DeleteVPC 48 | Properties: 49 | ServiceToken: !GetAtt DeleteVPCLambda.Arn 50 | -------------------------------------------------------------------------------- /samples/sample-fargate-node-app/Dockerfile: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | FROM public.ecr.aws/docker/library/node:current-alpine 5 | WORKDIR /app 6 | COPY . . 7 | RUN npm install 8 | EXPOSE 3000 9 | ENTRYPOINT ["npm", "start"] 10 | -------------------------------------------------------------------------------- /samples/sample-fargate-node-app/README.md: -------------------------------------------------------------------------------- 1 | # Sample NodeJS Web Application running on AWS Fargate 2 | 3 | ## Prerequisites 4 | 5 | Please make sure you deploy the `sample-ecr-repository` and 6 | `sample-ecs-cluster` examples before you deploy this sample. 7 | The ECS cluster should be deployed to the same target accounts and region. 8 | 9 | If you want to deploy to another region, please make sure to rename 10 | the `params/global_eu-west-1.json` file to use the new region name. 11 | For example: `params/global_us-east-1.json`. 12 | Also update the regions list in the deployment map for this example.
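For example, switching this sample from `eu-west-1` to `us-east-1` would leave the pipeline entry's regions list looking like this (a minimal sketch of only the affected key, assuming the params file was renamed to `params/global_us-east-1.json` first):

```yaml
# Hypothetical deployment map fragment for a us-east-1 deployment.
regions:
  - us-east-1
```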
13 | 14 | ## Deployment Map example 15 | 16 | ```yaml 17 | - name: sample-fargate-node-app 18 | default_providers: 19 | source: 20 | provider: codecommit 21 | properties: 22 | account_id: 111111111111 23 | build: 24 | provider: codebuild 25 | properties: 26 | image: "STANDARD_7_0" 27 | privileged: true 28 | # ^ Required for Docker in Docker to work as expected (since 29 | # CodeBuild will run our docker commands to create and push our 30 | # image). 31 | regions: 32 | - eu-west-1 33 | targets: 34 | # Example Targets: These accounts/regions have had the prerequisite samples deployed 35 | - 222222222222 36 | - path: 333333333333 37 | regions: eu-west-1 38 | name: production 39 | ``` 40 | -------------------------------------------------------------------------------- /samples/sample-fargate-node-app/build/docker.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Copyright Amazon.com Inc. or its affiliates. 4 | # SPDX-License-Identifier: Apache-2.0 5 | 6 | set -e 7 | 8 | aws ecr get-login-password --region ${AWS_REGION} | docker login --username AWS --password-stdin ${ACCOUNT_ID}.dkr.ecr.${AWS_REGION}.amazonaws.com 9 | REPOSITORY_URI="${ACCOUNT_ID}.dkr.ecr.${AWS_REGION}.amazonaws.com/${ADF_PROJECT_NAME}" 10 | IMAGE_TAG=$(echo $CODEBUILD_RESOLVED_SOURCE_VERSION | cut -c 1-7) 11 | 12 | docker build -t "${REPOSITORY_URI}:latest" . 13 | docker tag "${REPOSITORY_URI}:latest" "${REPOSITORY_URI}:${IMAGE_TAG}" 14 | docker push "${REPOSITORY_URI}:latest" 15 | docker push "${REPOSITORY_URI}:${IMAGE_TAG}" 16 | 17 | tmp=$(mktemp) 18 | jq \ 19 | --arg REPOSITORY_URI "$REPOSITORY_URI" \ 20 | --arg IMAGE_TAG "$IMAGE_TAG" \ 21 | '.Parameters.Image = $REPOSITORY_URI+":"+$IMAGE_TAG' \ 22 | "params/global_${AWS_REGION}.json" \ 23 | > "$tmp" 24 | 25 | mv "$tmp" "params/global_${AWS_REGION}.json" 26 | -------------------------------------------------------------------------------- /samples/sample-fargate-node-app/build/generate_parameters.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Copyright Amazon.com Inc. or its affiliates. 4 | # SPDX-License-Identifier: Apache-2.0 5 | 6 | set -e 7 | 8 | aws s3 cp s3://$S3_BUCKET_NAME/adf-build/ adf-build/ --recursive --only-show-errors 9 | pip install -r adf-build/requirements.txt -q 10 | python adf-build/generate_params.py 11 | -------------------------------------------------------------------------------- /samples/sample-fargate-node-app/buildspec.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | version: 0.2 5 | 6 | phases: 7 | install: 8 | runtime-versions: 9 | python: 3.12 10 | 11 | build: 12 | commands: 13 | - bash build/docker.sh 14 | 15 | post_build: 16 | commands: 17 | - bash build/generate_parameters.sh 18 | 19 | artifacts: 20 | files: 21 | - 'template.yml' 22 | - 'params/*.json' 23 | - 'params/*.yml' 24 | -------------------------------------------------------------------------------- /samples/sample-fargate-node-app/index.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com Inc. or its affiliates.
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | const express = require('express') 5 | 6 | const app = express() 7 | 8 | app.set('view engine', 'ejs') 9 | app.use(express.static('public')) 10 | 11 | app.get('/', (req, res) => { 12 | res.render('index', { 13 | region: process.env.REGION, 14 | environment: process.env.ENVIRONMENT 15 | }) 16 | }) 17 | 18 | app.get('/version', (req, res) => { 19 | res.json({ 20 | version: '0.0.1' 21 | }) 22 | }) 23 | 24 | app.listen(3000, () => { 25 | console.log('Listening on 3000') 26 | }) 27 | -------------------------------------------------------------------------------- /samples/sample-fargate-node-app/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sample-node-app", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 0", 8 | "start": "node index.js" 9 | }, 10 | "author": "", 11 | "license": "Apache-2.0", 12 | "dependencies": { 13 | "ejs": "^3.1.10", 14 | "express": "^4.21.0" 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /samples/sample-fargate-node-app/params/banking-production.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | Parameters: 5 | Environment: production 6 | -------------------------------------------------------------------------------- /samples/sample-fargate-node-app/params/global.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | Parameters: 5 | Environment: "testing" 6 | ServiceName: "sample-node-app" 7 | ContainerPort: "3000" 8 | Priority: "1" 9 | 10 | Tags: 11 | TagKey: "TagValue" 12 | MyKey: "MyValue" 13 | -------------------------------------------------------------------------------- /samples/sample-fargate-node-app/params/global_eu-west-1.json: -------------------------------------------------------------------------------- 1 | { 2 | "Parameters": { 3 | "Image": "" 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /samples/sample-fargate-node-app/public/main.css: -------------------------------------------------------------------------------- 1 | /* Copyright Amazon.com Inc. or its affiliates. 
*/ 2 | /* SPDX-License-Identifier: Apache-2.0 */ 3 | 4 | @import url(https://fonts.googleapis.com/css?family=Signika:700,300,600); 5 | 6 | html, body { 7 | height: 100%; 8 | } 9 | 10 | body { 11 | display: flex; 12 | justify-content: center; 13 | align-items: center; 14 | margin: 20px 0; 15 | text-align: center; 16 | background: beige; 17 | overflow: hidden; 18 | } 19 | 20 | h1 { 21 | font-size: 5em; 22 | font: bold 7.5vw/1.6 'Signika', sans-serif; 23 | user-select: none; 24 | } 25 | 26 | h1 span { 27 | display: inline-block; 28 | animation: float .2s ease-in-out infinite; 29 | } 30 | 31 | @keyframes float { 32 | 0%,100%{ 33 | transform: none; 34 | } 35 | 33%{ 36 | transform: translateY(-1px) rotate(-2deg); 37 | } 38 | 66%{ 39 | transform: translateY(1px) rotate(2deg); 40 | } 41 | } 42 | 43 | body:hover span { 44 | animation: bounce .6s; 45 | } 46 | 47 | @keyframes bounce { 48 | 0%,100% { 49 | transform: translate(0); 50 | } 51 | 25%{ 52 | transform: rotateX(20deg) translateY(2px) rotate(-3deg); 53 | } 54 | 50%{ 55 | transform: translateY(-20px) rotate(3deg) scale(1.1); 56 | } 57 | } 58 | 59 | span:nth-child(4n-3) { 60 | color:hsl(30, 65%, 60%); 61 | text-shadow: 1px 1px hsl(30, 45%, 50%), 2px 2px hsl(30, 45%, 50%), 3px 3px hsl(30, 45%, 50%), 4px 4px hsl(30, 45%, 50%); 62 | } 63 | -------------------------------------------------------------------------------- /samples/sample-fargate-node-app/views/index.ejs: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | Sample App 10 | 11 | 12 | 13 | 14 |

15 | Welcome to <%= region %> in <%= environment %> 16 |

17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /samples/sample-iam/README.md: -------------------------------------------------------------------------------- 1 | # Sample IAM to showcase ADF Pipelines 2 | 3 | This pipeline is expecting *(in the example case)* a AWS CodeCommit repository 4 | on the account `111111111111` in your main deployment region named *sample-iam*. 5 | 6 | This sample is configured to deploy to the `eu-west-1` region. 7 | If you would like to deploy it to another region, please update the 8 | parameters in the `params/global.yml` file. Replacing the `eu-west-1` part 9 | with the region you like to deploy to. 10 | 11 | As all resources in this stack are globally accessible, this sample should only 12 | be deployed to a single region per account. It is recommended to leave it 13 | configured to the default deployment region of your ADF installation. 14 | 15 | ## Deployment Map example 16 | 17 | ```yaml 18 | - name: sample-iam 19 | default_providers: 20 | source: 21 | provider: codecommit 22 | properties: 23 | account_id: 111111111111 24 | build: 25 | provider: codebuild 26 | properties: 27 | image: "STANDARD_7_0" 28 | params: 29 | restart_execution_on_update: True 30 | targets: 31 | - /banking/testing 32 | - /banking/production 33 | ``` 34 | -------------------------------------------------------------------------------- /samples/sample-iam/buildspec.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | version: 0.2 5 | 6 | phases: 7 | install: 8 | runtime-versions: 9 | python: 3.12 10 | commands: 11 | - aws s3 cp s3://$S3_BUCKET_NAME/adf-build/ adf-build/ --recursive --only-show-errors 12 | - pip install -r adf-build/requirements.txt -q 13 | - python adf-build/generate_params.py 14 | 15 | artifacts: 16 | files: 17 | - 'template.yml' 18 | - 'params/*.json' 19 | - 'params/*.yml' 20 | -------------------------------------------------------------------------------- /samples/sample-iam/params/global.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | Parameters: 5 | DeploymentAssetS3BucketName: "resolve:/adf/cross_region/s3_regional_bucket/eu-west-1" 6 | DeploymentAssetKMSKeyArn: "resolve:/adf/cross_region/kms_arn/eu-west-1" 7 | 8 | Tags: 9 | TagKey: "my_tag" 10 | MyKey: "new_value" 11 | -------------------------------------------------------------------------------- /samples/sample-mono-repo/apps/alpha/README.md: -------------------------------------------------------------------------------- 1 | # Sample App A 2 | 3 | This app is part of the mono repo sample. 4 | As part of the deployment it will create a simple S3 bucket to host 5 | application assets. 6 | -------------------------------------------------------------------------------- /samples/sample-mono-repo/apps/alpha/buildspec.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | version: 0.2 5 | 6 | env: 7 | variables: 8 | INFRASTRUCTURE_ROOT_DIR: 'apps/alpha' 9 | 10 | phases: 11 | install: 12 | runtime-versions: 13 | python: 3.12 14 | commands: 15 | - cd $INFRASTRUCTURE_ROOT_DIR 16 | - aws s3 cp s3://$S3_BUCKET_NAME/adf-build/ adf-build/ --recursive --only-show-errors 17 | - pip install -r adf-build/requirements.txt -q 18 | 19 | build: 20 | commands: 21 | - python adf-build/generate_params.py 22 | 23 | artifacts: 24 | files: 25 | - '$INFRASTRUCTURE_ROOT_DIR/template.yml' 26 | - '$INFRASTRUCTURE_ROOT_DIR/params/*.json' 27 | - '$INFRASTRUCTURE_ROOT_DIR/params/*.yml' 28 | -------------------------------------------------------------------------------- /samples/sample-mono-repo/apps/alpha/params/global.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | Tags: 5 | Repository: sample-mono-repo 6 | App: Sample Mono Repo Alpha 7 | -------------------------------------------------------------------------------- /samples/sample-mono-repo/apps/alpha/template.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | AWSTemplateFormatVersion: '2010-09-09' 5 | Description: ADF CloudFormation Sample Template (Mono Repo/App A) 6 | Metadata: 7 | License: Apache-2.0 8 | 9 | Resources: 10 | Bucket: 11 | Type: AWS::S3::Bucket 12 | Properties: 13 | BucketEncryption: 14 | ServerSideEncryptionConfiguration: 15 | - ServerSideEncryptionByDefault: 16 | SSEAlgorithm: AES256 17 | VersioningConfiguration: 18 | Status: Enabled 19 | PublicAccessBlockConfiguration: 20 | BlockPublicAcls: true 21 | BlockPublicPolicy: true 22 | IgnorePublicAcls: true 23 | RestrictPublicBuckets: true 24 | -------------------------------------------------------------------------------- /samples/sample-mono-repo/apps/beta/README.md: -------------------------------------------------------------------------------- 1 | # Sample App B 2 | 3 | This app is part of the mono repo sample. 4 | As part of the deployment it will create a simple S3 bucket to host 5 | application assets. 6 | -------------------------------------------------------------------------------- /samples/sample-mono-repo/apps/beta/buildspec.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | version: 0.2 5 | 6 | env: 7 | variables: 8 | INFRASTRUCTURE_ROOT_DIR: 'apps/beta' 9 | 10 | phases: 11 | install: 12 | runtime-versions: 13 | python: 3.12 14 | commands: 15 | - cd $INFRASTRUCTURE_ROOT_DIR 16 | - aws s3 cp s3://$S3_BUCKET_NAME/adf-build/ adf-build/ --recursive --only-show-errors 17 | - pip install -r adf-build/requirements.txt -q 18 | 19 | build: 20 | commands: 21 | - python adf-build/generate_params.py 22 | 23 | artifacts: 24 | files: 25 | - '$INFRASTRUCTURE_ROOT_DIR/template.yml' 26 | - '$INFRASTRUCTURE_ROOT_DIR/params/*.json' 27 | - '$INFRASTRUCTURE_ROOT_DIR/params/*.yml' 28 | -------------------------------------------------------------------------------- /samples/sample-mono-repo/apps/beta/params/global.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates.
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | Tags: 5 | Repository: sample-mono-repo 6 | App: Sample Mono Repo Beta 7 | -------------------------------------------------------------------------------- /samples/sample-mono-repo/apps/beta/template.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | AWSTemplateFormatVersion: '2010-09-09' 5 | Description: ADF CloudFormation Sample Template (Mono Repo/App B) 6 | Metadata: 7 | License: Apache-2.0 8 | 9 | Resources: 10 | Bucket: 11 | Type: AWS::S3::Bucket 12 | Properties: 13 | BucketEncryption: 14 | ServerSideEncryptionConfiguration: 15 | - ServerSideEncryptionByDefault: 16 | SSEAlgorithm: AES256 17 | VersioningConfiguration: 18 | Status: Enabled 19 | PublicAccessBlockConfiguration: 20 | BlockPublicAcls: true 21 | BlockPublicPolicy: true 22 | IgnorePublicAcls: true 23 | RestrictPublicBuckets: true 24 | -------------------------------------------------------------------------------- /samples/sample-serverless-app/README.md: -------------------------------------------------------------------------------- 1 | # Sample Serverless Python based Application 2 | 3 | ## Deployment Map example 4 | 5 | ```yaml 6 | - name: sample-serverless-app 7 | default_providers: 8 | source: 9 | provider: codecommit 10 | properties: 11 | account_id: 111111111111 12 | build: 13 | provider: codebuild 14 | properties: 15 | image: "STANDARD_7_0" 16 | environment_variables: 17 | CONTAINS_TRANSFORM: True 18 | targets: 19 | - 222222222222 20 | - path: 333333333333 21 | regions: eu-west-1 22 | name: production 23 | ``` 24 | -------------------------------------------------------------------------------- /samples/sample-serverless-app/build/generate_parameters.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Copyright Amazon.com Inc. or its affiliates. 4 | # SPDX-License-Identifier: Apache-2.0 5 | 6 | set -e 7 | 8 | aws s3 cp s3://$S3_BUCKET_NAME/adf-build/ adf-build/ --recursive --only-show-errors 9 | pip install -r adf-build/requirements.txt -q 10 | python adf-build/generate_params.py 11 | -------------------------------------------------------------------------------- /samples/sample-serverless-app/buildspec.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | version: 0.2 5 | 6 | phases: 7 | install: 8 | runtime-versions: 9 | python: 3.12 10 | 11 | build: 12 | commands: 13 | - bash build/generate_parameters.sh 14 | - bash adf-build/helpers/package_transform.sh 15 | 16 | artifacts: 17 | files: 18 | - 'template*.yml' 19 | - 'params/*.json' 20 | - 'params/*.yml' 21 | -------------------------------------------------------------------------------- /samples/sample-serverless-app/handler.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import json 5 | 6 | def lambda_handler(event, context): 7 | print(event) 8 | return event 9 | -------------------------------------------------------------------------------- /samples/sample-serverless-app/params/global.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | Tags: 5 | TagKey: "my_tag" 6 | MyKey: "new_value" 7 | -------------------------------------------------------------------------------- /samples/sample-serverless-app/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/aws-deployment-framework/325ae13fb83aad600d81494b8694362aae8f7e13/samples/sample-serverless-app/requirements.txt -------------------------------------------------------------------------------- /samples/sample-serverless-app/template.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | AWSTemplateFormatVersion: '2010-09-09' 5 | Transform: AWS::Serverless-2016-10-31 6 | Description: ADF CloudFormation Sample Template (Serverless Application) 7 | Metadata: 8 | License: Apache-2.0 9 | cfn-lint: 10 | config: 11 | ignore_checks: 12 | # https://github.com/aws/serverless-application-model/issues/1590 13 | - I3042 14 | 15 | Resources: 16 | ExampleServerlessApp: 17 | Type: 'AWS::Serverless::Function' 18 | Properties: 19 | Handler: handler.lambda_handler 20 | Runtime: python3.12 21 | CodeUri: . 22 | Description: Sample Lambda Function 23 | MemorySize: 128 24 | Timeout: 3 25 | Events: 26 | GetResource: 27 | Type: Api 28 | Properties: 29 | Path: /resource/{id} 30 | Method: get 31 | -------------------------------------------------------------------------------- /samples/sample-service-catalog-product/README.md: -------------------------------------------------------------------------------- 1 | # Sample Service Catalog Product 2 | 3 | This stack imports values from `sample-vpc` and `sample-iam`. 4 | 5 | ## Deployment Map example 6 | 7 | ```yaml 8 | - name: sample-service-catalog-product 9 | default_providers: 10 | source: 11 | provider: codecommit 12 | properties: 13 | account_id: 111111111111 14 | build: 15 | provider: codebuild 16 | properties: 17 | image: "STANDARD_7_0" 18 | targets: 19 | - /banking/testing 20 | - path: /banking/production 21 | regions: eu-west-1 22 | name: production 23 | ``` 24 | 25 | ## Parameters 26 | 27 | In the parameter files for this specific pipeline we are uploading any of the 28 | product templates *(productX/template.yml)* contained within this repository. 29 | We can upload the templates to S3 and reference their S3 Object URL by using 30 | the following example: 31 | 32 | ```yaml 33 | Parameters: 34 | ProductXTemplateURL: 'upload:eu-central-1:path:productX/template.yml' 35 | ``` 36 | 37 | In this example, ADF will search for a file in `productX/template.yml` within 38 | this repository. If found, this will be uploaded to an Amazon S3 Bucket within 39 | the region defined within the value *(region is optional)*. Once uploaded, this 40 | string will be replaced by the uploaded S3 object URL *(using a 41 | [path-style](https://docs.aws.amazon.com/AmazonS3/latest/dev/VirtualHosting.html) 42 | URL)* and passed into the template as required. If the repository contains 43 | numerous Service Catalog products, each will require its own folder and upload 44 | parameter within its associated parameter files *(or global.yml)*.
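For example, a repository hosting two products could declare one upload parameter per product folder (a minimal sketch; the `productY` folder and its `ProductYTemplateURL` parameter are hypothetical here):

```yaml
Parameters:
  # Each value below is replaced at build time with the S3 object URL of the
  # uploaded template before the parameters are passed to CloudFormation.
  ProductXTemplateURL: 'upload:eu-central-1:path:productX/template.yml'
  ProductYTemplateURL: 'upload:eu-central-1:path:productY/template.yml'
```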
45 | 46 | If the region is omitted from the value when using the **upload** functionality, 47 | your default deployment region will be used: 48 | 49 | ```yaml 50 | Parameters: 51 | ProductXTemplateURL: 'upload:path:productX/template.yml' 52 | ProductYTemplateURL: 'upload:path:productY/another_name.yml' 53 | ``` 54 | 55 | In this example, the files within the folders `productX` and `productY` will be 56 | uploaded to the S3 bucket in our default deployment region and have their parameters 57 | updated to contain the S3 URL. 58 | -------------------------------------------------------------------------------- /samples/sample-service-catalog-product/buildspec.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | version: 0.2 5 | 6 | phases: 7 | install: 8 | runtime-versions: 9 | python: 3.12 10 | commands: 11 | - aws s3 cp s3://$S3_BUCKET_NAME/adf-build/ adf-build/ --recursive --only-show-errors 12 | - pip install -r adf-build/requirements.txt -q 13 | - python adf-build/generate_params.py 14 | 15 | artifacts: 16 | files: 17 | - 'template.yml' 18 | - 'params/*.json' 19 | - 'params/*.yml' 20 | -------------------------------------------------------------------------------- /samples/sample-service-catalog-product/params/global.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | Parameters: 5 | ProductXTemplateURL: "upload:path:productX/template.yml" 6 | -------------------------------------------------------------------------------- /samples/sample-service-catalog-product/productX/template.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | AWSTemplateFormatVersion: '2010-09-09' 5 | Description: ADF CloudFormation Sample Service Catalog Product 6 | Metadata: 7 | License: Apache-2.0 8 | 9 | Parameters: 10 | Environment: 11 | Type: String 12 | Default: development 13 | AllowedValues: 14 | - development 15 | - testing 16 | Description: The environment to use; IDEs are only supported in testing 17 | 18 | InstanceType: 19 | Type: String 20 | Default: t3.micro 21 | AllowedValues: 22 | - t3.micro 23 | - m5.large 24 | Description: Enter t3.micro or m5.large. Default is t3.micro. 25 | 26 | AutomaticStopTimeInMinutes: 27 | Type: Number 28 | Default: 480 29 | AllowedValues: 30 | - 480 31 | - 960 32 | Description: The number of minutes after which this Cloud9 instance should automatically stop (8 or 16 hours). 33 | 34 | InstanceDescription: 35 | Type: String 36 | Default: "Development environment used during office hours" 37 | Description: The description of the Cloud9 instance. 38 | 39 | InstanceName: 40 | Type: String 41 | Description: The name of the Cloud9 instance. 42 | 43 | UserName: 44 | Type: String 45 | Description: Your IAM UserName that will be used as the OwnerArn of the Cloud9 instance.
46 | 47 | Resources: 48 | Cloud9Instance: 49 | Type: AWS::Cloud9::EnvironmentEC2 50 | Metadata: 51 | cfn-lint: 52 | config: 53 | ignore_checks: 54 | - E1152 55 | Properties: 56 | AutomaticStopTimeMinutes: !Ref AutomaticStopTimeInMinutes 57 | Description: !Ref InstanceDescription 58 | InstanceType: !Ref InstanceType 59 | ImageId: 'amazonlinux-2023-x86_64' 60 | Name: !Ref InstanceName 61 | OwnerArn: !Sub "arn:${AWS::Partition}:iam::${AWS::AccountId}:user/${UserName}" # In this sample case 'sample-developer' from the IAM stack can be used here 62 | SubnetId: 63 | Fn::ImportValue: 64 | Fn::Sub: ${Environment}-public-subnet-1a # Imported from sample-vpc 65 | -------------------------------------------------------------------------------- /samples/sample-service-catalog-product/template.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | AWSTemplateFormatVersion: "2010-09-09" 5 | Description: ADF CloudFormation Sample Template (Service Catalog Product) 6 | Metadata: 7 | License: Apache-2.0 8 | 9 | Parameters: 10 | ProductXTemplateURL: 11 | Type: String 12 | Description: Service Catalog CloudFormation Template URL 13 | 14 | Resources: 15 | Portfolio: 16 | Type: AWS::ServiceCatalog::Portfolio 17 | Properties: 18 | AcceptLanguage: en 19 | Description: Portfolio containing Cloud9 Development Environment 20 | DisplayName: IDE Portfolio 21 | ProviderName: Company 22 | 23 | IDETagOption: 24 | Type: "AWS::ServiceCatalog::TagOption" 25 | Properties: 26 | Key: "ProductType" 27 | Value: "IDE" 28 | 29 | IDEPortfolioTagOptionAssociation: 30 | Type: "AWS::ServiceCatalog::TagOptionAssociation" 31 | Properties: 32 | ResourceId: !Ref Portfolio 33 | TagOptionId: !Ref IDETagOption 34 | 35 | Cloud9Product: 36 | Type: AWS::ServiceCatalog::CloudFormationProduct 37 | Properties: 38 | AcceptLanguage: en 39 | Description: CloudFormation Template to deploy Cloud9 Development Environment 40 | Distributor: Company 41 | Name: Cloud9 Development Environment 42 | Owner: Company 43 | ProvisioningArtifactParameters: 44 | - Info: 45 | LoadTemplateFromURL: !Ref ProductXTemplateURL 46 | SupportDescription: For help with Cloud9 Dev Environment contact us 47 | SupportEmail: john@example.com 48 | SupportUrl: http://example.com 49 | 50 | Association: 51 | Type: AWS::ServiceCatalog::PortfolioProductAssociation 52 | Properties: 53 | AcceptLanguage: en 54 | PortfolioId: !Ref Portfolio 55 | ProductId: !Ref Cloud9Product 56 | 57 | IDEPortfolioPrincipalAssociation: 58 | Type: "AWS::ServiceCatalog::PortfolioPrincipalAssociation" 59 | Properties: 60 | PortfolioId: !Ref Portfolio 61 | PrincipalARN: !ImportValue SampleDevelopersIAMGroupArn 62 | PrincipalType: "IAM" 63 | -------------------------------------------------------------------------------- /samples/sample-terraform/README.md: -------------------------------------------------------------------------------- 1 | # Terraform template 2 | 3 | ## Overview 4 | 5 | Please read the [user guide on ADF's support for 6 | Terraform](../../docs/user-guide.md#terraform-pipeline) before you proceed. 7 | 8 | ## Deployment procedure 9 | 10 | 1. 
Add a sample-terraform pipeline in ADF `deployment-map.yml` as in the 11 | example: 12 | 13 | ```yaml 14 | - name: sample-terraform 15 | default_providers: 16 | source: 17 | provider: codecommit 18 | properties: 19 | account_id: 111111111111 # Source account id 20 | build: 21 | provider: codebuild 22 | deploy: 23 | provider: codebuild 24 | properties: 25 | image: "STANDARD_7_0" 26 | environment_variables: 27 | TARGET_ACCOUNTS: 111111111111,222222222222 # Target accounts 28 | TARGET_OUS: /core/infrastructure,/sandbox # Target OUs 29 | MANAGEMENT_ACCOUNT_ID: 333333333333 # Billing account 30 | # Regions in comma-separated list format, for example 31 | # "eu-west-1,us-east-1" 32 | REGIONS: eu-west-1 33 | targets: 34 | - name: terraform-scan # optional 35 | properties: 36 | spec_filename: tf_scan.yml # Terraform scan 37 | - name: terraform-plan 38 | properties: 39 | spec_filename: tf_plan.yml # Terraform plan 40 | - approval # manual approval 41 | - name: terraform-apply 42 | properties: 43 | spec_filename: tf_apply.yml # Terraform apply 44 | ``` 45 | 46 | The sample uses the following configuration; please update it accordingly: 47 | 48 | - Project name: `sample-terraform` 49 | - Target accounts: `111111111111` and `222222222222` 50 | - Target regions: `eu-west-1` (the main ADF deployment region) and `us-east-1` 51 | -------------------------------------------------------------------------------- /samples/sample-terraform/buildspec.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | version: 0.2 5 | 6 | env: 7 | variables: 8 | # Terraform version to use. ADF supports Terraform version v0.13.0 and later. 9 | TERRAFORM_VERSION: "1.0.10" 10 | phases: 11 | install: 12 | commands: 13 | - aws s3 cp s3://$S3_BUCKET_NAME/adf-build/ adf-build/ --recursive --only-show-errors 14 | - export PATH=$PATH:$(pwd) 15 | - bash adf-build/helpers/terraform/install_terraform.sh 16 | - pip install --upgrade pip 17 | - pip install -r adf-build/requirements.txt -q 18 | build: 19 | commands: 20 | - python adf-build/generate_params.py 21 | 22 | artifacts: 23 | files: "**/*" 24 | -------------------------------------------------------------------------------- /samples/sample-terraform/params/global.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | Parameters: 5 | ProjectName: "sample-terraform" 6 | -------------------------------------------------------------------------------- /samples/sample-terraform/tf/backend.tf: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | terraform { 5 | backend "s3" {} 6 | } 7 | -------------------------------------------------------------------------------- /samples/sample-terraform/tf/main.tf: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates.
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | data "aws_partition" "current" {} 5 | 6 | terraform { 7 | required_providers { 8 | aws = { 9 | source = "hashicorp/aws" 10 | version = ">= 3.0" 11 | } 12 | } 13 | required_version = ">= 0.13.0" 14 | } 15 | provider "aws" { 16 | assume_role { 17 | role_arn = "arn:${data.aws_partition.current.partition}:iam::${var.TARGET_ACCOUNT_ID}:role/${var.TARGET_ACCOUNT_ROLE}" 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /samples/sample-terraform/tf/s3.tf: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | resource "aws_s3_bucket" "s3" { 5 | bucket = "my-tf-test-bucket-${var.TARGET_REGION}-${var.TARGET_ACCOUNT_ID}" 6 | acl = "private" 7 | } 8 | 9 | resource "aws_s3_bucket_public_access_block" "s3-public-block" { 10 | bucket = aws_s3_bucket.s3.id 11 | 12 | block_public_acls = true 13 | block_public_policy = true 14 | ignore_public_acls = true 15 | restrict_public_buckets = true 16 | } 17 | -------------------------------------------------------------------------------- /samples/sample-terraform/tf/variables.tf: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | variable "TARGET_ACCOUNT_ID" { 5 | type = string 6 | } 7 | variable "TARGET_ACCOUNT_ROLE" { 8 | type = string 9 | } 10 | variable "TARGET_REGION" { 11 | type = string 12 | } 13 | -------------------------------------------------------------------------------- /samples/sample-terraform/tf_apply.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | version: 0.2 5 | 6 | env: 7 | variables: 8 | TF_VAR_TARGET_ACCOUNT_ROLE: adf-pipeline-terraform # The IAM Role Terraform will assume to deploy resources 9 | TF_IN_AUTOMATION: true 10 | TF_CLI_ARGS: "-no-color" 11 | TF_STAGE: "apply" 12 | 13 | phases: 14 | install: 15 | runtime-versions: 16 | python: 3.12 17 | 18 | build: 19 | commands: 20 | - python adf-build/helpers/terraform/get_accounts.py 21 | - bash adf-build/helpers/terraform/adf_terraform.sh 22 | -------------------------------------------------------------------------------- /samples/sample-terraform/tf_destroy.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | version: 0.2 5 | 6 | env: 7 | variables: 8 | TF_VAR_TARGET_ACCOUNT_ROLE: adf-pipeline-terraform # The IAM Role Terraform will assume to deploy resources 9 | TF_IN_AUTOMATION: true 10 | TF_STAGE: "destroy" 11 | TF_CLI_ARGS: "-no-color" 12 | 13 | phases: 14 | install: 15 | runtime-versions: 16 | python: 3.12 17 | 18 | build: 19 | commands: 20 | - python adf-build/helpers/terraform/get_accounts.py 21 | - bash adf-build/helpers/terraform/adf_terraform.sh 22 | -------------------------------------------------------------------------------- /samples/sample-terraform/tf_plan.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates.
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | version: 0.2 5 | 6 | env: 7 | variables: 8 | TF_VAR_TARGET_ACCOUNT_ROLE: adf-pipeline-terraform # The IAM Role Terraform will assume to deploy resources 9 | TF_IN_AUTOMATION: true 10 | TF_STAGE: "plan" 11 | TF_CLI_ARGS: "-no-color" 12 | 13 | phases: 14 | install: 15 | runtime-versions: 16 | python: 3.12 17 | 18 | build: 19 | commands: 20 | - python adf-build/helpers/terraform/get_accounts.py 21 | - bash adf-build/helpers/terraform/adf_terraform.sh 22 | -------------------------------------------------------------------------------- /samples/sample-terraform/tf_scan.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | version: 0.2 5 | 6 | phases: 7 | install: 8 | commands: 9 | - curl -L "$(curl -s https://api.github.com/repositories/103084166/releases/latest | grep -o -E "https:\/\/.+?_Linux_x86_64.tar.gz")" > terrascan.tar.gz 10 | - tar -xf terrascan.tar.gz terrascan && rm terrascan.tar.gz 11 | - install terrascan /usr/local/bin && rm terrascan 12 | build: 13 | commands: 14 | - cd tf 15 | - terrascan scan -o yaml 16 | -------------------------------------------------------------------------------- /samples/sample-terraform/tfvars/global.auto.tfvars: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/aws-deployment-framework/325ae13fb83aad600d81494b8694362aae8f7e13/samples/sample-terraform/tfvars/global.auto.tfvars -------------------------------------------------------------------------------- /samples/sample-vpc/README.md: -------------------------------------------------------------------------------- 1 | # Sample VPC to showcase ADF Pipelines 2 | 3 | ## Deployment Map example 4 | 5 | ```yaml 6 | - name: sample-vpc 7 | default_providers: 8 | source: 9 | provider: codecommit 10 | properties: 11 | account_id: 111111111111 12 | build: 13 | provider: codebuild 14 | properties: 15 | image: "STANDARD_7_0" 16 | params: 17 | restart_execution_on_update: True 18 | targets: 19 | - /banking/testing 20 | - path: /banking/production 21 | regions: eu-west-1 22 | name: production 23 | ``` 24 | -------------------------------------------------------------------------------- /samples/sample-vpc/buildspec.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | version: 0.2 5 | 6 | phases: 7 | install: 8 | runtime-versions: 9 | python: 3.12 10 | commands: 11 | - aws s3 cp s3://$S3_BUCKET_NAME/adf-build/ adf-build/ --recursive --only-show-errors 12 | - pip install -r adf-build/requirements.txt -q 13 | - python adf-build/generate_params.py 14 | 15 | artifacts: 16 | files: 17 | - 'template.yml' 18 | - 'params/*.json' 19 | - 'params/*.yml' 20 | -------------------------------------------------------------------------------- /samples/sample-vpc/params/banking-production.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | Parameters: 5 | CostCenter: "123" 6 | Environment: "production" 7 | 8 | Tags: 9 | TagKey: "my_tag" 10 | MyKey: "new_value" 11 | -------------------------------------------------------------------------------- /samples/sample-vpc/params/global.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | Parameters: 5 | CostCenter: "123" 6 | Environment: "testing" 7 | 8 | Tags: 9 | TagKey: "my_tag" 10 | MyKey: "new_value" 11 | -------------------------------------------------------------------------------- /src/lambda_codebase/LICENSE.txt: -------------------------------------------------------------------------------- 1 | MIT No Attribution 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 4 | software and associated documentation files (the "Software"), to deal in the Software 5 | without restriction, including without limitation the rights to use, copy, modify, 6 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 7 | permit persons to whom the Software is furnished to do so. 8 | 9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 10 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 11 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 12 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 13 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 14 | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -------------------------------------------------------------------------------- /src/lambda_codebase/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | -------------------------------------------------------------------------------- /src/lambda_codebase/account/handler.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 
2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | The Account handler that is called when ADF is installed to initially create 6 | the deployment account if required 7 | """ 8 | 9 | try: 10 | from main import lambda_handler # pylint: disable=unused-import 11 | except Exception as err: # pylint: disable=broad-except 12 | import os 13 | import logging 14 | from urllib.request import Request, urlopen 15 | import json 16 | 17 | LOGGER = logging.getLogger(__name__) 18 | LOGGER.setLevel(os.environ.get("ADF_LOG_LEVEL", logging.INFO)) 19 | 20 | def lambda_handler(event, _context, prior_error=err): 21 | payload = { 22 | "LogicalResourceId": event["LogicalResourceId"], 23 | "PhysicalResourceId": event.get( 24 | "PhysicalResourceId", 25 | "NOT_YET_CREATED"), 26 | "Status": "FAILED", 27 | "RequestId": event["RequestId"], 28 | "StackId": event["StackId"], 29 | "Reason": str(prior_error), 30 | } 31 | if not event["ResponseURL"].lower().startswith('http'): 32 | raise ValueError('ResponseURL is forbidden') from None 33 | with urlopen( 34 | Request( 35 | event["ResponseURL"], 36 | data=json.dumps(payload).encode(), 37 | headers={"content-type": ""}, 38 | method="PUT", 39 | ) 40 | ) as response: 41 | response_body = response.read().decode("utf-8") 42 | LOGGER.debug( 43 | "Response: %s", 44 | response_body, 45 | ) 46 | -------------------------------------------------------------------------------- /src/lambda_codebase/account/pytest.ini: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | [pytest] 5 | testpaths = tests 6 | -------------------------------------------------------------------------------- /src/lambda_codebase/account/requirements.txt: -------------------------------------------------------------------------------- 1 | cfn_custom_resource~=1.0.1 2 | -------------------------------------------------------------------------------- /src/lambda_codebase/account/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | # pylint: skip-file 5 | -------------------------------------------------------------------------------- /src/lambda_codebase/account_processing/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | # pylint: skip-file 5 | -------------------------------------------------------------------------------- /src/lambda_codebase/account_processing/configure_account_ou.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | Moves an account to the specified OU. 
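    Illustrative input event (placeholder values, not taken from a real event; only these fields are read by the handler below): {"account_id": "111111111111", "account_full_name": "banking-testing-account", "organizational_unit_path": "/banking/testing"}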
6 | """ 7 | import boto3 8 | from aws_xray_sdk.core import patch_all 9 | 10 | # ADF imports 11 | from logger import configure_logger 12 | from organizations import Organizations 13 | 14 | 15 | patch_all() 16 | LOGGER = configure_logger(__name__) 17 | 18 | 19 | def lambda_handler(event, _): 20 | LOGGER.info( 21 | "Ensuring Account: %s is in OU %s", 22 | event.get('account_full_name'), 23 | event.get('organizational_unit_path'), 24 | ) 25 | organizations = Organizations(boto3) 26 | organizations.move_account( 27 | event.get("account_id"), 28 | event.get("organizational_unit_path"), 29 | ) 30 | return event 31 | -------------------------------------------------------------------------------- /src/lambda_codebase/account_processing/configure_account_tags.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | Creates or adds tags to an account. 6 | Currently only appends new tags. 7 | Will not delete tags that aren't 8 | in the config file. 9 | """ 10 | import boto3 11 | from aws_xray_sdk.core import patch_all 12 | 13 | # ADF imports 14 | from logger import configure_logger 15 | 16 | patch_all() 17 | LOGGER = configure_logger(__name__) 18 | ORG_CLIENT = boto3.client("organizations") 19 | 20 | 21 | def create_account_tags(account_id, tags, client): 22 | LOGGER.info( 23 | "Ensuring Account: %s has tags: %s", 24 | account_id, 25 | tags, 26 | ) 27 | formatted_tags = [ 28 | {"Key": str(key), "Value": str(value)} 29 | for tag in tags 30 | for key, value in tag.items() 31 | ] 32 | LOGGER.debug( 33 | "Ensuring Account: %s has tags (formatted): %s", 34 | account_id, 35 | formatted_tags, 36 | ) 37 | client.tag_resource(ResourceId=account_id, Tags=formatted_tags) 38 | 39 | 40 | def lambda_handler(event, _): 41 | if event.get("tags"): 42 | create_account_tags( 43 | event.get("account_id"), 44 | event.get("tags"), 45 | ORG_CLIENT, 46 | ) 47 | else: 48 | LOGGER.info( 49 | "Account: %s does not need tags configured", 50 | event.get("account_full_name"), 51 | ) 52 | return event 53 | -------------------------------------------------------------------------------- /src/lambda_codebase/account_processing/create_account.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | Creates an account within your organization. 
6 | """ 7 | 8 | import os 9 | from aws_xray_sdk.core import patch_all 10 | import boto3 11 | 12 | # ADF imports 13 | from logger import configure_logger 14 | 15 | patch_all() 16 | 17 | LOGGER = configure_logger(__name__) 18 | ADF_PRIVILEGED_CROSS_ACCOUNT_ROLE_NAME = os.getenv("ADF_PRIVILEGED_CROSS_ACCOUNT_ROLE_NAME") 19 | 20 | 21 | def create_account(account, adf_privileged_role_name, org_client): 22 | LOGGER.info("Creating account %s", account.get('account_full_name')) 23 | allow_billing = "ALLOW" if account.get("allow_billing", False) else "DENY" 24 | response = org_client.create_account( 25 | Email=account.get("email"), 26 | AccountName=account.get("account_full_name"), 27 | RoleName=adf_privileged_role_name, # defaults to OrganizationAccountAccessRole 28 | IamUserAccessToBilling=allow_billing, 29 | )["CreateAccountStatus"] 30 | while response["State"] == "IN_PROGRESS": 31 | response = org_client.describe_create_account_status( 32 | CreateAccountRequestId=response["Id"] 33 | )["CreateAccountStatus"] 34 | if response.get("FailureReason"): 35 | raise IOError( 36 | f"Failed to create account {account.get('account_full_name')}: " 37 | f"{response['FailureReason']}" 38 | ) 39 | return { 40 | **account, 41 | "account_id": response["AccountId"], 42 | } 43 | 44 | 45 | def lambda_handler(event, _): 46 | org_client = boto3.client("organizations") 47 | return create_account(event, ADF_PRIVILEGED_CROSS_ACCOUNT_ROLE_NAME, org_client) 48 | -------------------------------------------------------------------------------- /src/lambda_codebase/account_processing/get_account_regions.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | Gets all the default regions for an account. 6 | """ 7 | 8 | import os 9 | from aws_xray_sdk.core import patch_all 10 | 11 | # ADF imports 12 | from logger import configure_logger 13 | from sts import STS 14 | 15 | patch_all() 16 | 17 | LOGGER = configure_logger(__name__) 18 | ADF_PRIVILEGED_CROSS_ACCOUNT_ROLE_NAME = os.getenv( 19 | "ADF_PRIVILEGED_CROSS_ACCOUNT_ROLE_NAME", 20 | ) 21 | AWS_PARTITION = os.getenv("AWS_PARTITION") 22 | MANAGEMENT_ACCOUNT_ID = os.getenv('MANAGEMENT_ACCOUNT_ID') 23 | 24 | 25 | def get_default_regions_for_account(ec2_client): 26 | filtered_regions = ec2_client.describe_regions( 27 | AllRegions=False, 28 | Filters=[ 29 | { 30 | "Name": "opt-in-status", 31 | "Values": [ 32 | "opt-in-not-required", 33 | "opted-in", 34 | ], 35 | }, 36 | ], 37 | )["Regions"] 38 | default_regions = [region["RegionName"] for region in filtered_regions] 39 | return default_regions 40 | 41 | 42 | def lambda_handler(event, _): 43 | LOGGER.info("Fetching Default regions %s", event.get("account_full_name")) 44 | sts = STS() 45 | account_id = event.get("account_id") 46 | role = sts.assume_bootstrap_deployment_role( 47 | AWS_PARTITION, 48 | MANAGEMENT_ACCOUNT_ID, 49 | account_id, 50 | ADF_PRIVILEGED_CROSS_ACCOUNT_ROLE_NAME, 51 | "adf_account_get_regions", 52 | ) 53 | default_regions = get_default_regions_for_account(role.client("ec2")) 54 | 55 | LOGGER.debug("Default regions for %s: %s", account_id, default_regions) 56 | return { 57 | **event, 58 | "default_regions": default_regions, 59 | } 60 | -------------------------------------------------------------------------------- /src/lambda_codebase/account_processing/pytest.ini: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. 
or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | [pytest] 5 | testpaths = tests 6 | addopts = --cov=./src/lambda_codebase/account_processing/ --cov-fail-under=50 --cov-report term 7 | 8 | [coverage:run] 9 | omit = tests/ 10 | -------------------------------------------------------------------------------- /src/lambda_codebase/account_processing/requirements.txt: -------------------------------------------------------------------------------- 1 | aws-xray-sdk==2.13.0 2 | boto3==1.34.80 3 | botocore==1.34.80 4 | pyyaml~=6.0.1 5 | tenacity==8.2.3 6 | -------------------------------------------------------------------------------- /src/lambda_codebase/account_processing/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | # pylint: skip-file 5 | 6 | """ 7 | __init__ for tests module 8 | """ 9 | 10 | import sys 11 | import os 12 | 13 | sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) 14 | -------------------------------------------------------------------------------- /src/lambda_codebase/account_processing/tests/test_account_tags.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | Tests the account tag configuration lambda 6 | """ 7 | 8 | import unittest 9 | import boto3 10 | from botocore.stub import Stubber 11 | from aws_xray_sdk import global_sdk_config 12 | from ..configure_account_tags import ( 13 | create_account_tags, 14 | ) 15 | 16 | global_sdk_config.set_sdk_enabled(False) 17 | 18 | 19 | class SuccessTestCase(unittest.TestCase): 20 | def test_account_tag_creation(self): 21 | test_event = {"account_id": "123456789012", "tags": [{"CreatedBy": "ADF"}]} 22 | ou_client = boto3.client("organizations") 23 | stubber = Stubber(ou_client) 24 | stubber.add_response( 25 | "tag_resource", 26 | {}, 27 | { 28 | "Tags": [{"Key": "CreatedBy", "Value": "ADF"}], 29 | "ResourceId": "123456789012", 30 | }, 31 | ) 32 | stubber.activate() 33 | create_account_tags( 34 | test_event.get("account_id"), test_event.get("tags"), ou_client 35 | ) 36 | stubber.assert_no_pending_responses() 37 | 38 | def test_account_tag_creation_multiple_tags(self): 39 | test_event = { 40 | "account_id": "123456789012", 41 | "tags": [ 42 | { 43 | "CreatedBy": "ADF", 44 | "TagName": "TagValue", 45 | } 46 | ], 47 | } 48 | ou_client = boto3.client("organizations") 49 | stubber = Stubber(ou_client) 50 | stubber.add_response( 51 | "tag_resource", 52 | {}, 53 | { 54 | "Tags": [ 55 | {"Key": "CreatedBy", "Value": "ADF"}, 56 | {"Key": "TagName", "Value": "TagValue"}, 57 | ], 58 | "ResourceId": "123456789012", 59 | }, 60 | ) 61 | stubber.activate() 62 | create_account_tags( 63 | test_event.get("account_id"), test_event.get("tags"), ou_client 64 | ) 65 | stubber.assert_no_pending_responses() 66 | -------------------------------------------------------------------------------- /src/lambda_codebase/account_processing/tests/test_delete_default_vpc.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates.
2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | Tests the delete_default_vpc lambda 6 | """ 7 | 8 | import unittest 9 | from unittest.mock import MagicMock, patch 10 | from botocore.exceptions import ClientError 11 | from delete_default_vpc import find_default_vpc 12 | 13 | 14 | class TestFindDefaultVPC(unittest.TestCase): 15 | 16 | @patch("tenacity.nap.time.sleep", MagicMock()) 17 | @patch('delete_default_vpc.patch_all') 18 | # pylint: disable=unused-argument 19 | def test_find_default_vpc(self, mock_patch_all): 20 | # Create a mock ec2_client 21 | mock_ec2_client = MagicMock() 22 | 23 | # Define the side effects for describe_vpcs method 24 | side_effects = [ 25 | ClientError({'Error': {'Code': 'MockTestError'}}, 'describe_vpcs'), 26 | ClientError({'Error': {'Code': 'MockTestError'}}, 'describe_vpcs'), 27 | {"Vpcs": [ 28 | {"VpcId": "vpc-123", "IsDefault": False}, 29 | {"VpcId": "vpc-456", "IsDefault": True}, 30 | {"VpcId": "vpc-789", "IsDefault": False} 31 | ]} 32 | ] 33 | 34 | # Set side_effect for the mock ec2_client.describe_vpcs 35 | mock_ec2_client.describe_vpcs.side_effect = side_effects 36 | 37 | # Call the function with the mock ec2_client 38 | default_vpc_id = find_default_vpc(mock_ec2_client) 39 | 40 | # Check if the correct default VPC ID is returned 41 | self.assertEqual(default_vpc_id, "vpc-456") 42 | 43 | # Check if describe_vpcs method is called 3 times 44 | self.assertEqual(mock_ec2_client.describe_vpcs.call_count, 3) 45 | 46 | 47 | if __name__ == '__main__': 48 | unittest.main() 49 | -------------------------------------------------------------------------------- /src/lambda_codebase/cleanup_legacy_stacks/handler.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | The Cleanup Legacy Stacks Handler that is called when ADF is installed or 6 | updated to remove previous ADF stacks in the management account, if these 7 | were to exist.
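    If importing the real handler fails (for example, due to a missing dependency), the fallback lambda_handler defined below reports a FAILED status to the CloudFormation ResponseURL instead, so the custom resource does not hang until the stack operation times out.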
8 | """ 9 | 10 | try: 11 | from cleanup_legacy_stacks import lambda_handler # pylint: disable=unused-import 12 | except Exception as err: # pylint: disable=broad-except 13 | import os 14 | import logging 15 | from urllib.request import Request, urlopen 16 | import json 17 | 18 | LOGGER = logging.getLogger(__name__) 19 | LOGGER.setLevel(os.environ.get("ADF_LOG_LEVEL", logging.INFO)) 20 | 21 | def lambda_handler(event, _context, prior_error=err): 22 | payload = { 23 | "LogicalResourceId": event["LogicalResourceId"], 24 | "PhysicalResourceId": event.get( 25 | "PhysicalResourceId", 26 | "NOT_YET_CREATED", 27 | ), 28 | "Status": "FAILED", 29 | "RequestId": event["RequestId"], 30 | "StackId": event["StackId"], 31 | "Reason": str(prior_error), 32 | } 33 | if not event["ResponseURL"].lower().startswith('http'): 34 | raise ValueError('ResponseURL is forbidden') from None 35 | with urlopen( 36 | Request( 37 | event["ResponseURL"], 38 | data=json.dumps(payload).encode(), 39 | headers={"content-type": ""}, 40 | method="PUT", 41 | ) 42 | ) as response: 43 | response_body = response.read().decode("utf-8") 44 | LOGGER.debug( 45 | "Response: %s", 46 | response_body, 47 | ) 48 | -------------------------------------------------------------------------------- /src/lambda_codebase/cleanup_legacy_stacks/requirements.txt: -------------------------------------------------------------------------------- 1 | boto3==1.34.80 2 | cfn-custom-resource~=1.0.1 3 | -------------------------------------------------------------------------------- /src/lambda_codebase/cross_region_bucket/handler.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | The Cross Region S3 Bucket Handler that is called when ADF is installed to 6 | create the bucket in the management account in the deployment region 7 | """ 8 | 9 | try: 10 | from main import lambda_handler # pylint: disable=unused-import 11 | except Exception as err: # pylint: disable=broad-except 12 | import os 13 | import logging 14 | from urllib.request import Request, urlopen 15 | import json 16 | 17 | LOGGER = logging.getLogger(__name__) 18 | LOGGER.setLevel(os.environ.get("ADF_LOG_LEVEL", logging.INFO)) 19 | 20 | def lambda_handler(event, _context, prior_error=err): 21 | payload = { 22 | "LogicalResourceId": event["LogicalResourceId"], 23 | "PhysicalResourceId": event.get( 24 | "PhysicalResourceId", 25 | "NOT_YET_CREATED", 26 | ), 27 | "Status": "FAILED", 28 | "RequestId": event["RequestId"], 29 | "StackId": event["StackId"], 30 | "Reason": str(prior_error), 31 | } 32 | if not event["ResponseURL"].lower().startswith('http'): 33 | raise ValueError('ResponseURL is forbidden') from None 34 | with urlopen( 35 | Request( 36 | event["ResponseURL"], 37 | data=json.dumps(payload).encode(), 38 | headers={"content-type": ""}, 39 | method="PUT", 40 | ) 41 | ) as response: 42 | response_body = response.read().decode("utf-8") 43 | LOGGER.debug( 44 | "Response: %s", 45 | response_body, 46 | ) 47 | -------------------------------------------------------------------------------- /src/lambda_codebase/cross_region_bucket/requirements.txt: -------------------------------------------------------------------------------- 1 | cfn_custom_resource~=1.0.1 2 | -------------------------------------------------------------------------------- /src/lambda_codebase/determine_event.py: -------------------------------------------------------------------------------- 1 | # 
Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | The Initial event object determination function 6 | """ 7 | 8 | import os 9 | 10 | import boto3 11 | 12 | # ADF imports 13 | from cache import Cache 14 | from event import Event 15 | from organizations import Organizations 16 | from parameter_store import ParameterStore 17 | 18 | REGION_DEFAULT = os.environ["AWS_REGION"] 19 | 20 | 21 | def lambda_handler(event, _): 22 | parameters = ParameterStore(region=REGION_DEFAULT, role=boto3) 23 | account_id = event.get('detail').get('requestParameters').get('accountId') 24 | cache = Cache() 25 | organizations = Organizations(role=boto3, account_id=account_id, cache=cache) 26 | parsed_event = Event( 27 | event=event, 28 | parameter_store=parameters, 29 | organizations=organizations, 30 | account_id=account_id 31 | ) 32 | 33 | account_path = ( 34 | "ROOT" if parsed_event.moved_to_root 35 | else parsed_event.organizations.build_account_path( 36 | parsed_event.destination_ou_id, 37 | [], # Initial empty array to hold OU Path 38 | ) 39 | ) 40 | 41 | if parsed_event.moved_to_root or parsed_event.moved_to_protected: 42 | return parsed_event.create_output_object( 43 | f"adf-bootstrap/{account_path}" 44 | ) 45 | 46 | parsed_event.set_destination_ou_name() 47 | 48 | return parsed_event.create_output_object(f"adf-bootstrap/{account_path}") 49 | -------------------------------------------------------------------------------- /src/lambda_codebase/generic_account_config.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | Executes for any account that has been bootstrapped, other 6 | than the deployment account. 7 | 8 | This step is responsible for starting the execution of the 9 | state machine on the deployment account to update the IAM 10 | role, KMS key policy, and S3 bucket policy to include the 11 | newly created account.
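    Illustrative input event (placeholder values; the required fields read by the handler below are shown, while the optional moved_to_protected, moved_to_root, and error keys are omitted): {"deployment_account_id": "222222222222", "deployment_account_region": "eu-west-1", "cross_account_access_role": "OrganizationAccountAccessRole", "full_path": "banking/testing", "regions": ["eu-west-1"], "account_id": "111111111111"}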
12 | """ 13 | 14 | import os 15 | 16 | # ADF imports 17 | from logger import configure_logger 18 | from partition import get_partition 19 | from stepfunctions import StepFunctions 20 | from sts import STS 21 | 22 | LOGGER = configure_logger(__name__) 23 | REGION_DEFAULT = os.getenv('AWS_REGION') 24 | MANAGEMENT_ACCOUNT_ID = os.getenv('MANAGEMENT_ACCOUNT_ID') 25 | 26 | 27 | def lambda_handler(event, _): 28 | sts = STS() 29 | 30 | deployment_account_id = event.get('deployment_account_id') 31 | partition = get_partition(REGION_DEFAULT) 32 | cross_account_access_role = event.get('cross_account_access_role') 33 | 34 | role = sts.assume_bootstrap_deployment_role( 35 | partition, 36 | MANAGEMENT_ACCOUNT_ID, 37 | deployment_account_id, 38 | cross_account_access_role, 39 | 'step_function', 40 | ) 41 | 42 | step_functions = StepFunctions( 43 | role=role, 44 | deployment_account_id=deployment_account_id, 45 | deployment_account_region=event['deployment_account_region'], 46 | full_path=event['full_path'], 47 | regions=event['regions'], 48 | account_ids=[event['account_id']], 49 | update_pipelines_only=( 50 | 1 if ( 51 | event.get('moved_to_protected') 52 | or event.get('moved_to_root') 53 | ) 54 | else 0 55 | ), 56 | error=event.get('error', 0), 57 | ) 58 | step_functions.execute_statemachine() 59 | 60 | return event 61 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/adf.yml.j2: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | accounts: 5 | - account_full_name: {{ DeploymentAccountFullName }} 6 | organizational_unit_path: /deployment 7 | email: {{ DeploymentAccountEmailAddress }} 8 | allow_billing: False 9 | delete_default_vpc: False 10 | alias: {{ DeploymentAccountAlias }} 11 | tags: 12 | - created_by: adf 13 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/adfconfig.yml.j2: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | roles: 5 | cross-account-access: {{ CrossAccountAccessRole }} 6 | # ^ The role by ADF to assume cross account access 7 | 8 | regions: 9 | deployment-account: {{ DeploymentAccountRegion }} 10 | # ^ The region you define as your main deployment region 11 | targets: # A list of regions you wish to bootstrap and also deploy into via pipelines 12 | {%- for Region in TargetRegions %} 13 | - {{ Region }} 14 | {%- endfor %} 15 | 16 | config: 17 | main-notification-endpoint: 18 | - type: {{ NotificationEndpointType }} # slack or email 19 | target: {{ NotificationEndpoint }} 20 | # ^ Email/Slack channel who receives notifications for the main bootstrapping pipeline 21 | protected: 22 | {%- for OU in ProtectedOUs %} 23 | - {{ OU }} 24 | {%- endfor %} 25 | moves: 26 | - name: to-root 27 | action: safe # Can be safe or remove-base 28 | scp: 29 | keep-default-scp: enabled 30 | # ^ Determines if the default AWSFullAccess SCP stays attached to all OU's 31 | scm: 32 | auto-create-repositories: enabled 33 | default-scm-branch: main 34 | # Optional: 35 | # default-scm-codecommit-account-id: "123456789012" 36 | deployment-maps: 37 | allow-empty-target: disabled 38 | # ^ Needs to be set to "enabled" to activate. Defaults to "disabled" when 39 | # not set. 
40 | # Optional config for multi-organization deployments of ADF: 41 | # org: 42 | # # Optional: Use this variable to define the AWS Organization in case of 43 | # # staged multi-organization ADF deployments 44 | # stage: dev 45 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/.gitignore: -------------------------------------------------------------------------------- 1 | .vscode 2 | .pyc 3 | .zip 4 | .DS_Store 5 | .pylintrc 6 | cleanup.py 7 | config 8 | config.sec 9 | config.bak 10 | policy.json 11 | pipeline.yml 12 | template-sam.yaml 13 | template-deploy.yml 14 | master-deploy.yml 15 | .pytest_cache 16 | 17 | # Byte-compiled / optimized / DLL files 18 | __pycache__/ 19 | *.py[cod] 20 | *$py.class 21 | 22 | # C extensions 23 | *.so 24 | 25 | # Distribution / packaging 26 | .Python 27 | develop-eggs/ 28 | dist/ 29 | downloads/ 30 | eggs/ 31 | .eggs/ 32 | lib/ 33 | lib64/ 34 | parts/ 35 | sdist/ 36 | var/ 37 | wheels/ 38 | *.egg-info/ 39 | .installed.cfg 40 | *.egg 41 | MANIFEST 42 | 43 | # PyInstaller 44 | # Usually these files are written by a python script from a template 45 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 46 | *.manifest 47 | *.spec 48 | 49 | # Installer logs 50 | pip-log.txt 51 | pip-delete-this-directory.txt 52 | 53 | # Unit test / coverage reports 54 | htmlcov/ 55 | .tox/ 56 | .nox/ 57 | .coverage 58 | .coverage.* 59 | .cache 60 | nosetests.xml 61 | coverage.xml 62 | *.cover 63 | .hypothesis/ 64 | .pytest_cache/ 65 | 66 | # Translations 67 | *.mo 68 | *.pot 69 | 70 | # Django stuff: 71 | *.log 72 | local_settings.py 73 | db.sqlite3 74 | 75 | # Flask stuff: 76 | instance/ 77 | .webassets-cache 78 | 79 | # Scrapy stuff: 80 | .scrapy 81 | 82 | # Sphinx documentation 83 | docs/_build/ 84 | 85 | # PyBuilder 86 | target/ 87 | 88 | # Jupyter Notebook 89 | .ipynb_checkpoints 90 | 91 | # IPython 92 | profile_default/ 93 | ipython_config.py 94 | 95 | # pyenv 96 | .python-version 97 | 98 | # celery beat schedule file 99 | celerybeat-schedule 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/README.md: -------------------------------------------------------------------------------- 1 | # Bootstrap Repository 2 | 3 | This repository is where you define your AWS Organizations structure in the 4 | form of folders. In the folders you can define AWS CloudFormation Templates and 5 | Service Control Policies *(SCPs)* that correlate to those specific 6 | Organizational Units *(OU)*. 7 | 8 | You can define `global.yml` or `regional.yml` templates. A `global.yml` template 9 | is applied to the *main* region *(as defined in `adfconfig.yml`)* of all accounts 10 | in a specific OU; a `regional.yml` template is applied to all target regions of 11 | the accounts in that OU. To create Service Control Policies, create a *scp.json* 12 | file in the Organizational Unit of your choice, as shown below. For more 13 | information, please see the admin guide for ADF.
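
A minimal illustrative `scp.json` *(a trimmed variant of the `example-scp.json`
that ships with ADF; replace the denied actions with your own guardrails)*:

```json
{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": "*",
            "Resource": "*"
        },
        {
            "Effect": "Deny",
            "Action": "cloudtrail:Stop*",
            "Resource": "*"
        }
    ]
}
```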
14 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/LICENSE.txt: -------------------------------------------------------------------------------- 1 | MIT No Attribution 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 4 | software and associated documentation files (the "Software"), to deal in the Software 5 | without restriction, including without limitation the rights to use, copy, modify, 6 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 7 | permit persons to whom the Software is furnished to do so. 8 | 9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 10 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 11 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 12 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 13 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 14 | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | # pylint: skip-file 5 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/determine_default_branch/handler.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | The Determine Default Branch Handler that is called when ADF is installed 6 | or updated to determine the default branch for the repository. 
7 | """ 8 | 9 | try: 10 | from determine_default_branch import lambda_handler # pylint: disable=unused-import 11 | except Exception as err: # pylint: disable=broad-except 12 | import os 13 | import logging 14 | from urllib.request import Request, urlopen 15 | import json 16 | 17 | LOGGER = logging.getLogger(__name__) 18 | LOGGER.setLevel(os.environ.get("ADF_LOG_LEVEL", logging.INFO)) 19 | 20 | def lambda_handler(event, _context, prior_error=err): 21 | payload = { 22 | "LogicalResourceId": event["LogicalResourceId"], 23 | "PhysicalResourceId": event.get( 24 | "PhysicalResourceId", 25 | "NOT_YET_CREATED", 26 | ), 27 | "Status": "FAILED", 28 | "RequestId": event["RequestId"], 29 | "StackId": event["StackId"], 30 | "Reason": str(prior_error), 31 | } 32 | if not event["ResponseURL"].lower().startswith('http'): 33 | raise ValueError('ResponseURL is forbidden') from None 34 | with urlopen( 35 | Request( 36 | event["ResponseURL"], 37 | data=json.dumps(payload).encode(), 38 | headers={"content-type": ""}, 39 | method="PUT", 40 | ) 41 | ) as response: 42 | response_body = response.read().decode("utf-8") 43 | LOGGER.debug( 44 | "Response: %s", 45 | response_body, 46 | ) 47 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/determine_default_branch/pytest.ini: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | [pytest] 5 | testpaths = tests 6 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/determine_default_branch/requirements.txt: -------------------------------------------------------------------------------- 1 | boto3==1.34.80 2 | cfn-custom-resource~=1.0.1 3 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/determine_default_branch/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | __init__ for tests module 6 | """ 7 | 8 | import sys 9 | import os 10 | 11 | sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) 12 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/initial_commit/handler.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | The Initial Commit Handler that is called when ADF is installed to commit the 6 | initial pipelines repository content. 
7 | """ 8 | 9 | try: 10 | from initial_commit import lambda_handler # pylint: disable=unused-import 11 | except Exception as err: # pylint: disable=broad-except 12 | import os 13 | import logging 14 | from urllib.request import Request, urlopen 15 | import json 16 | 17 | LOGGER = logging.getLogger(__name__) 18 | LOGGER.setLevel(os.environ.get("ADF_LOG_LEVEL", logging.INFO)) 19 | 20 | def lambda_handler(event, _context, prior_error=err): 21 | payload = { 22 | "LogicalResourceId": event["LogicalResourceId"], 23 | "PhysicalResourceId": event.get( 24 | "PhysicalResourceId", 25 | "NOT_YET_CREATED", 26 | ), 27 | "Status": "FAILED", 28 | "RequestId": event["RequestId"], 29 | "StackId": event["StackId"], 30 | "Reason": str(prior_error), 31 | } 32 | if not event["ResponseURL"].lower().startswith('http'): 33 | raise ValueError('ResponseURL is forbidden') from None 34 | with urlopen( 35 | Request( 36 | event["ResponseURL"], 37 | data=json.dumps(payload).encode(), 38 | headers={"content-type": ""}, 39 | method="PUT", 40 | ) 41 | ) as response: 42 | response_body = response.read().decode("utf-8") 43 | LOGGER.debug( 44 | "Response: %s", 45 | response_body, 46 | ) 47 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/initial_commit/pipelines_repository/README.md: -------------------------------------------------------------------------------- 1 | # Pipelines Repository 2 | 3 | This repository is where you create pipeline definitions with Deployment Map 4 | files. For examples on this process please see the `example-deployment_map.yml` 5 | or read the ADF user guide. 6 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/initial_commit/pytest.ini: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | [pytest] 5 | testpaths = tests 6 | norecursedirs = pipelines_repository 7 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/initial_commit/requirements.txt: -------------------------------------------------------------------------------- 1 | Jinja2==3.1.4 2 | boto3==1.34.80 3 | cfn-custom-resource~=1.0.1 4 | markupsafe==2.1.5 5 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/initial_commit/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 
2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | __init__ for tests module 6 | """ 7 | 8 | import sys 9 | import os 10 | 11 | sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) 12 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/requirements.txt: -------------------------------------------------------------------------------- 1 | pyyaml~=6.0.1 2 | schema==0.7.5 3 | tenacity==8.2.3 4 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/store_pipeline_definition.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | Pipeline Management Lambda Function 6 | Stores pipeline input from prior function to S3. 7 | """ 8 | 9 | import os 10 | import json 11 | 12 | import boto3 13 | 14 | from logger import configure_logger 15 | 16 | 17 | LOGGER = configure_logger(__name__) 18 | S3_BUCKET_NAME = os.environ["S3_BUCKET_NAME"] 19 | 20 | 21 | def upload_event_to_s3(s3_resource, definition): 22 | """ 23 | Upload the event received to the Pipeline Definition Bucket. 24 | 25 | Args: 26 | s3_resource (boto3.S3.resource): The S3 resource. 27 | definition (any): The pipeline definition, input and other data 28 | related to the pipeline to store. 29 | 30 | Returns: 31 | str: The location where the definition is stored in the S3 bucket. 32 | """ 33 | pipeline_name = definition.get("pipeline_input", {}).get("name") 34 | s3_object = s3_resource.Object( 35 | S3_BUCKET_NAME, 36 | f"pipelines/{pipeline_name}/definition.json", 37 | ) 38 | s3_object.put(Body=json.dumps(definition).encode("UTF-8")) 39 | return f"{S3_BUCKET_NAME}/pipelines/{pipeline_name}/" 40 | 41 | 42 | def lambda_handler(event, _): 43 | """ 44 | Writes the pipeline definition to S3. 45 | 46 | Args: 47 | event (dict): The input event, that is also returned as the output. 48 | 49 | Returns: 50 | dict: The input event + definition_location. 51 | """ 52 | output = event.copy() 53 | s3_resource = boto3.resource("s3") 54 | 55 | location = upload_event_to_s3(s3_resource, event) 56 | 57 | output["definition_location"] = location 58 | return output 59 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pytest.ini: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 
2 | # SPDX-License-Identifier: MIT-0 3 | 4 | [pytest] 5 | testpaths = tests 6 | norecursedirs = initial_commit determine_default_branch 7 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/aws-deployment-framework/325ae13fb83aad600d81494b8694362aae8f7e13/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/requirements.txt -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | # pylint: skip-file 5 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/stubs/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | # pylint: skip-file 5 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/tests/stubs/stub_iam.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | # pylint: skip-file 5 | 6 | """ 7 | Stubs for testing iam.py 8 | """ 9 | 10 | get_role_policy = { 11 | 'RoleName': 'string', 12 | 'PolicyName': 'string', 13 | 'PolicyDocument': { 14 | "Version": "2012-10-17", 15 | "Statement": [ 16 | { 17 | "Sid": "KMS", 18 | "Effect": "Allow", 19 | "Action": ["iam:ChangePassword"], 20 | "Resource": ( 21 | "arn:aws:kms:eu-west-1:111111111111:key/existing_key" 22 | ), 23 | }, 24 | { 25 | "Sid": "S3", 26 | "Effect": "Allow", 27 | "Action": "s3:ListAllMyBuckets", 28 | "Resource": [ 29 | "arn:aws:s3:::existing_bucket", 30 | "arn:aws:s3:::existing_bucket/*", 31 | ], 32 | }, 33 | { 34 | "Sid": "AssumeRole", 35 | "Effect": "Allow", 36 | "Action": "sts:AssumeRole", 37 | "Resource": ['something'], 38 | }, 39 | ] 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/update_pipelines.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | update_pipelines.py is responsible for starting the 6 | aws-deployment-framework-pipelines pipeline if 7 | it is not already executing or failed 8 | """ 9 | 10 | import os 11 | 12 | import boto3 13 | 14 | # ADF imports 15 | from logger import configure_logger 16 | from codepipeline import CodePipeline 17 | 18 | LOGGER = configure_logger(__name__) 19 | 20 | 21 | def generate_notify_message(event): 22 | """ 23 | The message we want to pass into the next step (Notify) of the 24 | state machine if the current account in execution has been 25 | bootstrapped. 
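    Illustrative return value (shape only, produced by the function body below; the account id and path are placeholders): {"update_only": 1, "message": "Account 111111111111 has now been bootstrapped into banking/testing"}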
26 | """ 27 | update_status = event.get('update_only', 1) 28 | if len(event.get('account_ids')) > 1: 29 | update_status = 1 30 | return { 31 | "update_only": update_status, 32 | "message": ( 33 | f"Account {event['account_ids'][0]} has now been bootstrapped " 34 | f"into {event['full_path']}" 35 | ) 36 | } 37 | 38 | 39 | def lambda_handler(event, _): 40 | """ 41 | Responsible for triggering the aws-deployment-framework-pipelines 42 | pipeline if its not already running 43 | """ 44 | codepipeline = CodePipeline( 45 | boto3, 46 | os.environ['AWS_REGION'] 47 | ) 48 | 49 | pipeline_status = codepipeline.get_pipeline_status( 50 | 'aws-deployment-framework-pipelines' 51 | ) 52 | 53 | if pipeline_status == 'InProgress': 54 | LOGGER.info( 55 | 'aws-deployment-framework-pipelines is in %s. Exiting.', 56 | pipeline_status 57 | ) 58 | return generate_notify_message(event) 59 | 60 | codepipeline.start_pipeline_execution( 61 | 'aws-deployment-framework-pipelines' 62 | ) 63 | 64 | return generate_notify_message(event) 65 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/example-scp.json: -------------------------------------------------------------------------------- 1 | { 2 | "Version": "2012-10-17", 3 | "Statement": [ 4 | { 5 | "Effect": "Deny", 6 | "Action": "cloudtrail:Stop*", 7 | "Resource": "*" 8 | }, 9 | { 10 | "Effect": "Allow", 11 | "Action": "*", 12 | "Resource": "*" 13 | }, 14 | { 15 | "Effect": "Deny", 16 | "Action": [ 17 | "config:DeleteConfigRule", 18 | "config:DeleteConfigurationRecorder", 19 | "config:DeleteDeliveryChannel", 20 | "config:Stop*" 21 | ], 22 | "Resource": "*" 23 | } 24 | ] 25 | } 26 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/example-tagging-policy.json: -------------------------------------------------------------------------------- 1 | { 2 | "tags": { 3 | "my_key": { 4 | "tag_key": { 5 | "@@assign": "my_key" 6 | }, 7 | "tag_value": { 8 | "@@assign": [ 9 | "value1", 10 | "value2" 11 | ] 12 | }, 13 | "enforced_for": { 14 | "@@assign": ["s3:bucket"] 15 | } 16 | } 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/LICENSE.txt: -------------------------------------------------------------------------------- 1 | MIT No Attribution 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 4 | software and associated documentation files (the "Software"), to deal in the Software 5 | without restriction, including without limitation the rights to use, copy, modify, 6 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 7 | permit persons to whom the Software is furnished to do so. 8 | 9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 10 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 11 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 12 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 13 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 14 | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | __init__ for adf-build 6 | """ 7 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/provisioner/src/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """__init__ 5 | """ 6 | 7 | from .configparser import read_config_files 8 | from .vpc import delete_default_vpc 9 | from .account import Account 10 | from .support import Support, SupportLevel 11 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/provisioner/src/account.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | Model for handling AWS accounts within the organization. The Account class 6 | allows you to create a new account or update an existing one. 7 | """ 8 | 9 | 10 | class Account: 11 | def __init__( 12 | self, 13 | full_name, 14 | email, 15 | ou_path, 16 | alias=None, 17 | delete_default_vpc=False, 18 | allow_direct_move_between_ou=False, 19 | allow_billing=True, 20 | support_level='basic', 21 | tags=None 22 | ): 23 | self.full_name = full_name 24 | self.email = email 25 | self.ou_path = ou_path 26 | self.delete_default_vpc = delete_default_vpc 27 | self.allow_direct_move_between_ou = allow_direct_move_between_ou 28 | self.allow_billing = allow_billing 29 | self.alias = alias 30 | self.support_level = support_level 31 | self.tags = tags or {} 32 | 33 | @classmethod 34 | def load_from_config(cls, config): 35 | """Initialize Account class from configuration object""" 36 | return cls( 37 | config["account_full_name"], 38 | config["email"], 39 | config["organizational_unit_path"], 40 | alias=config.get("alias", None), 41 | delete_default_vpc=config.get("delete_default_vpc", False), 42 | allow_direct_move_between_ou=( 43 | config.get("allow_direct_move_between_ou", False) 44 | ), 45 | allow_billing=config.get("allow_billing", True), 46 | support_level=config.get("support_level", 'basic'), 47 | tags=config.get("tags", {}), 48 | ) 49 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/provisioner/src/configparser.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | Module to parse and validate the YAML configuration files.
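    Each .yml file in the scanned folder is expected to contain an "accounts" list. An illustrative file, as parsed by yaml.safe_load (placeholder values; see Account.load_from_config in account.py for the full set of supported keys): {"accounts": [{"account_full_name": "banking-testing-account", "organizational_unit_path": "/banking/testing", "email": "account+banking-testing@example.com", "allow_billing": False}]}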
6 | """ 7 | 8 | 9 | import os 10 | import yaml 11 | from logger import configure_logger 12 | from .account import Account 13 | 14 | 15 | LOGGER = configure_logger(__name__) 16 | 17 | 18 | def read_config_files(folder): 19 | files = [os.path.join(folder, f) for f in os.listdir(folder)] 20 | accounts = [] 21 | for filename in files: 22 | if not filename.endswith(".yml"): 23 | # Skipping files that do not end with .yml 24 | continue 25 | accounts.extend(_read_config_file(filename)) 26 | 27 | return accounts 28 | 29 | 30 | def _read_config_file(filename): 31 | accounts = [] 32 | try: 33 | with open(filename, mode='r', encoding='utf-8') as stream: 34 | config = yaml.safe_load(stream) 35 | for account in config.get('accounts', []): 36 | accounts.append(Account.load_from_config(account)) 37 | return accounts 38 | except Exception as error: 39 | LOGGER.error( 40 | "Could not process %s due to an error: %s", 41 | filename, 42 | error, 43 | ) 44 | LOGGER.error( 45 | "Make sure the content of YAML files (.yml) are not empty and " 46 | "contain a valid YAML data structure.", 47 | ) 48 | raise 49 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/requirements-dev.txt: -------------------------------------------------------------------------------- 1 | mock~=5.1.0 2 | pylint~=3.1.0 3 | pytest~=8.1.1 4 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/requirements.txt: -------------------------------------------------------------------------------- 1 | # Install libs here that you might want in AWS CodeBuild (On the management 2 | # account) 3 | astroid==3.1.0 4 | aws-sam-cli==1.114.0 5 | boto3==1.34.80 6 | botocore==1.34.80 7 | pip~=24.0 8 | pyyaml~=6.0.1 9 | six~=1.16.0 10 | tenacity==8.2.3 11 | urllib3~=2.2.1 12 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/LICENSE.txt: -------------------------------------------------------------------------------- 1 | MIT No Attribution 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 4 | software and associated documentation files (the "Software"), to deal in the Software 5 | without restriction, including without limitation the rights to use, copy, modify, 6 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 7 | permit persons to whom the Software is furnished to do so. 8 | 9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 10 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 11 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 12 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 13 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 14 | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 
2 | # SPDX-License-Identifier: MIT-0 3 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/base_resolver.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | BaseResolver abstract class used for resolver implementations 6 | to inherit from so they use the same interface 7 | """ 8 | from abc import ABC, abstractmethod 9 | from cache import Cache 10 | 11 | 12 | class BaseResolver(ABC): 13 | """ 14 | The abstract BaseResolver class ensures that the interface 15 | of the methods for resolvers are defined and common code is stored here. 16 | """ 17 | 18 | def __init__(self): 19 | self.cache = Cache() 20 | 21 | @abstractmethod 22 | def resolve(self, lookup_str: str, random_filename: str) -> str: 23 | """ 24 | Assumes that the lookup_str is supported. 25 | 26 | This function will perform the intrinsic function to 27 | resolve the value as requested. 28 | 29 | Args: 30 | lookup_str (str): The lookup string that contains the lookup 31 | syntax. 32 | random_filename (str): The random filename, used to ensure 33 | unique uploads when required. 34 | 35 | Returns: 36 | str: The value as looked up using the intrinsic function. 37 | """ 38 | 39 | @abstractmethod 40 | def supports(self, lookup_str: str) -> bool: 41 | """ 42 | Check if this resolver supports the lookup_str syntax. 43 | 44 | Args: 45 | lookup_str (str): The lookup string that might have the lookup 46 | syntax or not. 47 | 48 | Returns: 49 | bool: True if this resolver supports the lookup_str syntax. 50 | False if not. 51 | """ 52 | 53 | @staticmethod 54 | def _is_optional(value: str) -> bool: 55 | return value.endswith('?') 56 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codecommit.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 
2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """Construct related to CodeCommit Input 5 | """ 6 | 7 | import os 8 | from aws_cdk import ( 9 | aws_codepipeline as _codepipeline, 10 | ) 11 | from constructs import Construct 12 | 13 | from cdk_constructs.adf_codepipeline import Action 14 | 15 | ADF_DEPLOYMENT_REGION = os.environ["AWS_REGION"] 16 | ADF_DEPLOYMENT_ACCOUNT_ID = os.environ["ACCOUNT_ID"] 17 | ADF_DEFAULT_BUILD_TIMEOUT = 20 18 | 19 | 20 | class CodeCommit(Construct): 21 | def __init__(self, scope: Construct, id: str, map_params: dict, **kwargs): #pylint: disable=W0622 22 | super().__init__(scope, id, **kwargs) 23 | default_providers = map_params.get("default_providers", {}) 24 | source_props = default_providers.get("source", {}).get("properties", {}) 25 | account_id = source_props.get("account_id", ADF_DEPLOYMENT_ACCOUNT_ID) 26 | 27 | self.source = _codepipeline.CfnPipeline.StageDeclarationProperty( 28 | name=f"Source-{account_id}", 29 | actions=[ 30 | Action( 31 | name="source", 32 | provider="CodeCommit", 33 | category="Source", 34 | run_order=1, 35 | map_params=map_params, 36 | action_name="source" 37 | ).config 38 | ] 39 | ) 40 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codeconnections.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """Construct related to CodeConnections CodePipeline Input 5 | """ 6 | 7 | import os 8 | 9 | from aws_cdk import ( 10 | aws_codepipeline as _codepipeline, 11 | ) 12 | from constructs import Construct 13 | 14 | from cdk_constructs.adf_codepipeline import Action 15 | 16 | ADF_DEPLOYMENT_REGION = os.environ["AWS_REGION"] 17 | ADF_DEPLOYMENT_ACCOUNT_ID = os.environ["ACCOUNT_ID"] 18 | ADF_DEFAULT_BUILD_TIMEOUT = 20 19 | 20 | 21 | class CodeConnections(Construct): 22 | def __init__(self, scope: Construct, id: str, map_params: dict, **kwargs): #pylint: disable=W0622 23 | super().__init__(scope, id, **kwargs) 24 | self.source = _codepipeline.CfnPipeline.StageDeclarationProperty( 25 | name="Source-CodeConnections", 26 | actions=[ 27 | Action( 28 | name="source", 29 | provider="CodeStarSourceConnection", 30 | owner="AWS", 31 | category="Source", 32 | run_order=1, 33 | map_params=map_params, 34 | action_name="source" 35 | ).config 36 | ] 37 | ) 38 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_jenkins.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 
2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """Construct related to Jenkins Codepipeline Input 5 | """ 6 | 7 | import os 8 | from aws_cdk import ( 9 | aws_codepipeline as _codepipeline, 10 | ) 11 | from constructs import Construct 12 | 13 | from cdk_constructs.adf_codepipeline import Action 14 | 15 | ADF_DEPLOYMENT_REGION = os.environ["AWS_REGION"] 16 | ADF_DEPLOYMENT_ACCOUNT_ID = os.environ["ACCOUNT_ID"] 17 | 18 | class Jenkins(Construct): 19 | def __init__(self, scope: Construct, id: str, map_params: dict, **kwargs): #pylint: disable=W0622 20 | super().__init__(scope, id, **kwargs) 21 | self.build = _codepipeline.CfnPipeline.StageDeclarationProperty( 22 | name="Build", 23 | actions=[ 24 | Action( 25 | name="Build", 26 | provider="Jenkins", 27 | category="Build", 28 | run_order=1, 29 | map_params=map_params, 30 | action_name="build" 31 | ).config 32 | ] 33 | ) 34 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_s3.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """Construct related to S3 Codepipeline Input 5 | """ 6 | 7 | import os 8 | 9 | from aws_cdk import ( 10 | aws_codepipeline as _codepipeline, 11 | ) 12 | from constructs import Construct 13 | 14 | from cdk_constructs.adf_codepipeline import Action 15 | 16 | ADF_DEPLOYMENT_REGION = os.environ["AWS_REGION"] 17 | ADF_DEPLOYMENT_ACCOUNT_ID = os.environ["ACCOUNT_ID"] 18 | ADF_DEFAULT_BUILD_TIMEOUT = 20 19 | 20 | 21 | class S3(Construct): 22 | def __init__(self, scope: Construct, id: str, map_params: dict, **kwargs): #pylint: disable=W0622 23 | super().__init__(scope, id, **kwargs) 24 | self.source = _codepipeline.CfnPipeline.StageDeclarationProperty( 25 | name="Source-S3", 26 | actions=[ 27 | Action( 28 | name="source", 29 | provider="S3", 30 | owner="AWS", 31 | category="Source", 32 | run_order=1, 33 | map_params=map_params, 34 | action_name="source" 35 | ).config 36 | ] 37 | ) 38 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | Tests for cdk_constructs 6 | """ 7 | 8 | import sys 9 | import os 10 | 11 | sys.path.append( 12 | os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))) 13 | sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) 14 | sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__)))) 15 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/tests/adf_codepipeline_test_constants.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 
2 | # SPDX-License-Identifier: MIT-0 3 | 4 | # pylint: skip-file 5 | 6 | BASE_MAP_PARAMS = { 7 | 'default_providers': { 8 | 'source': { 9 | 'properties': { 10 | 'account_id': 111111111111, 11 | } 12 | }, 13 | 'build': {}, 14 | 'deploy': {}, 15 | }, 16 | 'name': 'name', 17 | } 18 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/tests/test_adf_codepipeline_generate.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | # pylint: skip-file 5 | 6 | from mock import patch 7 | from cdk_constructs.adf_codepipeline import Action 8 | from adf_codepipeline_test_constants import BASE_MAP_PARAMS 9 | 10 | @patch('cdk_constructs.adf_codepipeline._codepipeline.CfnPipeline.ActionDeclarationProperty') 11 | @patch('cdk_constructs.adf_codepipeline.Action._get_output_artifacts') 12 | @patch('cdk_constructs.adf_codepipeline.Action._get_input_artifacts') 13 | def test_generates_with_input_and_output_artifacts_when_given(input_mock, output_mock, action_decl_mock): 14 | action_decl_mock.side_effect = lambda **x: x 15 | mocked_input_value = 'InputArtifacts' 16 | mocked_output_value = 'OutputArtifacts' 17 | input_mock.return_value = mocked_input_value 18 | output_mock.return_value = mocked_output_value 19 | action = Action( 20 | map_params=BASE_MAP_PARAMS, 21 | category='Build', 22 | provider='CodeBuild', 23 | ) 24 | assert action.config['input_artifacts'] == mocked_input_value 25 | assert action.config['output_artifacts'] == mocked_output_value 26 | 27 | 28 | @patch('cdk_constructs.adf_codepipeline._codepipeline.CfnPipeline.ActionDeclarationProperty') 29 | @patch('cdk_constructs.adf_codepipeline.Action._get_output_artifacts') 30 | @patch('cdk_constructs.adf_codepipeline.Action._get_input_artifacts') 31 | def test_generates_without_input_and_output_artifacts(input_mock, output_mock, action_decl_mock): 32 | action_decl_mock.side_effect = lambda **x: x 33 | mocked_value = None 34 | input_mock.return_value = mocked_value 35 | output_mock.return_value = mocked_value 36 | action = Action( 37 | map_params=BASE_MAP_PARAMS, 38 | category='Build', 39 | provider='CodeBuild', 40 | ) 41 | assert 'input_artifacts' not in action.config 42 | assert 'output_artifacts' not in action.config 43 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: MIT-0 3 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/main.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | This is the main construct file file for PipelineStack 6 | """ 7 | 8 | from aws_cdk import Stack 9 | from constructs import Construct 10 | from logger import configure_logger 11 | 12 | from cdk_stacks.adf_default_pipeline import ( 13 | generate_adf_default_pipeline as generate_default_pipeline, 14 | PIPELINE_TYPE as DEFAULT_PIPELINE, 15 | ) 16 | 17 | LOGGER = configure_logger(__name__) 18 | 19 | 20 | class PipelineStack(Stack): 21 | def __init__( 22 | self, 23 | scope: Construct, 24 | stack_input: dict, 25 | **kwargs, 26 | ) -> None: # pylint: disable=R0912, R0915 27 | """ 28 | Initialize the pipeline stack 29 | """ 30 | super().__init__( 31 | scope, 32 | stack_input["pipeline_input"]['name'], 33 | **kwargs, 34 | ) 35 | LOGGER.info( 36 | 'Pipeline creation/update of %s commenced', 37 | stack_input['pipeline_input']['name'], 38 | ) 39 | pipeline_type = ( 40 | stack_input['pipeline_input'] 41 | .get('params', {}) 42 | .get('pipeline_type', DEFAULT_PIPELINE) 43 | .lower() 44 | ) 45 | 46 | self.generate_pipeline(pipeline_type, stack_input) 47 | 48 | def generate_pipeline(self, pipeline_type, stack_input): 49 | if pipeline_type == DEFAULT_PIPELINE: 50 | generate_default_pipeline(self, stack_input) 51 | else: 52 | raise ValueError(f'{pipeline_type} is not defined in main.py') 53 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: MIT-0 3 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/generate_pipeline_stacks.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Copyright Amazon.com Inc. or its affiliates. 4 | # SPDX-License-Identifier: MIT-0 5 | 6 | """ 7 | This file is pulled into CodeBuild containers 8 | and used to build the pipeline CloudFormation stacks via the AWS CDK 9 | """ 10 | 11 | import glob 12 | import os 13 | import json 14 | 15 | # CDK Specific 16 | from aws_cdk import App, BootstraplessSynthesizer 17 | from cdk_stacks.main import PipelineStack 18 | 19 | from logger import configure_logger 20 | 21 | LOGGER = configure_logger(__name__) 22 | ADF_VERSION = os.environ["ADF_VERSION"] 23 | ADF_LOG_LEVEL = os.environ["ADF_LOG_LEVEL"] 24 | 25 | 26 | def main(): 27 | LOGGER.info("ADF Version %s", ADF_VERSION) 28 | LOGGER.info("ADF Log Level is %s", ADF_LOG_LEVEL) 29 | _templates = glob.glob("cdk_inputs/*.json") 30 | for template_path in _templates: 31 | with open(template_path, encoding="utf-8") as template: 32 | stack_input = json.load(template) 33 | app = App() 34 | PipelineStack( 35 | app, 36 | stack_input, 37 | synthesizer=BootstraplessSynthesizer(), 38 | ) 39 | app.synth() 40 | 41 | 42 | if __name__ == "__main__": 43 | main() 44 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/pytest.ini: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 
2 | # SPDX-License-Identifier: MIT-0 3 | 4 | [pytest] 5 | testpaths = 6 | cdk_constructs/tests 7 | cdk_stacks/tests 8 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/pytest.ini: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | [pytest] 5 | testpaths = tests 6 | norecursedirs = terraform 7 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/requirements-dev.txt: -------------------------------------------------------------------------------- 1 | pytest-env~=1.1.3 2 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/requirements.txt: -------------------------------------------------------------------------------- 1 | boto3==1.34.80 2 | botocore==1.34.80 3 | docopt~=0.6.2 4 | schema==0.7.5 5 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/sts.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Copyright Amazon.com Inc. or its affiliates. 4 | # SPDX-License-Identifier: MIT-0 5 | 6 | if [ -z "$AWS_PARTITION" ]; then 7 | AWS_PARTITION="aws" 8 | fi 9 | 10 | # Example usage sts 123456789012 adf-pipeline-terraform-deployment 11 | export ROLE=arn:$AWS_PARTITION:iam::$1:role/$2 12 | temp_role=$(aws sts assume-role --role-arn $ROLE --role-session-name $2-$ADF_PROJECT_NAME) 13 | export AWS_ACCESS_KEY_ID=$(echo $temp_role | jq -r .Credentials.AccessKeyId) 14 | export AWS_SECRET_ACCESS_KEY=$(echo $temp_role | jq -r .Credentials.SecretAccessKey) 15 | export AWS_SESSION_TOKEN=$(echo $temp_role | jq -r .Credentials.SessionToken) 16 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/terraform/install_terraform.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Copyright Amazon.com Inc. or its affiliates. 4 | # SPDX-License-Identifier: MIT-0 5 | 6 | set -e 7 | 8 | apt-get install --assume-yes jq 9 | TERRAFORM_URL="https://releases.hashicorp.com/terraform/$TERRAFORM_VERSION/terraform_${TERRAFORM_VERSION}_linux_amd64.zip" 10 | echo "Downloading $TERRAFORM_URL." 11 | curl -o terraform.zip $TERRAFORM_URL 12 | unzip terraform.zip 13 | export PATH=$PATH:$(pwd) 14 | terraform --version 15 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/helpers/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 
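# --- Editor's note (not part of the ADF sources): a minimal boto3 sketch of
# what helpers/sts.sh above does with the AWS CLI and jq: assume a role and
# expose the temporary credentials through the environment. The function
# name and all argument values are placeholders.
import os

import boto3


def assume_adf_role(account_id: str, role_name: str, session_name: str) -> None:
    partition = os.environ.get('AWS_PARTITION', 'aws')
    role_arn = f'arn:{partition}:iam::{account_id}:role/{role_name}'
    credentials = boto3.client('sts').assume_role(
        RoleArn=role_arn,
        RoleSessionName=session_name,
    )['Credentials']
    # Same effect as the export statements in sts.sh: subsequent SDK/CLI
    # calls made from this process pick up the assumed role.
    os.environ['AWS_ACCESS_KEY_ID'] = credentials['AccessKeyId']
    os.environ['AWS_SECRET_ACCESS_KEY'] = credentials['SecretAccessKey']
    os.environ['AWS_SESSION_TOKEN'] = credentials['SessionToken']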
2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | __init__ for tests module 6 | """ 7 | 8 | import sys 9 | import os 10 | 11 | sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) 12 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/pytest.ini: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | [pytest] 5 | testpaths = tests 6 | norecursedirs = python cdk helpers 7 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/LICENSE.txt: -------------------------------------------------------------------------------- 1 | MIT No Attribution 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 4 | software and associated documentation files (the "Software"), to deal in the Software 5 | without restriction, including without limitation the rights to use, copy, modify, 6 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 7 | permit persons to whom the Software is furnished to do so. 8 | 9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 10 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 11 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 12 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 13 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 14 | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/cache.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | Used as a cache for AWS Organizations calls within threads. 6 | A single instance of this class is passed into all threads to act 7 | as a cache 8 | """ 9 | 10 | 11 | class Cache: 12 | def __init__(self): 13 | self._stash = {} 14 | 15 | def exists(self, key): 16 | return key in self._stash 17 | 18 | def get(self, key): 19 | try: 20 | return self._stash[key] 21 | except KeyError: 22 | return None 23 | 24 | def add(self, key, value): 25 | self._stash[key] = value 26 | 27 | def remove(self, key): 28 | if key in self._stash: 29 | del self._stash[key] 30 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/cloudwatch.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 
2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | Standardized class for pushing CloudWatch metric data to a service within the ADF Namespace 6 | """ 7 | 8 | import boto3 9 | 10 | 11 | class ADFMetrics: 12 | def __init__(self, client: boto3.client, service, namespace="ADF") -> None: 13 | """ 14 | Client: Any Boto3 CloudWatch client 15 | Service: The name of the Service e.g PipelineManagement/Repository 16 | or AccountManagement/EnableSupport namespace. 17 | Defaults to ADF. 18 | """ 19 | self.cw_client = client 20 | self.namespace = f"{namespace}/{service}" 21 | 22 | def put_metric_data(self, metric_data): 23 | if not isinstance(metric_data, list): 24 | metric_data = [metric_data] 25 | self.cw_client.put_metric_data(Namespace=self.namespace, MetricData=metric_data) 26 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/codepipeline.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """CodePipeline module used throughout the ADF 5 | """ 6 | 7 | from logger import configure_logger 8 | 9 | LOGGER = configure_logger(__name__) 10 | 11 | 12 | class CodePipeline(): 13 | """Class used for modeling CodePipeline 14 | """ 15 | 16 | def __init__(self, role, region): 17 | self.client = role.client('codepipeline', region_name=region) 18 | 19 | def get_pipeline_status(self, pipeline_name): 20 | """Gets a Pipeline Execution Status 21 | """ 22 | try: 23 | response = self.client.get_pipeline_state( 24 | name=pipeline_name 25 | ) 26 | 27 | return list( 28 | response.get('stageStates') 29 | )[0]['latestExecution']['status'] 30 | except KeyError: 31 | LOGGER.error('Pipeline status for %s could not be determined', pipeline_name) 32 | return None 33 | 34 | def start_pipeline_execution(self, pipeline_name): 35 | self.client.start_pipeline_execution( 36 | name=pipeline_name 37 | ) 38 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/errors.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | A collection of all Error Types used in ADF 6 | """ 7 | 8 | 9 | class Error(Exception): 10 | """Base class for exceptions in this module.""" 11 | 12 | 13 | class RetryError(Exception): 14 | """Retry Error used for Step Functions logic""" 15 | 16 | 17 | class ParameterNotFoundError(Exception): 18 | """ 19 | Parameter not found in Parameter Store 20 | """ 21 | 22 | 23 | class InvalidConfigError(Exception): 24 | """ 25 | Used for invalid configuration(s) within 26 | adfconfig.yml and deployment_map.yml 27 | """ 28 | 29 | 30 | class GenericAccountConfigureError(Exception): 31 | """ 32 | Generic Account cannot be setup since no base stack is present 33 | """ 34 | 35 | 36 | class AccountCreationNotFinishedError(Exception): 37 | """ 38 | When we interact with a Boto3 API call and it fails with the 39 | SubscriptionRequiredException error code. This implies that the 40 | account is still being created behind the scenes. To ease troubleshooting 41 | we raise this Exception class instead. To clarify what is happening. 
42 | """ 43 | 44 | 45 | class RootOUIDError(Exception): 46 | """ 47 | Raised when an account is moved to the root of the organization 48 | and a describe call is attempted again the root of the org. 49 | """ 50 | 51 | 52 | class InvalidTemplateError(Exception): 53 | """ 54 | Raised when a CloudFormation template fails the Validate Template call 55 | """ 56 | 57 | 58 | class InvalidDeploymentMapError(Exception): 59 | """ 60 | Raised when a Deployment Map is invalid 61 | """ 62 | 63 | 64 | class NoAccountsFoundError(Exception): 65 | """ 66 | Raised when there are no Accounts found a specific OU defined in the Deployment Map 67 | """ 68 | 69 | 70 | class InsufficientWaveSizeError(Exception): 71 | """ 72 | Raised when the defined wave size is less than the calculated minimum actions 73 | """ 74 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/list_utils.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | List utilities to ease list management. 6 | """ 7 | 8 | 9 | def _flatten_list(input_list): 10 | result = [] 11 | for item in input_list: 12 | if isinstance(item, list): 13 | if item: 14 | result.extend( 15 | _flatten_list(item), 16 | ) 17 | else: 18 | result.append(item) 19 | return result 20 | 21 | 22 | def flatten_to_unique_sorted(input_list): 23 | """ 24 | Flatten nested lists and return a unique and sorted list of items. 25 | This will recursively iterate over the lists and flatten them together 26 | into one list. It will then remove redundant items, followed by sorting 27 | them. 28 | 29 | Args: 30 | input_list (list): The input list that could hold multiple levels of 31 | nested lists. 32 | 33 | Returns: 34 | List with unique and sorted list of items. 35 | """ 36 | result = _flatten_list(input_list) 37 | return sorted(list(set(result))) 38 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/logger.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """Primary Logging Configuration Function 5 | """ 6 | 7 | import logging 8 | import os 9 | 10 | 11 | def configure_logger(logger_name): 12 | """Configures a generic logger which can be imported and used as needed 13 | """ 14 | 15 | # Create logger and define INFO as the log level 16 | logger = logging.getLogger(logger_name) 17 | logger.setLevel(os.environ.get("ADF_LOG_LEVEL", logging.INFO)) 18 | logger.propagate = False 19 | 20 | # Define our logging formatter 21 | formatter = logging.Formatter( 22 | '%(asctime)s | %(levelname)s | %(name)s | %(message)s ' 23 | '| (%(filename)s:%(lineno)d)', 24 | ) 25 | 26 | # Create our stream handler and apply the formatting 27 | stream_handler = logging.StreamHandler() 28 | stream_handler.setFormatter(formatter) 29 | 30 | # Add the stream handler to the logger 31 | logger.addHandler(stream_handler) 32 | 33 | return logger 34 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/paginator.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. 
or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | Paginator used with certain boto3 calls 6 | when pagination is required 7 | """ 8 | 9 | 10 | def paginator(method, **kwargs): 11 | client = method.__self__ 12 | iterator = client.get_paginator(method.__name__) 13 | for page in iterator.paginate(**kwargs).result_key_iters(): 14 | yield from page 15 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/partition.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """Partition. 5 | 6 | A partition is a group of AWS Regions. This module provides a helper function 7 | to help determine the proper partition given a region name. For more details 8 | on partitions, see: 9 | https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html#genref-aws-service-namespaces 10 | """ 11 | 12 | from boto3.session import Session 13 | 14 | COMPATIBLE_PARTITIONS = ['aws-us-gov', 'aws'] 15 | 16 | 17 | class IncompatiblePartitionError(Exception): 18 | """Raised in case the partition is not supported.""" 19 | 20 | 21 | def get_partition(region_name: str) -> str: 22 | """Given the region, this function will return the appropriate partition. 23 | 24 | :param region_name: The name of the region (us-east-1, us-gov-west-1) 25 | :return: Returns the partition name as a string. 26 | """ 27 | partition = Session().get_partition_for_region(region_name) 28 | if partition not in COMPATIBLE_PARTITIONS: 29 | raise IncompatiblePartitionError( 30 | f'The {partition} partition is not supported by this version of ' 31 | 'ADF yet.' 32 | ) 33 | 34 | return partition 35 | 36 | 37 | def get_organization_api_region(region_name: str) -> str: 38 | """ 39 | Given the current region, it will determine the partition and use 40 | that to return the Organizations API region (us-east-1 or us-gov-west-1) 41 | 42 | :param region_name: The name of the region (eu-west-1, us-gov-east-1) 43 | :return: Returns the AWS Organizations API region to use as a string. 44 | """ 45 | if get_partition(region_name) == 'aws-us-gov': 46 | return 'us-gov-west-1' 47 | 48 | return 'us-east-1' 49 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/pytest.ini: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 
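# --- Editor's note (not part of the ADF sources): a hedged usage sketch for
# paginator.py and partition.py above. The Organizations client and the
# list_accounts call are illustrative; any paginated boto3 API would work.
import boto3

from paginator import paginator
from partition import get_organization_api_region

# paginator() derives the paginator name from the bound client method, so
# the method itself is passed in rather than a string:
region = get_organization_api_region('eu-west-1')  # resolves to 'us-east-1'
organizations = boto3.client('organizations', region_name=region)
for account in paginator(organizations.list_accounts):
    print(account['Id'], account['Status'])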
2 | # SPDX-License-Identifier: MIT-0 3 | 4 | [pytest] 5 | testpaths = tests 6 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/requirements.txt: -------------------------------------------------------------------------------- 1 | # Install libs here that you might want in the ADF Shared Python Lambda Layer 2 | boto3==1.34.80 3 | botocore==1.34.80 4 | pyyaml~=6.0.1 5 | schema~=0.7.5 6 | tenacity==8.2.3 7 | typing-extensions~=4.11.0 8 | urllib3~=1.26.18 ; python_version < "3.10" 9 | urllib3~=2.2.1 ; python_version >= "3.10" 10 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | __init__ for bootstrap_repository 6 | """ 7 | 8 | import sys 9 | import os 10 | 11 | sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) 12 | sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__)))) 13 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/stubs/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/stubs/stub_event.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | Stubs for testing event.py 6 | """ 7 | 8 | 9 | class StubEvent(): 10 | def __init__(self): 11 | self.deployment_account_region = 'us-east-1' 12 | self.target_regions = ['region-1', 'region-2'] 13 | self.account_id = '123456789012' 14 | self.deployment_account_id = '111111111111' 15 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/stubs/stub_kms.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | # pylint: skip-file 5 | 6 | """ 7 | Stubs for testing KMS 8 | """ 9 | 10 | kms_get_policy = { 11 | 'Policy': '{\n "Version" : "2012-10-17",\n "Id" : "key-default-1",\n "Statement" : [ {\n "Sid" : "Enable IAM User Permissions",\n "Effect" : "Allow",\n "Principal" : {\n "AWS" : ["arn:aws:iam::111111111111:root"]\n },\n "Action" : "kms:*",\n "Resource" : "*"\n } ]\n}', 12 | } 13 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/stubs/stub_organizations.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 
2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | Stubs for testing organization.py 6 | """ 7 | 8 | describe_organization = { 9 | 'Organization': { 10 | 'Id': 'some_org_id', 11 | 'Arn': 'string', 12 | 'FeatureSet': 'ALL', 13 | 'MasterAccountArn': 'string', 14 | 'MasterAccountId': 'some_management_account_id', 15 | 'MasterAccountEmail': 'string', 16 | 'AvailablePolicyTypes': [ 17 | { 18 | 'Type': 'SERVICE_CONTROL_POLICY', 19 | 'Status': 'ENABLED' 20 | }, 21 | ] 22 | } 23 | } 24 | 25 | list_parents = { 26 | 'Parents': [ 27 | { 28 | 'Id': 'some_id', 29 | 'Type': 'ORGANIZATIONAL_UNIT' 30 | }, 31 | ], 32 | 'NextToken': 'string' 33 | } 34 | 35 | list_parents_root = { 36 | 'Parents': [ 37 | { 38 | 'Id': 'some_id', 39 | 'Type': 'ROOT' 40 | }, 41 | ], 42 | 'NextToken': 'string' 43 | } 44 | 45 | describe_organizational_unit = { 46 | 'OrganizationalUnit': { 47 | 'Id': 'some_org_unit_id', 48 | 'Arn': 'string', 49 | 'Name': 'some_ou_name' 50 | } 51 | } 52 | 53 | describe_account = { 54 | 'Account': { 55 | 'Id': 'some_account_id', 56 | 'Arn': 'string', 57 | 'Email': 'some_account_email', 58 | 'Name': 'some_account_name', 59 | 'Status': 'ACTIVE', 60 | 'JoinedMethod': 'INVITED' 61 | # Excluding JoinedTimestamp to avoid 62 | # adding dependency on datetime 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/stubs/stub_parameter_store.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | Stubs for testing parameter_store.py 6 | """ 7 | 8 | get_parameter = { 9 | 'Parameter': { 10 | 'Name': 'string', 11 | 'Type': 'String', 12 | 'Value': 'some_parameter_value', 13 | 'Version': 123, 14 | 'Selector': 'string', 15 | 'SourceResult': 'string', 16 | 'LastModifiedDate': 1, 17 | 'ARN': 'string' 18 | } 19 | } 20 | 21 | get_parameters_by_path = { 22 | 'Parameters': [ 23 | { 24 | 'Name': 'string', 25 | 'Type': 'String', 26 | 'Value': 'string', 27 | 'Version': 123, 28 | 'Selector': 'string', 29 | 'SourceResult': 'string', 30 | 'LastModifiedDate': 1, 31 | 'ARN': 'string' 32 | }, 33 | ], 34 | 'NextToken': 'string' 35 | } 36 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/stubs/stub_s3.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | Stubs for testing s3.py 6 | """ 7 | 8 | s3_get_bucket_policy = { 9 | 'Policy': { 10 | "Version": "2012-10-17", 11 | "Id": "Policy14564645656", 12 | "Statement": [{ 13 | "Sid": "Stmt1445654645618", 14 | "Effect": "Allow", 15 | "Principal": { 16 | "AWS": [ 17 | "arn:aws:iam::11111222222:root", 18 | "arn:aws:iam::99999999999:root", 19 | "SOME_RANDOM_DEAD_GUID" 20 | ] 21 | }, 22 | "Action": "s3:Get*", 23 | "Resource": "arn:aws:s3:::bucket_name/abc/*" 24 | }] 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/stubs/stub_step_functions.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 
2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | Stubs for testing stepfunction.py 6 | """ 7 | 8 | start_execution = { 9 | 'executionArn': 'some_execution_arn', 10 | 'startDate': 123 11 | } 12 | 13 | describe_execution = { 14 | 'executionArn': 'string', 15 | 'stateMachineArn': 'string', 16 | 'name': 'string', 17 | 'status': 'RUNNING', 18 | 'startDate': 123, 19 | 'stopDate': 123, 20 | 'input': 'string', 21 | 'output': 'string' 22 | } 23 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_cache.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | # pylint: skip-file 5 | 6 | from pytest import fixture 7 | from cache import Cache 8 | 9 | 10 | @fixture 11 | def cls(): 12 | return Cache() 13 | 14 | 15 | def test_add(cls): 16 | cls.add('my_key', 'my_value') 17 | assert cls.get('my_key') == 'my_value' 18 | 19 | 20 | def test_exists(cls): 21 | cls.add('my_key', 'my_value') 22 | cls.add('false_key', False) 23 | assert cls.exists('my_key') is True 24 | assert cls.exists('false_key') is True 25 | assert cls.exists('missing_key') is False 26 | 27 | 28 | def test_get(cls): 29 | cls.add('my_key', 'my_value') 30 | cls.add('true_key', True) 31 | cls.add('false_key', False) 32 | assert cls.get('my_key') == 'my_value' 33 | assert cls.get('true_key') is True 34 | assert cls.get('false_key') is False 35 | 36 | 37 | def test_remove_existing_key(cls): 38 | # Arrange 39 | cls.add("test_key", "test_value") 40 | 41 | # Act 42 | cls.remove("test_key") 43 | 44 | # Assert 45 | assert cls.exists("test_key") is False 46 | assert cls.get("test_key") is None 47 | 48 | 49 | def test_remove_non_existing_key(cls): 50 | # Arrange 51 | cls.remove("non_existing_key") 52 | 53 | # Assert 54 | assert cls.exists("non_existing_key") is False 55 | assert cls.get("non_existing_key") is None 56 | 57 | 58 | def test_remove_and_read(cls): 59 | # Arrange 60 | cls.add("test_key", "test_value") 61 | 62 | # Act 63 | cls.remove("test_key") 64 | cls.add("test_key", "new_value") 65 | 66 | # Assert 67 | assert cls.exists("test_key") is True 68 | assert cls.get("test_key") == "new_value" 69 | 70 | 71 | def test_remove_multiple_keys(cls): 72 | # Arrange 73 | cls.add("key1", "value1") 74 | cls.add("key2", "value2") 75 | 76 | # Act 77 | cls.remove("key1") 78 | 79 | # Assert 80 | assert cls.exists("key1") is False 81 | assert cls.exists("key2") is True 82 | assert cls.get("key1") is None 83 | assert cls.get("key2") == "value2" 84 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_codepipeline.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 
2 | # SPDX-License-Identifier: MIT-0 3 | 4 | # pylint: skip-file 5 | 6 | import os 7 | import boto3 8 | from pytest import fixture 9 | from stubs import stub_codepipeline 10 | from mock import Mock 11 | 12 | from paginator import paginator 13 | from codepipeline import CodePipeline 14 | 15 | 16 | @fixture 17 | def cls(): 18 | return CodePipeline(boto3, os.environ["AWS_REGION"]) 19 | 20 | 21 | def test_get_pipeline_status(cls): 22 | cls.client = Mock() 23 | cls.client.get_pipeline_state.return_value = stub_codepipeline.get_pipeline_state 24 | assert cls.get_pipeline_status('my_pipeline') == 'Succeeded' 25 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_list_utils.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | # pylint: skip-file 5 | 6 | from ..list_utils import flatten_to_unique_sorted 7 | 8 | 9 | def test_flatten_to_unique_sorted(): 10 | """ 11 | Flatten and sort the list 12 | """ 13 | result = flatten_to_unique_sorted( 14 | [ 15 | # Nested lists: 16 | ['val9', 'val0', 'val1'], 17 | ['val1', 'val2'], 18 | # Empty list 19 | [], 20 | # Double nested list: 21 | [ 22 | ['val8', 'val2'], 23 | 'val4', 24 | ], 25 | # Single item 26 | 'val3', 27 | ], 28 | ) 29 | assert result == ['val0', 'val1', 'val2', 'val3', 'val4', 'val8', 'val9'] 30 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_partition.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """Tests for partition.py""" 5 | 6 | import pytest 7 | 8 | from partition import get_partition, IncompatiblePartitionError 9 | 10 | _us_commercial_regions = [ 11 | 'us-east-1', 12 | 'us-west-1', 13 | 'us-west-2' 14 | ] 15 | 16 | _govcloud_regions = [ 17 | 'us-gov-west-1', 18 | 'us-gov-east-1' 19 | ] 20 | 21 | _incompatible_regions = [ 22 | 'cn-north-1', 23 | 'cn-northwest-1' 24 | ] 25 | 26 | 27 | @pytest.mark.parametrize('region', _govcloud_regions) 28 | def test_partition_govcloud_regions(region): 29 | assert get_partition(region) == 'aws-us-gov' 30 | 31 | 32 | @pytest.mark.parametrize('region', _us_commercial_regions) 33 | def test_partition_us_commercial_regions(region): 34 | assert get_partition(region) == 'aws' 35 | 36 | 37 | @pytest.mark.parametrize('region', _incompatible_regions) 38 | def test_partition_incompatible_regions(region): 39 | with pytest.raises(IncompatiblePartitionError) as excinfo: 40 | get_partition(region) 41 | 42 | error_message = str(excinfo.value) 43 | assert error_message.find("partition is not supported") >= 0 44 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_pipeline.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 
2 | # SPDX-License-Identifier: MIT-0 3 | 4 | # pylint: skip-file 5 | 6 | import os 7 | import sys 8 | import yaml 9 | import boto3 10 | 11 | from pytest import fixture 12 | from ..pipeline import Pipeline 13 | 14 | 15 | @fixture 16 | def cls(): 17 | return Pipeline( 18 | pipeline={ 19 | "name": "pipeline", 20 | "params": {"key": "value"}, 21 | "targets": [], 22 | "default_providers": { 23 | "source": { 24 | "name": "codecommit", 25 | "properties": { 26 | "account_id": 111111111111, 27 | } 28 | } 29 | } 30 | }, 31 | ) 32 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/tests/test_step_functions.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | # pylint: skip-file 5 | 6 | import os 7 | import boto3 8 | from pytest import fixture, raises 9 | from stubs import stub_step_functions 10 | from mock import Mock 11 | from stepfunctions import StepFunctions 12 | 13 | 14 | @fixture 15 | def cls(): 16 | cls = StepFunctions( 17 | role=boto3, 18 | deployment_account_id='11111111111', 19 | deployment_account_region='eu-central-1', 20 | regions=['region-1', 'region-2'], 21 | account_ids='99999999999', 22 | full_path='banking/testing', 23 | update_pipelines_only=0 24 | ) 25 | 26 | cls.client = Mock() 27 | return cls 28 | 29 | 30 | def test_statemachine_start(cls): 31 | cls.client.start_execution.return_value = stub_step_functions.start_execution 32 | cls._start_statemachine() 33 | assert cls.execution_arn == 'some_execution_arn' 34 | 35 | 36 | def test_statemachine_get_status(cls): 37 | cls.client.describe_execution.return_value = stub_step_functions.describe_execution 38 | cls._start_statemachine() 39 | cls._fetch_statemachine_status() 40 | assert cls._execution_status == 'RUNNING' 41 | 42 | 43 | def test_wait_failed_state_machine_execution(cls): 44 | stub_step_functions.describe_execution["status"] = "FAILED" 45 | cls.client.describe_execution.return_value = stub_step_functions.describe_execution 46 | cls._start_statemachine() 47 | cls._fetch_statemachine_status() 48 | assert cls._execution_status == 'FAILED' 49 | with raises(Exception): 50 | cls._wait_state_machine_execution() 51 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/thread.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates.
2 | # SPDX-License-Identifier: MIT-0 3 | 4 | # pylint: skip-file 5 | 6 | from threading import Thread 7 | 8 | 9 | class PropagatingThread(Thread): 10 | def run(self): 11 | self.exc = None 12 | try: 13 | if hasattr(self, '_Thread__target'): 14 | self.ret = self._Thread__target( 15 | *self._Thread__args, 16 | **self._Thread__kwargs 17 | ) 18 | else: 19 | self.ret = self._target( 20 | *self._args, 21 | **self._kwargs 22 | ) 23 | except BaseException as e: 24 | self.exc = e 25 | 26 | def join(self): 27 | super(PropagatingThread, self).join() 28 | if self.exc: 29 | raise self.exc 30 | return self.ret 31 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/requirements-dev.txt: -------------------------------------------------------------------------------- 1 | mock~=5.1.0 2 | pytest~=8.1.1 3 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/requirements.txt: -------------------------------------------------------------------------------- 1 | # Install libs here that you might want in AWS CodeBuild 2 | aws-cdk-lib~=2.136.0 3 | constructs>=10.0.0,<11.0.0 4 | aws-sam-cli==1.114.0 5 | boto3==1.34.80 6 | botocore==1.34.80 7 | pyyaml~=6.0.1 8 | schema~=0.7.5 9 | tenacity==8.2.3 10 | typing-extensions~=4.11.0 11 | urllib3~=1.26.18 ; python_version < "3.10" 12 | urllib3~=2.2.1 ; python_version >= "3.10" 13 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/templates/codecommit.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | Parameters: 5 | RepoName: 6 | Type: String 7 | Description: 8 | Type: String 9 | Default: Created by ADF 10 | 11 | Resources: 12 | Repo: 13 | Type: AWS::CodeCommit::Repository 14 | DeletionPolicy: Retain 15 | UpdateReplacePolicy: Retain 16 | Properties: 17 | RepositoryName: !Ref RepoName 18 | RepositoryDescription: !Ref Description 19 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | Tests for pipeline_repository 6 | """ 7 | 8 | import sys 9 | import os 10 | 11 | sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) 12 | sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__)))) 13 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 
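# --- Editor's note (not part of the ADF sources): a minimal usage sketch
# for PropagatingThread above. Unlike a plain threading.Thread, join()
# returns the target's return value and re-raises any exception the worker
# hit; the double_or_fail function here is purely illustrative.
from thread import PropagatingThread


def double_or_fail(value):
    if value < 0:
        raise ValueError('negative input')
    return value * 2


worker = PropagatingThread(target=double_or_fail, args=(21,))
worker.start()
assert worker.join() == 42  # join() hands back the target's result

failing = PropagatingThread(target=double_or_fail, args=(-1,))
failing.start()
try:
    failing.join()
except ValueError:
    pass  # the worker's exception propagates to the joining thread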
2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | __init__ for stubs 6 | """ 7 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/parameter_environment_acceptance_tag_project_a.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | Parameters: 5 | Environment: acceptance 6 | 7 | Tags: 8 | Project: 'ProjectA' 9 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/parameter_environment_prod.json: -------------------------------------------------------------------------------- 1 | { 2 | "Parameters": { 3 | "Environment": "production" 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/parameter_extra_one_only.json: -------------------------------------------------------------------------------- 1 | { 2 | "Parameters": { 3 | "Extra": "one" 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/stub_cfn_global.json: -------------------------------------------------------------------------------- 1 | { 2 | "Parameters": { 3 | "Environment": "testing", 4 | "MySpecialValue": "resolve:/values/some_value" 5 | }, 6 | "Tags": { 7 | "CostCenter": "overhead", 8 | "Department": "unknown", 9 | "Geography": "world" 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/stub_cfn_global.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 
2 | # SPDX-License-Identifier: MIT-0 3 | 4 | Parameters: 5 | Environment: "testing" 6 | MySpecialValue: "resolve:/values/some_value" 7 | 8 | Tags: 9 | CostCenter: "overhead" 10 | Department: "unknown" 11 | Geography: "world" 12 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/tag_cost_center_free_only.json: -------------------------------------------------------------------------------- 1 | { 2 | "Tags": { 3 | "CostCenter": "free" 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/tag_cost_center_nonfree_only.json: -------------------------------------------------------------------------------- 1 | { 2 | "Tags": { 3 | "CostCenter": "non-free" 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/tag_department_alpha_only.json: -------------------------------------------------------------------------------- 1 | { 2 | "Tags": { 3 | "Department": "alpha" 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/stubs/tag_geo_eu_only.json: -------------------------------------------------------------------------------- 1 | { 2 | "Tags": { 3 | "Geography": "eu" 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/store_config.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | Store config entry point for storing the adfconfig.yml configuration 6 | into the Parameter Store such that the bootstrapping and account management 7 | steps can execute correctly when triggered. 8 | 9 | This gets executed from within AWS CodeBuild in the management account. 10 | """ 11 | import os 12 | from config import Config 13 | from logger import configure_logger 14 | 15 | ADF_VERSION = os.environ["ADF_VERSION"] 16 | ADF_LOG_LEVEL = os.environ["ADF_LOG_LEVEL"] 17 | LOGGER = configure_logger(__name__) 18 | 19 | 20 | def main(): 21 | """ 22 | Main entry point to store the configuration into AWS Systems 23 | Manager Parameter Store 24 | """ 25 | LOGGER.info("ADF Version %s", ADF_VERSION) 26 | LOGGER.info("ADF Log Level is %s", ADF_LOG_LEVEL) 27 | 28 | LOGGER.info( 29 | "Storing configuration values in AWS Systems Manager Parameter Store." 30 | ) 31 | config = Config() 32 | config.store_config() 33 | LOGGER.info("Configuration values stored successfully.") 34 | 35 | 36 | if __name__ == '__main__': 37 | main() 38 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 
2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | __init__ for tests module 6 | """ 7 | 8 | import sys 9 | import os 10 | 11 | sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) 12 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/tests/stubs/stub_adfconfig.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | roles: 5 | cross-account-access: some_role 6 | 7 | regions: 8 | deployment-account: eu-central-1 9 | targets: 10 | - eu-west-1 11 | - us-west-2 12 | 13 | config: 14 | main-notification-endpoint: 15 | - type: email 16 | target: john@example.com 17 | moves: 18 | - name: to-root 19 | action: remove-base 20 | protected: 21 | - ou-a9ny-123test 22 | scp: 23 | keep-default-scp: enabled 24 | deployment-maps: 25 | allow-empty-target: disabled 26 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/example-adfconfig.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | roles: 5 | cross-account-access: OrganizationAccountAccessRole 6 | # ^ The role by ADF to assume cross account access 7 | 8 | regions: 9 | deployment-account: eu-central-1 10 | # ^ The region you define as your main deployment region 11 | targets: 12 | # A list of regions you wish to bootstrap and also deploy into via pipelines 13 | # By default it includes the deployment account region already. 14 | # The default deployment account region should not be listed here. 15 | - eu-west-1 16 | 17 | config: 18 | main-notification-endpoint: 19 | - type: email # Options include: slack or email 20 | target: john.doe@example.com 21 | # ^ Email/Slack channel who receives notifications for the main bootstrapping pipeline 22 | moves: 23 | - name: to-root 24 | action: safe # Can be safe or remove-base (see docs) 25 | scp: 26 | keep-default-scp: enabled 27 | # ^ Determines if the default AWSFullAccess SCP stays attached to all OU's 28 | scm: # Source control management 29 | auto-create-repositories: enabled 30 | # ^ If true and using CodeCommit as source, the repository will be automatically created 31 | default-scm-branch: main 32 | # ^ The default branch is used when the pipeline does not specify a specific branch. 33 | # If this parameter is not specified, it defaults to the "main" branch. 34 | org: 35 | stage: prod 36 | # ^ This value will be set as an SSM Parameter named /adf/org/stage 37 | # in both the deployment account and and all 38 | # Target member accounts as part of the Bootstrap Statemachine. 39 | # It is useful as a flag to drive Organization specific logic within 40 | # IAM Role definitions/conditions as well as Build spec behavior. 41 | 42 | extensions: 43 | terraform: 44 | enabled: false # If true resources needed to run Terraform template will be deployed 45 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/pytest.ini: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 
2 | # SPDX-License-Identifier: MIT-0 3 | 4 | [pytest] 5 | env = 6 | ACCOUNT_ID="123456789012" 7 | testpaths = adf-build/tests adf-bootstrap/deployment/lambda_codebase/tests adf-build/shared/python/tests/ 8 | norecursedirs = adf-bootstrap/deployment/lambda_codebase/initial_commit adf-bootstrap/deployment/lambda_codebase/determine_default_branch adf-build/shared 9 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/requirements-dev.txt: -------------------------------------------------------------------------------- 1 | tox==4.14.2 2 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/bootstrap_repository/tox.ini: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | # tox (https://tox.readthedocs.io/) is a tool for running tests 5 | # in multiple virtualenvs. This configuration file will run the 6 | # test suite on all supported python versions. To use it, "pip install tox" 7 | # and then run "tox" from this directory. 8 | 9 | [tox] 10 | envlist = py312 11 | skipsdist = True 12 | 13 | 14 | [testenv] 15 | setenv= 16 | PYTHONPATH={toxinidir}/adf-build/shared/python 17 | ORGANIZATION_ID=o-123456789 18 | AWS_REGION=eu-central-1 19 | AWS_DEFAULT_REGION=eu-central-1 20 | ADF_PIPELINE_PREFIX=adf-pipeline- 21 | CODEBUILD_BUILD_ID=abcdef 22 | S3_BUCKET=some_bucket 23 | S3_BUCKET_NAME=some_bucket 24 | SHARED_MODULES_BUCKET=some_shared_modules_bucket 25 | MANAGEMENT_ACCOUNT_ID=123 26 | ADF_VERSION=1.0.0 27 | ADF_LOG_LEVEL=CRITICAL 28 | ADF_PROJECT_NAME=whatever 29 | ACCOUNT_ID=111111111111 30 | 31 | allowlist_externals = 32 | pytest 33 | # python 34 | deps = 35 | -r{toxinidir}/adf-build/requirements.txt 36 | -r{toxinidir}/adf-build/requirements-dev.txt 37 | -r{toxinidir}/adf-build/shared/helpers/requirements.txt 38 | -r{toxinidir}/adf-build/shared/helpers/requirements-dev.txt 39 | -r{toxinidir}/adf-build/shared/requirements.txt 40 | -r{toxinidir}/adf-build/shared/requirements-dev.txt 41 | 42 | commands = 43 | pytest -vvv 44 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/handler.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | The Initial Commit Handler that is called when ADF is installed to commit the 6 | initial bootstrap repository content. 
7 | """ 8 | 9 | try: 10 | from initial_commit import lambda_handler # pylint: disable=unused-import 11 | except Exception as err: # pylint: disable=broad-except 12 | import os 13 | import logging 14 | from urllib.request import Request, urlopen 15 | import json 16 | 17 | LOGGER = logging.getLogger(__name__) 18 | LOGGER.setLevel(os.environ.get("ADF_LOG_LEVEL", logging.INFO)) 19 | 20 | def lambda_handler(event, _context, prior_error=err): 21 | payload = { 22 | "LogicalResourceId": event["LogicalResourceId"], 23 | "PhysicalResourceId": event.get( 24 | "PhysicalResourceId", 25 | "NOT_YET_CREATED", 26 | ), 27 | "Status": "FAILED", 28 | "RequestId": event["RequestId"], 29 | "StackId": event["StackId"], 30 | "Reason": str(prior_error), 31 | } 32 | if not event["ResponseURL"].lower().startswith('http'): 33 | raise ValueError('ResponseURL is forbidden') from None 34 | with urlopen( 35 | Request( 36 | event["ResponseURL"], 37 | data=json.dumps(payload).encode(), 38 | headers={"content-type": ""}, 39 | method="PUT", 40 | ) 41 | ) as response: 42 | response_body = response.read().decode("utf-8") 43 | LOGGER.debug( 44 | "Response: %s", 45 | response_body, 46 | ) 47 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/pytest.ini: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | [pytest] 5 | testpaths = tests 6 | norecursedirs = bootstrap_repository 7 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/requirements.txt: -------------------------------------------------------------------------------- 1 | Jinja2==3.1.4 2 | boto3==1.34.80 3 | cfn-custom-resource~=1.0.1 4 | markupsafe==2.1.5 5 | -------------------------------------------------------------------------------- /src/lambda_codebase/initial_commit/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | """ 5 | __init__ for tests module 6 | """ 7 | 8 | import sys 9 | import os 10 | 11 | sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) 12 | -------------------------------------------------------------------------------- /src/lambda_codebase/jump_role_manager/pytest.ini: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | [pytest] 5 | testpaths = tests 6 | -------------------------------------------------------------------------------- /src/lambda_codebase/jump_role_manager/requirements.txt: -------------------------------------------------------------------------------- 1 | aws-xray-sdk==2.13.0 2 | pyyaml~=6.0.1 3 | -------------------------------------------------------------------------------- /src/lambda_codebase/jump_role_manager/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. or its affiliates. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | # pylint: skip-file 5 | -------------------------------------------------------------------------------- /src/lambda_codebase/organization/handler.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com Inc. 
--------------------------------------------------------------------------------
/src/lambda_codebase/organization/handler.py:
--------------------------------------------------------------------------------
# Copyright Amazon.com Inc. or its affiliates.
# SPDX-License-Identifier: MIT-0

"""
The Organization Handler that is called when ADF is installed to create the
organization if required.
"""

try:
    from main import lambda_handler  # pylint: disable=unused-import
except Exception as err:  # pylint: disable=broad-except
    import os
    import logging
    from urllib.request import Request, urlopen
    import json

    LOGGER = logging.getLogger(__name__)
    LOGGER.setLevel(os.environ.get("ADF_LOG_LEVEL", logging.INFO))

    def lambda_handler(event, _context, prior_error=err):
        """
        Handler for custom resource
        """
        payload = {
            "LogicalResourceId": event["LogicalResourceId"],
            "PhysicalResourceId": event.get(
                "PhysicalResourceId",
                "NOT_YET_CREATED",
            ),
            "Status": "FAILED",
            "RequestId": event["RequestId"],
            "StackId": event["StackId"],
            "Reason": str(prior_error),
        }
        if not event["ResponseURL"].lower().startswith('http'):
            raise ValueError('ResponseURL is forbidden') from None
        with urlopen(
            Request(
                event["ResponseURL"],
                data=json.dumps(payload).encode(),
                headers={"content-type": ""},
                method="PUT",
            )
        ) as response:
            response_body = response.read().decode("utf-8")
            LOGGER.debug(
                "Response: %s",
                response_body,
            )
--------------------------------------------------------------------------------
/src/lambda_codebase/organization/requirements.txt:
--------------------------------------------------------------------------------
cfn_custom_resource~=1.0.1
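Before PUTting the response, each fallback handler checks event["ResponseURL"].lower().startswith('http'). This guards the urlopen call: urlopen would otherwise accept non-HTTP schemes such as file://. A small sketch of what the guard accepts and rejects (the URLs are illustrative):

    # The scheme guard rejects anything that does not start with "http",
    # so https:// and http:// pass while file:// or ftp:// are refused
    # before any request object is even built.
    for url in (
        "https://cloudformation-custom-resource-response.example/ok",
        "file:///etc/passwd",
    ):
        allowed = url.lower().startswith("http")
        print(f"{url} -> {'allowed' if allowed else 'forbidden'}")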
7 | """ 8 | 9 | try: 10 | from main import lambda_handler # pylint: disable=unused-import 11 | except Exception as err: # pylint: disable=broad-except 12 | import os 13 | import logging 14 | from urllib.request import Request, urlopen 15 | import json 16 | 17 | LOGGER = logging.getLogger(__name__) 18 | LOGGER.setLevel(os.environ.get("ADF_LOG_LEVEL", logging.INFO)) 19 | 20 | def lambda_handler(event, _context, prior_error=err): 21 | payload = { 22 | "LogicalResourceId": event["LogicalResourceId"], 23 | "PhysicalResourceId": event.get( 24 | "PhysicalResourceId", 25 | "NOT_YET_CREATED", 26 | ), 27 | "Status": "FAILED", 28 | "RequestId": event["RequestId"], 29 | "StackId": event["StackId"], 30 | "Reason": str(prior_error), 31 | } 32 | if not event["ResponseURL"].lower().startswith('http'): 33 | raise ValueError('ResponseURL is forbidden') from None 34 | with urlopen( 35 | Request( 36 | event["ResponseURL"], 37 | data=json.dumps(payload).encode(), 38 | headers={"content-type": ""}, 39 | method="PUT", 40 | ) 41 | ) as response: 42 | response_body = response.read().decode("utf-8") 43 | LOGGER.debug( 44 | "Response: %s", 45 | response_body, 46 | ) 47 | -------------------------------------------------------------------------------- /src/lambda_codebase/organization_unit/requirements.txt: -------------------------------------------------------------------------------- 1 | cfn_custom_resource~=1.0.1 2 | -------------------------------------------------------------------------------- /src/lambda_codebase/requirements.txt: -------------------------------------------------------------------------------- 1 | tenacity==8.2.3 2 | --------------------------------------------------------------------------------