├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ ├── documentation-improvements.md │ └── feature_request.md └── PULL_REQUEST_TEMPLATE.md ├── .gitignore ├── CHANGELOG.md ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE.txt ├── NOTICE.txt ├── README.md ├── SECURITY.md ├── deployment ├── build-s3-dist.sh ├── cdk-solution-helper │ ├── README.md │ ├── index.js │ ├── npm-shrinkwrap.json │ └── package.json ├── run-unit-tests.sh └── solution_env.sh ├── solution-manifest.yaml └── source ├── .eslintignore ├── .eslintrc.json ├── .prettierrc ├── bin └── aws_devops_monitoring_dashboard.ts ├── cdk.json ├── image └── architecture_diagram.png ├── jest.config.js ├── lambda ├── event_parser │ ├── codebuild_index.js │ ├── codebuild_metrics.js │ ├── codecommit_events.js │ ├── codedeploy_events.js │ ├── codepipeline_events.js │ ├── github_events.js │ ├── github_index.js │ ├── index.js │ ├── jest.config.js │ ├── lib │ │ ├── github_authorizer.js │ │ ├── ip_helper.js │ │ ├── logger.js │ │ └── secrets_manager.js │ ├── package-lock.json │ ├── package.json │ ├── synthetic_canary_alarm_events.js │ └── test │ │ ├── github_authorizer.spec.js │ │ ├── github_events.spec.js │ │ ├── github_index.spec.js │ │ ├── index.spec.js │ │ ├── ip_helper.spec.js │ │ ├── logger.spec.js │ │ └── secrets_manager.spec.js ├── multi_account_custom_resources │ ├── jest.config.js │ ├── lib │ │ ├── cfn.js │ │ ├── eventbridge.js │ │ ├── logger.js │ │ └── s3_bucket_policy.js │ ├── manage_s3_bucket_policy.js │ ├── monitoring_account_permission_index.js │ ├── package-lock.json │ ├── package.json │ └── test │ │ ├── cfn.spec.js │ │ ├── eventbridge.spec.js │ │ ├── logger.spec.js │ │ ├── manage_s3_bucket_policy.spec.js │ │ ├── monitoring_account_permission_index.spec.js │ │ └── s3_bucket_policy.spec.js ├── query_runner │ ├── add_athena_partition.js │ ├── build_athena_query.js │ ├── index.js │ ├── jest.config.js │ ├── lib │ │ ├── cfn.js │ │ ├── execute_athena_query.js │ │ ├── logger.js │ │ └── metrics_helper.js │ ├── package-lock.json │ ├── package.json │ └── test │ │ ├── add_athena_partition.spec.js │ │ ├── build_athena_query.spec.js │ │ ├── cfn.spec.js │ │ ├── config.js │ │ ├── execute_athena_query.spec.js │ │ ├── index.spec.js │ │ ├── logger.spec.js │ │ └── metrics_helper.spec.js ├── quicksight-custom-resources │ ├── .coveragerc │ ├── .gitignore │ ├── __init__.py │ ├── lambda_function.py │ ├── poetry.lock │ ├── pyproject.toml │ ├── pytest.ini │ ├── test │ │ ├── __init__.py │ │ ├── conftest.py │ │ ├── dump_environment.py │ │ ├── fixtures │ │ │ ├── quicksight_analysis_fixtures.py │ │ │ ├── quicksight_dashboard_fixtures.py │ │ │ ├── quicksight_dataset_fixtures.py │ │ │ ├── quicksight_datasource_fixtures.py │ │ │ ├── quicksight_template_fixtures.py │ │ │ └── quicksight_test_fixture.py │ │ ├── logger_test_helper.py │ │ ├── test_analysis.py │ │ ├── test_dashboard.py │ │ ├── test_dataset.py │ │ ├── test_datasource.py │ │ ├── test_environment.py │ │ ├── test_helpers.py │ │ ├── test_lambda_function.py │ │ ├── test_logging.py │ │ ├── test_quicksight.py │ │ ├── test_source_entity.py │ │ └── test_template.py │ └── util │ │ ├── __init__.py │ │ ├── analysis.py │ │ ├── config │ │ ├── analysis-main.config.json │ │ ├── dashboard-main.config.json │ │ ├── dataset-code-build-detail.config.json │ │ ├── dataset-code-change-activity.config.json │ │ ├── dataset-code-deployment-detail.config.json │ │ ├── dataset-code-pipeline-detail.config.json │ │ ├── dataset-github-change-activity.config.json │ │ ├── dataset-recovery-time-detail.config.json │ │ └── 
template-main.config.json │ │ ├── dashboard.py │ │ ├── dataset.py │ │ ├── datasource.py │ │ ├── helpers.py │ │ ├── logging.py │ │ ├── quicksight.py │ │ ├── quicksight_application.py │ │ ├── quicksight_resource.py │ │ ├── source_entity.py │ │ └── template.py ├── solution_helper │ ├── .coveragerc │ ├── .gitignore │ ├── __init__.py │ ├── lambda_function.py │ ├── poetry.lock │ ├── pyproject.toml │ ├── test │ │ ├── test_lambda_function.py │ │ └── test_solution_metrics.py │ └── util │ │ └── solution_metrics.py └── tag_query │ ├── .jest │ └── setEnvVars.js │ ├── index.js │ ├── jest.config.js │ ├── lib │ ├── cfn.js │ ├── logger.js │ ├── metrics_helper.js │ ├── query_generator.js │ ├── resource_info.js │ └── throttler.js │ ├── package-lock.json │ ├── package.json │ ├── reporter.js │ ├── tag_query.js │ └── test │ ├── index.spec.js │ ├── lib │ ├── cfn.spec.js │ ├── logger.spec.js │ ├── metrics_helper.spec.js │ ├── query_generator.spec.js │ ├── resource_info.spec.js │ └── throttler.spec.js │ ├── reporter.spec.js │ └── tag_query.spec.js ├── lib ├── app-registry │ ├── app_register.ts │ ├── apply_tag.ts │ └── condition_aspect.ts ├── aws_devops_monitoring_dashboard_stack.ts ├── database │ └── database_construct.ts ├── deployment-helper │ ├── canary_alarm │ │ ├── alarm_construct.ts │ │ └── canary_alarm_stack.ts │ └── codepipeline_alarm │ │ ├── codepipeline_alarm_construct.ts │ │ └── codepipeline_alarm_stack.ts ├── events │ ├── canary_events_construct.ts │ ├── code_build_events_construct.ts │ ├── code_commit_events_construct.ts │ ├── code_deploy_events_construct.ts │ ├── code_pipeline_events_construct.ts │ └── codepipeline_alarm_events_construct.ts ├── github │ ├── github_construct.ts │ └── github_stack.ts ├── multi-account-resources │ ├── monitoring_account │ │ └── monitoring_account_permissions_construct.ts │ └── sharing_account │ │ └── sharing_account_stack.ts ├── quicksight-custom-resources │ ├── quicksight-construct.ts │ └── quicksight-stack.ts ├── solution-helper │ ├── lambda-role-cloudwatch-construct.ts │ └── solution-helper-construct.ts ├── tagging │ └── tag-query-construct.ts └── util │ └── apply_to_construct.ts ├── package-lock.json ├── package.json ├── test ├── __snapshots__ │ ├── apply_to_construct.test.ts.snap │ ├── aws_devops_monitoring_dashboard_stack.test.ts.snap │ ├── canary_stack.test.ts.snap │ ├── codepipeline_alarm_stack.test.ts.snap │ └── sharing_account_stack.test.ts.snap ├── apply_to_construct.test.ts ├── aws_devops_monitoring_dashboard_stack.test.ts ├── canary_stack.test.ts ├── codepipeline_alarm_stack.test.ts └── sharing_account_stack.test.ts └── tsconfig.json /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: "" 5 | labels: bug 6 | assignees: "" 7 | --- 8 | 9 | **Describe the bug** 10 | 11 | 12 | 13 | **To Reproduce** 14 | 15 | 16 | 17 | **Expected behavior** 18 | 19 | 20 | 21 | **Please complete the following information about the solution:** 22 | 23 | - [ ] Version: [e.g. v1.0.0] 24 | 25 | To get the version of the solution, you can look at the description of the created CloudFormation stack. For example, "_(SO0143) - The AWS CloudFormation template for deployment of the DevOps Monitoring Dashboard on AWS. Version **v1.0.0**_". You can also find the version from [releases](https://github.com/aws-solutions/aws-devops-monitoring-dashboard/releases) 26 | 27 | - [ ] Region: [e.g. 
us-east-1] 28 | - [ ] Was the solution modified from the version published on this repository? 29 | - [ ] If the answer to the previous question was yes, are the changes available on GitHub? 30 | - [ ] Have you checked your [service quotas](https://docs.aws.amazon.com/general/latest/gr/aws_service_limits.html) for the services this solution uses? 31 | - [ ] Were there any errors in the CloudWatch Logs? [How to enable debug mode?](https://github.com/aws-solutions/aws-devops-monitoring-dashboard/#enable-debug-mode) 32 | 33 | **Screenshots** 34 | If applicable, add screenshots to help explain your problem (please **DO NOT include sensitive information**). 35 | 36 | **Additional context** 37 | Add any other context about the problem here. 38 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/documentation-improvements.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Documentation improvements 3 | about: Suggest a documentation update 4 | title: '' 5 | labels: documentation 6 | assignees: '' 7 | 8 | --- 9 | 10 | **What were you initially searching for in the docs?** 11 | 12 | 13 | **Is this related to an existing part of the documentation? Please share a link** 14 | 15 | **Describe how we could make it clearer** 16 | 17 | **If you have a proposed update, please share it here** -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this solution 4 | title: '' 5 | labels: feature-request, enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | 12 | 13 | **Describe the feature you'd like** 14 | 15 | 16 | **Additional context** 17 | 18 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | *Issue #, if available:* 2 | 3 | *Description of changes:* 4 | 5 | By submitting this pull request, I confirm that you can use, modify, copy, and redistribute this contribution, under the terms of your choice. 
6 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Ignore TS compile output 2 | **/*.js 3 | **/*.d.ts 4 | # Exclude required .js files from above ignore rule 5 | !deployment/.typescript/cdk-solution-helper/index.js 6 | !jest.config.js 7 | !source/.typescript/lambda/**/*.js 8 | !source/lambda/**/*.js 9 | !**/cdk-solution-helper/index.js 10 | 11 | node_modules 12 | **/__pycache__/* 13 | *.venv-test/ 14 | *.venv 15 | *_yaml/ 16 | 17 | # CDK asset staging directory 18 | .cdk.staging 19 | cdk.out 20 | 21 | 22 | deployment/global-s3-assets 23 | deployment/regional-s3-assets 24 | deployment/temp 25 | open-source/ 26 | .DS_Store 27 | deployment/setenv.sh 28 | internal/source/tools/setenv.sh 29 | 30 | #nodejs 31 | *dist* 32 | *coverage* 33 | 34 | #intellij 35 | /.idea 36 | 37 | #SonarQube 38 | .scannerwork/ 39 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Change Log 2 | 3 | All notable changes to this project will be documented in this file. 4 | 5 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 6 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 7 | 8 | ## [1.8.13] - 2024-11-27 9 | ### Changed 10 | - Upgraded cross-spawn to mitigate CVE-2024-21538 11 | - Added locked dependency versions 12 | 13 | ## [1.8.12] - 2024-09-18 14 | ### Changed 15 | - Upgraded path-to-regexp to mitigate CVE-2024-45296 16 | - Upgraded micromatch to mitigate CVE-2024-4067 17 | 18 | ## [1.8.11] - 2024-08-16 19 | ### Changed 20 | - Upgraded axios to mitigate CVE-2024-39338 21 | 22 | ## [1.8.10] - 2024-08-07 23 | ### Changed 24 | - Upgraded fast-xml-parser to mitigate CVE-2024-41818 25 | 26 | ## [1.8.9] - 2024-07-10 27 | ### Changed 28 | - Upgraded braces to mitigate CVE-2024-4068 29 | - Upgraded requests to mitigate CVE-2024-35195 30 | 31 | ## [1.8.8] - 2024-03-11 32 | ### Changed 33 | - Upgraded Synthetics canary runtime to v6.2 34 | - Upgraded CDK to v2.130 35 | - Upgraded solutions-constructs to v2.54 36 | 37 | ## [1.8.7] - 2024-01-30 38 | ### Fixed 39 | 40 | - Handling of AWS SDK v3 errors 41 | 42 | ## [1.8.6] - 2023-12-21 43 | 44 | ### Changed 45 | 46 | - Upgraded Lambda functions from Node.js 16 to Node.js 20 47 | - Upgraded Lambda functions from Python 3.10 to Python 3.11 48 | - Upgraded AWS SDK from v2 to v3 49 | 50 | ## [1.8.5] - 2023-10-26 51 | 52 | ### Changed 53 | 54 | - Upgraded @babel/traverse to mitigate CVE-2023-45133 55 | - Upgraded chaijs/get-func-name to mitigate CVE-2023-43646 56 | - Upgraded urllib3 to mitigate CVE-2023-45803 and CVE-2023-43804 57 | - Upgraded other dev dependencies (moto, pytest, pytest-env) 58 | 59 | ## [1.8.4] - 2023-08-07 60 | 61 | ### Changed 62 | 63 | - Refactored code to reduce complexity 64 | - Upgraded requests to mitigate CVE-2023-32681 65 | - Upgraded semver to mitigate CVE-2022-25883 66 | - Upgraded cryptography 67 | 68 | ## [1.8.3] - 2023-04-18 69 | 70 | ### Changed 71 | 72 | - Fixed S3 logging bucket setting 73 | - Fixed missing userName in CodeCommit events when pushes are made with assumed-role credentials 74 | - Upgraded Werkzeug to mitigate CVE-2023-25577 75 | - Upgraded cryptography to mitigate CVE-2023-23931 76 | - Upgraded tenacity 77 | - Added timeout to requests call 78 | - Upgraded to Athena engine version 3 79 | 80 | ## [1.8.2] - 2023-01-13 81 | 82 |
### Security 83 | 84 | - Upgraded JSON5 to mitigate CVE-2022-46175 85 | - Upgraded certifi to mitigate CVE-2022-23491 86 | 87 | ## [1.8.1] - 2022-12-05 88 | 89 | ### Added 90 | 91 | - Added Application Registry 92 | 93 | ### Changed 94 | 95 | - Upgraded Node.js 14 to Node.js 16 96 | 97 | ## [1.8.0] - 2022-10-31 98 | 99 | ### Added 100 | 101 | - Added multi-account multi-region data ingestion 102 | - Added tag filter for AWS CodeCommit, CodeBuild and CodePipeline 103 | 104 | ## [1.5.0] - 2022-04-19 105 | 106 | ### Added 107 | 108 | - Added GitHub integration - GitHub activity metric for push events 109 | - Added Mean Time to Recovery (MTTR) metric for CodePipeline 110 | 111 | ## [1.1.0] - 2021-06-16 112 | 113 | ### Added 114 | 115 | - Metrics visualization for CodeBuild and CodePipeline events. 116 | 117 | ## [1.0.0] - 2021-03-22 118 | 119 | ### Added 120 | 121 | - Initial version 122 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | ## Code of Conduct 2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 4 | opensource-codeofconduct@amazon.com with any additional questions or comments. -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing Guidelines 2 | 3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional 4 | documentation, we greatly value feedback and contributions from our community. 5 | 6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary 7 | information to effectively respond to your bug report or contribution. 8 | 9 | 10 | ## Reporting Bugs/Feature Requests 11 | 12 | We welcome you to use the GitHub issue tracker to report bugs or suggest features. 13 | 14 | When filing an issue, please check [existing open](https://github.com/aws-solutions/aws-devops-monitoring-dashboard/issues), or [recently closed](https://github.com/aws-solutions/aws-devops-monitoring-dashboard/issues?utf8=%E2%9C%93&q=is%3Aissue%20is%3Aclosed%20), issues to make sure somebody else hasn't already 15 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful: 16 | 17 | * A reproducible test case or series of steps 18 | * The version of our code being used 19 | * Any modifications you've made relevant to the bug 20 | * Anything unusual about your environment or deployment 21 | 22 | 23 | ## Contributing via Pull Requests 24 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that: 25 | 26 | 1. You are working against the latest source on the *main* branch. 27 | 2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already. 28 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted. 29 | 30 | To send us a pull request, please: 31 | 32 | 1. Fork the repository. 33 | 2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change. 34 | 3. 
Ensure all build processes execute successfully (see README.md for additional guidance). 35 | 4. Ensure all unit, integration, and/or snapshot tests pass, as applicable. 36 | 5. Commit to your fork using clear commit messages. 37 | 6. Send us a pull request, answering any default questions in the pull request interface. 38 | 7. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation. 39 | 40 | GitHub provides additional documentation on [forking a repository](https://help.github.com/articles/fork-a-repo/) and 41 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/). 42 | 43 | 44 | ## Finding contributions to work on 45 | Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any ['help wanted'](https://github.com/aws-solutions/aws-devops-monitoring-dashboard/labels/help%20wanted) issues is a great place to start. 46 | 47 | 48 | ## Code of Conduct 49 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 50 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 51 | opensource-codeofconduct@amazon.com with any additional questions or comments. 52 | 53 | 54 | ## Security issue notifications 55 | If you discover a potential security issue in this project, we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public GitHub issue. 56 | 57 | 58 | ## Licensing 59 | See the [LICENSE](https://github.com/aws-solutions/aws-devops-monitoring-dashboard/blob/main/LICENSE.txt) file for our project's licensing. We will ask you to confirm the licensing of your contribution. 60 | 61 | We may ask you to sign a [Contributor License Agreement (CLA)](http://en.wikipedia.org/wiki/Contributor_License_Agreement) for larger changes. 62 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | Reporting Security Issues 2 | ---------------------------------------------------------------------------------------------------------- 3 | We take all security reports seriously. When we receive such reports, we will investigate and 4 | subsequently address any potential vulnerabilities as quickly as possible. If you discover a potential 5 | security issue in this project, please notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/) or 6 | directly via email to [AWS Security](mailto:aws-security@amazon.com). Please do not create a public GitHub issue in this project.
-------------------------------------------------------------------------------- /deployment/cdk-solution-helper/npm-shrinkwrap.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cdk-solution-helper", 3 | "version": "0.1.0", 4 | "lockfileVersion": 2, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "cdk-solution-helper", 9 | "version": "0.1.0", 10 | "license": "Apache-2.0" 11 | } 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /deployment/cdk-solution-helper/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cdk-solution-helper", 3 | "version": "0.1.0", 4 | "description": "CDK solution helper to build zip files for lambda functions", 5 | "author": { 6 | "name": "Amazon Web Services", 7 | "url": "https://aws.amazon.com/solutions" 8 | }, 9 | "license": "Apache-2.0" 10 | } 11 | -------------------------------------------------------------------------------- /deployment/solution_env.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | export SOLUTION_ID='SO0143' 3 | export SOLUTION_NAME='DevOps Monitoring Dashboard on AWS' 4 | export SOLUTION_TRADEMARKEDNAME='aws-devops-monitoring-dashboard' -------------------------------------------------------------------------------- /solution-manifest.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | id: SO0143 # Solution Id 3 | name: devops-monitoring-dashboard-on-aws # trademarked name 4 | opensource_archive: aws-devops-monitoring-dashboard.zip 5 | version: v1.8.13 # current version of the solution. Used to verify template headers 6 | cloudformation_templates: # This list should match with AWS CloudFormation templates section of IG 7 | - template: aws-devops-monitoring-dashboard.template 8 | main_template: true 9 | - template: sharing-account-stack.template 10 | - template: pipeline-alarm.template 11 | - template: canary-alarm.template 12 | build_environment: 13 | build_image: 'aws/codebuild/standard:7.0' # Options include: 'aws/codebuild/standard:5.0','aws/codebuild/standard:6.0','aws/codebuild/standard:7.0','aws/codebuild/amazonlinux2-x86_64-standard:4.0','aws/codebuild/amazonlinux2-x86_64-standard:5.0' -------------------------------------------------------------------------------- /source/.eslintignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | coverage 3 | dist 4 | build 5 | cdk.out 6 | lib/**/*.js 7 | lib/**/*.d.ts 8 | bin/**/*.d.ts 9 | bin/**/*.js 10 | test/*.d.ts 11 | test/*.js -------------------------------------------------------------------------------- /source/.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "jest": true, 4 | "node": true 5 | }, 6 | "extends": [ 7 | "eslint:recommended", 8 | "plugin:@typescript-eslint/recommended", 9 | "plugin:prettier/recommended", 10 | "plugin:react/recommended" 11 | ], 12 | "parser": "@typescript-eslint/parser", 13 | "parserOptions": { 14 | "ecmaVersion": "latest", 15 | "project": "**/tsconfig.json", 16 | "sourceType": "module" 17 | }, 18 | "plugins": [ 19 | "@typescript-eslint", 20 | "header", 21 | "import", 22 | "react" 23 | ], 24 | "rules": { 25 | "@typescript-eslint/no-inferrable-types": [ 26 | "off", 27 | { 28 | "ignoreParameters": true, 29 | "ignoreProperties": true 30 | } 31 | ], 32 | 
"@typescript-eslint/no-useless-constructor": [ 33 | "off" 34 | ], 35 | "arrow-body-style": [ 36 | "warn", 37 | "as-needed" 38 | ], 39 | "prefer-arrow-callback": [ 40 | "warn" 41 | ], 42 | "no-inferrable-types": [ 43 | "off", 44 | "ignore-params" 45 | ], 46 | "no-unused-vars": [ 47 | "error", 48 | { 49 | "args": "none", 50 | "argsIgnorePattern": "^_", 51 | "varsIgnorePattern": "^[A-Z]" 52 | } 53 | ], 54 | "header/header": [ 55 | "error", 56 | "line", 57 | [ 58 | " Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.", 59 | " SPDX-License-Identifier: Apache-2.0" 60 | ], 61 | 2 62 | ], 63 | "no-empty-function": "off", 64 | "@typescript-eslint/no-empty-function": [ 65 | "off" 66 | ], 67 | "@typescript-eslint/no-var-requires": [ 68 | "off" 69 | ], 70 | "no-prototype-builtins": [ 71 | "off" 72 | ] 73 | } 74 | } -------------------------------------------------------------------------------- /source/.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "arrowParens": "avoid", 3 | "bracketSameLine": true, 4 | "bracketSpacing": true, 5 | "printWidth": 120, 6 | "singleQuote": true, 7 | "tabWidth": 2, 8 | "trailingComma": "none" 9 | } 10 | -------------------------------------------------------------------------------- /source/bin/aws_devops_monitoring_dashboard.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | // SPDX-License-Identifier: Apache-2.0 4 | 5 | import 'source-map-support/register'; 6 | import * as cdk from 'aws-cdk-lib'; 7 | import * as lambda from 'aws-cdk-lib/aws-lambda'; 8 | import { CanaryStack } from '../lib/deployment-helper/canary_alarm/canary_alarm_stack'; 9 | import { PipelineAlarmStack } from '../lib/deployment-helper/codepipeline_alarm/codepipeline_alarm_stack'; 10 | import { DevOpsDashboardStack } from '../lib/aws_devops_monitoring_dashboard_stack'; 11 | import { SharingAccountStack } from '../lib/multi-account-resources/sharing_account/sharing_account_stack'; 12 | import { AwsSolutionsChecks } from 'cdk-nag'; 13 | import { AppRegister } from '../lib/app-registry/app_register'; 14 | 15 | // SOLUTION_* - set by solution_env.sh 16 | const SOLUTION_ID = process.env['SOLUTION_ID'] || 'undefined'; 17 | const SOLUTION_NAME = process.env['SOLUTION_NAME'] || 'undefined'; 18 | // DIST_* - set by build-s3-dist.sh 19 | const DIST_VERSION = process.env['DIST_VERSION'] || '%%VERSION%%'; 20 | const DIST_OUTPUT_BUCKET = process.env['DIST_OUTPUT_BUCKET'] || '%%BUCKET%%'; 21 | const DIST_SOLUTION_NAME = process.env['DIST_SOLUTION_NAME'] || '%%SOLUTION%%'; 22 | const LAMBDA_RUNTIME_NODEJS = lambda.Runtime.NODEJS_22_X; 23 | const TEMPLATE_FORMAT_VERSION = '2010-09-09'; 24 | 25 | const app = new cdk.App(); 26 | cdk.Aspects.of(app).add(new AwsSolutionsChecks()); 27 | 28 | const canaryStack = new CanaryStack(app, 'canary-alarm', { 29 | synthesizer: new cdk.DefaultStackSynthesizer({ 30 | generateBootstrapVersionRule: false 31 | }), 32 | description: `(${SOLUTION_ID}C) ${SOLUTION_NAME} - Create Canary Alarm Template. 
Version: ${DIST_VERSION}`, 33 | solutionId: SOLUTION_ID, 34 | solutionVersion: DIST_VERSION, 35 | solutionName: SOLUTION_NAME, 36 | solutionDistBucket: DIST_OUTPUT_BUCKET, 37 | solutionDistName: DIST_SOLUTION_NAME 38 | }); 39 | 40 | /* Main stack for the solution */ 41 | const devopsDashboardStack = new DevOpsDashboardStack(app, 'aws-devops-monitoring-dashboard', { 42 | synthesizer: new cdk.DefaultStackSynthesizer({ 43 | generateBootstrapVersionRule: false 44 | }), 45 | description: `(${SOLUTION_ID}) ${SOLUTION_NAME} - Main Template (Monitoring Account). Version: ${DIST_VERSION}`, 46 | solutionId: SOLUTION_ID, 47 | solutionVersion: DIST_VERSION, 48 | solutionName: SOLUTION_NAME, 49 | solutionDistBucket: DIST_OUTPUT_BUCKET, 50 | solutionDistName: DIST_SOLUTION_NAME, 51 | lambdaRuntimeNode: LAMBDA_RUNTIME_NODEJS, 52 | }); 53 | 54 | /* Stack for creating codepipeline alarm */ 55 | const pipelineAlarmStack = new PipelineAlarmStack(app, 'pipeline-alarm', { 56 | synthesizer: new cdk.DefaultStackSynthesizer({ 57 | generateBootstrapVersionRule: false 58 | }), 59 | description: `(${SOLUTION_ID}P) ${SOLUTION_NAME} - Create CodePipeline Alarm Template. Version: ${DIST_VERSION}`, 60 | solutionId: SOLUTION_ID, 61 | solutionVersion: DIST_VERSION 62 | }); 63 | 64 | /* Stack for creating sharing account resources */ 65 | const sharingAccountStack = new SharingAccountStack(app, 'sharing-account-stack', { 66 | synthesizer: new cdk.DefaultStackSynthesizer({ 67 | generateBootstrapVersionRule: false 68 | }), 69 | description: `(${SOLUTION_ID}S) ${SOLUTION_NAME} - Sharing Account Template. Version: ${DIST_VERSION}`, 70 | solutionId: SOLUTION_ID, 71 | solutionVersion: DIST_VERSION, 72 | solutionName: SOLUTION_NAME, 73 | solutionDistBucket: DIST_OUTPUT_BUCKET, 74 | solutionDistName: DIST_SOLUTION_NAME, 75 | lambdaRuntimeNode: LAMBDA_RUNTIME_NODEJS, 76 | }); 77 | 78 | const appRegister = new AppRegister({ 79 | solutionId: SOLUTION_ID, 80 | solutionName: SOLUTION_NAME, 81 | solutionVersion: DIST_VERSION, 82 | appRegistryApplicationName: 'devops-monitoring-dashboard-on-aws', 83 | applicationType: 'AWS-Solutions', 84 | attributeGroupName: 'Solution-Metadata' 85 | }); 86 | 87 | appRegister.applyAppRegistryToStacks( 88 | devopsDashboardStack as cdk.Stack, 89 | [], // Do not associate spoke (sharing) stack because cross-region associations are not supported currently 90 | devopsDashboardStack.getNestedStacks() 91 | ); 92 | 93 | devopsDashboardStack.templateOptions.templateFormatVersion = TEMPLATE_FORMAT_VERSION; 94 | canaryStack.templateOptions.templateFormatVersion = TEMPLATE_FORMAT_VERSION; 95 | pipelineAlarmStack.templateOptions.templateFormatVersion = TEMPLATE_FORMAT_VERSION; 96 | sharingAccountStack.templateOptions.templateFormatVersion = TEMPLATE_FORMAT_VERSION; 97 | -------------------------------------------------------------------------------- /source/cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "npx ts-node bin/aws_devops_monitoring_dashboard.ts", 3 | "context": { 4 | "quicksight_source_template_arn": "arn:aws:quicksight:us-east-1:%%TEMPLATE_ACCOUNT_ID%%:template/%%DIST_QUICKSIGHT_NAMESPACE%%_%%SOLUTION_NAME%%_%%DASHED_VERSION%%", 5 | "constructs:stackRelativeExports": false, 6 | "@aws-cdk/aws-s3:serverAccessLogsUseBucketPolicy": true 7 | } 8 | } -------------------------------------------------------------------------------- /source/image/architecture_diagram.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/aws-solutions/aws-devops-monitoring-dashboard/6c02457a0bcf18fa7b37434b049597001c8d2478/source/image/architecture_diagram.png -------------------------------------------------------------------------------- /source/jest.config.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | module.exports = { 5 | roots: ['/test'], 6 | testMatch: ['**/*.test.ts'], 7 | transform: { 8 | '^.+\\.tsx?$': 'ts-jest' 9 | } 10 | }; 11 | -------------------------------------------------------------------------------- /source/lambda/event_parser/codebuild_index.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | const LOGGER = new (require('./lib/logger'))(); 7 | const codeBuildMetrics = require('./codebuild_metrics'); 8 | 9 | /** 10 | * Transform AWS CloudWatch metrics 11 | * @param event 12 | * @param context 13 | * @param callback 14 | */ 15 | exports.handler = async (event, context, callback) => { 16 | let recordTotalCount = event.records.length; 17 | let recordCount = 0; 18 | let droppedCount = 0; 19 | 20 | LOGGER.log('INFO', 'Total incoming source events : ' + recordTotalCount.toString()); 21 | 22 | const output = event.records.map(record => { 23 | try { 24 | const sourceData = Buffer.from(record.data, 'base64').toString('utf8'); 25 | 26 | recordCount++; 27 | 28 | LOGGER.log('INFO', 'Decoded source event ' + recordCount.toString() + ': ' + sourceData); 29 | 30 | const transformedRecordString = codeBuildMetrics.transformCodeBuildCWMetrics(sourceData, recordCount); 31 | 32 | // Drop record and notify as needed 33 | if (transformedRecordString.length === 0) { 34 | droppedCount++; 35 | LOGGER.log('INFO', 'Drop event ' + recordCount.toString()); 36 | return { 37 | recordId: record.recordId, 38 | result: 'Dropped', 39 | data: record.data 40 | }; 41 | } 42 | 43 | LOGGER.log('INFO', 'Transformed event ' + recordCount.toString() + ': ' + transformedRecordString); 44 | 45 | return { 46 | recordId: record.recordId, 47 | result: 'Ok', 48 | data: new Buffer.from(transformedRecordString).toString('base64') 49 | }; 50 | } catch (err) { 51 | LOGGER.log('WARN', 'Processing record ' + recordCount.toString() + ' failed. Error: ' + err.message); 52 | } 53 | }); 54 | 55 | LOGGER.log('INFO', 'Processed ' + recordTotalCount.toString() + ' event(s).'); 56 | LOGGER.log('INFO', 'Dropped ' + droppedCount.toString() + ' event(s).'); 57 | LOGGER.log('DEBUG', 'Payload for AWS Kinesis Data Firehose: ' + JSON.stringify(output, null, 2)); 58 | 59 | callback(null, { records: output }); 60 | }; 61 | -------------------------------------------------------------------------------- /source/lambda/event_parser/codebuild_metrics.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | const LOGGER = new (require('./lib/logger'))(); 7 | 8 | /** 9 | * Transform AWS CloudWatch metrics for CodeBuild 10 | * @param data 11 | * @param recordNumber 12 | */ 13 | const TransformCodeBuildCWMetrics = (data, recordNumber) => { 14 | try { 15 | // Split JSON objects in source data by newline and store them in an array 16 | const arrayOfObjectsFromData = data.split('\n'); 17 | 18 | // Filter out duplicated JSON objects that have no projects in dimensions 19 | const ObjectsWithDimensionsValue = arrayOfObjectsFromData.filter(obj => { 20 | try { 21 | const jsonData = JSON.parse(obj); 22 | return jsonData['dimensions']['ProjectName'] != undefined; 23 | } catch (err) { 24 | return false; 25 | } 26 | }); 27 | 28 | LOGGER.log( 29 | 'INFO', 30 | 'JSON objects after filtering empty dimensions for source event ' + 31 | recordNumber.toString() + 32 | ': ' + 33 | ObjectsWithDimensionsValue 34 | ); 35 | 36 | // Put JSON objects back to a string, separated by a newline 37 | return ObjectsWithDimensionsValue.join('\n'); 38 | } catch (error) { 39 | LOGGER.log('ERROR', 'Error transforming codebuild metrics failed. Error: ' + error.message); 40 | } 41 | }; 42 | 43 | module.exports = { 44 | transformCodeBuildCWMetrics: TransformCodeBuildCWMetrics 45 | }; 46 | -------------------------------------------------------------------------------- /source/lambda/event_parser/codedeploy_events.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | const LOGGER = new (require('./lib/logger'))(); 7 | 8 | /** 9 | * Transform AWS CloudWatch events from AWS CodeDeploy 10 | */ 11 | 12 | const transformCodeDeployEvents = (data, recordNumber) => { 13 | LOGGER.log('INFO', 'Start transforming CodeDeploy CW Event ' + recordNumber.toString()); 14 | 15 | let detailData = {}; 16 | let transformedRecord = {}; 17 | let transformedDetail = {}; 18 | 19 | //Process event data 20 | for (let key in data) { 21 | //Keep all key values that are not under detail tag and are common in all cloudwatch events 22 | if (key !== 'detail') { 23 | transformedRecord = getCWEventCommonData(key, data, transformedRecord); 24 | } 25 | //process key values under detail tag that are specific only for this event 26 | else { 27 | detailData = data['detail']; 28 | transformedDetail = getCodeDeployDetailData(detailData, transformedDetail); 29 | if (Object.keys(transformedDetail).length === 0) return {}; 30 | } //end else 31 | } //end for loop 32 | 33 | transformedRecord['detail'] = transformedDetail; 34 | 35 | LOGGER.log('DEBUG', 'Transformed record: ' + JSON.stringify(transformedRecord, null, 2)); 36 | LOGGER.log('INFO', 'End transforming CodeDeploy CW Event ' + recordNumber.toString()); 37 | 38 | return transformedRecord; 39 | }; 40 | 41 | /** 42 | * Keep all key values that are not under detail tag as they are common in all cloudwatch events 43 | * @param {string} key - key in the CodeDeploy CloudWatch raw event 44 | * @param {json} data - CodeDeploy CloudWatch raw event 45 | * @param {json} transformedRecord - Transformed CodeDeploy record 46 | */ 47 | const getCWEventCommonData = (key, data, transformedRecord) => { 48 | if (key !== 'detail-type') transformedRecord[key] = !transformedRecord.hasOwnProperty(key) ? 
data[key] : null; 49 | //rename key detail-type to detail_type to support athena query 50 | else transformedRecord['detail_type'] = !transformedRecord.hasOwnProperty(key) ? data[key] : null; 51 | 52 | return transformedRecord; 53 | }; 54 | 55 | /** 56 | * Process key values under detail tag that are specifically for this event 57 | * @param {json} detailData - CodeDeploy CloudWatch raw event data under detail key 58 | * @param {json} transformedDetail - Transformed CodeDeploy record under detail key 59 | */ 60 | const getCodeDeployDetailData = (detailData, transformedDetail) => { 61 | transformedDetail['deploymentState'] = detailData.hasOwnProperty('state') ? detailData['state'] : ''; 62 | 63 | // filter out deployments that are not completed 64 | if (transformedDetail['deploymentState'] !== 'SUCCESS' && transformedDetail['deploymentState'] !== 'FAILURE') 65 | return {}; 66 | 67 | transformedDetail['deploymentId'] = detailData.hasOwnProperty('deploymentId') ? detailData['deploymentId'] : ''; 68 | transformedDetail['deploymentApplication'] = detailData.hasOwnProperty('application') 69 | ? detailData['application'] 70 | : ''; 71 | 72 | return transformedDetail; 73 | }; 74 | 75 | module.exports = { 76 | transformCodeDeployEvents: transformCodeDeployEvents 77 | }; 78 | -------------------------------------------------------------------------------- /source/lambda/event_parser/github_events.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | const LOGGER = new (require('./lib/logger'))(); 7 | 8 | /** 9 | * Transform GitHub Events 10 | * @param data 11 | * @param recordNumber 12 | */ 13 | const TransformGitHubEvents = (data, recordNumber) => { 14 | try { 15 | LOGGER.log('INFO', 'Start transforming GitHub event ' + recordNumber.toString()); 16 | 17 | let transformedRecord = {}; 18 | let commitIDs = []; 19 | 20 | // If it is not push event, stop further processing but return empty json object to drop it 21 | if (!(data.hasOwnProperty('ref') && data.hasOwnProperty('pusher'))) { 22 | LOGGER.log('INFO', 'Event ' + recordNumber.toString() + ' is NOT a push event. STOP processing.'); 23 | return {}; 24 | } 25 | 26 | transformedRecord['repository_name'] = data['repository']['name']; 27 | transformedRecord['branch_name'] = data['ref'].split('/').pop(); 28 | transformedRecord['author_name'] = data['head_commit']['author']['name']; 29 | transformedRecord['time'] = new Date(data['head_commit']['timestamp']) 30 | .toISOString() 31 | .replace('T', ' ') 32 | .replace('Z', ' '); 33 | transformedRecord['event_name'] = data['additional-data']['input-parameters']['header']['X-GitHub-Event']; 34 | 35 | for (const commit in data['commits']) { 36 | commitIDs.push(data['commits'][commit]['id']); 37 | LOGGER.log('DEBUG', 'commit id ' + commit.toString() + ': ' + data['commits'][commit]['id']); 38 | } 39 | 40 | transformedRecord['commit_id'] = commitIDs; 41 | 42 | LOGGER.log('DEBUG', 'Transformed record: ' + JSON.stringify(transformedRecord, null, 2)); 43 | LOGGER.log('INFO', 'End transforming GitHub event ' + recordNumber.toString()); 44 | 45 | return transformedRecord; 46 | } catch (error) { 47 | LOGGER.log('ERROR', 'Transforming GitHub event failed. 
Error: ' + error.message); 48 | return {}; 49 | } 50 | }; 51 | 52 | module.exports = { 53 | transformGitHubEvents: TransformGitHubEvents 54 | }; 55 | -------------------------------------------------------------------------------- /source/lambda/event_parser/github_index.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | const LOGGER = new (require('./lib/logger'))(); 7 | const githubAuthorizer = require('./lib/github_authorizer'); 8 | const githubEvents = require('./github_events'); 9 | 10 | /** 11 | * Transform GitHub Events 12 | * @param event 13 | * @param _ 14 | * @param __ 15 | */ 16 | exports.handler = async (event, _, __) => { 17 | let recordTotalCount = event.records.length; 18 | let droppedCount = 0; 19 | 20 | LOGGER.log('INFO', 'Total incoming source events: ' + recordTotalCount.toString()); 21 | LOGGER.log('DEBUG', 'Source event: ' + JSON.stringify(event)); 22 | 23 | const output = await Promise.all( 24 | event.records.map(async (record, index) => { 25 | const currentRecord = index + 1; 26 | 27 | try { 28 | const sourceData = Buffer.from(record.data, 'base64').toString('utf8'); 29 | 30 | LOGGER.log('INFO', 'Decoded source data ' + currentRecord.toString() + ': ' + sourceData); 31 | 32 | const parsedData = JSON.parse(sourceData); 33 | 34 | const isAuthorized = await githubAuthorizer.authorizeGitHubRequest(parsedData); 35 | 36 | if (!isAuthorized) { 37 | droppedCount++; 38 | LOGGER.log('INFO', `Drop event ${currentRecord.toString()}. GitHub not Authorized.`); 39 | return { 40 | recordId: record.recordId, 41 | result: 'Dropped', 42 | data: record.data 43 | }; 44 | } 45 | 46 | const transformedRecord = githubEvents.transformGitHubEvents(parsedData, currentRecord); 47 | 48 | // Drop record and notify as needed 49 | if (Object.keys(transformedRecord).length === 0) { 50 | droppedCount++; 51 | LOGGER.log('INFO', 'Drop event ' + currentRecord.toString()); 52 | return { 53 | recordId: record.recordId, 54 | result: 'Dropped', 55 | data: record.data 56 | }; 57 | } 58 | 59 | LOGGER.log( 60 | 'INFO', 61 | 'Transformed event ' + currentRecord.toString() + ': ' + JSON.stringify(transformedRecord, null, 2) 62 | ); 63 | 64 | const transformedRecordString = JSON.stringify(transformedRecord); 65 | 66 | return { 67 | recordId: record.recordId, 68 | result: 'Ok', 69 | data: new Buffer.from(transformedRecordString).toString('base64') 70 | }; 71 | } catch (err) { 72 | LOGGER.log('ERROR', 'Processing record ' + currentRecord.toString() + ' failed. Error: ' + err.message); 73 | } 74 | }) 75 | ); 76 | 77 | LOGGER.log('INFO', 'Processed ' + recordTotalCount.toString() + ' event(s).'); 78 | LOGGER.log('INFO', 'Dropped ' + droppedCount.toString() + ' event(s).'); 79 | LOGGER.log('DEBUG', 'Payload for AWS Kinesis Data Firehose: ' + JSON.stringify(output, null, 2)); 80 | 81 | return { records: output }; 82 | }; 83 | -------------------------------------------------------------------------------- /source/lambda/event_parser/index.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | const LOGGER = new (require('./lib/logger'))(); 7 | const codeCommitEvents = require('./codecommit_events'); 8 | const synCanaryAlarmEvents = require('./synthetic_canary_alarm_events'); 9 | const codeDeployEvents = require('./codedeploy_events'); 10 | const codePipelineEvents = require('./codepipeline_events'); 11 | 12 | /** 13 | * Transform AWS CloudWatch event data 14 | * @param event 15 | * @param context 16 | * @param callback 17 | */ 18 | exports.handler = async (event, context, callback) => { 19 | /* Process the list of records and transform them */ 20 | let recordTotalCount = event.records.length; 21 | let recordCount = 0; 22 | let droppedCount = 0; 23 | let transformedRecord = {}; 24 | 25 | LOGGER.log('INFO', 'Total incoming source events : ' + recordTotalCount.toString()); 26 | 27 | const output = event.records.map(record => { 28 | try { 29 | const sourceData = Buffer.from(record.data, 'base64').toString('utf8'); 30 | 31 | recordCount++; 32 | 33 | LOGGER.log('INFO', 'Decoded source event ' + recordCount.toString() + ': ' + sourceData); 34 | 35 | const parsedData = JSON.parse(sourceData); 36 | 37 | // Transform codecommit cloudwatch events data 38 | if (parsedData['source'] === 'aws.codecommit') { 39 | transformedRecord = codeCommitEvents.transformCodeCommitEvents(parsedData, recordCount); 40 | } 41 | // Transform synthetic canary alarm cloudwatch events data 42 | else if (parsedData['source'] === 'aws.cloudwatch') { 43 | transformedRecord = synCanaryAlarmEvents.transformSyntheticCanaryAlarmEvents(parsedData, recordCount); 44 | } 45 | // Transform codedeploy cloudwatch events data 46 | else if (parsedData['source'] === 'aws.codedeploy') { 47 | transformedRecord = codeDeployEvents.transformCodeDeployEvents(parsedData, recordCount); 48 | } else if (parsedData['source'] === 'aws.codepipeline') { 49 | transformedRecord = codePipelineEvents.transformCodePipelineEvents(parsedData, recordCount); 50 | } 51 | // Drop record and notify as needed 52 | if (Object.keys(transformedRecord).length === 0) { 53 | droppedCount++; 54 | LOGGER.log('INFO', 'Drop event ' + recordCount.toString()); 55 | return { 56 | recordId: record.recordId, 57 | result: 'Dropped', 58 | data: record.data 59 | }; 60 | } 61 | 62 | LOGGER.log( 63 | 'INFO', 64 | 'Transformed event ' + recordCount.toString() + ': ' + JSON.stringify(transformedRecord, null, 2) 65 | ); 66 | 67 | let transformedRecordString = JSON.stringify(transformedRecord); 68 | 69 | //add new line break between records 70 | if (recordCount < recordTotalCount) transformedRecordString = transformedRecordString + '\n'; 71 | 72 | return { 73 | recordId: record.recordId, 74 | result: 'Ok', 75 | data: new Buffer.from(transformedRecordString).toString('base64') 76 | }; 77 | } catch (err) { 78 | LOGGER.log('INFO', 'Processing record ' + recordCount.toString() + ' failed. Error: ' + err.message); 79 | } 80 | }); 81 | 82 | LOGGER.log('INFO', 'Processed ' + recordTotalCount.toString() + ' event(s).'); 83 | LOGGER.log('INFO', 'Dropped ' + droppedCount.toString() + ' event(s).'); 84 | LOGGER.log('DEBUG', 'Payload for AWS Kinesis Firehose: ' + JSON.stringify(output, null, 2)); 85 | 86 | callback(null, { records: output }); 87 | }; 88 | -------------------------------------------------------------------------------- /source/lambda/event_parser/jest.config.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. 
All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | module.exports = { 5 | testEnvironment: 'node', 6 | testMatch: ['test/**/*.[jt]s?(x)', '**/?(*.)+(spec|test).[jt]s?(x)'], 7 | collectCoverageFrom: ['*.js', 'lib/*.js', '!test/*.js', '!jest.config.js'], 8 | coverageReporters: [['lcov', { projectRoot: '../../../' }], 'text'] 9 | }; 10 | -------------------------------------------------------------------------------- /source/lambda/event_parser/lib/ip_helper.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | const LOGGER = new (require('./logger'))(); 7 | 8 | /** 9 | * Checks if the provided ip is in the provided ipRange 10 | * @param {string} ip - The IP address being validated 11 | * @param {string} ipRange - The masked IP address range e.g. 192.168.1.1/20 12 | * @returns true if the provided ip falls in the ipRange 13 | */ 14 | const isIpInRange = (ip, ipRange) => { 15 | const [range, bits = 32] = ipRange.split('/'); 16 | const mask = ~(2 ** (32 - bits) - 1); 17 | 18 | const ipA = ip4ToInt(ip) & mask; 19 | const ipB = ip4ToInt(range) & mask; 20 | return !Number.isNaN(ipA) && !Number.isNaN(ipB) && ipA === ipB; 21 | }; 22 | 23 | /** 24 | * Converts an IPv4 address string into an integer 25 | * @param {string} ip - The IP address to convert 26 | * @returns integer representation of the IP address string 27 | */ 28 | const ip4ToInt = ip => { 29 | const ipRegEx = /^(?!.*\.$)((?!0\d)(1?\d?\d|25[0-5]|2[0-4]\d)(\.|$)){4}$/; 30 | if (!ipRegEx.test(ip)) { 31 | LOGGER.log('ERROR', `Invalid IP Address: ${ip}`); 32 | return NaN; 33 | } 34 | 35 | return ip.split('.').reduce((int, oct) => (int << 8) + parseInt(oct, 10), 0) >>> 0; 36 | }; 37 | 38 | module.exports = { 39 | isIpInRange: isIpInRange 40 | }; 41 | -------------------------------------------------------------------------------- /source/lambda/event_parser/lib/logger.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | class Logger { 7 | constructor() { 8 | this.loglevel = process.env.LOG_LEVEL; 9 | this.LOGLEVELS = { 10 | ERROR: 1, 11 | WARN: 2, 12 | INFO: 3, 13 | DEBUG: 4 14 | }; 15 | } 16 | 17 | log(level, message) { 18 | if (this.LOGLEVELS[level] <= this.LOGLEVELS[this.loglevel]) console.log(`[${level}][${message}]`); 19 | } 20 | } 21 | 22 | module.exports = Object.freeze(Logger); 23 | -------------------------------------------------------------------------------- /source/lambda/event_parser/lib/secrets_manager.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | const LOGGER = new (require('./logger'))(); 7 | const { SecretsManager } = require('@aws-sdk/client-secrets-manager'); 8 | 9 | const options = { 10 | customUserAgent: process.env.userAgentExtra 11 | }; 12 | const secretsManager = new SecretsManager(options); 13 | 14 | const secretMap = new Map(); 15 | 16 | /** 17 | * Retrieve the secret with the provided secretId. Returns the secret or undefined if one isn't found. 
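 * Results are cached in the module-level secretMap for the lifetime of the Lambda container, so repeated calls with the same secretId do not make additional Secrets Manager API calls. * Hypothetical usage sketch (illustrative only; this secret ID is not a name defined by this solution): const webhookSecret = await getSecret('github/webhook/secret');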
18 | * @param {string} secretId - the ID of the secret to retrieve 19 | * @returns The SecretString if found, otherwise undefined 20 | */ 21 | const getSecret = async secretId => { 22 | if (secretMap.has(secretId)) { 23 | return secretMap.get(secretId); 24 | } 25 | 26 | const params = { 27 | SecretId: secretId 28 | }; 29 | let secret; 30 | 31 | try { 32 | const response = await secretsManager.getSecretValue(params); 33 | secret = response.SecretString; 34 | secretMap.set(secretId, secret); 35 | } catch (error) { 36 | LOGGER.log('ERROR', `Error when retrieving secret. ${error.message}`); 37 | } 38 | 39 | return secret; 40 | }; 41 | 42 | module.exports = { 43 | getSecret: getSecret 44 | }; 45 | -------------------------------------------------------------------------------- /source/lambda/event_parser/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "event-parser-js", 3 | "description": "Lambda function for transforming cloudwatch event data within kinesis firehose", 4 | "main": "index.js", 5 | "author": { 6 | "name": "Amazon Web Services", 7 | "url": "https://aws.amazon.com/solutions" 8 | }, 9 | "license": "Apache-2.0", 10 | "dependencies": { 11 | "async": "^3.2.4", 12 | "uuid": "^9.0.0" 13 | }, 14 | "devDependencies": { 15 | "@aws-sdk/client-secrets-manager": "^3.470.0", 16 | "aws-sdk-mock": "^5.8.0", 17 | "chai": "^4.3.7", 18 | "jest": "^29.3.1", 19 | "mocha": "^10.2.0", 20 | "npm-run-all": "^4.1.5", 21 | "nyc": "^15.1.0", 22 | "sinon": "^15.0.1", 23 | "sinon-chai": "^3.7.0" 24 | }, 25 | "overrides": { 26 | "semver": "~7.5.2" 27 | }, 28 | "scripts": { 29 | "pretest": "npm install", 30 | "test": "jest --coverage", 31 | "build:init": "rm -rf package-lock.json && rm -rf dist && rm -rf node_modules && mkdir dist", 32 | "build:zip": "rm -rf package-lock.json && zip -rq --exclude=*tests* --exclude=*template.yml --exclude=*dist/* dist/event-parser-js.zip .", 33 | "build": "npm run build:init && npm install --production && npm run build:zip", 34 | "clean": "rm -rf node_modules" 35 | }, 36 | "bundleDependencies": [] 37 | } 38 | -------------------------------------------------------------------------------- /source/lambda/event_parser/test/github_events.spec.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | const { expect } = require('chai'); 7 | const githubEvent = require('../github_events'); 8 | 9 | const recordNumber = 1; 10 | const data = { 11 | ref: 'refs/heads/main', 12 | repository: { 13 | name: 'aws-integration-test' 14 | }, 15 | pusher: { 16 | name: 'fbaggins', 17 | email: 'emailAddress' 18 | }, 19 | commits: [ 20 | { 21 | id: '2bf894de6812831eed090150d2827e8adc15eb40' 22 | } 23 | ], 24 | head_commit: { 25 | timestamp: '2022-01-01T12:34:56Z', 26 | author: { 27 | name: 'Frodo Baggins' 28 | } 29 | }, 30 | 'additional-data': { 31 | 'input-parameters': { 32 | header: { 33 | 'X-GitHub-Event': 'push' 34 | } 35 | } 36 | } 37 | }; 38 | 39 | const dataNoPusher = { 40 | ref: 'refs/heads/main', 41 | repository: { 42 | name: 'aws-integration-test' 43 | }, 44 | commits: [ 45 | { 46 | id: '2bf894de6812831eed090150d2827e8adc15eb40' 47 | } 48 | ], 49 | head_commit: { 50 | timestamp: '2022-01-01T12:34:56Z', 51 | author: { 52 | name: 'Frodo Baggins' 53 | } 54 | }, 55 | 'additional-data': { 56 | 'input-parameters': { 57 | header: { 58 | 'X-GitHub-Event': 'push' 59 | } 60 | } 61 | } 62 | }; 63 | 64 | const dataNoAdditionalData = { 65 | ref: 'refs/heads/main', 66 | repository: { 67 | name: 'aws-integration-test' 68 | }, 69 | pusher: { 70 | name: 'fbaggins', 71 | email: 'emailAddress' 72 | }, 73 | commits: [ 74 | { 75 | id: '2bf894de6812831eed090150d2827e8adc15eb40' 76 | } 77 | ], 78 | head_commit: { 79 | timestamp: '2022-01-01T12:34:56Z', 80 | author: { 81 | name: 'Frodo Baggins' 82 | } 83 | } 84 | }; 85 | 86 | const expectedTransformedRecord = { 87 | repository_name: 'aws-integration-test', 88 | branch_name: 'main', 89 | author_name: 'Frodo Baggins', 90 | time: '2022-01-01 12:34:56.000 ', 91 | event_name: 'push', 92 | commit_id: ['2bf894de6812831eed090150d2827e8adc15eb40'] 93 | }; 94 | const emptyTransformedRecord = {}; 95 | 96 | describe('When testing github_events', () => { 97 | it('should transform a record', () => { 98 | const transformedRecord = githubEvent.transformGitHubEvents(data, recordNumber); 99 | 100 | expect(transformedRecord).to.eql(expectedTransformedRecord); 101 | }); 102 | 103 | it('should return an empty object if it is not a push event', () => { 104 | const transformedRecord = githubEvent.transformGitHubEvents(dataNoPusher, recordNumber); 105 | 106 | expect(transformedRecord).to.eql(emptyTransformedRecord); 107 | }); 108 | 109 | it('should return an empty object if an exception is thrown during the transformation', () => { 110 | const transformedRecord = githubEvent.transformGitHubEvents(dataNoAdditionalData, recordNumber); 111 | 112 | expect(transformedRecord).to.eql(emptyTransformedRecord); 113 | }); 114 | }); 115 | -------------------------------------------------------------------------------- /source/lambda/event_parser/test/github_index.spec.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | const githubIndex = require('../github_index'); 7 | const githubAuthorizer = require('../lib/github_authorizer'); 8 | const githubEvents = require('../github_events'); 9 | const { expect } = require('chai'); 10 | 11 | const validRecord = { 12 | recordId: '123456789', 13 | approximateArrivalTimestamp: 1646768972399, 14 | data: 'ewogICAgInJlcG9zaXRvcnlfbmFtZSI6ICJhd3MtaW50ZWdyYXRpb24tdGVzdCIsCiAgICAiYnJhbmNoX25hbWUiOiAibWFpbiIsCiAgICAiYXV0aG9yX25hbWUiOiAiRnJvZG8gQmFnZ2lucyIsCiAgICAidGltZSI6ICIyMDIyLTAxLTAxIDEyOjM0OjU2LjAwMCAiLAogICAgImV2ZW50X25hbWUiOiAicHVzaCIsCiAgICAiY29tbWl0X2lkIjogWyIyYmY4OTRkZTY4MTI4MzFlZWQwOTAxNTBkMjgyN2U4YWRjMTVlYjQwIl0KfQ==' 15 | }; 16 | 17 | const invalidRecord = { 18 | recordId: '123456789', 19 | approximateArrivalTimestamp: 1646768972399, 20 | data: 'ewogICAgImJhZF9kYXRhIjogImF3cy1pbnRlZ3JhdGlvbi10ZXN0Igp9' 21 | }; 22 | 23 | const event = { 24 | records: [validRecord] 25 | }; 26 | 27 | const emptyEvent = { 28 | records: [invalidRecord] 29 | }; 30 | 31 | const multiEvent = { 32 | records: [validRecord, validRecord, invalidRecord] 33 | }; 34 | 35 | const nonsenseEvent = { 36 | records: [undefined] 37 | }; 38 | 39 | jest.mock('../lib/github_authorizer'); 40 | githubAuthorizer.authorizeGitHubRequest.mockResolvedValue(true); 41 | 42 | const transformedRecord = { 43 | repository_name: 'aws-integration-test', 44 | branch_name: 'main', 45 | author_name: 'Frodo Baggins', 46 | time: '2022-01-01 12:34:56.000 ', 47 | event_name: 'push', 48 | commit_id: ['2bf894de6812831eed090150d2827e8adc15eb40'] 49 | }; 50 | const transformedRecordString = JSON.stringify(transformedRecord); 51 | 52 | const emptyTransformedRecord = {}; 53 | 54 | jest.mock('../github_events'); 55 | githubEvents.transformGitHubEvents.mockReturnValue(transformedRecord); 56 | 57 | const okayRecord = { 58 | recordId: '123456789', 59 | result: 'Ok', 60 | data: new Buffer.from(transformedRecordString).toString('base64') 61 | }; 62 | const unauthorizedRecord = { recordId: '123456789', result: 'Dropped', data: validRecord.data }; 63 | const droppedRecord = { recordId: '123456789', result: 'Dropped', data: invalidRecord.data }; 64 | 65 | const expectedRecords = { records: [okayRecord] }; 66 | const expectedUnauthorizedRecord = { records: [unauthorizedRecord] }; 67 | const expectedNonsenseRecord = { records: [undefined] }; 68 | const droppedRecords = { records: [droppedRecord] }; 69 | const expectedMultipleRecords = { records: [okayRecord, okayRecord, droppedRecord] }; 70 | 71 | describe('When testing github_index', () => { 72 | it('should transform a record using GitHub event transformation', async () => { 73 | const records = await githubIndex.handler(event, undefined, undefined); 74 | expect(records).to.eql(expectedRecords); 75 | }); 76 | 77 | it('should skip a record if not authorized from github', async () => { 78 | githubAuthorizer.authorizeGitHubRequest.mockResolvedValueOnce(false); 79 | 80 | const records = await githubIndex.handler(event, undefined, undefined); 81 | expect(records).to.eql(expectedUnauthorizedRecord); 82 | }); 83 | 84 | it('should drop an empty record', async () => { 85 | githubEvents.transformGitHubEvents.mockReturnValueOnce(emptyTransformedRecord); 86 | const records = await githubIndex.handler(emptyEvent, undefined, undefined); 87 | expect(records).to.eql(droppedRecords); 88 | }); 89 | 90 | it('should create an undefined object if it throws an exception while parsing', async () => { 91 | const records = await 
githubIndex.handler(nonsenseEvent, undefined, undefined); 92 | expect(records).to.eql(expectedNonsenseRecord); 93 | }); 94 | 95 | it('should transform multiple records using GitHub event transformation', async () => { 96 | githubEvents.transformGitHubEvents.mockReturnValueOnce(transformedRecord); 97 | githubEvents.transformGitHubEvents.mockReturnValueOnce(transformedRecord); 98 | githubEvents.transformGitHubEvents.mockReturnValueOnce(emptyTransformedRecord); 99 | const records = await githubIndex.handler(multiEvent, undefined, undefined); 100 | expect(records).to.eql(expectedMultipleRecords); 101 | }); 102 | }); 103 | -------------------------------------------------------------------------------- /source/lambda/event_parser/test/ip_helper.spec.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | const ipHelper = require('../lib/ip_helper'); 7 | 8 | const ip = '192.168.1.1'; 9 | const ipRange = '192.168.100.1/16'; 10 | const badIPRange = '192.168.100.1/24'; 11 | const malformedIpRange = '192x.168.100.1/24'; 12 | const malformedIp = 'abc.def.ghi.jkl'; 13 | 14 | describe('When testing ip_helper', () => { 15 | it('should match an IP in the given range', () => { 16 | const isInRange = ipHelper.isIpInRange(ip, ipRange); 17 | expect(isInRange).toBe(true); 18 | }); 19 | 20 | it('should match a single IP', () => { 21 | const isInRange = ipHelper.isIpInRange('192.167.200.1', '192.167.200.1'); 22 | expect(isInRange).toBe(true); 23 | }); 24 | 25 | it('should fail if the ip address is out of range', () => { 26 | const isInRange = ipHelper.isIpInRange(ip, badIPRange); 27 | expect(isInRange).toBe(false); 28 | }); 29 | 30 | it('should return false if a malformed IP is provided', () => { 31 | const isInRange = ipHelper.isIpInRange(malformedIp, ipRange); 32 | expect(isInRange).toBe(false); 33 | }); 34 | 35 | it('should return false if a malformed IP range is provided', () => { 36 | const isInRange = ipHelper.isIpInRange(ip, malformedIpRange); 37 | expect(isInRange).toBe(false); 38 | }); 39 | }); 40 | -------------------------------------------------------------------------------- /source/lambda/event_parser/test/logger.spec.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | require('chai').assert; 7 | require('chai').expect; 8 | 9 | let Logger = new (require('../lib/logger'))(); 10 | 11 | describe('#Logger', () => { 12 | describe('#logger', () => { 13 | it('check with LOG_LEVEL=INFO', () => { 14 | Logger.loglevel = 'INFO'; 15 | Logger.log('INFO', 'INFO_MESSAGE'); 16 | Logger.log('WARN', 'WARN_MESSAGE'); 17 | Logger.log('ERROR', 'ERROR_MESSAGE'); 18 | Logger.log('DEBUG', 'DEBUG_MESSAGE'); 19 | }); 20 | 21 | it('check with LOG_LEVEL=WARN', () => { 22 | Logger.loglevel = 'WARN'; 23 | Logger.log('INFO', 'INFO_MESSAGE'); 24 | Logger.log('WARN', 'WARN_MESSAGE'); 25 | Logger.log('ERROR', 'ERROR_MESSAGE'); 26 | Logger.log('DEBUG', 'DEBUG_MESSAGE'); 27 | }); 28 | 29 | it('check with LOG_LEVEL=ERROR', () => { 30 | Logger.loglevel = 'ERROR'; 31 | Logger.log('INFO', 'INFO_MESSAGE'); 32 | Logger.log('WARN', 'WARN_MESSAGE'); 33 | Logger.log('ERROR', 'ERROR_MESSAGE'); 34 | Logger.log('DEBUG', 'DEBUG_MESSAGE'); 35 | }); 36 | 37 | it('check with LOG_LEVEL=DEBUG', () => { 38 | Logger.loglevel = 'DEBUG'; 39 | Logger.log('INFO', 'INFO_MESSAGE'); 40 | Logger.log('WARN', 'WARN_MESSAGE'); 41 | Logger.log('ERROR', 'ERROR_MESSAGE'); 42 | Logger.log('DEBUG', 'DEBUG_MESSAGE'); 43 | }); 44 | }); 45 | }); 46 | -------------------------------------------------------------------------------- /source/lambda/event_parser/test/secrets_manager.spec.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | const secretsManager = require('../lib/secrets_manager'); 7 | 8 | const goodSecretId = 'goodSecretId'; 9 | const badSecretId = 'badSecretId'; 10 | 11 | const successfulSecretResponse = { 12 | SecretString: 'mySecretString' 13 | }; 14 | 15 | const mockGetSecretValue = jest.fn(SecretId => { 16 | if (SecretId === goodSecretId) { 17 | return { 18 | SecretString: successfulSecretResponse.SecretString 19 | }; 20 | } else { 21 | throw Error('secret not found'); 22 | } 23 | }); 24 | 25 | jest.mock('@aws-sdk/client-secrets-manager', () => ({ 26 | config: { 27 | logger: String, 28 | update() { 29 | return {}; 30 | } 31 | }, 32 | SecretsManager: jest.fn(() => ({ 33 | getSecretValue: jest.fn(({ SecretId }) => mockGetSecretValue(SecretId)) 34 | })) 35 | })); 36 | 37 | describe('When testing secrets manager', () => { 38 | afterEach(() => { 39 | mockGetSecretValue.mockClear(); 40 | }); 41 | 42 | it('should successfully retrieve a secret', async () => { 43 | const secret = await secretsManager.getSecret(goodSecretId); 44 | expect(secret).toBe(successfulSecretResponse.SecretString); 45 | }); 46 | 47 | it('should return a cached secret', async () => { 48 | let secret = await secretsManager.getSecret(goodSecretId); 49 | expect(secret).toBe(successfulSecretResponse.SecretString); 50 | 51 | expect(mockGetSecretValue).not.toHaveBeenCalled(); 52 | 53 | secret = await secretsManager.getSecret(goodSecretId); 54 | expect(secret).toBe(successfulSecretResponse.SecretString); 55 | }); 56 | 57 | it('should return undefined if there is an error when retrieving the secret', async () => { 58 | const secret = await secretsManager.getSecret(badSecretId); 59 | expect(secret).toBe(undefined); 60 | }); 61 | }); 62 | -------------------------------------------------------------------------------- /source/lambda/multi_account_custom_resources/jest.config.js: 
-------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | module.exports = { 5 | testEnvironment: 'node', 6 | testMatch: ['test/**/*.[jt]s?(x)', '**/?(*.)+(spec|test).[jt]s?(x)'], 7 | collectCoverageFrom: ['*.js', 'lib/*.js', '!test/*.js', '!jest.config.js'], 8 | coverageReporters: [['lcov', { projectRoot: '../../../' }], 'text'] 9 | }; 10 | -------------------------------------------------------------------------------- /source/lambda/multi_account_custom_resources/lib/cfn.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | const axios = require('axios'); 5 | 6 | /** 7 | * Send custom resource response. 8 | * @param {object} event - Custom resource event 9 | * @param {string} logStreamName - Custom resource log stream name 10 | * @param {object} response - Response object { status: "SUCCESS|FAILED", data: any } 11 | */ 12 | async function sendResponse(event, logStreamName, response) { 13 | const responseBody = JSON.stringify({ 14 | Status: response.status, 15 | Reason: `See the details in CloudWatch Log Stream: ${logStreamName}`, 16 | PhysicalResourceId: logStreamName, 17 | StackId: event.StackId, 18 | RequestId: event.RequestId, 19 | LogicalResourceId: event.LogicalResourceId, 20 | Data: response.data 21 | }); 22 | 23 | console.log(`RESPONSE BODY: ${responseBody}`); 24 | 25 | const config = { 26 | headers: { 27 | 'Content-Type': '', 28 | 'Content-Length': responseBody.length 29 | } 30 | }; 31 | 32 | await axios.put(event.ResponseURL, responseBody, config); 33 | } 34 | 35 | module.exports = { 36 | send: sendResponse 37 | }; 38 | -------------------------------------------------------------------------------- /source/lambda/multi_account_custom_resources/lib/eventbridge.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | const { EventBridge } = require('@aws-sdk/client-eventbridge'); 7 | const LOGGER = new (require('./logger'))(); 8 | 9 | const userAgentExtra = process.env.UserAgentExtra; 10 | const options = userAgentExtra ? { customUserAgent: userAgentExtra } : {}; 11 | const eventBridge = new EventBridge(options); 12 | 13 | /** 14 | * Add permission to allow the specified AWS account or AWS organization to put events to the specified event bus 15 | * @param principalType 16 | * @param principal 17 | * @param eventBusName 18 | */ 19 | const PutPermission = async (principalType, principal, eventBusName) => { 20 | try { 21 | LOGGER.log('INFO', `[PutPermission] Start putting permission on event bus ${eventBusName} for ${principal}.`); 22 | 23 | let params = { 24 | Action: 'events:PutEvents', 25 | EventBusName: eventBusName, 26 | StatementId: principal 27 | }; 28 | 29 | if (principalType === 'Account') { 30 | params = { ...params, Principal: principal }; 31 | } 32 | // When principal type is AWS Organization, add a condition to grant permission to all the accounts within the organization. 
33 | else { 34 | params = { 35 | ...params, 36 | Principal: '*', 37 | Condition: { 38 | Key: 'aws:PrincipalOrgID', 39 | Type: 'StringEquals', 40 | Value: principal 41 | } 42 | }; 43 | } 44 | 45 | const response = await eventBridge.putPermission(params); 46 | 47 | LOGGER.log('DEBUG', `[PutPermission] Response: ${JSON.stringify(response)}`); 48 | LOGGER.log('INFO', '[PutPermission] End putting permission on event bus.'); 49 | 50 | return response; 51 | } catch (err) { 52 | LOGGER.log('ERROR', `[PutPermission]Error when putting permission on event bus: ${err.message}`); 53 | throw err; 54 | } 55 | }; 56 | 57 | /** 58 | * Remove permission that allows the specified AWS account or AWS organization to put events to the specified event bus 59 | * @param principal 60 | * @param eventBusName 61 | */ 62 | const RemovePermission = async (principal, eventBusName) => { 63 | try { 64 | LOGGER.log('INFO', `[RemovePermission] Start removing permission from event bus ${eventBusName} for ${principal}.`); 65 | 66 | let params = { 67 | EventBusName: eventBusName, 68 | StatementId: principal, 69 | RemoveAllPermissions: false 70 | }; 71 | 72 | const response = await eventBridge.removePermission(params); 73 | 74 | LOGGER.log('DEBUG', `[RemovePermission] Response: ${JSON.stringify(response)}`); 75 | LOGGER.log('INFO', '[RemovePermission] END removing permission from event bus.'); 76 | 77 | return response; 78 | } catch (err) { 79 | if (err.name !== 'ResourceNotFoundException') { 80 | LOGGER.log('ERROR', `[RemovePermission] Error when removing permission from event bus: ${err.message}`); 81 | throw err; 82 | } 83 | } 84 | }; 85 | 86 | /** 87 | * Displays details about an event bus, including name, ARN, policy, etc. 88 | * @param eventBusName 89 | */ 90 | const DescribeEventBus = async eventBusName => { 91 | try { 92 | LOGGER.log('DEBUG', `[DescribeEventBus] Start describing event bus ${eventBusName}.`); 93 | 94 | const params = { 95 | Name: eventBusName 96 | }; 97 | 98 | const response = await eventBridge.describeEventBus(params); 99 | 100 | LOGGER.log('INFO', `[DescribeEventBus] Response: ${JSON.stringify(response)}`); 101 | LOGGER.log('DEBUG', '[DescribeEventBus] End describing event bus.'); 102 | 103 | return response; 104 | } catch (err) { 105 | LOGGER.log('ERROR', `[DescribeEventBus]Error when describing event bus: ${err.message}`); 106 | throw err; 107 | } 108 | }; 109 | 110 | module.exports = { 111 | putPermission: PutPermission, 112 | removePermission: RemovePermission, 113 | describeEventBus: DescribeEventBus 114 | }; 115 | -------------------------------------------------------------------------------- /source/lambda/multi_account_custom_resources/lib/logger.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | class Logger { 7 | constructor() { 8 | this.loglevel = process.env.LOG_LEVEL; 9 | this.LOGLEVELS = { 10 | ERROR: 1, 11 | WARN: 2, 12 | INFO: 3, 13 | DEBUG: 4 14 | }; 15 | } 16 | 17 | log(level, message) { 18 | if (this.LOGLEVELS[level] <= this.LOGLEVELS[this.loglevel]) console.log(`[${level}][${message}]`); 19 | } 20 | } 21 | 22 | module.exports = Object.freeze(Logger); 23 | -------------------------------------------------------------------------------- /source/lambda/multi_account_custom_resources/lib/s3_bucket_policy.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | const { S3 } = require('@aws-sdk/client-s3'); 7 | const LOGGER = new (require('./logger'))(); 8 | 9 | const userAgentExtra = process.env.UserAgentExtra; 10 | const options = userAgentExtra ? { customUserAgent: userAgentExtra } : {}; 11 | const s3 = new S3(options); 12 | 13 | /** 14 | * Get s3 bucket policy given a bucket name 15 | * @param bucketName 16 | */ 17 | const GetS3BucketPolicy = async bucketName => { 18 | try { 19 | LOGGER.log('INFO', `[GetS3BucketPolicy] Start getting bucket policy on s3 bucket ${bucketName}`); 20 | 21 | let params = { 22 | Bucket: bucketName 23 | }; 24 | 25 | const response = await s3.getBucketPolicy(params); 26 | 27 | LOGGER.log('INFO', `[GetS3BucketPolicy] Response: ${JSON.stringify(response)}`); 28 | LOGGER.log('INFO', '[GetS3BucketPolicy] END getting s3 bucket policy.'); 29 | 30 | return response; 31 | } catch (err) { 32 | if (err.name !== 'NoSuchBucketPolicy') { 33 | LOGGER.log('ERROR', `[GetS3BucketPolicy] Error when getting s3 bucket policy: ${err.message}`); 34 | throw err; 35 | } 36 | } 37 | }; 38 | 39 | /** 40 | * Put s3 bucket policy on a bucket 41 | * @param bucketName 42 | * @param bucketPolicy 43 | */ 44 | const PutS3BucketPolicy = async (bucketName, bucketPolicy) => { 45 | try { 46 | LOGGER.log('INFO', `[PutS3BucketPolicy] Start putting bucket policy ${bucketPolicy} on s3 bucket ${bucketName}.`); 47 | 48 | const params = { 49 | Bucket: bucketName, 50 | Policy: bucketPolicy 51 | }; 52 | 53 | const response = await s3.putBucketPolicy(params); 54 | 55 | LOGGER.log('DEBUG', `[PutS3BucketPolicy] Response: ${JSON.stringify(response)}`); 56 | LOGGER.log('INFO', '[PutS3BucketPolicy] End putting bucket policy on s3 bucket .'); 57 | 58 | return response; 59 | } catch (err) { 60 | LOGGER.log('ERROR', `[PutS3BucketPolicy] Error when putting bucket policy on s3 bucket : ${err.message}`); 61 | throw err; 62 | } 63 | }; 64 | 65 | /** 66 | * Delete s3 bucket policy from a bucket 67 | * @param bucketName 68 | */ 69 | const DeleteS3BucketPolicy = async bucketName => { 70 | try { 71 | LOGGER.log('INFO', `[DeleteS3BucketPolicy] Start deleting bucket policy from s3 bucket ${bucketName}`); 72 | 73 | const params = { 74 | Bucket: bucketName 75 | }; 76 | 77 | const response = await s3.deleteBucketPolicy(params); 78 | 79 | LOGGER.log('DEBUG', `[DeleteS3BucketPolicy] Response: ${JSON.stringify(response)}`); 80 | LOGGER.log('INFO', '[DeleteS3BucketPolicy] END deleting s3 bucket policy.'); 81 | 82 | return response; 83 | } catch (err) { 84 | if (err.name !== 'NoSuchBucketPolicy') { 85 | LOGGER.log( 86 | 'ERROR', 87 | `[DeleteS3BucketPolicy] Error when deleting bucket policy from s3 bucket ${bucketName}: ${err.message}` 88 | ); 89 | throw err; 90 | } 91 | } 92 
| }; 93 | 94 | module.exports = { 95 | putS3BucketPolicy: PutS3BucketPolicy, 96 | getS3BucketPolicy: GetS3BucketPolicy, 97 | deleteS3BucketPolicy: DeleteS3BucketPolicy 98 | }; 99 | -------------------------------------------------------------------------------- /source/lambda/multi_account_custom_resources/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "multi-account-custom-resources-js", 3 | "description": "Lambda function for setting up permissions required for multi-account data ingestion", 4 | "main": "index.js", 5 | "author": { 6 | "name": "Amazon Web Services", 7 | "url": "https://aws.amazon.com/solutions" 8 | }, 9 | "license": "Apache-2.0", 10 | "dependencies": { 11 | "async": "^3.2.4", 12 | "axios": "^1.7.4", 13 | "uuid": "^9.0.0" 14 | }, 15 | "devDependencies": { 16 | "@aws-sdk/client-eventbridge": "^3.470.0", 17 | "@aws-sdk/client-s3": "^3.470.0", 18 | "aws-sdk-mock": "^5.8.0", 19 | "axios-mock-adapter": "^1.21.2", 20 | "chai": "^4.3.7", 21 | "jest": "^29.3.1", 22 | "mocha": "^10.2.0", 23 | "npm-run-all": "^4.1.5", 24 | "nyc": "^15.1.0", 25 | "sinon": "^15.0.1", 26 | "sinon-chai": "^3.7.0" 27 | }, 28 | "overrides": { 29 | "semver": "~7.5.2" 30 | }, 31 | "scripts": { 32 | "pretest": "npm install", 33 | "test": "jest --coverage", 34 | "build:init": "rm -rf package-lock.json && rm -rf dist && rm -rf node_modules && mkdir dist", 35 | "build:zip": "rm -rf package-lock.json && zip -rq --exclude=*tests* --exclude=*dist/* dist/query-runner-js.zip .", 36 | "build": "npm run build:init && npm install --production && npm run build:zip", 37 | "clean": "rm -rf node_modules" 38 | }, 39 | "bundleDependencies": [] 40 | } 41 | -------------------------------------------------------------------------------- /source/lambda/multi_account_custom_resources/test/cfn.spec.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | const axios = require('axios'); 5 | const cfn = require('../lib/cfn.js'); 6 | 7 | const event = { 8 | LogicalResourceId: 'testLRId', 9 | StackId: 'testStackId', 10 | RequestId: 'testRequestId', 11 | ResponseURL: 'http://example.com' 12 | }; 13 | 14 | const logStreamName = 'testLSName'; 15 | const responseData = {status: 200, data: 'testData'} 16 | 17 | jest.mock("axios"); 18 | axios.put.mockImplementation(() => Promise.resolve({status: 200, data:{}})); 19 | 20 | describe('Test sending CFN response', () => { 21 | it('should call axios.put to send CFN response', async () => { 22 | await cfn.send(event, logStreamName, responseData); 23 | expect(axios.put).toHaveBeenCalled(); 24 | }); 25 | }); 26 | -------------------------------------------------------------------------------- /source/lambda/multi_account_custom_resources/test/eventbridge.spec.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | const mockEventBridge = { 7 | putPermission: jest.fn(), 8 | removePermission: jest.fn(), 9 | describeEventBus: jest.fn(), 10 | }; 11 | const eb = require('../lib/eventbridge.js'); 12 | const { ServiceException } = require('@smithy/smithy-client'); 13 | const accountPrincipalType = 'Account'; 14 | const accountPrincipal = 'testAccount'; 15 | const orgPrincipalType = 'Account'; 16 | const orgPrincipal = 'testOrgId'; 17 | const eventBusName = 'testEventBusName'; 18 | 19 | jest.mock( 20 | '@aws-sdk/client-eventbridge', 21 | () => ({ 22 | __esmodule: true, 23 | EventBridge: jest.fn(() => mockEventBridge), 24 | }) 25 | ); 26 | 27 | describe('When testing event bridge APIs', () => { 28 | beforeEach(() => { 29 | mockEventBridge.putPermission.mockReset(); 30 | mockEventBridge.removePermission.mockReset(); 31 | mockEventBridge.describeEventBus.mockReset(); 32 | }); 33 | 34 | it('should successfully put permission for account', async () => { 35 | const response = await eb.putPermission(accountPrincipalType, accountPrincipal, eventBusName); 36 | expect(response).not.toBeNull(); 37 | }); 38 | 39 | it('should successfully remove permission from account', async () => { 40 | const response = await eb.removePermission(accountPrincipal, eventBusName); 41 | expect(response).not.toBeNull(); 42 | }); 43 | 44 | it('should successfully put permission for organization', async () => { 45 | const response = await eb.putPermission(orgPrincipalType, accountPrincipal, eventBusName); 46 | expect(response).not.toBeNull(); 47 | }); 48 | 49 | it('should successfully remove permission from organization', async () => { 50 | const response = await eb.removePermission(orgPrincipal, eventBusName); 51 | expect(response).not.toBeNull(); 52 | }); 53 | it('should successfully describe event bus', async () => { 54 | const response = await eb.describeEventBus(eventBusName); 55 | expect(response).not.toBeNull(); 56 | }); 57 | it('should return undefined when resource is not found', async () => { 58 | const mockError = new ServiceException({name: 'ResourceNotFoundException'}); 59 | mockEventBridge.removePermission.mockImplementation(async () => { throw mockError }); 60 | const response = await eb.removePermission(orgPrincipal, eventBusName); 61 | expect(response).toEqual(undefined); 62 | }); 63 | it('should throw when removing permissions throws anything other than ResourceNotFoundException', async () => { 64 | const mockError = new ServiceException({name: 'OtherException'}); 65 | mockEventBridge.removePermission.mockImplementation(async () => { throw mockError }); 66 | expect(eb.removePermission(orgPrincipal, eventBusName)).rejects.toThrow(mockError); 67 | }); 68 | }); 69 | -------------------------------------------------------------------------------- /source/lambda/multi_account_custom_resources/test/logger.spec.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | let Logger = new (require('../lib/logger'))(); 7 | 8 | describe('#Logger', () => { 9 | describe('#logger', () => { 10 | it('check with LOG_LEVEL=INFO', () => { 11 | Logger.loglevel = 'INFO'; 12 | Logger.log('INFO', 'INFO_MESSAGE'); 13 | Logger.log('WARN', 'WARN_MESSAGE'); 14 | Logger.log('ERROR', 'ERROR_MESSAGE'); 15 | Logger.log('DEBUG', 'DEBUG_MESSAGE'); 16 | }); 17 | 18 | it('check with LOG_LEVEL=WARN', () => { 19 | Logger.loglevel = 'WARN'; 20 | Logger.log('INFO', 'INFO_MESSAGE'); 21 | Logger.log('WARN', 'WARN_MESSAGE'); 22 | Logger.log('ERROR', 'ERROR_MESSAGE'); 23 | Logger.log('DEBUG', 'DEBUG_MESSAGE'); 24 | }); 25 | 26 | it('check with LOG_LEVEL=ERROR', () => { 27 | Logger.loglevel = 'ERROR'; 28 | Logger.log('INFO', 'INFO_MESSAGE'); 29 | Logger.log('WARN', 'WARN_MESSAGE'); 30 | Logger.log('ERROR', 'ERROR_MESSAGE'); 31 | Logger.log('DEBUG', 'DEBUG_MESSAGE'); 32 | }); 33 | 34 | it('check with LOG_LEVEL=DEBUG', () => { 35 | Logger.loglevel = 'DEBUG'; 36 | Logger.log('INFO', 'INFO_MESSAGE'); 37 | Logger.log('WARN', 'WARN_MESSAGE'); 38 | Logger.log('ERROR', 'ERROR_MESSAGE'); 39 | Logger.log('DEBUG', 'DEBUG_MESSAGE'); 40 | }); 41 | }); 42 | }); 43 | -------------------------------------------------------------------------------- /source/lambda/multi_account_custom_resources/test/manage_s3_bucket_policy.spec.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | const mockGetS3BucketPolicy = jest.fn(); 6 | const manageBucketPolicy = require('../manage_s3_bucket_policy'); 7 | const { ServiceException } = require('@smithy/smithy-client'); 8 | 9 | const accountPrincipalType = 'Account'; 10 | const accountPrincipalList = ['111111111111', '222222222222']; 11 | const orgPrincipalType = 'Organization'; 12 | const orgPrincipalList = ['o-xxxxxxxx', 'o-yyyyyyyy']; 13 | const bucketName = 'testBucketName'; 14 | const multiAcctBucketPSID = 'testAccount1'; 15 | 16 | const existingPS = [{ 17 | Sid: "testAccount1", 18 | Effect: "Allow", 19 | Principal: { 20 | AWS: "testAccount1" 21 | }, 22 | Action: "s3:GetBucketLocation", 23 | Resource: "arn:aws:s3:::bucket" 24 | }, 25 | { 26 | Sid: "testAccount2", 27 | Effect: "Allow", 28 | Principal: { 29 | AWS: "testAccount2" 30 | }, 31 | Action: "s3:GetBucketLocation", 32 | Resource: "arn:aws:s3:::bucket" 33 | }]; 34 | 35 | const newPS = [{ 36 | Sid: "testAccount1", 37 | Effect: "Allow", 38 | Principal: { 39 | AWS: "testAccount" 40 | }, 41 | Action: "s3:GetBucketLocation", 42 | Resource: "arn:aws:s3:::bucket" 43 | }]; 44 | 45 | jest.mock( 46 | '../lib/s3_bucket_policy', 47 | () => ({ 48 | __esmodule: true, 49 | getS3BucketPolicy: mockGetS3BucketPolicy.mockReturnValue(existingPS), 50 | putS3BucketPolicy: jest.fn().mockReturnThis(), 51 | deleteS3BucketPolicy: jest.fn().mockReturnThis(), 52 | promise: jest.fn() 53 | }) 54 | ); 55 | 56 | describe('Test managing s3 bucket policy', () => { 57 | it('should successfully put s3 bucket policy', async () => { 58 | const response = await manageBucketPolicy.putS3BucketPolicy( 59 | accountPrincipalType, 60 | accountPrincipalList, 61 | bucketName, 62 | multiAcctBucketPSID 63 | ); 64 | expect(response).not.toBeNull(); 65 | }); 66 | it('should successfully get s3 bucket policy', async () => { 67 | const response = await manageBucketPolicy.getExistingS3BucketPolicy(bucketName); 68 | 
expect(response).not.toBeNull(); 69 | }); 70 | it('should successfully build s3 bucket policy statement', async () => { 71 | const response = await manageBucketPolicy.buildMultiAcctS3BucketPolicyStatement( 72 | orgPrincipalType, 73 | orgPrincipalList, 74 | bucketName, 75 | multiAcctBucketPSID 76 | ); 77 | expect(response).not.toBeNull(); 78 | }); 79 | it('should successfully add s3 bucket policy statement', async () => { 80 | const response = await manageBucketPolicy.addMultiAcctBucketPolicyStatement( 81 | existingPS, 82 | newPS, 83 | multiAcctBucketPSID 84 | ); 85 | expect(response).not.toBeNull(); 86 | }); 87 | it('Should return empty object when there is no bucket policy ', async () => { 88 | const mockError = new ServiceException({name: 'NoSuchBucketPolicy'}); 89 | mockGetS3BucketPolicy.mockImplementation(async () => { throw mockError }); 90 | const response = await manageBucketPolicy.getExistingS3BucketPolicy('test-bucket'); 91 | expect(response).toEqual({}); 92 | }); 93 | }); 94 | -------------------------------------------------------------------------------- /source/lambda/multi_account_custom_resources/test/s3_bucket_policy.spec.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | const mockS3Service = { 6 | deleteBucketPolicy: jest.fn(), 7 | getBucketPolicy: jest.fn(), 8 | }; 9 | const s3BucketPolicy = require('../lib/s3_bucket_policy'); 10 | const { ServiceException } = require('@smithy/smithy-client'); 11 | const bucketName = 'testBucketName'; 12 | jest.mock( 13 | '@aws-sdk/client-s3', 14 | () => ({ 15 | __esmodule: true, 16 | S3: jest.fn(() => mockS3Service), 17 | }) 18 | ); 19 | 20 | describe('Test s3 bucket policy', () => { 21 | beforeEach(() => { 22 | mockS3Service.getBucketPolicy.mockReset(); 23 | mockS3Service.deleteBucketPolicy.mockReset(); 24 | }); 25 | 26 | it('should delete s3 bucket policy', async () => { 27 | const response = await s3BucketPolicy.deleteS3BucketPolicy(bucketName); 28 | expect(response).not.toBeNull(); 29 | }); 30 | it('should return undefined when getting bucket policy that does not exist', async () => { 31 | const mockError = new ServiceException({name: 'NoSuchBucketPolicy'}); 32 | mockS3Service.getBucketPolicy.mockImplementation(async () => { throw mockError }); 33 | try { 34 | await s3BucketPolicy.getS3BucketPolicy(bucketName); 35 | } catch (err) { 36 | expect(err).toEqual(mockError); 37 | } 38 | }); 39 | it('should throw error when deleting bucket policy returns exception other than NoSuchBucketPolicy', async () => { 40 | const mockError = new ServiceException({name: 'OtherException'}); 41 | mockS3Service.deleteBucketPolicy.mockImplementation(async () => { throw mockError }); 42 | expect(s3BucketPolicy.deleteS3BucketPolicy(bucketName)).rejects.toThrow(mockError); 43 | }); 44 | }); 45 | 46 | -------------------------------------------------------------------------------- /source/lambda/query_runner/jest.config.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | module.exports = { 5 | testEnvironment: 'node', 6 | testMatch: ['test/**/*.[jt]s?(x)', '**/?(*.)+(spec|test).[jt]s?(x)'], 7 | collectCoverageFrom: ['*.js', 'lib/*.js', '!test/*.js', '!jest.config.js'], 8 | coverageReporters: [['lcov', { projectRoot: '../../../' }], 'text'] 9 | }; 10 | -------------------------------------------------------------------------------- /source/lambda/query_runner/lib/cfn.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | const axios = require('axios'); 5 | 6 | let sendResponse = async (event, context, responseStatus, responseData) => { 7 | let data; 8 | 9 | const responseBody = JSON.stringify({ 10 | Status: responseStatus, 11 | Reason: 'See the details in CloudWatch Log Stream: ' + context.logStreamName, 12 | PhysicalResourceId: event.LogicalResourceId, 13 | StackId: event.StackId, 14 | RequestId: event.RequestId, 15 | LogicalResourceId: event.LogicalResourceId, 16 | Data: responseData 17 | }); 18 | 19 | const params = { 20 | url: event.ResponseURL, 21 | port: 443, 22 | method: 'put', 23 | headers: { 24 | 'content-type': '', 25 | 'content-length': responseBody.length 26 | }, 27 | data: responseBody 28 | }; 29 | 30 | data = await axios(params); 31 | return data.status; 32 | }; 33 | 34 | module.exports = { 35 | send: sendResponse 36 | }; 37 | -------------------------------------------------------------------------------- /source/lambda/query_runner/lib/execute_athena_query.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | const { Athena } = require('@aws-sdk/client-athena'); 7 | const LOGGER = new (require('./logger'))(); 8 | 9 | let options = {}; 10 | const userAgentExtra = process.env.UserAgentExtra; 11 | if (userAgentExtra) { 12 | options = { customUserAgent: userAgentExtra }; 13 | } 14 | const athena = new Athena(options); 15 | 16 | /** 17 | * Execute Athena Query 18 | * @param dbName 19 | * @param workGroup 20 | * @param queryString 21 | */ 22 | const ExecuteAthenaQuery = async (dbName, workGroup, queryString) => { 23 | try { 24 | LOGGER.log('INFO', '[ExecuteAthenaQuery] Start'); 25 | 26 | const params = { 27 | QueryString: queryString.toString(), 28 | QueryExecutionContext: { Database: dbName }, 29 | WorkGroup: workGroup 30 | }; 31 | 32 | LOGGER.log('INFO', 'Query params: ' + JSON.stringify(params, null, 2)); 33 | LOGGER.log('INFO', 'Query string: \n' + queryString.toString()); 34 | 35 | const response = await athena.startQueryExecution(params); 36 | const queryExecutionId = response.QueryExecutionId; 37 | 38 | LOGGER.log('INFO', '[ExecuteAthenaQuery] response: ' + JSON.stringify(response)); 39 | LOGGER.log('INFO', '[ExecuteAthenaQuery] queryExecutionId: ' + queryExecutionId); 40 | LOGGER.log('INFO', '[ExecuteAthenaQuery] END'); 41 | 42 | return queryExecutionId; 43 | } catch (err) { 44 | LOGGER.log('ERROR', err); 45 | } 46 | }; 47 | 48 | module.exports = { 49 | executeAthenaQuery: ExecuteAthenaQuery 50 | }; 51 | -------------------------------------------------------------------------------- /source/lambda/query_runner/lib/logger.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. 
All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | class Logger { 7 | constructor() { 8 | this.loglevel = process.env.LOG_LEVEL; 9 | this.LOGLEVELS = { 10 | ERROR: 1, 11 | WARN: 2, 12 | INFO: 3, 13 | DEBUG: 4 14 | }; 15 | } 16 | 17 | log(level, message) { 18 | if (this.LOGLEVELS[level] <= this.LOGLEVELS[this.loglevel]) console.log(`[${level}][${message}]`); 19 | } 20 | } 21 | 22 | module.exports = Object.freeze(Logger); 23 | -------------------------------------------------------------------------------- /source/lambda/query_runner/lib/metrics_helper.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | const axios = require('axios'); 5 | const LOGGER = new (require('./logger'))(); 6 | 7 | /** 8 | * Send anonymous usage metrics 9 | * @param solutionId 10 | * @param uuid 11 | * @param metricsData 12 | * @param metricsURL 13 | */ 14 | const SendMetrics = async (solutionId, uuid, metricsData, metricsURL) => { 15 | LOGGER.log('INFO', '[metrics_helper] Start sending Anonymous Metric.'); 16 | 17 | let data; 18 | 19 | try { 20 | const metrics = { 21 | Solution: solutionId, 22 | UUID: uuid, 23 | //Formatting the time string to the format 'YYYY-MM-DD HH:mm:ss.S' 24 | TimeStamp: new Date().toISOString().replace('T', ' ').replace('Z', ' ').substring(0, 21), 25 | Data: metricsData 26 | }; 27 | const params = { 28 | method: 'post', 29 | port: 443, 30 | url: metricsURL, 31 | headers: { 32 | 'Content-Type': 'application/json' 33 | }, 34 | data: metrics 35 | }; 36 | data = await axios(params); 37 | } catch (err) { 38 | LOGGER.log('ERROR', err); 39 | throw err; 40 | } 41 | 42 | LOGGER.log('INFO', '[metrics_helper] End sending Anonymous Metric.'); 43 | 44 | return data.status; 45 | }; 46 | 47 | module.exports = { 48 | sendMetrics: SendMetrics 49 | }; 50 | -------------------------------------------------------------------------------- /source/lambda/query_runner/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "query-runner-js", 3 | "description": "Lambda function for building and executing athena queries", 4 | "main": "index.js", 5 | "author": { 6 | "name": "Amazon Web Services", 7 | "url": "https://aws.amazon.com/solutions" 8 | }, 9 | "license": "Apache-2.0", 10 | "dependencies": { 11 | "async": "^3.2.4", 12 | "axios": "^1.7.4", 13 | "uuid": "^9.0.0" 14 | }, 15 | "devDependencies": { 16 | "@aws-sdk/client-athena": "^3.470.0", 17 | "@aws-sdk/client-cloudwatch": "^3.470.0", 18 | "@aws-sdk/client-glue": "^3.470.0", 19 | "aws-sdk-mock": "^5.8.0", 20 | "axios-mock-adapter": "^1.21.2", 21 | "chai": "^4.3.7", 22 | "jest": "^29.3.1", 23 | "mocha": "^10.2.0", 24 | "npm-run-all": "^4.1.5", 25 | "nyc": "^15.1.0", 26 | "sinon": "^15.0.1", 27 | "sinon-chai": "^3.7.0" 28 | }, 29 | "overrides": { 30 | "semver": "~7.5.2" 31 | }, 32 | "scripts": { 33 | "pretest": "npm install", 34 | "test": "jest --coverage --setupFiles ./test/config.js", 35 | "build:init": "rm -rf package-lock.json && rm -rf dist && rm -rf node_modules && mkdir dist", 36 | "build:zip": "rm -rf package-lock.json && zip -rq --exclude=*tests* --exclude=*template.yml --exclude=*dist/* dist/query-runner-js.zip .", 37 | "build": "npm run build:init && npm install --production && npm run build:zip", 38 | "clean": "rm -rf node_modules" 39 | }, 40 | "bundleDependencies": [] 41 | } 42 | 
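As a rough illustration of how the query_runner helpers above fit together, the sketch below wires buildAddAthenaPartitionQuery, executeAthenaQuery and sendMetrics the way the add_athena_partition tests that follow exercise them. The handler itself (add_athena_partition.js) is not reproduced in this listing, so the wiring and the metrics payload here are illustrative assumptions rather than repository code; the environment variables are the ones set in test/config.js.

// Illustrative wiring only -- not repository code. Assumes the env vars from test/config.js.
const buildQuery = require('./build_athena_query');
const executeQuery = require('./lib/execute_athena_query');
const metricsHelper = require('./lib/metrics_helper');

const addDailyPartition = async () => {
  // Build the ADD PARTITION statement for the metrics table.
  const queryString = buildQuery.buildAddAthenaPartitionQuery(
    process.env.MetricsDBName,
    process.env.MetricsTableName
  );

  // Run it in the solution's Athena workgroup; returns the query execution id.
  const queryExecutionId = await executeQuery.executeAthenaQuery(
    process.env.MetricsDBName,
    process.env.AthenaWorkGroup,
    queryString
  );

  // Optionally report anonymous usage metrics (payload shown here is a placeholder).
  if (process.env.SendAnonymousUsageData === 'Yes') {
    await metricsHelper.sendMetrics(
      process.env.SolutionId,
      process.env.UUID,
      { version: process.env.Version, data_type: 'lambda' },
      process.env.MetricsURL
    );
  }

  return queryExecutionId;
};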
-------------------------------------------------------------------------------- /source/lambda/query_runner/test/add_athena_partition.spec.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | const add_athena_partition = require('../add_athena_partition'); 5 | 6 | jest.mock( 7 | '@aws-sdk/client-cloudwatch', 8 | () => ({ 9 | __esmodule: true, 10 | CloudWatch: jest.fn().mockReturnValue({ 11 | getMetricStatistics: jest.fn().mockImplementation(data => { 12 | console.log('inside the cloudwatch getMetricsStatistics method'); 13 | expect(data['Namespace']).toBe('AWS/Athena'); 14 | expect(data['Dimensions'][0]['Name']).toBe('QueryState'); 15 | expect(data['Dimensions'][0]['Value']).toBe('SUCCEEDED'); 16 | expect(data['Dimensions'][1]['Name']).toBe('QueryType'); 17 | expect(data['Dimensions'][1]['Value']).toBe('DML'); 18 | expect(data['Dimensions'][2]['Name']).toBe('WorkGroup'); 19 | expect(data['Dimensions'][2]['Value']).toBe('AWSDevOpsDashboardWG-2820b493-864c-4ca1-99d3-7174fef7f374'); 20 | }) 21 | }), 22 | 23 | Athena: jest.fn().mockImplementation(options => {}) 24 | }), 25 | { virtual: true } 26 | ); 27 | 28 | jest.mock( 29 | '../build_athena_query', 30 | () => ({ 31 | __esmodule: true, 32 | buildAddAthenaPartitionQuery: jest.fn().mockImplementation((athenaDB, athenaTable) => { 33 | expect(athenaDB).toBe('metrics_db'); 34 | expect(athenaTable).toBe('metrics_table'); 35 | return 'queryString'; 36 | }) 37 | }), 38 | { virtual: true } 39 | ); 40 | 41 | jest.mock( 42 | '../lib/execute_athena_query', 43 | () => ({ 44 | __esmodule: true, 45 | executeAthenaQuery: jest.fn().mockImplementation((athenaDB, athenaWorkGroup, queryString) => { 46 | expect(queryString).toBe('queryString'); 47 | expect(athenaDB).toBe('metrics_db'); 48 | expect(athenaWorkGroup).toBe('AWSDevOpsDashboardWG-2820b493-864c-4ca1-99d3-7174fef7f374'); 49 | }) 50 | }), 51 | { virtual: true } 52 | ); 53 | 54 | jest.mock( 55 | '../lib/metrics_helper', 56 | () => ({ 57 | __esmodule: true, 58 | sendMetrics: jest.fn().mockImplementation((solutionId, solutionUUID, data, metricsURL) => { 59 | expect(solutionId).toBe('SO0103'); 60 | expect(solutionUUID).toBe('2820b493-864c-4ca1-99d3-7174fef7f374'); 61 | expect(metricsURL).toBe('https://example.com'); 62 | }) 63 | }), 64 | { virtual: true } 65 | ); 66 | 67 | describe('Test suite for add athena partition.', () => { 68 | test('add partition', async () => { 69 | await add_athena_partition.handler(); 70 | }); 71 | }); 72 | -------------------------------------------------------------------------------- /source/lambda/query_runner/test/cfn.spec.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | const axios = require('axios'); 5 | const cfn = require('../lib/cfn.js'); 6 | 7 | const event = { 8 | LogicalResourceId: 'testLRId', 9 | StackId: 'testStackId', 10 | RequestId: 'testRequestId', 11 | ResponseURL: 'http://example.com' 12 | }; 13 | const context = {logStreamName: 'testLSName'} 14 | const responseData = {Data: 'testData'} 15 | const responseStatus = '200' 16 | 17 | jest.mock("axios"); 18 | axios.mockImplementation(() => Promise.resolve({status: 200, data:{}})); 19 | 20 | describe('Test sending CFN response', () => { 21 | it('should call axios.put to send CFN response', async () => { 22 | await cfn.send(event, context, responseStatus, responseData); 23 | expect(axios).toHaveBeenCalled(); 24 | }); 25 | }); 26 | -------------------------------------------------------------------------------- /source/lambda/query_runner/test/config.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | process.env.MetricsDBName = 'metrics_db'; 5 | process.env.MetricsTableName = 'metrics_table'; 6 | process.env.CodeBuildMetricsTableName = 'aws_codebuild_metrics_table'; 7 | process.env.AthenaWorkGroup = 'AWSDevOpsDashboardWG-2820b493-864c-4ca1-99d3-7174fef7f374'; 8 | process.env.SendAnonymousUsageData = 'Yes'; 9 | process.env.SolutionId = 'SO0103'; 10 | process.env.Version = 'v1.1.0'; 11 | process.env.UUID = '2820b493-864c-4ca1-99d3-7174fef7f374'; 12 | process.env.Region = 'us-east-1'; 13 | process.env.MetricsURL = 'https://example.com'; 14 | process.env.UserAgentExtra = '/AwsSolutions/SO0103/v1.2.0'; 15 | -------------------------------------------------------------------------------- /source/lambda/query_runner/test/execute_athena_query.spec.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | 7 | const executeQuery = require('../lib/execute_athena_query'); 8 | 9 | const dbName = "metrics_db" 10 | const workGroup = "AWSDevOpsDashboardWG-2820b493-864c-4ca1-99d3-7174fef7f374" 11 | const queryString = "select 1 from metrics_db.table" 12 | 13 | jest.mock( 14 | '@aws-sdk/client-athena', 15 | () => { 16 | const mockAthenaService = { 17 | startQueryExecution: jest.fn().mockReturnThis(), 18 | promise: jest.fn() 19 | }; 20 | return { 21 | __esmodule: true, 22 | Athena: jest.fn(() => mockAthenaService), 23 | }; 24 | }, 25 | { virual: true } 26 | ); 27 | 28 | describe('Test executing athena query.', () => { 29 | it('should successfully execute Athena query', async () => { 30 | const response = await executeQuery.executeAthenaQuery( 31 | dbName, 32 | workGroup, 33 | queryString 34 | ); 35 | expect(response).not.toBeNull(); 36 | }); 37 | }); 38 | -------------------------------------------------------------------------------- /source/lambda/query_runner/test/logger.spec.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | let Logger = new (require('../lib/logger'))(); 7 | 8 | describe('#Logger', () => { 9 | describe('#logger', () => { 10 | it('check with LOG_LEVEL=INFO', () => { 11 | Logger.loglevel = 'INFO'; 12 | Logger.log('INFO', 'INFO_MESSAGE'); 13 | Logger.log('WARN', 'WARN_MESSAGE'); 14 | Logger.log('ERROR', 'ERROR_MESSAGE'); 15 | Logger.log('DEBUG', 'DEBUG_MESSAGE'); 16 | }); 17 | 18 | it('check with LOG_LEVEL=WARN', () => { 19 | Logger.loglevel = 'WARN'; 20 | Logger.log('INFO', 'INFO_MESSAGE'); 21 | Logger.log('WARN', 'WARN_MESSAGE'); 22 | Logger.log('ERROR', 'ERROR_MESSAGE'); 23 | Logger.log('DEBUG', 'DEBUG_MESSAGE'); 24 | }); 25 | 26 | it('check with LOG_LEVEL=ERROR', () => { 27 | Logger.loglevel = 'ERROR'; 28 | Logger.log('INFO', 'INFO_MESSAGE'); 29 | Logger.log('WARN', 'WARN_MESSAGE'); 30 | Logger.log('ERROR', 'ERROR_MESSAGE'); 31 | Logger.log('DEBUG', 'DEBUG_MESSAGE'); 32 | }); 33 | 34 | it('check with LOG_LEVEL=DEBUG', () => { 35 | Logger.loglevel = 'DEBUG'; 36 | Logger.log('INFO', 'INFO_MESSAGE'); 37 | Logger.log('WARN', 'WARN_MESSAGE'); 38 | Logger.log('ERROR', 'ERROR_MESSAGE'); 39 | Logger.log('DEBUG', 'DEBUG_MESSAGE'); 40 | }); 41 | }); 42 | }); 43 | -------------------------------------------------------------------------------- /source/lambda/query_runner/test/metrics_helper.spec.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | const axios = require('axios'); 5 | const expect = require('chai').expect; 6 | const MockAdapter = require('axios-mock-adapter'); 7 | 8 | let metricsHelper = require('../lib/metrics_helper.js'); 9 | 10 | const solutionId = 'SO0000'; 11 | const uuid = '999-999'; 12 | const metricsURL = 'https://metrics.awssolutionsbuilder.com/generic'; 13 | 14 | const data = { 15 | version: 'v1', 16 | data_type: 'lambda', 17 | region: 'us-east-1' 18 | }; 19 | 20 | describe('#SEND METRICS', () => { 21 | it('should return "200" on a send metrics success', async () => { 22 | let mock = new MockAdapter(axios); 23 | mock.onPost().reply(200, {}); 24 | 25 | let response = await metricsHelper.sendMetrics(solutionId, uuid, data, metricsURL); 26 | expect(response).to.equal(200); 27 | }); 28 | 29 | it('should return "Network Error" on connection timeout', async () => { 30 | let mock = new MockAdapter(axios); 31 | mock.onPut().networkError(); 32 | 33 | await metricsHelper.sendMetrics(solutionId, uuid, data, metricsURL).catch(err => { 34 | expect(err.toString()).to.equal('Error: Request failed with status code 404'); 35 | }); 36 | }); 37 | }); 38 | -------------------------------------------------------------------------------- /source/lambda/quicksight-custom-resources/.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | omit = 3 | *images/* 4 | *tests/* 5 | *.venv-*/* 6 | *test/* 7 | */__init__.py 8 | *setup.py 9 | *certifi/* 10 | *chardet/* 11 | *crhelper/* 12 | *idna/* 13 | *requests/* 14 | *six.py 15 | *tenacity/* 16 | *urllib3/* 17 | *yaml/* 18 | 19 | source = 20 | . 
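The cfn helper in query_runner/lib/cfn.js (exercised by cfn.spec.js above) is meant to be called from a Lambda-backed CloudFormation custom resource. That handler is not reproduced in this listing, so the sketch below is an illustrative assumption of how such a handler might report back to CloudFormation; the handler name and response data are placeholders, not repository code.

// Illustrative only -- not repository code. Shows one way a custom-resource handler
// could use the cfn.send(event, context, responseStatus, responseData) helper above.
const cfn = require('./lib/cfn');
const LOGGER = new (require('./lib/logger'))();

exports.exampleHandler = async (event, context) => {
  try {
    // ...perform the Create/Update/Delete work for the custom resource here...
    await cfn.send(event, context, 'SUCCESS', { Message: 'operation complete' });
  } catch (err) {
    LOGGER.log('ERROR', err.message);
    // Always respond so the CloudFormation stack does not hang waiting for a signal.
    await cfn.send(event, context, 'FAILED', { Message: err.message });
  }
};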
-------------------------------------------------------------------------------- /source/lambda/quicksight-custom-resources/.gitignore: -------------------------------------------------------------------------------- 1 | # exclude python 3rd party modules 2 | *.dist-info/ 3 | bin 4 | certifi/ 5 | chardet/ 6 | crhelper/ 7 | idna/ 8 | requests/ 9 | six.py 10 | tenacity/ 11 | ## crhelper tests directory 12 | tests/ 13 | urllib3/ 14 | _yaml.cpython-*.so 15 | yaml 16 | charset_normalizer/ 17 | -------------------------------------------------------------------------------- /source/lambda/quicksight-custom-resources/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-solutions/aws-devops-monitoring-dashboard/6c02457a0bcf18fa7b37434b049597001c8d2478/source/lambda/quicksight-custom-resources/__init__.py -------------------------------------------------------------------------------- /source/lambda/quicksight-custom-resources/pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "quicksight-custom-resources" 3 | package-mode = false 4 | 5 | [tool.poetry.dependencies] 6 | python = "~3.12" 7 | crhelper = "^2.0.11" 8 | PyYAML = "^6.0" 9 | requests = "^2.32.3" 10 | tenacity = "^8.2.2" 11 | urllib3 = "<2" 12 | 13 | [tool.poetry.dev-dependencies] 14 | mock = "^5.1.0" 15 | moto = "^4.2.6" 16 | pytest = "^7.4.2" 17 | pytest-cov = "^4.1.0" 18 | pytest-env = "^1.0.1" 19 | boto3 = "^1.35.0" 20 | botocore = "^1.35.0" 21 | 22 | [build-system] 23 | requires = ["poetry-core"] 24 | build-backend = "poetry.core.masonry.api" -------------------------------------------------------------------------------- /source/lambda/quicksight-custom-resources/pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | log_cli = False 3 | #log_cli = True 4 | 5 | log_cli_level = INFO 6 | #log_cli_level = DEBUG 7 | 8 | log_format = %(asctime)s %(levelname)s %(message)s 9 | #log_format = %(asctime)s %(levelname)s %(module)s %(message)s 10 | #log_format = %(asctime)s %(levelname)s [%(module)s, %(funcName)s] %(message)s 11 | #log_format = %(asctime)s %(levelname)s [%(filename)s, %(module)s, %(funcName)s] %(message)s 12 | #log_format = %(asctime)s %(levelname)s [%(pathname)s, %(module)s, %(funcName)s] %(message)s 13 | 14 | log_date_format = %Y-%m-%d %H:%M:%S 15 | 16 | env = 17 | MOTO_ACCOUNT_ID=MOCK_ACCOUNT 18 | 19 | filterwarnings = 20 | # Being strict here and treating warnings as error 21 | error 22 | # We can safely ignore this particular boto imp DeprecationWarning in the test 23 | ignore:the imp module is deprecated in favour of importlib:DeprecationWarning:boto 24 | # Ignore the utcnow() deprecation warning from botocore 25 | ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:botocore 26 | 27 | # Exclude source/lambda/quicksight-custom-resources/tests as it is deployed by the crhepler library and no need to run the tests in this solution 28 | addopts = --ignore=tests -------------------------------------------------------------------------------- /source/lambda/quicksight-custom-resources/test/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-solutions/aws-devops-monitoring-dashboard/6c02457a0bcf18fa7b37434b049597001c8d2478/source/lambda/quicksight-custom-resources/test/__init__.py 
-------------------------------------------------------------------------------- /source/lambda/quicksight-custom-resources/test/conftest.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | import os 6 | import pytest 7 | 8 | @ pytest.fixture(autouse=True) 9 | def aws_environment_variables(): 10 | """Mocked AWS environment variables such as AWS credentials and region""" 11 | os.environ['AWS_ACCESS_KEY_ID'] = 'mocked-aws-access-key-id' 12 | os.environ['AWS_SECRET_ACCESS_KEY'] = 'mocked-aws-secret-access-key' 13 | os.environ['AWS_SESSION_TOKEN'] = 'mocked-aws-session-token' 14 | os.environ['AWS_REGION'] = 'us-east-1' # must be a valid region 15 | 16 | collect_ignore_glob = ["tests/*.py"] # crhelper library 17 | collect_ignore = [] 18 | -------------------------------------------------------------------------------- /source/lambda/quicksight-custom-resources/test/dump_environment.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | import test.logger_test_helper 6 | import logging 7 | 8 | from test.fixtures.quicksight_test_fixture import dump_env 9 | 10 | logger = logging.getLogger(__name__) 11 | 12 | def check_env(): 13 | dump_env() 14 | 15 | 16 | if __name__ == "__main__": 17 | check_env() 18 | -------------------------------------------------------------------------------- /source/lambda/quicksight-custom-resources/test/logger_test_helper.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | import json 6 | import logging 7 | 8 | def set_log_overrides(): 9 | rootLogger = logging.getLogger() 10 | rootLogger.warning(f'Root logger level: {rootLogger.level}. Overriding log level for selected modules') 11 | logging.getLogger().setLevel(logging.WARNING) 12 | logging.getLogger('hooks').setLevel(logging.WARNING) 13 | logging.getLogger('botocore.hooks').setLevel(logging.WARNING) 14 | logging.getLogger('hooks').setLevel(logging.WARNING) 15 | 16 | 17 | set_log_overrides() 18 | 19 | 20 | logger = logging.getLogger(__name__) 21 | 22 | def dump_state(obj, msg=None): 23 | if not msg: 24 | msg = "Dump object " 25 | if hasattr(obj, 'get_data'): 26 | obj_data = obj.get_data() 27 | else: 28 | obj_data = obj 29 | obj_data_json = json.dumps(obj_data, indent=2, sort_keys=True) 30 | logger.debug(f'{msg}, obj data json: {obj_data_json}') 31 | -------------------------------------------------------------------------------- /source/lambda/quicksight-custom-resources/test/test_dashboard.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | import test.logger_test_helper 6 | import logging 7 | import pytest 8 | from moto import mock_sts 9 | 10 | from util.quicksight_application import QuicksightApplication 11 | from util.dataset import DataSet 12 | from util.dashboard import Dashboard 13 | 14 | from test.fixtures.quicksight_dashboard_fixtures import DashboardStubber 15 | from test.fixtures.quicksight_dataset_fixtures import ( 16 | data_set_type, 17 | minimal_data_sets_stub, 18 | quicksight_create_data_set_stubber, 19 | quicksight_data_set_stubber, 20 | quicksight_delete_data_set_stubber 21 | ) 22 | from test.fixtures.quicksight_template_fixtures import template_arn 23 | from test.fixtures.quicksight_datasource_fixtures import minimal_data_source_stub 24 | from test.fixtures.quicksight_test_fixture import quicksight_application_stub 25 | from test.logger_test_helper import dump_state 26 | 27 | logger = logging.getLogger(__name__) 28 | 29 | 30 | @ mock_sts 31 | def test_dashboard_init(quicksight_application_stub, minimal_data_sets_stub): 32 | obj = Dashboard( 33 | quicksight_application=quicksight_application_stub, 34 | data_sets=minimal_data_sets_stub.data_sets_stub, 35 | quicksight_template_arn=template_arn, 36 | props=None 37 | ) 38 | dump_state(obj, 'Dump dashboard') 39 | 40 | @ mock_sts 41 | def test_dashboard_update_source_entity(quicksight_application_stub, minimal_data_sets_stub, template_arn): 42 | obj = Dashboard( 43 | quicksight_application=quicksight_application_stub, 44 | data_sets=minimal_data_sets_stub.data_sets_stub, 45 | quicksight_template_arn=template_arn, 46 | props=None 47 | ) 48 | 49 | sub_type = 'main' 50 | assert sub_type in obj.config_data 51 | assert 'SourceEntity' in obj.config_data[sub_type] 52 | 53 | source_entity = obj.source_entity._get_map(sub_type, "SourceEntity") 54 | dump_state(source_entity, 'Dump SourceEntity before update') 55 | 56 | assert 'SourceTemplate' in source_entity 57 | source_template = source_entity.get('SourceTemplate', None) 58 | assert 'DataSetReferences' in source_template 59 | assert 'Arn' in source_template 60 | obj.source_entity._update_source_entity(source_entity) 61 | 62 | dump_state(source_entity, 'Dump SourceEntity after update') 63 | 64 | assert template_arn == source_template['Arn'] 65 | 66 | @ mock_sts 67 | def test_dashboard_create( 68 | quicksight_application_stub, 69 | minimal_data_source_stub, 70 | minimal_data_sets_stub, 71 | template_arn, 72 | ): 73 | obj = Dashboard( 74 | quicksight_application=quicksight_application_stub, 75 | data_source=minimal_data_source_stub, 76 | data_sets=minimal_data_sets_stub.data_sets_stub, 77 | quicksight_template_arn=template_arn, 78 | props=None 79 | ) 80 | 81 | sub_type = 'main' 82 | 83 | dump_state(obj, 'Before create') 84 | DashboardStubber.stub_create_dashboard_call(sub_type) 85 | obj.create() 86 | dump_state(obj, 'After create') 87 | 88 | @ mock_sts 89 | def test_dashboard_delete(quicksight_application_stub, minimal_data_sets_stub, template_arn): 90 | obj = Dashboard( 91 | quicksight_application=quicksight_application_stub, 92 | data_sets=minimal_data_sets_stub.data_sets_stub, 93 | quicksight_template_arn=template_arn, 94 | props=None 95 | ) 96 | 97 | sub_type = 'main' 98 | 99 | dump_state(obj, 'Before delete') 100 | DashboardStubber.stub_delete_dashboard_call(sub_type) 101 | obj.delete() 102 | dump_state(obj, 'After delete') 103 | -------------------------------------------------------------------------------- 
/source/lambda/quicksight-custom-resources/test/test_datasource.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | import test.logger_test_helper 6 | import logging 7 | import pytest 8 | from moto import mock_sts 9 | 10 | from util.quicksight_application import QuicksightApplication 11 | from util.dataset import DataSet 12 | from util.datasource import DataSource 13 | 14 | from test.fixtures.quicksight_datasource_fixtures import DataSourceStubber 15 | from test.fixtures.quicksight_test_fixture import ( 16 | quicksight_application_stub, 17 | quicksight_state_all 18 | ) 19 | 20 | from test.logger_test_helper import dump_state 21 | 22 | 23 | logger = logging.getLogger(__name__) 24 | 25 | class DataSourceHelperStub(): 26 | pass 27 | 28 | 29 | # globals 30 | FAKE_ACCOUNT_ID = 'FAKE_ACCOUNT' 31 | FAKE_ACCOUNT_ID_SRC = 'FAKE_ACCOUNT_SRC' 32 | 33 | class Stub(): 34 | pass 35 | 36 | @ mock_sts 37 | def test_data_source_init(quicksight_application_stub): 38 | obj = DataSource( 39 | quicksight_application=quicksight_application_stub, 40 | props=None 41 | ) 42 | dump_state(obj) 43 | 44 | @ mock_sts 45 | def test_data_source_create(quicksight_application_stub): 46 | obj = DataSource( 47 | quicksight_application=quicksight_application_stub, 48 | props=None 49 | ) 50 | 51 | assert obj.athena_workgroup 52 | assert obj.athena_workgroup == "primary" 53 | dump_state(obj) 54 | 55 | sub_type = 'main' 56 | dump_state(obj, 'Before create') 57 | DataSourceStubber.stub_create_data_source_call(sub_type) 58 | response = obj.create() 59 | assert response 60 | assert response["Status"] in [202] 61 | assert response["CreationStatus"] in ["CREATION_IN_PROGRESS"] 62 | assert obj.arn 63 | dump_state(obj, 'After create') 64 | 65 | @ mock_sts 66 | def test_data_source_delete(quicksight_application_stub): 67 | obj = DataSource( 68 | quicksight_application=quicksight_application_stub, 69 | props=None 70 | ) 71 | 72 | dump_state(obj) 73 | 74 | sub_type = 'main' 75 | dump_state(obj, 'Before delete') 76 | DataSourceStubber.stub_delete_data_source_call(sub_type) 77 | obj.delete() 78 | dump_state(obj, 'After delete') 79 | 80 | @ mock_sts 81 | def test_data_source_create_exist(quicksight_application_stub): 82 | obj = DataSource( 83 | quicksight_application=quicksight_application_stub, 84 | props=None 85 | ) 86 | 87 | assert obj.athena_workgroup 88 | assert obj.athena_workgroup == "primary" 89 | 90 | sub_type = "main" 91 | DataSourceStubber.stub_create_data_source_error_call(sub_type) 92 | DataSourceStubber.stub_describe_data_source_call(sub_type) 93 | 94 | # Function under test 95 | response = obj.create() 96 | 97 | # This response is the response to describe_data_source as the code is remaps the response 98 | assert response 99 | assert response["Status"] in ["CREATION_SUCCESSFUL"] 100 | assert obj.arn 101 | -------------------------------------------------------------------------------- /source/lambda/quicksight-custom-resources/test/test_environment.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | import test.logger_test_helper 6 | import logging 7 | import pytest 8 | from moto import mock_sts 9 | 10 | from test.fixtures.quicksight_test_fixture import dump_env 11 | 12 | logger = logging.getLogger(__name__) 13 | 14 | def test_import_application(): 15 | from util.quicksight import QuicksightApi 16 | from util.quicksight_application import QuicksightApplication 17 | from util.datasource import DataSource 18 | from util.dataset import DataSet 19 | from util.analysis import Analysis 20 | from util.dashboard import Dashboard 21 | from util.template import Template 22 | 23 | def test_import_test_environment(): 24 | from test.fixtures.quicksight_dataset_fixtures import quicksight_data_set_stubber 25 | from test.fixtures.quicksight_dataset_fixtures import data_set_type 26 | from test.fixtures.quicksight_dataset_fixtures import quicksight_create_data_set_stubber, quicksight_delete_data_set_stubber 27 | from test.fixtures.quicksight_dataset_fixtures import minimal_data_sets_stub 28 | 29 | from test.fixtures.quicksight_analysis_fixtures import AnalysisStubber 30 | 31 | from test.fixtures.quicksight_test_fixture import quicksight_state_all 32 | 33 | @mock_sts 34 | def test_dummy(): 35 | dump_env() 36 | -------------------------------------------------------------------------------- /source/lambda/quicksight-custom-resources/test/test_helpers.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import os 5 | 6 | import pytest 7 | from botocore.exceptions import ParamValidationError 8 | from moto import mock_sts 9 | 10 | from util.helpers import ( 11 | get_aws_partition, 12 | get_aws_region, 13 | get_aws_account_id, 14 | get_sts_client, 15 | get_quicksight_client, 16 | EnvironmentVariableError, 17 | ) 18 | 19 | @mock_sts 20 | def test_with_aws_account_id(): 21 | assert get_aws_account_id() == "MOCK_ACCOUNT" 22 | 23 | @mock_sts 24 | def test_get_sts_client(): 25 | client = get_sts_client() 26 | assert "https://sts." in client.meta.endpoint_url 27 | 28 | # @mock_sts 29 | def test_get_quicksight_client(): 30 | client = get_quicksight_client() 31 | assert "https://quicksight." 
in client.meta.endpoint_url 32 | 33 | def test_region_missing(): 34 | region = os.environ.pop("AWS_REGION") 35 | with pytest.raises(EnvironmentVariableError): 36 | get_aws_region() 37 | os.environ["AWS_REGION"] = region 38 | 39 | def test_aws_partition(): 40 | region = os.environ.pop("AWS_REGION") 41 | with pytest.raises(EnvironmentVariableError): 42 | get_aws_region() 43 | os.environ["AWS_REGION"] = region 44 | 45 | def test_cn_partition(monkeypatch): 46 | """Set the AWS_REGION env var to assert the behavior.""" 47 | monkeypatch.setenv('AWS_REGION', 'cn-north-1') 48 | assert get_aws_region() == 'cn-north-1' 49 | assert get_aws_partition() == 'aws-cn' 50 | 51 | def test_us_gov_cloud_partition(monkeypatch): 52 | """Set the AWS_REGION env var to assert the behavior.""" 53 | monkeypatch.setenv('AWS_REGION', 'us-gov-east-1') 54 | assert get_aws_region() == 'us-gov-east-1' 55 | assert get_aws_partition() == 'aws-us-gov' 56 | 57 | def test_get_region_us_west_2(monkeypatch): 58 | """Set the AWS_REGION env var to assert the behavior.""" 59 | monkeypatch.setenv('AWS_REGION', 'us-west-2') 60 | assert get_aws_region() == 'us-west-2' 61 | assert get_aws_partition() == 'aws' 62 | 63 | def test_get_aws_region(monkeypatch): 64 | """Set the SECRET env var to assert the behavior.""" 65 | monkeypatch.setenv("SECRET", "top_secret") 66 | # See mocked aws_environment_variables in conftest.py 67 | assert get_aws_region() == "us-east-1" 68 | -------------------------------------------------------------------------------- /source/lambda/quicksight-custom-resources/test/test_logging.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | import logging 6 | import os 7 | import pytest 8 | 9 | from util.logging import get_level, get_logger 10 | 11 | @pytest.fixture(scope="function", autouse=True) 12 | def reset_logging_defaults(): 13 | """Remove any logging configuration defaults that might have existed before starting any test""" 14 | try: 15 | os.environ.pop("LOG_LEVEL") 16 | except KeyError: 17 | pass 18 | 19 | 20 | @pytest.mark.parametrize("level", ["DEBUG", "INFO", "WARNING", "ERROR"]) 21 | def test_valid_levels(level): 22 | os.environ["LOG_LEVEL"] = level 23 | assert get_level() == level 24 | 25 | 26 | def test_invalid_level(): 27 | os.environ["LOG_LEVEL"] = "TRACE" 28 | assert get_level() == "INFO" 29 | os.environ["LOG_LEVEL"] = "INFO" 30 | 31 | 32 | def test_get_logger(): 33 | logger = get_logger(__name__) 34 | assert logger.level == logging.INFO 35 | 36 | 37 | def test_logger_log(caplog): 38 | logger = get_logger(__name__) 39 | logger.error("This is an error") 40 | logger.warning("This is a warning") 41 | logger.info("This is an informational message") 42 | logger.debug("This is a debug message") 43 | assert "This is an error" in caplog.text 44 | assert "This is a warning" in caplog.text 45 | assert "This is an informational message" in caplog.text 46 | assert "This is a debug message" not in caplog.text -------------------------------------------------------------------------------- /source/lambda/quicksight-custom-resources/test/test_template.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | import test.logger_test_helper 6 | import logging 7 | import pytest 8 | from moto import mock_sts 9 | 10 | from util.quicksight_application import QuicksightApplication 11 | from util.template import Template, TemplatePermissionType 12 | 13 | from test.fixtures.quicksight_dataset_fixtures import minimal_data_sets_stub 14 | from test.fixtures.quicksight_template_fixtures import (TemplateStubber, template_arn) 15 | from test.fixtures.quicksight_test_fixture import quicksight_application_stub 16 | from test.logger_test_helper import dump_state 17 | 18 | logger = logging.getLogger(__name__) 19 | 20 | 21 | @pytest.fixture() 22 | def source_template_arn(request): 23 | FAKE_ACCOUNT_ID_SRC = 'FAKE_ACCOUNT_SRC' 24 | return f"arn:aws:quicksight:us-east-1:{FAKE_ACCOUNT_ID_SRC}:template/SO0122-Discovering-Hot-Topics-v1_1_0" 25 | 26 | @ mock_sts 27 | def test_template_init(quicksight_application_stub): 28 | obj = Template( 29 | quicksight_application=quicksight_application_stub, 30 | data_sets=None, 31 | props=None 32 | ) 33 | 34 | dump_state(obj) 35 | 36 | @ mock_sts 37 | def test_template_init_minimal_data_sets(quicksight_application_stub, minimal_data_sets_stub): 38 | stub = minimal_data_sets_stub 39 | 40 | obj = Template( 41 | quicksight_application=quicksight_application_stub, 42 | data_sets=stub.data_sets_stub, 43 | props=None 44 | ) 45 | 46 | dump_state(obj) 47 | 48 | @ mock_sts 49 | def test_template_create_from_analysis(quicksight_application_stub, minimal_data_sets_stub): 50 | obj = Template( 51 | quicksight_application=quicksight_application_stub, 52 | data_sets=minimal_data_sets_stub.data_sets_stub, 53 | props=None 54 | ) 55 | 56 | sub_type = 'main' 57 | TemplateStubber.stub_create_template_call(sub_type) 58 | 59 | class AnalysisStub: 60 | def __init__(self): 61 | self.arn = "MOCK_ANALYSIS" 62 | 63 | analysis = AnalysisStub() 64 | 65 | dump_state(obj) 66 | obj.create_from_analysis(analysis) 67 | dump_state(obj, 'After template create_from_analysis obj') 68 | 69 | @ mock_sts 70 | def test_template_create_from_template(quicksight_application_stub, template_arn): 71 | obj = Template( 72 | quicksight_application=quicksight_application_stub, 73 | data_sets=None, 74 | props=None 75 | ) 76 | 77 | sub_type = 'main' 78 | TemplateStubber.stub_create_template_call(sub_type) 79 | 80 | dump_state(obj) 81 | obj.create_from_template(template_arn) 82 | dump_state(obj, 'After template create_from_template obj') 83 | 84 | @ mock_sts 85 | def test_template_delete(quicksight_application_stub, minimal_data_sets_stub): 86 | obj = Template( 87 | quicksight_application=quicksight_application_stub, 88 | data_sets=minimal_data_sets_stub.data_sets_stub, 89 | props=None 90 | ) 91 | 92 | sub_type = 'main' 93 | TemplateStubber.stub_delete_template_call(sub_type) 94 | 95 | dump_state(obj) 96 | obj.delete() 97 | dump_state(obj, 'After template delete obj') 98 | 99 | @ mock_sts 100 | def test_template_update_template_permissions(quicksight_application_stub, minimal_data_sets_stub): 101 | obj = Template( 102 | quicksight_application=quicksight_application_stub, 103 | data_sets=minimal_data_sets_stub.data_sets_stub, 104 | props=None 105 | ) 106 | 107 | sub_type = 'main' 108 | TemplateStubber.stub_update_template_permissions_call(sub_type) 109 | permission_type = TemplatePermissionType.PUBLIC 110 | logger.info(f"Testing with permission type {permission_type}") 111 | dump_state(obj) 112 | obj.update_template_permissions( 113 | permission=TemplatePermissionType.PUBLIC, 114 | 
principal=None 115 | ) 116 | dump_state(obj, 'After template update_template_permissions obj') 117 | -------------------------------------------------------------------------------- /source/lambda/quicksight-custom-resources/util/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-solutions/aws-devops-monitoring-dashboard/6c02457a0bcf18fa7b37434b049597001c8d2478/source/lambda/quicksight-custom-resources/util/__init__.py -------------------------------------------------------------------------------- /source/lambda/quicksight-custom-resources/util/analysis.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from tenacity import retry, retry_if_exception_type, stop_after_attempt 5 | 6 | from util.helpers import get_quicksight_client 7 | from util.logging import get_logger 8 | from util.quicksight_resource import QuickSightFailure, QuickSightResource 9 | from util.source_entity import SourceEntity 10 | 11 | logger = get_logger(__name__) 12 | 13 | 14 | class Analysis(QuickSightResource): 15 | def __init__( 16 | self, quicksight_application=None, data_sets=None, quicksight_template_arn=None, data_source=None, props=None 17 | ): 18 | super().__init__(quicksight_application=quicksight_application, type="analysis", props=props) 19 | self.use_props(props) 20 | 21 | self.data_sets = data_sets 22 | self.data_source = data_source 23 | self.quicksight_template_arn = quicksight_template_arn 24 | 25 | self.config_data = dict() 26 | self._load_config(self.type, ["main"], self.config_data) 27 | self.source_entity = SourceEntity( 28 | data_sets, quicksight_template_arn, self.config_data, source_entity_type="SourceTemplate" 29 | ) 30 | 31 | @retry(retry=retry_if_exception_type(QuickSightFailure), stop=stop_after_attempt(3)) 32 | def create(self): 33 | logger.info(f"requesting quicksight create_analysis: {self.id}") 34 | quicksight_client = get_quicksight_client() 35 | 36 | try: 37 | response = quicksight_client.create_analysis( 38 | AwsAccountId=self.aws_account_id, 39 | AnalysisId=self.id, 40 | Name=self.name, 41 | Permissions=self._get_permissions(), 42 | SourceEntity=self._get_source_entity(), 43 | ) 44 | logger.info(f"finished quicksight create_analysis for id:{self.id}, response: {response}") 45 | except quicksight_client.exceptions.ResourceExistsException: 46 | response = quicksight_client.describe_analysis(AwsAccountId=self.aws_account_id, AnalysisId=self.id) 47 | response = response["Analysis"] 48 | except quicksight_client.exceptions.InvalidParameterValueException as exc: 49 | logger.error(str(exc)) 50 | raise QuickSightFailure() 51 | 52 | self.arn = response["Arn"] 53 | return response 54 | 55 | def delete(self): 56 | logger.info(f"requesting quicksight delete_analysis id:{self.id}") 57 | quicksight_client = get_quicksight_client() 58 | 59 | response = quicksight_client.delete_analysis(AwsAccountId=self.aws_account_id, AnalysisId=self.id) 60 | logger.info(f"finished quicksight delete_analysis for id:{self.id}, response: {response}") 61 | return response 62 | 63 | def _get_permissions(self): 64 | # The principal is the owner of the resource, creates the resources, and is given full actions for the type 65 | permissions = [ 66 | { 67 | "Principal": self.principal_arn, 68 | "Actions": [ 69 | "quicksight:RestoreAnalysis", 70 | "quicksight:UpdateAnalysisPermissions", 71 | 
"quicksight:DeleteAnalysis", 72 | "quicksight:QueryAnalysis", 73 | "quicksight:DescribeAnalysisPermissions", 74 | "quicksight:DescribeAnalysis", 75 | "quicksight:UpdateAnalysis" 76 | ], 77 | } 78 | ] 79 | return permissions 80 | 81 | def _get_source_entity(self): 82 | return self.source_entity.get_source_entity() 83 | -------------------------------------------------------------------------------- /source/lambda/quicksight-custom-resources/util/config/analysis-main.config.json: -------------------------------------------------------------------------------- 1 | { 2 | "SourceEntity": { 3 | "SourceTemplate": { 4 | "DataSetReferences": [ 5 | { 6 | "DataSetPlaceholder": "recovery-time-detail", 7 | "DataSetArn": "arn:{Aws.PARTITION}:quicksight:{Aws.REGION}:{Aws.ACCOUNT_ID}:dataset/recovery-time" 8 | }, 9 | { 10 | "DataSetPlaceholder": "code-change-activity", 11 | "DataSetArn": "arn:{Aws.PARTITION}:quicksight:{Aws.REGION}:{Aws.ACCOUNT_ID}:dataset/code-change-activity" 12 | }, 13 | { 14 | "DataSetPlaceholder": "code-deployment-detail", 15 | "DataSetArn": "arn:{Aws.PARTITION}:quicksight:{Aws.REGION}:{Aws.ACCOUNT_ID}:dataset/code-deployment" 16 | }, 17 | { 18 | "DataSetPlaceholder": "code-pipeline-detail", 19 | "DataSetArn": "arn:{Aws.PARTITION}:quicksight:{Aws.REGION}:{Aws.ACCOUNT_ID}:dataset/code-pipeline" 20 | }, 21 | { 22 | "DataSetPlaceholder": "code-build-detail", 23 | "DataSetArn": "arn:{Aws.PARTITION}:quicksight:{Aws.REGION}:{Aws.ACCOUNT_ID}:dataset/code-build" 24 | }, 25 | { 26 | "DataSetPlaceholder": "github-change-activity", 27 | "DataSetArn": "arn:{Aws.PARTITION}:quicksight:{Aws.REGION}:{Aws.ACCOUNT_ID}:dataset/github-change-activity" 28 | } 29 | 30 | ], 31 | "Arn": "{self.source_template_arn}" 32 | } 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /source/lambda/quicksight-custom-resources/util/config/dashboard-main.config.json: -------------------------------------------------------------------------------- 1 | { 2 | "SourceEntity": { 3 | "SourceTemplate": { 4 | "DataSetReferences": [ 5 | { 6 | "DataSetPlaceholder": "recovery-time-detail", 7 | "DataSetArn": "arn:{Aws.PARTITION}:quicksight:{Aws.REGION}:{Aws.ACCOUNT_ID}:dataset/recovery-time" 8 | }, 9 | { 10 | "DataSetPlaceholder": "code-change-activity", 11 | "DataSetArn": "arn:{Aws.PARTITION}:quicksight:{Aws.REGION}:{Aws.ACCOUNT_ID}:dataset/code-change-activity" 12 | }, 13 | { 14 | "DataSetPlaceholder": "code-deployment-detail", 15 | "DataSetArn": "arn:{Aws.PARTITION}:quicksight:{Aws.REGION}:{Aws.ACCOUNT_ID}:dataset/code-deployment" 16 | }, 17 | { 18 | "DataSetPlaceholder": "code-pipeline-detail", 19 | "DataSetArn": "arn:{Aws.PARTITION}:quicksight:{Aws.REGION}:{Aws.ACCOUNT_ID}:dataset/code-pipeline" 20 | }, 21 | { 22 | "DataSetPlaceholder": "code-build-detail", 23 | "DataSetArn": "arn:{Aws.PARTITION}:quicksight:{Aws.REGION}:{Aws.ACCOUNT_ID}:dataset/code-build" 24 | }, 25 | { 26 | "DataSetPlaceholder": "github-change-activity", 27 | "DataSetArn": "arn:{Aws.PARTITION}:quicksight:{Aws.REGION}:{Aws.ACCOUNT_ID}:dataset/github-change-activity" 28 | } 29 | ], 30 | "Arn": "{self.source_template_arn}" 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /source/lambda/quicksight-custom-resources/util/config/dataset-code-deployment-detail.config.json: -------------------------------------------------------------------------------- 1 | { 2 | "PhysicalTableMap": { 3 | "3847a7fb-32e8-4210-bcce-07b7370032c8": { 4 | "RelationalTable": { 5 | 
"DataSourceArn": "{self.data_source.arn}", 6 | "Schema": "aws_devops_metrics_db_so0143", 7 | "Name": "code_deployment_detail_view", 8 | "InputColumns": [ 9 | { 10 | "Name": "account", 11 | "Type": "STRING" 12 | }, 13 | { 14 | "Name": "time", 15 | "Type": "DATETIME" 16 | }, 17 | { 18 | "Name": "region", 19 | "Type": "STRING" 20 | }, 21 | { 22 | "Name": "deployment_id", 23 | "Type": "STRING" 24 | }, 25 | { 26 | "Name": "application", 27 | "Type": "STRING" 28 | }, 29 | { 30 | "Name": "state", 31 | "Type": "STRING" 32 | }, 33 | { 34 | "Name": "created_at", 35 | "Type": "DATETIME" 36 | } 37 | ] 38 | } 39 | } 40 | }, 41 | "LogicalTableMap": { 42 | "3847a7fb-32e8-4210-bcce-07b7370032c8": { 43 | "Alias": "code_deployment_detail_view", 44 | "DataTransforms": [ 45 | { 46 | "RenameColumnOperation": { 47 | "ColumnName": "account", 48 | "NewColumnName": "Account" 49 | } 50 | }, 51 | { 52 | "RenameColumnOperation": { 53 | "ColumnName": "time", 54 | "NewColumnName": "Time" 55 | } 56 | }, 57 | { 58 | "RenameColumnOperation": { 59 | "ColumnName": "region", 60 | "NewColumnName": "Region" 61 | } 62 | }, 63 | { 64 | "RenameColumnOperation": { 65 | "ColumnName": "deployment_id", 66 | "NewColumnName": "Deployment Id" 67 | } 68 | }, 69 | { 70 | "RenameColumnOperation": { 71 | "ColumnName": "application", 72 | "NewColumnName": "Application" 73 | } 74 | }, 75 | { 76 | "RenameColumnOperation": { 77 | "ColumnName": "state", 78 | "NewColumnName": "State" 79 | } 80 | }, 81 | { 82 | "RenameColumnOperation": { 83 | "ColumnName": "created_at", 84 | "NewColumnName": "Created At" 85 | } 86 | } 87 | ], 88 | "Source": { 89 | "PhysicalTableId": "3847a7fb-32e8-4210-bcce-07b7370032c8" 90 | } 91 | } 92 | } 93 | } -------------------------------------------------------------------------------- /source/lambda/quicksight-custom-resources/util/config/dataset-github-change-activity.config.json: -------------------------------------------------------------------------------- 1 | { 2 | "PhysicalTableMap": { 3 | "cd2fd368-a250-46d5-995f-d3395ffc5ac5": { 4 | "RelationalTable": { 5 | "DataSourceArn": "{self.data_source.arn}", 6 | "Schema": "aws_devops_metrics_db_so0143", 7 | "Name": "github_change_activity_view", 8 | "InputColumns": [ 9 | { 10 | "Name": "repository_name", 11 | "Type": "STRING" 12 | }, 13 | { 14 | "Name": "branch_name", 15 | "Type": "STRING" 16 | }, 17 | { 18 | "Name": "author_name", 19 | "Type": "STRING" 20 | }, 21 | { 22 | "Name": "event_name", 23 | "Type": "STRING" 24 | }, 25 | { 26 | "Name": "commit_count", 27 | "Type": "INTEGER" 28 | }, 29 | { 30 | "Name": "time", 31 | "Type": "DATETIME" 32 | }, 33 | { 34 | "Name": "created_at", 35 | "Type": "DATETIME" 36 | } 37 | ] 38 | } 39 | } 40 | }, 41 | "LogicalTableMap": { 42 | "cd2fd368-a250-46d5-995f-d3395ffc5ac5": { 43 | "Alias": "github_change_activity_view", 44 | "DataTransforms": [ 45 | { 46 | "RenameColumnOperation": { 47 | "ColumnName": "repository_name", 48 | "NewColumnName": "Repository" 49 | } 50 | }, 51 | { 52 | "RenameColumnOperation": { 53 | "ColumnName": "branch_name", 54 | "NewColumnName": "Branch" 55 | } 56 | }, 57 | { 58 | "RenameColumnOperation": { 59 | "ColumnName": "author_name", 60 | "NewColumnName": "Author" 61 | } 62 | }, 63 | { 64 | "RenameColumnOperation": { 65 | "ColumnName": "event_name", 66 | "NewColumnName": "Event Name" 67 | } 68 | }, 69 | { 70 | "RenameColumnOperation": { 71 | "ColumnName": "commit_count", 72 | "NewColumnName": "Commit Count" 73 | } 74 | }, 75 | { 76 | "RenameColumnOperation": { 77 | "ColumnName": "time", 78 | "NewColumnName": 
"Time" 79 | } 80 | }, 81 | { 82 | "RenameColumnOperation": { 83 | "ColumnName": "created_at", 84 | "NewColumnName": "Created At" 85 | } 86 | }, 87 | { 88 | "ProjectOperation": { 89 | "ProjectedColumns": [ 90 | "Repository", 91 | "Branch", 92 | "Author", 93 | "Event Name", 94 | "Commit Count", 95 | "Time", 96 | "Created At" 97 | ] 98 | } 99 | } 100 | ], 101 | "Source": { 102 | "PhysicalTableId": "cd2fd368-a250-46d5-995f-d3395ffc5ac5" 103 | } 104 | } 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /source/lambda/quicksight-custom-resources/util/config/template-main.config.json: -------------------------------------------------------------------------------- 1 | { 2 | "SourceEntity": { 3 | "SourceAnalysis": { 4 | "DataSetReferences": [ 5 | { 6 | "DataSetPlaceholder": "recovery-time-detail", 7 | "DataSetArn": "arn:{Aws.PARTITION}:quicksight:{Aws.REGION}:{Aws.ACCOUNT_ID}:dataset/recovery-time" 8 | }, 9 | { 10 | "DataSetPlaceholder": "code-change-activity", 11 | "DataSetArn": "arn:{Aws.PARTITION}:quicksight:{Aws.REGION}:{Aws.ACCOUNT_ID}:dataset/code-change-activity" 12 | }, 13 | { 14 | "DataSetPlaceholder": "code-deployment-detail", 15 | "DataSetArn": "arn:{Aws.PARTITION}:quicksight:{Aws.REGION}:{Aws.ACCOUNT_ID}:dataset/code-deployment" 16 | }, 17 | { 18 | "DataSetPlaceholder": "code-pipeline-detail", 19 | "DataSetArn": "arn:{Aws.PARTITION}:quicksight:{Aws.REGION}:{Aws.ACCOUNT_ID}:dataset/code-pipeline" 20 | }, 21 | { 22 | "DataSetPlaceholder": "code-build-detail", 23 | "DataSetArn": "arn:{Aws.PARTITION}:quicksight:{Aws.REGION}:{Aws.ACCOUNT_ID}:dataset/code-build" 24 | }, 25 | { 26 | "DataSetPlaceholder": "github-change-activity", 27 | "DataSetArn": "arn:{Aws.PARTITION}:quicksight:{Aws.REGION}:{Aws.ACCOUNT_ID}:dataset/github-change-activity" 28 | } 29 | ], 30 | "Arn": "{self.source_analysis_arn}" 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /source/lambda/quicksight-custom-resources/util/dashboard.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from util.helpers import get_quicksight_client 5 | from util.logging import get_logger 6 | from util.quicksight_resource import QuickSightResource 7 | from util.source_entity import SourceEntity 8 | 9 | logger = get_logger(__name__) 10 | 11 | 12 | class Dashboard(QuickSightResource): 13 | def __init__( 14 | self, 15 | quicksight_application=None, 16 | data_sets=None, 17 | quicksight_template_arn=None, 18 | data_source=None, 19 | props=None, 20 | ): 21 | super().__init__(quicksight_application=quicksight_application, type="dashboard", props=props) 22 | self.use_props(props) 23 | 24 | self.data_sets = data_sets 25 | self.data_source = data_source 26 | self.quicksight_template_arn = quicksight_template_arn 27 | 28 | self.config_data = dict() 29 | self._load_config(self.type, ["main"], self.config_data) 30 | self.source_entity = SourceEntity( 31 | data_sets, quicksight_template_arn, self.config_data, source_entity_type="SourceTemplate" 32 | ) 33 | 34 | def create(self): 35 | logger.info(f"requesting quicksight create_dashboard: {self.id}") 36 | quicksight_client = get_quicksight_client() 37 | 38 | response = quicksight_client.create_dashboard( 39 | AwsAccountId=self.aws_account_id, 40 | DashboardId=self.id, 41 | Name=self.name, 42 | Permissions=self._get_permissions(), 43 | SourceEntity=self._get_source_entity(), 44 | DashboardPublishOptions=self._get_dashboard_publish_options(), 45 | ) 46 | logger.info(f"finished quicksight create_dashboard for id:{self.id}, response: {response}") 47 | 48 | self.arn = response["Arn"] 49 | return response 50 | 51 | def delete(self): 52 | logger.info(f"requesting quicksight delete_dashboard id:{self.id}") 53 | quicksight_client = get_quicksight_client() 54 | 55 | response = quicksight_client.delete_dashboard(AwsAccountId=self.aws_account_id, DashboardId=self.id) 56 | logger.info(f"finished quicksight delete_dashboard for id:{self.id}, response: {response}") 57 | 58 | return response 59 | 60 | def _get_dashboard_publish_options(self): 61 | dashboard_publish_options = { 62 | "AdHocFilteringOption": {"AvailabilityStatus": "ENABLED"}, 63 | "ExportToCSVOption": {"AvailabilityStatus": "ENABLED"}, 64 | "SheetControlsOption": {"VisibilityState": "EXPANDED"}, 65 | } 66 | return dashboard_publish_options 67 | 68 | def _get_permissions(self): 69 | # The principal is the owner of the resource, creates the resources, and is given full actions for the type 70 | permissions = [ 71 | { 72 | "Principal": self.principal_arn, 73 | "Actions": [ 74 | "quicksight:DescribeDashboard", 75 | "quicksight:ListDashboardVersions", 76 | "quicksight:UpdateDashboardPermissions", 77 | "quicksight:QueryDashboard", 78 | "quicksight:UpdateDashboard", 79 | "quicksight:DeleteDashboard", 80 | "quicksight:DescribeDashboardPermissions", 81 | "quicksight:UpdateDashboardPublishedVersion", 82 | ], 83 | } 84 | ] 85 | return permissions 86 | 87 | def _get_source_entity(self): 88 | return self.source_entity.get_source_entity() 89 | -------------------------------------------------------------------------------- /source/lambda/quicksight-custom-resources/util/datasource.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from util.helpers import get_quicksight_client 5 | from util.logging import get_logger 6 | from util.quicksight_resource import QuickSightResource 7 | 8 | logger = get_logger(__name__) 9 | 10 | 11 | class DataSource(QuickSightResource): 12 | def __init__(self, quicksight_application=None, props=None): 13 | super().__init__(quicksight_application, type="datasource", props=props) 14 | self.use_props(props) 15 | self.athena_workgroup = "primary" 16 | 17 | def create(self): 18 | logger.info(f"creating quicksight datasource id:{self.id}") 19 | quicksight_client = get_quicksight_client() 20 | 21 | data_source_parameters = {"AthenaParameters": {"WorkGroup": self.athena_workgroup}} 22 | 23 | try: 24 | response = quicksight_client.create_data_source( 25 | AwsAccountId=self.aws_account_id, 26 | DataSourceId=self.id, 27 | Name=self.name, 28 | Type="ATHENA", 29 | DataSourceParameters=data_source_parameters, 30 | Permissions=self._get_permissions(), 31 | SslProperties={"DisableSsl": False}, 32 | ) 33 | logger.info(f"finished creating quicksight datasource for id:{self.id}, response {response}") 34 | except quicksight_client.exceptions.ResourceExistsException: 35 | logger.info(f"datasource for id:{self.id} already exists") 36 | response = quicksight_client.describe_data_source(AwsAccountId=self.aws_account_id, DataSourceId=self.id) 37 | response = response["DataSource"] 38 | 39 | self.arn = response["Arn"] 40 | return response 41 | 42 | def update(self): 43 | quicksight_client = get_quicksight_client() 44 | # Fall back to describe_data_source if the update conflicts with a concurrent change 45 | data_source_parameters = {"AthenaParameters": {"WorkGroup": self.athena_workgroup}} 46 | try: 47 | response = quicksight_client.update_data_source( 48 | AwsAccountId=self.aws_account_id, 49 | DataSourceId=self.id, 50 | Name=self.name, 51 | DataSourceParameters=data_source_parameters, 52 | SslProperties={"DisableSsl": False}, 53 | ) 54 | except quicksight_client.exceptions.ConflictException as exc: 55 | logger.debug(str(exc)) 56 | response = quicksight_client.describe_data_source(AwsAccountId=self.aws_account_id, DataSourceId=self.id)["DataSource"] 57 | return response 58 | 59 | def delete(self): 60 | logger.info(f"deleting quicksight datasource id:{self.id}") 61 | quicksight_client = get_quicksight_client() 62 | 63 | response = quicksight_client.delete_data_source(AwsAccountId=self.aws_account_id, DataSourceId=self.id) 64 | logger.info(f"finished deleting quicksight datasource for id:{self.id}, response:{response}") 65 | self.arn = response["Arn"] 66 | return response 67 | 68 | def _get_permissions(self): 69 | # The principal is the owner of the resource, creates the resources, and is given full actions for the type 70 | permissions = [ 71 | { 72 | "Principal": self.principal_arn, 73 | "Actions": [ 74 | "quicksight:DescribeDataSource", 75 | "quicksight:DescribeDataSourcePermissions", 76 | "quicksight:PassDataSource", 77 | "quicksight:UpdateDataSource", 78 | "quicksight:UpdateDataSourcePermissions", 79 | "quicksight:DeleteDataSource", 80 | ], 81 | } 82 | ] 83 | return permissions 84 | -------------------------------------------------------------------------------- /source/lambda/quicksight-custom-resources/util/helpers.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import json 5 | from os import environ 6 | 7 | import boto3 8 | import botocore.config 9 | 10 | from util.logging import get_logger 11 | 12 | logger = get_logger(__name__) 13 | 14 | # Global boto3 clients to help with initialization and performance 15 | _helpers_service_clients = dict() 16 | 17 | 18 | class EnvironmentVariableError(Exception): 19 | pass 20 | 21 | 22 | def get_service_client(service_name): 23 | """Get the global service boto3 client""" 24 | global _helpers_service_clients 25 | if service_name not in _helpers_service_clients: 26 | config = botocore.config.Config(retries=dict(max_attempts=3), user_agent_extra=environ.get("UserAgentExtra")) 27 | 28 | logger.debug(f"Initializing global boto3 client for {service_name}") 29 | _helpers_service_clients[service_name] = boto3.client(service_name, config=config, region_name=get_aws_region()) 30 | return _helpers_service_clients[service_name] 31 | 32 | 33 | def get_quicksight_client(): 34 | """Get the global quicksight boto3 client""" 35 | return get_service_client("quicksight") 36 | 37 | 38 | def get_sts_client(): 39 | """Get the global sts boto3 client""" 40 | return get_service_client("sts") 41 | 42 | 43 | def get_aws_partition(): 44 | """ 45 | Get the caller's AWS partition by deriving it from the AWS region 46 | :return: partition name for the current AWS region (e.g. aws) 47 | """ 48 | region_name = environ.get("AWS_REGION") 49 | china_region_name_prefix = "cn" 50 | us_gov_cloud_region_name_prefix = "us-gov" 51 | aws_regions_partition = "aws" 52 | aws_china_regions_partition = "aws-cn" 53 | aws_us_gov_cloud_regions_partition = "aws-us-gov" 54 | 55 | # China regions 56 | if region_name.startswith(china_region_name_prefix): 57 | return aws_china_regions_partition 58 | # AWS GovCloud(US) Regions 59 | elif region_name.startswith(us_gov_cloud_region_name_prefix): 60 | return aws_us_gov_cloud_regions_partition 61 | else: 62 | return aws_regions_partition 63 | 64 | 65 | def get_aws_region(): 66 | """ 67 | Get the caller's AWS region from the environment variable AWS_REGION 68 | :return: the AWS region name (e.g. us-east-1) 69 | """ 70 | region = environ.get("AWS_REGION") 71 | if not region: 72 | raise EnvironmentVariableError("Missing AWS_REGION environment variable.") 73 | 74 | return region 75 | 76 | 77 | def get_aws_account_id(): 78 | """ 79 | Get the caller's AWS account ID 80 | :return: The AWS account ID 81 | """ 82 | sts_client = get_sts_client() 83 | identity = sts_client.get_caller_identity() 84 | return identity.get("Account") 85 | -------------------------------------------------------------------------------- /source/lambda/quicksight-custom-resources/util/logging.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | import logging 6 | import os 7 | 8 | DEFAULT_LEVEL = "INFO" 9 | 10 | 11 | def get_level(): 12 | valid_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] 13 | requested_level = os.environ.get("LOG_LEVEL", DEFAULT_LEVEL) 14 | if requested_level and requested_level in valid_levels: 15 | return requested_level 16 | return DEFAULT_LEVEL 17 | 18 | 19 | def get_logger(name): 20 | logger = None 21 | # first case: running as a lambda function or in pytest with conftest 22 | # second case: running a single test or locally under test 23 | if len(logging.getLogger().handlers) > 0: 24 | logger = logging.getLogger() 25 | logger.setLevel(get_level()) 26 | # overrides 27 | logging.getLogger("boto3").setLevel(logging.WARNING) 28 | logging.getLogger("botocore").setLevel(logging.WARNING) 29 | logging.getLogger("urllib3").setLevel(logging.WARNING) 30 | else: 31 | """ 32 | Configuring loggers is security-sensitive. For this logger, no 33 | sensitive information is logged and hence suppressing the rule 34 | """ 35 | logging.basicConfig(level=get_level()) # NOSONAR (python:S4792) 36 | logger = logging.getLogger(name) 37 | return logger 38 | -------------------------------------------------------------------------------- /source/lambda/quicksight-custom-resources/util/source_entity.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from util.logging import get_logger 5 | 6 | logger = get_logger(__name__) 7 | 8 | 9 | class SourceEntity: 10 | supported_source_entity_types = ["SourceTemplate", "SourceAnalysis"] 11 | 12 | def __init__(self, data_sets, source_obj_arn, config_data, source_entity_type): 13 | self.data_sets = data_sets 14 | self.source_obj_arn = source_obj_arn 15 | self.config_data = config_data 16 | if source_entity_type not in self.supported_source_entity_types: 17 | raise ValueError( 18 | f"Invalid source_entity_type {source_entity_type}, " 19 | f"valid values are {self.supported_source_entity_types}" 20 | ) 21 | self.source_entity_type = source_entity_type 22 | 23 | def get_source_entity(self): 24 | sub_type = "main" 25 | source_entity = self._get_map(sub_type, "SourceEntity") 26 | self._update_source_entity(source_entity) 27 | return source_entity 28 | 29 | def _update_source_entity(self, obj): 30 | """Update DataSetArn values in SourceEntity""" 31 | source_object = obj.get(self.source_entity_type, None) 32 | assert source_object 33 | logger.debug(f"Initial value of sourceEntity.sourceTemplate.arn: {source_object['Arn']}") 34 | source_object["Arn"] = self.source_obj_arn 35 | logger.debug(f"Updated value of sourceEntity.sourceTemplate.arn: {source_object['Arn']}") 36 | data_set_references = source_object.get("DataSetReferences", None) 37 | assert data_set_references 38 | 39 | for ds_ref in data_set_references: 40 | dsr_placeholder = ds_ref.get("DataSetPlaceholder", None) 41 | dsr_arn = ds_ref.get("DataSetArn", None) 42 | logger.debug( 43 | f"Initial value of DataSetReferences, DataSetPlaceholder: {dsr_placeholder}, DataSetArn: {dsr_arn}" 44 | ) 45 | data_set = self.data_sets.get(dsr_placeholder, None) 46 | assert data_set 47 | ds_ref["DataSetArn"] = data_set.arn 48 | logger.debug( 49 | f"Updated value of DataSetReferences, DataSetPlaceholder: {ds_ref['DataSetPlaceholder']}, DataSetArn: {ds_ref['DataSetArn']}" 50 | ) 51 | 52 | def _get_map(self, sub_type, map_type): 53 | if sub_type not in 
self.config_data: 54 | raise ValueError(f"Unknown sub type {sub_type}.") 55 | sub_type_config = self.config_data[sub_type] 56 | if map_type not in sub_type_config: 57 | raise ValueError(f"Missing {map_type} in config of data set type {sub_type}.") 58 | return sub_type_config[map_type] 59 | -------------------------------------------------------------------------------- /source/lambda/solution_helper/.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | omit = 3 | images/* 4 | tests/* 5 | .venv-*/* 6 | test/* 7 | */__init__.py 8 | setup.py 9 | bin 10 | cert* 11 | crhelper* 12 | idna* 13 | requests* 14 | urllib* 15 | chardet* 16 | charset_normalizer* 17 | source = 18 | . -------------------------------------------------------------------------------- /source/lambda/solution_helper/.gitignore: -------------------------------------------------------------------------------- 1 | # exclude python 3rd party modules 2 | *.dist-info/ 3 | bin 4 | certifi/ 5 | chardet/ 6 | crhelper/ 7 | idna/ 8 | requests/ 9 | ## crhelper tests directory 10 | tests/ 11 | urllib3/ 12 | -------------------------------------------------------------------------------- /source/lambda/solution_helper/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-solutions/aws-devops-monitoring-dashboard/6c02457a0bcf18fa7b37434b049597001c8d2478/source/lambda/solution_helper/__init__.py -------------------------------------------------------------------------------- /source/lambda/solution_helper/lambda_function.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import logging 5 | import os 6 | import uuid 7 | import requests 8 | import json 9 | from copy import copy 10 | from datetime import datetime 11 | from crhelper import CfnResource 12 | from util.solution_metrics import send_metrics 13 | 14 | logger = logging.getLogger(__name__) 15 | helper = CfnResource(json_logging=True, log_level="INFO") 16 | 17 | 18 | @helper.create 19 | @helper.update 20 | @helper.delete 21 | def solution_helper(event, _): 22 | 23 | logger.info(f"[solution_helper] event: {event}") 24 | 25 | if event["ResourceType"] == "Custom::CreateUUID" and event["RequestType"] == "Create": 26 | random_id = str(uuid.uuid4()) 27 | helper.Data.update({"UUID": random_id}) 28 | logger.info(f"[solution_helper] create uuid: {random_id}") 29 | 30 | 31 | def handler(event, context): 32 | logger.info(f"[handler] event: {event}") 33 | try: 34 | helper(event, context) 35 | except Exception as error: 36 | logger.exception(f"[handler] failed: {error}") 37 | -------------------------------------------------------------------------------- /source/lambda/solution_helper/pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "solution-helper" 3 | package-mode = false 4 | 5 | [tool.poetry.dependencies] 6 | python = "~3.12" 7 | crhelper = "^2.0.11" 8 | requests = "^2.32.3" 9 | urllib3 = "<2" 10 | 11 | [tool.poetry.dev-dependencies] 12 | coverage = "^7.0.5" 13 | pytest = "^7.4.2" 14 | pytest-cov = "^4.1.0" 15 | boto3 = "^1.35.0" 16 | botocore = "^1.35.0" 17 | 18 | [build-system] 19 | requires = ["poetry-core"] 20 | build-backend = "poetry.core.masonry.api" -------------------------------------------------------------------------------- /source/lambda/solution_helper/test/test_lambda_function.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import unittest 5 | 6 | 7 | class LambdaTest(unittest.TestCase): 8 | 9 | def test_create_unique_id(self): 10 | import lambda_function 11 | 12 | event = { 13 | "RequestType": "Create", 14 | "ResourceType": "Custom::CreateUUID", 15 | "ResourceProperties": { 16 | "Resource": "UUID" 17 | }} 18 | 19 | lambda_function.solution_helper(event, None) 20 | self.assertIsNotNone(lambda_function.helper.Data.get("UUID")) 21 | -------------------------------------------------------------------------------- /source/lambda/solution_helper/test/test_solution_metrics.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from unittest import TestCase 5 | from unittest.mock import patch 6 | from util.solution_metrics import send_metrics 7 | 8 | class LambdaTest(TestCase): 9 | 10 | @patch('util.solution_metrics.requests.post') 11 | def test_send_metrics(self, mock_post): 12 | data = {"data": "some data"} 13 | uuid = "2820b493-864c-4ca1-99d3-7174fef7f374" 14 | solution_id = "SO0000" 15 | url = "https://example.com" 16 | 17 | mock_post.return_value.status_code = 200 18 | response = send_metrics(data, uuid, solution_id, url) 19 | self.assertIsNotNone(response) 20 | -------------------------------------------------------------------------------- /source/lambda/solution_helper/util/solution_metrics.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import os 5 | import requests 6 | from json import dumps 7 | from datetime import datetime 8 | import logging 9 | 10 | logger = logging.getLogger(__name__) 11 | 12 | def send_metrics(data, 13 | uuid=os.getenv('UUID'), 14 | solution_id=os.getenv('SOLUTION_ID'), 15 | url=os.getenv('METRICS_URL')): 16 | """Sends anonymous customer metrics to s3 via API gateway owned and 17 | managed by the Solutions Builder team. 18 | 19 | Args: 20 | data - anonymous customer metrics to be sent 21 | uuid - uuid of the solution 22 | solution_id: unique id of the solution 23 | url: url for API Gateway via which data is sent 24 | 25 | Return: response returned by https post request 26 | """ 27 | try: 28 | metrics_data = { 29 | "Solution": solution_id, 30 | "UUID": uuid, 31 | "TimeStamp": str(datetime.utcnow().isoformat()), 32 | "Data": data 33 | } 34 | json_data = dumps(metrics_data) 35 | print('metrics data:' + json_data) 36 | headers = {'content-type': 'application/json'} 37 | response = requests.post(url, data=json_data, headers=headers, timeout=10) 38 | return response 39 | except Exception as error : 40 | logger.exception(f"Error sending usage data: {error}") 41 | -------------------------------------------------------------------------------- /source/lambda/tag_query/.jest/setEnvVars.js: -------------------------------------------------------------------------------- 1 | //process.env.LOG_LEVEL = 'DEBUG'; 2 | -------------------------------------------------------------------------------- /source/lambda/tag_query/jest.config.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | module.exports = { 5 | testEnvironment: 'node', 6 | testMatch: ['test/**/*.[jt]s?(x)', '**/?(*.)+(spec|test).[jt]s?(x)'], 7 | collectCoverageFrom: ['*.js', 'lib/*.js', '!test/*.js', '!jest.config.js'], 8 | coverageReporters: [['lcov', { projectRoot: '../../../' }], 'text'], 9 | setupFiles: ['/.jest/setEnvVars.js'] 10 | }; 11 | -------------------------------------------------------------------------------- /source/lambda/tag_query/lib/cfn.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | const axios = require('axios'); 5 | 6 | /** 7 | * Send custom resource response. 
8 | * @param {object} event - Custom resource event 9 | * @param {string} logStreamName - Custom resource log stream name 10 | * @param {object} response - Response object { status: "SUCCESS|FAILED", data: any } 11 | */ 12 | async function sendResponse(event, logStreamName, response) { 13 | const responseBody = JSON.stringify({ 14 | Status: response.status, 15 | Reason: `See the details in CloudWatch Log Stream: ${logStreamName}`, 16 | PhysicalResourceId: event.PhysicalResourceId ?? logStreamName, 17 | StackId: event.StackId, 18 | RequestId: event.RequestId, 19 | LogicalResourceId: event.LogicalResourceId, 20 | Data: response.data 21 | }); 22 | 23 | console.log(`RESPONSE BODY: ${responseBody}`); 24 | 25 | const config = { 26 | headers: { 27 | 'Content-Type': '', 28 | 'Content-Length': responseBody.length 29 | } 30 | }; 31 | 32 | await axios.put(event.ResponseURL, responseBody, config); 33 | } 34 | 35 | module.exports = { 36 | send: sendResponse 37 | }; 38 | -------------------------------------------------------------------------------- /source/lambda/tag_query/lib/logger.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | // SPDX-License-Identifier: Apache-2.0 4 | 5 | 'use strict'; 6 | 7 | class Logger { 8 | constructor() { 9 | this.loglevel = process.env.LOG_LEVEL; 10 | this.LOGLEVELS = { 11 | ERROR: 1, 12 | WARN: 2, 13 | INFO: 3, 14 | DEBUG: 4 15 | }; 16 | } 17 | 18 | log(level, message) { 19 | if (this.LOGLEVELS[level] <= this.LOGLEVELS[this.loglevel]) console.log(`[${level}][${message}]`); 20 | } 21 | } 22 | 23 | module.exports = Object.freeze(Logger); 24 | -------------------------------------------------------------------------------- /source/lambda/tag_query/lib/metrics_helper.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | const axios = require('axios'); 5 | const LOGGER = new (require('./logger'))(); 6 | 7 | /** 8 | * Send anonymous usage metrics 9 | * @param solutionId 10 | * @param uuid 11 | * @param metricsData 12 | * @param metricsURL 13 | */ 14 | const SendMetrics = async (solutionId, uuid, metricsData, metricsURL) => { 15 | LOGGER.log('INFO', '[metrics_helper] Start sending Anonymous Metric.'); 16 | 17 | let data; 18 | 19 | try { 20 | const metrics = { 21 | Solution: solutionId, 22 | UUID: uuid, 23 | //Formatting the time string to the format 'YYYY-MM-DD HH:mm:ss.S' 24 | TimeStamp: new Date().toISOString().replace('T', ' ').replace('Z', ' ').substring(0, 21), 25 | Data: metricsData 26 | }; 27 | const params = { 28 | method: 'post', 29 | port: 443, 30 | url: metricsURL, 31 | headers: { 32 | 'Content-Type': 'application/json' 33 | }, 34 | data: metrics 35 | }; 36 | data = await axios(params); 37 | } catch (err) { 38 | LOGGER.log('ERROR', err); 39 | throw err; 40 | } 41 | 42 | LOGGER.log('INFO', '[metrics_helper] End sending Anonymous Metric.'); 43 | 44 | return data.status; 45 | }; 46 | 47 | module.exports = { 48 | sendMetrics: SendMetrics 49 | }; 50 | -------------------------------------------------------------------------------- /source/lambda/tag_query/lib/query_generator.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | // SPDX-License-Identifier: Apache-2.0 4 | 5 | 'use strict'; 6 | 7 | const { TagConfig } = require('./resource_info'); 8 | 9 | function generateQueriesForUnion(tagConfigs) { 10 | const queries = []; 11 | for (const tagConfig of tagConfigs) { 12 | const tags = tagConfig.tagConfig.length > 0 ? tagConfig.tagConfig.split(TagConfig.tagConfigSeparator) : []; 13 | for (const tag of tags) { 14 | const tagSplit = tag.split(TagConfig.tagKeyValueSeparator); 15 | const key = tagSplit[0]; 16 | const value = tagSplit[1]; 17 | 18 | const resourceTypeFilters = [tagConfig.resourceType]; 19 | const tagFilters = []; 20 | tagFilters.push({ Key: key, Values: [value] }); 21 | 22 | const params = { 23 | TagFilters: tagFilters, 24 | ResourceTypeFilters: resourceTypeFilters 25 | }; 26 | queries.push(params); 27 | } 28 | } 29 | return queries; 30 | } 31 | 32 | function generateQueriesForIntersection(tagConfigs) { 33 | const queries = []; 34 | for (const tagConfig of tagConfigs) { 35 | const tags = tagConfig.tagConfig.length > 0 ? tagConfig.tagConfig.split(TagConfig.tagConfigSeparator) : []; 36 | const tagFilters = []; 37 | for (const tag of tags) { 38 | const tagSplit = tag.split(TagConfig.tagKeyValueSeparator); 39 | const key = tagSplit[0]; 40 | const value = tagSplit[1]; 41 | 42 | tagFilters.push({ Key: key, Values: [value] }); 43 | } 44 | 45 | if (tagFilters.length === 0) { 46 | continue; 47 | } 48 | 49 | const resourceTypeFilters = [tagConfig.resourceType]; 50 | const params = { 51 | TagFilters: tagFilters, 52 | ResourceTypeFilters: resourceTypeFilters 53 | }; 54 | queries.push(params); 55 | } 56 | return queries; 57 | } 58 | 59 | // Generate the set of keys requested for each resource type, so that we can filter the tags that go into the report 60 | function generateTagFilters(tagConfigs) { 61 | const filters = {}; 62 | for (const tagConfig of tagConfigs) { 63 | const tags = tagConfig.tagConfig.length > 0 ? tagConfig.tagConfig.split(TagConfig.tagConfigSeparator) : []; 64 | for (const tag of tags) { 65 | const tagSplit = tag.split(TagConfig.tagKeyValueSeparator); 66 | const key = tagSplit[0]; 67 | 68 | if (!(tagConfig.resourceType in filters)) { 69 | filters[tagConfig.resourceType] = new Set(); 70 | } 71 | 72 | filters[tagConfig.resourceType].add(key); 73 | } 74 | } 75 | return filters; 76 | } 77 | 78 | exports.generateQueriesForUnion = generateQueriesForUnion; 79 | exports.generateQueriesForIntersection = generateQueriesForIntersection; 80 | exports.generateTagFilters = generateTagFilters; 81 | -------------------------------------------------------------------------------- /source/lambda/tag_query/lib/throttler.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | // SPDX-License-Identifier: Apache-2.0 4 | 5 | 'use strict'; 6 | 7 | class Throttler { 8 | constructor(minIntervalMilliseconds) { 9 | this._minIntervalMilliseconds = minIntervalMilliseconds; 10 | this._timeLastReady = undefined; 11 | } 12 | 13 | async ready() { 14 | const timeWhenCalled = new Date().getTime(); 15 | if (this._timeLastReady === undefined) { 16 | // First call, return immediately 17 | this._timeLastReady = timeWhenCalled; 18 | return; 19 | } 20 | 21 | const timeNextReady = this._timeLastReady + this._minIntervalMilliseconds; 22 | if (timeWhenCalled < timeNextReady) { 23 | const delayMilliseconds = timeNextReady - timeWhenCalled; 24 | await new Promise(resolve => { 25 | setTimeout(() => { 26 | resolve(); 27 | }, delayMilliseconds); 28 | }); 29 | } 30 | 31 | // Whether we waited or not, remember the last time we returned a ready status 32 | this._timeLastReady = new Date().getTime(); 33 | } 34 | } 35 | 36 | exports.Throttler = Throttler; 37 | -------------------------------------------------------------------------------- /source/lambda/tag_query/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "tag-query-js", 3 | "description": "Lambda function for querying tag information for CodeCommit, CodeBuild, and CodePipeline resources", 4 | "main": "index.js", 5 | "author": { 6 | "name": "Amazon Web Services", 7 | "url": "https://aws.amazon.com/solutions" 8 | }, 9 | "license": "Apache-2.0", 10 | "dependencies": { 11 | "axios": "^1.7.4" 12 | }, 13 | "devDependencies": { 14 | "@aws-sdk/client-resource-groups-tagging-api": "^3.451.0", 15 | "@aws-sdk/client-s3": "^3.362.0", 16 | "@aws-sdk/client-sts": "^3.362.0", 17 | "aws-sdk-client-mock": "^3.0.0", 18 | "axios-mock-adapter": "^1.21.2", 19 | "aws-sdk-client-mock-jest": "^3.0.0", 20 | "chai": "^4.3.7", 21 | "jest": "^29.3.1" 22 | }, 23 | "overrides": { 24 | "semver": "~7.5.2" 25 | }, 26 | "scripts": { 27 | "pretest": "npm install", 28 | "test": "jest --coverage", 29 | "build:init": "rm -rf package-lock.json && rm -rf dist && rm -rf node_modules && mkdir dist", 30 | "build:zip": "rm -rf package-lock.json && zip -rq --exclude=*tests* --exclude=*template.yml --exclude=*dist/* dist/event-parser-js.zip .", 31 | "build": "npm run build:init && npm install --production && npm run build:zip", 32 | "clean": "rm -rf node_modules" 33 | }, 34 | "bundleDependencies": [] 35 | } 36 | -------------------------------------------------------------------------------- /source/lambda/tag_query/tag_query.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | // SPDX-License-Identifier: Apache-2.0 4 | 5 | 'use strict'; 6 | 7 | const { ResourceGroupsTaggingAPIClient, GetResourcesCommand } = require('@aws-sdk/client-resource-groups-tagging-api'); 8 | const { generateTagFilters, generateQueriesForIntersection } = require('./lib/query_generator'); 9 | const { createResourceInfo, getType } = require('./lib/resource_info'); 10 | const { Throttler } = require('./lib/throttler'); 11 | const LOGGER = new (require('./lib/logger'))(); 12 | 13 | class TagQueryEngine { 14 | constructor(tagConfigs) { 15 | this._tagStrings = {}; // When creating a resource info object, we need to know the original filter string 16 | tagConfigs.forEach(({ resourceType, tagConfig }) => Object.assign(this._tagStrings, { [resourceType]: tagConfig })); 17 | this._queries = generateQueriesForIntersection(tagConfigs); 18 | this._tagFilters = generateTagFilters(tagConfigs); 19 | this._tagApi = new ResourceGroupsTaggingAPIClient({ customUserAgent: process.env.USER_AGENT_EXTRA }); 20 | this._throttler = new Throttler(1000); 21 | this._savedError = undefined; 22 | } 23 | 24 | async getResources() { 25 | const results = []; 26 | const uniqueArns = new Set(); 27 | for (const params of this._queries) { 28 | LOGGER.log('INFO', `Querying: ${JSON.stringify(params)}`); 29 | try { 30 | const resources = await this._getResourcesPaginated(params); 31 | LOGGER.log('INFO', `Response: ${JSON.stringify(resources)}`); 32 | for (const resource of resources) { 33 | // A resource may be returned by multiple queries, only process it once 34 | const arn = resource.ResourceARN; 35 | if (uniqueArns.has(arn)) { 36 | continue; 37 | } 38 | 39 | // Get the original tag filter string 40 | const tagString = this._tagStrings[getType(arn)]; 41 | 42 | // Only store data on the tag keys that we are interested in 43 | const tags = this._filterTags(arn, resource.Tags); 44 | results.push(createResourceInfo(arn, tags, tagString)); 45 | uniqueArns.add(arn); 46 | } 47 | } catch (err) { 48 | LOGGER.log('ERROR', `Error querying tagging API with ${JSON.stringify(params)}: ${err}`); 49 | // Return information on as many resources as possible 50 | if (this._savedError === undefined) { 51 | // But persist an error if we encounter it so the caller can check for it 52 | this._savedError = err; 53 | } 54 | } 55 | } 56 | return results; 57 | } 58 | 59 | _filterTags(arn, tags) { 60 | const resourceType = getType(arn); 61 | const tagFilter = this._tagFilters[resourceType]; 62 | return tags.filter(({ Key }) => tagFilter.has(Key)); 63 | } 64 | 65 | async _getResourcesPaginated(params) { 66 | const resources = []; 67 | let response = await this._getResourcesThrottled(params); 68 | resources.push(...response.ResourceTagMappingList); 69 | while ('PaginationToken' in response && response.PaginationToken !== '') { 70 | params.PaginationToken = response.PaginationToken; 71 | response = await this._getResourcesThrottled(params); 72 | resources.push(...response.ResourceTagMappingList); 73 | } 74 | return resources; 75 | } 76 | 77 | async _getResourcesThrottled(params) { 78 | await this._throttler.ready(); 79 | const command = new GetResourcesCommand(params); 80 | const response = await this._tagApi.send(command); 81 | return response; 82 | } 83 | 84 | getSavedError() { 85 | return this._savedError; 86 | } 87 | } 88 | 89 | exports.TagQueryEngine = TagQueryEngine; 90 | -------------------------------------------------------------------------------- /source/lambda/tag_query/test/lib/cfn.spec.js: 
-------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | const axios = require('axios'); 5 | const cfn = require('../../lib/cfn.js'); 6 | 7 | const event = { 8 | LogicalResourceId: 'testLRId', 9 | StackId: 'testStackId', 10 | RequestId: 'testRequestId', 11 | ResponseURL: 'http://example.com' 12 | }; 13 | 14 | const logStreamName = 'testLSName'; 15 | const responseData = { status: 200, data: 'testData' }; 16 | 17 | jest.mock('axios'); 18 | axios.put.mockImplementation(() => Promise.resolve({ status: 200, data: {} })); 19 | 20 | describe('Test sending CFN response', () => { 21 | it('should call axios.put to send CFN response', async () => { 22 | await cfn.send(event, logStreamName, responseData); 23 | expect(axios.put).toHaveBeenCalled(); 24 | }); 25 | }); 26 | -------------------------------------------------------------------------------- /source/lambda/tag_query/test/lib/logger.spec.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | let Logger = new (require('../../lib/logger'))(); 7 | 8 | const consoleSpy = jest.spyOn(console, 'log'); 9 | 10 | describe('#Logger', () => { 11 | describe('#logger', () => { 12 | it('check with LOG_LEVEL=INFO', () => { 13 | Logger.loglevel = 'INFO'; 14 | Logger.log('INFO', 'INFO_MESSAGE'); 15 | Logger.log('WARN', 'WARN_MESSAGE'); 16 | Logger.log('ERROR', 'ERROR_MESSAGE'); 17 | Logger.log('DEBUG', 'DEBUG_MESSAGE'); 18 | 19 | expect(consoleSpy).toHaveBeenCalledTimes(3); 20 | }); 21 | 22 | it('check with LOG_LEVEL=WARN', () => { 23 | Logger.loglevel = 'WARN'; 24 | Logger.log('INFO', 'INFO_MESSAGE'); 25 | Logger.log('WARN', 'WARN_MESSAGE'); 26 | Logger.log('ERROR', 'ERROR_MESSAGE'); 27 | Logger.log('DEBUG', 'DEBUG_MESSAGE'); 28 | 29 | expect(consoleSpy).toHaveBeenCalledTimes(5); 30 | }); 31 | 32 | it('check with LOG_LEVEL=ERROR', () => { 33 | Logger.loglevel = 'ERROR'; 34 | Logger.log('INFO', 'INFO_MESSAGE'); 35 | Logger.log('WARN', 'WARN_MESSAGE'); 36 | Logger.log('ERROR', 'ERROR_MESSAGE'); 37 | Logger.log('DEBUG', 'DEBUG_MESSAGE'); 38 | 39 | expect(consoleSpy).toHaveBeenCalledTimes(6); 40 | }); 41 | 42 | it('check with LOG_LEVEL=DEBUG', () => { 43 | Logger.loglevel = 'DEBUG'; 44 | Logger.log('INFO', 'INFO_MESSAGE'); 45 | Logger.log('WARN', 'WARN_MESSAGE'); 46 | Logger.log('ERROR', 'ERROR_MESSAGE'); 47 | Logger.log('DEBUG', 'DEBUG_MESSAGE'); 48 | 49 | expect(consoleSpy).toHaveBeenCalledTimes(10); 50 | }); 51 | }); 52 | }); 53 | -------------------------------------------------------------------------------- /source/lambda/tag_query/test/lib/metrics_helper.spec.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | const axios = require('axios'); 5 | const expect = require('chai').expect; 6 | const MockAdapter = require('axios-mock-adapter'); 7 | 8 | let metricsHelper = require('../../lib/metrics_helper.js'); 9 | 10 | const solutionId = 'SO0000'; 11 | const uuid = '999-999'; 12 | const metricsURL = 'https://metrics.awssolutionsbuilder.com/generic'; 13 | 14 | const data = { 15 | version: 'v1', 16 | data_type: 'lambda', 17 | region: 'us-east-1' 18 | }; 19 | 20 | describe('#SEND METRICS', () => { 21 | it('should return "200" on a send metrics success', async () => { 22 | let mock = new MockAdapter(axios); 23 | mock.onPost().reply(200, {}); 24 | 25 | let response = await metricsHelper.sendMetrics(solutionId, uuid, data, metricsURL); 26 | expect(response).to.equal(200); 27 | }); 28 | 29 | it('should return "Network Error" on connection timeout', async () => { 30 | let mock = new MockAdapter(axios); 31 | mock.onPut().networkError(); 32 | 33 | await metricsHelper.sendMetrics(solutionId, uuid, data, metricsURL).catch(err => { 34 | expect(err.toString()).to.equal('Error: Request failed with status code 404'); 35 | }); 36 | }); 37 | }); 38 | -------------------------------------------------------------------------------- /source/lambda/tag_query/test/lib/query_generator.spec.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | const { 7 | generateQueriesForIntersection, 8 | generateQueriesForUnion, 9 | generateTagFilters 10 | } = require('../../lib/query_generator'); 11 | const { TagConfig, ResourceTypes } = require('../../lib/resource_info'); 12 | 13 | describe('Test query generation for union', () => { 14 | test('No tag configs', () => { 15 | const result = generateQueriesForUnion([]); 16 | expect(result).toStrictEqual([]); 17 | }); 18 | 19 | test('Empty tag config', async () => { 20 | const result = generateQueriesForUnion([new TagConfig(ResourceTypes.CodeCommitRepository, '')]); 21 | expect(result).toStrictEqual([]); 22 | }); 23 | 24 | test('Single tag config', () => { 25 | const result = generateQueriesForUnion([ 26 | new TagConfig(ResourceTypes.CodeBuildProject, 'key1,value1;key2,value2;key3,value3') 27 | ]); 28 | expect(result).toStrictEqual([ 29 | { 30 | TagFilters: [{ Key: 'key1', Values: ['value1'] }], 31 | ResourceTypeFilters: [ResourceTypes.CodeBuildProject] 32 | }, 33 | { 34 | TagFilters: [{ Key: 'key2', Values: ['value2'] }], 35 | ResourceTypeFilters: [ResourceTypes.CodeBuildProject] 36 | }, 37 | { 38 | TagFilters: [{ Key: 'key3', Values: ['value3'] }], 39 | ResourceTypeFilters: [ResourceTypes.CodeBuildProject] 40 | } 41 | ]); 42 | }); 43 | }); 44 | 45 | describe('Test query generation for intersection', () => { 46 | test('No tag configs', () => { 47 | const result = generateQueriesForIntersection([]); 48 | expect(result).toStrictEqual([]); 49 | }); 50 | 51 | test('Empty tag config', async () => { 52 | const result = generateQueriesForIntersection([new TagConfig(ResourceTypes.CodeCommitRepository, '')]); 53 | expect(result).toStrictEqual([]); 54 | }); 55 | 56 | test('Single tag config', () => { 57 | const result = generateQueriesForIntersection([ 58 | new TagConfig(ResourceTypes.CodeBuildProject, 'key1,value1;key2,value2;key3,value3') 59 | ]); 60 | expect(result).toStrictEqual([ 61 | { 62 | TagFilters: [ 63 | { Key: 'key1', Values: ['value1'] }, 64 | { Key: 'key2', 
Values: ['value2'] }, 65 | { Key: 'key3', Values: ['value3'] } 66 | ], 67 | ResourceTypeFilters: [ResourceTypes.CodeBuildProject] 68 | } 69 | ]); 70 | }); 71 | }); 72 | 73 | describe('Test generating tag filters', () => { 74 | test('No tag configs', () => { 75 | const result = generateTagFilters([]); 76 | expect(result).toStrictEqual({}); 77 | }); 78 | 79 | test('Single resource type', () => { 80 | const result = generateTagFilters([ 81 | new TagConfig(ResourceTypes.CodeCommitRepository, 'firstKey,firstValue;secondKey,secondValue') 82 | ]); 83 | expect(result).toStrictEqual({ [ResourceTypes.CodeCommitRepository]: new Set(['firstKey', 'secondKey']) }); 84 | }); 85 | 86 | test('All resource types, overlapping keys', () => { 87 | const result = generateTagFilters([ 88 | new TagConfig(ResourceTypes.CodeCommitRepository, 'k1,v1;k2,v2;k3,v3'), 89 | new TagConfig(ResourceTypes.CodeBuildProject, 'k4,v4;k2,v2'), 90 | new TagConfig(ResourceTypes.CodePipelinePipeline, 'k1,v1') 91 | ]); 92 | expect(result).toStrictEqual({ 93 | [ResourceTypes.CodeCommitRepository]: new Set(['k1', 'k2', 'k3']), 94 | [ResourceTypes.CodeBuildProject]: new Set(['k4', 'k2']), 95 | [ResourceTypes.CodePipelinePipeline]: new Set(['k1']) 96 | }); 97 | }); 98 | }); 99 | -------------------------------------------------------------------------------- /source/lambda/tag_query/test/lib/resource_info.spec.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | const { ResourceTypes, getType, createResourceInfo } = require('../../lib/resource_info'); 7 | 8 | describe('Test getType', () => { 9 | test('CodeCommit repository', () => { 10 | const arn = 'arn:aws-us-gov:codecommit:us-gov-west-1:111111111111:repo-name'; 11 | expect(getType(arn)).toStrictEqual(ResourceTypes.CodeCommitRepository); 12 | }); 13 | 14 | test('CodeBuild project', () => { 15 | const arn = 'arn:aws-cn:codebuild:cn-northwest-1:111111111111:project/project-name'; 16 | expect(getType(arn)).toStrictEqual(ResourceTypes.CodeBuildProject); 17 | }); 18 | 19 | test('CodePipeline pipeline', () => { 20 | const arn = 'arn:aws:codepipeline:us-west-2:111111111111:pipeline-name'; 21 | expect(getType(arn)).toStrictEqual(ResourceTypes.CodePipelinePipeline); 22 | }); 23 | 24 | test('Invalid ARN', () => { 25 | const arn = 'arn:aws:s3:::bucket-name'; 26 | expect(() => { 27 | getType(arn); 28 | }).toThrow(Error); 29 | }); 30 | }); 31 | 32 | describe('Test createResourceInfo', () => { 33 | test('CodeCommit repository', () => { 34 | const arn = 'arn:aws:codecommit:us-east-1:111111111111:SomeRepoName'; 35 | const resourceInfo = createResourceInfo(arn, {}); 36 | expect(resourceInfo.service).toStrictEqual('CodeCommit'); 37 | expect(resourceInfo.type).toStrictEqual('repository'); 38 | expect(resourceInfo.account).toStrictEqual('111111111111'); 39 | expect(resourceInfo.name).toStrictEqual('SomeRepoName'); 40 | }); 41 | 42 | test('CodeBuild project', () => { 43 | const arn = 'arn:aws-us-gov:codebuild:us-gov-west-1:111111111111:project/a_project'; 44 | const resourceInfo = createResourceInfo(arn, {}); 45 | expect(resourceInfo.service).toStrictEqual('CodeBuild'); 46 | expect(resourceInfo.type).toStrictEqual('project'); 47 | expect(resourceInfo.account).toStrictEqual('111111111111'); 48 | expect(resourceInfo.name).toStrictEqual('a_project'); 49 | }); 50 | 51 | test('CodePipeline pipeline', () => { 52 | const arn = 
'arn:aws-cn:codepipeline:cn-northwest-1:111111111111:pipeline-name'; 53 | const resourceInfo = createResourceInfo(arn, {}); 54 | expect(resourceInfo.service).toStrictEqual('CodePipeline'); 55 | expect(resourceInfo.type).toStrictEqual('pipeline'); 56 | expect(resourceInfo.account).toStrictEqual('111111111111'); 57 | expect(resourceInfo.name).toStrictEqual('pipeline-name'); 58 | }); 59 | 60 | test('Invalid ARN', () => { 61 | const arn = 'arn:aws:iam::111111111111:role/RoleName'; 62 | expect(() => { 63 | createResourceInfo(arn, {}); 64 | }).toThrow(Error); 65 | }); 66 | }); 67 | -------------------------------------------------------------------------------- /source/lambda/tag_query/test/lib/throttler.spec.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | const { Throttler } = require('../../lib/throttler'); 7 | 8 | describe('Test Throttler', () => { 9 | test('Throttles execution', async () => { 10 | const throttler = new Throttler(100); 11 | const start = new Date().getTime(); 12 | for (let i = 0; i <= 5; i++) { 13 | await throttler.ready(); 14 | } 15 | const end = new Date().getTime(); 16 | expect(end - start).toBeGreaterThanOrEqual(500); 17 | }); 18 | }); 19 | -------------------------------------------------------------------------------- /source/lambda/tag_query/test/reporter.spec.js: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | 'use strict'; 5 | 6 | const { PutObjectCommand, S3Client } = require('@aws-sdk/client-s3'); 7 | const { STSClient, GetCallerIdentityCommand } = require('@aws-sdk/client-sts'); 8 | const { mockClient } = require('aws-sdk-client-mock'); 9 | const { ResourceTypes, CodeBuildResourceInfo, TagConfig } = require('../lib/resource_info'); 10 | const { Reporter } = require('../reporter'); 11 | require('aws-sdk-client-mock-jest'); 12 | 13 | beforeEach(() => { 14 | process.env.AWS_REGION = 'us-east-1'; 15 | }); 16 | 17 | describe('Test Reporter', () => { 18 | const mockS3Client = mockClient(S3Client); 19 | const mockSTSClient = mockClient(STSClient); 20 | 21 | beforeEach(() => { 22 | process.env.AWS_REGION = 'us-east-1'; 23 | mockS3Client.reset(); 24 | mockSTSClient.reset(); 25 | }); 26 | 27 | test('Validate constructor', () => { 28 | const myReporter = new Reporter('a_bucket'); 29 | expect(myReporter._bucket).toEqual('a_bucket'); 30 | }); 31 | 32 | test('upload report', async () => { 33 | const bucket = 'bucket_name'; 34 | const filter = 'tag_key,tag_value;another_key,another_value'; 35 | const expectedKey = `TaggedResources/CodeBuild/111111111111_us-east-1_project_tagged.json`; 36 | 37 | mockS3Client.on(PutObjectCommand).resolves([]); 38 | 39 | mockSTSClient.on(GetCallerIdentityCommand).resolves({ 40 | Account: '111111111111', 41 | Arn: 'arn:aws:iam::111111111111/root' 42 | }); 43 | 44 | const reporter = new Reporter(bucket, [new TagConfig(ResourceTypes.CodeBuildProject, filter)]); 45 | const resource = new CodeBuildResourceInfo( 46 | 'arn:aws:codebuild:us-east-1:111111111111:project/test-project', 47 | [ 48 | { Key: 'tag_key', Value: 'tag_value' }, 49 | { Key: 'another_key', Value: 'another_value' } 50 | ], 51 | filter 52 | ); 53 | 54 | await reporter.addResource(resource); 55 | await reporter.uploadReports(); 56 | 57 | const
expectedBody = { 58 | account_id: '111111111111', 59 | region: 'us-east-1', 60 | resource_type: 'project', 61 | resource_name: 'test-project', 62 | tag: filter, 63 | create_time_stamp: expect.anything() 64 | }; 65 | 66 | expect(mockS3Client).toHaveReceivedCommandTimes(PutObjectCommand, 1); 67 | const actualBody = JSON.parse(mockS3Client.call(0)['firstArg']['input']['Body']); 68 | const actualKey = mockS3Client.call(0)['firstArg']['input']['Key']; 69 | 70 | expect(actualKey).toEqual(expectedKey); 71 | expect(actualBody).toEqual(expectedBody); 72 | const timestamp = actualBody.create_time_stamp; 73 | expect(timestamp).toMatch(/^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}(?:\.\d+)?$/); 74 | }); 75 | }); 76 | -------------------------------------------------------------------------------- /source/lib/app-registry/apply_tag.ts: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | import { Tags } from 'aws-cdk-lib'; 5 | import { IConstruct } from 'constructs'; 6 | 7 | /** 8 | * @description applies tag to cloudformation resources 9 | * @param resource 10 | * @param key 11 | * @param value 12 | */ 13 | export function applyTag(resource: IConstruct, key: string, value: string) { 14 | Tags.of(resource).add(key, value); 15 | } 16 | -------------------------------------------------------------------------------- /source/lib/app-registry/condition_aspect.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | // SPDX-License-Identifier: Apache-2.0 4 | 5 | import { IAspect, CfnResource, Resource, CfnCondition } from 'aws-cdk-lib'; 6 | import { IConstruct } from 'constructs'; 7 | 8 | /** 9 | * @description cdk aspect to apply conditions 10 | */ 11 | export class ConditionAspect implements IAspect { 12 | /** 13 | * @description condition to apply to cfn resource 14 | */ 15 | readonly condition: CfnCondition; 16 | 17 | /** 18 | * @description aspect constructor 19 | * @param condition 20 | */ 21 | constructor(condition: CfnCondition) { 22 | this.condition = condition; 23 | } 24 | 25 | /** 26 | * @description method to apply while visiting each node 27 | * @param node 28 | */ 29 | public visit(node: IConstruct) { 30 | if (node instanceof CfnResource) { 31 | applyCondition(node, this.condition); 32 | } 33 | } 34 | } 35 | 36 | /** 37 | * @description applies condition on resources 38 | * @param resource - resource on which to apply condition, 39 | * @param condition - condition to apply 40 | */ 41 | function applyCondition(resource: Resource | CfnResource, condition: CfnCondition) { 42 | if (resource) { 43 | if (resource instanceof Resource) { 44 | resource = resource.node.defaultChild as CfnResource; 45 | } 46 | resource.cfnOptions.condition = condition; 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /source/lib/deployment-helper/canary_alarm/alarm_construct.ts: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | import { Construct } from 'constructs'; 5 | import * as cw from 'aws-cdk-lib/aws-cloudwatch'; 6 | import { addCfnSuppressRules } from '@aws-solutions-constructs/core'; 7 | 8 | export interface AlarmProps { 9 | readonly canaryName: string; 10 | readonly evalPeriods: number; 11 | readonly alarmPeriods: number; 12 | readonly threshold: number; 13 | alarmName?: string; 14 | index?: number; 15 | } 16 | 17 | export class AlarmConstruct extends Construct { 18 | constructor(scope: Construct, id: string, props: AlarmProps) { 19 | super(scope, id); 20 | 21 | if (props.index === undefined) { 22 | props.index = 1; 23 | } 24 | if (props.alarmName === undefined) { 25 | props.alarmName = 'Synthetics-Alarm-' + props.canaryName + '-' + props.index; 26 | } 27 | // Alarm State - the canary is "failed" when... 28 | // -------------------------------------------- 29 | // If you choose to create alarms, they are created with the following 30 | // name convention:Synthetics-Alarm-canaryName-index 31 | const alarm = new cw.Alarm(this, 'AppAlarm', { 32 | alarmName: props.alarmName, 33 | metric: new cw.Metric({ 34 | namespace: 'CloudWatchSynthetics', 35 | metricName: 'SuccessPercent', 36 | dimensionsMap: { CanaryName: props.canaryName } 37 | }), 38 | threshold: props.threshold, 39 | comparisonOperator: cw.ComparisonOperator.LESS_THAN_THRESHOLD, 40 | evaluationPeriods: props.evalPeriods, 41 | datapointsToAlarm: props.alarmPeriods, 42 | alarmDescription: 'Alarm when canary success is less than 100% on the most recent check.' 43 | }); 44 | 45 | const alarmResource = alarm.node.findChild('Resource') as cw.CfnAlarm; 46 | addCfnSuppressRules(alarmResource, [ 47 | { 48 | id: 'W28', 49 | reason: 'Static names chosen intentionally to provide fixed name structure required in the solution' 50 | } 51 | ]); 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /source/lib/events/canary_events_construct.ts: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | import { Construct } from 'constructs'; 5 | import * as iam from 'aws-cdk-lib/aws-iam'; 6 | import * as events from 'aws-cdk-lib/aws-events'; 7 | import { overrideProps, DefaultEventsRuleProps } from '@aws-solutions-constructs/core'; 8 | 9 | export interface CanaryEventsProps { 10 | targetArn: string; 11 | eventsRuleRole: iam.Role; 12 | eventBus?: events.IEventBus; 13 | } 14 | 15 | export class CanaryEvents extends Construct { 16 | constructor(scope: Construct, id: string, props: CanaryEventsProps) { 17 | super(scope, id); 18 | 19 | /** 20 | * Create CloudWatch Events Rule for Canary events 21 | */ 22 | const canaryEventPattern = { 23 | source: ['aws.cloudwatch'], 24 | 'detail-type': ['CloudWatch Alarm State Change'], 25 | detail: { 26 | state: { 27 | value: ['OK'] 28 | }, 29 | previousState: { 30 | value: ['ALARM'] 31 | }, 32 | configuration: { 33 | metrics: { 34 | metricStat: { 35 | metric: { 36 | namespace: ['CloudWatchSynthetics'] 37 | } 38 | } 39 | } 40 | } 41 | } 42 | }; 43 | 44 | const canaryAlarmTarget: events.IRuleTarget = { 45 | bind: () => ({ 46 | id: '', 47 | arn: props.targetArn, 48 | role: props.eventsRuleRole 49 | }) 50 | }; 51 | 52 | const canaryEventRuleProps = { 53 | description: 54 | 'DevOps Monitoring Dashboard on AWS solution - Event rule for Amazon CloudWatch Synthetics Canary Alarm', 55 | eventPattern: canaryEventPattern, 56 | enabled: true 57 | }; 58 | 59 | const defaultEventsRuleProps = DefaultEventsRuleProps([canaryAlarmTarget]); 60 | const eventsRuleProps = overrideProps(defaultEventsRuleProps, canaryEventRuleProps, true); 61 | 62 | // Use custom event bus for multi-account events ingestion 63 | if (props.eventBus !== undefined) { 64 | eventsRuleProps.eventBus = props.eventBus; 65 | } 66 | 67 | new events.Rule(this, 'CanaryEventsRule', eventsRuleProps); 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /source/lib/events/code_commit_events_construct.ts: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | import { Construct } from 'constructs'; 5 | import * as iam from 'aws-cdk-lib/aws-iam'; 6 | import * as events from 'aws-cdk-lib/aws-events'; 7 | import { overrideProps, DefaultEventsRuleProps } from '@aws-solutions-constructs/core'; 8 | 9 | export interface CodeCommitEventsProps { 10 | targetArn: string; 11 | eventsRuleRole: iam.Role; 12 | eventBus?: events.IEventBus; 13 | } 14 | 15 | export class CodeCommitEvents extends Construct { 16 | constructor(scope: Construct, id: string, props: CodeCommitEventsProps) { 17 | super(scope, id); 18 | 19 | /** 20 | * Create CloudWatch Events Rule for AWS CodeCommit 21 | */ 22 | const codeCommitEventRulePattern = { 23 | 'detail-type': ['AWS API Call via CloudTrail'], 24 | source: ['aws.codecommit'], 25 | detail: { 26 | eventName: ['PutFile', 'DeleteFile', 'UpdateFile', 'GitPush'] 27 | } 28 | }; 29 | 30 | const codeCommitEventRuleTarget: events.IRuleTarget = { 31 | bind: () => ({ 32 | id: '', 33 | arn: props.targetArn ?
props.targetArn : '', 34 | role: props.eventsRuleRole 35 | }) 36 | }; 37 | 38 | const codeCommitEventRuleProps = { 39 | description: 'DevOps Monitoring Dashboard on AWS solution - Event rule for AWS CodeCommit', 40 | eventPattern: codeCommitEventRulePattern, 41 | enabled: true 42 | }; 43 | 44 | const defaultCodeCommitEventsRuleProps = DefaultEventsRuleProps([codeCommitEventRuleTarget]); 45 | let CodeCommitEventsRuleProps = overrideProps(defaultCodeCommitEventsRuleProps, codeCommitEventRuleProps, true); 46 | 47 | // Use custom event bus for multi-account events ingestion 48 | if (props.eventBus !== undefined) { 49 | CodeCommitEventsRuleProps = { ...CodeCommitEventsRuleProps, eventBus: props.eventBus }; 50 | } 51 | 52 | new events.Rule(this, 'CodeCommitEventsRule', CodeCommitEventsRuleProps); 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /source/lib/events/code_deploy_events_construct.ts: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | import { Construct } from 'constructs'; 5 | import * as iam from 'aws-cdk-lib/aws-iam'; 6 | import * as events from 'aws-cdk-lib/aws-events'; 7 | import { overrideProps, DefaultEventsRuleProps } from '@aws-solutions-constructs/core'; 8 | 9 | export interface CodeDeployEventsProps { 10 | targetArn: string; 11 | eventsRuleRole: iam.Role; 12 | eventBus?: events.IEventBus; 13 | } 14 | 15 | export class CodeDeployEvents extends Construct { 16 | constructor(scope: Construct, id: string, props: CodeDeployEventsProps) { 17 | super(scope, id); 18 | 19 | /** 20 | * Create CloudWatch Events Rule for AWS CodeDeploy 21 | */ 22 | const codeDeployEventRulePattern = { 23 | source: ['aws.codedeploy'], 24 | 'detail-type': ['CodeDeploy Deployment State-change Notification'] 25 | }; 26 | 27 | const codeDeployEventRuleTarget: events.IRuleTarget = { 28 | bind: () => ({ 29 | id: '', 30 | arn: props.targetArn, 31 | role: props.eventsRuleRole 32 | }) 33 | }; 34 | 35 | const codeDeployEventRuleProps = { 36 | description: 'DevOps Monitoring Dashboard on AWS solution - Event rule for AWS CodeDeploy', 37 | eventPattern: codeDeployEventRulePattern, 38 | enabled: true 39 | }; 40 | 41 | const defaultCodeDeployEventsRuleProps = DefaultEventsRuleProps([codeDeployEventRuleTarget]); 42 | let codeDeployEventsRuleProps = overrideProps(defaultCodeDeployEventsRuleProps, codeDeployEventRuleProps, true); 43 | 44 | // Use custom event bus for multi-account events ingestion 45 | if (props.eventBus !== undefined) { 46 | codeDeployEventsRuleProps = { ...codeDeployEventsRuleProps, eventBus: props.eventBus }; 47 | } 48 | 49 | new events.Rule(this, 'CodeDeployEventsRule', codeDeployEventsRuleProps); 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /source/lib/events/code_pipeline_events_construct.ts: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | import { Construct } from 'constructs'; 5 | import * as iam from 'aws-cdk-lib/aws-iam'; 6 | import * as events from 'aws-cdk-lib/aws-events'; 7 | import { overrideProps, DefaultEventsRuleProps } from '@aws-solutions-constructs/core'; 8 | 9 | export interface CodePipelineEventsProps { 10 | targetArn: string; 11 | eventsRuleRole: iam.Role; 12 | eventBus?: events.IEventBus; 13 | } 14 | 15 | export class CodePipelineEvents extends Construct { 16 | constructor(scope: Construct, id: string, props: CodePipelineEventsProps) { 17 | super(scope, id); 18 | 19 | /** 20 | * Create CloudWatch Events Rule for AWS CodePipeline 21 | */ 22 | const codePipelineEventRulePattern = { 23 | source: ['aws.codepipeline'], 24 | 'detail-type': ['CodePipeline Action Execution State Change'] 25 | }; 26 | 27 | const codePipelineEventRuleTarget: events.IRuleTarget = { 28 | bind: () => ({ 29 | id: '', 30 | arn: props.targetArn ? props.targetArn : '', 31 | role: props.eventsRuleRole 32 | }) 33 | }; 34 | 35 | const codePipelineEventRuleProps = { 36 | description: 'DevOps Monitoring Dashboard on AWS solution - Event rule for AWS CodePipeline', 37 | eventPattern: codePipelineEventRulePattern, 38 | enabled: true 39 | }; 40 | 41 | const defaultCodePipelineEventsRuleProps = DefaultEventsRuleProps([codePipelineEventRuleTarget]); 42 | let codePipelineEventsRuleProps = overrideProps( 43 | defaultCodePipelineEventsRuleProps, 44 | codePipelineEventRuleProps, 45 | true 46 | ); 47 | 48 | // Use custom event bus for multi-account events ingestion 49 | if (props.eventBus !== undefined) { 50 | codePipelineEventsRuleProps = { ...codePipelineEventsRuleProps, eventBus: props.eventBus }; 51 | } 52 | 53 | new events.Rule(this, 'CodePipelineEventsRule', codePipelineEventsRuleProps); 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /source/lib/events/codepipeline_alarm_events_construct.ts: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | import { Construct } from 'constructs'; 5 | import * as iam from 'aws-cdk-lib/aws-iam'; 6 | import * as events from 'aws-cdk-lib/aws-events'; 7 | import { DefaultEventsRuleProps, overrideProps } from '@aws-solutions-constructs/core'; 8 | 9 | export interface CodePipelineAlarmEventsProps { 10 | targetArn: string; 11 | eventsRuleRole: iam.Role; 12 | solutionId: string; 13 | eventBus?: events.IEventBus; 14 | } 15 | 16 | export class CodePipelineAlarmEvents extends Construct { 17 | constructor(scope: Construct, id: string, props: CodePipelineAlarmEventsProps) { 18 | super(scope, id); 19 | 20 | /** 21 | * Create CloudWatch Events Rule for CodePipeline alarm events 22 | */ 23 | const codePipelineAlarmEventPattern = { 24 | source: ['aws.cloudwatch'], 25 | 'detail-type': ['CloudWatch Alarm State Change'], 26 | detail: { 27 | state: { 28 | value: ['OK'] 29 | }, 30 | previousState: { 31 | value: ['ALARM'] 32 | }, 33 | configuration: { 34 | metrics: { 35 | metricStat: { 36 | metric: { 37 | namespace: [`CodePipeline/${props.solutionId}/Pipelines`] 38 | } 39 | } 40 | } 41 | } 42 | } 43 | }; 44 | 45 | const codePipelineAlarmAlarmTarget: events.IRuleTarget = { 46 | bind: () => ({ 47 | id: '', 48 | arn: props.targetArn, 49 | role: props.eventsRuleRole 50 | }) 51 | }; 52 | 53 | const codePipelineAlarmEventRuleProps = { 54 | description: 'DevOps Monitoring Dashboard on AWS solution - Event rule for AWS CodePipeline Alarm', 55 | eventPattern: codePipelineAlarmEventPattern, 56 | enabled: true 57 | }; 58 | 59 | const defaultEventsRuleProps = DefaultEventsRuleProps([codePipelineAlarmAlarmTarget]); 60 | let eventsRuleProps = overrideProps(defaultEventsRuleProps, codePipelineAlarmEventRuleProps, true); 61 | 62 | // Use custom event bus for multi-account events ingestion 63 | if (props.eventBus !== undefined) { 64 | eventsRuleProps = { ...eventsRuleProps, eventBus: props.eventBus }; 65 | } 66 | 67 | new events.Rule(this, 'CodePipelineAlarmEventsRule', eventsRuleProps); 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /source/lib/github/github_stack.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | // SPDX-License-Identifier: Apache-2.0 4 | 5 | import { Construct } from 'constructs'; 6 | import { NestedStack, NestedStackProps } from 'aws-cdk-lib'; 7 | import { GitHubEvents } from './github_construct'; 8 | import { Runtime } from 'aws-cdk-lib/aws-lambda'; 9 | import { Bucket } from 'aws-cdk-lib/aws-s3'; 10 | import { ApplyCfnSuppressRulesToLogRetentionResource } from '../util/apply_to_construct'; 11 | 12 | export interface GitHubNestedStackProps extends NestedStackProps { 13 | readonly solutionId: string; 14 | readonly solutionVersion: string; 15 | readonly solutionName: string; 16 | readonly solutionDistBucket?: string; 17 | readonly solutionDistName?: string; 18 | readonly lambdaRuntimeNode: Runtime; 19 | readonly webhookSecretToken: string; 20 | readonly allowedIPs: string; 21 | readonly metricsBucket: Bucket | undefined; 22 | readonly uuid: string; 23 | readonly metricsGlueDBName: string; 24 | readonly gitHubMetricsGlueTableName: string; 25 | } 26 | 27 | export class GitHubStack extends NestedStack { 28 | private _gitHubEvents: GitHubEvents; 29 | 30 | constructor(scope: Construct, id: string, props: GitHubNestedStackProps) { 31 | super(scope, id, props); 32 | 33 | /** 34 | * Invoke github construct to create AWS resources 35 | */ 36 | this._gitHubEvents = new GitHubEvents(this, 'GitHubEvents', { 37 | solutionId: props.solutionId, 38 | solutionVersion: props.solutionVersion, 39 | solutionName: props.solutionName, 40 | solutionDistBucket: props.solutionDistBucket || '', 41 | solutionDistName: props.solutionDistName || '', 42 | lambdaRuntimeNode: props.lambdaRuntimeNode, 43 | webhookSecretToken: props.webhookSecretToken, 44 | allowedIPs: props.allowedIPs, 45 | metricsBucket: props.metricsBucket, 46 | uuid: props.uuid, 47 | metricsGlueDBName: props.metricsGlueDBName, 48 | gitHubMetricsGlueTableName: props.gitHubMetricsGlueTableName, 49 | userAgentExtra: `AwsSolution/${props.solutionId}/${props.solutionVersion}`, 50 | callingStack: this 51 | }); 52 | 53 | ApplyCfnSuppressRulesToLogRetentionResource(this, 'LogRetentionaae0aa3c5b4d4f87b02d85b201efdd8a'); 54 | } 55 | 56 | public get apiEndpointOutput(): string { 57 | return this._gitHubEvents.apiEndpoint; 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /source/lib/solution-helper/lambda-role-cloudwatch-construct.ts: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | import * as iam from 'aws-cdk-lib/aws-iam'; 5 | import * as cdk from 'aws-cdk-lib'; 6 | import { Construct } from 'constructs'; 7 | import { NagSuppressions } from 'cdk-nag'; 8 | 9 | export interface ExecutionRoleProps { 10 | readonly inlinePolicyName: string; 11 | readonly inlinePolicyDocument: iam.PolicyDocument; 12 | } 13 | 14 | export class ExecutionRole extends Construct { 15 | public readonly Role: iam.IRole; 16 | 17 | constructor(scope: Construct, id: string, props?: ExecutionRoleProps) { 18 | super(scope, id); 19 | 20 | const logsPolicy = new iam.PolicyStatement({ 21 | resources: [`arn:${cdk.Aws.PARTITION}:logs:${cdk.Aws.REGION}:${cdk.Aws.ACCOUNT_ID}:log-group:/aws/lambda/*`], 22 | actions: ['logs:CreateLogGroup', 'logs:CreateLogStream', 'logs:PutLogEvents'] 23 | }); 24 | 25 | const inlinePolicies = { 26 | CloudWatchLogsPolicy: new iam.PolicyDocument({ 27 | statements: [logsPolicy] 28 | }) 29 | }; 30 | 31 | if (props !== undefined) { 32 | (inlinePolicies as any)[props.inlinePolicyName] = props.inlinePolicyDocument; 33 | } 34 | 35 | this.Role = new iam.Role(this, 'Role', { 36 | assumedBy: new iam.ServicePrincipal('lambda.amazonaws.com'), 37 | inlinePolicies 38 | }); 39 | 40 | NagSuppressions.addResourceSuppressions(this.Role, [ 41 | { 42 | id: 'AwsSolutions-IAM5', 43 | reason: 44 | 'The policy is restricted to region, account and lambda resource.' 45 | } 46 | ]); 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /source/lib/solution-helper/solution-helper-construct.ts: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | import * as lambda from 'aws-cdk-lib/aws-lambda'; 5 | import * as cdk from 'aws-cdk-lib'; 6 | import { Construct } from 'constructs'; 7 | import { ExecutionRole } from './lambda-role-cloudwatch-construct'; 8 | import { addCfnSuppressRules } from '@aws-solutions-constructs/core'; 9 | import { NagSuppressions } from 'cdk-nag'; 10 | import { RetentionDays } from 'aws-cdk-lib/aws-logs'; 11 | 12 | export interface SolutionHelperProps { 13 | readonly solutionId: string; 14 | readonly version: string; 15 | readonly quickSightPrincipalARN: string; 16 | readonly athenaQueryDataDuration: string; 17 | readonly codeCommitRepo: string; 18 | } 19 | 20 | export class SolutionHelper extends Construct { 21 | private readonly _UuidCustomResource: cdk.CustomResource; 22 | public readonly solutionHelperLambda: lambda.Function; 23 | 24 | constructor(scope: Construct, id: string, props: SolutionHelperProps) { 25 | super(scope, id); 26 | 27 | const helperRole = new ExecutionRole(this, 'HelperRole'); 28 | 29 | const helperFunction = new lambda.Function(this, 'SolutionHelper', { 30 | runtime: lambda.Runtime.PYTHON_3_11, 31 | handler: 'lambda_function.handler', 32 | description: 'DevOps Monitoring Dashboard on AWS solution - This function generates UUID for each deployment.', 33 | role: helperRole.Role, 34 | code: lambda.Code.fromAsset(`${__dirname}/../../lambda/solution_helper`), 35 | timeout: cdk.Duration.seconds(300), 36 | environment: { 37 | UserAgentExtra: `AwsSolution/${props.solutionId}/${props.version}` 38 | }, 39 | logRetention: RetentionDays.THREE_MONTHS 40 | }); 41 | 42 | const refhelperFunction = helperFunction.node.findChild('Resource') as lambda.CfnFunction; 43 | addCfnSuppressRules(refhelperFunction, [ 44 | { 45 | id: 'W89', 46 
| reason: 'There is no need to run this lambda in a VPC' 47 | }, 48 | { 49 | id: 'W92', 50 | reason: 'There is no need for Reserved Concurrency' 51 | } 52 | ]); 53 | 54 | NagSuppressions.addResourceSuppressions(helperFunction, [ 55 | { 56 | id: 'AwsSolutions-L1', 57 | reason: 'Running Python 3.11.' 58 | } 59 | ]); 60 | 61 | this.solutionHelperLambda = helperFunction; 62 | 63 | this._UuidCustomResource = new cdk.CustomResource(this, 'CreateUniqueID', { 64 | serviceToken: helperFunction.functionArn, 65 | properties: { 66 | Resource: 'UUID' 67 | }, 68 | resourceType: 'Custom::CreateUUID' 69 | }); 70 | } 71 | 72 | public get UUIDCustomResource(): cdk.CustomResource { 73 | return this._UuidCustomResource; 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /source/lib/util/apply_to_construct.ts: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | import { CfnFunction } from 'aws-cdk-lib/aws-lambda'; 5 | import { Construct } from 'constructs'; 6 | import { Stack } from 'aws-cdk-lib'; 7 | import { CfnPolicy, CfnRole } from 'aws-cdk-lib/aws-iam'; 8 | import { addCfnSuppressRules } from '@aws-solutions-constructs/core'; 9 | import { NagSuppressions } from 'cdk-nag'; 10 | 11 | /** 12 | * 13 | * @param scope 14 | * @param id 15 | */ 16 | export function ApplyCfnSuppressRulesToLogRetentionResource(scope: Construct, id: string) { 17 | // Add rules to suppress to log retention lambda 18 | const logRetentionLambda = Stack.of(scope).node.tryFindChild(id)?.node.findChild('Resource') as CfnFunction; 19 | if (logRetentionLambda) { 20 | addCfnSuppressRules(logRetentionLambda, [ 21 | { 22 | id: 'W58', 23 | reason: 'Lambda has the required permission to write CloudWatch Logs through a custom policy.' 24 | }, 25 | { 26 | id: 'W89', 27 | reason: 'There is no need to deploy this Lambda to a VPC.' 28 | }, 29 | { 30 | id: 'W92', 31 | reason: 'There is no need for Reserved Concurrency.' 32 | } 33 | ]); 34 | } 35 | 36 | // Add rules to suppress to log retention lambda policy 37 | const logRetentionPolicy = Stack.of(scope) 38 | .node.tryFindChild(id) 39 | ?.node.tryFindChild('ServiceRole') 40 | ?.node.findChild('DefaultPolicy') 41 | .node.findChild('Resource') as CfnPolicy; 42 | if (logRetentionPolicy) { 43 | addCfnSuppressRules(logRetentionPolicy, [ 44 | { 45 | id: 'W12', 46 | reason: 47 | 'Resource * is required by the Lambda Execution role, so that the Lambda can add ResourcePolicies to all required resources.' 48 | } 49 | ]); 50 | NagSuppressions.addResourceSuppressions(logRetentionPolicy, [ 51 | { 52 | id: 'AwsSolutions-IAM5', 53 | reason: 54 | 'Resource * is required by the Lambda Execution role, so that the Lambda can add ResourcePolicies to all required resources.' 55 | } 56 | ]); 57 | } 58 | 59 | // Add cdk-nag suppression to log retention service role 60 | const logRetentionServiceRole = Stack.of(scope) 61 | .node.tryFindChild(id) 62 | ?.node.tryFindChild('ServiceRole') 63 | ?.node.findChild('Resource') as CfnRole; 64 | if (logRetentionServiceRole) { 65 | NagSuppressions.addResourceSuppressions(logRetentionServiceRole, [ 66 | { 67 | id: 'AwsSolutions-IAM4', 68 | reason: 'The managed policy is automatically generated by CDK itself to enable log retention.' 
69 | } 70 | ]); 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /source/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "aws_devops_monitoring_dashboard", 3 | "description": "CDK app to provision AWS resources for the solution", 4 | "version": "1.8.13", 5 | "bin": { 6 | "aws_devops_monitoring_dashboard": "bin/aws_devops_monitoring_dashboard.js" 7 | }, 8 | "author": { 9 | "name": "Amazon Web Services", 10 | "url": "https://aws.amazon.com/solutions" 11 | }, 12 | "license": "Apache-2.0", 13 | "scripts": { 14 | "cleanup": "tsc --build ./ --clean && rm -rf node_modules && rm -f package-lock.json", 15 | "build": "tsc", 16 | "watch": "tsc -w", 17 | "test": "jest --coverage", 18 | "cdk": "cdk", 19 | "clean": "rm -rf node_modules && rm -fr test/__snapshots__", 20 | "clean:install": "npm run clean && npm install", 21 | "lint": "eslint ./", 22 | "lint:fix": "eslint ./ --fix", 23 | "clean:lint": "npm run clean:install && npm run lint" 24 | }, 25 | "devDependencies": { 26 | "@aws-cdk/assert": "^2.68.0", 27 | "@aws-cdk/aws-servicecatalogappregistry-alpha": "2.171.0-alpha.0", 28 | "@types/jest": "^29.2.5", 29 | "@types/node": "^18.11.18", 30 | "aws-cdk": "^2.171.0", 31 | "eslint": "^8.44.0", 32 | "eslint-config-prettier": "^8.8.0", 33 | "eslint-config-react-app": "^7.0.1", 34 | "eslint-plugin-header": "^3.1.1", 35 | "eslint-plugin-import": "^2.27.5", 36 | "eslint-plugin-node": "^11.1.0", 37 | "eslint-plugin-prettier": "^4.2.1", 38 | "jest": "^29.6.1", 39 | "prettier": "^3.0.0", 40 | "ts-jest": "^29.1.1", 41 | "ts-node": "^10.9.1", 42 | "typescript": "^5.1.6" 43 | }, 44 | "dependencies": { 45 | "@aws-cdk/aws-glue-alpha": "^2.171.0-alpha", 46 | "@aws-solutions-constructs/aws-eventbridge-kinesisfirehose-s3": "2.54.0", 47 | "@aws-solutions-constructs/aws-eventbridge-lambda": "2.54.0", 48 | "@aws-solutions-constructs/aws-kinesisfirehose-s3": "2.54.0", 49 | "@aws-solutions-constructs/core": "2.54.0", 50 | "aws-cdk-lib": "^2.171.0", 51 | "cdk-nag": "^2.21.58", 52 | "constructs": "^10.2.69", 53 | "source-map-support": "^0.5.21" 54 | }, 55 | "overrides": { 56 | "semver": "~7.5.2" 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /source/test/__snapshots__/apply_to_construct.test.ts.snap: -------------------------------------------------------------------------------- 1 | // Jest Snapshot v1, https://goo.gl/fbAQLP 2 | 3 | exports[`adding cfn suppressing rules to lambda 1`] = ` 4 | { 5 | "Resources": { 6 | "ServiceRole": { 7 | "Properties": { 8 | "AssumeRolePolicyDocument": { 9 | "Statement": [ 10 | { 11 | "Action": "sts:AssumeRole", 12 | "Effect": "Allow", 13 | "Principal": { 14 | "Service": "lambda.amazonaws.com", 15 | }, 16 | }, 17 | ], 18 | "Version": "2012-10-17", 19 | }, 20 | "Path": "/", 21 | "Policies": [ 22 | { 23 | "PolicyDocument": { 24 | "Statement": [ 25 | { 26 | "Action": [ 27 | "logs:CreateLogGroup", 28 | "logs:CreateLogStream", 29 | "logs:PutLogEvents", 30 | ], 31 | "Effect": "Allow", 32 | "Resource": { 33 | "Fn::Join": [ 34 | "", 35 | [ 36 | "arn:", 37 | { 38 | "Ref": "AWS::Partition", 39 | }, 40 | ":logs:", 41 | { 42 | "Ref": "AWS::Region", 43 | }, 44 | ":", 45 | { 46 | "Ref": "AWS::AccountId", 47 | }, 48 | ":log-group:/aws/lambda/*", 49 | ], 50 | ], 51 | }, 52 | "Sid": "CreateCWLogs", 53 | }, 54 | ], 55 | "Version": "2012-10-17", 56 | }, 57 | "PolicyName": "LambdaPolicyName", 58 | }, 59 | ], 60 | }, 61 | "Type": "AWS::IAM::Role", 62 
| }, 63 | "testFunction483F4CBE": { 64 | "DependsOn": [ 65 | "ServiceRole", 66 | ], 67 | "Metadata": { 68 | "cfn_nag": { 69 | "rules_to_suppress": [ 70 | { 71 | "id": "W58", 72 | "reason": "Lambda has the required permission to write CloudWatch Logs through a custom policy.", 73 | }, 74 | { 75 | "id": "W89", 76 | "reason": "There is no need to deploy this Lambda to a VPC.", 77 | }, 78 | { 79 | "id": "W92", 80 | "reason": "There is no need for Reserved Concurrency.", 81 | }, 82 | ], 83 | }, 84 | }, 85 | "Properties": { 86 | "Code": { 87 | "ZipFile": "exports.handler = (event, context, callback) => {}", 88 | }, 89 | "Handler": "index.handler", 90 | "Role": { 91 | "Fn::GetAtt": [ 92 | "ServiceRole", 93 | "Arn", 94 | ], 95 | }, 96 | "Runtime": "nodejs20.x", 97 | }, 98 | "Type": "AWS::Lambda::Function", 99 | }, 100 | }, 101 | } 102 | `; 103 | -------------------------------------------------------------------------------- /source/test/apply_to_construct.test.ts: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | import { ResourcePart, SynthUtils } from '@aws-cdk/assert'; 5 | import '@aws-cdk/assert/jest'; 6 | import * as lambda from 'aws-cdk-lib/aws-lambda'; 7 | import * as cdk from 'aws-cdk-lib'; 8 | import { Role, ServicePrincipal, PolicyStatement, Effect, PolicyDocument, CfnRole } from 'aws-cdk-lib/aws-iam'; 9 | import { ApplyCfnSuppressRulesToLogRetentionResource } from '../lib/util/apply_to_construct'; 10 | 11 | test('adding cfn suppressing rules to lambda', () => { 12 | const app = new cdk.App(); 13 | const stack = new cdk.Stack(app); 14 | 15 | const cwLogsPS = new PolicyStatement({ 16 | actions: ['logs:CreateLogGroup', 'logs:CreateLogStream', 'logs:PutLogEvents'], 17 | effect: Effect.ALLOW, 18 | resources: [ 19 | stack.formatArn({ 20 | service: 'logs', 21 | resource: 'log-group', 22 | arnFormat: cdk.ArnFormat.COLON_RESOURCE_NAME, 23 | resourceName: '/aws/lambda/*' 24 | }) 25 | ], 26 | sid: 'CreateCWLogs' 27 | }); 28 | 29 | const lambdaRole = new Role(stack, 'ServiceRole', { 30 | assumedBy: new ServicePrincipal('lambda.amazonaws.com'), 31 | path: '/', 32 | inlinePolicies: { 33 | LambdaPolicyName: new PolicyDocument({ 34 | statements: [cwLogsPS] 35 | }) 36 | } 37 | }); 38 | 39 | const cfnLambdaRole = lambdaRole.node.defaultChild as CfnRole; 40 | 41 | cfnLambdaRole.overrideLogicalId('ServiceRole'); 42 | 43 | new lambda.Function(stack, 'testFunction', { 44 | code: lambda.Code.fromInline('exports.handler = (event, context, callback) => {}'), 45 | runtime: lambda.Runtime.NODEJS_20_X, 46 | handler: 'index.handler', 47 | role: lambdaRole 48 | }); 49 | 50 | ApplyCfnSuppressRulesToLogRetentionResource(stack, 'testFunction'); 51 | 52 | expect(SynthUtils.toCloudFormation(stack)).toMatchSnapshot(); 53 | 54 | expect(stack).toHaveResourceLike( 55 | 'AWS::Lambda::Function', 56 | { 57 | Metadata: { 58 | cfn_nag: { 59 | rules_to_suppress: [ 60 | { 61 | id: 'W58', 62 | reason: 'Lambda has the required permission to write CloudWatch Logs through a custom policy.' 63 | }, 64 | { 65 | id: 'W89', 66 | reason: 'There is no need to deploy this Lambda to a VPC.' 67 | }, 68 | { 69 | id: 'W92', 70 | reason: 'There is no need for Reserved Concurrency.' 
71 | } 72 | ] 73 | } 74 | } 75 | }, 76 | ResourcePart.CompleteDefinition 77 | ); 78 | }); 79 | -------------------------------------------------------------------------------- /source/test/aws_devops_monitoring_dashboard_stack.test.ts: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | import * as cdk from 'aws-cdk-lib'; 5 | import * as lambda from 'aws-cdk-lib/aws-lambda'; 6 | import { DevOpsDashboardStack } from '../lib/aws_devops_monitoring_dashboard_stack'; 7 | import { SynthUtils } from '@aws-cdk/assert'; 8 | import '@aws-cdk/assert/jest'; 9 | 10 | const SOLUTION_ID = 'SO0143'; 11 | const SOLUTION_NAME = 'DevOps Monitoring Dashboard on AWS'; 12 | const DIST_VERSION = 'v1.0.0'; 13 | const DIST_OUTPUT_BUCKET = 'devops_dashboard_test_bucket'; 14 | const DIST_SOLUTION_NAME = 'aws_devops_monitoring_dashboard'; 15 | const LAMBDA_RUNTIME_NODEJS = lambda.Runtime.NODEJS_20_X; 16 | 17 | /* 18 | * Snapshot test for devopsDashboardStack 19 | */ 20 | test('Snapshot test for primary devopsDashboardStack', () => { 21 | const app = new cdk.App(); 22 | const stack = new DevOpsDashboardStack(app, 'DevopsDashboardStack', { 23 | description: `(${SOLUTION_ID})${SOLUTION_NAME} - Main Template (Monitoring Account). Version: ${DIST_VERSION}`, 24 | solutionId: SOLUTION_ID, 25 | solutionVersion: DIST_VERSION, 26 | solutionName: SOLUTION_NAME, 27 | solutionDistBucket: DIST_OUTPUT_BUCKET, 28 | solutionDistName: DIST_SOLUTION_NAME, 29 | lambdaRuntimeNode: LAMBDA_RUNTIME_NODEJS, 30 | }); 31 | expect(SynthUtils.toCloudFormation(stack)).toMatchSnapshot(); 32 | }); 33 | -------------------------------------------------------------------------------- /source/test/canary_stack.test.ts: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | import * as cdk from 'aws-cdk-lib'; 5 | import { CanaryStack } from '../lib/deployment-helper/canary_alarm/canary_alarm_stack'; 6 | import { SynthUtils } from '@aws-cdk/assert'; 7 | import '@aws-cdk/assert/jest'; 8 | 9 | const SOLUTION_ID = 'SO0143'; 10 | const SOLUTION_NAME = 'DevOps Monitoring Dashboard on AWS'; 11 | const DIST_VERSION = 'v1.0.0'; 12 | const DIST_OUTPUT_BUCKET = 'devops_dashboard_test_bucket'; 13 | const DIST_SOLUTION_NAME = 'aws_devops_monitoring_dashboard'; 14 | 15 | /* 16 | * Snapshot test for Canary Alarm. 17 | */ 18 | test('Snapshot test for canary alarm', () => { 19 | const app = new cdk.App(); 20 | const stack = new CanaryStack(app, 'CanaryAlarm', { 21 | solutionId: SOLUTION_ID, 22 | solutionVersion: DIST_VERSION, 23 | solutionName: SOLUTION_NAME, 24 | solutionDistBucket: DIST_OUTPUT_BUCKET, 25 | solutionDistName: DIST_SOLUTION_NAME 26 | }); 27 | expect(SynthUtils.toCloudFormation(stack)).toMatchSnapshot(); 28 | }); 29 | -------------------------------------------------------------------------------- /source/test/codepipeline_alarm_stack.test.ts: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | import * as cdk from 'aws-cdk-lib'; 5 | import { Stack } from 'aws-cdk-lib'; 6 | import { PipelineAlarmStack } from '../lib/deployment-helper/codepipeline_alarm/codepipeline_alarm_stack'; 7 | import { expect as expectCDK, haveResourceLike, SynthUtils } from '@aws-cdk/assert'; 8 | import '@aws-cdk/assert/jest'; 9 | 10 | const SOLUTION_ID = 'SO0143'; 11 | const DIST_VERSION = 'v1.0.0'; 12 | 13 | /* 14 | * Snapshot test for codepipeline alarm stack. 15 | */ 16 | test('Snapshot test for codepipeline alarm stack', () => { 17 | const app = new cdk.App(); 18 | const stack = new PipelineAlarmStack(app, 'CodePipelineAlarm', { 19 | solutionId: SOLUTION_ID, 20 | solutionVersion: DIST_VERSION 21 | }); 22 | expect(SynthUtils.toCloudFormation(stack)).toMatchSnapshot(); 23 | }); 24 | 25 | describe('PipelineAlarmStack', () => { 26 | let stack: Stack; 27 | 28 | beforeAll(() => { 29 | const app = new cdk.App(); 30 | 31 | stack = new PipelineAlarmStack(app, 'CodePipelineAlarm', { 32 | solutionId: SOLUTION_ID, 33 | solutionVersion: DIST_VERSION 34 | }); 35 | }); 36 | 37 | const logGroupNameRef = { 38 | Ref: 'LogGroupName' 39 | }; 40 | 41 | test('it has a CloudWatch Alarm that checks for pipeline failures', () => { 42 | expectCDK(stack).to( 43 | haveResourceLike('AWS::CloudWatch::Alarm', { 44 | ComparisonOperator: 'GreaterThanOrEqualToThreshold', 45 | Threshold: 1 46 | }) 47 | ); 48 | }); 49 | 50 | test('it has a LogGroup for CodePipeline alarms', () => { 51 | expectCDK(stack).to( 52 | haveResourceLike('AWS::Logs::LogGroup', { 53 | LogGroupName: logGroupNameRef, 54 | RetentionInDays: 90 55 | }) 56 | ); 57 | }); 58 | 59 | test('it has an EventsRule for CodePipeline execution state changes', () => { 60 | expectCDK(stack).to( 61 | haveResourceLike('AWS::Events::Rule', { 62 | State: 'ENABLED' 63 | }) 64 | ); 65 | }); 66 | 67 | test('it has a MetricFilter checking for pipeline failures in the log group', () => { 68 | expectCDK(stack).to( 69 | haveResourceLike('AWS::Logs::MetricFilter', { 70 | LogGroupName: logGroupNameRef, 71 | FilterPattern: '{($.detail.state = "FAILED")}' 72 | }) 73 | ); 74 | }); 75 | }); 76 | -------------------------------------------------------------------------------- /source/test/sharing_account_stack.test.ts: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | import * as cdk from 'aws-cdk-lib'; 5 | import * as lambda from 'aws-cdk-lib/aws-lambda'; 6 | import { SharingAccountStack } from '../lib/multi-account-resources/sharing_account/sharing_account_stack'; 7 | import { SynthUtils } from '@aws-cdk/assert'; 8 | import '@aws-cdk/assert/jest'; 9 | 10 | const SOLUTION_ID = 'SO0143'; 11 | const SOLUTION_NAME = 'DevOps Monitoring Dashboard on AWS'; 12 | const DIST_VERSION = 'v1.0.0'; 13 | const DIST_OUTPUT_BUCKET = 'devops_dashboard_test_bucket'; 14 | const DIST_SOLUTION_NAME = 'aws_devops_monitoring_dashboard'; 15 | const LAMBDA_RUNTIME_NODEJS = lambda.Runtime.NODEJS_20_X; 16 | 17 | /* 18 | * Snapshot test for SharingAccountStack 19 | */ 20 | test('Snapshot test for primary SharingAccountStack', () => { 21 | const app = new cdk.App(); 22 | const stack = new SharingAccountStack(app, 'SharingAccountStack', { 23 | description: `(${SOLUTION_ID})${SOLUTION_NAME} - Sharing Account Template. 
Version: ${DIST_VERSION}`, 24 | solutionId: SOLUTION_ID, 25 | solutionVersion: DIST_VERSION, 26 | solutionName: SOLUTION_NAME, 27 | solutionDistBucket: DIST_OUTPUT_BUCKET, 28 | solutionDistName: DIST_SOLUTION_NAME, 29 | lambdaRuntimeNode: LAMBDA_RUNTIME_NODEJS, 30 | }); 31 | expect(SynthUtils.toCloudFormation(stack)).toMatchSnapshot(); 32 | }); 33 | -------------------------------------------------------------------------------- /source/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2018", 4 | "module": "commonjs", 5 | "lib": ["es2018"], 6 | "declaration": true, 7 | "strict": true, 8 | "noImplicitAny": true, 9 | "strictNullChecks": true, 10 | "noImplicitThis": true, 11 | "alwaysStrict": true, 12 | "noUnusedLocals": false, 13 | "noUnusedParameters": false, 14 | "noImplicitReturns": true, 15 | "noFallthroughCasesInSwitch": false, 16 | "inlineSourceMap": true, 17 | "inlineSources": true, 18 | "experimentalDecorators": true, 19 | "strictPropertyInitialization": false, 20 | "typeRoots": ["./node_modules/@types"] 21 | }, 22 | "exclude": ["cdk.out"] 23 | } --------------------------------------------------------------------------------