├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.md
│   │   └── feature_request.md
│   └── PULL_REQUEST_TEMPLATE.md
├── .gitignore
├── CHANGELOG.md
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE.txt
├── NOTICE.txt
├── README.md
├── deployment
│   ├── build-s3-dist.sh
│   ├── ecr
│   │   ├── README.md
│   │   └── prebid-server
│   │       ├── .dockerignore
│   │       ├── .gitignore
│   │       ├── Dockerfile
│   │       ├── bootstrap.sh
│   │       ├── current-config
│   │       │   └── README.md
│   │       ├── default-config
│   │       │   ├── README.md
│   │       │   ├── entrypoint.sh
│   │       │   ├── prebid-config.yaml
│   │       │   └── prebid-logging.xml
│   │       └── docker-build-config.json
│   ├── run-unit-tests.sh
│   └── venv_check.py
├── docs
│   └── prebid-server-deployment-on-aws.png
├── solution-manifest.yaml
└── source
    ├── .coveragerc
    ├── cdk_solution_helper_py
    │   ├── CHANGELOG.md
    │   ├── README.md
    │   ├── helpers_cdk
    │   │   ├── aws_solutions
    │   │   │   └── cdk
    │   │   │       ├── __init__.py
    │   │   │       ├── aspects.py
    │   │   │       ├── aws_lambda
    │   │   │       │   ├── __init__.py
    │   │   │       │   ├── cfn_custom_resources
    │   │   │       │   │   ├── __init__.py
    │   │   │       │   │   ├── resource_hash
    │   │   │       │   │   │   ├── __init__.py
    │   │   │       │   │   │   ├── hash.py
    │   │   │       │   │   │   └── src
    │   │   │       │   │   │       ├── __init__.py
    │   │   │       │   │   │       └── custom_resources
    │   │   │       │   │   │           ├── __init__.py
    │   │   │       │   │   │           ├── hash.py
    │   │   │       │   │   │           └── requirements.txt
    │   │   │       │   │   ├── resource_name
    │   │   │       │   │   │   ├── __init__.py
    │   │   │       │   │   │   ├── name.py
    │   │   │       │   │   │   └── src
    │   │   │       │   │   │       ├── __init__.py
    │   │   │       │   │   │       └── custom_resources
    │   │   │       │   │   │           ├── __init__.py
    │   │   │       │   │   │           ├── name.py
    │   │   │       │   │   │           └── requirements.txt
    │   │   │       │   │   └── solutions_metrics
    │   │   │       │   │       ├── __init__.py
    │   │   │       │   │       ├── metrics.py
    │   │   │       │   │       └── src
    │   │   │       │   │           ├── __init__.py
    │   │   │       │   │           └── custom_resources
    │   │   │       │   │               ├── __init__.py
    │   │   │       │   │               ├── metrics.py
    │   │   │       │   │               └── requirements.txt
    │   │   │       │   ├── environment.py
    │   │   │       │   ├── environment_variable.py
    │   │   │       │   ├── java
    │   │   │       │   │   ├── __init__.py
    │   │   │       │   │   ├── bundling.py
    │   │   │       │   │   └── function.py
    │   │   │       │   ├── layers
    │   │   │       │   │   ├── __init__.py
    │   │   │       │   │   └── aws_lambda_powertools
    │   │   │       │   │       ├── __init__.py
    │   │   │       │   │       ├── layer.py
    │   │   │       │   │       └── requirements
    │   │   │       │   │           └── requirements.txt
    │   │   │       │   └── python
    │   │   │       │       ├── __init__.py
    │   │   │       │       ├── bundling.py
    │   │   │       │       ├── function.py
    │   │   │       │       ├── lambda_alarm.py
    │   │   │       │       └── layer.py
    │   │   │       ├── cfn_nag.py
    │   │   │       ├── context.py
    │   │   │       ├── helpers
    │   │   │       │   ├── __init__.py
    │   │   │       │   ├── copytree.py
    │   │   │       │   ├── loader.py
    │   │   │       │   └── logger.py
    │   │   │       ├── interfaces.py
    │   │   │       ├── mappings.py
    │   │   │       ├── scripts
    │   │   │       │   ├── __init__.py
    │   │   │       │   └── build_s3_cdk_dist.py
    │   │   │       ├── stack.py
    │   │   │       ├── stepfunctions
    │   │   │       │   ├── __init__.py
    │   │   │       │   ├── solution_fragment.py
    │   │   │       │   └── solutionstep.py
    │   │   │       ├── synthesizers.py
    │   │   │       └── tools
    │   │   │           ├── __init__.py
    │   │   │           └── cleaner.py
    │   │   └── setup.py
    │   ├── helpers_common
    │   │   ├── aws_solutions
    │   │   │   ├── core
    │   │   │   │   ├── __init__.py
    │   │   │   │   ├── config.py
    │   │   │   │   ├── helpers.py
    │   │   │   │   └── logging.py
    │   │   │   └── extended
    │   │   │       └── resource_lookup.py
    │   │   └── setup.py
    │   └── requirements-dev.txt
    ├── infrastructure
    │   ├── __init__.py
    │   ├── app.py
    │   ├── aws_lambda_layers
    │   │   ├── __init__.py
    │   │   ├── aws_solutions
    │   │   │   ├── __init__.py
    │   │   │   ├── layer.py
    │   │   │   └── requirements
    │   │   │       └── requirements.txt
    │   │   ├── datasync_s3_layer
    │   │   │   ├── __init__.py
    │   │   │   └── python
    │   │   │       └── datasync_reports
    │   │   │           └── reports.py
    │   │   └── metrics_layer
    │   │       ├── __init__.py
    │   │       └── python
    │   │           ├── __init__.py
    │   │           └── cloudwatch_metrics
    │   │               ├── __init__.py
    │   │               └── metrics.py
    │   ├── cdk.json
    │   ├── custom_resources
    │   │   ├── __init__.py
    │   │   ├── artifacts_bucket_lambda
    │   │   │   ├── __init__.py
    │   │   │   ├── files
    │   │   │   │   ├── __init__.py
    │   │   │   │   └── glue
    │   │   │   │       ├── __init__.py
    │   │   │   │       └── metrics_glue_script.py
    │   │   │   ├── requirements.txt
    │   │   │   └── upload_files.py
    │   │   ├── cloudwatch_metrics
    │   │   │   ├── __init__.py
    │   │   │   ├── cloudwatch_metrics_report.py
    │   │   │   └── requirements.txt
    │   │   ├── docker_configs_bucket_lambda
    │   │   │   ├── __init__.py
    │   │   │   ├── requirements.txt
    │   │   │   └── upload_docker_config.py
    │   │   ├── enable_access_logs
    │   │   │   ├── __init__.py
    │   │   │   ├── enable_access_logs.py
    │   │   │   └── requirements.txt
    │   │   ├── header_secret_lambda
    │   │   │   ├── __init__.py
    │   │   │   ├── header_secret_gen.py
    │   │   │   └── requirements.txt
    │   │   ├── operational_metrics
    │   │   │   ├── __init__.py
    │   │   │   ├── ops_metrics.py
    │   │   │   └── requirements.txt
    │   │   ├── prefix_id_lambda
    │   │   │   ├── __init__.py
    │   │   │   ├── get_prefix_id.py
    │   │   │   └── requirements.txt
    │   │   ├── vpc_eni_lambda
    │   │   │   ├── __init__.py
    │   │   │   ├── delete_lambda_eni.py
    │   │   │   └── requirements.txt
    │   │   └── waf_webacl_lambda
    │   │       ├── __init__.py
    │   │       ├── create_waf_webacl.py
    │   │       ├── delete_waf_webacl.py
    │   │       └── requirements.txt
    │   ├── prebid_server
    │   │   ├── __init__.py
    │   │   ├── alb_access_logs_construct.py
    │   │   ├── alb_entry_deployment.py
    │   │   ├── app_registry_aspect.py
    │   │   ├── cloudfront_entry_deployment.py
    │   │   ├── cloudfront_waf_construct.py
    │   │   ├── cloudtrail_construct.py
    │   │   ├── cloudwatch_alarms_construct.py
    │   │   ├── cloudwatch_metrics_construct.py
    │   │   ├── condition_aspect.py
    │   │   ├── container_image_construct.py
    │   │   ├── docker_configs_construct.py
    │   │   ├── ecs_service_construct.py
    │   │   ├── ecs_task_construct.py
    │   │   ├── efs_cleanup_lambda
    │   │   │   ├── __init__.py
    │   │   │   ├── container_stop_logs.py
    │   │   │   └── delete_efs_files.py
    │   │   ├── efs_construct.py
    │   │   ├── glue_trigger_lambda
    │   │   │   ├── __init__.py
    │   │   │   └── start_glue_job.py
    │   │   ├── operational_metrics_construct.py
    │   │   ├── prebid_artifacts_constructs.py
    │   │   ├── prebid_datasync_constructs.py
    │   │   ├── prebid_glue_constructs.py
    │   │   ├── prebid_metrics_schema.json
    │   │   ├── prebid_server_stack.py
    │   │   ├── stack_constants.py
    │   │   └── vpc_construct.py
    │   └── setup.py
    ├── loadtest
    │   └── jmx
    │       ├── .gitignore
    │       ├── README.md
    │       └── prebid_server_test_plan.jmx
    ├── poetry.lock
    ├── pyproject.toml
    ├── pytest.ini
    ├── requirements-poetry.txt
    └── tests
        ├── conftest.py
        ├── functional_tests
        │   ├── README.md
        │   ├── conftest.py
        │   ├── requirements-test.txt
        │   ├── run-functional-tests.sh
        │   ├── test_bad_requests.py
        │   ├── test_metric_etl.py
        │   ├── test_prebid_auction.py
        │   └── test_prebid_server_endpoints.py
        ├── setup.py
        └── unit_tests
            ├── __init__.py
            ├── aws_lambda_layers
            │   ├── __init__.py
            │   ├── aws_solutions
            │   │   ├── __init__.py
            │   │   └── test_layer.py
            │   ├── datasync_s3_layer
            │   │   ├── __init__.py
            │   │   └── test_reports.py
            │   └── metrics_layer
            │       ├── __init__.py
            │       └── test_cloudwatch_metrics.py
            ├── custom_resources
            │   ├── __init__.py
            │   ├── test_cloudwatch_metrics_report.py
            │   ├── test_create_waf_webacl.py
            │   ├── test_delete_lambda_eni.py
            │   ├── test_delete_waf_webacl.py
            │   ├── test_get_prefix_id.py
            │   ├── test_header_secret_gen.py
            │   ├── test_metrics_glue_scripts.py
            │   ├── test_ops_metrics.py
            │   └── test_upload_files.py
            ├── prebid_server
            │   ├── __init__.py
            │   ├── test_container_stop_logs.py
            │   ├── test_delete_efs_files.py
            │   ├── test_prebid_artifacts_constructs.py
            │   ├── test_prebid_datasync_constructs.py
            │   ├── test_prebid_glue_constructs.py
            │   └── test_start_glue_job.py
            ├── test_app.py
            ├── test_commons.py
            └── test_prebid_server_template.py
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: bug
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the bug**
11 | A clear and concise description of what the bug is.
12 |
13 | **To Reproduce**
14 | Steps to reproduce the behavior.
15 |
16 | **Expected behavior**
17 | A clear and concise description of what you expected to happen.
18 |
19 | **Please complete the following information about the solution:**
20 | - [ ] Version: [e.g. v1.0.0]
21 |
22 | To get the version of the solution, you can look at the description of the created CloudFormation stack. For example, "_(SO0021) - Video On Demand workflow with AWS Step Functions, MediaConvert, MediaPackage, S3, CloudFront and DynamoDB. Version **v5.0.0**_". If you have not yet installed the stack, or are unable to install due to a problem, you can find the version and solution ID in the template with a text editor. Open the .template file and search for `SOLUTION_VERSION` in the content. You will find several matches and they will all be the same value:
23 |
24 | ```json
25 | "Environment": {
26 | "Variables": {
27 | "SOLUTION_ID": "SO0248",
28 | "SOLUTION_VERSION": "v1.1.2"
29 | }
30 | },
31 | ```
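32 |
33 | If you prefer the command line, one quick way to locate the value is a grep over the template (a sketch; substitute the name of the template file you downloaded):
34 |
35 | ```bash
36 | grep -m 1 '"SOLUTION_VERSION"' prebid-server-deployment-on-aws.template
37 | ```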
32 |
33 | This information is also provided in `source/infrastructure/cdk.json`:
34 |
35 | ```json
36 | "SOLUTION_ID": "SO0248",
37 | "SOLUTION_VERSION": "v1.1.2",
38 | ```
39 |
40 |
41 |
42 | - [ ] Region: [e.g. us-east-1]
43 | - [ ] Was the solution modified from the version published on this repository?
44 | - [ ] If the answer to the previous question was yes, are the changes available on GitHub?
45 | - [ ] Have you checked your [service quotas](https://docs.aws.amazon.com/general/latest/gr/aws_service_limits.html) for the services this solution uses?
46 | - [ ] Were there any errors in the CloudWatch Logs?
47 |
48 | **Screenshots**
49 | If applicable, add screenshots to help explain your problem (please **DO NOT include sensitive information**).
50 |
51 | **Additional context**
52 | Add any other context about the problem here.
53 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this solution
4 | title: ''
5 | labels: enhancement
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the feature you'd like**
14 | A clear and concise description of what you want to happen.
15 |
16 | **Additional context**
17 | Add any other context or screenshots about the feature request here.
18 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | *Issue #, if available:*
2 |
3 | *Description of changes:*
4 |
5 | By submitting this pull request, I confirm that you can use, modify, copy, and redistribute this contribution, under the terms of your choice.
6 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Modified based on https://www.gitignore.io/api/visualstudiocode,python
2 |
3 | # compiled output
4 | **/global-s3-assets
5 | **/regional-s3-assets
6 | **/build-s3-assets
7 | **/open-source
8 | **/tmp
9 |
10 | ### Python ###
11 | # Byte-compiled / optimized / DLL files
12 | __pycache__/
13 | *.py[cod]
14 | *$py.class
15 |
16 | # Python Distribution / packaging
17 | *.egg-info/
18 | *.egg
19 |
20 | # Python Virtual Environments
21 | **/venv*
22 | **/.venv*
23 | !deployment/venv_check.py
24 | .python-version
25 |
26 | ## Python Testing
27 | **/.pytest_cache
28 | **/.coverage
29 | **/coverage-reports/
30 | **/.coverage-*
31 | source/.coverage.*
32 |
33 | # linting, scanning configurations, sonarqube
34 | .scannerwork/
35 |
36 | ### VisualStudioCode ###
37 | .vscode/*
38 |
39 | ### IntelliJ/ PyCharm ###
40 | **/.idea/*
41 |
42 | # System Files
43 | **/.DS_Store
44 |
45 | # CDK
46 | **/cdk.out
47 |
48 | # Glue
49 | .glue/*
50 |
51 | # Generated test assets
52 | source/infrastructure/tests/assets/*
53 | !source/infrastructure/tests/assets/.keep
54 | source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/build
55 | source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/.gradle
56 | source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/.idea
57 |
58 | # gradle build files
59 | **/.gradle/*
60 |
61 | # java build files
62 | **/java/**/build
63 |
64 | # python build files
65 | source/cdk_solution_helper_py/helpers_cdk/build/*
66 | source/cdk_solution_helper_py/helpers_common/build/*
67 | source/scheduler/common/build/*
68 | source/scheduler/cdk/build/*
69 | source/aws_lambda/shared/util/build/*
70 |
71 | # various temporary file extensions
72 | *.bkp
73 | *.tmp
74 | *.sav
75 | *.dtmp
76 |
77 | # generated files
78 | source/infrastructure/custom_resources/docker_configs_bucket_lambda/current-config/
79 | source/infrastructure/custom_resources/docker_configs_bucket_lambda/default-config/
80 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Change Log
2 |
3 | All notable changes to this project will be documented in this file.
4 |
5 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
6 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
7 |
8 | ## [1.1.2] - 2025-05-22
9 |
10 | - Upgrade Prebid Server Java to v3.25.0
11 | - Upgrade Python dependencies
12 | - Fix anonymized metrics reporting Lambda
13 |
14 | ## [1.1.1] - 2025-03-07
15 |
16 | - Upgrade to Prebid Server v3.22 and underlying Docker base image
17 | - Optimized container image using jlink reducing image size from 774 MB to 142 MB
18 | - Change to Poetry for Python dependency management
19 | - Add script to run Prebid Server container locally with stack settings
20 |
21 | ## [1.1.0] - 2024-10-31
22 |
23 | - Upgrade to Prebid Server v3.13 and underlying Docker base image
24 | - ECS runtime logs in AWS CloudWatch instead of S3
25 | - Option to opt-out of installing CloudFront and WAF
26 | - Customize Prebid Server configuration through files in S3
27 | - Option to specify a custom container image
28 |
29 | ## [1.0.2] - 2024-09-23
30 |
31 | - Upgrade Python `requests` package to version 2.32.3 in requirements.txt
32 | - Bug fix for launch failure of EfsCleanupContainerStop Lambda function
33 |
34 | ## [1.0.1] - 2024-08-02
35 |
36 | - Remove python `setuptools` and `pip` from prebid server docker image
37 | - Include missing copyright header for `source/infrastructure/prebid_server/stack_constants.py`
38 |
39 | ## [1.0.0] - 2024-05-28
40 |
41 | ### Added
42 |
43 | - All files, initial version
44 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | ## Code of Conduct
2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
4 | opensource-codeofconduct@amazon.com with any additional questions or comments.
5 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing Guidelines
2 |
3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional
4 | documentation, we greatly value feedback and contributions from our community.
5 |
6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary
7 | information to effectively respond to your bug report or contribution.
8 |
9 |
10 | ## Reporting Bugs/Feature Requests
11 |
12 | We welcome you to use the GitHub issue tracker to report bugs or suggest features.
13 |
14 | When filing an issue, please check [existing open](https://github.com/aws-solutions/prebid-server-deployment-on-aws/issues), or [recently closed](https://github.com/aws-solutions/prebid-server-deployment-on-aws/issues?utf8=%E2%9C%93&q=is%3Aissue%20is%3Aclosed%20), issues to make sure somebody else hasn't already
15 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful:
16 |
17 | * A reproducible test case or series of steps
18 | * The version of our code being used
19 | * Any modifications you've made relevant to the bug
20 | * Anything unusual about your environment or deployment
21 |
22 |
23 | ## Contributing via Pull Requests
24 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that:
25 |
26 | 1. You are working against the latest source on the *main* branch.
27 | 2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already.
28 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted.
29 |
30 | To send us a pull request, please:
31 |
32 | 1. Fork the repository.
33 | 2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change.
34 | 3. Ensure all build processes execute successfully (see README.md for additional guidance).
35 | 4. Ensure all unit, integration, and/or snapshot tests pass, as applicable.
36 | 5. Commit to your fork using clear commit messages.
37 | 6. Send us a pull request, answering any default questions in the pull request interface.
38 | 7. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation.
39 |
40 | GitHub provides additional documentation on [forking a repository](https://help.github.com/articles/fork-a-repo/) and
41 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/).
42 |
43 |
44 | ## Finding contributions to work on
45 | Looking at the existing issues is a great way to find something to contribute to. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any ['help wanted'](https://github.com/aws-solutions/prebid-server-deployment-on-aws/labels/help%20wanted) issues is a great place to start.
46 |
47 |
48 | ## Code of Conduct
49 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
50 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
51 | opensource-codeofconduct@amazon.com with any additional questions or comments.
52 |
53 |
54 | ## Security issue notifications
55 | If you discover a potential security issue in this project, we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public GitHub issue.
56 |
57 |
58 | ## Licensing
59 |
60 | See the [LICENSE](https://github.com/aws-solutions/prebid-server-deployment-on-aws/blob/main/LICENSE.txt) file for our project's licensing. We will ask you to confirm the licensing of your contribution.
61 |
62 | We may ask you to sign a [Contributor License Agreement (CLA)](https://en.wikipedia.org/wiki/Contributor_License_Agreement) for larger changes.
63 |
--------------------------------------------------------------------------------
/NOTICE.txt:
--------------------------------------------------------------------------------
1 | Prebid Server Deployment on AWS
2 |
3 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
4 | Licensed under the Apache License Version 2.0 (the "License"). You may not use this file except
5 | in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/
6 | or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
7 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or implied. See the License for the
8 | specific language governing permissions and limitations under the License.
9 |
10 | **********************
11 | THIRD PARTY COMPONENTS
12 | **********************
13 |
14 | This software includes third party software subject to the following copyrights:
15 |
16 | alpinelinux/docker-alpine under the MIT license.
17 | github.com/prebid/prebid-server-java under the Apache-2.0 license.
18 | attrs under the MIT license.
19 | aws-cdk-asset-awscli-v1 under the Apache-2.0 license.
20 | aws-cdk-asset-kubectl-v20 under the Apache-2.0 license.
21 | aws-cdk-asset-node-proxy-agent-v6 under the Apache-2.0 license.
22 | aws-cdk-cloud-assembly-schema under the Apache-2.0 license.
23 | aws-cdk-lib under the Apache-2.0 license.
24 | aws-lambda-powertools under the MIT license.
25 | aws-solutions-cdk under the Apache-2.0 license.
26 | aws-solutions-python under the Apache-2.0 license.
27 | boto3 under the Apache-2.0 license.
28 | botocore under the Apache-2.0 license.
29 | cattrs under the MIT license.
30 | cdk-nag under the Apache-2.0 license.
31 | certifi under the MPL-2.0 license.
32 | cffi under the MIT license.
33 | charset-normalizer under the MIT license.
34 | click under the 0BSD license.
35 | colorama under the 0BSD license.
36 | constructs under the Apache-2.0 license.
37 | coverage under the Apache-2.0 license.
38 | crhelper under the Apache-2.0 license.
39 | cryptography under the Apache-2.0 license.
40 | docker under the Apache-2.0 license.
41 | idna under the 0BSD license.
42 | importlib-resources under the Apache-2.0 license.
43 | infrastructure under the MIT license.
44 | iniconfig under the MIT license.
45 | jinja2 under the 0BSD license.
46 | jmespath under the MIT license.
47 | jsii under the Apache-2.0 license.
48 | markupsafe under the 0BSD license.
49 | moto under the Apache-2.0 license.
50 | packaging under the Apache-2.0 license.
51 | pip under the MIT license.
52 | pluggy under the MIT license.
53 | publication under the MIT license.
54 | pycparser under the 0BSD license.
55 | pyparsing under the MIT license.
56 | pytest under the MIT license.
57 | pytest-cov under the MIT license.
58 | pytest-env under the MIT license.
59 | pytest-mock under the MIT license.
60 | pytest-ordering under the MIT license.
61 | python-dateutil under the Apache-2.0 license.
62 | pywin32 under the PSF-2.0 license.
63 | pyyaml under the MIT license.
64 | requests under the Apache-2.0 license.
65 | responses under the Apache-2.0 license.
66 | s3transfer under the Apache-2.0 license.
67 | six under the MIT license.
68 | typeguard under the MIT license.
69 | typing-extensions under the PSF-2.0 license.
70 | unit-tests under the Apache-2.0 license.
71 | url-normalize under the MIT license.
72 | urllib3 under the MIT license.
73 | werkzeug under the 0BSD license.
74 | xmltodict under the MIT license.
75 |
76 | ********************
77 | OPEN SOURCE LICENSES
78 | ********************
79 |
80 | 0BSD - http://landley.net/toybox/license.html
81 | Apache-2.0 - https://www.apache.org/licenses/LICENSE-2.0
82 | PSF-2.0 - https://opensource.org/licenses/Python-2.0
83 | MIT - https://opensource.org/license/mit
84 | MPL-2.0 - https://www.mozilla.org/en-US/MPL/2.0/
85 |
--------------------------------------------------------------------------------
/deployment/ecr/README.md:
--------------------------------------------------------------------------------
1 | ### Building Prebid Server Locally
2 |
3 | ```bash
4 | cd ./deployment/ecr/prebid-server
5 | docker build --no-cache -t prebid-server .
6 | ```
7 |
8 |
9 | ### Prebid Server Version and Docker Config
10 | You can update the `prebid-server-java` release tag in `deployment/ecr/prebid-server/docker-build-config.json`. The tag must exactly match a tag on the source GitHub repository `https://github.com/prebid/prebid-server-java/tags`. After changing the tag value, run:
11 |
12 | ```bash
13 | cd ./deployment/ecr/prebid-server
14 | docker build --no-cache -t prebid-server .
15 | ```
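16 |
17 | Before building, you can verify that the configured tag exists upstream (a quick check, shown here with the tag currently set in `docker-build-config.json`; the command prints nothing if the tag is not found):
18 |
19 | ```bash
20 | git ls-remote --tags https://github.com/prebid/prebid-server-java.git "3.25.0"
21 | ```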
16 |
17 | You can rebuild the entire stack with the new Prebid Server version using instructions in the main `README.md`.
18 |
19 | ### Running Prebid Server Locally
20 |
21 | The `./deployment/run-local-prebid.sh` script helps you run a local instance of Prebid Server using configuration files stored in AWS.
22 |
23 | #### Prerequisites
24 |
25 | Before using this script, ensure you have:
26 |
27 | 1. A deployed CloudFormation stack containing the Prebid Server configuration bucket
28 | - The bucket must contain the necessary configuration files
29 | - The bucket's logical ID must contain "ConfigFilesBucket"
30 |
31 | 2. A local Docker image of Prebid Server
32 | - Image must be tagged as `prebid-server` (or specify custom tag with `--tag`)
33 | - Build the image using the Dockerfile in `./deployment/ecr/prebid-server/`
34 |
35 | 3. AWS credentials with permissions to:
36 | - List CloudFormation stack resources (if using stack lookup)
37 | - List and read objects from the configuration S3 bucket
38 |
39 | 4. Required tools:
40 | - AWS CLI configured with credentials
41 | - Docker installed and running
42 | - Bash shell
43 |
44 | #### Basic Usage
45 |
46 | Run using CloudFormation stack name:
47 | ```bash
48 | ./run-local-prebid.sh --stack MyStackName
49 | ```
50 |
51 | Run using bucket name directly:
52 | ```bash
53 | ./run-local-prebid.sh --bucket my-config-bucket
54 | ```
55 |
56 | #### Additional Options
57 |
58 | - Mount a local directory for logs:
59 | ```bash
60 | ./run-local-prebid.sh --stack MyStackName --volume /path/to/logs
61 | ```
62 |
63 | - Specify custom ports:
64 | ```bash
65 | ./run-local-prebid.sh --stack MyStackName --app-port 38080 --mgmt-port 38060
66 | ```
67 |
68 | - Use specific region and image tag:
69 | ```bash
70 | ./run-local-prebid.sh --stack MyStackName --region us-west-2 --tag custom-tag
71 | ```
72 |
73 | #### Default Ports
74 |
75 | - Main application: `28080` (container port 8080)
76 | - Management interface: `28060` (container port 8060)
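77 |
78 | Once the container is running, a minimal smoke test against the mapped application port (assuming the default mapping above and the `status-response` value set in `default-config/prebid-config.yaml`):
79 |
80 | ```bash
81 | curl http://localhost:28080/status
82 | # expected response body: ok
83 | ```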
77 |
78 | #### Notes
79 |
80 | - The script will copy AWS credentials from your environment or AWS CLI configuration
81 | - Container logs are written to stdout/stderr unless a volume is mounted
82 | - Use `--help` to see all available options
83 |
--------------------------------------------------------------------------------
/deployment/ecr/prebid-server/.dockerignore:
--------------------------------------------------------------------------------
1 | # Version control
2 | .git/
3 | .gitignore
4 | .gitattributes
5 |
6 | # Build outputs
7 | target/
8 | build/
9 | dist/
10 | *.class
11 | *.jar
12 | **/.gradle/*
13 | **/java/**/build
14 |
15 | # Logs and databases
16 | *.log
17 | /metrics/*
18 | bootstrap.log
19 |
20 | # OS generated files
21 | .DS_Store
22 |
23 | # Documentation
24 | *.md
25 | docs/
26 | README*
27 | CHANGELOG*
28 | LICENSE*
29 |
30 | # Development configs
31 | .env
32 |
33 |
34 | # Maven
35 | .mvn/
36 |
--------------------------------------------------------------------------------
/deployment/ecr/prebid-server/.gitignore:
--------------------------------------------------------------------------------
1 | /logs/*
2 | /metrics/*
3 | bootstrap.log
4 | sample/configs/prebid-config.yaml
5 | .env
6 | nw-current-config/
7 | amt-bidder/
--------------------------------------------------------------------------------
/deployment/ecr/prebid-server/bootstrap.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
4 | # SPDX-License-Identifier: Apache-2.0
5 |
6 | # -----------------------------------------------------------------------------------------------------------------------------------------------------------------
7 | # PURPOSE:
8 | # * Download Prebid Server configuration files and scripts from an S3 bucket.
9 | # * The S3 bucket name is obtained from the environment variable DOCKER_CONFIGS_S3_BUCKET_NAME.
10 | # * The configuration files are downloaded into a local /prebid-configs directory.
11 | # * The default and current configuration files are fetched from two specific prefixes in the S3 bucket.
12 | # * After download, the script verifies that essential configuration files exist.
13 | # * The entrypoint script is then executed to start Prebid Server inside the container.
14 | # -----------------------------------------------------------------------------------------------------------------------------------------------------------------
15 |
16 | set -euo pipefail
17 |
18 | # Set variables
19 | PREBID_CONFIGS_DIR="prebid-configs"
20 | ENTRYPOINT_SCRIPT="entrypoint.sh"
21 | REQUIRED_CONFIG_FILES="${ENTRYPOINT_SCRIPT} prebid-config.yaml prebid-logging.xml"
22 | ENTRYPOINT_DIR="../${PREBID_CONFIGS_DIR}"
23 |
24 | # Check if the S3 bucket environment variable is set
25 | if [ -z "${DOCKER_CONFIGS_S3_BUCKET_NAME:-}" ]; then
26 | echo "Error: DOCKER_CONFIGS_S3_BUCKET_NAME environment variable is not set"
27 | exit 1
28 | else
29 | # Define S3 paths
30 | DEFAULT_S3_PATH="s3://${DOCKER_CONFIGS_S3_BUCKET_NAME}/prebid-server/default/"
31 | CURRENT_S3_PATH="s3://${DOCKER_CONFIGS_S3_BUCKET_NAME}/prebid-server/current/"
32 |
33 | echo "Cleaning up and recreating ${ENTRYPOINT_DIR}"
34 | rm -rvf "${ENTRYPOINT_DIR}" || { echo "Failed to remove ${ENTRYPOINT_DIR}"; exit 1; }
35 | mkdir -pv "${ENTRYPOINT_DIR}" || { echo "Failed to create ${ENTRYPOINT_DIR}"; exit 1; }
36 |
37 | # Download default Prebid configuration files from S3
38 | echo "Downloading default configuration files from S3 bucket: ${DEFAULT_S3_PATH}"
39 | if aws s3 cp "$DEFAULT_S3_PATH" "$ENTRYPOINT_DIR" --recursive --exclude "README.md"; then
40 | echo "Successfully downloaded default configuration files"
41 | else
42 | echo "Failed to download default configuration files"
43 | exit 1
44 | fi
45 |
46 | # Download current Prebid configuration files from S3 (ignore if missing)
47 | echo "Downloading current configuration files from S3 bucket: ${CURRENT_S3_PATH}"
48 | if aws s3 cp "$CURRENT_S3_PATH" "$ENTRYPOINT_DIR" --recursive --exclude "README.md"; then
49 | echo "Successfully downloaded current configuration files"
50 | else
51 | echo "Warning: Failed to download current configuration files, proceeding without them"
52 | fi
53 | fi
54 |
55 | # Check if all required configuration files exist
56 | for required_config_file in $REQUIRED_CONFIG_FILES; do
57 | echo "Checking if ${required_config_file} exists"
58 | if [ ! -f "${ENTRYPOINT_DIR}/${required_config_file}" ]; then
59 | echo "Error: Required configuration file ${required_config_file} is missing"
60 | exit 1
61 | fi
62 | done
63 |
64 | # Execute the entrypoint script to start Prebid Server
65 | echo "Executing ${ENTRYPOINT_SCRIPT}"
66 | sh "${ENTRYPOINT_DIR}/${ENTRYPOINT_SCRIPT}" || { echo "Failed to execute ${ENTRYPOINT_SCRIPT}"; exit 1; }
67 |
--------------------------------------------------------------------------------
/deployment/ecr/prebid-server/default-config/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
4 | # SPDX-License-Identifier: Apache-2.0
5 |
6 | # -----------------------------------------------------------------------------
7 | # PURPOSE:
8 | # This entrypoint script is used to start the Prebid Server container.
9 | #
10 | # An environment variable named ECS_CONTAINER_METADATA_URI_V4
11 | # is injected by ECS into each container. The variable contains a URI that
12 | # is used to retrieve container status and data.
13 | #
14 | # See:
15 | # https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-metadata-endpoint-v4.html
16 | #
17 | # The entrypoint defined below retrieves the data and parses the
18 | # container's unique ID from it and uses the ID to ensure
19 | # log data is written to a unique directory under /mnt/efs/.
20 | # The container ID is also included with logs sent directly
21 | # to CloudWatch.
22 | #
23 | # If the environment variable ECS_CONTAINER_METADATA_URI_V4 is not set,
24 | # the string "default-container-id" is returned instead so that the
25 | # container can be run locally.
26 | #
27 | # Metrics are written to the /mnt/efs/metrics folder, also using the container ID
28 | # in the path. Metrics files are named prebid-metrics.log.
29 | #
30 | # The default Java executable entry point specified in this script can be
31 | # customized or replaced with a different command or executable.
32 | # ------------------------------------------------------------------------------
33 |
34 | PREBID_CONFIGS_DIR="/prebid-configs"
35 |
36 | /usr/bin/java \
37 | -DcontainerId=$(if [ -z "$ECS_CONTAINER_METADATA_URI_V4" ]; then echo "default-container-id"; else curl -s "${ECS_CONTAINER_METADATA_URI_V4}/task" | jq -r '.Containers[0].DockerId' 2>/dev/null | cut -d'-' -f1 || echo "default-container-id"; fi) \
38 | -Dlogging.config=${PREBID_CONFIGS_DIR}/prebid-logging.xml \
39 | -XX:+UseParallelGC \
40 | -jar target/prebid-server.jar \
41 | --spring.config.additional-location=${PREBID_CONFIGS_DIR}/prebid-config.yaml
42 |
--------------------------------------------------------------------------------
/deployment/ecr/prebid-server/default-config/prebid-config.yaml:
--------------------------------------------------------------------------------
1 | status-response: "ok"
2 | adapters:
3 | appnexus:
4 | enabled: true
5 | ix:
6 | enabled: true
7 | openx:
8 | enabled: true
9 | pubmatic:
10 | enabled: true
11 | rubicon:
12 | enabled: true
13 | metrics:
14 | logback:
15 | enabled: true
16 | name: METRICS
17 | interval: 30
18 | cache:
19 | scheme: http
20 | host: localhost
21 | path: /cache
22 | query: uuid=
23 | settings:
24 | enforce-valid-account: false
25 | generate-storedrequest-bidrequest-id: true
26 | filesystem:
27 | settings-filename: sample/configs/sample-app-settings.yaml
28 | stored-requests-dir: sample
29 | stored-imps-dir: sample
30 | stored-responses-dir: sample
31 | categories-dir:
32 | gdpr:
33 | default-value: 1
34 | vendorlist:
35 | v2:
36 | cache-dir: /var/tmp/vendor2
37 | v3:
38 | cache-dir: /var/tmp/vendor3
39 | admin-endpoints:
40 | logging-changelevel:
41 | enabled: true
42 | path: /logging/changelevel
43 | on-application-port: true
44 | protected: false
45 |
--------------------------------------------------------------------------------
/deployment/ecr/prebid-server/default-config/prebid-logging.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!--
3 |   Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
4 |   SPDX-License-Identifier: Apache-2.0
5 |
6 |   Logback configuration for Prebid Server. The XML element structure was lost
7 |   in extraction; the layout below is a conventional Logback reconstruction
8 |   around the surviving values (the System.out target, the JSON log patterns,
9 |   the EFS metrics file paths, and the 100MB rollover size).
10 | -->
11 | <configuration>
12 |     <!-- CONTAINER_ID is resolved from the -DcontainerId system property set by entrypoint.sh -->
13 |     <property name="CONTAINER_ID" value="${containerId}" />
14 |
15 |     <!-- Console appender: JSON-formatted runtime logs to stdout -->
16 |     <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
17 |         <target>System.out</target>
18 |         <encoder>
19 |             <pattern>{"timestamp":"%d{yyyy-MM-dd'T'HH:mm:ss.SSSZ}", "level":"%p", "logger":"%logger", "thread":"%t", "message":"%msg", "containerId":"${CONTAINER_ID}"}%n</pattern>
20 |         </encoder>
21 |     </appender>
22 |
23 |     <!-- Rolling file appender: metrics written under a per-container directory on EFS -->
24 |     <appender name="METRICS" class="ch.qos.logback.core.rolling.RollingFileAppender">
25 |         <file>/mnt/efs/metrics/${CONTAINER_ID}/prebid-metrics.log</file>
26 |         <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
27 |             <fileNamePattern>/mnt/efs/metrics/${CONTAINER_ID}/archived/prebid-metrics.%d{yyyy-MM-dd_HH}.%i.log.gz</fileNamePattern>
28 |             <maxFileSize>100MB</maxFileSize>
29 |         </rollingPolicy>
30 |         <encoder>
31 |             <pattern>{"timestamp":"%d{yyyy-MM-dd'T'HH:mm:ss.SSSZ}", "level":"%p", "logger":"%logger", "thread":"%t", "message":"%msg", "containerId":"${CONTAINER_ID}"}%n</pattern>
32 |         </encoder>
33 |     </appender>
34 |
35 |     <!-- "METRICS" matches metrics.logback.name in prebid-config.yaml -->
36 |     <logger name="METRICS" level="info" additivity="false">
37 |         <appender-ref ref="METRICS" />
38 |     </logger>
39 |
40 |     <root level="info">
41 |         <appender-ref ref="CONSOLE" />
42 |     </root>
43 | </configuration>
--------------------------------------------------------------------------------
/deployment/ecr/prebid-server/docker-build-config.json:
--------------------------------------------------------------------------------
1 | {
2 | "GIT_TAG_VERSION": "3.25.0",
3 | "MVN_CLI_OPTIONS": "-Dmaven.test.skip"
4 | }
5 |
6 |
--------------------------------------------------------------------------------
/deployment/venv_check.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
3 | # SPDX-License-Identifier: Apache-2.0
4 | """
5 | This program returns 0 if the current environment is a virtual environment.
6 | """
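7 | # Example (shell): `python3 deployment/venv_check.py && echo in-venv` prints
8 | # "in-venv" only when run from inside a virtual environment.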
7 | import sys
8 |
9 | # compare the python prefixes, same == not venv
10 | IN_VENV = (getattr(sys, "base_prefix", None) or getattr(
11 | sys, "real_prefix", None) or sys.prefix) != sys.prefix
12 | # return success (0) if in a venv
13 | sys.exit(IN_VENV is False)
14 |
--------------------------------------------------------------------------------
/docs/prebid-server-deployment-on-aws.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/prebid-server-deployment-on-aws/b51ae1a1c2278aeb15a265b36eef4035160b4f3e/docs/prebid-server-deployment-on-aws.png
--------------------------------------------------------------------------------
/solution-manifest.yaml:
--------------------------------------------------------------------------------
1 | id: SO0248
2 | name: prebid-server-deployment-on-aws
3 | version: v1.1.2
4 | cloudformation_templates:
5 | - template: prebid-server-deployment-on-aws.template
6 | main_template: true
7 | build_environment:
8 | build_image: 'aws/codebuild/standard:7.0'
9 | container_images:
10 | - prebid-server
11 |
--------------------------------------------------------------------------------
/source/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit =
3 | infrastructure/setup.py
4 | infrastructure/cdk.out/*
5 | tests/*
6 | cdk_solution_helper_py/*
7 | loadtest/*
8 | setup.py
9 | source =
10 | infrastructure
11 |
12 | [report]
13 | fail_under = 82
14 | exclude_lines =
15 | setuptools
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Change Log
2 | All notable changes to this project will be documented in this file.
3 |
4 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
5 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
6 |
7 | ## [2.0.0] - 2022-01-31
8 | ### Changed
9 | - support for CDK 2.x added, support for CDK 1.x removed
10 |
11 | ## [1.0.0] - 2021-09-23
12 | ### Added
13 | - initial release
14 |
15 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | from pathlib import Path
5 |
6 | from aws_solutions.cdk.context import SolutionContext
7 | from aws_solutions.cdk.stack import SolutionStack
8 | from aws_solutions.cdk.synthesizers import SolutionStackSubstitions
9 |
10 |
11 | class CDKSolution:
12 | """
13 | A CDKSolution stores helper utilities for building AWS Solutions using the AWS CDK in Python
14 |
15 | :type cdk_json_path: Path
16 | :param cdk_json_path: The full path to the cdk.json context for your application
17 | :type qualifier: str
18 | :param qualifier: A string that is added to all resources in the CDK bootstrap stack. The default value has no significance.
19 | """
20 |
21 | def __init__(self, cdk_json_path: Path, qualifier="hnb659fds"):
22 | self.qualifier = qualifier
23 | self.context = SolutionContext(cdk_json_path=cdk_json_path)
24 | self.synthesizer = SolutionStackSubstitions(qualifier=self.qualifier)
25 |
26 | def reset(self) -> None:
27 | """
28 | Get a new synthesizer for this CDKSolution - useful for testing
29 | :return: None
30 | """
31 | self.synthesizer = SolutionStackSubstitions(qualifier=self.qualifier, generate_bootstrap_version_rule=False)
32 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aspects.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | import jsii
4 | from aws_cdk import CfnCondition, IAspect
5 | from constructs import IConstruct
6 |
7 |
8 | @jsii.implements(IAspect)
9 | class ConditionalResources:
10 | """Mark any CDK construct as conditional (this is useful to apply to stacks and L2+ constructs)"""
11 |
12 | def __init__(self, condition: CfnCondition):
13 | self.condition = condition
14 |
15 | def visit(self, node: IConstruct):
16 | if "is_cfn_element" in dir(node) and node.is_cfn_element(node):
17 | node.cfn_options.condition = self.condition
18 | elif "is_cfn_element" in dir(node.node.default_child):
19 | node.node.default_child.cfn_options.condition = self.condition
20 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_hash/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | from aws_solutions.cdk.aws_lambda.cfn_custom_resources.resource_hash.hash import (
5 | ResourceHash,
6 | )
7 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_hash/hash.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | from pathlib import Path
4 |
5 | from aws_cdk import (
6 | CfnResource,
7 | Stack,
8 | )
9 | from constructs import Construct
10 |
11 | from aws_solutions.cdk.aws_lambda.python.function import SolutionsPythonFunction
12 | from aws_solutions.cdk.cfn_nag import add_cfn_nag_suppressions, CfnNagSuppression
13 |
14 |
15 | class ResourceHash(Construct):
16 | """Used to create unique resource names based on the hash of the stack ID"""
17 |
18 | def __init__(
19 | self,
20 | scope: Construct,
21 | construct_id: str,
22 | purpose: str,
23 | max_length: int,
24 | ):
25 | super().__init__(scope, construct_id)
26 |
27 | uuid = "ResourceHashFunction-b8785f53-1531-4bfb-a119-26aa638d7b19"
28 | stack = Stack.of(self)
29 | self._resource_name_function = stack.node.try_find_child(uuid)
30 |
31 | if not self._resource_name_function:
32 | self._resource_name_function = SolutionsPythonFunction(
33 | stack,
34 | uuid,
35 | entrypoint=Path(__file__).parent
36 | / "src"
37 | / "custom_resources"
38 | / "hash.py",
39 | function="handler",
40 | )
41 | add_cfn_nag_suppressions(
42 | resource=self._resource_name_function.node.default_child,
43 | suppressions=[
44 | CfnNagSuppression(
45 | "W89", "This AWS Lambda Function is not deployed to a VPC"
46 | ),
47 | CfnNagSuppression(
48 | "W92",
49 | "This AWS Lambda Function does not require reserved concurrency",
50 | ),
51 | ],
52 | )
53 |
54 | properties = {
55 | "ServiceToken": self._resource_name_function.function_arn,
56 | "Purpose": purpose,
57 | "MaxLength": max_length,
58 | }
59 |
60 | self.logical_name = f"{construct_id}HashResource"
61 | self.resource_name_resource = CfnResource(
62 | self,
63 | self.logical_name,
64 | type="Custom::ResourceHash",
65 | properties=properties,
66 | )
67 |
68 | @property
69 | def resource_name(self):
70 | return self.resource_name_resource.get_att("Name")
71 |
72 | @property
73 | def resource_id(self):
74 | return self.resource_name_resource.get_att("Id")
75 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_hash/src/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_hash/src/custom_resources/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_hash/src/custom_resources/hash.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | import logging
4 | from hashlib import md5
5 | from os import getenv
6 |
7 | from crhelper import CfnResource
8 |
9 | logger = logging.getLogger(__name__)
10 | helper = CfnResource(log_level=getenv("LOG_LEVEL", "WARNING"))
11 |
12 |
13 | class StackId:
14 | def __init__(self, event):
15 | self.stack_id = event.get("StackId")
16 | self.partition = self.get_arn_component(1)
17 | self.service = self.get_arn_component(2)
18 | self.region = self.get_arn_component(3)
19 | self.account = self.get_arn_component(4)
20 | self.stack_name = self.get_arn_component(5).split("/")[1]
21 |
22 | def get_arn_component(self, idx: int) -> str:
23 | return self.stack_id.split(":")[idx]
24 |
25 | @property
26 | def hash(self):
27 | # NOSONAR - safe to hash, not for cryptographic purposes
28 | digest = md5() # nosec
29 | digest.update(bytes(f"{self.stack_id.rsplit('/', 1)[0]}", "ascii"))
30 | return digest.hexdigest().upper()
31 |
32 |
33 | def get_property(event, property_name, property_default=None):
34 | resource_prop = event.get("ResourceProperties", {}).get(
35 | property_name, property_default
36 | )
37 | if not resource_prop:
38 | raise ValueError(f"missing required property {property_name}")
39 | return resource_prop
40 |
41 |
42 | @helper.create
43 | def generate_hash(event, _):
44 | """
45 | Generate a resource name containing a hash of the stack ID (without unique ID) and resource purpose.
46 | This is useful when you need to create named IAM roles
47 |
48 | :param event: The CloudFormation custom resource event
49 | :return: None
50 | """
51 | stack_id = StackId(event)
52 | purpose = get_property(event, "Purpose")
53 | max_length = int(get_property(event, "MaxLength", 64))
54 |
55 | name = f"{purpose}-{stack_id.hash[:8]}"
56 |
57 | if len(name) > max_length:
58 | raise ValueError(
59 | f"the derived resource name {name} is too long ({len(name)} / {max_length}) - please use a shorter Purpose"
60 | )
61 |
62 | logger.info(f"the derived resource name is {name}")
63 | helper.Data["Name"] = name
64 | helper.Data["Id"] = stack_id.hash
65 |
66 |
67 | @helper.update
68 | @helper.delete
69 | def no_op(_, __):
70 | pass # pragma: no cover
71 |
72 |
73 | def handler(event, _):
74 | """
75 | Handler entrypoint - see generate_hash for implementation details
76 | :param event: The CloudFormation custom resource event
77 | :return: PhysicalResourceId
78 | """
79 | helper(event, _) # pragma: no cover
80 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_hash/src/custom_resources/requirements.txt:
--------------------------------------------------------------------------------
1 | crhelper==2.0.6
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_name/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | from aws_solutions.cdk.aws_lambda.cfn_custom_resources.resource_name.name import (
5 | ResourceName,
6 | )
7 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_name/name.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | from pathlib import Path
4 | from typing import Optional
5 |
6 | from aws_cdk import (
7 | CfnResource,
8 | Aws,
9 | Stack,
10 | )
11 | from constructs import Construct
12 |
13 | from aws_solutions.cdk.aws_lambda.python.function import SolutionsPythonFunction
14 | from aws_solutions.cdk.cfn_nag import add_cfn_nag_suppressions, CfnNagSuppression
15 |
16 |
17 | class ResourceName(Construct):
18 | """Used to create unique resource names of the format {stack_name}-{purpose}-{id}"""
19 |
20 | def __init__(
21 | self,
22 | scope: Construct,
23 | construct_id: str,
24 | purpose: str,
25 | max_length: int,
26 | resource_id: Optional[str] = None,
27 | ):
28 | super().__init__(scope, construct_id)
29 |
30 | uuid = "ResourceNameFunction-d45b185a-fe34-44ab-a375-17f89597d9ec"
31 | stack = Stack.of(self)
32 | self._resource_name_function = stack.node.try_find_child(uuid)
33 |
34 | if not self._resource_name_function:
35 | self._resource_name_function = SolutionsPythonFunction(
36 | stack,
37 | uuid,
38 | entrypoint=Path(__file__).parent
39 | / "src"
40 | / "custom_resources"
41 | / "name.py",
42 | function="handler",
43 | )
44 | add_cfn_nag_suppressions(
45 | resource=self._resource_name_function.node.default_child,
46 | suppressions=[
47 | CfnNagSuppression(
48 | "W89", "This AWS Lambda Function is not deployed to a VPC"
49 | ),
50 | CfnNagSuppression(
51 | "W92",
52 | "This AWS Lambda Function does not require reserved concurrency",
53 | ),
54 | ],
55 | )
56 |
57 | properties = {
58 | "ServiceToken": self._resource_name_function.function_arn,
59 | "Purpose": purpose,
60 | "StackName": Aws.STACK_NAME,
61 | "MaxLength": max_length,
62 | }
63 | if resource_id:
64 | properties["Id"] = resource_id
65 |
66 | self.logical_name = f"{construct_id}NameResource"
67 | self.resource_name_resource = CfnResource(
68 | self,
69 | self.logical_name,
70 | type="Custom::ResourceName",
71 | properties=properties,
72 | )
73 |
74 | @property
75 | def resource_name(self):
76 | return self.resource_name_resource.get_att("Name")
77 |
78 | @property
79 | def resource_id(self):
80 | return self.resource_name_resource.get_att("Id")
81 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_name/src/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_name/src/custom_resources/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_name/src/custom_resources/name.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | import logging
4 | from os import getenv
5 | from uuid import uuid4 as uuid
6 |
7 | from crhelper import CfnResource
8 |
9 | logger = logging.getLogger(__name__)
10 | helper = CfnResource(log_level=getenv("LOG_LEVEL", "WARNING"))
11 |
12 |
13 | def get_property(event, property_name, property_default=None):
14 | resource_prop = event.get("ResourceProperties", {}).get(
15 | property_name, property_default
16 | )
17 | if not resource_prop:
18 | raise ValueError(f"missing required property {property_name}")
19 | return resource_prop
20 |
21 |
22 | @helper.create
23 | def generate_name(event, _):
24 | """
25 | Generate a resource name containing the stack name and the resource purpose. This is useful
26 | when you need to associate policies that refer to a resource by name (and thus need
27 | a predictable resource name). This is commonly used when associating policies with buckets
28 | or other resources that might introduce a circular resource dependency
29 |
30 | :param event: The CloudFormation custom resource event
31 | :return: None
32 | """
33 | resource_id = get_property(event, "Id", uuid().hex[0:12])
34 | stack_name = get_property(event, "StackName")
35 | purpose = get_property(event, "Purpose")
36 | max_length = int(get_property(event, "MaxLength"))
37 |
38 | name = f"{stack_name}-{purpose}-{resource_id}".lower()
39 | if len(name) > max_length:
40 | logger.warning("cannot use stack name in bucket name - trying default")
41 | name = f"{purpose}-{resource_id}".lower()
42 | if len(name) > max_length:
43 | raise ValueError(
44 | f"the derived resource name {name} is too long ({len(name)} / {max_length}) - please use a shorter purpose or stack name"
45 | )
46 |
47 | logger.info(f"the derived resource name is {name}")
48 | helper.Data["Name"] = name
49 | helper.Data["Id"] = resource_id
50 |
51 |
52 | @helper.update
53 | @helper.delete
54 | def no_op(_, __):
55 | pass # pragma: no cover
56 |
57 |
58 | def handler(event, _):
59 | """
60 | Handler entrypoint - see generate_name for implementation details
61 | :param event: The CloudFormation custom resource event
62 | :return: PhysicalResourceId
63 | """
64 | helper(event, _) # pragma: no cover
65 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_name/src/custom_resources/requirements.txt:
--------------------------------------------------------------------------------
1 | crhelper==2.0.6
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/solutions_metrics/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | from aws_solutions.cdk.aws_lambda.cfn_custom_resources.solutions_metrics.metrics import (
5 | Metrics,
6 | )
7 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/solutions_metrics/metrics.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import aws_cdk.aws_lambda as lambda_
5 |
6 | from pathlib import Path
7 | from typing import Dict
8 |
9 | from aws_cdk import (
10 | CfnResource,
11 | Fn,
12 | CfnCondition,
13 | Aws,
14 | )
15 | from constructs import Construct
16 |
17 | from aws_solutions.cdk.aws_lambda.python.function import SolutionsPythonFunction
18 | from aws_solutions.cdk.cfn_nag import add_cfn_nag_suppressions, CfnNagSuppression
19 |
20 | from cdk_nag import NagSuppressions
21 |
22 | class Metrics(Construct):
23 | """Used to track anonymous solution deployment metrics."""
24 |
25 | def __init__(
26 | self,
27 | scope: Construct,
28 | construct_id: str,
29 | metrics: Dict[str, str],
30 | ):
31 | super().__init__(scope, construct_id)
32 |
33 | if not isinstance(metrics, dict):
34 | raise ValueError("metrics must be a dictionary")
35 |
36 | self._metrics_function = SolutionsPythonFunction(
37 | self,
38 | "MetricsFunction",
39 | entrypoint=Path(__file__).parent
40 | / "src"
41 | / "custom_resources"
42 | / "metrics.py",
43 | function="handler",
44 | runtime=lambda_.Runtime.PYTHON_3_11,
45 | )
46 | add_cfn_nag_suppressions(
47 | resource=self._metrics_function.node.default_child,
48 | suppressions=[
49 | CfnNagSuppression(
50 | "W89", "This AWS Lambda Function is not deployed to a VPC"
51 | ),
52 | CfnNagSuppression(
53 | "W92",
54 | "This AWS Lambda Function does not require reserved concurrency",
55 | ),
56 | ],
57 | )
58 |
59 | self._send_anonymous_usage_data = CfnCondition(
60 | self,
61 | "SendAnonymizedData",
62 | expression=Fn.condition_equals(
63 | Fn.find_in_map("Solution", "Data", "SendAnonymizedData"), "Yes"
64 | ),
65 | )
66 | self._send_anonymous_usage_data.override_logical_id("SendAnonymizedData")
67 |
68 | properties = {
69 | "ServiceToken": self._metrics_function.function_arn,
70 | "Solution": self.node.try_get_context("SOLUTION_ID"),
71 | "Version": self.node.try_get_context("SOLUTION_VERSION"),
72 | "Region": Aws.REGION,
73 | **metrics,
74 | }
75 | self.solution_metrics = CfnResource(
76 | self,
77 | "SolutionMetricsAnonymousData",
78 | type="Custom::AnonymousData",
79 | properties=properties,
80 | )
81 | self.solution_metrics.override_logical_id("SolutionMetricsAnonymousData")
82 | self.solution_metrics.cfn_options.condition = self._send_anonymous_usage_data
83 |
84 | NagSuppressions.add_resource_suppressions(
85 | self._metrics_function.role,
86 | [
87 | {
88 | "id": 'AwsSolutions-IAM5',
89 | "reason": '* Resources will be suppred by cdk nag and it has to be not suppressed',
90 | "appliesTo": ['Resource::arn::logs:::log-group:/aws/lambda/*']
91 | },
92 | ],
93 | )
--------------------------------------------------------------------------------
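A hedged sketch of wiring this construct into a stack (the solution id, construct ids, and metric keys are illustrative; the Mappings helper from this package supplies the Solution/Data mapping that the SendAnonymizedData condition reads):

    from aws_cdk import App, Stack
    from aws_solutions.cdk.mappings import Mappings
    from aws_solutions.cdk.aws_lambda.cfn_custom_resources.solutions_metrics import Metrics

    app = App(context={"SOLUTION_ID": "SO0000", "SOLUTION_VERSION": "v0.0.1"})
    stack = Stack(app, "ExampleStack")
    Mappings(stack, solution_id="SO0000")  # creates the "Solution" CfnMapping
    Metrics(stack, "Metrics", {"DeploymentType": "example"})
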
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/solutions_metrics/src/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/solutions_metrics/src/custom_resources/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/solutions_metrics/src/custom_resources/metrics.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 |
5 | import logging
6 | import uuid
7 | from datetime import datetime
8 | from os import getenv
9 |
10 | import requests
11 | from crhelper import CfnResource
12 |
13 | logger = logging.getLogger(__name__)
14 | helper = CfnResource(log_level=getenv("LOG_LEVEL", "WARNING"))
15 | METRICS_ENDPOINT = "https://metrics.awssolutionsbuilder.com/generic"
16 |
17 |
18 | def _sanitize_data(event):
19 | resource_properties = event["ResourceProperties"]
20 | # Remove ServiceToken (lambda arn) to avoid sending AccountId
21 | resource_properties.pop("ServiceToken", None)
22 | resource_properties.pop("Resource", None)
23 |
24 | # Solution ID and unique ID are sent separately
25 | resource_properties.pop("Solution", None)
26 | resource_properties.pop("UUID", None)
27 |
28 | # Add some useful fields related to stack change
29 | resource_properties["CFTemplate"] = (
30 | event["RequestType"] + "d"
31 | ) # Created, Updated, or Deleted
32 |
33 | return resource_properties
34 |
35 |
36 | @helper.create
37 | @helper.update
38 | @helper.delete
39 | def send_metrics(event, _):
40 | resource_properties = event["ResourceProperties"]
41 | random_id = event.get("PhysicalResourceId", str(uuid.uuid4()))
42 | helper.Data["UUID"] = random_id
43 |
44 | try:
45 | headers = {"Content-Type": "application/json"}
46 | payload = {
47 | "Solution": resource_properties["Solution"],
48 | "UUID": random_id,
49 | "TimeStamp": datetime.utcnow().isoformat(),
50 | "Data": _sanitize_data(event),
51 | }
52 |
53 | logger.info(f"Sending payload: {payload}")
54 | response = requests.post(METRICS_ENDPOINT, json=payload, headers=headers, timeout=5)
55 | logger.info(
56 | f"Response from metrics endpoint: {response.status_code} {response.reason}"
57 | )
58 | if "stackTrace" in response.text:
59 | logger.error("Error submitting usage data: %s" % response.text)
60 | # raise when there is an HTTP error (non success code)
61 | response.raise_for_status()
62 | except requests.exceptions.RequestException as exc:
63 | logger.exception(f"Could not send usage data: {exc}")
64 | except Exception as exc:
65 | logger.exception(f"Unknown error when trying to send usage data: {exc}")
66 |
67 | return random_id
68 |
69 |
70 | def handler(event, context):
71 | helper(event, context) # pragma: no cover
72 |
--------------------------------------------------------------------------------
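For reference, a sketch of how _sanitize_data reshapes an incoming event (the event below is illustrative):

    event = {
        "RequestType": "Create",
        "ResourceProperties": {
            "ServiceToken": "arn:aws:lambda:us-east-1:111111111111:function:metrics",
            "Solution": "SO0000",
            "Region": "us-east-1",
            "Foo": "Bar",
        },
    }
    # ServiceToken, Resource, Solution, and UUID are removed and the request type
    # is recorded, leaving:
    # {"Region": "us-east-1", "Foo": "Bar", "CFTemplate": "Created"}
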
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/solutions_metrics/src/custom_resources/requirements.txt:
--------------------------------------------------------------------------------
1 | requests>=2.32.3
2 | urllib3<2.0.0
3 | crhelper==2.0.6
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/environment.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | from dataclasses import dataclass, field
5 |
6 | from aws_cdk import Aws
7 | from aws_cdk.aws_lambda import IFunction
8 |
9 | from aws_solutions.cdk.aws_lambda.environment_variable import EnvironmentVariable
10 |
11 |
12 | @dataclass
13 | class Environment:
14 | """
15 | Tracks environment variables common to AWS Lambda functions deployed by this solution
16 | """
17 |
18 | scope: IFunction
19 | solution_name: EnvironmentVariable = field(init=False, repr=False)
20 | solution_id: EnvironmentVariable = field(init=False, repr=False)
21 | solution_version: EnvironmentVariable = field(init=False, repr=False)
22 | log_level: EnvironmentVariable = field(init=False, repr=False)
23 | powertools_service_name: EnvironmentVariable = field(init=False, repr=False)
24 |
25 | def __post_init__(self):
26 | cloudwatch_namespace_id = f"data_connectors_solution_{Aws.STACK_NAME}"
27 | cloudwatch_service_id_default = "Workflow"
28 |
29 | self.solution_name = EnvironmentVariable(self.scope, "SOLUTION_NAME")
30 | self.solution_id = EnvironmentVariable(self.scope, "SOLUTION_ID")
31 | self.solution_version = EnvironmentVariable(self.scope, "SOLUTION_VERSION")
32 | self.log_level = EnvironmentVariable(self.scope, "LOG_LEVEL", "INFO")
33 | self.powertools_service_name = EnvironmentVariable(
34 | self.scope, "POWERTOOLS_SERVICE_NAME", cloudwatch_service_id_default
35 | )
36 | self.powertools_metrics_namespace = EnvironmentVariable(
37 | self.scope, "POWERTOOLS_METRICS_NAMESPACE", cloudwatch_namespace_id
38 | )
39 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/environment_variable.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | from dataclasses import dataclass, field
5 |
6 | from aws_cdk.aws_lambda import IFunction
7 |
8 |
9 | @dataclass
10 | class EnvironmentVariable:
11 | scope: IFunction
12 | name: str
13 | value: str = field(default="")
14 |
15 | def __post_init__(self):
16 | if not self.value:
17 | self.value = self.scope.node.try_get_context(self.name)
18 | self.scope.add_environment(self.name, self.value)
19 |
20 | def __str__(self):
21 | return self.value
22 |
--------------------------------------------------------------------------------
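A minimal sketch of attaching the common variables to a function via the Environment dataclass above (context values and the inline handler are illustrative):

    from aws_cdk import App, Stack, aws_lambda as lambda_
    from aws_solutions.cdk.aws_lambda.environment import Environment

    app = App(context={"SOLUTION_NAME": "example", "SOLUTION_ID": "SO0000",
                       "SOLUTION_VERSION": "v0.0.1"})
    stack = Stack(app, "EnvExample")
    fn = lambda_.Function(
        stack, "Fn",
        runtime=lambda_.Runtime.PYTHON_3_11,
        handler="index.handler",
        code=lambda_.Code.from_inline("def handler(event, context): return event"),
    )
    Environment(fn)  # adds SOLUTION_*, LOG_LEVEL, and POWERTOOLS_* variables
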
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/java/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/java/bundling.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | import logging
4 | import shutil
5 | import subprocess
6 | from pathlib import Path
7 | from typing import Union, Dict, Optional
8 |
9 | import jsii
10 | from aws_cdk import ILocalBundling, BundlingOptions
11 |
12 | from aws_solutions.cdk.helpers import copytree
13 |
14 | logger = logging.getLogger("cdk-helper")
15 |
16 |
17 | class UnsupportedBuildEnvironment(Exception):
18 | pass
19 |
20 |
21 | @jsii.implements(ILocalBundling)
22 | class SolutionsJavaBundling:
23 | """This interface allows AWS Solutions to package lambda functions for Java without the use of Docker"""
24 |
25 | def __init__(
26 | self,
27 | to_bundle: Path,
28 | gradle_task: str,
29 | distribution_path: Path,
30 | gradle_test: Optional[str] = None,
31 | ):
32 | self.to_bundle = to_bundle
33 | self.gradle_task = gradle_task
34 | self.gradle_test = gradle_test
35 | self.distribution_path = distribution_path
36 |
37 | def try_bundle(self, output_dir: str, options: BundlingOptions) -> bool:
38 | source = Path(self.to_bundle).absolute()
39 |
40 | is_gradle_build = (source / "gradlew").exists()
41 | if not is_gradle_build:
42 | raise UnsupportedBuildEnvironment("please use a gradle project")
43 |
44 | # Run Tests
45 | if self.gradle_test:
46 | self._invoke_local_command(
47 | name="gradle",
48 | command=["./gradlew", self.gradle_test],
49 | cwd=source,
50 | )
51 |
52 | # Run Build
53 | self._invoke_local_command(
54 | name="gradle",
55 | command=["./gradlew", self.gradle_task],
56 | cwd=source,
57 | )
58 |
59 | # if the distribution path is a directory, it should contain exactly one jar or zip
60 | if self.distribution_path.is_dir():
61 | children = [child for child in self.distribution_path.iterdir()]
62 | if len(children) != 1:
63 | raise ValueError(
64 | "if the distribution path is a path it should only contain one jar or zip file"
65 | )
66 | if children[0].suffix not in (".jar", ".zip"):
67 | raise ValueError(
68 | "the distribution path does not include a single .jar or .zip file"
69 | )
70 | copytree(self.distribution_path, output_dir)
71 | elif self.distribution_path.is_file():
72 | suffix = self.distribution_path.suffix
73 | if suffix not in (".jar", ".zip"):
74 | raise ValueError("the distribution file is not a .zip or .jar file")
75 | shutil.copy(self.distribution_path, output_dir)
76 |
77 | return True
78 |
79 | def _invoke_local_command(
80 | self,
81 | name,
82 | command,
83 | env: Union[Dict, None] = None,
84 | cwd: Union[str, Path, None] = None,
85 | return_stdout: bool = False,
86 | ):
87 |
88 | cwd = Path(cwd)
89 | rv = ""
90 |
91 | with subprocess.Popen(
92 | command,
93 | shell=False,
94 | stdout=subprocess.PIPE,
95 | universal_newlines=True,
96 | cwd=cwd,
97 | env=env,
98 | ) as p:
99 |             for line in p.stdout:
100 |                 logger.info("%s %s: %s" % (self.to_bundle.name, name, line.rstrip()))
101 |                 if return_stdout:
102 |                     rv += line
103 |
104 |         # Popen.__exit__ waits for the process to finish, so the return code
105 |         # is only reliable once the with block has exited
106 |         if p.returncode != 0:
107 |             raise subprocess.CalledProcessError(p.returncode, p.args)
108 |
109 |         return rv.strip()
110 |
--------------------------------------------------------------------------------
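A hedged sketch of pairing this bundling with a Lambda code asset, mirroring the local-bundling pattern used elsewhere in this package (the project path and Gradle task are illustrative):

    from pathlib import Path
    from aws_cdk import BundlingOptions, DockerImage
    from aws_cdk.aws_lambda import Code
    from aws_solutions.cdk.aws_lambda.java.bundling import SolutionsJavaBundling

    project = Path("functions/my-java-fn")  # must contain ./gradlew
    bundling = SolutionsJavaBundling(
        to_bundle=project,
        gradle_task="buildZip",
        distribution_path=project / "build" / "distributions",
    )
    code = Code.from_asset(
        path=str(project),
        bundling=BundlingOptions(
            image=DockerImage.from_registry("scratch"),  # never used - bundling is local
            local=bundling,
        ),
    )
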
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/layers/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/layers/aws_lambda_powertools/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | from aws_solutions.cdk.aws_lambda.layers.aws_lambda_powertools.layer import (
5 | PowertoolsLayer,
6 | )
7 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/layers/aws_lambda_powertools/layer.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | from pathlib import Path
5 |
6 | from aws_cdk import Stack
7 | from constructs import Construct
8 |
9 | from aws_solutions.cdk.aws_lambda.python.layer import SolutionsPythonLayerVersion
10 |
11 |
12 | class PowertoolsLayer(SolutionsPythonLayerVersion):
13 | def __init__(self, scope: Construct, construct_id: str, **kwargs):
14 | requirements_path: Path = Path(__file__).absolute().parent / "requirements"
15 | super().__init__(scope, construct_id, requirements_path, **kwargs)
16 |
17 | @staticmethod
18 | def get_or_create(scope: Construct, **kwargs):
19 | stack = Stack.of(scope)
20 | construct_id = "PowertoolsLayer-8E932F0F-197D-4026-A354-23D184C2A624"
21 | exists = stack.node.try_find_child(construct_id)
22 | if exists:
23 | return exists
24 | return PowertoolsLayer(stack, construct_id, **kwargs)
25 |
--------------------------------------------------------------------------------
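Typical usage goes through get_or_create, which returns one shared layer per stack; a short sketch (fn is an assumed lambda_.Function):

    from aws_solutions.cdk.aws_lambda.layers.aws_lambda_powertools import PowertoolsLayer

    layer = PowertoolsLayer.get_or_create(fn)  # same instance on repeated calls
    fn.add_layers(layer)
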
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/layers/aws_lambda_powertools/requirements/requirements.txt:
--------------------------------------------------------------------------------
1 | aws-lambda-powertools>=1.24.0
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/python/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/python/lambda_alarm.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | from constructs import Construct
5 | from aws_cdk import Duration
6 | import aws_cdk.aws_lambda as lambda_
7 | import aws_cdk.aws_cloudwatch as cloudwatch
8 | import aws_cdk.aws_iam as iam
9 |
10 | from aws_solutions.cdk.cfn_nag import add_cfn_nag_suppressions, CfnNagSuppression
11 |
12 |
13 | # Alarm used for Solution lambda functions to alarm when lambda error or quota throttle
14 | class SolutionsLambdaFunctionAlarm(Construct):
15 | def __init__(
16 | self,
17 | scope: Construct,
18 | id: str,
19 | alarm_name: str,
20 | lambda_function: lambda_.Function,
21 | ):
22 | throttles_metric = lambda_function.metric("Throttles", period=Duration.seconds(60))
23 | errors_metric = lambda_function.metric("Errors", period=Duration.seconds(60))
24 |
25 | super().__init__(scope, id)
26 |
27 | self.throttles_alarm = cloudwatch.Alarm(
28 | self,
29 | id=f'{id}-throttles',
30 | metric=throttles_metric,
31 | evaluation_periods=1,
32 | datapoints_to_alarm=1,
33 | threshold=1,
34 | comparison_operator=cloudwatch.ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD
35 | )
36 |
37 | self.errors_alarm = cloudwatch.Alarm(
38 | self,
39 | id=f'{id}-errors',
40 | metric=errors_metric,
41 | evaluation_periods=1,
42 | datapoints_to_alarm=1,
43 | threshold=1,
44 | comparison_operator=cloudwatch.ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD
45 | )
46 |
47 |
--------------------------------------------------------------------------------
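A short sketch of attaching the throttle and error alarms to a function (stack and fn are assumed to exist):

    from aws_solutions.cdk.aws_lambda.python.lambda_alarm import SolutionsLambdaFunctionAlarm

    SolutionsLambdaFunctionAlarm(
        stack, "fn-alarm", alarm_name="fn-alarm", lambda_function=fn
    )
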
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/python/layer.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | from pathlib import Path
5 | from typing import Union, List
6 | from uuid import uuid4
7 |
8 | from aws_cdk import BundlingOptions, DockerImage, AssetHashType
9 | from aws_cdk.aws_lambda import LayerVersion, Code
10 | from constructs import Construct
11 |
12 | from aws_solutions.cdk.aws_lambda.python.function import SolutionsPythonBundling
13 |
14 | DEPENDENCY_EXCLUDES = ["*.pyc"]
15 |
16 |
17 | class SolutionsPythonLayerVersion(LayerVersion):
18 | """Handle local packaging of layer versions"""
19 |
20 | def __init__(
21 | self,
22 | scope: Construct,
23 | construct_id: str,
24 | requirements_path: Path,
25 | libraries: Union[List[Path], None] = None,
26 | **kwargs,
27 | ): # NOSONAR
28 | self.scope = scope
29 | self.construct_id = construct_id
30 | self.requirements_path = requirements_path
31 |
32 | # validate requirements path
33 | if not self.requirements_path.is_dir():
34 | raise ValueError(
35 | f"requirements_path {self.requirements_path} must not be a file, but rather a directory containing Python requirements in a requirements.txt file, pipenv format or poetry format"
36 | )
37 |
38 | libraries = [] if not libraries else libraries
39 | for lib in libraries:
40 | if lib.is_file():
41 | raise ValueError(
42 | f"library {lib} must not be a file, but rather a directory"
43 | )
44 |
45 | bundling = SolutionsPythonBundling(
46 | self.requirements_path, libraries=libraries, install_path="python"
47 | )
48 |
49 | kwargs["code"] = self._get_code(bundling)
50 |
51 | # initialize the LayerVersion
52 | super().__init__(scope, construct_id, **kwargs)
53 |
54 | def _get_code(self, bundling: SolutionsPythonBundling) -> Code:
55 | # create the layer version locally
56 | code_parameters = {
57 | "path": str(self.requirements_path),
58 | "asset_hash_type": AssetHashType.CUSTOM,
59 | "asset_hash": uuid4().hex,
60 | "exclude": DEPENDENCY_EXCLUDES,
61 | }
62 |
63 | code = Code.from_asset(
64 | bundling=BundlingOptions(
65 | image=DockerImage.from_registry(
66 | "scratch"
67 | ), # NEVER USED - FOR NOW ALL BUNDLING IS LOCAL
68 | command=["not_used"],
69 | entrypoint=["not_used"],
70 | local=bundling,
71 | ),
72 | **code_parameters,
73 | )
74 |
75 | return code
76 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/cfn_nag.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | from dataclasses import dataclass
5 | from typing import List
6 |
7 | import jsii
8 | from aws_cdk import CfnResource, IAspect
9 | from constructs import IConstruct
10 |
11 |
12 | @dataclass
13 | class CfnNagSuppression:
14 | rule_id: str
15 | reason: str
16 |
17 |
18 | def add_cfn_nag_suppressions(
19 | resource: CfnResource, suppressions: List[CfnNagSuppression]
20 | ):
21 | resource.add_metadata(
22 | "cfn_nag",
23 | {
24 | "rules_to_suppress": [
25 | {"id": suppression.rule_id, "reason": suppression.reason}
26 | for suppression in suppressions
27 | ]
28 | },
29 | )
30 |
31 |
32 | @jsii.implements(IAspect)
33 | class CfnNagSuppressAll:
34 | """Suppress certain cfn_nag warnings that can be ignored by this solution"""
35 |
36 | def __init__(self, suppress: List[CfnNagSuppression], resource_type: str):
37 | self.suppressions = suppress
38 | self.resource_type = resource_type
39 |
40 | def visit(self, node: IConstruct):
41 | if "is_cfn_element" in dir(node) and node.is_cfn_element(node):
42 | if getattr(node, "cfn_resource_type", None) == self.resource_type:
43 | add_cfn_nag_suppressions(node, self.suppressions)
44 |
45 | elif "is_cfn_element" in dir(node.node.default_child) and (
46 | getattr(node.node.default_child, "cfn_resource_type", None)
47 | == self.resource_type
48 | ):
49 | add_cfn_nag_suppressions(node.node.default_child, self.suppressions)
50 |
--------------------------------------------------------------------------------
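A sketch of applying the aspect stack-wide so every resource of one type gets the same suppressions (the stack and rule are illustrative):

    from aws_cdk import Aspects
    from aws_solutions.cdk.cfn_nag import CfnNagSuppressAll, CfnNagSuppression

    Aspects.of(stack).add(
        CfnNagSuppressAll(
            suppress=[CfnNagSuppression("W89", "This function is not deployed to a VPC")],
            resource_type="AWS::Lambda::Function",
        )
    )
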
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/context.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 |
5 | import json
6 | import logging
7 | from functools import wraps
8 | from os import environ
9 | from pathlib import Path
10 | from typing import Union
11 |
12 | ARGUMENT_ERROR = "functions decorated with `with_cdk_context` can only accept one dictionary argument - the additional context overrides to use"
13 |
14 | logger = logging.getLogger("cdk-helper")
15 |
16 |
17 | class SolutionContext:
18 | def __init__(self, cdk_json_path: Union[None, Path] = None):
19 | self.cdk_json_path = cdk_json_path
20 | self.context = self._load_cdk_context()
21 |
22 | def requires( # NOSONAR - higher cognitive complexity allowed
23 | self, context_var_name, context_var_value=None
24 | ):
25 | context = self.context
26 |
27 | def cdk_context_decorator(f):
28 | @wraps(f)
29 | def wrapper(*args):
30 | # validate function arguments
31 | if len(args) > 1:
32 | raise ValueError(ARGUMENT_ERROR)
33 | if len(args) == 1 and not isinstance(args[0], dict):
34 | raise TypeError(ARGUMENT_ERROR)
35 |
36 | if len(args) == 0:
37 | args = (context,)
38 |
39 | # override the CDK context as required
40 | if len(args) == 1:
41 | context.update(args[0])
42 |
43 | env_context_var = environ.get(context_var_name)
44 | if env_context_var:
45 | context[context_var_name] = env_context_var
46 | elif context_var_name and context_var_value:
47 | context[context_var_name] = context_var_value
48 |
49 | if not context.get(context_var_name):
50 | raise ValueError(
51 | f"Missing cdk.json context variable or environment variable for {context_var_name}."
52 | )
53 |
54 | args = (context,)
55 |
56 | return f(*args)
57 |
58 | return wrapper
59 |
60 | return cdk_context_decorator
61 |
62 | def _load_cdk_context(self):
63 | """Load context from cdk.json"""
64 | if not self.cdk_json_path:
65 | return {}
66 |
67 | try:
68 | with open(self.cdk_json_path, "r") as f:
69 | config = json.loads(f.read())
70 | except FileNotFoundError:
71 | logger.warning(f"{self.cdk_json_path} not found, using empty context!")
72 | return {}
73 | context = config.get("context", {})
74 | return context
75 |
--------------------------------------------------------------------------------
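A minimal sketch of the decorator contract (the path and values are illustrative; a missing cdk.json only logs a warning and yields an empty context):

    from pathlib import Path
    from aws_solutions.cdk.context import SolutionContext

    solution = SolutionContext(cdk_json_path=Path("cdk.json"))

    @solution.requires("SOLUTION_ID", "SO0000")
    def build(context):
        print(context["SOLUTION_ID"])

    build()                 # falls back to the default value above
    build({"EXTRA": "1"})   # a single dict argument overrides the context
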
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/helpers/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | from aws_solutions.cdk.helpers.copytree import copytree, ignore_globs
5 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/helpers/copytree.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import os
5 | import shutil
6 | from pathlib import Path
7 |
8 |
9 | def ignore_globs(*globs):
10 | """Function that can be used as copytree() ignore parameter.
11 |
12 | Patterns is a sequence of glob-style patterns
13 | that are used to exclude files"""
14 |
15 | def _ignore_globs(path, names):
16 | ignored_names = []
17 | paths = [Path(os.path.join(path, name)).resolve() for name in names]
18 | for pattern in globs:
19 | for i, p in enumerate(paths):
20 | if p.match(pattern):
21 | ignored_names.append(names[i])
22 | return set(ignored_names)
23 |
24 | return _ignore_globs
25 |
26 |
27 | def copytree(src, dst, symlinks=False, ignore=None):
28 | if ignore:
29 | ignore.extend([ignored[:-2] for ignored in ignore if ignored.endswith("/*")])
30 | else:
31 | ignore = []
32 |
33 | if not os.path.exists(dst):
34 | os.makedirs(dst)
35 |
36 | for item in os.listdir(src):
37 | s = os.path.join(src, item)
38 | d = os.path.join(dst, item)
39 |
40 | # ignore full directories upfront
41 | if any(Path(s).match(ignored) for ignored in ignore):
42 | continue
43 |
44 | if os.path.isdir(s):
45 | shutil.copytree(s, d, symlinks, ignore=ignore_globs(*ignore))
46 | else:
47 | shutil.copy2(s, d)
48 |
--------------------------------------------------------------------------------
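For example, copying a tree while excluding bytecode and cache directories (paths are illustrative):

    from aws_solutions.cdk.helpers import copytree

    copytree("build/src", "build/out", ignore=["*.pyc", "__pycache__/*"])
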
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/helpers/loader.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import importlib
5 | import json
6 | import logging
7 | from functools import wraps
8 | from pathlib import Path
9 |
10 | logger = logging.getLogger("cdk-helper")
11 |
12 |
13 | class CDKLoaderException(Exception):
14 | pass
15 |
16 |
17 | def log_error(error):
18 | logger.error(error)
19 | raise CDKLoaderException(error)
20 |
21 |
22 | def _cdk_json_present(func):
23 | @wraps(func)
24 | def cdk_json_present(cdk_app_path: Path, cdk_app_name):
25 | app_path = cdk_app_path.parent
26 | cdk_json_dict = {}
27 | if not Path(app_path / "cdk.json").exists():
28 | log_error(f"please ensure a cdk.json is present at {app_path}")
29 |
30 | try:
31 | cdk_json_dict = json.loads(Path(app_path / "cdk.json").read_text())
32 | except ValueError as exc:
33 | log_error(f"failed to parse cdk.json: {exc}")
34 |
35 | cdk_app = cdk_json_dict.get("app")
36 | if not cdk_app:
37 | log_error(f"failed to find `app` in cdk.json")
38 |
39 | if "python3" not in cdk_app:
40 | log_error(
41 | f"this helper only supports python3 CDK apps at this time - yours was declared as {cdk_app}"
42 | )
43 |
44 | return func(cdk_app_path, cdk_app_name)
45 |
46 | return cdk_json_present
47 |
48 |
49 | @_cdk_json_present
50 | def load_cdk_app(cdk_app_path, cdk_app_name):
51 | """
52 | Load a CDK app from a folder path (dynamically)
53 | :param cdk_app_path: The full path of the CDK app to load
54 | :param cdk_app_name: The module path (starting from cdk_app_path) to find the function returning synth()
55 | :return:
56 | """
57 |
58 | try:
59 | (cdk_app_name, cdk_app_entrypoint) = cdk_app_name.split(":")
60 | except ValueError:
61 | log_error("please provide your `cdk_app_name` as path.to.cdk:function_name")
62 |
63 | if not cdk_app_path.exists():
64 | log_error(f"could not find `{cdk_app_name}` (please use a full path)")
65 |
66 | spec = importlib.util.spec_from_file_location(cdk_app_name, cdk_app_path)
67 | module = importlib.util.module_from_spec(spec)
68 | try:
69 | spec.loader.exec_module(module)
70 | except Exception as exc:
71 | log_error(f"could not load `{cdk_app_entrypoint}` in `{cdk_app_name}`: {exc}")
72 |
73 | try:
74 | cdk_function = getattr(module, cdk_app_entrypoint)
75 | except AttributeError as exc:
76 | log_error(
77 | f"could not find CDK entrypoint `{cdk_app_entrypoint}` in `{cdk_app_name}`"
78 | )
79 |
80 | logger.info(f"loaded AWS CDK app from {cdk_app_path}")
81 | logger.info(
82 | f"loaded AWS CDK app at {cdk_app_name}, entrypoint is {cdk_app_entrypoint}"
83 | )
84 | return cdk_function
85 |
--------------------------------------------------------------------------------
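A hedged usage sketch (the app path and entrypoint are illustrative; the name:function form is required):

    from pathlib import Path
    from aws_solutions.cdk.helpers.loader import load_cdk_app

    build_app = load_cdk_app(
        cdk_app_path=Path("infrastructure/app.py").resolve(),
        cdk_app_name="infrastructure.app:build_app",
    )
    build_app()  # returns the cloud assembly produced by synth()
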
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/helpers/logger.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import logging
5 |
6 |
7 | class Logger:
8 | """Set up a logger fo this package"""
9 |
10 | @classmethod
11 | def get_logger(cls, name: str) -> logging.Logger:
12 | """
13 | Gets the current logger for this package
14 | :param name: the name of the logger
15 | :return: the logger
16 | """
17 | logger = logging.getLogger(name)
18 | if not len(logger.handlers):
19 | logger.setLevel(logging.INFO)
20 | handler = logging.StreamHandler()
21 | formatter = logging.Formatter("[%(levelname)s]\t%(name)s\t%(message)s")
22 | handler.setFormatter(formatter)
23 | logger.addHandler(handler)
24 | logger.propagate = False
25 | return logger
26 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/interfaces.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import logging
5 | from dataclasses import dataclass
6 | from typing import Union, List
7 |
8 | import jsii
9 | from aws_cdk import (
10 | ITemplateOptions,
11 | Stack,
12 | NestedStack,
13 | CfnParameter,
14 | )
15 |
16 | logger = logging.getLogger("cdk-helper")
17 |
18 |
19 | @dataclass
20 | class _TemplateParameter:
21 | """Stores information about a CloudFormation parameter, its label (description) and group"""
22 |
23 | name: str
24 | label: str
25 | group: str
26 |
27 |
28 | class TemplateOptionsException(Exception):
29 | pass
30 |
31 |
32 | @jsii.implements(ITemplateOptions)
33 | class TemplateOptions:
34 | """Helper class for setting up template CloudFormation parameter groups, labels and solutions metadata"""
35 |
36 | _metadata = {}
37 |
38 | def __init__(
39 | self,
40 | stack: Union[Stack, NestedStack],
41 | construct_id: str,
42 | description: str,
43 | filename: str,
44 | ):
45 | self.stack = stack
46 | self.filename = filename
47 | self._parameters: List[_TemplateParameter] = []
48 | self.stack.template_options.description = description
49 | self.stack.template_options.metadata = self.metadata
50 |
51 | self._metadata = self._get_metadata()
52 |
53 | if not filename.endswith(".template"):
54 | raise TemplateOptionsException("template filenames must end with .template")
55 |
56 |         # if this stack is a nested stack, record its CDK ID on the parent stack's resource that refers to it
57 |         if getattr(stack, "nested_stack_resource", None):
58 | stack.nested_stack_resource.add_metadata(
59 | "aws:solutions:templateid", construct_id
60 | )
61 | stack.nested_stack_resource.add_metadata(
62 | "aws:solutions:templatename", filename
63 | )
64 |
65 | @property
66 | def metadata(self) -> dict:
67 | return self._metadata
68 |
69 | def _get_metadata(self) -> dict:
70 | pgs = set()
71 | parameter_groups = [
72 | p.group
73 | for p in self._parameters
74 | if p.group not in pgs and not pgs.add(p.group)
75 | ]
76 | metadata = {
77 | "AWS::CloudFormation::Interface": {
78 | "ParameterGroups": [
79 | {
80 | "Label": {"default": parameter_group},
81 | "Parameters": [
82 | parameter.name
83 | for parameter in self._parameters
84 | if parameter.group == parameter_group
85 | ],
86 | }
87 | for parameter_group in parameter_groups
88 | ],
89 | "ParameterLabels": {
90 | parameter.name: {"default": parameter.label}
91 | for parameter in self._parameters if parameter.label
92 | },
93 | },
94 | "aws:solutions:templatename": self.filename,
95 | "aws:solutions:solution_id": self.stack.node.try_get_context("SOLUTION_ID"),
96 | "aws:solutions:solution_version": self.stack.node.try_get_context(
97 | "SOLUTION_VERSION"
98 | ),
99 | }
100 | self.stack.template_options.metadata = metadata
101 | return metadata
102 |
103 | def add_parameter(self, parameter: CfnParameter, label: str, group: str):
104 | self._parameters.append(_TemplateParameter(parameter.logical_id, label, group))
105 | self._metadata = self._get_metadata()
106 |
--------------------------------------------------------------------------------
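A sketch of grouping and labelling a CloudFormation parameter (the stack and parameter are assumed; names are illustrative):

    from aws_cdk import CfnParameter
    from aws_solutions.cdk.interfaces import TemplateOptions

    email = CfnParameter(stack, "AdminEmail", type="String")
    options = TemplateOptions(
        stack, "MyStack", description="Example stack", filename="example.template"
    )
    options.add_parameter(email, label="Administrator email", group="Solution configuration")
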
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/mappings.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | from aws_cdk import CfnMapping
5 | from constructs import Construct
6 |
7 |
8 | class Mappings:
9 | def __init__(
10 | self,
11 | parent: Construct,
12 | solution_id: str,
13 | send_anonymous_usage_data: bool = True,
14 | quicksight_template_arn: bool = False,
15 | ):
16 | self.parent = parent
17 |
18 | # Track the solution mapping (ID, version, anonymous usage data)
19 | self.solution_mapping = CfnMapping(
20 | parent,
21 | "Solution",
22 | mapping={
23 | "Data": {
24 | "ID": solution_id,
25 | "Version": "%%SOLUTION_VERSION%%",
26 | "SendAnonymizedData": "Yes"
27 | if send_anonymous_usage_data
28 | else "No",
29 | }
30 | }
31 | )
32 |
33 | # track the s3 bucket, key prefix and (optional) quicksight template source
34 | general = {
35 | "S3Bucket": "%%BUCKET_NAME%%",
36 | "KeyPrefix": "%%SOLUTION_NAME%%/%%SOLUTION_VERSION%%",
37 | }
38 | if quicksight_template_arn:
39 | general["QuickSightSourceTemplateArn"] = "%%QUICKSIGHT_SOURCE%%"
40 |
41 | self.source_mapping = CfnMapping(
42 | parent,
43 | "SourceCode",
44 | mapping={"General": general}
45 | )
46 |
--------------------------------------------------------------------------------
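Instantiating the class adds both CfnMappings with the placeholder values above, which the build script later substitutes; a one-line sketch (stack is assumed):

    from aws_solutions.cdk.mappings import Mappings

    mappings = Mappings(stack, solution_id="SO0000")
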
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/scripts/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/stack.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | from __future__ import annotations
5 |
6 | import re
7 |
8 | import jsii
9 | from aws_cdk import Stack, Aspects, IAspect
10 | from constructs import Construct, IConstruct
11 |
12 | from aws_solutions.cdk.aws_lambda.cfn_custom_resources.solutions_metrics import Metrics
13 | from aws_solutions.cdk.interfaces import TemplateOptions
14 | from aws_solutions.cdk.mappings import Mappings
15 |
16 | RE_SOLUTION_ID = re.compile(r"^SO\d+$")
17 | RE_TEMPLATE_FILENAME = re.compile(r"^[a-z]+(?:-[a-z]+)*\.template$") # NOSONAR
18 |
19 |
20 | def validate_re(name, value, regex: re.Pattern):
21 | if regex.match(value):
22 | return value
23 | raise ValueError(f"{name} must match '{regex.pattern}'")
24 |
25 |
26 | def validate_solution_id(solution_id: str) -> str:
27 | return validate_re("solution_id", solution_id, RE_SOLUTION_ID)
28 |
29 |
30 | def validate_template_filename(template_filename: str) -> str:
31 | return validate_re("template_filename", template_filename, RE_TEMPLATE_FILENAME)
32 |
33 |
34 | @jsii.implements(IAspect)
35 | class MetricsAspect:
36 | def __init__(self, stack: SolutionStack):
37 | self.stack = stack
38 |
39 | def visit(self, node: IConstruct):
40 | """Called before synthesis, this allows us to set metrics at the end of synthesis"""
41 | if node == self.stack:
42 | self.stack.metrics = Metrics(self.stack, "Metrics", self.stack.metrics)
43 |
44 |
45 | class SolutionStack(Stack):
46 | def __init__(
47 | self,
48 | scope: Construct,
49 | construct_id: str,
50 | description: str,
51 | template_filename,
52 | **kwargs,
53 | ):
54 | super().__init__(scope, construct_id, **kwargs)
55 |
56 | self.metrics = {}
57 | self.solution_id = self.node.try_get_context("SOLUTION_ID")
58 | self.solution_version = self.node.try_get_context("SOLUTION_VERSION")
59 | self.mappings = Mappings(self, solution_id=self.solution_id)
60 | self.solutions_template_filename = validate_template_filename(template_filename)
61 | self.description = description.rstrip(".")
62 | self.solutions_template_options = TemplateOptions(
63 | self,
64 | construct_id=construct_id,
65 | description=f"({self.solution_id}) - {self.description}. Version {self.solution_version}",
66 | filename=template_filename,
67 | )
68 | Aspects.of(self).add(MetricsAspect(self))
69 |
--------------------------------------------------------------------------------
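A hedged sketch of a concrete stack following the class-attribute pattern used by the stacks in this repository (all names are illustrative):

    from constructs import Construct
    from aws_solutions.cdk.stack import SolutionStack

    class ExampleStack(SolutionStack):
        name = "example"
        description = "An example solution stack"
        template_filename = "example.template"

        def __init__(self, scope: Construct, construct_id: str, **kwargs):
            super().__init__(
                scope, construct_id, self.description, self.template_filename, **kwargs
            )
            # picked up by MetricsAspect at synthesis time
            self.metrics.update({"DeploymentType": "example"})
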
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/stepfunctions/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/stepfunctions/solution_fragment.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | from typing import List, Dict
5 | from typing import Optional
6 |
7 | from aws_cdk import Duration
8 | from aws_cdk.aws_lambda import CfnFunction
9 | from aws_cdk.aws_stepfunctions import State, INextable, TaskInput, StateMachineFragment
10 | from aws_cdk.aws_stepfunctions_tasks import LambdaInvoke
11 | from constructs import Construct
12 |
13 |
14 | class SolutionFragment(StateMachineFragment):
15 | def __init__(
16 | self, # NOSONAR (python:S107) - allow large number of method parameters
17 | scope: Construct,
18 | id: str,
19 | function: CfnFunction,
20 | payload: Optional[TaskInput] = None,
21 | input_path: Optional[str] = "$",
22 | result_path: Optional[str] = "$",
23 | output_path: Optional[str] = "$",
24 | result_selector: Optional[Dict] = None,
25 | failure_state: Optional[State] = None,
26 | backoff_rate: Optional[float] = 1.05,
27 | interval: Optional[Duration] = Duration.seconds(5),
28 | max_attempts: Optional[int] = 5,
29 | ):
30 | super().__init__(scope, id)
31 |
32 | self.failure_state = failure_state
33 |
34 | self.task = LambdaInvoke(
35 | self,
36 | id,
37 | lambda_function=function,
38 | retry_on_service_exceptions=True,
39 | input_path=input_path,
40 | result_path=result_path,
41 | output_path=output_path,
42 | payload=payload,
43 | payload_response_only=True,
44 | result_selector=result_selector,
45 | )
46 | self.task.add_retry(
47 | backoff_rate=backoff_rate,
48 | interval=interval,
49 | max_attempts=max_attempts,
50 | errors=["ResourcePending"],
51 | )
52 | if self.failure_state:
53 | self.task.add_catch(
54 | failure_state,
55 | errors=["ResourceFailed", "ResourceInvalid"],
56 | result_path="$.statesError",
57 | )
58 | self.task.add_catch(
59 | failure_state, errors=["States.ALL"], result_path="$.statesError"
60 | )
61 |
62 | @property
63 | def start_state(self) -> State:
64 | return self.task
65 |
66 | @property
67 | def end_states(self) -> List[INextable]:
68 | """
69 | Get the end states of this chain
70 | :return: The chainable end states of this chain (i.e. not the failure state)
71 | """
72 | states = [self.task]
73 | return states
74 |
--------------------------------------------------------------------------------
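A sketch of chaining the fragment into a state machine (stack and my_function are assumed; newer aws-cdk-lib versions may prefer definition_body over definition):

    import aws_cdk.aws_stepfunctions as sfn
    from aws_solutions.cdk.stepfunctions.solution_fragment import SolutionFragment

    failed = sfn.Fail(stack, "Failed")
    step = SolutionFragment(stack, "DoWork", function=my_function, failure_state=failed)
    sfn.StateMachine(stack, "Workflow", definition=step.start_state)
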
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/tools/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | from aws_solutions.cdk.tools.cleaner import Cleaner
5 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/tools/cleaner.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import logging
5 | import os
6 | import shutil
7 | from dataclasses import dataclass
8 | from pathlib import Path
9 |
10 | logger = logging.getLogger("cdk-helper")
11 |
12 |
13 | @dataclass
14 | class Cleanable:
15 | """Encapsulates something that can be cleaned by the cleaner"""
16 |
17 | name: str
18 | file_type: str
19 | pattern: str
20 |
21 | def __post_init__(self):
22 | if self.file_type not in ("d", "f"):
23 | raise ValueError("only directories and files are allowed ('d' or 'f')")
24 |
25 | def delete(self, source_dir):
26 | source_path = Path(source_dir)
27 |
28 | for path in source_path.rglob(self.pattern):
29 | if "aws_solutions" not in str(
30 | path.name
31 | ): # prevent the module from being unlinked in a dev environment
32 | if self.file_type == "d" and path.is_dir():
33 | logger.info(f"deleting {self.name} directory {path}")
34 | shutil.rmtree(path, ignore_errors=True)
35 | if self.file_type == "f" and path.is_file():
36 | logger.info(f"deleting {self.name} file {path}")
37 | try:
38 | path.unlink()
39 | except FileNotFoundError:
40 | pass
41 |
42 |
43 | class Cleaner:
44 | """Encapsulates functions that help clean up the build environment."""
45 |
46 | TO_CLEAN = [
47 | Cleanable("Python bytecode", "f", "*.py[cod]"),
48 | Cleanable("Python Coverage databases", "f", ".coverage"),
49 | Cleanable("CDK Cloud Assemblies", "d", "cdk.out"),
50 | Cleanable("Python egg", "d", "*.egg-info"),
51 | Cleanable("Python bytecode cache", "d", "__pycache__"),
52 | Cleanable("Python test cache", "d", ".pytest_cache"),
53 | ]
54 |
55 | @staticmethod
56 | def clean_dirs(*args):
57 | """Recursively remove each of its arguments, then recreate the directory"""
58 | for dir_to_remove in args:
59 | logger.info("cleaning %s" % dir_to_remove)
60 | shutil.rmtree(dir_to_remove, ignore_errors=True)
61 | os.makedirs(dir_to_remove)
62 |
63 | @staticmethod
64 | def cleanup_source(source_dir):
65 | """Cleans up all items found in TO_CLEAN"""
66 | for item in Cleaner.TO_CLEAN:
67 | item.delete(source_dir)
68 |
--------------------------------------------------------------------------------
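Typical build-script usage (directory names are illustrative):

    from aws_solutions.cdk.tools import Cleaner

    Cleaner.cleanup_source("source")     # remove caches, bytecode, cdk.out, etc.
    Cleaner.clean_dirs("build", "dist")  # wipe and recreate output directories
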
/source/cdk_solution_helper_py/helpers_cdk/setup.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import re
5 | from pathlib import Path
6 |
7 | import setuptools
8 |
9 | VERSION_RE = re.compile(r"\#\# \[(?P<version>.*)\]", re.MULTILINE) # NOSONAR
10 |
11 |
12 | def get_version():
13 | """
14 | Detect the solution version from the changelog. Latest version on top.
15 | """
16 | changelog = open(Path(__file__).resolve().parent.parent / "CHANGELOG.md").read()
17 | versions = VERSION_RE.findall(changelog)
18 | if not len(versions):
19 | raise ValueError("use the standard semver format in your CHANGELOG.md")
20 | build_version = versions[0]
21 | print(f"Build Version: {build_version}")
22 | return build_version
23 |
24 |
25 | setuptools.setup(
26 | name="aws-solutions-cdk",
27 | version=get_version(),
28 | description="Tools to make AWS Solutions deployments with CDK + Python more manageable",
29 | long_description=open("../README.md").read(),
30 | author="Amazon Web Services",
31 | url="https://aws.amazon.com/solutions/implementations",
32 | license="Apache License 2.0",
33 | packages=setuptools.find_namespace_packages(),
34 | package_data={
35 | "": [
36 | "requirements.txt",
37 | "Dockerfile",
38 | "__aws_solutions_bundling_version__",
39 | ]
40 | },
41 | install_requires=[
42 | "pip>=21.3",
43 | "aws_cdk_lib>=2.7.0",
44 | "Click>=7.1.2",
45 | "boto3>=1.17.52",
46 | "requests>=2.32.3",
47 | "crhelper>=2.0.6",
48 | ],
49 | entry_points="""
50 | [console_scripts]
51 | build-s3-cdk-dist=aws_solutions.cdk.scripts.build_s3_cdk_dist:cli
52 | """,
53 | python_requires=">=3.7",
54 | classifiers=[
55 | "Development Status :: 4 - Beta",
56 | "Intended Audience :: Developers",
57 | "License :: OSI Approved :: Apache Software License",
58 | "Programming Language :: JavaScript",
59 | "Programming Language :: Python :: 3 :: Only",
60 | "Programming Language :: Python :: 3.7",
61 | "Programming Language :: Python :: 3.8",
62 | "Programming Language :: Python :: 3.9",
63 | "Topic :: Software Development :: Code Generators",
64 | "Topic :: Utilities",
65 | "Typing :: Typed",
66 | ],
67 | zip_safe=False,
68 | )
69 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_common/aws_solutions/core/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | from aws_solutions.core.config import Config
5 |
6 | config = Config()
7 |
8 | from aws_solutions.core.helpers import (
9 | get_aws_region,
10 | get_aws_partition,
11 | get_service_client,
12 | get_service_resource,
13 | get_aws_account,
14 | )
15 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_common/aws_solutions/core/config.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import os
5 | import re
6 | from typing import Dict
7 |
8 | import botocore.config
9 |
10 | from aws_solutions.core.logging import get_logger
11 |
12 | logger = get_logger(__name__)
13 |
14 |
15 | SOLUTION_ID_RE = re.compile(r"^SO(?P<id>\d+)(?P<component>[a-zA-Z]*)$") # NOSONAR
16 | SOLUTION_VERSION_RE = re.compile(
17 |     r"^v(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$"  # NOSONAR
18 | )
19 |
20 |
21 | class SolutionConfigEnv:
22 | def __init__(self, env_var, default: str = "", regex: re.Pattern = None):
23 | self._env_var = env_var
24 | self._regex = regex
25 | self._value = default
26 |
27 | def _get_value_or_default(self) -> str:
28 | if self._value:
29 | return self._value
30 | return os.environ.get(self._env_var)
31 |
32 | def __get__(self, instance, owner) -> str:
33 | value = str(self._get_value_or_default())
34 | if self._regex and not self._regex.match(value):
35 | raise ValueError(
36 | f"`{value}` received, but environment variable {self._env_var} (or default) must be set and match the pattern {self._regex.pattern}"
37 | )
38 | return value
39 |
40 | def __set__(self, instance, value) -> None:
41 | self._value = value
42 |
43 |
44 | class Config:
45 | """Stores information about the current solution"""
46 |
47 | id = SolutionConfigEnv("SOLUTION_ID", regex=SOLUTION_ID_RE)
48 | version = SolutionConfigEnv("SOLUTION_VERSION", regex=SOLUTION_VERSION_RE)
49 | _botocore_config = None
50 |
51 | @property
52 | def botocore_config(self) -> botocore.config.Config:
53 | if not self._botocore_config:
54 | self._botocore_config = botocore.config.Config(
55 | **self._botocore_config_defaults
56 | )
57 | return self._botocore_config
58 |
59 | @botocore_config.setter
60 | def botocore_config(self, other_config: botocore.config.Config):
61 | self._botocore_config = self.botocore_config.merge(other_config)
62 |
63 | @property
64 | def _botocore_config_defaults(self) -> Dict:
65 | return {"user_agent_extra": f"AwsSolution/{self.id}/{self.version}"}
66 |
--------------------------------------------------------------------------------
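A sketch of how the descriptors resolve (environment variable values are illustrative):

    import os
    from aws_solutions.core import config

    os.environ["SOLUTION_ID"] = "SO0000"
    os.environ["SOLUTION_VERSION"] = "v1.0.0"
    print(config.id)                                # SO0000
    print(config.botocore_config.user_agent_extra)  # AwsSolution/SO0000/v1.0.0
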
/source/cdk_solution_helper_py/helpers_common/aws_solutions/core/helpers.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import os
5 | import boto3
6 | import aws_solutions.core.config
7 |
8 | _helpers_service_clients = {}
9 | _helpers_service_resources = {}
10 | _session = None
11 |
12 |
13 | class EnvironmentVariableError(Exception):
14 | pass
15 |
16 |
17 | def get_aws_region():
18 | """
19 | Get the caller's AWS region from the environment variable AWS_REGION
20 | :return: the AWS region name (e.g. us-east-1)
21 | """
22 | if region := os.environ.get("AWS_REGION"):
23 | return region
24 | else:
25 | raise EnvironmentVariableError("Missing AWS_REGION environment variable.")
26 |
27 |
28 | def get_aws_partition():
29 | """
30 | Get the caller's AWS partition by deriving it from the AWS region
31 | :return: partition name for the current AWS region (e.g. aws)
32 | """
33 | region_name = get_aws_region()
34 | china_region_name_prefix = "cn"
35 | us_gov_cloud_region_name_prefix = "us-gov"
36 | aws_us_gov_cloud_regions_partition = "aws-us-gov"
37 | if region_name.startswith(china_region_name_prefix):
38 | return "aws-cn"
39 | elif region_name.startswith(us_gov_cloud_region_name_prefix):
40 | return aws_us_gov_cloud_regions_partition
41 | else:
42 | return "aws"
43 |
44 |
45 | def get_session(**kwargs):
46 | global _session
47 | if not _session:
48 | _session = boto3.session.Session(**kwargs)
49 | return _session
50 |
51 |
52 | def set_session(**kwargs):
53 | global _session
54 | _session = boto3.session.Session(**kwargs)
55 | return _session
56 |
57 |
58 |
59 | def get_service_client(service_name, region_name=None):
60 | global _helpers_service_clients
61 | config = aws_solutions.core.config.botocore_config
62 | session = get_session()
63 |
64 | if region_name is None:
65 | region_name = get_aws_region()
66 |
67 | if (service_name not in _helpers_service_clients) or (region_name != get_aws_region()):
68 | _helpers_service_clients[service_name] = session.client(
69 | service_name, config=config, region_name=region_name
70 | )
71 |
72 | return _helpers_service_clients[service_name]
73 |
74 |
75 | def get_service_resource(service_name):
76 | global _helpers_service_resources
77 | config = aws_solutions.core.config.botocore_config
78 | session = get_session()
79 |
80 | if service_name not in _helpers_service_resources:
81 | _helpers_service_resources[service_name] = session.resource(
82 | service_name, config=config, region_name=get_aws_region()
83 | )
84 | return _helpers_service_resources[service_name]
85 |
86 |
87 | def get_aws_account() -> str:
88 | """
89 | Get the caller's AWS account ID from STS
90 | :return: the AWS account ID of the caller
91 | """
92 | sts = get_service_client("sts")
93 | return sts.get_caller_identity().get("Account")
94 |
--------------------------------------------------------------------------------
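Clients are cached per service name; a short sketch (region and solution variables are illustrative, and no credentials are needed just to create a client):

    import os
    from aws_solutions.core.helpers import get_service_client

    os.environ.setdefault("AWS_REGION", "us-east-1")
    os.environ.setdefault("SOLUTION_ID", "SO0000")      # used in the user agent
    os.environ.setdefault("SOLUTION_VERSION", "v1.0.0")

    s3 = get_service_client("s3")
    assert s3 is get_service_client("s3")  # the cached client is reused
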
/source/cdk_solution_helper_py/helpers_common/aws_solutions/core/logging.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import logging
5 | import os
6 |
7 | DEFAULT_LEVEL = "WARNING"
8 |
9 |
10 | def get_level():
11 | """
12 | Get the logging level from the LOG_LEVEL environment variable if it is valid. Otherwise set to WARNING
13 | :return: The logging level to use
14 | """
15 | valid_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
16 | requested_level = os.environ.get("LOG_LEVEL", DEFAULT_LEVEL)
17 |
18 | if requested_level and requested_level in valid_levels:
19 | return requested_level
20 |
21 | return DEFAULT_LEVEL
22 |
23 |
24 | def get_logger(name):
25 | """
26 | Get a configured logger. Compatible with both the AWS Lambda runtime (root logger) and local execution
27 | :param name: The name of the logger (most often __name__ of the calling module)
28 | :return: The logger to use
29 | """
30 | logger = None
31 |
32 | # first case: running as a lambda function or in pytest with conftest
33 | # second case: running a single test or locally under test
34 | if len(logging.getLogger().handlers) > 0:
35 | logger = logging.getLogger()
36 | logger.setLevel(get_level())
37 |
38 | # overrides
39 | logging.getLogger("boto3").setLevel(logging.WARNING)
40 | logging.getLogger("botocore").setLevel(logging.WARNING)
41 | logging.getLogger("urllib3").setLevel(logging.WARNING)
42 | else:
43 | # fmt: off
44 | logging.basicConfig(level=get_level()) # NOSONAR - log level is user-specified; logs to stdout for AWS Lambda
45 | # fmt: on
46 | logger = logging.getLogger(name)
47 |
48 | return logger
49 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/helpers_common/aws_solutions/extended/resource_lookup.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | from aws_solutions.core.helpers import get_aws_region
5 | import boto3
6 |
7 | class ResourceLookup:
8 | def __init__(
9 | self,
10 | logical_id,
11 | stack_name
12 | ):
13 | self.logical_id = logical_id
14 | self.stack_name = stack_name
15 | self.region = get_aws_region()
16 | self.cfn_client = boto3.client('cloudformation', region_name=self.region)
17 | self.physical_id = self.get_physical_id()
18 |
19 | def get_physical_id(self):
20 | response = self.cfn_client.describe_stack_resource(
21 | StackName=self.stack_name,
22 | LogicalResourceId=self.logical_id
23 | )
24 | return response['StackResourceDetail']['PhysicalResourceId']
25 |
26 | def get_arn(self, resource_type, account_id):
27 | arn_mapping = {
28 | "lambda": f"arn:aws:lambda:{self.region}:{account_id}:function:{self.physical_id}",
29 | "role": f"arn:aws:iam::{account_id}:role/{self.physical_id}"
30 | }
31 |
32 | return arn_mapping[resource_type]
33 |
--------------------------------------------------------------------------------
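A hedged sketch (requires AWS credentials and an already-deployed stack; the names below are illustrative):

    from aws_solutions.extended.resource_lookup import ResourceLookup

    lookup = ResourceLookup(logical_id="MetricsFunction", stack_name="example-stack")
    print(lookup.physical_id)
    print(lookup.get_arn("lambda", account_id="111111111111"))
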
/source/cdk_solution_helper_py/helpers_common/setup.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import re
5 | from pathlib import Path
6 |
7 | import setuptools
8 |
9 | VERSION_RE = re.compile(r"\#\# \[(?P<version>.*)\]", re.MULTILINE) # NOSONAR
10 |
11 |
12 | def get_version():
13 | """
14 | Detect the solution version from the changelog. Latest version on top.
15 | """
16 | changelog = open(Path(__file__).resolve().parent.parent / "CHANGELOG.md").read()
17 | versions = VERSION_RE.findall(changelog)
18 | if not len(versions):
19 | raise ValueError("use the standard semver format in your CHANGELOG.md")
20 | build_version = versions[0]
21 | print(f"Build Version: {build_version}")
22 | return build_version
23 |
24 |
25 | setuptools.setup(
26 | name="aws-solutions-python",
27 | version=get_version(),
28 | description="Tools to make AWS Solutions deployments with CDK + Python more manageable",
29 | long_description=open("../README.md").read(),
30 | author="Amazon Web Services",
31 | url="https://aws.amazon.com/solutions/implementations",
32 | license="Apache License 2.0",
33 | packages=setuptools.find_namespace_packages(exclude=("build",)),
34 | install_requires=[
35 | "boto3>=1.17.52",
36 | "pip>=22.3",
37 | ],
38 | python_requires=">=3.7",
39 | classifiers=[
40 | "Development Status :: 4 - Beta",
41 | "Intended Audience :: Developers",
42 | "License :: OSI Approved :: Apache Software License",
43 | "Programming Language :: JavaScript",
44 | "Programming Language :: Python :: 3 :: Only",
45 | "Programming Language :: Python :: 3.7",
46 | "Programming Language :: Python :: 3.8",
47 | "Programming Language :: Python :: 3.9",
48 | "Topic :: Software Development :: Code Generators",
49 | "Topic :: Utilities",
50 | "Typing :: Typed",
51 | ],
52 | zip_safe=False,
53 | )
54 |
--------------------------------------------------------------------------------
/source/cdk_solution_helper_py/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | aws-cdk-lib>=2.140.0
2 | boto3<2.0.0,>=1.24.11
3 | aws-lambda-powertools
4 | crhelper~=2.0.11
5 | pytest
6 | pytest-cov
7 | pytest-env>=0.6.2
8 | pytest-mock>=3.10.0
9 | moto>=4.1.10
10 | cdk-nag
11 | pyparsing
12 | -e cdk_solution_helper_py/helpers_cdk
13 | -e cdk_solution_helper_py/helpers_common
14 | -e infrastructure
15 | -e tests
--------------------------------------------------------------------------------
/source/infrastructure/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
--------------------------------------------------------------------------------
/source/infrastructure/app.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import logging
5 | from pathlib import Path
6 |
7 | from aws_cdk import App, Aspects
8 | from aws_solutions.cdk import CDKSolution
9 |
10 | from prebid_server.prebid_server_stack import PrebidServerStack
11 | from prebid_server.app_registry_aspect import AppRegistry
12 |
13 | solution = CDKSolution(cdk_json_path=Path(__file__).parent.absolute() / "cdk.json")
14 |
15 | logger = logging.getLogger("cdk-helper")
16 |
17 |
18 | def synthesizer():
19 | return CDKSolution(
20 | cdk_json_path=Path(__file__).parent.absolute() / "cdk.json"
21 | ).synthesizer
22 |
23 |
24 | @solution.context.requires("SOLUTION_NAME")
25 | @solution.context.requires("SOLUTION_ID")
26 | @solution.context.requires("SOLUTION_VERSION")
27 | @solution.context.requires("BUCKET_NAME")
28 | def build_app(context):
29 | app = App(context=context)
30 | prebid_server_stack = PrebidServerStack(
31 | app,
32 | PrebidServerStack.name,
33 | description=PrebidServerStack.description,
34 | template_filename=PrebidServerStack.template_filename,
35 | synthesizer=synthesizer(),
36 | )
37 | Aspects.of(app).add(AppRegistry(prebid_server_stack, f"AppRegistry-{prebid_server_stack.name}"))
38 | return app.synth(validate_on_synthesis=True, skip_validation=False)
39 |
40 |
41 | if __name__ == "__main__":
42 | build_app()
43 |
--------------------------------------------------------------------------------
/source/infrastructure/aws_lambda_layers/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
--------------------------------------------------------------------------------
/source/infrastructure/aws_lambda_layers/aws_solutions/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
--------------------------------------------------------------------------------
/source/infrastructure/aws_lambda_layers/aws_solutions/layer.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | from pathlib import Path
5 | from aws_cdk import Stack
6 | from constructs import Construct
7 | from aws_solutions.cdk.aws_lambda.python.layer import SolutionsPythonLayerVersion
8 |
9 |
10 | class SolutionsLayer(SolutionsPythonLayerVersion):
11 | def __init__(self, scope: Construct, construct_id: str, **kwargs):
12 | requirements_path: Path = Path(__file__).absolute().parent / "requirements"
13 | super().__init__(scope, construct_id, requirements_path, **kwargs)
14 |
15 | @staticmethod
16 | def get_or_create(scope: Construct, **kwargs):
17 | stack = Stack.of(scope)
18 | construct_id = "SolutionsLayer-C36"
19 | exists = stack.node.try_find_child(construct_id)
20 | if exists:
21 | return exists
22 | return SolutionsLayer(stack, construct_id, **kwargs)
23 |
--------------------------------------------------------------------------------
/source/infrastructure/aws_lambda_layers/aws_solutions/requirements/requirements.txt:
--------------------------------------------------------------------------------
1 | ../../../../cdk_solution_helper_py/helpers_common
--------------------------------------------------------------------------------
/source/infrastructure/aws_lambda_layers/datasync_s3_layer/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
--------------------------------------------------------------------------------
/source/infrastructure/aws_lambda_layers/datasync_s3_layer/python/datasync_reports/reports.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import json
5 |
6 | from aws_lambda_powertools import Logger
7 |
8 | logger = Logger(utc=True, service="efs-cleanup-lambda")
9 |
10 | def get_verified_files(files: list) -> list:
11 | """
12 | Function to retrieve DataSync verified report files from S3.
13 | """
14 | # DataSync can write multiple reports depending on the result of the transfer such as:
15 | # - exec-0a12a34ab112233ab.files-verified-v1-00001-0a123abc12d3b0e4f.json
16 | # - exec-0a12a34ab112233ab.files-transferred-v1-00001-0a123abc12d3b0e4f.json
17 | # We split and parse the file name to ensure the file is a verified report before returning the file key.
18 |
19 | keys = []
20 | for file in files:
21 | key = file["Key"]
22 | key_parts = key.split(".")
23 | report = key_parts[1]
24 | report_parts = report.split("-")
25 | report_type = report_parts[1]
26 | if report_type == "verified":
27 | keys.append(key)
28 |
29 | if len(keys) == 0:
30 | raise ValueError("Verified report files not found.")
31 |
32 | return keys
33 |
34 | def get_transferred_object_keys(event: dict, datasync_report_bucket: str, aws_account_id: str, s3_client) -> list:
35 | """
36 | Function to parse DataSync reports in S3 and return successfully transferred object keys.
37 | """
38 |
39 | object_keys = []
40 | try:
41 | event_parts = event['resources'][0].split('/')
42 | task_id = event_parts[1]
43 | execution_id = event_parts[3]
44 | report_key_prefix = f"datasync/Detailed-Reports/{task_id}/{execution_id}/"
45 |
46 | response = s3_client.list_objects_v2(
47 | Bucket=datasync_report_bucket,
48 | Prefix=report_key_prefix,
49 | ExpectedBucketOwner=aws_account_id
50 | )
51 | report_files = response["Contents"]
52 |
53 | skipped_files = []
54 | verified_keys = get_verified_files(files=report_files)
55 | for key in verified_keys:
56 | response = s3_client.get_object(
57 | Bucket=datasync_report_bucket,
58 | Key=key,
59 | ExpectedBucketOwner=aws_account_id
60 | )
61 | content = response["Body"].read().decode("utf-8")
62 | json_content = json.loads(content)
63 |
64 | verified_transfers = json_content["Verified"]
65 | for transfer in verified_transfers:
66 | key = transfer["RelativePath"]
67 | if transfer['DstMetadata']['Type'] == "Directory":
68 | continue
69 | if transfer["VerifyStatus"] != "SUCCESS":
70 | skipped_files.append(key)
71 | continue
72 | object_keys.append(key)
73 |
74 | if len(skipped_files) > 0:
75 | # The next time DataSync runs, the file will attempt transfer again and overwrite the previous version in S3
76 | logger.info(f"Transfer validation not successful for skipped files: {skipped_files}. Check CloudWatch logs for task execution: {execution_id}.")
77 |
78 | except Exception as e:
79 | logger.error(f"Error getting DataSync report: {e}")
80 |
81 | return object_keys
82 |
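
A quick illustration of the report-name parsing in get_verified_files, using made-up report keys of the shape documented above:

    keys = get_verified_files(files=[
        {"Key": "exec-0a12a34ab112233ab.files-verified-v1-00001-0a123abc12d3b0e4f.json"},
        {"Key": "exec-0a12a34ab112233ab.files-transferred-v1-00001-0a123abc12d3b0e4f.json"},
    ])
    # Only the files-verified report key is returned
    assert keys == ["exec-0a12a34ab112233ab.files-verified-v1-00001-0a123abc12d3b0e4f.json"]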
--------------------------------------------------------------------------------
/source/infrastructure/aws_lambda_layers/metrics_layer/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
--------------------------------------------------------------------------------
/source/infrastructure/aws_lambda_layers/metrics_layer/python/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
--------------------------------------------------------------------------------
/source/infrastructure/aws_lambda_layers/metrics_layer/python/cloudwatch_metrics/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
--------------------------------------------------------------------------------
/source/infrastructure/aws_lambda_layers/metrics_layer/python/cloudwatch_metrics/metrics.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | from datetime import datetime
5 |
6 | from aws_solutions.core.helpers import get_service_client
7 |
8 |
9 | class Metrics:
10 | def __init__(self, metrics_namespace, resource_prefix, logger):
11 | self.metrics_namespace = metrics_namespace
12 | self.resource_prefix = resource_prefix
13 | self.logger = logger
14 |
15 | def put_metrics_count_value_1(self, metric_name):
16 | self.logger.info(
17 | f"Recording 1 (count) for metric {metric_name} in CloudWatch namespace {self.metrics_namespace}")
18 | cloudwatch_client = get_service_client('cloudwatch')
19 |
20 | cloudwatch_client.put_metric_data(
21 | Namespace=self.metrics_namespace,
22 | MetricData=[
23 | {
24 | 'MetricName': metric_name,
25 | 'Dimensions': [{'Name': 'stack-name', 'Value': self.resource_prefix}],
26 | 'Value': 1,
27 | 'Unit': 'Count',
28 | "Timestamp": datetime.utcnow()
29 | }
30 | ]
31 | )
32 |
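
A minimal usage sketch of the Metrics helper; the namespace matches METRICS_NAMESPACE in cdk.json, while the stack name is illustrative:

    import logging

    logger = logging.getLogger(__name__)
    metrics = Metrics(
        metrics_namespace="prebid-server-deployment-on-aws-metrics",
        resource_prefix="prebid-server-stack",  # illustrative stack name
        logger=logger,
    )
    # Emits a count of 1 with the dimension stack-name=prebid-server-stack
    metrics.put_metrics_count_value_1(metric_name="DeleteEfsFiles")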
--------------------------------------------------------------------------------
/source/infrastructure/cdk.json:
--------------------------------------------------------------------------------
1 | {
2 | "app": "python3 app.py",
3 | "watch": {
4 | "include": [
5 | "**"
6 | ],
7 | "exclude": [
8 | "README.md",
9 | "cdk*.json",
10 | "requirements*.txt",
11 | "source.bat",
12 | "**/__init__.py",
13 | "**/__pycache__",
14 | "python/__pycache__",
15 | "tests"
16 | ]
17 | },
18 | "context": {
19 | "@aws-cdk/aws-apigateway:usagePlanKeyOrderInsensitiveId": true,
20 | "@aws-cdk/core:stackRelativeExports": true,
21 | "@aws-cdk/aws-rds:lowercaseDbIdentifier": true,
22 | "@aws-cdk/aws-lambda:recognizeVersionProps": true,
23 | "@aws-cdk/aws-cloudfront:defaultSecurityPolicyTLSv1.2_2021": true,
24 | "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true,
25 | "@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": true,
26 | "@aws-cdk/core:target-partitions": [
27 | "aws",
28 | "aws-cn"
29 | ],
30 | "@aws-cdk/aws-s3:serverAccessLogsUseBucketPolicy": true,
31 | "SOLUTION_NAME": "Prebid Server Deployment on AWS",
32 | "SOLUTION_ID": "SO0248",
33 | "SOLUTION_VERSION": "v1.1.2",
34 | "METRICS_NAMESPACE": "prebid-server-deployment-on-aws-metrics",
35 | "BUCKET_NAME": "BUCKET_NAME"
36 | }
37 | }
38 |
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/artifacts_bucket_lambda/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/artifacts_bucket_lambda/files/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/artifacts_bucket_lambda/files/glue/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/artifacts_bucket_lambda/requirements.txt:
--------------------------------------------------------------------------------
1 | crhelper
2 |
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/artifacts_bucket_lambda/upload_files.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | """
4 | This module is a custom resource Lambda for uploading files to the solution artifacts S3 bucket.
5 | Any files placed in the artifacts_bucket_lambda/files directory will be uploaded to S3 under the same directory prefix.
6 |
7 | Example: files/glue/metrics_glue_script.py is uploaded to the artifacts bucket at s3://{bucket-name}/glue/metrics_glue_script.py
8 | """
9 |
10 | import os
11 |
12 | from crhelper import CfnResource
13 | from aws_lambda_powertools import Logger
14 | import boto3
15 | from botocore import config
16 |
17 | FILE_DIR = "files"
18 | SOLUTION_ID = os.environ["SOLUTION_ID"]
19 | SOLUTION_VERSION = os.environ["SOLUTION_VERSION"]
20 |
21 | logger = Logger(service="artifacts-bucket-upload-lambda", level="INFO")
22 | helper = CfnResource(log_level="ERROR", boto_level="ERROR")
23 | # Add the solution identifier to boto3 requests for attributing service API usage
24 | boto_config = {
25 | "user_agent_extra": f"AwsSolution/{SOLUTION_ID}/{SOLUTION_VERSION}"
26 | }
27 | s3_client = boto3.client("s3", config=config.Config(**boto_config))
28 |
29 | def event_handler(event, context):
30 | """
31 | This is the Lambda custom resource entry point.
32 | """
33 |
34 | logger.info(event)
35 | helper(event, context)
36 |
37 |
38 | @helper.create
39 | @helper.update
40 | def on_create_or_update(event, _) -> None:
41 | resource_properties = event["ResourceProperties"]
42 | try:
43 | response = upload_file(resource_properties)
44 | except Exception as err:
45 | logger.error(err)
46 | raise err
47 |
48 | helper.Data.update({"Response": response})
49 |
50 |
51 | def upload_file(resource_properties) -> list:
52 | """
53 | This function handles uploading files to the S3 artifacts bucket
54 | """
55 | artifacts_bucket_name = resource_properties["artifacts_bucket_name"]
56 | success = []
57 | for root, dirs, _ in os.walk(FILE_DIR):
58 | for subdir in dirs:
59 | subdir_path = os.path.join(root, subdir)
60 | for artifact_file in os.listdir(subdir_path):
61 | local_obj_path = os.path.join(subdir_path, artifact_file)
62 | object_key = f"{subdir}/{artifact_file}"
63 |
64 | if artifact_file == "__pycache__":
65 | logger.info(f"Encountered pycache {object_key} while uploading")
66 | continue
67 |
68 | s3_client.upload_file(local_obj_path, artifacts_bucket_name, object_key)
69 | success_message = f"Uploaded {object_key}"
70 | logger.info(success_message)
71 | success.append(success_message)  # record the uploaded key so it is returned to the custom resource
72 |
73 | return success
74 |
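
To make the key construction concrete: for the layout files/glue/metrics_glue_script.py, the walk yields subdir="glue" and artifact_file="metrics_glue_script.py", so the object lands at glue/metrics_glue_script.py. A standalone sketch of the same mapping:

    import os

    def object_keys_for(file_dir="files"):
        keys = []
        for root, dirs, _ in os.walk(file_dir):
            for subdir in dirs:
                for artifact_file in os.listdir(os.path.join(root, subdir)):
                    if artifact_file != "__pycache__":
                        keys.append(f"{subdir}/{artifact_file}")
        return keys

    # e.g. object_keys_for() -> ["glue/__init__.py", "glue/metrics_glue_script.py"] (order per os.listdir)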
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/cloudwatch_metrics/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/cloudwatch_metrics/requirements.txt:
--------------------------------------------------------------------------------
1 | requests
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/docker_configs_bucket_lambda/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/docker_configs_bucket_lambda/requirements.txt:
--------------------------------------------------------------------------------
1 | crhelper
2 |
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/docker_configs_bucket_lambda/upload_docker_config.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | """
4 | This module is a custom resource Lambda for uploading docker config files to an S3 bucket.
5 | """
6 |
7 | import os
8 | from crhelper import CfnResource
9 | from aws_lambda_powertools import Logger
10 | import boto3
11 | from botocore import config
12 |
13 | SOLUTION_ID = os.environ["SOLUTION_ID"]
14 | SOLUTION_VERSION = os.environ["SOLUTION_VERSION"]
15 |
16 | logger = Logger(service="prebid-configs-bucket-upload-lambda", level="INFO")
17 | helper = CfnResource(log_level="ERROR", boto_level="ERROR")
18 |
19 | # Constants for directories and S3 prefixes
20 | CONFIG_DIRECTORIES = [
21 | ("default-config", "prebid-server/default"),
22 | ("current-config", "prebid-server/current"),
23 | ]
24 |
25 |
26 | def event_handler(event, context):
27 | """
28 | This is the Lambda custom resource entry point.
29 | """
30 | logger.info(event)
31 | helper(event, context)
32 |
33 |
34 | @helper.create
35 | @helper.update
36 | def on_create_or_update(event, _) -> None:
37 | try:
38 | bucket_name = event["ResourceProperties"]["docker_configs_bucket_name"]
39 | upload_all_files(bucket_name)
40 | except Exception as err:
41 | logger.error(f"Error uploading files to S3: {err}")
42 | raise err
43 |
44 |
45 | def upload_all_files(bucket_name: str) -> None:
46 | """
47 | Uploads files from predefined directories to the S3 bucket using respective prefixes.
48 | """
49 | for directory, prefix in CONFIG_DIRECTORIES:
50 | upload_directory_to_s3(bucket_name, directory, prefix)
51 |
52 |
53 | def upload_directory_to_s3(bucket_name: str, directory: str, prefix: str) -> None:
54 | """
55 | Uploads all files from a specified directory to the S3 bucket with the given prefix.
56 | """
57 | for root, _, files in os.walk(directory):
58 | for file in files:
59 | file_path = os.path.join(root, file)
60 | object_key = os.path.join(prefix, os.path.relpath(file_path, directory))
61 | upload_file_to_s3(bucket_name, file_path, object_key)
62 |
63 |
64 | def upload_file_to_s3(bucket_name: str, file_path: str, object_key: str) -> None:
65 | """
66 | Uploads a single file to the specified S3 bucket.
67 | """
68 | # Add the solution identifier to boto3 requests for attributing service API usage
69 | boto_config = {
70 | "user_agent_extra": f"AwsSolution/{SOLUTION_ID}/{SOLUTION_VERSION}"
71 | }
72 | s3_client = boto3.client("s3", config=config.Config(**boto_config))
73 | s3_client.upload_file(file_path, bucket_name, object_key)
74 | logger.info(f"Uploaded {file_path} to s3://{bucket_name}/{object_key}")
75 |
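
A concrete trace of the key construction above, using a config file name from deployment/ecr/prebid-server:

    import os

    directory, prefix = "current-config", "prebid-server/current"
    file_path = os.path.join(directory, "prebid-config.yaml")
    object_key = os.path.join(prefix, os.path.relpath(file_path, directory))
    assert object_key == "prebid-server/current/prebid-config.yaml"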
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/enable_access_logs/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/enable_access_logs/enable_access_logs.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | """
5 | This module is a custom lambda for enabling access logs for ALB
6 | """
7 |
8 | import os
9 | import boto3
10 | from botocore import config
11 | from aws_lambda_powertools import Logger
12 | from crhelper import CfnResource
13 |
14 |
15 | logger = Logger(utc=True, service="alb-access-log-lambda")
16 | helper = CfnResource(log_level="ERROR", boto_level="ERROR")
17 |
18 | SOLUTION_ID = os.environ["SOLUTION_ID"]
19 | SOLUTION_VERSION = os.environ["SOLUTION_VERSION"]
20 |
21 | def event_handler(event, context):
22 | """
23 | This is the Lambda custom resource entry point.
24 | """
25 |
26 | logger.info(event)
27 | helper(event, context)
28 |
29 |
30 | @helper.create
31 | def on_create(event, _) -> None:
32 | """
33 | Function to enable access logging for ALB
34 | """
35 | # Add the solution identifier to boto3 requests for attributing service API usage
36 | boto_config = {
37 | "user_agent_extra": f"AwsSolution/{SOLUTION_ID}/{SOLUTION_VERSION}"
38 | }
39 | elbv2_client = boto3.client("elbv2", config=config.Config(**boto_config))
40 | alb_arn = event["ResourceProperties"]["ALB_ARN"]
41 | access_log_bucket = event["ResourceProperties"]["ALB_LOG_BUCKET"]
42 |
43 | response = elbv2_client.modify_load_balancer_attributes(
44 | LoadBalancerArn=alb_arn,
45 | Attributes=[
46 | {"Key": "access_logs.s3.enabled", "Value": "true"},
47 | {"Key": "access_logs.s3.bucket", "Value": access_log_bucket},
48 | ],
49 | )
50 |
51 | logger.info(response)
52 |
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/enable_access_logs/requirements.txt:
--------------------------------------------------------------------------------
1 | crhelper
2 |
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/header_secret_lambda/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/header_secret_lambda/header_secret_gen.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | """
5 | This module is a custom lambda for producing a secret value for CloudFront header
6 | """
7 |
8 | import secrets
9 | from aws_lambda_powertools import Logger
10 | from crhelper import CfnResource
11 |
12 |
13 | logger = Logger(utc=True, service="header-secret-custom-lambda")
14 | helper = CfnResource(log_level="ERROR", boto_level="ERROR")
15 |
16 |
17 | def event_handler(event, context):
18 | """
19 | This is the Lambda custom resource entry point.
20 | """
21 | logger.info(event)
22 | helper(event, context)
23 |
24 |
25 | @helper.create
26 | def on_create(event, _) -> None:
27 | """
28 | Function to produce a secret value for CloudFront header
29 | """
30 | helper.Data.update({"header_secret_value": secrets.token_urlsafe(16)})
31 |
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/header_secret_lambda/requirements.txt:
--------------------------------------------------------------------------------
1 | crhelper
2 |
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/operational_metrics/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/operational_metrics/ops_metrics.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import os
5 | import json
6 | import uuid
7 |
8 | from crhelper import CfnResource
9 | from aws_lambda_powertools import Logger
10 | from aws_solutions.core.helpers import get_service_client
11 |
12 | logger = Logger(service="Ops metrics", level="INFO")
13 |
14 | helper = CfnResource()
15 |
16 | METRICS_NAMESPACE = os.environ["METRICS_NAMESPACE"]
17 | RESOURCE_PREFIX = os.environ["RESOURCE_PREFIX"]
18 | STACK_NAME = os.environ["STACK_NAME"]
19 | SECRET_NAME = f"{STACK_NAME}-anonymous-metrics-uuid"
20 | METRIC_UUID = str(uuid.uuid4())
21 |
22 |
23 | def event_handler(event, context):
24 | """
25 | This function is the entry point for the Lambda-backed custom resource.
26 | """
27 | logger.info(event)
28 | helper(event, context)
29 |
30 |
31 | @helper.create
32 | def on_create(event, _):
33 | """
34 | Lambda entry point. Print the event first.
35 | """
36 | logger.info(f"Create event input: {json.dumps(event)}")
37 | create_uuid()
38 |
39 |
40 | def create_uuid():
41 | """
42 | This function is responsible for creating the Secrets Manager uuid for anonymous metrics.
43 | """
44 |
45 | secrets_manager_client = get_service_client("secretsmanager")
46 |
47 | secrets_manager_client.create_secret(Name=SECRET_NAME, SecretString=METRIC_UUID)
48 |
49 | logger.info("Secret created successfully!")
50 |
51 |
52 | @helper.delete
53 | def on_delete(event, _):
54 | """
55 | This function is responsible for deleting the Secrets Manager uuid.
56 | """
57 | logger.info(f"Delete event input: {json.dumps(event)}")
58 | logger.info(f"Resource marked for deletion: {SECRET_NAME}")
59 | delete_secret()
60 |
61 |
62 | def delete_secret():
63 | secrets_manager_client = get_service_client("secretsmanager")
64 |
65 | # delete the secret
66 | secrets_manager_client.delete_secret(
67 | SecretId=SECRET_NAME, ForceDeleteWithoutRecovery=True
68 | )
69 | logger.info("UUID secret deleted successfully.")
70 |
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/operational_metrics/requirements.txt:
--------------------------------------------------------------------------------
1 | crhelper
2 |
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/prefix_id_lambda/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/prefix_id_lambda/get_prefix_id.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | """
5 | This module is a custom lambda for getting the prefix list id for the ALB security group
6 | """
7 |
8 | import os
9 | import boto3
10 | from botocore import config
11 | from aws_lambda_powertools import Logger
12 | from crhelper import CfnResource
13 |
14 | logger = Logger(utc=True, service="prefix-id-custom-lambda")
15 | helper = CfnResource(log_level="ERROR", boto_level="ERROR")
16 |
17 | SOLUTION_ID = os.environ["SOLUTION_ID"]
18 | SOLUTION_VERSION = os.environ["SOLUTION_VERSION"]
19 |
20 | def event_handler(event, context):
21 | """
22 | This is the Lambda custom resource entry point.
23 | """
24 | logger.info(event)
25 | helper(event, context)
26 |
27 |
28 | @helper.create
29 | def on_create(event, _) -> None:
30 | """
31 | Function to get prefix_list_id from prefix_list_name
32 | """
33 | prefix_list_name = "com.amazonaws.global.cloudfront.origin-facing"
34 | # Add the solution identifier to boto3 requests for attributing service API usage
35 | boto_config = {
36 | "user_agent_extra": f"AwsSolution/{SOLUTION_ID}/{SOLUTION_VERSION}"
37 | }
38 | ec2_client = boto3.client("ec2", config=config.Config(**boto_config))
39 | response = ec2_client.describe_managed_prefix_lists()
40 | prefix_list_id = None
41 |
42 | try:
43 | prefix_list_id = next(
44 | prefix_list["PrefixListId"]
45 | for prefix_list in response["PrefixLists"]
46 | if prefix_list["PrefixListName"] == prefix_list_name
47 | )
48 | except StopIteration as exception:
49 | logger.error(exception)
50 | raise exception
51 |
52 | helper.Data.update({"prefix_list_id": prefix_list_id})
53 |
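
A small sketch of the next()-based lookup above against a stubbed describe_managed_prefix_lists response (prefix list IDs are illustrative):

    response = {
        "PrefixLists": [
            {"PrefixListName": "com.amazonaws.us-east-1.s3", "PrefixListId": "pl-00000001"},
            {"PrefixListName": "com.amazonaws.global.cloudfront.origin-facing", "PrefixListId": "pl-00000002"},
        ]
    }
    prefix_list_id = next(
        prefix_list["PrefixListId"]
        for prefix_list in response["PrefixLists"]
        if prefix_list["PrefixListName"] == "com.amazonaws.global.cloudfront.origin-facing"
    )
    assert prefix_list_id == "pl-00000002"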
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/prefix_id_lambda/requirements.txt:
--------------------------------------------------------------------------------
1 | crhelper
2 | boto3
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/vpc_eni_lambda/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/vpc_eni_lambda/delete_lambda_eni.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | """
5 | This module is a custom lambda for deleting VPC ENIs for the Lambda service
6 | """
7 |
8 | import os
9 | import boto3
10 | from botocore import config
11 | from aws_lambda_powertools import Logger
12 | from crhelper import CfnResource
13 |
14 | logger = Logger(utc=True, service="vpc-eni-lambda")
15 | helper = CfnResource(log_level="ERROR", boto_level="ERROR")
16 |
17 | SOLUTION_ID = os.environ["SOLUTION_ID"]
18 | SOLUTION_VERSION = os.environ["SOLUTION_VERSION"]
19 |
20 | def event_handler(event, context):
21 | """
22 | This is the Lambda custom resource entry point.
23 | """
24 | logger.info(event)
25 | helper(event, context)
26 |
27 |
28 | @helper.delete
29 | def on_delete(event, _) -> None:
30 | """
31 | Function to delete Lambda service VPC ENIs
32 | """
33 | SECURITY_GROUP_ID = event["ResourceProperties"]["SECURITY_GROUP_ID"]
34 | # Add the solution identifier to boto3 requests for attributing service API usage
35 | boto_config = {
36 | "user_agent_extra": f"AwsSolution/{SOLUTION_ID}/{SOLUTION_VERSION}"
37 | }
38 | ec2_client = boto3.client("ec2", config=config.Config(**boto_config))
39 |
40 | describe_response = ec2_client.describe_network_interfaces(
41 | Filters=[{"Name": "group-id", "Values": [SECURITY_GROUP_ID]}]
42 | )
43 | return_responses = []
44 | for network_interface in describe_response["NetworkInterfaces"]:
45 | try:
46 | attachment_id = network_interface["Attachment"]["AttachmentId"]
47 | ec2_client.detach_network_interface(AttachmentId=attachment_id)
48 |
49 | logger.info(f"Detached ENI: {attachment_id}")
50 |
51 | except Exception as e:
52 | logger.exception(e)
53 |
54 | try:
55 | network_id = network_interface["NetworkInterfaceId"]
56 | response = ec2_client.delete_network_interface(
57 | NetworkInterfaceId=network_id
58 | )
59 |
60 | return_responses.append(response)
61 |
62 | logger.info(f"Deleted ENI: {network_id}")
63 |
64 | except Exception as e:
65 | logger.exception(e)
66 |
67 | helper.Data.update({"Response": return_responses})
68 |
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/vpc_eni_lambda/requirements.txt:
--------------------------------------------------------------------------------
1 | crhelper
2 |
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/waf_webacl_lambda/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/waf_webacl_lambda/delete_waf_webacl.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | """
5 | This module is a custom lambda for deletion of Waf Web ACL and associations
6 | """
7 |
8 | import boto3
9 | import os
10 | from botocore import config
11 | from aws_lambda_powertools import Logger
12 | from crhelper import CfnResource
13 |
14 | logger = Logger(utc=True, service="waf-custom-lambda")
15 | helper = CfnResource(log_level="ERROR", boto_level="ERROR")
16 |
17 | SOLUTION_ID = os.environ["SOLUTION_ID"]
18 | SOLUTION_VERSION = os.environ["SOLUTION_VERSION"]
19 |
20 | def event_handler(event, context):
21 | """
22 | This is the Lambda custom resource entry point.
23 | """
24 |
25 | logger.info(event)
26 | helper(event, context)
27 |
28 |
29 | @helper.delete
30 | def on_delete(event, _):
31 | # Add the solution identifier to boto3 requests for attributing service API usage
32 | boto_config = {
33 | "user_agent_extra": f"AwsSolution/{SOLUTION_ID}/{SOLUTION_VERSION}"
34 | }
35 | cf_client = boto3.client("cloudfront", config=config.Config(**boto_config))
36 |
37 | # Dissociate web acl resource before deleting web acl
38 | cf_distribution_id = event["ResourceProperties"]["CF_DISTRIBUTION_ID"]
39 | response = cf_client.get_distribution_config(Id=cf_distribution_id)
40 |
41 | cf_distribution_config = response["DistributionConfig"]
42 | cf_distribution_config["WebACLId"] = "" # provide an empty web ACL ID
43 |
44 | _ = cf_client.update_distribution(
45 | DistributionConfig=cf_distribution_config,
46 | Id=cf_distribution_id,
47 | IfMatch=response["ETag"], # pass the ETag value from get_distribution_config as IfMatch
48 | )
49 |
50 | # Delete Web ACL
51 | wafv2_client = boto3.client("wafv2", region_name="us-east-1")
52 | webacl_name = event["ResourceProperties"]["WAF_WEBACL_NAME"]
53 | webacl_id = event["ResourceProperties"]["WAF_WEBACL_ID"]
54 | webacl_locktoken = event["ResourceProperties"]["WAF_WEBACL_LOCKTOKEN"]
55 |
56 | _ = wafv2_client.delete_web_acl(
57 | Name=webacl_name, Scope="CLOUDFRONT", Id=webacl_id, LockToken=webacl_locktoken
58 | )
59 |
60 | logger.info(f"Deleted WAF WebAcl with name {webacl_name} and id {webacl_id}")
61 |
--------------------------------------------------------------------------------
/source/infrastructure/custom_resources/waf_webacl_lambda/requirements.txt:
--------------------------------------------------------------------------------
1 | crhelper
2 |
--------------------------------------------------------------------------------
/source/infrastructure/prebid_server/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
--------------------------------------------------------------------------------
/source/infrastructure/prebid_server/cloudtrail_construct.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import aws_cdk as cdk
5 | from aws_cdk import Aws
6 | from aws_cdk import (
7 | aws_cloudtrail as cloud_trail,
8 | aws_iam as iam,
9 | aws_kms as kms,
10 | aws_s3 as s3,
11 | )
12 | from constructs import Construct
13 |
14 |
15 | class CloudTrailConstruct(Construct):
16 | def __init__(
17 | self,
18 | scope,
19 | id,
20 | s3_buckets,
21 | ) -> None:
22 | """
23 | This construct creates a CloudTrail resource and sets S3 and Lambda Data Events.
24 | """
25 | super().__init__(scope, id)
26 |
27 | self.s3_buckets = s3_buckets
28 | self.logging_bucket = self._create_logging_bucket()
29 |
30 | self.trail = cloud_trail.Trail(
31 | self,
32 | "S3AndLambdaTrail",
33 | bucket=self.logging_bucket,
34 | is_multi_region_trail=False,
35 | include_global_service_events=False,
36 | management_events=cloud_trail.ReadWriteType.ALL,
37 | )
38 |
39 | self.trail.add_s3_event_selector(
40 | [cloud_trail.S3EventSelector(bucket=bucket) for bucket in self.s3_buckets]
41 | )
42 |
43 | self.trail.node.add_dependency(self.logging_bucket)
44 |
45 | def _create_logging_bucket(self) -> s3.Bucket:
46 | logging_bucket_key = kms.Key(
47 | self,
48 | id="CloudtrailLoggingBucketKey",
49 | description="Cloudtrail Logging Bucket Key",
50 | enable_key_rotation=True,
51 | pending_window=cdk.Duration.days(30),
52 | removal_policy=cdk.RemovalPolicy.RETAIN,
53 | )
54 |
55 | kms_bucket_policy = iam.PolicyStatement(
56 | sid="Allow access to CloudTrailLoggingBucketKey",
57 | principals=[
58 | iam.ServicePrincipal("cloudtrail.amazonaws.com"),
59 | iam.ServicePrincipal("delivery.logs.amazonaws.com"),
60 | ],
61 | effect=iam.Effect.ALLOW,
62 | actions=[
63 | "kms:Encrypt",
64 | "kms:Decrypt",
65 | "kms:ReEncrypt*",
66 | "kms:GenerateDataKey*",
67 | "kms:CreateGrant",
68 | "kms:DescribeKey",
69 | ],
70 | resources=["*"],
71 | conditions={
72 | "StringEquals": {
73 | "aws:SourceAccount": [Aws.ACCOUNT_ID],
74 | }
75 | },
76 | )
77 | logging_bucket_key.add_to_resource_policy(kms_bucket_policy)
78 |
79 | logging_bucket = s3.Bucket(
80 | self,
81 | id="CloudTrailLoggingBucket",
82 | object_ownership=s3.ObjectOwnership.OBJECT_WRITER,
83 | access_control=s3.BucketAccessControl.BUCKET_OWNER_FULL_CONTROL,
84 | block_public_access=s3.BlockPublicAccess.BLOCK_ALL,
85 | encryption_key=logging_bucket_key,
86 | removal_policy=cdk.RemovalPolicy.RETAIN,
87 | versioned=True,
88 | enforce_ssl=True,
89 | object_lock_enabled=True,
90 | server_access_logs_prefix="access-logs/"
91 | )
92 | logging_bucket.node.add_dependency(logging_bucket_key)
93 | enable_s3_access_logs_statement = iam.PolicyStatement(
94 | effect=iam.Effect.ALLOW,
95 | principals=[
96 | iam.ServicePrincipal("logging.s3.amazonaws.com")
97 | ],
98 | actions=["s3:PutObject"],
99 | resources=[
100 | f"{logging_bucket.bucket_arn}/access-logs/*"
101 | ]
102 | )
103 | logging_bucket.add_to_resource_policy(
104 | enable_s3_access_logs_statement
105 | )
106 |
107 | return logging_bucket
108 |
--------------------------------------------------------------------------------
/source/infrastructure/prebid_server/condition_aspect.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import jsii
5 | import aws_cdk
6 | from aws_cdk import CfnCondition, CfnResource
7 | from constructs import Construct, IConstruct
8 |
9 |
10 | @jsii.implements(aws_cdk.IAspect)
11 | class ConditionAspect(Construct):
12 | def __init__(self, scope: Construct, id: str, condition: CfnCondition):
13 | super().__init__(scope, id)
14 |
15 | self.condition = condition
16 |
17 | def visit(self, node: IConstruct) -> None:
18 | if isinstance(node, CfnResource) and node.cfn_options:
19 | node.cfn_options.condition = self.condition
20 |
--------------------------------------------------------------------------------
/source/infrastructure/prebid_server/container_image_construct.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import logging
5 | import os
6 | from aws_cdk import Aws, CfnParameter, CfnOutput
7 | from aws_cdk import aws_ecs as ecs
8 | from constructs import Construct
9 | from aws_cdk.aws_ecr_assets import DockerImageAsset, Platform
10 |
11 | from .docker_configs_construct import DockerConfigsManager
12 |
13 | logging.basicConfig() # NOSONAR
14 | logger = logging.getLogger(__name__)
15 | logger.setLevel(logging.INFO)
16 |
17 | ECR_REPO_NAME = os.getenv("ECR_REPO_NAME") or "prebid-server"
18 | PUBLIC_ECR_REGISTRY = os.getenv("PUBLIC_ECR_REGISTRY")
19 | ECR_REPO_TAG = os.getenv("PUBLIC_ECR_TAG") or "latest"
20 | ECR_REGISTRY = os.getenv("OVERRIDE_ECR_REGISTRY")
21 | if not ECR_REGISTRY and (PUBLIC_ECR_REGISTRY and ECR_REPO_TAG):
22 | ECR_REGISTRY = f"{PUBLIC_ECR_REGISTRY}/{ECR_REPO_NAME}:{ECR_REPO_TAG}"
23 | logger.debug(f"ECR_REGISTRY: {ECR_REGISTRY}")
24 |
25 |
26 | class ContainerImageConstruct(Construct):
27 |
28 | def __init__(
29 | self,
30 | scope,
31 | id,
32 | solutions_template_options
33 | ) -> None:
34 | """
35 | This construct creates Docker image.
36 | """
37 | super().__init__(scope, id)
38 |
39 | docker_build_location = self.get_docker_build_location()
40 |
41 | # Deploy Docker Configuration Files to S3 bucket
42 | docker_configs_manager = DockerConfigsManager(self, "ConfigFiles", docker_build_location)
43 | self.docker_configs_manager_bucket = docker_configs_manager.bucket
44 |
45 | if ECR_REGISTRY is None:
46 | # When running cdk-deploy, unless ECR_REGISTRY is set we will build the image locally
47 | logger.info("Prepare ECS container image from image asset.")
48 |
49 | asset = DockerImageAsset(
50 | self,
51 | ECR_REPO_NAME,
52 | directory=docker_build_location,
53 | platform=Platform.LINUX_AMD64,
54 | )
55 |
56 | self.image_ecs_obj = ecs.ContainerImage.from_docker_image_asset(asset)
57 | self.image_ecs_str = asset.image_uri
58 | else:
59 | # When our pipeline builds the template, ECR_REGISTRY is set and we use a hosted image
60 | logger.info("Prepare ECS container image from registry.")
61 | image_cfn_param = CfnParameter(
62 | self,
63 | id="PrebidServerContainerImage",
64 | type="String",
65 | description="The fully qualified name of the Prebid Server container image to deploy.",
66 | default=ECR_REGISTRY
67 | )
68 | solutions_template_options.add_parameter(image_cfn_param, label="", group="Container Image Settings")
69 |
70 | self.image_ecs_obj = ecs.ContainerImage.from_registry(image_cfn_param.value_as_string)
71 | self.image_ecs_str = image_cfn_param.value_as_string
72 |
73 | CfnOutput(self, "Prebid-ECS-Image", value=self.image_ecs_str)
74 | CfnOutput(self, "Prebid-Solution-Config-Bucket",
75 | value=f"https://{Aws.REGION}.console.aws.amazon.com/s3/home?region={Aws.REGION}&bucket={self.docker_configs_manager_bucket.bucket_name}")
76 |
77 | @staticmethod
78 | def get_docker_build_location():
79 | docker_build_location = "../../deployment/ecr/prebid-server"
80 | if os.getcwd().split("/")[-1] == "source":
81 | docker_build_location = "../deployment/ecr/prebid-server"
82 | elif os.getcwd().split("/")[-1] == "deployment":
83 | docker_build_location = "ecr/prebid-server"
84 |
85 | return docker_build_location
86 |
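
A standalone sketch of the registry-resolution rules encoded by the module-level variables above (environment values are illustrative):

    def resolve_registry(env):
        repo = env.get("ECR_REPO_NAME") or "prebid-server"
        tag = env.get("PUBLIC_ECR_TAG") or "latest"
        registry = env.get("OVERRIDE_ECR_REGISTRY")
        public_registry = env.get("PUBLIC_ECR_REGISTRY")
        if not registry and public_registry and tag:
            registry = f"{public_registry}/{repo}:{tag}"
        return registry

    assert resolve_registry({}) is None  # no registry set -> DockerImageAsset builds the image locally
    assert resolve_registry({"PUBLIC_ECR_REGISTRY": "public.ecr.aws/example"}) == "public.ecr.aws/example/prebid-server:latest"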
--------------------------------------------------------------------------------
/source/infrastructure/prebid_server/ecs_service_construct.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | from aws_cdk import Duration
5 | from aws_cdk import aws_ec2 as ec2
6 | from aws_cdk import aws_ecs as ecs
7 | from aws_cdk import aws_elasticloadbalancingv2 as elbv2
8 | from constructs import Construct
9 |
10 | import prebid_server.stack_constants as globals
11 |
12 |
13 | class ECSServiceConstruct(Construct):
14 | def __init__(
15 | self,
16 | scope,
17 | id,
18 | prebid_vpc,
19 | prebid_cluster,
20 | prebid_task_definition,
21 | prebid_task_subnets,
22 | prebid_container,
23 | prebid_fs,
24 | ) -> None:
25 | """
26 | This construct creates the ECS Fargate service, its ALB target group, and task auto scaling resources.
27 | """
28 | super().__init__(scope, id)
29 |
30 | fargate_service = ecs.FargateService(
31 | self,
32 | "PrebidFargateService",
33 | cluster=prebid_cluster,
34 | task_definition=prebid_task_definition,
35 | vpc_subnets=ec2.SubnetSelection(subnets=prebid_task_subnets),
36 | capacity_provider_strategies=[
37 | ecs.CapacityProviderStrategy(
38 | capacity_provider="FARGATE",
39 | weight=globals.FARGATE_RESERVED_WEIGHT,
40 | ),
41 | ecs.CapacityProviderStrategy(
42 | capacity_provider="FARGATE_SPOT",
43 | weight=globals.FARGATE_SPOT_WEIGHT,
44 | ),
45 | ],
46 | )
47 |
48 | self.alb_target_group = elbv2.ApplicationTargetGroup(
49 | self,
50 | "ALBTargetGroup",
51 | port=80, targets=[fargate_service.load_balancer_target(
52 | container_name=prebid_container.container_name,
53 | container_port=prebid_container.container_port)],
54 | vpc=prebid_vpc,
55 | )
56 |
57 | # Suppress cfn_guard warning about open egress in the Fargate service security group because Prebid Server containers require open egress in order to connect to demand partners.
58 | fargate_service_security_group = fargate_service.connections.security_groups[0]
59 | security_group_l1_construct = fargate_service_security_group.node.find_child(id='Resource')
60 | security_group_l1_construct.add_metadata("guard", {
61 | 'SuppressedRules': ['EC2_SECURITY_GROUP_EGRESS_OPEN_TO_WORLD_RULE',
62 | 'SECURITY_GROUP_EGRESS_ALL_PROTOCOLS_RULE']})
63 |
64 | # Allow traffic to/from EFS
65 | fargate_service.connections.allow_from(
66 | prebid_fs, ec2.Port.tcp(globals.EFS_PORT)
67 | )
68 | fargate_service.connections.allow_to(
69 | prebid_fs, ec2.Port.tcp(globals.EFS_PORT)
70 | )
71 |
72 | # Add health check
73 | self.alb_target_group.configure_health_check(
74 | path=globals.HEALTH_PATH,
75 | interval=Duration.seconds(globals.HEALTH_CHECK_INTERVAL_SECS),
76 | timeout=Duration.seconds(globals.HEALTH_CHECK_TIMEOUT_SECS),
77 | )
78 |
79 | self.scalable_target = fargate_service.auto_scale_task_count(
80 | min_capacity=globals.TASK_MIN_CAPACITY,
81 | max_capacity=globals.TASK_MAX_CAPACITY,
82 | )
83 |
84 | self.scalable_target.scale_on_cpu_utilization(
85 | "FargateServiceCpuScaling",
86 | target_utilization_percent=globals.CPU_TARGET_UTILIZATION_PCT,
87 | )
88 |
89 | self.scalable_target.scale_on_memory_utilization(
90 | "FargateServiceMemoryScaling",
91 | target_utilization_percent=globals.MEMORY_TARGET_UTILIZATION_PCT,
92 | )
93 |
--------------------------------------------------------------------------------
/source/infrastructure/prebid_server/efs_cleanup_lambda/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
--------------------------------------------------------------------------------
/source/infrastructure/prebid_server/efs_cleanup_lambda/container_stop_logs.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | """
5 | This Lambda function archives log files when a Fargate container stops.
6 | It captures any log files that might not have gone through the periodic archiving process defined in prebid-logging.xml.
7 | It is triggered by an EventBridge event when the container receives the STOPPING (SIGTERM) condition.
8 | """
9 |
10 | import tarfile
11 | import os
12 | from pathlib import Path
13 | from datetime import datetime, timezone
14 | from aws_lambda_powertools import Logger
15 |
16 | try:
17 | from cloudwatch_metrics import metrics
18 | except ImportError:
19 | from aws_lambda_layers.metrics_layer.python.cloudwatch_metrics import metrics
20 |
21 | EFS_MOUNT_PATH = os.environ["EFS_MOUNT_PATH"]
22 | EFS_METRICS = os.environ["EFS_METRICS"]
23 | EFS_LOGS = os.environ["EFS_LOGS"]
24 | METRICS_NAMESPACE = os.environ["METRICS_NAMESPACE"]
25 | RESOURCE_PREFIX = os.environ["RESOURCE_PREFIX"]
26 |
27 | logger = Logger(utc=True, service="container-stop-logs")
28 |
29 |
30 | def event_handler(event, _):
31 | """
32 | Entry point into the Lambda function; captures and archives the last active log files on container stop.
33 | """
34 |
35 | metrics.Metrics(
36 | METRICS_NAMESPACE, RESOURCE_PREFIX, logger
37 | ).put_metrics_count_value_1(metric_name="ContainerStopLogs")
38 |
39 | detail = event["detail"]
40 | container_run_id = detail["containers"][0]["runtimeId"].split('-')[0]
41 | logger.info(f"Container run id {container_run_id} status {detail['lastStatus']}")
42 |
43 | efs_mount_path = Path(EFS_MOUNT_PATH)
44 | metrics_log_folder = efs_mount_path.joinpath(EFS_METRICS).joinpath(container_run_id)
45 | compress_log_file(metrics_log_folder, "prebid-metrics.log")
46 |
47 |
48 | def compress_log_file(log_folder_path: Path, log_file_name: str):
49 | archived_folder = create_or_retrieve_archived_folder(log_folder_path)
50 |
51 | log_file_path = log_folder_path / log_file_name
52 | if not log_file_path.exists():
53 | logger.warning(f"{log_file_path} does not exist")
54 | return
55 |
56 | utc_time = datetime.now(timezone.utc)
57 | file_to_compress = (
58 | archived_folder
59 | / f"{log_file_name.split('.')[0]}.{utc_time.year}-{utc_time.month:02d}-{utc_time.day:02d}_{utc_time.hour:02d}.log.gz"
60 | )
61 |
62 | with tarfile.open(file_to_compress, "w:gz") as tar: # NOSONAR
63 | tar.add(log_file_path)
64 |
65 | logger.info(f"Log file compressed: {file_to_compress}")
66 |
67 |
68 | def create_or_retrieve_archived_folder(log_folder_path) -> Path:
69 | archived_folder = Path(log_folder_path).joinpath("archived")
70 | try:
71 | # only create if folder does not exist
72 | archived_folder.mkdir(exist_ok=True, parents=True)
73 | except PermissionError as p:
74 | logger.error(f"Permission error: {p}")
75 | raise p
76 |
77 | return archived_folder
78 |
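
A worked example of the archive name produced by compress_log_file (the timestamp is illustrative):

    from datetime import datetime, timezone

    utc_time = datetime(2024, 4, 1, 13, 5, tzinfo=timezone.utc)
    log_file_name = "prebid-metrics.log"
    archive_name = (
        f"{log_file_name.split('.')[0]}."
        f"{utc_time.year}-{utc_time.month:02d}-{utc_time.day:02d}_{utc_time.hour:02d}.log.gz"
    )
    assert archive_name == "prebid-metrics.2024-04-01_13.log.gz"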
--------------------------------------------------------------------------------
/source/infrastructure/prebid_server/efs_cleanup_lambda/delete_efs_files.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | """
4 | This module is a Lambda function that deletes files from EFS after they have been transferred to S3 for long-term storage.
5 | It is triggered by EventBridge after a successful DataSync task execution of the metrics or logs transfer tasks.
6 | """
7 |
8 | import os
9 |
10 | import boto3
11 | from botocore import config
12 | from aws_lambda_powertools import Logger
13 | try:
14 | from cloudwatch_metrics import metrics
15 | except ImportError:
16 | from aws_lambda_layers.metrics_layer.python.cloudwatch_metrics import metrics
17 | try:
18 | from datasync_reports import reports
19 | except ImportError:
20 | from aws_lambda_layers.datasync_s3_layer.python.datasync_reports import reports
21 |
22 |
23 | logger = Logger(utc=True, service="efs-cleanup-lambda")
24 |
25 | EFS_MOUNT_PATH = os.environ["EFS_MOUNT_PATH"]
26 | METRICS_TASK_ARN = os.environ["METRICS_TASK_ARN"]
27 | METRICS_NAMESPACE = os.environ['METRICS_NAMESPACE']
28 | RESOURCE_PREFIX = os.environ['RESOURCE_PREFIX']
29 | DATASYNC_REPORT_BUCKET = os.environ["DATASYNC_REPORT_BUCKET"]
30 | AWS_ACCOUNT_ID = os.environ["AWS_ACCOUNT_ID"]
31 | EFS_METRICS = os.environ["EFS_METRICS"]
32 | EFS_LOGS = os.environ["EFS_LOGS"]
33 |
34 | DIRECTORY_MAP = {
35 | METRICS_TASK_ARN: EFS_METRICS
36 | }
37 | SOLUTION_VERSION = os.environ["SOLUTION_VERSION"]
38 | SOLUTION_ID = os.environ["SOLUTION_ID"]
39 | append_solution_identifier = {
40 | "user_agent_extra": f"AwsSolution/{SOLUTION_ID}/{SOLUTION_VERSION}"
41 | }
42 | default_config = config.Config(**append_solution_identifier)
43 | s3_client = boto3.client("s3", config=default_config)
44 |
45 | def event_handler(event, _):
46 | """
47 | This function is the entry point for the Lambda and handles retrieving transferred S3 object keys and deleting them from the mounted EFS filesystem.
48 | """
49 | metrics.Metrics(METRICS_NAMESPACE, RESOURCE_PREFIX, logger).put_metrics_count_value_1(metric_name="DeleteEfsFiles")
50 |
51 | object_keys = reports.get_transferred_object_keys(
52 | event=event,
53 | datasync_report_bucket=DATASYNC_REPORT_BUCKET,
54 | aws_account_id=AWS_ACCOUNT_ID,
55 | s3_client=s3_client
56 | )
57 |
58 | # extract the task arn from the task execution arn
59 | task_arn = event['resources'][0].split("/execution/")[0]
60 | directory = DIRECTORY_MAP.get(task_arn)
61 |
62 | if len(object_keys) > 0:
63 | logger.info(f"{len(object_keys)} new {directory} files to process: {object_keys}")
64 |
65 | failed = []
66 | for key in object_keys:
67 | path = f"{EFS_MOUNT_PATH}/{directory}/{key}"
68 | try:
69 | os.remove(path)
70 | except OSError as e:
71 | failed.append(key)
72 | logger.error(f"Error: {e}")
73 |
74 | if len(failed) == 0:
75 | logger.info("All files deleted successfully.")
76 | else:
77 | logger.error(f"{len(failed)} files failed to delete: {failed}")
78 |
79 | else:
80 | logger.info(f"No new {directory} files to delete from EFS.") # nosec
81 |
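
To make the ARN handling concrete, a sketch with a made-up DataSync task execution ARN of the form EventBridge delivers:

    event = {"resources": [
        "arn:aws:datasync:us-east-1:111122223333:task/task-0a12a34ab112233ab/execution/exec-0b34c56de7788990f"
    ]}
    task_arn = event["resources"][0].split("/execution/")[0]
    assert task_arn == "arn:aws:datasync:us-east-1:111122223333:task/task-0a12a34ab112233ab"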
--------------------------------------------------------------------------------
/source/infrastructure/prebid_server/efs_construct.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | from aws_cdk import RemovalPolicy, CfnOutput
5 | from aws_cdk import aws_ec2 as ec2
6 | from aws_cdk import aws_efs as efs
7 | from constructs import Construct
8 |
9 | import prebid_server.stack_constants as globals
10 |
11 |
12 | class EfsConstruct(Construct):
13 | def __init__(
14 | self,
15 | scope,
16 | id,
17 | prebid_vpc,
18 | ) -> None:
19 | """
20 | This construct creates EFS resources.
21 | """
22 | super().__init__(scope, id)
23 |
24 | # Define EFS file system
25 | efs_security_group = ec2.SecurityGroup(self, "EfsSecurityGroup", vpc=prebid_vpc, allow_all_outbound=False)
26 | efs_security_group.node.default_child.add_metadata(
27 | "guard", {
28 | 'SuppressedRules': ['SECURITY_GROUP_MISSING_EGRESS_RULE']
29 | }
30 | )
31 |
32 | self.prebid_fs = efs.FileSystem(
33 | self,
34 | "Prebid-fs",
35 | vpc=prebid_vpc,
36 | security_group=efs_security_group,
37 | performance_mode=efs.PerformanceMode.GENERAL_PURPOSE,
38 | lifecycle_policy=efs.LifecyclePolicy.AFTER_7_DAYS,
39 | vpc_subnets=ec2.SubnetSelection(subnet_group_name=globals.PVT_SUBNET_NAME),
40 | removal_policy=RemovalPolicy.DESTROY,
41 | encrypted=True,
42 | )
43 |
44 | self.prebid_fs_access_point = efs.AccessPoint(
45 | self,
46 | "Prebid-fs-access-point",
47 | file_system=self.prebid_fs,
48 | path="/logging",
49 | create_acl=efs.Acl(owner_uid="1001", owner_gid="1001", permissions="770"),
50 | posix_user=efs.PosixUser(uid="1001", gid="1001"),
51 | )
52 |
--------------------------------------------------------------------------------
/source/infrastructure/prebid_server/glue_trigger_lambda/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
--------------------------------------------------------------------------------
/source/infrastructure/prebid_server/glue_trigger_lambda/start_glue_job.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | """
4 | This module is a Lambda function that starts the Metrics ETL Glue Job with a list of object keys to be ingested.
5 | It is triggered by EventBridge after a successful DataSync task execution of the metrics transfer task.
6 | """
7 |
8 | import json
9 | import os
10 |
11 | import boto3
12 | from botocore import config
13 | from aws_lambda_powertools import Logger
14 | try:
15 | from cloudwatch_metrics import metrics
16 | except ImportError:
17 | from aws_lambda_layers.metrics_layer.python.cloudwatch_metrics import metrics
18 | try:
19 | from datasync_reports import reports
20 | except ImportError:
21 | from aws_lambda_layers.datasync_s3_layer.python.datasync_reports import reports
22 |
23 |
24 | logger = Logger(utc=True, service="glue-trigger-lambda")
25 |
26 | GLUE_JOB_NAME = os.environ["GLUE_JOB_NAME"]
27 | METRICS_NAMESPACE = os.environ['METRICS_NAMESPACE']
28 | RESOURCE_PREFIX = os.environ['RESOURCE_PREFIX']
29 | DATASYNC_REPORT_BUCKET = os.environ['DATASYNC_REPORT_BUCKET']
30 | AWS_ACCOUNT_ID = os.environ["AWS_ACCOUNT_ID"]
31 | SOLUTION_VERSION = os.environ.get("SOLUTION_VERSION")
32 | SOLUTION_ID = os.environ.get("SOLUTION_ID")
33 | append_solution_identifier = {
34 | "user_agent_extra": f"AwsSolution/{SOLUTION_ID}/{SOLUTION_VERSION}"
35 | }
36 | default_config = config.Config(**append_solution_identifier)
37 | glue_client = boto3.client("glue", config=default_config)
38 | s3_client = boto3.client("s3", config=default_config)
39 |
40 | def event_handler(event, _):
41 | """
42 | This function is the entry point for the Lambda and handles retrieving transferred S3 object keys and starting the Glue Job.
43 | """
44 | metrics.Metrics(METRICS_NAMESPACE, RESOURCE_PREFIX, logger).put_metrics_count_value_1(metric_name="StartGlueJob")
45 |
46 | object_keys = reports.get_transferred_object_keys(
47 | event=event,
48 | datasync_report_bucket=DATASYNC_REPORT_BUCKET,
49 | aws_account_id=AWS_ACCOUNT_ID,
50 | s3_client=s3_client
51 | )
52 |
53 | if len(object_keys) > 0:
54 | logger.info(f"{len(object_keys)} new files to process: {object_keys}")
55 | try:
56 | response = glue_client.start_job_run(
57 | JobName=GLUE_JOB_NAME,
58 | Arguments={
59 | "--object_keys": json.dumps(object_keys)
60 | }
61 | )
62 | logger.info(f"Glue Job response: {response}")
63 |
64 | except Exception as err:
65 | logger.error(f"Error starting Glue Job: {err}")
66 |             raise
67 | else:
68 | logger.info("No new files to send to Glue.")
69 |
70 |
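For reference, a sketch of the EventBridge event this handler consumes; the `resources` shape is taken from the unit tests in this repository, and the values are illustrative:

```python
# DataSync task-execution completion event (illustrative values):
sample_event = {
    "resources": [
        "arn:aws:sync:us-west-2:9111122223333:task/task-example2/execution/exec-example316440271f"
    ]
}

# event_handler(sample_event, None) looks up the DataSync verified-files report
# under datasync/Detailed-Reports/<task-id>/<execution-id>/ in DATASYNC_REPORT_BUCKET,
# then starts GLUE_JOB_NAME with --object_keys set to the transferred keys.
```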
--------------------------------------------------------------------------------
/source/infrastructure/prebid_server/prebid_metrics_schema.json:
--------------------------------------------------------------------------------
1 | {
2 | "Counter": {
3 | "container_id": "string",
4 | "name": "string",
5 | "timestamp": "timestamp",
6 | "count": "int"
7 | },
8 | "Gauge": {
9 | "container_id": "string",
10 | "name": "string",
11 | "timestamp": "timestamp",
12 | "value": "string"
13 | },
14 | "Histogram": {
15 | "container_id": "string",
16 | "name": "string",
17 | "timestamp": "timestamp",
18 | "count": "bigint",
19 | "min": "bigint",
20 | "max": "bigint",
21 | "mean": "double",
22 | "stddev": "double",
23 | "median": "double",
24 | "p75": "double",
25 | "p95": "double",
26 | "p98": "double",
27 | "p99": "double",
28 | "p999": "double"
29 | },
30 | "Meter": {
31 | "container_id": "string",
32 | "name": "string",
33 | "timestamp": "timestamp",
34 | "count": "bigint",
35 | "mean_rate": "double",
36 | "m1": "double",
37 | "m5": "double",
38 | "m15": "double",
39 | "rate_unit": "string"
40 | },
41 | "Timer": {
42 | "container_id": "string",
43 | "name": "string",
44 | "timestamp": "timestamp",
45 | "count": "bigint",
46 | "min": "double",
47 | "max": "double",
48 | "mean": "double",
49 | "stddev": "double",
50 | "median": "double",
51 | "p75": "double",
52 | "p95": "double",
53 | "p98": "double",
54 | "p99": "double",
55 | "p999": "double",
56 | "mean_rate": "double",
57 | "m1": "double",
58 | "m5": "double",
59 | "m15": "double",
60 | "rate_unit": "string",
61 | "duration_unit": "string"
62 | }
63 | }
64 |
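For reference, a hypothetical row conforming to the `Counter` schema above (all values illustrative):

```json
{
  "container_id": "3f2c9a7e1b04",
  "name": "app.requests",
  "timestamp": "2024-01-01 00:00:00",
  "count": 12
}
```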
--------------------------------------------------------------------------------
/source/infrastructure/prebid_server/stack_constants.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | from pathlib import Path
5 |
6 | CUSTOM_RESOURCES_PATH = Path(__file__).absolute().parents[1] / "custom_resources"
7 |
8 | X_SECRET_HEADER_NAME = "X-Header-Secret"
9 |
10 | PVT_SUBNET_NAME = "Prebid-Private"
11 | PUB_SUBNET_NAME = "Prebid-Public"
12 | VPC_CIDR = "10.8.0.0/16" # NOSONAR
13 | CIDR_MASK = 20
14 | MAX_AZS = 2
15 | NAT_GATEWAYS = 2
16 |
17 | CONTAINER_PORT = 8080
18 | MEMORY_LIMIT_MIB = 4096
19 | VCPU = 1024
20 |
21 | HEALTH_URL_DOMAIN = "http://localhost:8080"
22 | HEALTH_PATH = "/status"
23 | HEALTH_ENDPOINT = HEALTH_URL_DOMAIN + HEALTH_PATH
24 | HEALTH_CHECK_INTERVAL_SECS = 60
25 | HEALTH_CHECK_TIMEOUT_SECS = 5
26 |
27 | EFS_VOLUME_NAME = "prebid-efs-volume"
28 | EFS_PORT = 2049
29 | EFS_MOUNT_PATH = "/mnt/efs"
30 | EFS_METRICS = "metrics"
31 | EFS_LOGS = "logs"
32 |
33 | # Configuration for container autoscaling
34 | CPU_TARGET_UTILIZATION_PCT = 66
35 | MEMORY_TARGET_UTILIZATION_PCT = 50
36 | REQUESTS_PER_TARGET = 5000
37 | TASK_MIN_CAPACITY = 2
38 | TASK_MAX_CAPACITY = 300
39 |
40 | FARGATE_RESERVED_WEIGHT = 1
41 | FARGATE_SPOT_WEIGHT = 1
42 |
43 | DATASYNC_METRICS_SCHEDULE = "cron(30 * * * ? *)" # hourly on the half hour
44 | DATASYNC_LOGS_SCHEDULE = "cron(30 * * * ? *)" # hourly on the half hour
45 | DATASYNC_REPORT_LIFECYCLE_DAYS = 1
46 |
47 | GLUE_MAX_CONCURRENT_RUNS = 10
48 | GLUE_TIMEOUT_MINS = 120
49 | GLUE_ATHENA_OUTPUT_LIFECYCLE_DAYS = 1
50 |
51 | # CloudFront managed headers policy CORS-with-preflight-and-SecurityHeadersPolicy
52 | RESPONSE_HEADERS_POLICY_ID = "eaab4381-ed33-4a86-88ca-d9558dc6cd63"
53 |
54 | CLOUDWATCH_ALARM_TYPE = "AWS::CloudWatch::Alarm"
55 | CLOUDWATCH_ALARM_NAMESPACE = "AWS/ApplicationELB"
56 |
57 | # Anomaly detection with 2 stdev (medium band)
58 | ANOMALY_DETECTION_BAND_2 = "ANOMALY_DETECTION_BAND(m1, 2)"
59 |
60 | CLOUD_FRONT_NAMESPACE = "AWS/CloudFront"
61 | RESOURCE_NAMESPACE = "aws:ResourceAccount"
62 |
--------------------------------------------------------------------------------
/source/infrastructure/setup.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import json
5 | from pathlib import Path
6 |
7 | import setuptools
8 |
9 | readme_path = Path(__file__).resolve().parent.parent.parent / "README.md"
10 | long_description = readme_path.read_text()
11 | cdk_json_path = Path(__file__).resolve().parent / "cdk.json"
12 | cdk_json = json.loads(cdk_json_path.read_text())
13 | VERSION = cdk_json["context"]["SOLUTION_VERSION"]
14 |
15 |
16 | setuptools.setup(
17 | name="infrastructure",
18 | version=VERSION,
19 | description="Deploy and use Prebid Server Deployment on AWS",
20 | long_description=long_description,
21 | long_description_content_type="text/markdown",
22 | author="AWS Solutions Builders",
23 | packages=setuptools.find_packages(),
24 | install_requires=[
25 | "aws-cdk-lib>=2.140.0",
26 | "pip>=24.0",
27 | ],
28 | python_requires=">=3.7",
29 | classifiers=[
30 | "Development Status :: 4 - Beta",
31 | "Intended Audience :: Developers",
32 | "License :: OSI Approved :: Apache Software License",
33 | "Programming Language :: JavaScript",
34 | "Programming Language :: Python :: 3 :: Only",
35 | "Programming Language :: Python :: 3.7",
36 | "Programming Language :: Python :: 3.8",
37 | "Programming Language :: Python :: 3.9",
38 | "Topic :: Software Development :: Code Generators",
39 | "Topic :: Utilities",
40 | "Typing :: Typed",
41 | ],
42 | )
--------------------------------------------------------------------------------
/source/loadtest/jmx/.gitignore:
--------------------------------------------------------------------------------
1 | *.jtl
2 | *.log
--------------------------------------------------------------------------------
/source/loadtest/jmx/README.md:
--------------------------------------------------------------------------------
1 | ### Prerequisite
2 | * [Apache JMeter](https://jmeter.apache.org/)
3 |
4 | ### Usage
5 | ````
6 | $ jmeter -n -t prebid_server_test_plan.jmx -l log.jtl
7 | ````
8 | ### Test Plan
9 | #### prebid_server_test_plan.jmx
10 | This test plan exercises several commercial bidding adapters in Prebid Server configured to respond in test mode. When invoked this way, the bidding adapters do not make connections over the Internet and instead respond with fixed data. The plan is suitable for verifying that basic operations of the deployed stack are working.
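To also produce JMeter's HTML report dashboard from the same run, the standard JMeter report options can be appended (these are stock JMeter flags, not specific to this test plan; the output directory must be new or empty):

````
$ jmeter -n -t prebid_server_test_plan.jmx -l log.jtl -e -o report
````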
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/source/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "prebid-server-deployment-on-aws"
3 | version = "1.1.1"
4 | description = "Prebid Server Deployment on AWS - Easily deploy and operate a scalable Prebid Server for programmatic advertising auctions"
5 | license = "MIT"
6 | authors = ["https://docs.aws.amazon.com/solutions/latest/prebid-server-deployment-on-aws/contributors.html"]
7 | packages = [
8 | { include = "source" },
9 | { include = "source/**/*.py" }
10 | ]
11 | package-mode = false
12 |
13 | [tool.poetry.dependencies]
14 | python = "^3.11"
15 | aws-cdk-lib = ">=2.140.0"
16 | boto3 = ">=1.24.11,<2.0.0"
17 | aws-lambda-powertools = "*"
18 | crhelper = "~2.0.11"
19 | pyparsing = "*"
20 | url_normalize = "*"
21 | docker = "*"
22 | cryptography = ">=44.0.1"
23 |
24 | [tool.poetry.group.dev.dependencies]
25 | pytest = "*"
26 | pytest-cov = "*"
27 | pytest-env = ">=0.6.2"
28 | pytest-mock = ">=3.10.0"
29 | moto = ">=5.0.0"
30 | cdk-nag = "*"
31 | responses = "*"
32 | pytest-ordering = "*"
33 |
34 | [tool.poetry.group.local.dependencies]
35 | aws-solutions-cdk = {path = "cdk_solution_helper_py/helpers_cdk", develop = true}
36 | aws-solutions-python = {path = "cdk_solution_helper_py/helpers_common", develop = true}
37 | infrastructure = {path = "infrastructure", develop = true}
38 | unit-tests = {path = "tests", develop = true}
39 |
40 | [build-system]
41 | requires = ["poetry-core>=1.0.0"]
42 | build-backend = "poetry.core.masonry.api"
43 |
--------------------------------------------------------------------------------
/source/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | env =
3 | MOTO_ACCOUNT_ID=111111111111
4 | POWERTOOLS_TRACE_DISABLED=1
5 | SOLUTION_ID=SO9999test
6 | SOLUTION_VERSION=v99.99.99
7 | SOLUTION_NAME=Prebid Server Deployment on AWS
8 | AWS_REGION=us-east-1
9 | AWS_DEFAULT_REGION=us-east-1
10 | RESOURCE_PREFIX=prefix
11 | TEAM=team
12 | PIPELINE=pipeline
13 | STAGE=stage
14 | stage_bucket=stage_bucket
15 | ENV=dev
16 | OCTAGON_PIPELINE_TABLE_NAME=octagon-Pipelines-dev-prefix
17 | OCTAGON_DATASET_TABLE_NAME=octagon-Datasets-dev-prefix
18 | OCTAGON_METADATA_TABLE_NAME=octagon-Object-Metadata-dev-prefix
19 | SDLF_CUSTOMER_CONFIG=sdlf-customer-config
20 | STACK_NAME=prefix
21 | METRICS_NAMESPACE=metrics-namespace
22 | norecursedirs = cdk_solution_helper_py infrastructure scripts cdk.out .venv
23 | markers =
24 | no_cdk_lambda_mock: marks test that need to build AWS Lambda Functions or Layers with CDK
--------------------------------------------------------------------------------
/source/requirements-poetry.txt:
--------------------------------------------------------------------------------
1 | poetry==2.0.1
--------------------------------------------------------------------------------
/source/tests/conftest.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import os
5 | import pytest
6 |
7 |
8 | @pytest.fixture(scope="session", autouse=True)
9 | def aws_credentials():
10 | """Mocked AWS Credentials for moto."""
11 | os.environ["AWS_ACCESS_KEY_ID"] = "123456789"
12 | os.environ["AWS_SECRET_ACCESS_KEY"] = "987654321"
13 | os.environ["AWS_SECURITY_TOKEN"] = "test_securitytoken"
14 | os.environ["AWS_SESSION_TOKEN"] = "test_session_token"
15 | os.environ["AWS_DEFAULT_REGION"] = "us-east-1"
16 | os.environ["AWS_REGION"] = os.environ["AWS_DEFAULT_REGION"]
17 |
18 |
19 | @pytest.fixture(autouse=True)
20 | def handler_env():
21 | os.environ['METRICS_NAMESPACE'] = "testmetrics"
22 | os.environ['STACK_NAME'] = "test_stack_name"
23 | os.environ['RESOURCE_PREFIX'] = "test_stack_name"
24 | os.environ['SEND_ANONYMIZED_DATA'] = "Yes"
25 |
26 |
--------------------------------------------------------------------------------
/source/tests/functional_tests/README.md:
--------------------------------------------------------------------------------
1 | ## Run functional tests
2 |
3 | ### Prerequisite
4 | * Deploy the solution before running the functional tests
5 |
6 | ### Usage:
7 | ```shell
8 | cd source/tests/functional_tests
9 |
10 | ./run-functional-tests.sh [-h] [-v] [--in-venv] [--test-file-name] [--extras] [--region] --stack-name {STACK_NAME} --profile {PROFILE}
11 | ```
12 |
13 | #### Required Parameter Details:
14 | * `STACK_NAME`: name of the CloudFormation stack where the solution is running.
15 | * `PROFILE`: the profile that you have set up in ~/.aws/credentials that you want to use for AWS CLI commands.
16 |
17 | #### Optional Parameter Details:
18 | * `--in-venv`: Run functional tests in an existing virtual environment; omit this parameter if you are not running the tests in a venv. [--in-venv 1]
19 | * `--test-file-name`: Run an individual test file (optional), e.g., --test-file-name test_bad_requests.py or --test-file-name test_bad_requests.py::test_request_rejected_by_waf_1
20 | * `--region`: AWS region for CLI commands (optional, defaults to us-east-1)
21 | * `--extras`: Append additional arguments to the pytest run (optional)
22 |
23 | #### The following options are available:
24 | * `-h | --help`: Print usage
25 | * `-v | --verbose`: Print script debug info
26 |
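#### Example
Run the full test suite against a deployed stack (stack name and profile below are placeholders):

```shell
./run-functional-tests.sh --stack-name my-prebid-stack --profile default --region us-east-1
```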
27 | #### Test Histogram table
28 | * Follow the instructions in [Load-Test README.MD](../../../source/loadtest/README.md)
29 | * The histogram test requires running a load test against a deployed prebid-server stack with `AMT_ADAPTER_ENABLED` and `AMT_BIDDING_SERVER_SIMULATOR_ENDPOINT` set.
30 |
31 |
32 |
--------------------------------------------------------------------------------
/source/tests/functional_tests/conftest.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import logging
5 |
6 | TEST_REGIONS = ["us-east-1"]
7 | SKIP_REASON=f"Test ETL metrics for only these regions: {TEST_REGIONS}"
8 |
9 |
10 | # Configure logging
11 | logging.basicConfig(
12 | level=logging.INFO,
13 | format="%(asctime)s - %(levelname)s - %(message)s"
14 | )
--------------------------------------------------------------------------------
/source/tests/functional_tests/requirements-test.txt:
--------------------------------------------------------------------------------
1 | pytest
2 | requests
3 | boto3
4 | pytest-ordering
5 | pytest-repeat
6 |
--------------------------------------------------------------------------------
/source/tests/functional_tests/test_bad_requests.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import requests
5 | import os
6 |
7 | CLOUDFRONT_ENDPOINT = os.environ["CLOUDFRONT_ENDPOINT"]
8 | contentType = "application/json"
9 | method = "GET"
10 |
11 |
12 | def test_request_rejected_by_waf_1():
13 | url = f"https://{CLOUDFRONT_ENDPOINT}/status/admine/password=xyz"
14 | response = requests.request(method, url)
15 | assert response.status_code == 403
16 |
17 |
18 | def test_request_rejected_by_waf_2():
19 | url = f"https://{CLOUDFRONT_ENDPOINT}/logs/activity.log"
20 | response = requests.request(method, url)
21 | assert response.status_code == 403
22 |
--------------------------------------------------------------------------------
/source/tests/setup.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import json
5 | from pathlib import Path
6 |
7 | import setuptools
8 |
9 | readme_path = Path(__file__).resolve().parent.parent.parent / "README.md"
10 | long_description = readme_path.read_text()
11 | cdk_json_path = Path(__file__).resolve().parent.parent / "infrastructure" / "cdk.json"
12 | cdk_json = json.loads(cdk_json_path.read_text())
13 | VERSION = cdk_json["context"]["SOLUTION_VERSION"]
14 |
15 |
16 | setuptools.setup(
17 | name="unit-tests",
18 | version=VERSION,
19 | description="Unit tests for Prebid Server Deployment on AWS",
20 | long_description=long_description,
21 | long_description_content_type="text/markdown",
22 | author="AWS Solutions Builders",
23 | packages=setuptools.find_packages(),
24 | install_requires=[
25 | "aws-cdk-lib>=2.140.0",
26 | "pip>=24.0",
27 | ],
28 | python_requires=">=3.7",
29 | classifiers=[
30 | "Development Status :: 4 - Beta",
31 | "Intended Audience :: Developers",
32 | "License :: OSI Approved :: Apache Software License",
33 | "Programming Language :: JavaScript",
34 | "Programming Language :: Python :: 3 :: Only",
35 | "Programming Language :: Python :: 3.7",
36 | "Programming Language :: Python :: 3.8",
37 | "Programming Language :: Python :: 3.9",
38 | "Topic :: Software Development :: Code Generators",
39 | "Topic :: Utilities",
40 | "Typing :: Typed",
41 | ],
42 | )
43 |
--------------------------------------------------------------------------------
/source/tests/unit_tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/prebid-server-deployment-on-aws/b51ae1a1c2278aeb15a265b36eef4035160b4f3e/source/tests/unit_tests/__init__.py
--------------------------------------------------------------------------------
/source/tests/unit_tests/aws_lambda_layers/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/prebid-server-deployment-on-aws/b51ae1a1c2278aeb15a265b36eef4035160b4f3e/source/tests/unit_tests/aws_lambda_layers/__init__.py
--------------------------------------------------------------------------------
/source/tests/unit_tests/aws_lambda_layers/aws_solutions/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/prebid-server-deployment-on-aws/b51ae1a1c2278aeb15a265b36eef4035160b4f3e/source/tests/unit_tests/aws_lambda_layers/aws_solutions/__init__.py
--------------------------------------------------------------------------------
/source/tests/unit_tests/aws_lambda_layers/aws_solutions/test_layer.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | # ###############################################################################
4 | # PURPOSE:
5 | # * Unit test for infrastructure/aws_lambda_layers/aws_solutions/layer.py.
6 | # USAGE:
7 | # ./run-unit-tests.sh --test-file-name aws_lambda_layers/aws_solutions/test_layer.py
8 | ###############################################################################
9 |
10 |
11 | import uuid
12 | from unittest.mock import MagicMock, patch
13 | from unit_tests.test_commons import FakeClass
14 |
15 |
16 | def test_solutions_layer():
17 | from aws_lambda_layers.aws_solutions.layer import SolutionsLayer
18 |
19 | with patch("aws_lambda_layers.aws_solutions.layer.super") as mock_super:
20 | mock_def = MagicMock()
21 | SolutionsLayer.__init__(self=mock_def, scope=FakeClass(), construct_id=str(uuid.uuid4()))
22 | mock_super.assert_called_once()
23 |
24 | node_mock_cls = MagicMock(node=MagicMock(try_find_child=MagicMock(return_value=True)))
25 | with patch("aws_cdk.Stack.of", return_value=node_mock_cls) as mock_cdk_stack_of:
26 |         assert SolutionsLayer.get_or_create(self=mock_def, scope=mock_def) is True
27 | mock_cdk_stack_of.assert_called_once()
--------------------------------------------------------------------------------
/source/tests/unit_tests/aws_lambda_layers/datasync_s3_layer/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/prebid-server-deployment-on-aws/b51ae1a1c2278aeb15a265b36eef4035160b4f3e/source/tests/unit_tests/aws_lambda_layers/datasync_s3_layer/__init__.py
--------------------------------------------------------------------------------
/source/tests/unit_tests/aws_lambda_layers/datasync_s3_layer/test_reports.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | # ###############################################################################
4 | # PURPOSE:
5 | # * Unit test for infrastructure/aws_lambda_layers/datasync_s3_layer/datasync_reports/reports.py
6 | # USAGE:
7 | # ./run-unit-tests.sh --test-file-name aws_lambda_layers/datasync_s3_layer/test_reports.py
8 | ###############################################################################
9 |
10 | import json
11 |
12 | import pytest
13 | from unittest.mock import patch
14 |
15 | def test_get_verified_files():
16 | from aws_lambda_layers.datasync_s3_layer.python.datasync_reports.reports import get_verified_files
17 |
18 | # test parsing of DataSync file names
19 | test_files_1 = [
20 | {
21 | "Key": "task-id.execution_id-verified-12345"
22 | },
23 | {
24 | "Key": "task-id.execution_id-failed-12345"
25 | }
26 | ]
27 | keys = get_verified_files(files=test_files_1)
28 | assert keys == ["task-id.execution_id-verified-12345"]
29 |
30 | # test error raising when no verified files found
31 | test_files_2 = [
32 | {
33 | "Key": "task-id.execution_id-failed-12345"
34 | }
35 | ]
36 | with pytest.raises(ValueError):
37 | get_verified_files(files=test_files_2)
38 |
39 |
40 | @patch('boto3.client')
41 | def test_get_transferred_object_keys(
42 | mock_boto3
43 | ):
44 | from aws_lambda_layers.datasync_s3_layer.python.datasync_reports.reports import get_transferred_object_keys
45 |
46 | mock_boto3.list_objects_v2.return_value = {
47 | "Contents": [
48 | {
49 | "Key": "task-id.execution_id-verified-12345"
50 | },
51 | {
52 | "Key": "task-id.execution_id-failed-12345"
53 | }
54 | ]
55 | }
56 | mock_boto3.get_object.return_value = {
57 | "Body": {"read": lambda: json.dumps({"Verified": [{"RelativePath": "file.txt", "VerifyStatus": "SUCCESS", "DstMetadata": {"Type": "File"}}]})}
58 | }
59 |
60 | test_event = {
61 | "resources": ["arn:aws:sync:us-west-2:9111122223333:task/task-example2/execution/exec-example316440271f"]
62 | }
63 | test_datasync_bucket = "test-bucket"
64 | test_aws_account = "9111122223333"
65 |
66 | get_transferred_object_keys(
67 | event=test_event,
68 | datasync_report_bucket=test_datasync_bucket,
69 | aws_account_id=test_aws_account,
70 | s3_client=mock_boto3
71 | )
72 | mock_boto3.list_objects_v2.assert_called_once_with(
73 | Bucket=test_datasync_bucket,
74 | Prefix="datasync/Detailed-Reports/task-example2/exec-example316440271f/",
75 | ExpectedBucketOwner=test_aws_account
76 | )
77 | mock_boto3.get_object.assert_called_once_with(
78 | Bucket=test_datasync_bucket,
79 | Key="task-id.execution_id-verified-12345",
80 | ExpectedBucketOwner=test_aws_account
81 | )
82 |
--------------------------------------------------------------------------------
/source/tests/unit_tests/aws_lambda_layers/metrics_layer/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/prebid-server-deployment-on-aws/b51ae1a1c2278aeb15a265b36eef4035160b4f3e/source/tests/unit_tests/aws_lambda_layers/metrics_layer/__init__.py
--------------------------------------------------------------------------------
/source/tests/unit_tests/aws_lambda_layers/metrics_layer/test_cloudwatch_metrics.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | # ###############################################################################
4 | # PURPOSE:
5 | # * Unit test for infrastructure/aws_lambda_layers/metrics_layer/python/cloudwatch_metrics/metrics.py.
6 | # USAGE:
7 | # ./run-unit-tests.sh --test-file-name aws_lambda_layers/metrics_layer/test_cloudwatch_metrics.py
8 | ###############################################################################
9 |
10 |
11 | import boto3
12 | import logging
13 | from moto import mock_aws
14 |
15 |
16 | logger = logging.getLogger()
17 |
18 | @mock_aws
19 | def test_metrics():
20 | from aws_lambda_layers.metrics_layer.python.cloudwatch_metrics.metrics import Metrics
21 |
22 | metrics_namespace = "test"
23 | resource_prefix = "test"
24 | metrics_cls = Metrics(metrics_namespace=metrics_namespace, resource_prefix=resource_prefix, logger=logger)
25 | assert metrics_namespace == metrics_cls.metrics_namespace
26 | assert resource_prefix == metrics_cls.resource_prefix
27 | assert logger == metrics_cls.logger
28 |
29 | metric_name = "test_metric"
30 | expected_dimension = [{'Name': 'stack-name', 'Value': resource_prefix}]
31 | metrics_cls.put_metrics_count_value_1(metric_name=metric_name)
32 | cw_client = boto3.client("cloudwatch")
33 | resp = cw_client.list_metrics(
34 | Namespace=metrics_namespace,
35 | MetricName=metric_name,
36 | Dimensions=expected_dimension
37 | )
38 |
39 | assert resp["Metrics"] == [
40 | {
41 | 'Namespace': metrics_namespace,
42 | 'MetricName': metric_name,
43 | 'Dimensions': expected_dimension
44 | }
45 | ]
--------------------------------------------------------------------------------
/source/tests/unit_tests/custom_resources/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/prebid-server-deployment-on-aws/b51ae1a1c2278aeb15a265b36eef4035160b4f3e/source/tests/unit_tests/custom_resources/__init__.py
--------------------------------------------------------------------------------
/source/tests/unit_tests/custom_resources/test_create_waf_webacl.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | # ###############################################################################
4 | # PURPOSE:
5 | # * Unit test for infrastructure/custom_resources/create_waf_webacl.
6 | # USAGE:
7 | # ./run-unit-tests.sh --test-file-name custom_resources/test_create_waf_webacl.py
8 | ###############################################################################
9 |
10 |
11 | from unittest.mock import patch
12 |
13 |
14 | @patch("crhelper.CfnResource")
15 | @patch("custom_resources.waf_webacl_lambda.create_waf_webacl.helper")
16 | def test_event_handler(helper_mock, _):
17 | from custom_resources.waf_webacl_lambda.create_waf_webacl import event_handler
18 |
19 | event_handler({}, None)
20 | helper_mock.assert_called_once()
21 |
22 |
23 | @patch("custom_resources.waf_webacl_lambda.create_waf_webacl.boto3.client")
24 | @patch("crhelper.CfnResource")
25 | def test_on_create(_, mock_boto3):
26 | expected_resp = {
27 | "Summary":
28 | {
29 | "ARN": 1234,
30 | "Name": "test_name",
31 | "Id": 1234,
32 | "LockToken": "lock_token",
33 | },
34 | }
35 | mock_boto3.return_value.create_web_acl.return_value = expected_resp
36 | from custom_resources.waf_webacl_lambda.create_waf_webacl import on_create
37 |
38 | with patch("custom_resources.waf_webacl_lambda.create_waf_webacl.helper.Data", {}) as helper_update_mock:
39 | on_create({
40 | "StackId": "test/id12345"
41 | }, None)
42 |
43 | assert helper_update_mock["webacl_arn"] == expected_resp["Summary"]["ARN"]
44 | assert helper_update_mock["webacl_name"] == expected_resp["Summary"]["Name"]
45 | assert helper_update_mock["webacl_id"] == expected_resp["Summary"]["Id"]
46 | assert helper_update_mock["webacl_locktoken"] == expected_resp["Summary"]["LockToken"]
--------------------------------------------------------------------------------
/source/tests/unit_tests/custom_resources/test_delete_lambda_eni.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | # ###############################################################################
4 | # PURPOSE:
5 | # * Unit test for infrastructure/custom_resources/delete_lambda_eni.
6 | # USAGE:
7 | # ./run-unit-tests.sh --test-file-name custom_resources/test_delete_lambda_eni.py
8 | ###############################################################################
9 |
10 | import boto3
11 | from unittest.mock import patch
12 | from moto import mock_aws
13 |
14 |
15 | @patch("crhelper.CfnResource")
16 | @patch("custom_resources.vpc_eni_lambda.delete_lambda_eni.helper")
17 | def test_event_handler(helper_mock, _):
18 | from custom_resources.vpc_eni_lambda.delete_lambda_eni import event_handler
19 |
20 | event_handler({}, None)
21 | helper_mock.assert_called_once()
22 |
23 |
24 | @mock_aws
25 | @patch("crhelper.CfnResource")
26 | def test_on_delete(_):
27 | ec2_client = boto3.client("ec2")
28 | ec2_resource = boto3.resource("ec2")
29 | vpc_resp = ec2_client.create_vpc(CidrBlock="10.0.0.0/16")
30 | subnet_resp = ec2_client.create_subnet(VpcId=vpc_resp["Vpc"]["VpcId"], CidrBlock="10.0.0.0/16")
31 | network_resp = ec2_client.create_network_interface(Groups=["test-123"], SubnetId=subnet_resp["Subnet"]["SubnetId"])
32 | instance_resp = ec2_resource.create_instances(ImageId="some-image", MinCount=1, MaxCount=1)
33 | ec2_client.attach_network_interface(NetworkInterfaceId=network_resp["NetworkInterface"]["NetworkInterfaceId"], InstanceId=instance_resp[0].id, DeviceIndex=0)
34 |
35 | from custom_resources.vpc_eni_lambda.delete_lambda_eni import on_delete
36 |
37 | with patch("custom_resources.vpc_eni_lambda.delete_lambda_eni.helper.Data", {}) as helper_update_mock:
38 | events = {
39 | "ResourceProperties": {
40 | "SECURITY_GROUP_ID": "test-123"
41 | }
42 | }
43 | on_delete(events, None)
44 | assert helper_update_mock["Response"] is not None
--------------------------------------------------------------------------------
/source/tests/unit_tests/custom_resources/test_delete_waf_webacl.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | # ###############################################################################
4 | # PURPOSE:
5 | # * Unit test for infrastructure/custom_resources/delete_waf_webacl.
6 | # USAGE:
7 | # ./run-unit-tests.sh --test-file-name custom_resources/test_delete_waf_webacl.py
8 | ###############################################################################
9 |
10 |
11 | from unittest.mock import patch
12 |
13 |
14 | @patch("crhelper.CfnResource")
15 | @patch("custom_resources.waf_webacl_lambda.delete_waf_webacl.helper")
16 | def test_event_handler(helper_mock, _):
17 | from custom_resources.waf_webacl_lambda.delete_waf_webacl import event_handler
18 |
19 | event_handler({}, None)
20 | helper_mock.assert_called_once()
21 |
22 |
23 | @patch("custom_resources.waf_webacl_lambda.delete_waf_webacl.boto3.client")
24 | @patch("crhelper.CfnResource")
25 | def test_on_delete(_, mock_boto3):
26 | event = {
27 | "ResourceProperties":
28 | {
29 | "CF_DISTRIBUTION_ID": "1234",
30 | "WAF_WEBACL_NAME": "test_name",
31 | "WAF_WEBACL_ID": 1234,
32 | "WAF_WEBACL_LOCKTOKEN": "lock_token",
33 | },
34 | }
35 |
36 | cf_resp = {
37 | "DistributionConfig": {
38 | "WebACLId": ""
39 | },
40 | "ETag": "testtag"
41 | }
42 |
43 | mock_boto3.return_value.get_distribution_config.return_value = cf_resp
44 | mock_boto3.return_value.update_distribution.return_value = None
45 | mock_boto3.return_value.delete_web_acl.return_value = None
46 | from custom_resources.waf_webacl_lambda.delete_waf_webacl import on_delete
47 |
48 | with patch("custom_resources.waf_webacl_lambda.delete_waf_webacl.helper.Data", {}):
49 | on_delete(event, None)
--------------------------------------------------------------------------------
/source/tests/unit_tests/custom_resources/test_get_prefix_id.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | # ###############################################################################
4 | # PURPOSE:
5 | # * Unit test for infrastructure/custom_resources/get_prefix_id.
6 | # USAGE:
7 | # ./run-unit-tests.sh --test-file-name custom_resources/test_get_prefix_id.py
8 | ###############################################################################
9 |
10 |
11 | from unittest.mock import patch
12 | from moto import mock_aws
13 |
14 |
15 | @patch("crhelper.CfnResource")
16 | @patch("custom_resources.prefix_id_lambda.get_prefix_id.helper")
17 | def test_event_handler(helper_mock, _):
18 | from custom_resources.prefix_id_lambda.get_prefix_id import event_handler
19 |
20 | event_handler({}, None)
21 | helper_mock.assert_called_once()
22 |
23 |
24 | @mock_aws
25 | @patch("crhelper.CfnResource")
26 | def test_on_create(_):
27 | from custom_resources.prefix_id_lambda.get_prefix_id import on_create
28 |
29 | with patch("custom_resources.prefix_id_lambda.get_prefix_id.helper.Data", {}) as helper_update_mock:
30 | on_create({}, None)
31 | assert helper_update_mock["prefix_list_id"] is not None
--------------------------------------------------------------------------------
/source/tests/unit_tests/custom_resources/test_header_secret_gen.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | # ###############################################################################
4 | # PURPOSE:
5 | # * Unit test for infrastructure/custom_resources/header_secret_gen.
6 | # USAGE:
7 | # ./run-unit-tests.sh --test-file-name custom_resources/test_header_secret_gen.py
8 | ###############################################################################
9 |
10 |
11 | from unittest.mock import patch
12 |
13 |
14 | @patch("crhelper.CfnResource")
15 | @patch("custom_resources.header_secret_lambda.header_secret_gen.helper")
16 | def test_event_handler(helper_mock, _):
17 | from custom_resources.header_secret_lambda.header_secret_gen import event_handler
18 |
19 | event_handler({}, None)
20 | helper_mock.assert_called_once()
21 |
22 |
23 | @patch("crhelper.CfnResource")
24 | def test_on_create(_):
25 | from custom_resources.header_secret_lambda.header_secret_gen import on_create
26 |
27 | with patch("custom_resources.header_secret_lambda.header_secret_gen.helper.Data", {}) as helper_update_mock:
28 | on_create({}, None)
29 | assert helper_update_mock["header_secret_value"] is not None
--------------------------------------------------------------------------------
/source/tests/unit_tests/custom_resources/test_ops_metrics.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | # ###############################################################################
4 | # PURPOSE:
5 | # * Unit test for infrastructure/custom_resources/operational_metrics/ops_metrics.py
6 | # USAGE:
7 | # ./run-unit-tests.sh --test-file-name custom_resources/test_ops_metrics.py
8 | ###############################################################################
9 |
10 | import uuid
11 | import os
12 |
13 | import pytest
14 | import boto3
15 | from unittest.mock import patch, MagicMock
16 | from moto import mock_aws
17 |
18 | @pytest.fixture
19 | def test_configs():
20 | return {
21 | "TEST_METRIC_UUID": str(uuid.uuid4()),
22 | "SECRET_NAME": f"{os.environ['STACK_NAME']}-anonymous-metrics-uuid"
23 | }
24 |
25 |
26 |
27 | @patch("custom_resources.operational_metrics.ops_metrics.create_uuid")
28 | def test_on_create(create_uuid_mock):
29 | from custom_resources.operational_metrics.ops_metrics import on_create
30 |
31 | on_create({}, None)
32 | create_uuid_mock.assert_called_once()
33 |
34 |
35 | @patch("custom_resources.operational_metrics.ops_metrics.delete_secret")
36 | def test_on_delete(delete_secret_mock):
37 | from custom_resources.operational_metrics.ops_metrics import on_delete
38 |
39 | on_delete({"PhysicalResourceId": "1234"}, None)
40 | delete_secret_mock.assert_called_once()
41 |
42 |
43 | @patch("crhelper.CfnResource")
44 | @patch("custom_resources.operational_metrics.ops_metrics.helper")
45 | def test_event_handler(helper_mock, _):
46 | from custom_resources.operational_metrics.ops_metrics import event_handler
47 |
48 | event_handler({}, None)
49 | helper_mock.assert_called_once()
50 |
51 |
52 | @patch("crhelper.CfnResource")
53 | @mock_aws
54 | def test_create_uuid(_, test_configs):
55 | session = boto3.session.Session(region_name=os.environ["AWS_REGION"])
56 | client = session.client("secretsmanager")
57 |
58 | fake_uuid = MagicMock()
59 | fake_uuid.uuid4() == test_configs["TEST_METRIC_UUID"]
60 |
61 |
62 | with patch("custom_resources.operational_metrics.ops_metrics.uuid", fake_uuid) as mock_uuid:
63 | from custom_resources.operational_metrics.ops_metrics import create_uuid
64 |
65 | create_uuid()
66 | res = client.get_secret_value(
67 | SecretId=test_configs["SECRET_NAME"],
68 | )
69 | res["SecretString"] == test_configs["TEST_METRIC_UUID"]
70 | mock_uuid.uuid4.assert_called()
71 |
72 |
73 | @patch("crhelper.CfnResource.delete")
74 | @mock_aws
75 | def test_delete_secret(_, test_configs):
76 | session = boto3.session.Session(region_name=os.environ["AWS_REGION"])
77 | client = session.client("secretsmanager")
78 | from custom_resources.operational_metrics.ops_metrics import delete_secret
79 |
80 | client.create_secret(
81 | Name=test_configs["SECRET_NAME"],
82 | SecretString=test_configs["TEST_METRIC_UUID"],
83 | )
84 |
85 | delete_secret()
86 |
87 | with pytest.raises(Exception) as ex:
88 | client.get_secret_value(
89 | SecretId=test_configs["SECRET_NAME"],
90 | )
91 | assert "Secrets Manager can't find the specified secret" in str(ex.value)
--------------------------------------------------------------------------------
/source/tests/unit_tests/custom_resources/test_upload_files.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | # ###############################################################################
4 | # PURPOSE:
5 | # * Unit test for infrastructure/custom_resources/artifacts_bucket_lambda/upload_files.py
6 | # USAGE:
7 | # ./run-unit-tests.sh --test-file-name custom_resources/test_upload_files.py
8 | ###############################################################################
9 |
10 |
11 | from unittest.mock import patch, call
12 |
13 |
14 | @patch("crhelper.CfnResource")
15 | @patch("custom_resources.artifacts_bucket_lambda.upload_files.helper")
16 | def test_event_handler(helper_mock, _):
17 | from custom_resources.artifacts_bucket_lambda.upload_files import event_handler
18 |
19 | event_handler({}, None)
20 | helper_mock.assert_called_once()
21 |
22 | @patch("crhelper.CfnResource")
23 | @patch("custom_resources.artifacts_bucket_lambda.upload_files.upload_file")
24 | def test_on_create_or_update(mock_upload_file, _):
25 | from custom_resources.artifacts_bucket_lambda.upload_files import on_create_or_update
26 |
27 | test_event = {
28 | "ResourceProperties": {
29 | "test_key" : "test_value"
30 | }
31 | }
32 |
33 | on_create_or_update(test_event, None)
34 | mock_upload_file.assert_called_once_with(
35 | {"test_key": "test_value"}
36 | )
37 |
38 | @patch("crhelper.CfnResource")
39 | @patch("custom_resources.artifacts_bucket_lambda.upload_files.s3_client")
40 | @patch("custom_resources.artifacts_bucket_lambda.upload_files.os.walk")
41 | @patch("custom_resources.artifacts_bucket_lambda.upload_files.os.listdir")
42 | def test_upload_file(mock_listdir, mock_walk, mock_s3_client, _):
43 | from custom_resources.artifacts_bucket_lambda.upload_files import upload_file
44 |
45 | mock_walk.return_value = [("/some/root", ["dir1", "dir2"], [])]
46 | mock_listdir.return_value = ["file1.txt", "file2.txt"]
47 |
48 | test_properties = {
49 | "artifacts_bucket_name": "test_bucket"
50 | }
51 | upload_file(resource_properties=test_properties)
52 |
53 | mock_s3_client.upload_file.assert_has_calls([
54 | call('/some/root/dir1/file1.txt', 'test_bucket', 'dir1/file1.txt'),
55 | call('/some/root/dir1/file2.txt', 'test_bucket', 'dir1/file2.txt'),
56 | call('/some/root/dir2/file1.txt', 'test_bucket', 'dir2/file1.txt'),
57 | call('/some/root/dir2/file2.txt', 'test_bucket', 'dir2/file2.txt')
58 | ])
59 |
--------------------------------------------------------------------------------
/source/tests/unit_tests/prebid_server/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/prebid-server-deployment-on-aws/b51ae1a1c2278aeb15a265b36eef4035160b4f3e/source/tests/unit_tests/prebid_server/__init__.py
--------------------------------------------------------------------------------
/source/tests/unit_tests/prebid_server/test_delete_efs_files.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | # ###############################################################################
4 | # PURPOSE:
5 | # * Unit test for infrastructure/prebid_server/efs_cleanup_lambda/delete_efs_files.py.
6 | # USAGE:
7 | # ./run-unit-tests.sh --test-file-name prebid_server/test_delete_efs_files.py
8 | ###############################################################################
9 |
10 | import os
11 |
12 | from unittest.mock import patch
13 |
14 | METRICS_TASK_ARN = "arn:aws:sync:us-west-2:9111122223333:task/task-example2"
15 |
16 | test_environ = {
17 | "EFS_MOUNT_PATH": "mnt/efs",
18 | "METRICS_TASK_ARN": METRICS_TASK_ARN,
19 | "DATASYNC_REPORT_BUCKET": "test-report-bucket",
20 | "AWS_ACCOUNT_ID": "9111122223333",
21 | "METRICS_NAMESPACE": "test-namespace",
22 | "RESOURCE_PREFIX": "test-prefix",
23 | "EFS_METRICS" : "metrics",
24 | "EFS_LOGS" : "logs",
25 | "SOLUTION_VERSION" : "v1.9.99",
26 | "SOLUTION_ID" : "SO000123",
27 | }
28 |
29 | @patch.dict(os.environ, test_environ, clear=True)
30 | @patch('aws_lambda_layers.metrics_layer.python.cloudwatch_metrics.metrics.Metrics.put_metrics_count_value_1')
31 | @patch('aws_lambda_layers.datasync_s3_layer.python.datasync_reports.reports.get_transferred_object_keys')
32 | @patch('os.remove')
33 | @patch('aws_lambda_powertools.Logger.info')
34 | @patch('aws_lambda_powertools.Logger.error')
35 | def test_event_handler(
36 | mock_error,
37 | mock_info,
38 | mock_os_remove,
39 | mock_get_transferred_object_keys,
40 | mock_metrics,
41 | ):
42 | from prebid_server.efs_cleanup_lambda.delete_efs_files import event_handler
43 |
44 | mock_metrics.return_value = None
45 |
46 | # test metric arn mapping with no file processing
47 | mock_get_transferred_object_keys.return_value = []
48 | test_event_2 = {
49 | "resources": [f"{METRICS_TASK_ARN}/execution/exec-example316440271f"]
50 | }
51 | event_handler(test_event_2, None)
52 | mock_info.assert_any_call("No new metrics files to delete from EFS.")
53 |
54 | # test unsuccessful file deletion
55 | mock_get_transferred_object_keys.return_value = ["key1", "key2"]
56 | mock_os_remove.side_effect = OSError()
57 | test_event_3 = {
58 | "resources": [f"{METRICS_TASK_ARN}/execution/exec-example316440271f"]
59 | }
60 | event_handler(test_event_3, None)
61 | mock_error.assert_any_call("2 files failed to delete: ['key1', 'key2']")
62 |
--------------------------------------------------------------------------------
/source/tests/unit_tests/prebid_server/test_prebid_artifacts_constructs.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | # ###############################################################################
4 | # PURPOSE:
5 | # * Unit test for infrastructure/prebid_server/prebid_artifacts_constructs.py.
6 | # USAGE:
7 | # ./run-unit-tests.sh --test-file-name prebid_server/test_prebid_artifacts_constructs.py
8 | ###############################################################################
9 |
10 |
11 | import uuid
12 | from unittest.mock import MagicMock
13 | from unit_tests.test_commons import mocked_common_services, FakeClass, reload_module
14 |
15 |
16 |
17 | @mocked_common_services(
18 | add_patch=[
19 | "prebid_server.prebid_artifacts_constructs.super",
20 | ]
21 | )
22 | def test_artifact_manager():
23 |
24 | mock_def = MagicMock()
25 | reload_module("prebid_server.prebid_artifacts_constructs")
26 | from prebid_server.prebid_artifacts_constructs import ArtifactsManager
27 |
28 | ArtifactsManager.__init__(
29 | self=mock_def,
30 | scope=FakeClass(),
31 | id=str(uuid.uuid4()))
32 |
33 | ArtifactsManager.create_artifact_bucket(self=mock_def)
34 | ArtifactsManager.create_custom_resource_lambda(self=mock_def)
--------------------------------------------------------------------------------
/source/tests/unit_tests/prebid_server/test_prebid_datasync_constructs.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | # ###############################################################################
4 | # PURPOSE:
5 | # * Unit test for infrastructure/prebid_server/prebid_datasync_constructs.py.
6 | # USAGE:
7 | # ./run-unit-tests.sh --test-file-name prebid_server/test_prebid_datasync_constructs.py
8 | ###############################################################################
9 |
10 |
11 | import uuid
12 | from unittest.mock import MagicMock
13 | from unit_tests.test_commons import mocked_common_services, FakeClass, reload_module
14 |
15 | add_patch = [
16 | "prebid_server.prebid_datasync_constructs.super",
17 | ]
18 |
19 |
20 | @mocked_common_services(
21 | add_patch=add_patch,
22 | )
23 | def test_efs_location():
24 | mock_def = MagicMock()
25 | reload_module("prebid_server.prebid_datasync_constructs")
26 | from prebid_server.prebid_datasync_constructs import EfsLocation
27 |
28 | EfsLocation.__init__(self=mock_def, scope=FakeClass(), id=str(uuid.uuid4()), prebid_vpc=mock_def,
29 | efs_path="test/path", efs_ap=mock_def, efs_filesystem=mock_def)
30 | datasync_efs_location = EfsLocation._create_efs_location(self=mock_def)
31 | assert len(datasync_efs_location.method_calls) == 1
32 |
33 |
34 | @mocked_common_services(
35 | add_patch=add_patch,
36 | )
37 | def test_efs_cleanup():
38 | mock_def = MagicMock(task="datasync-task", node=MagicMock(try_get_context=MagicMock(return_value="12345"),
39 | try_find_child=MagicMock(return_value=True)),
40 | bucket="s3-bucket")
41 | reload_module("prebid_server.prebid_datasync_constructs")
42 | from prebid_server.prebid_datasync_constructs import EfsCleanup
43 |
44 | EfsCleanup.__init__(
45 | self=mock_def,
46 | scope=FakeClass(),
47 | id=str(uuid.uuid4()),
48 | vpc=mock_def,
49 | efs_ap=mock_def,
50 | efs_filesystem=mock_def,
51 | report_bucket=mock_def,
52 | fargate_cluster_arn=mock_def,
53 | datasync_tasks={
54 | "logs": mock_def,
55 | "metrics": mock_def,
56 | },
57 | )
58 |
--------------------------------------------------------------------------------
/source/tests/unit_tests/prebid_server/test_prebid_glue_constructs.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | # ###############################################################################
4 | # PURPOSE:
5 | # * Unit test for infrastructure/prebid_server/prebid_glue_constructs.py.
6 | # USAGE:
7 | # ./run-unit-tests.sh --test-file-name prebid_server/test_prebid_glue_constructs.py
8 | ###############################################################################
9 |
10 | import uuid
11 | from unittest.mock import MagicMock, Mock
12 | from unit_tests.test_commons import mocked_common_services, FakeClass, reload_module
13 |
14 |
15 | @mocked_common_services(
16 | add_patch=[
17 | "prebid_server.prebid_glue_constructs.super",
18 | ],
19 | )
20 | def test_s3_location():
21 | mock_def = MagicMock(bucket_arn="arn/bucket")
22 | reload_module("prebid_server.prebid_glue_constructs")
23 | from prebid_server.prebid_glue_constructs import S3Location
24 |
25 | S3Location.__init__(self=mock_def, scope=FakeClass(), id=str(uuid.uuid4()), s3_bucket=mock_def)
26 | s3_location = S3Location._create_s3_location(self=mock_def)
27 | assert len(s3_location.method_calls) == 3
28 |
29 |
30 | @mocked_common_services(
31 | add_patch=[
32 | "prebid_server.prebid_artifacts_constructs.ArtifactsManager",
33 | "prebid_server.prebid_glue_constructs.super",
34 | ]
35 | )
36 | def test_glue_etl():
37 | mock_src_bucket = MagicMock(bucket_arn="test_arn/source_bucket")
38 | mock_artifact_bucket = MagicMock(bucket_arn="test_arn/artifactbucket")
39 | from prebid_server.prebid_glue_constructs import GlueEtl
40 | mock_def = Mock(spec=GlueEtl)
41 | mock_def.artifact_bucket = mock_artifact_bucket
42 | mock_def.source_bucket = mock_src_bucket
43 | reload_module("prebid_server.prebid_glue_constructs")
44 |
45 | mock_def.__init__(
46 | scope=FakeClass(),
47 | id=str(uuid.uuid4()),
48 | artifacts_construct=MagicMock(bucket=mock_artifact_bucket),
49 | script_file_name="filename",
50 | )
51 |
52 | mock_def._create_output_bucket()
53 | mock_def._create_glue_database()
54 | mock_def._create_glue_job()
55 | mock_def._create_glue_job_trigger()
56 |
--------------------------------------------------------------------------------
/source/tests/unit_tests/prebid_server/test_start_glue_job.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | # ###############################################################################
4 | # PURPOSE:
5 | # * Unit test for infrastructure/prebid_server/glue_trigger_lambda/start_glue_job.py.
6 | # USAGE:
7 | # ./run-unit-tests.sh --test-file-name prebid_server/test_start_glue_job.py
8 | ###############################################################################
9 |
10 | import os
11 | import json
12 |
13 | from unittest.mock import patch
14 |
15 | GLUE_JOB_NAME = "test-glue-job"
16 |
17 | test_environ = {
18 | "GLUE_JOB_NAME": GLUE_JOB_NAME,
19 | "DATASYNC_REPORT_BUCKET": "test-report-bucket",
20 | "AWS_ACCOUNT_ID": "9111122223333",
21 | "METRICS_NAMESPACE": "test-namespace",
22 | "RESOURCE_PREFIX": "test-prefix",
23 | "SOLUTION_VERSION": "v0.0.0",
24 | "SOLUTION_ID": "SO0248",
25 | "AWS_REGION": "us-east-1"
26 | }
27 |
28 | @patch.dict(os.environ, test_environ, clear=True)
29 | @patch('aws_lambda_layers.metrics_layer.python.cloudwatch_metrics.metrics.Metrics.put_metrics_count_value_1')
30 | @patch('aws_lambda_layers.datasync_s3_layer.python.datasync_reports.reports.get_transferred_object_keys')
31 | @patch('boto3.client')
32 | def test_event_handler(
33 | mock_boto3,
34 | mock_get_transferred_object_keys,
35 | mock_metrics
36 | ):
37 | from prebid_server.glue_trigger_lambda.start_glue_job import event_handler
38 |
39 | mock_metrics.return_value = None
40 |
41 | # test starting glue job with returned object keys
42 | mock_get_transferred_object_keys.return_value = ["key1", "key2"]
43 | test_event_1 = {
44 | "resources": ["arn:aws:sync:us-west-2:9111122223333:task/task-example2/execution/exec-example316440271f"]
45 | }
46 | event_handler(test_event_1, None)
47 | mock_boto3.return_value.start_job_run.assert_called_with(
48 | JobName=GLUE_JOB_NAME,
49 | Arguments={
50 | "--object_keys": json.dumps(["key1", "key2"])
51 | }
52 | )
53 |
54 | # test skipping glue job when no object keys returned
55 | mock_boto3.reset_mock()
56 | mock_get_transferred_object_keys.return_value = []
57 |     test_event_2 = {
58 | "resources": ["arn:aws:sync:us-west-2:9111122223333:task/task-example2/execution/exec-example316440271f"]
59 | }
60 |     event_handler(test_event_2, None)
61 | mock_boto3.return_value.start_job_run.assert_not_called()
62 |
--------------------------------------------------------------------------------
/source/tests/unit_tests/test_app.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | # ###############################################################################
4 | # PURPOSE:
5 | # * Unit test for infrastructure/app.py.
6 | # USAGE:
7 | # ./run-unit-tests.sh --test-file-name test_app.py
8 | ###############################################################################
9 |
10 | import os.path
11 | import shutil
12 | import sys
13 | from unittest.mock import patch
14 | from unit_tests.test_commons import mocked_common_services
15 |
16 | import pytest
17 |
18 |
19 | @pytest.fixture
20 | def build_app_fix():
21 | solution_helper_build_path = "../source/cdk_solution_helper_py/helpers_common/build"
22 | if os.path.isdir(solution_helper_build_path):
23 | try:
24 | shutil.rmtree(solution_helper_build_path)
25 | except OSError:
26 | pass
27 |
28 | sys.path.insert(0, "./infrastructure")
29 |
30 | with patch("app.__name__", "__main__"):
31 | from app import build_app
32 | return build_app()
33 |
34 |
35 | @pytest.mark.run(order=1)
36 | @mocked_common_services(
37 | add_patch=[
38 | "aws_cdk.App",
39 | "aws_cdk.aws_ecr_assets.DockerImageAsset",
40 | "prebid_server.prebid_server_stack.PrebidServerStack",
41 | "prebid_server.prebid_datasync_constructs.DataSyncMonitoring",
42 | "prebid_server.prebid_artifacts_constructs.ArtifactsManager",
43 | "prebid_server.operational_metrics_construct.OperationalMetricsConstruct",
44 | "prebid_server.cloudfront_entry_deployment.CloudFrontEntryDeployment",
45 | "prebid_server.alb_entry_deployment.ALBEntryDeployment",
46 | "prebid_server.vpc_construct.VpcConstruct",
47 | "prebid_server.container_image_construct.ContainerImageConstruct",
48 | "prebid_server.alb_access_logs_construct.AlbAccessLogsConstruct",
49 | "prebid_server.docker_configs_construct.DockerConfigsManager",
50 | ]
51 | )
52 | def test_build_app(build_app_fix):
53 | app_stack = build_app_fix.get_stack_by_name("prebid-server-deployment-on-aws")
54 | assert app_stack is not None
55 | assert app_stack.stack_name == "prebid-server-deployment-on-aws"
56 | assert app_stack.template is not None
57 | assert app_stack.template["Description"] == "(SO9999test) - Prebid Server Deployment on AWS. Version v99.99.99"
--------------------------------------------------------------------------------
/source/tests/unit_tests/test_commons.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import importlib
5 | import logging
6 | from functools import wraps
7 | from unittest.mock import patch
8 | from contextlib import ExitStack, contextmanager
9 |
10 |
11 | logging.basicConfig()
12 | logger = logging.getLogger(__name__)
13 | logger.setLevel(logging.INFO)
14 |
15 | class FakeClass:
16 | def __init__(self) -> None:
17 | logger.debug("Fake init.")
18 |
19 |
20 | def reload_module(module):
21 | module = importlib.import_module(module)
22 | importlib.reload(module)
23 |
24 |
25 | aws_cdk_services = [
26 | "aws_cdk.Stack.of",
27 | "aws_cdk.aws_datasync",
28 | "aws_solutions.cdk.aws_lambda.python.function.SolutionsPythonFunction",
29 | "aws_cdk.CustomResource",
30 | "aws_cdk.aws_iam",
31 | "aws_cdk.aws_ec2",
32 | "aws_cdk.aws_efs",
33 | "aws_cdk.aws_ecs_patterns",
34 | "aws_cdk.aws_elasticloadbalancingv2",
35 | "aws_cdk.aws_cloudfront",
36 | "aws_cdk.aws_cloudfront_origins",
37 | "aws_cdk.aws_s3",
38 | "aws_cdk.aws_kms",
39 | "aws_cdk.CfnResource",
40 | "aws_cdk.CfnOutput",
41 | "aws_cdk.aws_ecs",
42 | "aws_cdk.aws_events",
43 | "aws_cdk.aws_events_targets",
44 | "aws_cdk.aws_lambda_event_sources",
45 | "aws_cdk.aws_lambda.FileSystem",
46 | "aws_cdk.aws_cloudtrail",
47 | "aws_cdk.aws_glue",
48 | "aws_cdk.aws_lambda.LayerVersion"
49 | ]
50 |
51 | @contextmanager
52 | def handle_contexts(patched_services):
53 | with ExitStack() as exit_stack:
54 |         yield [exit_stack.enter_context(patch_service) for patch_service in patched_services]
55 |
56 |
57 | def mocked_common_services(**test_kwargs):
58 |
59 | def mocked_services_decorator(test_func):
60 | @wraps(test_func)
61 | def wrapper(*args, **kwargs):
62 | mock_services = [*test_kwargs.get("override_aws_cdk_services", aws_cdk_services), *test_kwargs.get("add_patch", [])]
63 |             patched_services = [patch(mock_service) for mock_service in mock_services]
64 | with handle_contexts(patched_services) as services:
65 | test_func(*args, **kwargs)
66 | for service in services:
67 | try:
68 | service.assert_called()
69 |                     except AssertionError as assertion_exception:
70 |                         if test_kwargs.get("validate_mocks"):
71 |                             raise
72 |                         logger.warning(assertion_exception)
73 |                         service.reset_mock()
74 | return wrapper
75 | return mocked_services_decorator
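
For reference, typical usage of this decorator, mirroring the test modules above (the patch target is illustrative):

```python
@mocked_common_services(
    add_patch=["prebid_server.prebid_artifacts_constructs.super"],
)
def test_example():
    # All targets in aws_cdk_services, plus the extra add_patch targets,
    # are patched for the duration of the test body.
    ...
```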
--------------------------------------------------------------------------------