├── source ├── api │ ├── chalicelib │ │ ├── __init__.py │ │ ├── setup.py │ │ ├── snap_api.py │ │ └── tiktok_api.py │ ├── .gitignore │ ├── requirements.txt │ ├── __init__.py │ ├── .chalice │ │ ├── config.json │ │ └── dev-app-policy.json │ ├── setup.py │ └── external_resources.json ├── infrastructure │ ├── __init__.py │ ├── lib │ │ ├── __init__.py │ │ ├── aws_lambda │ │ │ ├── __init__.py │ │ │ └── layers │ │ │ │ ├── aws_solutions │ │ │ │ ├── __init__.py │ │ │ │ ├── requirements │ │ │ │ │ └── requirements.txt │ │ │ │ └── layer.py │ │ │ │ └── __init__.py │ │ ├── secrets │ │ │ ├── __init__.py │ │ │ ├── tiktok_secrets.py │ │ │ └── snap_secrets.py │ │ ├── eventbridge_mappings.py │ │ ├── base_uploader_stack.py │ │ ├── tiktok_uploader_stack.py │ │ └── snap_uploader_stack.py │ ├── aspects │ │ ├── __init__.py │ │ └── app_registry.py │ ├── cdk_destroy.sh │ ├── cdk_deploy.sh │ ├── setup.py │ ├── app.py │ └── cdk.json ├── tests │ ├── infrastructure │ │ ├── lib │ │ │ ├── __init__.py │ │ │ ├── test_glue_stack.py │ │ │ └── test_web_stack.py │ │ ├── aspects │ │ │ └── __init__.py │ │ ├── __init__.py │ │ └── test_app.py │ ├── __init__.py │ ├── api │ │ └── chalicelib │ │ │ ├── test_snap_api.py │ │ │ └── test_tiktok_api.py │ └── aws_lambda │ │ ├── lambda_helpers.py │ │ ├── test_tiktok_lambda_handler.py │ │ └── test_snap_lambda_handler.py ├── aws_lambda │ ├── tiktok │ │ └── uploader │ │ │ └── __init__.py │ ├── __init__.py │ ├── snap │ │ ├── __init__.py │ │ └── uploader │ │ │ └── __init__.py │ └── setup.py ├── website │ ├── public │ │ ├── robots.txt │ │ ├── runtimeConfig.json │ │ ├── manifest.json │ │ └── index.html │ ├── src │ │ ├── store │ │ │ ├── actions.js │ │ │ ├── state.js │ │ │ ├── mutations.js │ │ │ └── index.js │ │ ├── App.vue │ │ ├── views │ │ │ └── Login.vue │ │ ├── registerServiceWorker.js │ │ ├── router.js │ │ ├── components │ │ │ ├── Sidebar.vue │ │ │ └── Header.vue │ │ ├── main.js │ │ └── tiktokViews │ │ │ └── Step2.vue │ ├── babel.config.js │ ├── vue.config.js │ └── 
package.json ├── tiktok_setup │ └── tiktok_credentials.json.sample ├── images │ ├── snap-setup.jpg │ ├── ui_snap_00.jpg │ ├── ui_snap_01.jpg │ ├── ui_snap_02.jpg │ ├── ui_snap_03.jpg │ ├── ui_snap_04.jpg │ ├── ui_snap_05.jpg │ ├── tiktok_secrets.jpg │ ├── tiktok_secrets_add.jpg │ ├── solution-architecture.jpg │ └── tiktok_secrets_retrive.jpg ├── cdk_solution_helper_py │ ├── helpers_cdk │ │ ├── aws_solutions │ │ │ └── cdk │ │ │ │ ├── aws_lambda │ │ │ │ ├── cfn_custom_resources │ │ │ │ │ ├── resource_hash │ │ │ │ │ │ ├── src │ │ │ │ │ │ │ ├── custom_resources │ │ │ │ │ │ │ │ ├── requirements.txt │ │ │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ │ │ └── hash.py │ │ │ │ │ │ │ └── __init__.py │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ └── hash.py │ │ │ │ │ ├── resource_name │ │ │ │ │ │ ├── src │ │ │ │ │ │ │ ├── custom_resources │ │ │ │ │ │ │ │ ├── requirements.txt │ │ │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ │ │ └── name.py │ │ │ │ │ │ │ └── __init__.py │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ └── name.py │ │ │ │ │ ├── solutions_metrics │ │ │ │ │ │ ├── src │ │ │ │ │ │ │ ├── custom_resources │ │ │ │ │ │ │ │ ├── requirements.txt │ │ │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ │ │ └── metrics.py │ │ │ │ │ │ │ └── __init__.py │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ └── metrics.py │ │ │ │ │ └── __init__.py │ │ │ │ ├── layers │ │ │ │ │ ├── aws_lambda_powertools │ │ │ │ │ │ ├── requirements │ │ │ │ │ │ │ └── requirements.txt │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ └── layer.py │ │ │ │ │ └── __init__.py │ │ │ │ ├── __init__.py │ │ │ │ ├── java │ │ │ │ │ ├── __init__.py │ │ │ │ │ ├── bundling.py │ │ │ │ │ └── function.py │ │ │ │ ├── python │ │ │ │ │ ├── __init__.py │ │ │ │ │ └── layer.py │ │ │ │ ├── environment_variable.py │ │ │ │ └── environment.py │ │ │ │ ├── scripts │ │ │ │ └── __init__.py │ │ │ │ ├── stepfunctions │ │ │ │ ├── __init__.py │ │ │ │ ├── solution_fragment.py │ │ │ │ └── solutionstep.py │ │ │ │ ├── tools │ │ │ │ ├── __init__.py │ │ │ │ └── cleaner.py │ │ │ │ ├── helpers │ │ │ │ 
├── __init__.py │ │ │ │ ├── logger.py │ │ │ │ ├── copytree.py │ │ │ │ └── loader.py │ │ │ │ ├── aspects.py │ │ │ │ ├── __init__.py │ │ │ │ ├── cfn_nag.py │ │ │ │ ├── mappings.py │ │ │ │ ├── context.py │ │ │ │ ├── stack.py │ │ │ │ └── interfaces.py │ │ └── setup.py │ ├── requirements-dev.txt │ ├── helpers_common │ │ ├── aws_solutions │ │ │ └── core │ │ │ │ ├── __init__.py │ │ │ │ ├── logging.py │ │ │ │ ├── config.py │ │ │ │ └── helpers.py │ │ └── setup.py │ └── CHANGELOG.md ├── snap_setup │ └── snapchat_credentials.json.sample ├── .coveragerc ├── pytest.ini ├── requirements-dev.txt └── glue │ └── snap_transformations.py ├── .viperlightrc ├── .github ├── PULL_REQUEST_TEMPLATE.md ├── workflows │ ├── trufflehog.yml │ ├── codeql.yml │ ├── push-workflow.yml │ ├── viperlight.yml │ └── pull-request-workflow.yml └── ISSUE_TEMPLATE │ ├── feature_request.md │ └── bug_report.md ├── CODE_OF_CONDUCT.md ├── .gitignore ├── CHANGELOG.md ├── deployment ├── build-open-source-dist.sh └── run-unit-tests.sh ├── .viperlightignore ├── sonar-project.properties ├── THIRD_PARTY_LICENSES.txt └── CONTRIBUTING.md /source/api/chalicelib/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /source/infrastructure/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /source/tests/infrastructure/lib/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.viperlightrc: -------------------------------------------------------------------------------- 1 | {"failOn":"medium","all":true} 2 | -------------------------------------------------------------------------------- /source/aws_lambda/tiktok/uploader/__init__.py: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /source/tests/infrastructure/aspects/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /source/website/public/robots.txt: -------------------------------------------------------------------------------- 1 | User-agent: * 2 | Disallow: 3 | -------------------------------------------------------------------------------- /source/api/.gitignore: -------------------------------------------------------------------------------- 1 | .chalice/deployments/ 2 | .chalice/venv/ 3 | __pycache__/ 4 | -------------------------------------------------------------------------------- /source/api/requirements.txt: -------------------------------------------------------------------------------- 1 | requests==2.28.1 2 | aws-xray-sdk==2.11.0 3 | wrapt==1.12.1 4 | -e chalicelib -------------------------------------------------------------------------------- /source/api/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/tiktok_setup/tiktok_credentials.json.sample: -------------------------------------------------------------------------------- 1 | { 2 | "ADVERTISER_ID": "", 3 | "ACCESS_TOKEN": "" 4 | } -------------------------------------------------------------------------------- /source/website/public/runtimeConfig.json: -------------------------------------------------------------------------------- 1 | {"API_ENDPOINT": "", "AWS_REGION":"","USER_POOL_ID":"","USER_POOL_CLIENT_ID":"","IDENTITY_POOL_ID":""} -------------------------------------------------------------------------------- /source/aws_lambda/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/images/snap-setup.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-solutions/audience-uploader-from-aws-clean-rooms/main/source/images/snap-setup.jpg -------------------------------------------------------------------------------- /source/images/ui_snap_00.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-solutions/audience-uploader-from-aws-clean-rooms/main/source/images/ui_snap_00.jpg -------------------------------------------------------------------------------- /source/images/ui_snap_01.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-solutions/audience-uploader-from-aws-clean-rooms/main/source/images/ui_snap_01.jpg -------------------------------------------------------------------------------- /source/images/ui_snap_02.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-solutions/audience-uploader-from-aws-clean-rooms/main/source/images/ui_snap_02.jpg -------------------------------------------------------------------------------- /source/images/ui_snap_03.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-solutions/audience-uploader-from-aws-clean-rooms/main/source/images/ui_snap_03.jpg -------------------------------------------------------------------------------- /source/images/ui_snap_04.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-solutions/audience-uploader-from-aws-clean-rooms/main/source/images/ui_snap_04.jpg -------------------------------------------------------------------------------- /source/images/ui_snap_05.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-solutions/audience-uploader-from-aws-clean-rooms/main/source/images/ui_snap_05.jpg -------------------------------------------------------------------------------- /source/aws_lambda/snap/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/images/tiktok_secrets.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-solutions/audience-uploader-from-aws-clean-rooms/main/source/images/tiktok_secrets.jpg -------------------------------------------------------------------------------- /source/infrastructure/lib/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/aws_lambda/snap/uploader/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_hash/src/custom_resources/requirements.txt: -------------------------------------------------------------------------------- 1 | crhelper==2.0.6 -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_name/src/custom_resources/requirements.txt: -------------------------------------------------------------------------------- 1 | crhelper==2.0.6 -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/layers/aws_lambda_powertools/requirements/requirements.txt: -------------------------------------------------------------------------------- 1 | aws-lambda-powertools>=1.24.0 
-------------------------------------------------------------------------------- /source/tests/infrastructure/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/images/tiktok_secrets_add.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-solutions/audience-uploader-from-aws-clean-rooms/main/source/images/tiktok_secrets_add.jpg -------------------------------------------------------------------------------- /source/infrastructure/lib/aws_lambda/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/infrastructure/lib/secrets/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/images/solution-architecture.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-solutions/audience-uploader-from-aws-clean-rooms/main/source/images/solution-architecture.jpg -------------------------------------------------------------------------------- /source/images/tiktok_secrets_retrive.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-solutions/audience-uploader-from-aws-clean-rooms/main/source/images/tiktok_secrets_retrive.jpg -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/solutions_metrics/src/custom_resources/requirements.txt: -------------------------------------------------------------------------------- 1 | requests==2.28.1 2 | crhelper==2.0.6 -------------------------------------------------------------------------------- /source/infrastructure/aspects/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 4 | # SPDX-License-Identifier: Apache-2.0 5 | -------------------------------------------------------------------------------- /source/infrastructure/lib/aws_lambda/layers/aws_solutions/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/website/src/store/actions.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | */ 5 | 6 | export default {}; 7 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/scripts/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/java/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/stepfunctions/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/layers/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/python/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_hash/src/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_name/src/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | *Issue #, if available:* 2 | 3 | *Description of changes:* 4 | 5 | By submitting this pull request, I confirm that you can use, modify, copy, and redistribute this contribution, under the terms of your choice. -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/solutions_metrics/src/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/infrastructure/lib/aws_lambda/layers/aws_solutions/requirements/requirements.txt: -------------------------------------------------------------------------------- 1 | ../../../../../../cdk_solution_helper_py/helpers_common 2 | avro==1.10.2 3 | cronex==0.1.3.1 4 | jmespath==0.10.0 5 | parsedatetime==2.6 6 | boto3>=1.20.28 -------------------------------------------------------------------------------- /source/infrastructure/lib/aws_lambda/layers/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from lib.aws_lambda.layers.aws_solutions.layer import SolutionsLayer 5 | -------------------------------------------------------------------------------- /source/website/babel.config.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | */ 5 | 6 | module.exports = { 7 | presets: [ 8 | '@vue/app' 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_hash/src/custom_resources/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_name/src/custom_resources/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/tools/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from aws_solutions.cdk.tools.cleaner import Cleaner 5 | -------------------------------------------------------------------------------- /source/website/public/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "media_indexer", 3 | "short_name": "media_indexer", 4 | "start_url": "./index.html", 5 | "display": "standalone", 6 | "background_color": "#000000", 7 | "theme_color": "#4DBA87" 8 | } 9 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/solutions_metrics/src/custom_resources/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/helpers/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from aws_solutions.cdk.helpers.copytree import copytree, ignore_globs 5 | -------------------------------------------------------------------------------- /source/snap_setup/snapchat_credentials.json.sample: -------------------------------------------------------------------------------- 1 | { 2 | "client_id": "", 3 | "client_secret": "", 4 | "redirect_url": "", 5 | "organization_id": "", 6 | "ad_account_id": "" 7 | } -------------------------------------------------------------------------------- /source/infrastructure/cdk_destroy.sh: -------------------------------------------------------------------------------- 1 | 2 | 3 | # python virtual envirnment 4 | 5 | python3 -m venv .env 6 | 7 | source .env/bin/activate 8 | 9 | # install cdk 10 | 11 | pip3 install -r requirements.txt 12 | 13 | npm install -g aws-cdk 14 | 15 | cdk destroy --all -------------------------------------------------------------------------------- /source/website/src/store/state.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | */ 5 | 6 | export default { 7 | s3key: "", 8 | dataset_definition: {}, 9 | step3_form_input: {}, 10 | }; 11 | -------------------------------------------------------------------------------- /source/.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | omit = 3 | **/setup.py 4 | infrastructure/cdk.out/* 5 | tests/* 6 | cdk_solution_helper_py/helpers_common/* 7 | source = 8 | infrastructure 9 | aws_lambda 10 | 11 | [report] 12 | fail_under = 0.0 13 | exclude_lines = 14 | setuptools -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | ## Code of Conduct 2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 4 | opensource-codeofconduct@amazon.com with any additional questions or comments. 5 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/layers/aws_lambda_powertools/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from aws_solutions.cdk.aws_lambda.layers.aws_lambda_powertools.layer import ( 5 | PowertoolsLayer, 6 | ) 7 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_hash/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from aws_solutions.cdk.aws_lambda.cfn_custom_resources.resource_hash.hash import ( 5 | ResourceHash, 6 | ) 7 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_name/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from aws_solutions.cdk.aws_lambda.cfn_custom_resources.resource_name.name import ( 5 | ResourceName, 6 | ) 7 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/solutions_metrics/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from aws_solutions.cdk.aws_lambda.cfn_custom_resources.solutions_metrics.metrics import ( 5 | Metrics, 6 | ) 7 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/requirements-dev.txt: -------------------------------------------------------------------------------- 1 | aws_cdk_lib>=2.7.0 2 | black 3 | boto3>=1.17.49 4 | requests==2.28.1 5 | crhelper>=2.0.6 6 | Click 7 | moto 8 | pipenv 9 | poetry 10 | pytest 11 | pytest-cov>=2.11.1 12 | pytest-mock>=3.5.1 13 | tox 14 | tox-pyenv 15 | aws_solutions_constructs.aws_eventbridge_sqs~=2.25.0 16 | -e helpers_cdk 17 | -e helpers_common -------------------------------------------------------------------------------- /source/infrastructure/cdk_deploy.sh: -------------------------------------------------------------------------------- 1 | 2 | 3 | # python virtual envirnment 4 | 5 | python3 -m venv .env 6 | 7 | source .env/bin/activate 8 | 9 | # install cdk 10 | pip3 install -r requirements.txt 11 | 12 | npm install -g aws-cdk 13 | 14 | # bootstrap and deploy 15 | cdk bootstrap 16 | 17 | cdk deploy --all 18 | 19 | # securiity check 20 | git secrets --scan -------------------------------------------------------------------------------- /source/pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | env = 3 | MOTO_ACCOUNT_ID=111111111111 4 | POWERTOOLS_TRACE_DISABLED=1 5 | SOLUTION_ID=SO0226 6 | SOLUTION_VERSION=v1.0.0 7 | SOLUTION_NAME=audience-uploader-from-aws-clean-rooms 8 | AWS_REGION=us-east-1 9 | AWS_DEFAULT_REGION=us-east-1 10 | norecursedirs = cdk.out* 11 | markers= 12 | no_cdk_lambda_mock: marks test that need to build AWS Lambda Functions or Layers with CDK 13 | -------------------------------------------------------------------------------- /source/website/src/store/mutations.js: 
-------------------------------------------------------------------------------- 1 | /* 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | */ 5 | 6 | export default { 7 | updateDatasetDefinition(state, value) { 8 | state.dataset_definition = value; 9 | }, 10 | saveStep3FormInput(state, value) { 11 | state.step3_form_input = value; 12 | }, 13 | updateS3key(state, value) { 14 | state.s3key = value; 15 | }, 16 | }; 17 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_common/aws_solutions/core/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from aws_solutions.core.config import Config 5 | 6 | config = Config() 7 | 8 | from aws_solutions.core.helpers import ( 9 | get_aws_region, 10 | get_aws_partition, 11 | get_service_client, 12 | get_service_resource, 13 | get_aws_account, 14 | ) 15 | -------------------------------------------------------------------------------- /.github/workflows/trufflehog.yml: -------------------------------------------------------------------------------- 1 | 2 | name: "TruffleHog" 3 | 4 | on: [push, pull_request] 5 | 6 | jobs: 7 | content-scan: 8 | name: Content Scan 9 | runs-on: ubuntu-latest 10 | 11 | steps: 12 | - name: Check out repo 13 | uses: actions/checkout@v3 14 | # scan for various types of keys and tokens 15 | - name: TruffleHog 16 | uses: edplato/trufflehog-actions-scan@master 17 | with: 18 | scanArguments: "--regex --entropy=False" 19 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Change Log 2 | All notable changes to this project will be documented in this file. 
3 | 4 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 5 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 6 | 7 | ## [2.0.0] - 2022-01-31 8 | ### Changed 9 | - support for CDK 2.x added, support for CDK 1.x removed 10 | 11 | ## [1.0.0] - 2021-09-23 12 | ### Added 13 | - initial release 14 | 15 | -------------------------------------------------------------------------------- /source/website/src/App.vue: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | */ 5 | 6 | 11 | 12 | 22 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this solution 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the feature you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Additional context** 17 | Add any other context or screenshots about the feature request here. -------------------------------------------------------------------------------- /source/website/vue.config.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
SPDX-License-Identifier: Apache-2.0
*/

// Vue CLI build configuration: wires up the Subresource Integrity (SRI)
// webpack plugin and silences asset-size performance hints.
const SriPlugin = require('webpack-subresource-integrity');

module.exports = {
  configureWebpack: {
    output: {
      // Required by webpack-subresource-integrity so integrity-checked
      // assets can be loaded cross-origin.
      crossOriginLoading: 'anonymous',
    },
    plugins: [
      new SriPlugin({
        hashFuncNames: ['sha256', 'sha384'],
        // NOTE(review): SRI is currently disabled — integrity hashes are not
        // enforced on emitted assets. Confirm this is intentional.
        enabled: false
      }),
    ],
    performance: {
      // Suppress webpack bundle-size warnings during build.
      hints: false
    }
  }
};
3 | SPDX-License-Identifier: Apache-2.0 4 | */ 5 | 6 | import Vue from "vue"; 7 | import Vuex from "vuex"; 8 | import state from "./state"; 9 | import mutations from "./mutations"; 10 | import actions from "./actions"; 11 | import createPersistedState from "vuex-persistedstate"; 12 | 13 | Vue.use(Vuex); 14 | 15 | export default new Vuex.Store({ 16 | state, 17 | mutations, 18 | actions, 19 | plugins: [ 20 | createPersistedState({ 21 | paths: ["execution_history"], 22 | }), 23 | ], 24 | }); 25 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Change Log 2 | 3 | All notable changes to this project will be documented in this file. 4 | 5 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 6 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 7 | 8 | ## [1.0.0] - 2023-01-31 9 | 10 | ### Added 11 | 12 | - Initial release. 
13 | 14 | ## [1.0.1] - 2023-04-19 15 | 16 | ### Added 17 | 18 | - Update permissions for new S3 bucket defaults 19 | - Update to various build scripts for version changes 20 | - Add common GitHub workflows 21 | - Updates to unit tests for above changes 22 | 23 | -------------------------------------------------------------------------------- /source/api/.chalice/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "1.0", 3 | "app_name": "audience-uploader-from-aws-clean-rooms", 4 | "environment_variables": { 5 | "botoConfig": "{}", 6 | "VERSION": "", 7 | "AMC_ENDPOINT_URL": "", 8 | "AMC_API_ROLE_ARN": "", 9 | "AMC_GLUE_JOB_NAME": "" 10 | }, 11 | "stages": { 12 | "dev": { 13 | "api_gateway_stage": "api", 14 | "lambda_memory_size": 2048, 15 | "lambda_timeout": 600, 16 | "autogen_policy": false, 17 | "iam_policy_file": "dev-app-policy.json", 18 | "xray": true, 19 | "tags": { 20 | "environment": "audience-uploader-from-aws-clean-rooms" 21 | } 22 | } 23 | } 24 | } -------------------------------------------------------------------------------- /source/website/public/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | Audience Uploader from AWS Clean Rooms Uploader 9 | 10 | 11 | 14 |
@dataclass
class EnvironmentVariable:
    """Set an environment variable on a Lambda function, defaulting the value
    from the CDK context when none is supplied explicitly."""

    scope: IFunction  # the Lambda function receiving the environment variable
    name: str  # variable name; also used as the CDK context lookup key
    value: str = field(default="")

    def __post_init__(self):
        # Empty/falsy value -> fall back to the CDK context entry of the same
        # name, then register the variable on the function either way.
        if not self.value:
            self.value = self.scope.node.try_get_context(self.name)
        self.scope.add_environment(self.name, self.value)

    def __str__(self):
        # Stringifies to the resolved value so instances can be interpolated.
        return self.value
#!/bin/bash
#
# Build the open-source distributable: recreate an empty deployment/open-source
# folder containing only a placeholder file.
#
# This assumes all of the OS-level configuration has been completed and git repo has already been cloned
#
# This script should be run from the repo's deployment directory
# cd deployment
# ./build-open-source-dist.sh
#

# Fail fast and trace from the very first command. (Previously this was
# enabled only after the variables were assigned and the banner printed, so
# early failures went undetected.)
set -xeuo pipefail

source_template_dir="$PWD"
dist_dir="$source_template_dir/open-source"
placeholder="$dist_dir/placeholder.txt"

echo "------------------------------------------------------------------------------"
echo "[Init] Clean old open-source folder"
echo "------------------------------------------------------------------------------"

# Quote expansions so a path containing whitespace cannot be word-split,
# which for `rm -rf` would delete the wrong paths.
rm -rf "$dist_dir"
mkdir -p "$dist_dir"
touch "$placeholder"
"Sid": "Logging" 13 | }, 14 | { 15 | "Effect": "Allow", 16 | "Action": "lambda:InvokeFunction", 17 | "Resource": "*" 18 | }, 19 | { 20 | "Effect": "Allow", 21 | "Action": [ 22 | "iam:PassRole" 23 | ], 24 | "Resource": [ 25 | "*" 26 | ] 27 | } 28 | 29 | ] 30 | } 31 | -------------------------------------------------------------------------------- /.github/workflows/push-workflow.yml: -------------------------------------------------------------------------------- 1 | name: Push Workflow 2 | 3 | env: 4 | REGION: us-east-1 5 | 6 | on: push 7 | 8 | jobs: 9 | pipeline-job: 10 | name: Pipeline Job 11 | if: github.repository_owner == 'aws-solutions' 12 | runs-on: ubuntu-latest 13 | permissions: 14 | id-token: write 15 | steps: 16 | - name: Configure AWS credentials 17 | uses: aws-actions/configure-aws-credentials@v1 18 | with: 19 | role-to-assume: ${{ secrets.DISPATCHER_ROLE_ARN }} 20 | aws-region: ${{ env.REGION }} 21 | role-duration-seconds: 900 22 | role-session-name: OIDCSession 23 | - name: Run CodeBuild 24 | uses: aws-actions/aws-codebuild-run-build@v1 25 | with: 26 | project-name: ${{ secrets.DISPATCHER_CODEBUILD_PROJECT_NAME }} 27 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aspects.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
@jsii.implements(IAspect)
class ConditionalResources:
    """Mark any CDK construct as conditional (this is useful to apply to stacks and L2+ constructs)"""

    def __init__(self, condition: CfnCondition):
        # The CloudFormation condition attached to every visited resource.
        self.condition = condition

    def visit(self, node: IConstruct):
        # L1 (Cfn*) elements expose cfn_options directly; duck-type via dir()
        # because the aspect may visit arbitrary construct types.
        if "is_cfn_element" in dir(node) and node.is_cfn_element(node):
            node.cfn_options.condition = self.condition
        # L2+ constructs wrap an underlying L1 "default child"; attach the
        # condition to that child instead.
        elif "is_cfn_element" in dir(node.node.default_child):
            node.node.default_child.cfn_options.condition = self.condition
class Logger:
    """Provide a preconfigured logger for this package."""

    @classmethod
    def get_logger(cls, name: str) -> logging.Logger:
        """
        Get (and lazily configure) the named logger for this package.
        :param name: the name of the logger
        :return: the logger
        """
        log = logging.getLogger(name)
        if log.handlers:
            # Already configured on an earlier call — reuse as-is.
            return log
        log.setLevel(logging.INFO)
        stream_handler = logging.StreamHandler()
        stream_handler.setFormatter(
            logging.Formatter("[%(levelname)s]\t%(name)s\t%(message)s")
        )
        log.addHandler(stream_handler)
        # Messages are fully handled here; don't bubble up to the root logger.
        log.propagate = False
        return log
class TiktokSecrets(Construct):
    """Provision the Secrets Manager secret that stores TikTok Marketing API
    credentials for the uploader."""

    def __init__(self, scope: Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # Placeholder secret body; operators store real credentials after
        # deployment.
        empty_credentials = secretsmanager.SecretStringGenerator(
            secret_string_template=json.dumps({"credentials": ""}),
            generate_string_key="credentials",
        )

        # RETAIN keeps the secret when the stack is deleted.
        self.tiktok_uploader_secret = secretsmanager.Secret(
            self,
            "tiktok_uploader_credentials",
            description="tiktok ads marketing api - credentials",
            removal_policy=RemovalPolicy.RETAIN,
            generate_secret_string=empty_credentials,
        )
class SolutionsLayer(SolutionsPythonLayerVersion):
    """Lambda layer bundling this solution's shared Python requirements."""

    def __init__(self, scope: Construct, construct_id: str, **kwargs):
        # The requirements directory sits next to this module on disk.
        bundle_dir: Path = Path(__file__).absolute().parent / "requirements"
        super().__init__(scope, construct_id, bundle_dir, **kwargs)

    @staticmethod
    def get_or_create(scope: Construct, **kwargs):
        """Return the stack-wide singleton layer, creating it on first use.

        The fixed construct id makes the layer discoverable across calls
        within the same stack.
        """
        stack = Stack.of(scope)
        construct_id = "SolutionsLayer-DAE8E12F-3DEA-43FB-A4AA-E55AC50BD2E9"
        existing = stack.node.try_find_child(construct_id)
        return existing if existing else SolutionsLayer(stack, construct_id, **kwargs)
class PowertoolsLayer(SolutionsPythonLayerVersion):
    """Lambda layer providing aws-lambda-powertools, built from the adjacent
    requirements directory."""

    def __init__(self, scope: Construct, construct_id: str, **kwargs):
        # requirements/requirements.txt sits next to this module on disk.
        requirements_path: Path = Path(__file__).absolute().parent / "requirements"
        super().__init__(scope, construct_id, requirements_path, **kwargs)

    @staticmethod
    def get_or_create(scope: Construct, **kwargs):
        """Return the single per-stack instance of this layer.

        The fixed unique construct id lets repeated calls find the instance
        created by an earlier call instead of duplicating the layer.
        """
        stack = Stack.of(scope)
        construct_id = "PowertoolsLayer-8E932F0F-197D-4026-A354-23D184C2A624"
        exists = stack.node.try_find_child(construct_id)
        if exists:
            return exists
        return PowertoolsLayer(stack, construct_id, **kwargs)
import setuptools

# Trove classifiers shared by this solution's Python packages.
_CLASSIFIERS = [
    "Development Status :: 4 - Beta",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: Apache Software License",
    "Programming Language :: JavaScript",
    "Programming Language :: Python :: 3 :: Only",
    "Programming Language :: Python :: 3.7",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: 3.9",
    "Topic :: Software Development :: Code Generators",
    "Topic :: Utilities",
    "Typing :: Typed",
]

# Packaging metadata for the solution's Lambda function sources.
setuptools.setup(
    name="aws_lambda",
    version="0.0.0",
    description="Audience Uploader from AWS Clean Rooms - Lambda Functions",
    author="AWS Solutions Builders",
    packages=setuptools.find_packages(exclude=("shared",)),
    package_data={"": ["*.json", "*.yaml"]},
    include_package_data=True,
    python_requires=">=3.7",
    classifiers=_CLASSIFIERS,
)
import setuptools


# Packaging metadata for the solution's Chalice-based REST API package.
# NOTE: version 2.0.0 mirrors chalicelib/setup.py; keep the two in sync.
setuptools.setup(
    name="api",
    version="2.0.0",
    description="Audience Uploader from AWS Clean Rooms - API",
    author="AWS Solutions Builders",
    packages=setuptools.find_packages(exclude=("shared",)),
    package_data={"": ["*.json", "*.yaml"]},
    include_package_data=True,
    python_requires=">=3.7",
    install_requires=["chalice"],
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: JavaScript",
        "Programming Language :: Python :: 3 :: Only",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Topic :: Software Development :: Code Generators",
        "Topic :: Utilities",
        "Typing :: Typed",
    ],
)
import setuptools


# Packaging metadata for the chalicelib helper package used by the Chalice
# API (snap_api / tiktok_api modules).
setuptools.setup(
    name="chalicelib",
    version="2.0.0",
    description="Audience Uploader from AWS Clean Rooms - Lambda Functions",
    author="AWS Solutions Builders",
    packages=setuptools.find_packages(exclude=("shared",)),
    package_data={"": ["*.json", "*.yaml"]},
    include_package_data=True,
    python_requires=">=3.7",
    install_requires=["chalice"],
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: JavaScript",
        "Programming Language :: Python :: 3 :: Only",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Topic :: Software Development :: Code Generators",
        "Topic :: Utilities",
        "Typing :: Typed",
    ],
)
SPDX-License-Identifier: Apache-2.0
*/

/* eslint-disable no-console */

// Registers the PWA service worker in production builds only and logs each
// lifecycle event (registration, caching, update download, offline, errors).
import { register } from "register-service-worker";

if (process.env.NODE_ENV === "production") {
  register(`${process.env.BASE_URL}service-worker.js`, {
    ready() {
      console.log(
        "App is being served from cache by a service worker.\n" +
          "For more details, visit https://goo.gl/AFskqB"
      );
    },
    registered() {
      console.log("Service worker has been registered.");
    },
    cached() {
      console.log("Content has been cached for offline use.");
    },
    updatefound() {
      console.log("New content is downloading.");
    },
    updated() {
      console.log("New content is available; please refresh.");
    },
    offline() {
      console.log(
        "No internet connection found. App is running in offline mode."
      );
    },
    error(error) {
      console.error("Error during service worker registration:", error);
    },
  });
}
- [ ] Have you checked your [service quotas](https://docs.aws.amazon.com/general/latest/gr/aws_service_limits.html) for the services this solution uses?
class CDKSolution:
    """
    Helper utilities for building AWS Solutions using the AWS CDK in Python.

    :type cdk_json_path: Path
    :param cdk_json_path: The full path to the cdk.json context for your application
    :type qualifier: str
    :param qualifier: A string that is added to all resources in the CDK bootstrap stack. The default value has no significance.
    """

    def __init__(self, cdk_json_path: Path, qualifier="hnb659fds"):
        self.qualifier = qualifier
        self.context = SolutionContext(cdk_json_path=cdk_json_path)
        self.synthesizer = self._new_synthesizer()

    def reset(self) -> None:
        """
        Replace the synthesizer with a fresh one - useful for testing.
        :return: None
        """
        self.synthesizer = self._new_synthesizer()

    def _new_synthesizer(self) -> SolutionStackSubstitutions:
        # Build a synthesizer bound to this solution's bootstrap qualifier.
        return SolutionStackSubstitutions(qualifier=self.qualifier)
class SnapSecrets(Construct):
    """Provision the Secrets Manager secrets used by the Snap uploader."""

    def __init__(self, scope: Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        def _empty_credentials() -> secretsmanager.SecretStringGenerator:
            # Placeholder body; operators store real credentials post-deploy.
            return secretsmanager.SecretStringGenerator(
                secret_string_template=json.dumps({"credentials": ""}),
                generate_string_key="credentials",
            )

        # OAuth refresh token storage - retained on stack deletion.
        self.oauth_refresh_secret = secretsmanager.Secret(
            self,
            "snap_uploader_credentials_oauth_refresh",
            description="snapads marketing api - oauth refresh",
            removal_policy=RemovalPolicy.RETAIN,
            generate_secret_string=_empty_credentials(),
        )

        # Snap Marketing API credentials - retained on stack deletion.
        self.snap_uploader_secret = secretsmanager.Secret(
            self,
            "snap_uploader_credentials",
            description="snapads marketing api - credentials",
            removal_policy=RemovalPolicy.RETAIN,
            generate_secret_string=_empty_credentials(),
        )
def copytree(src, dst, symlinks=False, ignore=None):
    """Recursively copy *src* into *dst*, skipping glob patterns in *ignore*.

    :param src: source directory
    :param dst: destination directory (created if missing)
    :param symlinks: forwarded to shutil.copytree for subdirectory copies
    :param ignore: optional list of glob patterns to exclude; a pattern ending
        in "/*" also excludes the directory itself
    """
    if ignore:
        # Work on a copy: the previous implementation extended the caller's
        # list in place, mutating the `ignore` argument across calls.
        ignore = list(ignore)
        ignore.extend([ignored[:-2] for ignored in ignore if ignored.endswith("/*")])
    else:
        ignore = []

    if not os.path.exists(dst):
        os.makedirs(dst)

    for item in os.listdir(src):
        s = os.path.join(src, item)
        d = os.path.join(dst, item)

        # ignore full directories upfront
        if any(Path(s).match(ignored) for ignored in ignore):
            continue

        if os.path.isdir(s):
            shutil.copytree(s, d, symlinks, ignore=ignore_globs(*ignore))
        else:
            shutil.copy2(s, d)
3 | SPDX-License-Identifier: Apache-2.0 4 | */ 5 | 6 | import Vue from "vue"; 7 | import VueRouter from "vue-router"; 8 | 9 | Vue.use(VueRouter); 10 | 11 | const platformPages = { 12 | snap: ["Step1", "Step2", "Step3", "Step4", "Step5"], 13 | tiktok: ["Step1", "Step2", "Step3", "Step4", "Step5"], 14 | }; 15 | const routerRoutes = [ 16 | { 17 | path: "/login", 18 | name: "Login", 19 | component: () => import("@/views/Login.vue"), 20 | meta: { requiresAuth: false }, 21 | alias: "/", 22 | }, 23 | ]; 24 | for (const platform in platformPages) { 25 | for (const page of platformPages[platform]) { 26 | const routeParams = { 27 | path: "/" + platform + page.toLowerCase(), 28 | name: platform + page, 29 | component: () => import("@/" + platform + "Views/" + page + ".vue"), 30 | meta: { requiresAuth: true }, 31 | }; 32 | routerRoutes.push(routeParams); 33 | } 34 | } 35 | 36 | const platform = "snap"; 37 | const router = new VueRouter({ 38 | mode: "history", 39 | base: process.env.BASE_URL, 40 | routes: routerRoutes, 41 | }); 42 | 43 | router.beforeResolve(async (to, from, next) => { 44 | if (to.matched.some((record) => record.meta.requiresAuth)) { 45 | try { 46 | await Vue.prototype.$Amplify.Auth.currentAuthenticatedUser(); 47 | next(); 48 | } catch (e) { 49 | console.log(e); 50 | next({ 51 | path: "/login", 52 | }); 53 | } 54 | } 55 | console.log(next); 56 | next(); 57 | }); 58 | 59 | export default router; 60 | -------------------------------------------------------------------------------- /source/infrastructure/setup.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

"""Packaging configuration for the solution's CDK infrastructure package."""

import json
from pathlib import Path

import setuptools

_HERE = Path(__file__).resolve()

# The long description comes from the repository-level README (three levels up).
with open(_HERE.parent.parent.parent / "README.md") as fp:
    long_description = fp.read()

# The solution version is maintained in cdk.json next to this file.
VERSION = json.loads((_HERE.parent / "cdk.json").read_text())["context"]["SOLUTION_VERSION"]


setuptools.setup(
    name="infrastructure",
    version=VERSION,
    description="AWS CDK stack to deploy Audience Uploader from AWS Clean Rooms.",
    long_description=long_description,
    long_description_content_type="text/markdown",
    author="AWS Solutions Builders",
    packages=setuptools.find_packages(),
    install_requires=[
        "aws-cdk-lib>=2.7.0",
        "pip>=21.3",
    ],
    python_requires=">=3.7",
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: JavaScript",
        "Programming Language :: Python :: 3 :: Only",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Topic :: Software Development :: Code Generators",
        "Topic :: Utilities",
        "Typing :: Typed",
    ],
)
#!/usr/bin/env python3
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

"""CDK application entry point for the Audience Uploader solution.

BUG FIX: the shebang line is only honored by the OS when it is the very
first line of the file; it previously appeared after the license header.
"""

import os
import logging
from pathlib import Path

import aws_cdk as cdk
from aws_cdk import CfnParameter, App
from constructs import Construct

from lib.uploader_stack import UploaderStack
from aws_solutions.cdk import CDKSolution
from aws_solutions.cdk.stack import SolutionStack
from aws_solutions.cdk.aws_lambda.python.function import SolutionsPythonFunction
from aspects.app_registry import AppRegistry

# The solution helper build script expects this logger to be used
logger = logging.getLogger("cdk-helper")

# Initialize the CDKSolution helper - it will be used to build the templates in a solution-compatible manner
solution = CDKSolution(cdk_json_path=Path(__file__).parent.absolute() / "cdk.json")


@solution.context.requires("SOLUTION_NAME")
@solution.context.requires("SOLUTION_ID")
@solution.context.requires("SOLUTION_VERSION")
@solution.context.requires("BUCKET_NAME")
def build_app(context):
    """Synthesize the uploader stack.

    :param context: CDK context (injected/validated by the decorators above)
    :return: the cloud assembly produced by ``app.synth()``
    """
    app = App(context=context)
    stack = UploaderStack(
        app,
        "uploader",
        stack_name=app.node.try_get_context("STACK_NAME"),
        description="Audience Uploader from AWS Clean Rooms Solution CDK stack",
        template_filename="audience-uploader-from-aws-clean-rooms.template",
        synthesizer=solution.synthesizer,
    )
    # Register the stack with AWS Service Catalog AppRegistry via a CDK aspect
    cdk.Aspects.of(app).add(AppRegistry(stack, "AppRegistryAspect"))
    return app.synth()


if __name__ == "__main__":
    build_app()
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

"""Helpers for attaching cfn_nag suppression metadata to CDK resources."""

from dataclasses import dataclass
from typing import List

import jsii
from aws_cdk import CfnResource, IAspect
from constructs import IConstruct


@dataclass
class CfnNagSuppression:
    """One cfn_nag suppression: the rule identifier and the human-readable reason."""

    rule_id: str
    reason: str


def add_cfn_nag_suppressions(
    resource: CfnResource, suppressions: List[CfnNagSuppression]
):
    """Attach ``cfn_nag`` metadata listing the given suppressions to *resource*.

    :param resource: the low-level (Cfn) resource to annotate
    :param suppressions: the rules to suppress, with reasons
    """
    resource.add_metadata(
        "cfn_nag",
        {
            "rules_to_suppress": [
                {"id": suppression.rule_id, "reason": suppression.reason}
                for suppression in suppressions
            ]
        },
    )


@jsii.implements(IAspect)
class CfnNagSuppressAll:
    """Suppress certain cfn_nag warnings that can be ignored by this solution"""

    def __init__(self, suppress: List[CfnNagSuppression], resource_type: str):
        # suppressions applied to every resource of the matching CloudFormation type
        self.suppressions = suppress
        self.resource_type = resource_type

    def visit(self, node: IConstruct):
        """Called for every construct in the tree (CDK aspect protocol).

        Annotates the node when it is (case 1) itself a Cfn element of the
        targeted resource type, or (case 2) an L2 construct whose default
        child is a Cfn element of that type.
        """
        # Case 1: the node itself is a Cfn element of the targeted type.
        if "is_cfn_element" in dir(node) and node.is_cfn_element(node):
            if getattr(node, "cfn_resource_type", None) == self.resource_type:
                add_cfn_nag_suppressions(node, self.suppressions)

        # Case 2: inspect the node's default child. NOTE(review): when
        # default_child is None, dir(None) lacks is_cfn_element, so this
        # condition is simply falsy rather than raising.
        elif "is_cfn_element" in dir(node.node.default_child) and (
            getattr(node.node.default_child, "cfn_resource_type", None)
            == self.resource_type
        ):
            add_cfn_nag_suppressions(node.node.default_child, self.suppressions)
3 | SPDX-License-Identifier: Apache-2.0 4 | */ 5 | 6 | 53 | 54 | 66 | 67 | 72 | -------------------------------------------------------------------------------- /source/infrastructure/cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "python3 app.py", 3 | "watch": { 4 | "include": [ 5 | "**" 6 | ], 7 | "exclude": [ 8 | "README.md", 9 | "cdk*.json", 10 | "requirements*.txt", 11 | "source.bat", 12 | "**/__init__.py", 13 | "python/__pycache__", 14 | "tests" 15 | ] 16 | }, 17 | "context": { 18 | "@aws-cdk/aws-apigateway:usagePlanKeyOrderInsensitiveId": true, 19 | "@aws-cdk/core:stackRelativeExports": true, 20 | "@aws-cdk/aws-rds:lowercaseDbIdentifier": true, 21 | "@aws-cdk/aws-lambda:recognizeVersionProps": true, 22 | "@aws-cdk/aws-lambda:recognizeLayerVersion": true, 23 | "@aws-cdk/aws-cloudfront:defaultSecurityPolicyTLSv1.2_2021": true, 24 | "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true, 25 | "@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": true, 26 | "@aws-cdk/core:checkSecretUsage": true, 27 | "@aws-cdk/aws-iam:minimizePolicies": true, 28 | "@aws-cdk/aws-ecs:arnFormatIncludesClusterName": true, 29 | "@aws-cdk/core:validateSnapshotRemovalPolicy": true, 30 | "@aws-cdk/aws-codepipeline:crossAccountKeyAliasStackSafeResourceName": true, 31 | "@aws-cdk/aws-s3:createDefaultLoggingPolicy": true, 32 | "@aws-cdk/aws-sns-subscriptions:restrictSqsDescryption": true, 33 | "@aws-cdk/aws-apigateway:disableCloudWatchRole": true, 34 | "@aws-cdk/core:enablePartitionLiterals": true, 35 | "@aws-cdk/core:target-partitions": [ 36 | "aws", 37 | "aws-cn" 38 | ], 39 | "SOLUTION_VERSION": "v1.0.0", 40 | "SOLUTION_NAME": "audience-uploader-from-aws-clean-rooms", 41 | "SOLUTION_ID": "SO0226", 42 | "APP_REGISTRY_NAME": "audience-uploader-from-aws-clean-rooms", 43 | "APPLICATION_TYPE": "AWS-Solutions", 44 | "VERSION": "v1.0.0" 45 | } 46 | } 
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

from aws_cdk import CfnMapping
from constructs import Construct


class Mappings:
    """Create the standard CloudFormation mappings used by AWS Solutions
    templates: solution metadata ("Solution") and source location ("SourceCode").
    """

    def __init__(
        self,
        parent: Construct,
        solution_id: str,
        send_anonymous_usage_data: bool = True,
        quicksight_template_arn: bool = False,
    ):
        # :param parent: scope the mappings are attached to
        # :param solution_id: the AWS Solutions ID (e.g. "SO0226")
        # :param send_anonymous_usage_data: rendered as "Yes"/"No" in the mapping
        # :param quicksight_template_arn: when truthy, also records a QuickSight
        #   source-template placeholder in the SourceCode mapping
        self.parent = parent

        # Track the solution mapping (ID, version, anonymous usage data)
        # NOTE(review): the %%...%% tokens look like placeholders substituted by
        # the solution build tooling — confirm against the build scripts.
        self.solution_mapping = CfnMapping(
            parent,
            "Solution",
            mapping={
                "Data": {
                    "ID": solution_id,
                    "Version": "%%SOLUTION_VERSION%%",
                    "SendAnonymousUsageData": "Yes"
                    if send_anonymous_usage_data
                    else "No",
                    "SolutionName": "%%SOLUTION_NAME%%",
                    "AppRegistryName": "%%APP_REGISTRY_NAME%%",
                    "ApplicationType": "AWS-Solutions",
                }
            },
        )

        # track the s3 bucket, key prefix and (optional) quicksight template source
        general = {
            "S3Bucket": "%%BUCKET_NAME%%",
            "KeyPrefix": "%%SOLUTION_NAME%%/%%SOLUTION_VERSION%%",
        }
        if quicksight_template_arn:
            general["QuickSightSourceTemplateArn"] = "%%QUICKSIGHT_SOURCE%%"

        self.source_mapping = CfnMapping(
            parent,
            "SourceCode",
            mapping={"General": general},
            lazy=False,
        )
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

import logging
import os

DEFAULT_LEVEL = "WARNING"


def get_level():
    """
    Resolve the logging level from the LOG_LEVEL environment variable.

    Falls back to WARNING when the variable is unset or names an
    unrecognized level.
    :return: the logging level name to use
    """
    candidate = os.environ.get("LOG_LEVEL", DEFAULT_LEVEL)
    recognized = ("DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL")
    return candidate if candidate in recognized else DEFAULT_LEVEL


def get_logger(name):
    """
    Build a configured logger, compatible with the AWS Lambda runtime (which
    pre-configures handlers on the root logger) as well as local execution.
    :param name: The name of the logger (most often __name__ of the calling module)
    :return: The logger to use
    """
    if len(logging.getLogger().handlers) > 0:
        # Lambda (or a pytest conftest) already attached handlers to the root
        # logger: reuse it and quiet the chatty AWS SDK loggers.
        root_logger = logging.getLogger()
        root_logger.setLevel(get_level())
        for noisy in ("boto3", "botocore", "urllib3"):
            logging.getLogger(noisy).setLevel(logging.WARNING)
        return root_logger

    # Local execution: configure basic stdout logging once, then hand back a
    # module-specific logger.
    # fmt: off
    logging.basicConfig(level=get_level())  # NOSONAR - log level is user-specified; logs to stdout for AWS Lambda
    # fmt: on
    return logging.getLogger(name)
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0


import json
import os
from pathlib import Path
from unittest import mock

import pytest

from infrastructure.app import build_app
from infrastructure.app import solution as cdk_solution


@pytest.fixture
def cdk_json():
    """Load infrastructure/cdk.json so expectations track its context values."""
    path = Path(__file__).parents[2] / "infrastructure" / "cdk.json"
    return json.loads(path.read_text())


@mock.patch.dict(os.environ, {"BUCKET_NAME": "FAKEBUCKET"})
def test_deploy(cdk_json):
    """Ensures the template generates as expected and contains the correct metadata and mappings"""
    bucket_name = os.environ["BUCKET_NAME"]
    # Reset the module-level CDKSolution so context/synthesizer state does not
    # leak between tests.
    cdk_solution.reset()

    synth = build_app({"BUCKET_NAME": bucket_name})
    stack = synth.get_stack_by_name("uploader")

    # Template-level solution metadata written by the solutions helper.
    assert stack.template["Metadata"]["aws:solutions:templatename"] == "audience-uploader-from-aws-clean-rooms.template"
    assert stack.template["Metadata"]["aws:solutions:solution_id"] == cdk_json["context"]["SOLUTION_ID"]
    assert stack.template["Metadata"]["aws:solutions:solution_version"] == cdk_json["context"]["SOLUTION_VERSION"]

    # "Solution" mapping mirrors the cdk.json context.
    assert stack.template["Mappings"]["Solution"]["Data"]["ID"] == cdk_json["context"]["SOLUTION_ID"]
    assert stack.template["Mappings"]["Solution"]["Data"]["Version"] == cdk_json["context"]["SOLUTION_VERSION"]

    # "SourceCode" mapping points at the distribution bucket and key prefix.
    assert stack.template["Mappings"]["SourceCode"]["General"]["S3Bucket"] == bucket_name
    assert (
        stack.template["Mappings"]["SourceCode"]["General"]["KeyPrefix"]
        == f"{cdk_json['context']['SOLUTION_NAME']}/{cdk_json['context']['SOLUTION_VERSION']}"
    )
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

from dataclasses import dataclass, field

from aws_cdk import Aws
from aws_cdk.aws_lambda import IFunction

from aws_solutions.cdk.aws_lambda.environment_variable import EnvironmentVariable


@dataclass
class Environment:
    """
    Tracks environment variables common to AWS Lambda functions deployed by this solution
    """

    # The Lambda function whose environment is being populated
    scope: IFunction
    solution_name: EnvironmentVariable = field(init=False, repr=False)
    solution_id: EnvironmentVariable = field(init=False, repr=False)
    solution_version: EnvironmentVariable = field(init=False, repr=False)
    log_level: EnvironmentVariable = field(init=False, repr=False)
    powertools_service_name: EnvironmentVariable = field(init=False, repr=False)
    # FIX: declared for consistency with the siblings above — the original
    # assigned this in __post_init__ without declaring it as a field.
    powertools_metrics_namespace: EnvironmentVariable = field(init=False, repr=False)

    def __post_init__(self):
        # Namespace includes the stack name so multiple deployments don't collide
        cloudwatch_namespace_id = f"audience_uploader-from-aws-clean-rooms_solution_{Aws.STACK_NAME}"
        cloudwatch_service_id_default = "Workflow"  # FIX: was a pointless f-string (no placeholders)

        self.solution_name = EnvironmentVariable(self.scope, "SOLUTION_NAME")
        self.solution_id = EnvironmentVariable(self.scope, "SOLUTION_ID")
        self.solution_version = EnvironmentVariable(self.scope, "SOLUTION_VERSION")
        self.log_level = EnvironmentVariable(self.scope, "LOG_LEVEL", "INFO")
        self.powertools_service_name = EnvironmentVariable(
            self.scope, "POWERTOOLS_SERVICE_NAME", cloudwatch_service_id_default
        )
        self.powertools_metrics_namespace = EnvironmentVariable(
            self.scope, "POWERTOOLS_METRICS_NAMESPACE", cloudwatch_namespace_id
        )
3 | SPDX-License-Identifier: Apache-2.0 4 | */ 5 | 6 | 7 | 25 | 26 | 66 | 67 | 72 | -------------------------------------------------------------------------------- /source/tests/api/chalicelib/test_snap_api.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import os 5 | import json 6 | from chalice.test import Client 7 | import pytest 8 | os.environ['AMC_ENDPOINT_URL'] = "Test" 9 | os.environ['AMC_API_ROLE_ARN'] = "Test" 10 | os.environ['VERSION'] = "v1.0.0" 11 | os.environ['AMC_GLUE_JOB_NAME'] = "Test" 12 | os.environ['AWS_REGION'] = "us-east-1" 13 | import app 14 | 15 | @pytest.mark.filterwarnings("ignore:IAMAuthorizer") 16 | def test_start_snap_transformation(mocker): 17 | session_client_mocker = mocker.MagicMock() 18 | session_client_mocker.client.return_value = session_client_mocker 19 | expected_return = {"JobRunId": "test_id"} 20 | session_client_mocker.start_job_run.return_value = expected_return 21 | mocker.patch("app.boto3.session.Session", return_value=session_client_mocker) 22 | 23 | with Client(app.app) as client: 24 | response = client.http.post('/start_snap_transformation?', 25 | headers={'Content-Type': 'application/json'}, 26 | body=json.dumps({"sourceBucket": "1", "sourceKey": "2", "outputBucket": "3", "piiFields": "4", "segmentName": "5"})) 27 | assert response.json_body == expected_return 28 | 29 | expected_return_2 = {"JobRunId": "test_id_2", "SomeOtherImportantData": "test_important_data"} 30 | session_client_mocker.start_job_run.return_value = expected_return_2 31 | 32 | with Client(app.app) as client: 33 | response = client.http.post('/start_snap_transformation?', 34 | headers={'Content-Type': 'application/json'}, 35 | body=json.dumps({"sourceBucket": "1", "sourceKey": "2", "outputBucket": "3", "piiFields": "4", "segmentName": "5"})) 36 | assert response.json_body == {"JobRunId": 
"test_id_2"} # Assert that only the JobRunId is returned 37 | -------------------------------------------------------------------------------- /source/tests/api/chalicelib/test_tiktok_api.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import os 5 | import json 6 | from chalice.test import Client 7 | import pytest 8 | os.environ['AMC_ENDPOINT_URL'] = "Test" 9 | os.environ['AMC_API_ROLE_ARN'] = "Test" 10 | os.environ['VERSION'] = "v1.0.0" 11 | os.environ['AMC_GLUE_JOB_NAME'] = "Test" 12 | os.environ['AWS_REGION'] = "us-east-1" 13 | import app 14 | 15 | @pytest.mark.filterwarnings("ignore:IAMAuthorizer") 16 | def test_start_tiktok_transformation(mocker): 17 | session_client_mocker = mocker.MagicMock() 18 | session_client_mocker.client.return_value = session_client_mocker 19 | expected_return = {"JobRunId": "test_id"} 20 | session_client_mocker.start_job_run.return_value = expected_return 21 | mocker.patch("app.boto3.session.Session", return_value=session_client_mocker) 22 | 23 | with Client(app.app) as client: 24 | response = client.http.post('/start_tiktok_transformation?', 25 | headers={'Content-Type': 'application/json'}, 26 | body=json.dumps({"sourceBucket": "1", "sourceKey": "2", "outputBucket": "3", "piiFields": "4", "segmentName": "5"})) 27 | assert response.json_body == expected_return 28 | 29 | expected_return_2 = {"JobRunId": "test_id_2", "SomeOtherImportantData": "test_important_data"} 30 | session_client_mocker.start_job_run.return_value = expected_return_2 31 | 32 | with Client(app.app) as client: 33 | response = client.http.post('/start_tiktok_transformation?', 34 | headers={'Content-Type': 'application/json'}, 35 | body=json.dumps({"sourceBucket": "1", "sourceKey": "2", "outputBucket": "3", "piiFields": "4", "segmentName": "5"})) 36 | assert response.json_body == {"JobRunId": "test_id_2"} # 
Assert that only the JobRunId is returned 37 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_common/setup.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import re 5 | from pathlib import Path 6 | 7 | import setuptools 8 | 9 | VERSION_RE = re.compile(r"\#\# \[(?P.*)\]", re.MULTILINE) # NOSONAR 10 | 11 | 12 | def get_version(): 13 | """ 14 | Detect the solution version from the changelog. Latest version on top. 15 | """ 16 | changelog = open(Path(__file__).resolve().parent.parent / "CHANGELOG.md").read() 17 | versions = VERSION_RE.findall(changelog) 18 | if not len(versions): 19 | raise ValueError("use the standard semver format in your CHANGELOG.md") 20 | build_version = versions[0] 21 | print(f"Build Version: {build_version}") 22 | return build_version 23 | 24 | 25 | setuptools.setup( 26 | name="aws-solutions-python", 27 | version=get_version(), 28 | description="Tools to make AWS Solutions deployments with CDK + Python more manageable", 29 | long_description=open("../README.md").read(), 30 | author="Amazon Web Services", 31 | url="https://aws.amazon.com/solutions/implementations", 32 | license="Apache License 2.0", 33 | packages=setuptools.find_namespace_packages(exclude=("build",)), 34 | install_requires=[ 35 | "boto3>=1.17.52", 36 | "pip>=21.3", 37 | ], 38 | python_requires=">=3.7", 39 | classifiers=[ 40 | "Development Status :: 4 - Beta", 41 | "Intended Audience :: Developers", 42 | "License :: OSI Approved :: Apache Software License", 43 | "Programming Language :: JavaScript", 44 | "Programming Language :: Python :: 3 :: Only", 45 | "Programming Language :: Python :: 3.7", 46 | "Programming Language :: Python :: 3.8", 47 | "Programming Language :: Python :: 3.9", 48 | "Topic :: Software Development :: Code Generators", 49 | 
"Topic :: Utilities", 50 | "Typing :: Typed", 51 | ], 52 | zip_safe=False, 53 | ) 54 | -------------------------------------------------------------------------------- /sonar-project.properties: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | # Note: Currently testing and supported with code coverage sonarqube 5 | # collection for python lambda (python pytest, python unittest) and javascript jest 6 | # and CDK TypeScript 7 | 8 | # Refer to https://docs.sonarqube.org/latest/project-administration/narrowing-the-focus/ 9 | # for details on sources and exclusions. Note also .gitignore 10 | # TODO: customize sonar.tests if needed. Currently source/tests and source are not mutually exclusive 11 | sonar.sources= source, deployment 12 | 13 | # Focusing sonarqube analysis on non test code first and reducing noise from analysis of test code. Projects 14 | # can extend analysis to test code at a later stage. 15 | # - The deployment/*-assets/** directory for this solution includes glue source code which is already scanned 16 | # as part of the source scan. 
Therefore, excluding them from rescan under deployment (avoids false calculation 17 | # for duplicate, unit test coverage) 18 | sonar.exclusions= \ 19 | **/test/**, \ 20 | **/tests/**, \ 21 | deployment/*.yaml, \ 22 | deployment/**/*.yaml, \ 23 | deployment/*-assets/**, \ 24 | **/*.html, \ 25 | source/infrastructure/cdk.out/*, \ 26 | source/cdk_solution_helper_py/**, \ 27 | source/website/**, \ 28 | source/api/**, \ 29 | source/glue/**, \ 30 | source/snap_setup/**, \ 31 | source/helper/**, \ 32 | **/setup.py 33 | 34 | sonar.sourceEncoding=UTF-8 35 | 36 | ## Python Specific Properties* 37 | # coverage 38 | # https://docs.sonarqube.org/pages/viewpage.action?pageId=4784149 39 | # Comma-separated list of ant pattern describing paths to coverage reports, relative to projects 40 | # root. Leave unset to use the default ("coverage-reports/*coverage-*.xml"). 41 | sonar.python.coverage.reportPaths=source/tests/coverage-reports/*.coverage.xml 42 | 43 | # Uncomment to enable debugging by default 44 | # sonar.verbose=true 45 | # sonar.log.level=DEBUG 46 | 47 | # Disable if needed 48 | # sonar.scm.disabled=true 49 | -------------------------------------------------------------------------------- /source/infrastructure/lib/eventbridge_mappings.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

from aws_cdk import (
    aws_events as events,
    aws_sqs as sqs,
    Stack,
    Duration,
    aws_kms as kms
)
from aws_solutions_constructs.aws_eventbridge_sqs import EventbridgeToSqs
from constructs import Construct


class EventbridgeToSQS(Construct):
    """Link the Glue Job Event Bridge notifications to the SQS queues to kick off activators"""

    def __init__(self, scope: Construct):
        # :param scope: parent construct for this EventBridge->SQS wiring
        super().__init__(scope, "EventBrSqs")

        # Customer-managed KMS key (with rotation) encrypting both queues.
        key = kms.Key(self, "Key", enable_key_rotation=True)
        queue = sqs.Queue(
            self,
            "activator_connector",
            encryption=sqs.QueueEncryption.KMS,
            encryption_master_key=key,
            data_key_reuse=Duration.days(1),
            # long visibility timeout so a slow activator run can finish
            # before the message becomes visible again
            visibility_timeout=Duration.minutes(90),
        )

        # create the bus object to watch for the Glue Job notifications.
        # The rule matches S3 "Object Created" events for objects under
        # "output/" in buckets whose name starts with "uploader-etl-artifacts".
        construct_stack = EventbridgeToSqs(
            self,
            "EventBrToSqs",
            event_rule_props=events.RuleProps(
                event_pattern=events.EventPattern(
                    source=["aws.s3"],
                    detail_type=["Object Created"],
                    account=[Stack.of(self).account],
                    region=[Stack.of(self).region],
                    detail={"bucket": {"name": [{"prefix": "uploader-etl-artifacts"}]},
                            "object": {"key": [{"prefix": "output/"}]}
                            }
                )
            ),
            deploy_dead_letter_queue=True,
            existing_queue_obj=queue,
            dead_letter_queue_props=sqs.QueueProps(
                encryption=sqs.QueueEncryption.KMS,
                encryption_master_key=key
            ),
        )

        # return the new stack with the new Eventbridge and new SQS
        self.construct_stack = construct_stack
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
SPDX-License-Identifier: Apache-2.0
*/

import Vue from "vue";
import VueHighlightJS from "vue-highlightjs";
import BootstrapVue from "bootstrap-vue";

import "bootstrap/dist/css/bootstrap.css";
import "bootstrap-vue/dist/bootstrap-vue.css";
import "dropzone/dist/min/dropzone.min.css";
import "highlight.js/styles/github.css";

import App from "./App.vue";
import store from "./store";
import router from "./router.js";
import Amplify, * as AmplifyModules from "aws-amplify";
import { AmplifyPlugin } from "aws-amplify-vue";

// Fetch the deployment-specific configuration served next to the app bundle.
const getRuntimeConfig = async () => {
  const runtimeConfig = await fetch("/runtimeConfig.json");
  return runtimeConfig.json();
};

// Bootstrap the Vue app only after the runtime config is available, since
// Amplify (auth/storage/API) must be configured before first render.
getRuntimeConfig().then(function (json) {
  const awsconfig = {
    Auth: {
      region: json.AWS_REGION,
      userPoolId: json.USER_POOL_ID,
      userPoolWebClientId: json.USER_POOL_CLIENT_ID,
      identityPoolId: json.IDENTITY_POOL_ID,
    },
    Storage: {
      AWSS3: {
        region: json.AWS_REGION,
      },
    },
    API: {
      endpoints: [
        {
          name: "audience-uploader-from-aws-clean-rooms",
          endpoint: json.API_ENDPOINT,
          service: "execute-api",
          region: json.AWS_REGION,
        },
      ],
    },
  };
  console.log("Runtime config: " + JSON.stringify(json));
  Amplify.configure(awsconfig);
  Vue.config.productionTip = false;
  Vue.mixin({
    data() {
      return {
        // Distribute runtime configs into every Vue component
        AWS_REGION: json.AWS_REGION,
        DATA_BUCKET_NAME: json.DATA_BUCKET_NAME,
        ARTIFACT_BUCKET_NAME: json.ARTIFACT_BUCKET_NAME,
        TARGET_PLATFORM: json.TARGET_PLATFORM,
      };
    },
  });
  Vue.use(AmplifyPlugin, AmplifyModules);
  Vue.use(BootstrapVue);
  Vue.use(VueHighlightJS);
  new Vue({
    router,
    store,
    render: (h) => h(App),
  }).$mount("#app");
});
-------------------------------------------------------------------------------- /source/website/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "audience-uploader-from-aws-clean-rooms", 3 | "version": "1.0.0", 4 | "private": true, 5 | "scripts": { 6 | "serve": "vue-cli-service serve", 7 | "build": "vue-cli-service build", 8 | "lint": "vue-cli-service lint --no-fix", 9 | "deploy": "aws s3 sync --acl public-read --profile mie --delete dist/ s3://[your_website_bucket]" 10 | }, 11 | "dependencies": { 12 | "aws-amplify": "^4.3.36", 13 | "aws-amplify-vue": "2.1.5", 14 | "axios": ">=0.21.4", 15 | "core-js": "^3.14.0", 16 | "dropzone": "^5.7.0", 17 | "jquery": "^3.4.1", 18 | "jwt-decode": "^3.1.2", 19 | "lodash": "^4.17.21", 20 | "register-service-worker": "^1.7.2", 21 | "vue": "^2.6.12", 22 | "vue-highlightjs": "^1.3.3", 23 | "vue-router": "^3.5.1", 24 | "vuex": "^3.6.2", 25 | "vuex-persistedstate": "^v4.0.0-beta.3" 26 | }, 27 | "devDependencies": { 28 | "@vue/cli-plugin-babel": "^4.5.11", 29 | "@vue/cli-plugin-eslint": "^4.5.11", 30 | "@vue/cli-plugin-pwa": "^4.5.11", 31 | "@vue/cli-service": "^3.12.1", 32 | "babel-eslint": "^10.0.3", 33 | "bootstrap": "^4.6.0", 34 | "bootstrap-vue": "^2.21.2", 35 | "eslint": "^6.8.0", 36 | "eslint-plugin-vue": "^7.5.0", 37 | "vue-template-compiler": "^2.6.12", 38 | "webpack-subresource-integrity": "^1.5.2" 39 | }, 40 | "eslintConfig": { 41 | "root": false, 42 | "env": { 43 | "node": false 44 | }, 45 | "extends": [ 46 | "plugin:vue/recommended", 47 | "plugin:vue/essential", 48 | "eslint:recommended" 49 | ], 50 | "rules": { 51 | "no-console": "off", 52 | "no-undef": "off", 53 | "vue/require-prop-types": "off", 54 | "vue/attribute-hyphenation": "off", 55 | "vue/valid-v-for": "off", 56 | "vue/max-attributes-per-line": "off", 57 | "vue/html-self-closing": "off" 58 | }, 59 | "parserOptions": { 60 | "parser": "babel-eslint" 61 | } 62 | }, 63 | "eslintIgnore": [ 64 | 
"src/dist/*.js", 65 | "src/dist/min/*.js" 66 | ], 67 | "postcss": { 68 | "plugins": { 69 | "autoprefixer": {} 70 | } 71 | }, 72 | "browserslist": [ 73 | "> 1%", 74 | "last 2 versions" 75 | ], 76 | "description": "This solution uploads audience data from AWS clean rooms", 77 | "keywords": [], 78 | "author": "Amazon Web Services (https://aws.amazon.com/solutions)", 79 | "license": "Apache-2.0" 80 | } -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/setup.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import re 5 | from pathlib import Path 6 | 7 | import setuptools 8 | 9 | VERSION_RE = re.compile(r"\#\# \[(?P.*)\]", re.MULTILINE) # NOSONAR 10 | 11 | 12 | def get_version(): 13 | """ 14 | Detect the solution version from the changelog. Latest version on top. 15 | """ 16 | changelog = open(Path(__file__).resolve().parent.parent / "CHANGELOG.md").read() 17 | versions = VERSION_RE.findall(changelog) 18 | if not len(versions): 19 | raise ValueError("use the standard semver format in your CHANGELOG.md") 20 | build_version = versions[0] 21 | print(f"Build Version: {build_version}") 22 | return build_version 23 | 24 | 25 | setuptools.setup( 26 | name="aws-solutions-cdk", 27 | version=get_version(), 28 | description="Tools to make AWS Solutions deployments with CDK + Python more manageable", 29 | long_description=open("../README.md").read(), 30 | author="Amazon Web Services", 31 | url="https://aws.amazon.com/solutions/implementations", 32 | license="Apache License 2.0", 33 | packages=setuptools.find_namespace_packages(exclude=("build",)), 34 | package_data={ 35 | "": [ 36 | "requirements.txt", 37 | "Dockerfile", 38 | "__aws_solutions_bundling_version__", 39 | ] 40 | }, 41 | install_requires=[ 42 | "pip>=21.3", 43 | "aws_cdk_lib>=2.7.0", 44 | 
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

"""Chalice blueprint exposing the Snap data-transformation endpoint."""

from chalice import Blueprint, IAMAuthorizer
import boto3
import os
import logging

logger = logging.getLogger()
logger.setLevel(logging.INFO)

authorizer = IAMAuthorizer()

snap_routes = Blueprint(__name__)

# Environment variables
AMC_GLUE_JOB_NAME = os.environ['AMC_GLUE_JOB_NAME']  # Glue job that performs the Snap transformation


@snap_routes.route('/start_snap_transformation', cors=True, methods=['POST'], authorizer=authorizer)
def start_snap_transformation():
    """
    Invoke Glue job to prepare data for uploading into Snap.

    Expects a JSON body with sourceBucket, sourceKey, outputBucket,
    piiFields and segmentName; returns {'JobRunId': <glue job run id>}.
    :raises Exception: with a generic message on any failure (details logged)
    """
    try:
        log_request_parameters()
        source_bucket = snap_routes.current_request.json_body['sourceBucket']
        source_key = snap_routes.current_request.json_body['sourceKey']
        output_bucket = snap_routes.current_request.json_body['outputBucket']
        pii_fields = snap_routes.current_request.json_body['piiFields']
        segment_name = snap_routes.current_request.json_body['segmentName']

        session = boto3.session.Session(region_name=os.environ['AWS_REGION'])
        client = session.client('glue')

        args = {
            "--source_bucket": source_bucket,
            "--output_bucket": output_bucket,
            "--source_key": source_key,
            "--pii_fields": pii_fields,
            "--segment_name": segment_name,
        }
        response = client.start_job_run(JobName=AMC_GLUE_JOB_NAME, Arguments=args)
        # Return only the JobRunId so internal Glue response details are not leaked.
        return {'JobRunId': response['JobRunId']}
    except Exception as e:
        logger.error("Something went wrong while starting Snap transformation - ERROR: {}".format(e))
        # FIX: chain the original exception so the root cause is preserved in
        # tracebacks instead of being silently discarded.
        raise Exception("Something went wrong while starting Snap transformation") from e


def log_request_parameters():
    """Log the inbound request's routing info, parameters and raw body."""
    logger.info("Processing the following request:\n")
    logger.info("resource path: " + snap_routes.current_request.context['resourcePath'])
    logger.info("method: " + snap_routes.current_request.method)
    logger.info("uri parameters: " + str(snap_routes.current_request.uri_params))
    logger.info("query parameters: " + str(snap_routes.current_request.query_params))
    logger.info("request ID: " + snap_routes.current_request.context.get('requestId', ""))
    logger.info('request body: ' + snap_routes.current_request.raw_body.decode())
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

import logging
from os import getenv
from uuid import uuid4 as uuid

from crhelper import CfnResource

logger = logging.getLogger(__name__)
helper = CfnResource(log_level=getenv("LOG_LEVEL", "WARNING"))


def get_property(event, property_name, property_default=None):
    """Read a required property from the custom resource event, with an optional default."""
    properties = event.get("ResourceProperties", {})
    value = properties.get(property_name, property_default)
    if not value:
        raise ValueError(f"missing required property {property_name}")
    return value


@helper.create
def generate_name(event, _):
    """
    Generate a resource name containing the stack name and the resource purpose. This is useful
    when you need to associate policies that refer to a resource by name (and thus need
    a predictable resource name). This is commonly used when associating policies with buckets
    or other resources that might introduce a circular resource dependency

    :param event: The CloudFormation custom resource event
    :return: None
    """
    resource_id = get_property(event, "Id", uuid().hex[0:12])
    stack_name = get_property(event, "StackName")
    purpose = get_property(event, "Purpose")
    max_length = int(get_property(event, "MaxLength"))

    # Prefer a name that embeds the stack; fall back to purpose + id if too long.
    candidate = f"{stack_name}-{purpose}-{resource_id}".lower()
    if len(candidate) > max_length:
        logger.warning("cannot use stack name in bucket name - trying default")
        candidate = f"{purpose}-{resource_id}".lower()
    if len(candidate) > max_length:
        raise ValueError(
            f"the derived resource name {candidate} is too long ({len(candidate)} / {max_length}) - please use a shorter purpose or stack name"
        )

    logger.info(f"the derived resource name is {candidate}")
    helper.Data["Name"] = candidate
    helper.Data["Id"] = resource_id


@helper.update
@helper.delete
def no_op(_, __):
    pass  # pragma: no cover


def handler(event, _):
    """
    Handler entrypoint - see generate_name for implementation details
    :param event: The CloudFormation custom resource event
    :return: PhysicalResourceId
    """
    helper(event, _)  # pragma: no cover
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

from chalice import Blueprint, IAMAuthorizer
import boto3
import os
import logging

logger = logging.getLogger()
logger.setLevel(logging.INFO)

authorizer = IAMAuthorizer()

tiktok_routes = Blueprint(__name__)

# Environment variables
AMC_GLUE_JOB_NAME = os.environ['AMC_GLUE_JOB_NAME']


@tiktok_routes.route('/start_tiktok_transformation', cors=True, methods=['POST'], authorizer=authorizer)
def start_tiktok_transformation():
    """
    Invoke Glue job to prepare data for uploading into Tiktok.

    Expects a JSON body with sourceBucket, sourceKey, outputBucket,
    piiFields and segmentName; returns {"JobRunId": ...} on success.
    """
    try:
        log_request_parameters()
        request_body = tiktok_routes.current_request.json_body
        args = {
            "--source_bucket": request_body['sourceBucket'],
            "--output_bucket": request_body['outputBucket'],
            "--source_key": request_body['sourceKey'],
            "--pii_fields": request_body['piiFields'],
            "--segment_name": request_body['segmentName'],
        }

        session = boto3.session.Session(region_name=os.environ['AWS_REGION'])
        client = session.client('glue')

        response = client.start_job_run(JobName=AMC_GLUE_JOB_NAME, Arguments=args)
        return {'JobRunId': response['JobRunId']}
    except Exception as e:
        # lazy %-style args avoid building the message when the level is disabled
        logger.error("Something went wrong while starting TikTok transformation - ERROR: %s", e)
        # chain the cause so the original traceback is preserved in the logs
        raise Exception("Something went wrong while starting TikTok transformation") from e


def log_request_parameters():
    """Log the details of the current request to aid troubleshooting."""
    current_request = tiktok_routes.current_request
    logger.info("Processing the following request:\n")
    logger.info("resource path: %s", current_request.context['resourcePath'])
    logger.info("method: %s", current_request.method)
    logger.info("uri parameters: %s", current_request.uri_params)
    logger.info("query parameters: %s", current_request.query_params)
    logger.info("request ID: %s", current_request.context.get('requestId', ""))
    logger.info("request body: %s", current_request.raw_body.decode())
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

import logging
import os
import shutil
from dataclasses import dataclass
from pathlib import Path

logger = logging.getLogger("cdk-helper")


@dataclass
class Cleanable:
    """Encapsulates something that can be cleaned by the cleaner"""

    name: str
    file_type: str
    pattern: str

    def __post_init__(self):
        # only directory ('d') and file ('f') targets make sense here
        if self.file_type not in ("d", "f"):
            raise ValueError("only directories and files are allowed ('d' or 'f')")

    def delete(self, source_dir):
        """Remove every match of `pattern` found under `source_dir`."""
        for match in Path(source_dir).rglob(self.pattern):
            # prevent the module from being unlinked in a dev environment
            if "aws_solutions" in str(match.name):
                continue
            if self.file_type == "d" and match.is_dir():
                logger.info(f"deleting {self.name} directory {match}")
                shutil.rmtree(match, ignore_errors=True)
            elif self.file_type == "f" and match.is_file():
                logger.info(f"deleting {self.name} file {match}")
                try:
                    match.unlink()
                except FileNotFoundError:
                    pass  # already removed - nothing left to do


class Cleaner:
    """Encapsulates functions that help clean up the build environment."""

    TO_CLEAN = [
        Cleanable("Python bytecode", "f", "*.py[cod]"),
        Cleanable("Python Coverage databases", "f", ".coverage"),
        Cleanable("CDK Cloud Assemblies", "d", "cdk.out"),
        Cleanable("Python egg", "d", "*.egg-info"),
        Cleanable("Python bytecode cache", "d", "__pycache__"),
        Cleanable("Python test cache", "d", ".pytest_cache"),
    ]

    @staticmethod
    def clean_dirs(*args):
        """Recursively remove each of its arguments, then recreate the directory"""
        for target in args:
            logger.info("cleaning %s" % target)
            shutil.rmtree(target, ignore_errors=True)
            os.makedirs(target)

    @staticmethod
    def cleanup_source(source_dir):
        """Cleans up all items found in TO_CLEAN"""
        for cleanable in Cleaner.TO_CLEAN:
            cleanable.delete(source_dir)
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

"""Shared constants and fixtures for the uploader Lambda handler tests."""

import os
import pytest
import json
import boto3
import pandas as pd

from moto import mock_secretsmanager
from datetime import datetime, timedelta


# Environment the handlers read at import time.
os.environ["REFRESH_SECRET_NAME"] = "Test"
os.environ["CRED_SECRET_NAME"] = "Test"
os.environ["SOLUTION_ID"] = "SO0226"
os.environ["SOLUTION_VERSION"] = "v1.0.0"
os.environ["SOLUTION_NAME"] = "audience-uploader-from-aws-clean-rooms"
os.environ["AWS_REGION"] = "us-east-1"

TEST_CREDENTIALS = {"ad_account_id": "test_account_id", "expires_at": "test", "access_token": "test_access_token", "client_id": "test", "client_secret": "test", "refresh_token": "test"}
TEST_CREDENTIALS_2 = {"ad_account_id": "test_account_id_2"}
TEST_SEGMENT_ID = "test_segment_id"
TEST_SEGMENT_DATA = {"segments": [
    {"segment": {"name": "segment1", "id": 1}},
    {"segment": {"name": "segment2", "id": 2}},
    {"segment": {"name": "segment3", "id": 3}}
]}

FAKE_GZ_EVENT = {"Records": [{"body": """{"detail": {"bucket": {"name": "test_bucket_name"}, "object": {"key": "test1/test2/test3/PHONE_SHA256/test4.gz"}}}"""}]}
FAKE_CSV_EVENT = {"Records": [{"body": """{"detail": {"bucket": {"name": "test_bucket_name"}, "object": {"key": "test1/test2/test3/PHONE_SHA256/test4.csv"}}}"""}]}

SCHEMA_HASH_VALUES = pd.DataFrame(data={"schema": ["EMAIL_SHA256", "EMAIL_SHA256"], "hash": ["test_hash", "test_hash_2"]})

RESPONSE_SUCCESS = {"result": "success"}

SUCCESSFUL_UPLOAD_2 = {"result": "success", "users": [{"user": {"number_uploaded_users": 2}}]}

EXPECTED_EXPIRY_OFFSET = 3


@pytest.fixture
def setup_secrets_client():
    """Yield a Secrets Manager client backed by moto's in-memory mock.

    NOTE: the original stacked @mock_secretsmanager above @pytest.fixture;
    decorating the fixture object has no effect, and the context manager
    below already provides the mocking, so the redundant decorator is gone.
    """
    with mock_secretsmanager():
        yield boto3.client("secretsmanager", region_name="us-east-1")


@pytest.fixture
def create_secrets(setup_secrets_client, create_times):
    """Create an already-expired test secret; yield (client, name, value)."""
    expired, _ = create_times
    secret_name = "test_secret"

    # copy so we do not mutate the shared TEST_CREDENTIALS constant between tests
    secret_value = dict(TEST_CREDENTIALS)
    secret_value["expires_at"] = expired
    setup_secrets_client.create_secret(Name=secret_name, SecretString=json.dumps(secret_value))
    yield setup_secrets_client, secret_name, secret_value


@pytest.fixture
def create_times():
    """Yield (expired, unexpired) timestamp strings one hour either side of now."""
    now = datetime.now()
    expired = now - timedelta(hours=1)
    unexpired = now + timedelta(hours=1)
    yield expired.strftime("%Y-%m-%d %H:00:00"), unexpired.strftime("%Y-%m-%d %H:00:00")
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

from constructs import Construct
from aws_cdk.aws_lambda_event_sources import SqsEventSource
from aws_cdk import (
    aws_sqs as sqs,
    CfnParameter,
    aws_kms as kms,
    Stack
)
from aws_solutions.cdk.stack import NestedSolutionStack
from lib.aws_lambda.layers.aws_solutions.layer import SolutionsLayer

# CDK context keys used to look up solution metadata on the enclosing stack.
SOLUTION_ID = "SOLUTION_ID"
SOLUTION_VERSION = "SOLUTION_VERSION"

class BaseUploaderStack(NestedSolutionStack):
    """Shared base for the platform-specific uploader nested stacks.

    Wires up the pieces common to every uploader: an imported SQS queue
    (ARN supplied as a CloudFormation parameter), the solution id/version
    read from CDK context, and the shared solutions Lambda layer.
    """

    def __init__(self, scope: Construct, construct_id: str, *args, **kwargs) -> None:
        super().__init__(scope, construct_id, *args, **kwargs)

        # ARN of an existing queue passed in as a CloudFormation parameter.
        self.queue_arn_parameter = CfnParameter(self, "SQSArn")

        # Import (not create) the queue referenced by the parameter.
        self.queue = sqs.Queue.from_queue_arn(
            self,
            id="SQS",
            queue_arn=self.queue_arn_parameter.value_as_string,
        )

        stack = Stack.of(self)
        self.solution_id = stack.node.try_get_context(SOLUTION_ID)
        self.solution_version = stack.node.try_get_context(SOLUTION_VERSION)

        #Layers
        self.layer_solutions = SolutionsLayer.get_or_create(self)

    ##############################################################################
    # Lambda dest failure Queue
    ##############################################################################
    def add_lambda_dest_failure_queue(self):
        """Create a KMS-encrypted queue (key rotation enabled) for Lambda
        destination failures and store it on self.lambda_dest_failure_queue."""
        key = kms.Key(self, "Key", enable_key_rotation=True)
        self.lambda_dest_failure_queue = sqs.Queue(
            self,
            "connector_dest_failure_queue",
            encryption=sqs.QueueEncryption.KMS,
            encryption_master_key=key,
        )

    ##############################################################################
    # Get Platform name
    ##############################################################################
    def get_platform_name(self):
        """Return self.TARGET_PLATFORM.

        TARGET_PLATFORM is not defined in this class - presumably each
        subclass declares it; confirm against the concrete uploader stacks.
        """
        return self.TARGET_PLATFORM

    ##############################################################################
    # Add Lambda Event Source
    ##############################################################################

    def add_lambda_event_source(self, uploader_lambda, queue):
        """Subscribe uploader_lambda to queue (batch size 1) and grant it
        permission to consume messages."""
        event_source = SqsEventSource(queue, batch_size=1)
        queue.grant_consume_messages(uploader_lambda)
        uploader_lambda.add_event_source(event_source)
= {"Content-Type": "application/json"} 46 | payload = { 47 | "Solution": resource_properties["Solution"], 48 | "UUID": random_id, 49 | "TimeStamp": datetime.utcnow().isoformat(), 50 | "Data": _sanitize_data(event), 51 | } 52 | 53 | logger.info(f"Sending payload: {payload}") 54 | response = requests.post(METRICS_ENDPOINT, json=payload, headers=headers) 55 | logger.info( 56 | f"Response from metrics endpoint: {response.status_code} {response.reason}" 57 | ) 58 | if "stackTrace" in response.text: 59 | logger.exception("Error submitting usage data: %s" % response.text) 60 | # raise when there is an HTTP error (non success code) 61 | response.raise_for_status() 62 | except requests.exceptions.RequestException as exc: 63 | logger.exception(f"Could not send usage data: {exc}") 64 | except Exception as exc: 65 | logger.exception(f"Unknown error when trying to send usage data: {exc}") 66 | 67 | return random_id 68 | 69 | 70 | def handler(event, context): 71 | helper(event, context) # pragma: no cover 72 | -------------------------------------------------------------------------------- /THIRD_PARTY_LICENSES.txt: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Redistribution and use in source and binary forms, with or without 4 | modification, are permitted provided that the following conditions are met: 5 | 6 | 1. Redistributions of source code must retain the above copyright notice, this 7 | list of conditions and the following disclaimer. 8 | 9 | 2. Redistributions in binary form must reproduce the above copyright notice, 10 | this list of conditions and the following disclaimer in the documentation 11 | and/or other materials provided with the distribution. 12 | 13 | 3. Neither the name of the copyright holder nor the names of its 14 | contributors may be used to endorse or promote products derived from 15 | this software without specific prior written permission. 
16 | 17 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 18 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 19 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 20 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 21 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 22 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 23 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 24 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 25 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 26 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 27 | 28 | ------------------------------------------------- 29 | 30 | MIT License 31 | 32 | Permission is hereby granted, free of charge, to any person obtaining a copy 33 | of this software and associated documentation files (the "Software"), to deal 34 | in the Software without restriction, including without limitation the rights 35 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 36 | copies of the Software, and to permit persons to whom the Software is 37 | furnished to do so, subject to the following conditions: 38 | 39 | The above copyright notice and this permission notice shall be included in all 40 | copies or substantial portions of the Software. 41 | 42 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 43 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 44 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 45 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 46 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 47 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 48 | SOFTWARE. 49 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_hash/hash.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from pathlib import Path 5 | 6 | from aws_cdk import ( 7 | CfnResource, 8 | Stack, 9 | ) 10 | from constructs import Construct 11 | 12 | from aws_solutions.cdk.aws_lambda.python.function import SolutionsPythonFunction 13 | from aws_solutions.cdk.cfn_nag import add_cfn_nag_suppressions, CfnNagSuppression 14 | 15 | 16 | class ResourceHash(Construct): 17 | """Used to create unique resource names based on the hash of the stack ID""" 18 | 19 | def __init__( 20 | self, 21 | scope: Construct, 22 | construct_id: str, 23 | purpose: str, 24 | max_length: int, 25 | ): 26 | super().__init__(scope, construct_id) 27 | 28 | uuid = "ResourceHashFunction-b8785f53-1531-4bfb-a119-26aa638d7b19" 29 | stack = Stack.of(self) 30 | self._resource_name_function = stack.node.try_find_child(uuid) 31 | 32 | if not self._resource_name_function: 33 | self._resource_name_function = SolutionsPythonFunction( 34 | stack, 35 | uuid, 36 | entrypoint=Path(__file__).parent 37 | / "src" 38 | / "custom_resources" 39 | / "hash.py", 40 | function="handler", 41 | ) 42 | add_cfn_nag_suppressions( 43 | resource=self._resource_name_function.node.default_child, 44 | suppressions=[ 45 | CfnNagSuppression( 46 | "W89", "This AWS Lambda Function is not deployed to a VPC" 47 | ), 48 | CfnNagSuppression( 49 | "W92", 50 | "This AWS Lambda Function 
does not require reserved concurrency", 51 | ), 52 | ], 53 | ) 54 | 55 | properties = { 56 | "ServiceToken": self._resource_name_function.function_arn, 57 | "Purpose": purpose, 58 | "MaxLength": max_length, 59 | } 60 | 61 | self.logical_name = f"{construct_id}HashResource" 62 | self.resource_name_resource = CfnResource( 63 | self, 64 | self.logical_name, 65 | type="Custom::ResourceHash", 66 | properties=properties, 67 | ) 68 | 69 | @property 70 | def resource_name(self): 71 | return self.resource_name_resource.get_att("Name") 72 | 73 | @property 74 | def resource_id(self): 75 | return self.resource_name_resource.get_att("Id") 76 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/stepfunctions/solution_fragment.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from typing import List, Dict 5 | from typing import Optional 6 | 7 | from aws_cdk import Duration 8 | from aws_cdk.aws_lambda import CfnFunction 9 | from aws_cdk.aws_stepfunctions import State, INextable, TaskInput, StateMachineFragment 10 | from aws_cdk.aws_stepfunctions_tasks import LambdaInvoke 11 | from constructs import Construct 12 | 13 | 14 | class SolutionFragment(StateMachineFragment): 15 | def __init__( 16 | self, # NOSONAR (python:S107) - allow large number of method parameters 17 | scope: Construct, 18 | id: str, 19 | function: CfnFunction, 20 | payload: Optional[TaskInput] = None, 21 | input_path: Optional[str] = "$", 22 | result_path: Optional[str] = "$", 23 | output_path: Optional[str] = "$", 24 | result_selector: Optional[Dict] = None, 25 | failure_state: Optional[State] = None, 26 | backoff_rate: Optional[int] = 1.05, 27 | interval: Optional[Duration] = Duration.seconds(5), 28 | max_attempts: Optional[int] = 5, 29 | ): 30 | 
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0


import json
import logging
from functools import wraps
from os import environ
from pathlib import Path
from typing import Union

ARGUMENT_ERROR = "functions decorated with `with_cdk_context` can only accept one dictionary argument - the additional context overrides to use"

logger = logging.getLogger("cdk-helper")


class SolutionContext:
    """Loads CDK context from cdk.json and injects it into decorated functions."""

    def __init__(self, cdk_json_path: Union[None, Path] = None):
        self.cdk_json_path = cdk_json_path
        self.context = self._load_cdk_context()

    def requires(  # NOSONAR - higher cognitive complexity allowed
        self, context_var_name, context_var_value=None
    ):
        """Decorator factory ensuring `context_var_name` is set in the context.

        Resolution order: environment variable, then `context_var_value`,
        then whatever cdk.json already provided. The decorated function is
        called with the merged context dict; a missing value raises ValueError.
        """
        context = self.context

        def cdk_context_decorator(f):
            @wraps(f)
            def wrapper(*args):
                # the decorated function accepts at most one dict of overrides
                if len(args) > 1:
                    raise ValueError(ARGUMENT_ERROR)
                if args and not isinstance(args[0], dict):
                    raise TypeError(ARGUMENT_ERROR)

                # merge caller-provided overrides (if any) into the context
                context.update(args[0] if args else {})

                # environment wins over the default supplied to the decorator
                env_override = environ.get(context_var_name)
                if env_override:
                    context[context_var_name] = env_override
                elif context_var_name and context_var_value:
                    context[context_var_name] = context_var_value

                if not context.get(context_var_name):
                    raise ValueError(
                        f"Missing cdk.json context variable or environment variable for {context_var_name}."
                    )

                return f(context)

            return wrapper

        return cdk_context_decorator

    def _load_cdk_context(self):
        """Load context from cdk.json"""
        if not self.cdk_json_path:
            return {}

        try:
            with open(self.cdk_json_path, "r") as config_file:
                config = json.loads(config_file.read())
        except FileNotFoundError:
            logger.warning(f"{self.cdk_json_path} not found, using empty context!")
            return {}
        return config.get("context", {})
format" 36 | ) 37 | 38 | libraries = [] if not libraries else libraries 39 | for lib in libraries: 40 | if lib.is_file(): 41 | raise ValueError( 42 | f"library {lib} must not be a file, but rather a directory" 43 | ) 44 | 45 | bundling = SolutionsPythonBundling( 46 | self.requirements_path, libraries=libraries, install_path="python" 47 | ) 48 | 49 | kwargs["code"] = self._get_code(bundling) 50 | 51 | # initialize the LayerVersion 52 | super().__init__(scope, construct_id, **kwargs) 53 | 54 | def _get_code(self, bundling: SolutionsPythonBundling) -> Code: 55 | # create the layer version locally 56 | code_parameters = { 57 | "path": str(self.requirements_path), 58 | "asset_hash_type": AssetHashType.CUSTOM, 59 | "asset_hash": uuid4().hex, 60 | "exclude": DEPENDENCY_EXCLUDES, 61 | } 62 | 63 | code = Code.from_asset( 64 | bundling=BundlingOptions( 65 | image=DockerImage.from_registry( 66 | "scratch" 67 | ), # NEVER USED - FOR NOW ALL BUNDLING IS LOCAL 68 | command=["not_used"], 69 | entrypoint=["not_used"], 70 | local=bundling, 71 | ), 72 | **code_parameters, 73 | ) 74 | 75 | return code 76 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_hash/src/custom_resources/hash.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

import logging
from hashlib import md5
from os import getenv

from crhelper import CfnResource

logger = logging.getLogger(__name__)
helper = CfnResource(log_level=getenv("LOG_LEVEL", "WARNING"))


class StackId:
    """Splits a CloudFormation StackId ARN into its addressable components."""

    def __init__(self, event):
        self.stack_id = event.get("StackId")
        self.partition = self.get_arn_component(1)
        self.service = self.get_arn_component(2)
        self.region = self.get_arn_component(3)
        self.account = self.get_arn_component(4)
        self.stack_name = self.get_arn_component(5).split("/")[1]

    def get_arn_component(self, idx: int) -> str:
        """Return the idx-th colon-delimited field of the stack ARN."""
        return self.stack_id.split(":")[idx]

    @property
    def hash(self):
        """Uppercase MD5 hex digest of the stack ARN without its unique suffix."""
        # NOSONAR - safe to hash, not for cryptographic purposes
        stable_id = self.stack_id.rsplit("/", 1)[0]
        digest = md5(stable_id.encode("ascii"))  # nosec # NOSONAR
        return digest.hexdigest().upper()


def get_property(event, property_name, property_default=None):
    """Fetch a ResourceProperties value, raising when it is missing or falsy."""
    value = event.get("ResourceProperties", {}).get(property_name, property_default)
    if not value:
        raise ValueError(f"missing required property {property_name}")
    return value


@helper.create
def generate_hash(event, _):
    """
    Generate a resource name containing a hash of the stack ID (without unique ID) and resource purpose.
    This is useful when you need to create named IAM roles

    :param event: The CloudFormation custom resource event
    :return: None
    """
    stack_id = StackId(event)
    purpose = get_property(event, "Purpose")
    max_length = int(get_property(event, "MaxLength", 64))

    # <purpose>-<first 8 characters of the stack hash>
    name = f"{purpose}-{stack_id.hash[:8]}"
    if len(name) > max_length:
        raise ValueError(
            f"the derived resource name {name} is too long ({len(name)} / {max_length}) - please use a shorter Purpose"
        )

    logger.info(f"the derived resource name is {name}")
    helper.Data["Name"] = name
    helper.Data["Id"] = stack_id.hash


@helper.update
@helper.delete
def no_op(_, __):
    pass  # pragma: no cover


def handler(event, _):
    """
    Handler entrypoint - see generate_hash for implementation details
    :param event: The CloudFormation custom resource event
    :return: PhysicalResourceId
    """
    helper(event, _)  # pragma: no cover
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from __future__ import annotations 5 | 6 | import re 7 | 8 | import jsii 9 | from aws_cdk import Stack, Aspects, IAspect, NestedStack 10 | from constructs import Construct, IConstruct 11 | 12 | from aws_solutions.cdk.aws_lambda.cfn_custom_resources.solutions_metrics import Metrics 13 | from aws_solutions.cdk.interfaces import TemplateOptions 14 | from aws_solutions.cdk.mappings import Mappings 15 | 16 | RE_SOLUTION_ID = re.compile(r"^SO\d+$") 17 | RE_TEMPLATE_FILENAME = re.compile(r"^[a-z]+(?:-[a-z]+)*\.template$") # NOSONAR 18 | 19 | 20 | def validate_re(name, value, regex: re.Pattern): 21 | if regex.match(value): 22 | return value 23 | raise ValueError(f"{name} must match '{regex.pattern}") 24 | 25 | 26 | def validate_solution_id(solution_id: str) -> str: 27 | return validate_re("solution_id", solution_id, RE_SOLUTION_ID) 28 | 29 | 30 | def validate_template_filename(template_filename: str) -> str: 31 | return validate_re("template_filename", template_filename, RE_TEMPLATE_FILENAME) 32 | 33 | 34 | @jsii.implements(IAspect) 35 | class MetricsAspect: 36 | def __init__(self, stack: SolutionStack): 37 | self.stack = stack 38 | 39 | def visit(self, node: IConstruct): 40 | """Called before synthesis, this allows us to set metrics at the end of synthesis""" 41 | if node == self.stack: 42 | self.stack.metrics = Metrics(self.stack, "Metrics", self.stack.metrics) 43 | 44 | 45 | class SolutionStack(Stack): 46 | def __init__( 47 | self, 48 | scope: Construct, 49 | construct_id: str, 50 | description: str, 51 | template_filename, 52 | **kwargs, 53 | ): 54 | super().__init__(scope, construct_id, **kwargs) 55 | 56 | self.metrics = {} 57 | self.solution_id = self.node.try_get_context("SOLUTION_ID") 58 | self.solution_version = self.node.try_get_context("SOLUTION_VERSION") 59 | self.mappings = Mappings(self, solution_id=self.solution_id) 60 | self.solutions_template_filename = validate_template_filename(template_filename) 61 | 
self.description = description.strip(".") 62 | self.solutions_template_options = TemplateOptions( 63 | self, 64 | construct_id=construct_id, 65 | description=f"({self.solution_id}) - {self.description}. Version {self.solution_version}", 66 | filename=template_filename, 67 | ) 68 | Aspects.of(self).add(MetricsAspect(self)) 69 | 70 | 71 | class NestedSolutionStack(SolutionStack, NestedStack): 72 | """A nested version of SolutionStack""" 73 | 74 | def __init__(self, scope: Construct, id: str, **kwargs) -> None: 75 | super().__init__(scope, id, **kwargs) -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/solutions_metrics/metrics.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from pathlib import Path 5 | from typing import Dict 6 | 7 | from aws_cdk import ( 8 | CfnResource, 9 | Fn, 10 | CfnCondition, 11 | Aws, 12 | ) 13 | from constructs import Construct 14 | 15 | from aws_solutions.cdk.aws_lambda.python.function import SolutionsPythonFunction 16 | from aws_solutions.cdk.cfn_nag import add_cfn_nag_suppressions, CfnNagSuppression 17 | 18 | 19 | class Metrics(Construct): 20 | """Used to track anonymous solution deployment metrics.""" 21 | 22 | def __init__( 23 | self, 24 | scope: Construct, 25 | construct_id: str, 26 | metrics: Dict[str, str], 27 | ): 28 | super().__init__(scope, construct_id) 29 | 30 | if not isinstance(metrics, dict): 31 | raise ValueError("metrics must be a dictionary") 32 | 33 | self._metrics_function = SolutionsPythonFunction( 34 | self, 35 | "MetricsFunction", 36 | entrypoint=Path(__file__).parent 37 | / "src" 38 | / "custom_resources" 39 | / "metrics.py", 40 | function="handler", 41 | ) 42 | add_cfn_nag_suppressions( 43 | 
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

from pathlib import Path
from typing import Optional

from aws_cdk import (
    CfnResource,
    Aws,
    Stack,
)
from constructs import Construct

from aws_solutions.cdk.aws_lambda.python.function import SolutionsPythonFunction
from aws_solutions.cdk.cfn_nag import add_cfn_nag_suppressions, CfnNagSuppression


class ResourceName(Construct):
    """Used to create unique resource names of the format {stack_name}-{purpose}-{id}"""

    def __init__(
        self,
        scope: Construct,
        construct_id: str,
        purpose: str,
        max_length: int,
        resource_id: Optional[str] = None,
    ):
        super().__init__(scope, construct_id)

        # stable construct ID so one backing Lambda is shared by every ResourceName in a stack
        uuid = "ResourceNameFunction-d45b185a-fe34-44ab-a375-17f89597d9ec"
        stack = Stack.of(self)
        self._resource_name_function = stack.node.try_find_child(uuid)

        if not self._resource_name_function:
            # first ResourceName in this stack: create the singleton naming function
            self._resource_name_function = SolutionsPythonFunction(
                stack,
                uuid,
                entrypoint=Path(__file__).parent
                / "src"
                / "custom_resources"
                / "name.py",
                function="handler",
            )
            add_cfn_nag_suppressions(
                resource=self._resource_name_function.node.default_child,
                suppressions=[
                    CfnNagSuppression(
                        "W89", "This AWS Lambda Function is not deployed to a VPC"
                    ),
                    CfnNagSuppression(
                        "W92",
                        "This AWS Lambda Function does not require reserved concurrency",
                    ),
                ],
            )

        properties = {
            "ServiceToken": self._resource_name_function.function_arn,
            "Purpose": purpose,
            "StackName": Aws.STACK_NAME,
            "MaxLength": max_length,
        }
        if resource_id:
            properties["Id"] = resource_id

        self.logical_name = f"{construct_id}NameResource"
        self.resource_name_resource = CfnResource(
            self,
            self.logical_name,
            type="Custom::ResourceName",
            properties=properties,
        )

    @property
    def resource_name(self):
        """The generated name (resolved at deploy time)."""
        return self.resource_name_resource.get_att("Name")

    @property
    def resource_id(self):
        """The generated unique ID (resolved at deploy time)."""
        return self.resource_name_resource.get_att("Id")
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

import os

import boto3

import aws_solutions.core.config

# module-level caches so each boto3 client/resource/session is created only once
_helpers_service_clients = {}
_helpers_service_resources = {}
_session = None


class EnvironmentVariableError(Exception):
    """Raised when a required environment variable is missing."""


def get_aws_region():
    """
    Get the caller's AWS region from the environment variable AWS_REGION
    :return: the AWS region name (e.g. us-east-1)
    """
    region = os.environ.get("AWS_REGION")
    if not region:
        raise EnvironmentVariableError("Missing AWS_REGION environment variable.")
    return region


def get_aws_partition():
    """
    Get the caller's AWS partition by deriving it from the AWS region
    :return: partition name for the current AWS region (e.g. aws)
    """
    region_name = get_aws_region()
    if region_name.startswith("cn"):
        # China regions
        return "aws-cn"
    if region_name.startswith("us-gov"):
        # AWS GovCloud (US) regions
        return "aws-us-gov"
    # all commercial regions
    return "aws"


def get_session():
    """Return the shared boto3 session, creating it on first use."""
    global _session
    if not _session:
        _session = boto3.session.Session()
    return _session


def get_service_client(service_name):
    """Return a cached boto3 client for `service_name` in the caller's region."""
    global _helpers_service_clients
    if service_name not in _helpers_service_clients:
        _helpers_service_clients[service_name] = get_session().client(
            service_name,
            config=aws_solutions.core.config.botocore_config,
            region_name=get_aws_region(),
        )
    return _helpers_service_clients[service_name]


def get_service_resource(service_name):
    """Return a cached boto3 resource for `service_name` in the caller's region."""
    global _helpers_service_resources
    if service_name not in _helpers_service_resources:
        _helpers_service_resources[service_name] = get_session().resource(
            service_name,
            config=aws_solutions.core.config.botocore_config,
            region_name=get_aws_region(),
        )
    return _helpers_service_resources[service_name]


def get_aws_account() -> str:
    """
    Get the caller's AWS account ID from STS
    :return: the AWS account ID of the caller
    """
    sts = get_service_client("sts")
    return sts.get_caller_identity().get("Account")
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

import importlib.util  # bug fix: `import importlib` alone does not guarantee importlib.util is bound
import json
import logging
from functools import wraps
from pathlib import Path

logger = logging.getLogger("cdk-helper")


class CDKLoaderException(Exception):
    """Raised when a CDK app cannot be located, parsed or loaded."""


def log_error(error):
    """Log an error message, then raise it as a CDKLoaderException."""
    logger.error(error)
    raise CDKLoaderException(error)


def _cdk_json_present(func):
    """Decorator: validate that a usable python3 cdk.json sits next to the CDK app."""

    @wraps(func)
    def cdk_json_present(cdk_app_path: Path, cdk_app_name):
        app_path = cdk_app_path.parent
        cdk_json_path = app_path / "cdk.json"  # hoisted: used by both checks below
        cdk_json_dict = {}
        if not cdk_json_path.exists():
            log_error(f"please ensure a cdk.json is present at {app_path}")

        try:
            cdk_json_dict = json.loads(cdk_json_path.read_text())
        except ValueError as exc:
            log_error(f"failed to parse cdk.json: {exc}")

        cdk_app = cdk_json_dict.get("app")
        if not cdk_app:
            log_error("failed to find `app` in cdk.json")

        if "python3" not in cdk_app:
            log_error(
                f"this helper only supports python3 CDK apps at this time - yours was declared as {cdk_app}"
            )

        return func(cdk_app_path, cdk_app_name)

    return cdk_json_present


@_cdk_json_present
def load_cdk_app(cdk_app_path, cdk_app_name):
    """
    Load a CDK app from a folder path (dynamically)
    :param cdk_app_path: The full path of the CDK app to load
    :param cdk_app_name: The module path (starting from cdk_app_path) to find the function returning synth()
    :return: the CDK app entrypoint function
    """
    try:
        (cdk_app_name, cdk_app_entrypoint) = cdk_app_name.split(":")
    except ValueError:
        log_error("please provide your `cdk_app_name` as path.to.cdk:function_name")

    if not cdk_app_path.exists():
        # bug fix: report the path that was checked (previously reported the app name)
        log_error(f"could not find `{cdk_app_path}` (please use a full path)")

    spec = importlib.util.spec_from_file_location(cdk_app_name, cdk_app_path)
    module = importlib.util.module_from_spec(spec)
    try:
        spec.loader.exec_module(module)
    except Exception as exc:
        log_error(f"could not load `{cdk_app_entrypoint}` in `{cdk_app_name}`: {exc}")

    try:
        cdk_function = getattr(module, cdk_app_entrypoint)
    except AttributeError:
        log_error(
            f"could not find CDK entrypoint `{cdk_app_entrypoint}` in `{cdk_app_name}`"
        )

    logger.info(f"loaded AWS CDK app from {cdk_app_path}")
    logger.info(
        f"loaded AWS CDK app at {cdk_app_name}, entrypoint is {cdk_app_entrypoint}"
    )
    return cdk_function
Details like these are incredibly useful: 16 | 17 | * A reproducible test case or series of steps 18 | * The version of our code being used 19 | * Any modifications you've made relevant to the bug 20 | * Anything unusual about your environment or deployment 21 | 22 | 23 | ## Contributing via Pull Requests 24 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that: 25 | 26 | 1. You are working against the latest source on the *main* branch. 27 | 2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already. 28 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted. 29 | 30 | To send us a pull request, please: 31 | 32 | 1. Fork the repository. 33 | 2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change. 34 | 3. Ensure local tests pass. 35 | 4. Commit to your fork using clear commit messages. 36 | 5. Send us a pull request, answering any default questions in the pull request interface. 37 | 6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation. 38 | 39 | GitHub provides additional documentation on [forking a repository](https://help.github.com/articles/fork-a-repo/) and 40 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/). 41 | 42 | 43 | ## Finding contributions to work on 44 | 45 | Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any ['help wanted'](https://github.com/aws-solutions/audience-uploader-from-aws-clean-rooms/labels/help%20wanted) issues is a great place to start. 
46 | 47 | ## Code of Conduct 48 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 49 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 50 | opensource-codeofconduct@amazon.com with any additional questions or comments. 51 | 52 | 53 | ## Security issue notifications 54 | If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue. 55 | 56 | 57 | ## Licensing 58 | 59 | See the [LICENSE](https://github.com/aws-solutions/audience-uploader-from-aws-clean-rooms/blob/main/LICENSE.txt) file for our project's licensing. We will ask you to confirm the licensing of your contribution. 60 | 61 | We may ask you to sign a [Contributor License Agreement (CLA)](http://en.wikipedia.org/wiki/Contributor_License_Agreement) for larger changes. 62 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/interfaces.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

import logging
from dataclasses import dataclass
from typing import Union, List

import jsii
from aws_cdk import (
    ITemplateOptions,
    Stack,
    NestedStack,
    CfnParameter,
)

logger = logging.getLogger("cdk-helper")


@dataclass
class _TemplateParameter:
    """Stores information about a CloudFormation parameter, its label (description) and group"""

    name: str
    label: str
    group: str


class TemplateOptionsException(Exception):
    pass


@jsii.implements(ITemplateOptions)
class TemplateOptions:
    """Helper class for setting up template CloudFormation parameter groups, labels and solutions metadata"""

    def __init__(
        self,
        stack: Union[Stack, NestedStack],
        construct_id: str,
        description: str,
        filename: str,
    ):
        self.stack = stack
        self.filename = filename
        self._parameters: List[_TemplateParameter] = []
        # bug fix: previously a class-level mutable attribute shared by all instances
        self._metadata: dict = {}
        self.stack.template_options.description = description
        self.stack.template_options.metadata = self.metadata
        self._metadata = self._get_metadata()

        if not filename.endswith(".template"):
            raise TemplateOptionsException("template filenames must end with .template")

        # if this stack is a nested stack, record its CDK ID in the parent stack's resource to it
        # (default=None avoids an AttributeError on stacks without the attribute)
        if getattr(stack, "nested_stack_resource", None):
            stack.nested_stack_resource.add_metadata(
                "aws:solutions:templateid", construct_id
            )
            stack.nested_stack_resource.add_metadata(
                "aws:solutions:templatename", filename
            )

    @property
    def metadata(self) -> dict:
        return self._metadata

    def _get_metadata(self) -> dict:
        """Build the template metadata (parameter groups/labels plus solutions tags) and apply it to the stack."""
        pgs = set()
        # preserve first-seen order of groups while de-duplicating
        parameter_groups = [
            p.group
            for p in self._parameters
            if p.group not in pgs and not pgs.add(p.group)
        ]
        metadata = {
            "AWS::CloudFormation::Interface": {
                "ParameterGroups": [
                    {
                        "Label": {"default": parameter_group},
                        "Parameters": [
                            parameter.name
                            for parameter in self._parameters
                            if parameter.group == parameter_group
                        ],
                    }
                    for parameter_group in parameter_groups
                ],
                "ParameterLabels": {
                    parameter.name: {"default": parameter.label}
                    for parameter in self._parameters
                },
            },
            "aws:solutions:templatename": self.filename,
            "aws:solutions:solution_id": self.stack.node.try_get_context("SOLUTION_ID"),
            "aws:solutions:solution_version": self.stack.node.try_get_context(
                "SOLUTION_VERSION"
            ),
        }
        self.stack.template_options.metadata = metadata
        return metadata

    def add_parameter(self, parameter: CfnParameter, label: str, group: str):
        """Register a parameter under a label and group, refreshing the stack metadata."""
        self._parameters.append(_TemplateParameter(parameter.logical_id, label, group))
        self._metadata = self._get_metadata()
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

from lambda_helpers import *
import tiktok.uploader.lambda_handler
from tiktok.uploader.lambda_handler import *
from aws_xray_sdk.core import xray_recorder

xray_recorder.configure(context_missing='LOG_ERROR')


def test_get_tiktok_credentials(create_secrets, mocker):
    secrets_client, secret_name, secret_value = create_secrets
    mocker.patch.object(
        tiktok.uploader.lambda_handler, 'tiktok_uploader_credentials', secret_name
    )
    mocker.patch(
        'tiktok.uploader.lambda_handler.boto3.client', return_value=secrets_client
    )
    assert get_tiktok_credentials() == secret_value


def test_update_tiktok_credentials(create_secrets, mocker):
    secrets_client, secret_name, secret_value = create_secrets
    mocker.patch.object(
        tiktok.uploader.lambda_handler, 'tiktok_uploader_credentials', secret_name
    )
    mocker.patch(
        'tiktok.uploader.lambda_handler.boto3.client', return_value=secrets_client
    )
    assert get_tiktok_credentials() == secret_value
    update_tiktok_credentials(secret_name, secret_value)
    assert get_tiktok_credentials() == {
        "ACCESS_TOKEN": secret_name,
        "ADVERTISER_ID": secret_value,
    }


def test_build_url():
    path, query = "test_path", "test_query"
    expected = "https://business-api.tiktok.com/test_path?test_query"
    # a leading slash on the path must not change the result
    assert build_url("/" + path, query) == expected
    assert build_url(path, query) == expected


def test_get_custom_audience_obj():
    audiences = [
        {"name": "test_audience_name_1"},
        {"name": "test_audience_name_2"},
        {"name": "test_audience_name_3"},
    ]
    assert get_custom_audience_obj(audiences, "test_audience_name_1") == audiences[0]
    assert get_custom_audience_obj(audiences, "test_audience_name_3") == audiences[2]
    # unknown, empty and missing names all yield no match
    assert get_custom_audience_obj(audiences, "test_audience_name") is None
    assert get_custom_audience_obj(audiences, "") is None
    assert get_custom_audience_obj(audiences, None) is None


def test_get_upload_audience_info(mocker):
    mocker.patch.dict(os.environ, {"SUPPORTED_CALCULATE_TYPES": "TYPE_1,TYPE_2"})
    assert get_upload_audience_info("test1/test2/test3/PHONE_SHA256/test4.zip") == (
        "test4.zip",
        "PHONE_SHA256",
        "test3",
    )
    with pytest.raises(ValueError):
        get_upload_audience_info("test1/test4.zip")


def test_get_calculate_type(mocker):
    assert get_calculate_type("PHONE_SHA256") == "PHONE_SHA256"
    assert get_calculate_type("phone_sha256") == "PHONE_SHA256"
    assert get_calculate_type("idfa_sha256") == "IDFA_SHA256"
    with pytest.raises(ValueError):
        get_calculate_type("type_3")


def test_lambda_handler_happy_path(mocker):
    mocker.patch(
        "tiktok.uploader.lambda_handler.upload_custom_audience_data",
        return_value={"code": 0, "data": {"file_path": "test1/test2/test3/test4.zip"}},
    )
    mocker.patch(
        "tiktok.uploader.lambda_handler.check_custom_audience_exist",
        return_value={"audience_id": "test_audience_id"},
    )
    mocker.patch(
        "tiktok.uploader.lambda_handler.update_custom_audience_data",
        return_value={"code": 0},
    )
    mocker.patch("tiktok.uploader.lambda_handler.clean_up")
    assert lambda_handler(FAKE_CSV_EVENT, None) == {
        "statusCode": 200,
        "body": '"Custom Audience test3 is successfully updated in TikTok Ads!"',
    }
    # when the audience does not yet exist it is created instead of updated
    mocker.patch(
        "tiktok.uploader.lambda_handler.check_custom_audience_exist", return_value=None
    )
    mocker.patch(
        "tiktok.uploader.lambda_handler.create_custom_audience_data",
        return_value={"code": 0},
    )
    assert lambda_handler(FAKE_CSV_EVENT, None) == {
        "statusCode": 200,
        "body": '"Custom Audience test3 is successfully created to TikTok Ads!"',
    }
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

import logging
import shutil
import subprocess
from pathlib import Path
from typing import Union, Dict, Optional

import jsii
from aws_cdk import ILocalBundling, BundlingOptions

from aws_solutions.cdk.helpers import copytree

logger = logging.getLogger("cdk-helper")


class UnsupportedBuildEnvironment(Exception):
    pass


@jsii.implements(ILocalBundling)
class SolutionsJavaBundling:
    """This interface allows AWS Solutions to package lambda functions for Java without the use of Docker"""

    def __init__(
        self,
        to_bundle: Path,
        gradle_task: str,
        distribution_path: Path,
        gradle_test: Optional[str] = None,
    ):
        self.to_bundle = to_bundle
        self.gradle_task = gradle_task
        self.gradle_test = gradle_test
        self.distribution_path = distribution_path

    def try_bundle(self, output_dir: str, options: BundlingOptions) -> bool:  # NOSONAR
        """Run the gradle test/build tasks and copy the built jar/zip into output_dir.

        :param output_dir: directory the bundled artifact must be copied into
        :param options: CDK bundling options (unused; required by ILocalBundling)
        :raises UnsupportedBuildEnvironment: when the project is not a gradle build
        :raises ValueError: when the distribution path does not hold exactly one .jar/.zip
        :return: True when local bundling succeeded
        """
        source = Path(self.to_bundle).absolute()

        is_gradle_build = (source / "gradlew").exists()
        if not is_gradle_build:
            raise UnsupportedBuildEnvironment("please use a gradle project")

        # Run Tests
        if self.gradle_test:
            self._invoke_local_command(
                name="gradle",
                command=["./gradlew", self.gradle_test],
                cwd=source,
            )

        # Run Build
        self._invoke_local_command(
            name="gradle",
            command=["./gradlew", self.gradle_task],
            cwd=source,
        )

        # if the distribution path is a path - it should only contain one jar or zip
        if self.distribution_path.is_dir():
            children = [child for child in self.distribution_path.iterdir()]
            if len(children) != 1:
                raise ValueError(
                    "if the distribution path is a path it should only contain one jar or zip file"
                )
            if children[0].suffix not in (".jar", ".zip"):
                raise ValueError(
                    "the distribution path does not include a single .jar or .zip file"
                )
            copytree(self.distribution_path, output_dir)
        elif self.distribution_path.is_file():
            suffix = self.distribution_path.suffix
            if suffix not in (".jar", ".zip"):
                raise ValueError("the distribution file is not a .zip or .jar file")
            shutil.copy(self.distribution_path, output_dir)

        return True

    def _invoke_local_command(
        self,
        name,
        command,
        env: Union[Dict, None] = None,
        cwd: Union[str, Path, None] = None,
        return_stdout: bool = False,
    ):
        """Run `command`, streaming its stdout to the logger.

        :param name: label used in log lines
        :param command: argv list to execute (never run through a shell)
        :param env: optional environment for the child process
        :param cwd: optional working directory
        :param return_stdout: when True, return the captured stdout (stripped)
        :raises subprocess.CalledProcessError: when the command exits nonzero
        """
        # bug fix: Path(None) raises TypeError although the signature allows cwd=None
        cwd = Path(cwd) if cwd is not None else None
        rv = ""

        with subprocess.Popen(
            command,
            shell=False,  # argv list, no shell interpolation
            stdout=subprocess.PIPE,
            universal_newlines=True,
            cwd=cwd,
            env=env,
        ) as p:
            for line in p.stdout:
                logger.info("%s %s: %s" % (self.to_bundle.name, name, line.rstrip()))
                if return_stdout:
                    rv += line
            # returncode is None until the child is reaped - wait explicitly so the
            # check below never observes an unset exit status
            p.wait()

        if p.returncode != 0:
            raise subprocess.CalledProcessError(p.returncode, p.args)

        return rv.strip()
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

from lambda_helpers import *
from snap.uploader.lambda_handler import *
from aws_xray_sdk.core import xray_recorder

xray_recorder.configure(context_missing='LOG_ERROR')


def test_get_snap_credentials(create_secrets, mocker):
    secrets_client, secret_name, secret_value = create_secrets
    mocker.patch('snap.uploader.lambda_handler.get_service_client', return_value=secrets_client)
    assert get_snap_credentials(secret_name) == secret_value


def test_update_snap_credentials(create_secrets, mocker):
    secrets_client, secret_name, secret_value = create_secrets
    mocker.patch('snap.uploader.lambda_handler.get_service_client', return_value=secrets_client)
    assert get_snap_credentials(secret_name) == secret_value
    update_snap_credentials(secret_name, TEST_CREDENTIALS_2)
    assert get_snap_credentials(secret_name) == TEST_CREDENTIALS_2


def test_refresh_token(mocker):
    http_mock = mocker.MagicMock()
    http_mock.decode.return_value = """{"refresh_token": "test"}"""
    http_mock.read.return_value = http_mock
    mocker.patch("snap.uploader.lambda_handler.urllib.request.urlopen", return_value=http_mock)

    expected_expiry = datetime.now() + timedelta(seconds=1800)
    actual_expiry = datetime.strptime(
        refresh_token(TEST_CREDENTIALS, TEST_CREDENTIALS)["expires_at"],
        "%Y-%m-%d %H:%M:%S",
    )
    # the expiry times should agree to within the allowed offset
    delta = expected_expiry - actual_expiry
    assert timedelta(seconds=-EXPECTED_EXPIRY_OFFSET) <= delta <= timedelta(seconds=EXPECTED_EXPIRY_OFFSET)


def test_add_users(requests_mock):
    requests_mock.post(
        f"https://adsapi.snapchat.com/v1/segments/{TEST_SEGMENT_ID}/users",
        json=RESPONSE_SUCCESS,
    )
    assert add_users("", TEST_SEGMENT_ID, "", "") == RESPONSE_SUCCESS


def test_get_segment_id_by_name(requests_mock):
    requests_mock.get(
        f"https://adsapi.snapchat.com/v1/adaccounts/{TEST_CREDENTIALS['ad_account_id']}/segments",
        json=TEST_SEGMENT_DATA,
    )

    # a matching segment name resolves to its ID
    assert get_segment_id_by_name(TEST_CREDENTIALS, TEST_CREDENTIALS, "segment2") == 2

    # an unknown segment name resolves to 0
    assert get_segment_id_by_name(TEST_CREDENTIALS, TEST_CREDENTIALS, "segment") == 0


def test_user_hash():
    assert user_hash(["a, b, c", "d, e", "f", "g, h", "i, j, k"]) == [
        ["a", "b", "c"],
        ["d", "e"],
        ["f"],
        ["g", "h"],
        ["i", "j", "k"],
    ]


def test_is_token_expired(create_times):
    expired, unexpired = create_times
    assert is_token_expired(expired)
    assert not is_token_expired(unexpired)


def test_lambda_handler(mocker):
    frame_mock = mocker.MagicMock()
    frame_mock.read_csv.return_value = frame_mock
    frame_mock.groupby.return_value = frame_mock
    frame_mock.groups.keys.return_value = ["EMAIL_SHA256"]
    frame_mock.get_group.return_value = SCHEMA_HASH_VALUES

    mocker.patch("snap.uploader.lambda_handler.get_snap_credentials", return_value=TEST_CREDENTIALS)
    mocker.patch("snap.uploader.lambda_handler.is_token_expired", return_value=True)
    mocker.patch("snap.uploader.lambda_handler.refresh_token", return_value=TEST_CREDENTIALS)
    mocker.patch("snap.uploader.lambda_handler.update_snap_credentials")
    mocker.patch("snap.uploader.lambda_handler.get_segment_id_by_name", return_value=1)
    mocker.patch("snap.uploader.lambda_handler.s3_client.get_object", return_value={"Body": "test_body"})
    mocker.patch("snap.uploader.lambda_handler.pd.read_csv", return_value=frame_mock)
    mocker.patch("snap.uploader.lambda_handler.add_users", return_value=SUCCESSFUL_UPLOAD_2)

    assert lambda_handler(FAKE_GZ_EVENT, None)["uploader"]["response"] == SUCCESSFUL_UPLOAD_2

    # no schema groups found in the uploaded file
    frame_mock.groups.keys.return_value = []
    assert lambda_handler(FAKE_GZ_EVENT, None)["uploader"]["response"] == "no schemas were found"

    # unsupported file extension
    assert lambda_handler(FAKE_CSV_EVENT, None)["uploader"]["response"] == "not a supported file"
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

from constructs import Construct
from aws_solutions.cdk.aws_lambda.python.function import SolutionsPythonFunction
from aws_cdk import (
    aws_lambda as _lambda,
    aws_iam as iam,
    CfnOutput,
    Duration,
    aws_lambda_destinations as _lambda_dest,
)
from pathlib import Path
from lib.base_uploader_stack import BaseUploaderStack
from lib.secrets.tiktok_secrets import TiktokSecrets


class TiktokUploaderStack(BaseUploaderStack):
    """Uploader stack for TikTok: provisions the credential secret, the
    failure-destination queue, and the SQS-driven segment-uploader Lambda."""

    TARGET_PLATFORM = "tiktok"

    def __init__(self, scope: Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # Secret holding the TikTok API credentials read by the uploader Lambda.
        self.tiktok_secrets = TiktokSecrets(self, "TiktokSecrets")

        self.add_lambda_dest_failure_queue()
        self.add_tiktok_lambda()

        # Drive the uploader from the shared queue created by the base stack.
        self.add_lambda_event_source(self.tiktok_uploader_lambda, self.queue)

    ##############################################################################
    # Tiktok Lambda functions
    ##############################################################################

    def add_tiktok_lambda(self):
        """Create the TikTok segment-uploader Lambda plus its IAM policies,
        layers, environment, and on-failure SQS destination."""
        # Read access to the ETL artifact buckets produced upstream.
        # NOTE: IAM action matching is case-insensitive, so the "S3:" prefix
        # works; "s3:" is the conventional spelling. Removed a duplicated
        # "S3:ListBucket" entry that was listed twice.
        s3_read_policy_stmt = iam.PolicyStatement(
            resources=[
                "arn:aws:s3:::uploader-etl-artifacts*"
            ],
            actions=[
                "S3:ListBucket",
                "S3:GetObjectTagging",
                "S3:GetObject",
                "S3:PutBucketNotification",
            ],
        )

        # kms:Decrypt on "*" lets the Lambda read from the KMS-encrypted queue;
        # consider scoping to the queue's key ARN if it becomes available here.
        queue_decrypt_policy_stmt = iam.PolicyStatement(
            actions=["kms:Decrypt"],
            resources=["*"],
        )

        # 580247275435 is the AWS-owned account that publishes Lambda Insights.
        layer_arn = f"arn:aws:lambda:{self.region}:580247275435:layer:LambdaInsightsExtension:21"

        # segment uploader for SQS
        self.tiktok_uploader_lambda = SolutionsPythonFunction(
            self,
            "tiktok-uploader-segment-sqs",
            entrypoint=Path(__file__).parent.parent.parent.absolute() / "aws_lambda" / "tiktok" / "uploader" / "lambda_handler.py",
            function="lambda_handler",
            runtime=_lambda.Runtime.PYTHON_3_9,
            description="activate users to segment",
            timeout=Duration.seconds(900),
            memory_size=256,
            reserved_concurrent_executions=2,
            insights_version=_lambda.LambdaInsightsVersion.from_insight_version_arn(
                layer_arn
            ),
            tracing=_lambda.Tracing.ACTIVE,
            environment={
                "CRED_SECRET_NAME": self.tiktok_secrets.tiktok_uploader_secret.secret_name,
                "SOLUTION_ID": self.solution_id,
                "SOLUTION_VERSION": self.solution_version
            },
            layers=[
                # AWS-published Data Wrangler layer (account 336392948345).
                _lambda.LayerVersion.from_layer_version_arn(
                    self,
                    "datawrangler-02",
                    f"arn:aws:lambda:{self.region}:336392948345:layer:AWSDataWrangler-Python39:9",
                ),
                self.layer_solutions
            ],
            on_failure=_lambda_dest.SqsDestination(self.lambda_dest_failure_queue),
        )

        # Add inline policy to the lambda
        self.tiktok_uploader_lambda.add_to_role_policy(s3_read_policy_stmt)
        self.tiktok_uploader_lambda.add_to_role_policy(queue_decrypt_policy_stmt)

        # Grant read and write on the credential secret (the Lambda refreshes
        # and writes back OAuth tokens).
        self.tiktok_secrets.tiktok_uploader_secret.grant_read(
            self.tiktok_uploader_lambda
        )
        self.tiktok_secrets.tiktok_uploader_secret.grant_write(
            self.tiktok_uploader_lambda
        )

        # Surface the secret name so operators can locate it after deploy.
        CfnOutput(self, "tiktokCredentialsSecretName", value=self.tiktok_secrets.tiktok_uploader_secret.secret_name)
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

from constructs import Construct
from aws_solutions.cdk.aws_lambda.python.function import SolutionsPythonFunction
from aws_cdk import (
    aws_lambda as _lambda,
    aws_iam as iam,
    CfnOutput,
    Duration,
    aws_lambda_destinations as _lambda_dest,
)
from pathlib import Path
from lib.base_uploader_stack import BaseUploaderStack
from lib.secrets.snap_secrets import SnapSecrets


class SnapUploaderStack(BaseUploaderStack):
    """Uploader stack for Snap: provisions the credential and OAuth-refresh
    secrets, the failure-destination queue, and the segment-uploader Lambda."""

    TARGET_PLATFORM = "snap"

    def __init__(self, scope: Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # Secrets holding the Snap API credentials and OAuth refresh token.
        self.snap_secrets = SnapSecrets(self, "SnapSecrets")

        self.add_lambda_dest_failure_queue()
        self.add_snap_lambda()

        # Drive the uploader from the shared queue created by the base stack.
        self.add_lambda_event_source(self.snap_uploader_lambda, self.queue)

    ##############################################################################
    # Snap Lambda functions
    ##############################################################################

    def add_snap_lambda(self):
        """Create the Snap segment-uploader Lambda plus its IAM policies,
        layers, environment, and on-failure SQS destination."""
        # Read access to the ETL artifact buckets produced upstream.
        # NOTE: IAM action matching is case-insensitive, so the "S3:" prefix
        # works; "s3:" is the conventional spelling. Removed a duplicated
        # "S3:ListBucket" entry that was listed twice.
        s3_read_policy_stmt = iam.PolicyStatement(
            resources=["arn:aws:s3:::uploader-etl-artifacts*"],
            actions=[
                "S3:ListBucket",
                "S3:GetObjectTagging",
                "S3:GetObject",
                "S3:PutBucketNotification",
            ],
        )

        # kms:Decrypt on "*" lets the Lambda read from the KMS-encrypted queue;
        # consider scoping to the queue's key ARN if it becomes available here.
        queue_decrypt_policy_stmt = iam.PolicyStatement(
            actions=["kms:Decrypt"],
            resources=["*"],
        )

        # 580247275435 is the AWS-owned account that publishes Lambda Insights.
        layer_arn = f"arn:aws:lambda:{self.region}:580247275435:layer:LambdaInsightsExtension:21"

        # segment uploader
        self.snap_uploader_lambda = SolutionsPythonFunction(
            self,
            "snap-uploader-segment",
            entrypoint=Path(__file__).parent.parent.parent.absolute() / "aws_lambda" / "snap" / "uploader" / "lambda_handler.py",
            function="lambda_handler",
            runtime=_lambda.Runtime.PYTHON_3_9,
            description="activate users to segment",
            timeout=Duration.seconds(900),
            memory_size=256,
            reserved_concurrent_executions=2,
            insights_version=_lambda.LambdaInsightsVersion.from_insight_version_arn(
                layer_arn
            ),
            tracing=_lambda.Tracing.ACTIVE,
            environment={
                "REFRESH_SECRET_NAME": self.snap_secrets.oauth_refresh_secret.secret_name,
                "CRED_SECRET_NAME": self.snap_secrets.snap_uploader_secret.secret_name,
                "SOLUTION_ID": self.solution_id,
                "SOLUTION_VERSION": self.solution_version
            },
            layers=[
                # AWS-published Data Wrangler layer (account 336392948345).
                _lambda.LayerVersion.from_layer_version_arn(
                    self,
                    "datawrangler-02",
                    f"arn:aws:lambda:{self.region}:336392948345:layer:AWSDataWrangler-Python39:9",
                ),
                self.layer_solutions
            ],
            on_failure=_lambda_dest.SqsDestination(self.lambda_dest_failure_queue),
        )

        # Add inline policy to the lambda
        self.snap_uploader_lambda.add_to_role_policy(s3_read_policy_stmt)
        self.snap_uploader_lambda.add_to_role_policy(queue_decrypt_policy_stmt)

        # Add read secret permissions for both secrets and write to oAuth
        # (the Lambda refreshes the OAuth token and writes it back).
        self.snap_secrets.oauth_refresh_secret.grant_read(self.snap_uploader_lambda)
        self.snap_secrets.snap_uploader_secret.grant_read(self.snap_uploader_lambda)
        self.snap_secrets.oauth_refresh_secret.grant_write(self.snap_uploader_lambda)

        # Surface the secret names so operators can locate them after deploy.
        CfnOutput(self, "snapCredentialsOauthRefreshSecretName", value=self.snap_secrets.oauth_refresh_secret.secret_name)
        CfnOutput(self, "snapCredentialsSecretName", value=self.snap_secrets.snap_uploader_secret.secret_name)
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

from pathlib import Path
from typing import Optional

import aws_cdk.aws_iam as iam
from aws_cdk import (
    BundlingOptions,
    BundlingOutput,
    Aws,
    DockerImage,
)
from aws_cdk.aws_lambda import Function, Runtime, RuntimeFamily, Code
from constructs import Construct

from aws_solutions.cdk.aws_lambda.java.bundling import SolutionsJavaBundling


class SolutionsJavaFunction(Function):
    """This is similar to aws-cdk/aws-lambda-python, however it handles local building of Java Lambda Functions"""

    def __init__(
        self,  # NOSONAR
        scope: Construct,
        construct_id: str,
        project_path: Path,
        distribution_path: str,
        gradle_task: str,
        gradle_test: Optional[str] = None,
        **kwargs,
    ):
        """Build and register a Java Lambda function.

        :param scope: CDK construct scope
        :param construct_id: logical id for this construct
        :param project_path: directory containing the Gradle project to build
        :param distribution_path: path (under the project) holding the built
            .jar/.zip distribution to deploy
        :param gradle_task: Gradle task that produces the distribution
        :param gradle_test: optional Gradle task to run tests before bundling
        :raises ValueError: if project_path is not a directory, the runtime is
            not a Java runtime, or a `code` kwarg is supplied
        """
        self.scope = scope
        self.construct_id = construct_id
        self.project_path = project_path
        self.gradle_task = gradle_task
        self.gradle_test = gradle_test

        if not project_path.is_dir():
            raise ValueError(
                f"project_path {project_path} must be a directory, not a file"
            )

        # create default least privileged role for this function unless a role is passed
        if not kwargs.get("role"):
            kwargs["role"] = self._create_role()

        # Java 11 is the default runtime (Lambda supports 8/ 11)
        if not kwargs.get("runtime"):
            kwargs["runtime"] = Runtime.JAVA_11

        if kwargs["runtime"].family != RuntimeFamily.JAVA:
            raise ValueError(
                f"SolutionsJavaFunction must use a Java runtime ({kwargs['runtime']} was provided)"
            )

        # This Construct will handle the creation of the 'code' parameter.
        # (Message fixed: it previously described the Python variant's
        # `entrypoint`/`function` parameters.)
        if kwargs.get("code"):
            raise ValueError(
                "SolutionsJavaFunction builds `code` itself from `project_path`, "
                "`gradle_task` and `distribution_path` - do not pass `code` directly"
            )

        # Local bundler runs Gradle on the host; the Docker image/command below
        # are placeholders that are never used when local bundling succeeds.
        bundling = SolutionsJavaBundling(
            to_bundle=project_path,
            gradle_task=gradle_task,
            gradle_test=gradle_test,
            distribution_path=distribution_path,
        )

        kwargs["code"] = Code.from_asset(
            path=str(project_path),
            bundling=BundlingOptions(
                image=DockerImage.from_registry("scratch"),  # NOT USED
                command=["NOT-USED"],
                entrypoint=["NOT-USED"],
                local=bundling,
                output_type=BundlingOutput.ARCHIVED,
            ),
        )
        super().__init__(scope, construct_id, **kwargs)

    def _create_role(self) -> iam.Role:
        """
        Build a role that allows an AWS Lambda Function to log to CloudWatch.
        The role's logical id is derived from this construct's id ("{construct_id}-Role").
        :return: aws_cdk.aws_iam.Role
        """
        return iam.Role(
            self.scope,
            f"{self.construct_id}-Role",
            assumed_by=iam.ServicePrincipal("lambda.amazonaws.com"),
            inline_policies={
                "LambdaFunctionServiceRolePolicy": iam.PolicyDocument(
                    statements=[
                        iam.PolicyStatement(
                            actions=[
                                "logs:CreateLogGroup",
                                "logs:CreateLogStream",
                                "logs:PutLogEvents",
                            ],
                            resources=[
                                f"arn:{Aws.PARTITION}:logs:{Aws.REGION}:{Aws.ACCOUNT_ID}:log-group:/aws/lambda/*"
                            ],
                        )
                    ]
                )
            },
        )
#!/bin/bash
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

#
# This assumes all of the OS-level configuration has been completed and git repo has already been cloned
#
# This script should be run from the repo's deployment directory
# cd deployment
# ./run-unit-tests.sh
#

# Fail fast: exit on any error, undefined variable, or pipeline failure;
# -x echoes every command for CI log visibility.
set -exuo pipefail

# Get reference for all important folders
template_dir="$PWD"
source_dir="$(cd $template_dir/../source; pwd -P)"
root_dir="$template_dir/.."
mkdir -p "$template_dir/cfn-templates"

echo "------------------------------------------------------------------------------"
echo "[Init] Clean old folders"
echo "------------------------------------------------------------------------------"

# Remove any stale virtualenv so the dependency install below starts clean.
cd $root_dir
if [ -d ".venv" ]; then
  rm -rf ".venv"
fi

echo "------------------------------------------------------------------------------"
echo "Copy templates for unit tests"
echo "------------------------------------------------------------------------------"
cp "$template_dir/uploader-from-clean-rooms.yaml" "$template_dir/cfn-templates/uploader-from-clean-rooms.yaml"
cp "$template_dir/uploader-from-clean-rooms-glue.yaml" "$template_dir/cfn-templates/uploader-from-clean-rooms-glue.yaml"
cp "$template_dir/uploader-from-clean-rooms-auth.yaml" "$template_dir/cfn-templates/uploader-from-clean-rooms-auth.yaml"
cp "$template_dir/uploader-from-clean-rooms-web.yaml" "$template_dir/cfn-templates/uploader-from-clean-rooms-web.yaml"

echo "------------------------------------------------------------------------------"
echo "[Env] Create virtual environment and install dependencies"
echo "------------------------------------------------------------------------------"

python3 -m venv .venv
source .venv/bin/activate

cd $source_dir
pip install --upgrade pip
pip install -r $source_dir/requirements-dev.txt
cd -

echo "------------------------------------------------------------------------------"
echo "Audience Uploader from AWS Clean Rooms API Stack"
echo "------------------------------------------------------------------------------"

echo "Building API Lambda handler"
cd "$source_dir/api" || exit 1
[ -e dist ] && rm -rf dist
mkdir -p dist
# Chalice packages the API Lambda; bail out early if it is not on PATH.
if ! [ -x "$(command -v chalice)" ]; then
  echo 'Chalice is not installed. It is required for this solution. Exiting.'
  exit 1
fi

# Remove chalice deployments to force redeploy when there are changes to configuration only
# Otherwise, chalice will use the existing deployment package
[ -e .chalice/deployments ] && rm -rf .chalice/deployments

echo "Running chalice..."
chalice package --merge-template external_resources.json dist
echo "Finished running chalice."
echo "cp ./dist/sam.json $template_dir/cfn-templates/uploader-from-clean-rooms-api.yaml"
cp dist/sam.json "$template_dir/cfn-templates/uploader-from-clean-rooms-api.yaml"
# NOTE(review): with `set -e` a failed cp exits before this check runs,
# so this branch is effectively dead - kept for clarity of intent.
if [ $? -ne 0 ]; then
  echo "ERROR: Failed to build api template"
  exit 1
fi
rm -rf ./dist

echo "------------------------------------------------------------------------------"
echo "[Test] Run pytest with coverage"
echo "------------------------------------------------------------------------------"
cd $source_dir
# setup coverage report path
coverage_report_path=$source_dir/tests/coverage-reports/source.coverage.xml
echo "coverage report path set to $coverage_report_path"

# tiktok/snap test modules are excluded here; they are run separately.
pytest --cov --cov-report term-missing --cov-report term --cov-report "xml:$coverage_report_path" --ignore-glob='*tiktok*.py' --ignore-glob='*snap*.py'

# The pytest --cov with its parameters and .coveragerc generates a xml cov-report with `coverage/sources` list
# with absolute path for the source directories.
#!/usr/bin/env python3

# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

import jsii

import aws_cdk as cdk
from aws_cdk import aws_applicationinsights as applicationinsights
from aws_cdk import aws_servicecatalogappregistry_alpha as appreg

from constructs import Construct, IConstruct


@jsii.implements(cdk.IAspect)
class AppRegistry(Construct):
    """This construct creates the resources required for AppRegistry and injects them as Aspects"""

    def __init__(self, scope: Construct, id: str):
        """Read the solution metadata from the CDK context.

        :param scope: the app/stack whose context supplies SOLUTION_* values
        :param id: logical id for this construct
        """
        super().__init__(scope, id)
        self.solution_name = scope.node.try_get_context("SOLUTION_NAME")
        self.app_registry_name = scope.node.try_get_context("APP_REGISTRY_NAME")
        self.solution_id = scope.node.try_get_context("SOLUTION_ID")
        self.solution_version = scope.node.try_get_context("SOLUTION_VERSION")
        self.application_type = scope.node.try_get_context("APPLICATION_TYPE")
        # Lazily created on the first stack visited (see visit()).
        self.application: appreg.Application = None

    def visit(self, node: IConstruct) -> None:
        """The visitor method invoked during cdk synthesis"""
        if isinstance(node, cdk.Stack):
            if not node.nested:
                # parent stack: create the application plus all metadata
                # (attribute group, tags, Application Insights) and associate.
                stack: cdk.Stack = node
                self.__create_app_for_app_registry()
                self.application.associate_stack(stack)
                self.__create_attribute_group()
                self.__add_tags_for_application()
                self.__create_app_for_app_insights()
            else:
                # nested stack: only associate with the (possibly lazily
                # created) application - no extra metadata.
                if not self.application:
                    self.__create_app_for_app_registry()

                self.application.associate_stack(node)

    def __create_app_for_app_registry(self) -> None:
        """Method to create an AppRegistry Application"""
        self.application = appreg.Application(
            self,
            "RegistrySetup",
            # Fn.join keeps the name a deploy-time expression of STACK_NAME.
            application_name=cdk.Fn.join("-", [cdk.Aws.STACK_NAME, self.app_registry_name]),
            description=f"Service Catalog application to track and manage all your resources for the solution {self.solution_name}",
        )

    def __add_tags_for_application(self) -> None:
        """Method to add tags to the AppRegistry's Application instance"""
        if not self.application:
            self.__create_app_for_app_registry()

        cdk.Tags.of(self.application).add("Solutions:SolutionID", self.solution_id)
        cdk.Tags.of(self.application).add("Solutions:SolutionName", self.solution_name)
        cdk.Tags.of(self.application).add("Solutions:SolutionVersion", self.solution_version)
        cdk.Tags.of(self.application).add("Solutions:ApplicationType", self.application_type)

    def __create_attribute_group(self) -> None:
        """Method to add attributes to be as associated with the Application's instance in AppRegistry"""
        if not self.application:
            self.__create_app_for_app_registry()

        self.application.associate_attribute_group(
            appreg.AttributeGroup(
                self,
                "AppAttributes",
                attribute_group_name=cdk.Aws.STACK_NAME,
                description="Attributes for Solutions Metadata",
                attributes={
                    "applicationType": self.application_type,
                    "version": self.solution_version,
                    "solutionID": self.solution_id,
                    "solutionName": self.solution_name,
                },
            )
        )

    def __create_app_for_app_insights(self) -> None:
        """Method to create resources to enable application insights"""
        if not self.application:
            self.__create_app_for_app_registry()

        # The CfnApplication must be created after the AppRegistry application
        # exists, hence the explicit dependency on its underlying CfnResource.
        dependent_resource: cdk.CfnResource = self.application.node.default_child

        applicationinsights.CfnApplication(
            self,
            "AppInsights",
            # Resource group name follows the AppRegistry naming convention:
            # AWS_AppRegistry_Application-<stack>-<registry name>.
            resource_group_name=cdk.Fn.join(
                "-",
                [
                    "AWS_AppRegistry_Application",
                    cdk.Aws.STACK_NAME,
                    self.app_registry_name,
                ],
            ),
            auto_configuration_enabled=True,
            cwe_monitor_enabled=True,
            ops_center_enabled=True,
            # NOTE(review): add_depends_on is the older CfnResource API;
            # newer CDK versions name this add_dependency - confirm pin.
        ).add_depends_on(dependent_resource)
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

# Unit tests for the Glue nested stack: synthesize the parent uploader stack
# once per module, extract the GlueStack nested template, and assert on the
# resources it declares.

import aws_cdk.assertions as assertions
import pytest
from pathlib import Path
from aws_cdk import cloudformation_include as cfn_inc
from aws_cdk.assertions import Template, Match, Capture
import aws_cdk as cdk
from lib.uploader_stack import UploaderStack
from aws_solutions.cdk import CDKSolution

# Create the stack in a fixture - pass the fixture to each test
@pytest.fixture(scope="module")
def synth_nested_template():

    # Minimal context needed for the stack to synthesize under test.
    FAKE_CONTEXT = {
        "SOLUTION_ID": "SO0226",
        "SOLUTION_VERSION": "V1.0.0",
        "BUCKET_NAME": "FAKEBUCKETNAME",
        "SOLUTION_NAME": "FAKESOLUTIONNAME",
        "APP_REGISTRY_NAME": "FAKEAPPREGISTRYNAME",
        "VERSION": "v1.0.0",
    }
    app = cdk.App(context=FAKE_CONTEXT)
    solution = CDKSolution(cdk_json_path=Path(__file__).parent.parent.parent.absolute() / "infrastructure/cdk.json")

    uploader_stack = UploaderStack(
        app,
        "uploader",
        stack_name=app.node.try_get_context("STACK_NAME"),
        description=f"Audience Uploader from AWS Clean Rooms Solution CDK stack",
        template_filename="audience-uploader-from-aws-clean-rooms.template",
        synthesizer=solution.synthesizer,
    )
    # these next two lines of code will work once we get the JSON error fixed!
    # snap_uploader = uploader_stack.snap_stack
    glue_stack = uploader_stack.cf_stack.get_nested_stack("GlueStack")
    # web_stack = uploader_stack.cf_stack.get_nested_stack("WebStack")
    # api_stack = uploader_stack.cf_stack.get_nested_stack("ApiStack")

    template = Template.from_stack(glue_stack.stack)
    yield template


# there are two roles for this nested stack
def test_iam_role_creation(synth_nested_template):
    template = synth_nested_template
    template.resource_count_is("AWS::IAM::Role", 2)

    # At least one role must be assumable by Lambda.
    template.has_resource_properties(
        "AWS::IAM::Role",
        {
            "AssumeRolePolicyDocument": {
                "Version": "2012-10-17",
                "Statement": [
                    {
                        "Effect": "Allow",
                        "Principal": {"Service": ["lambda.amazonaws.com"]},
                        "Action": ["sts:AssumeRole"],
                    }
                ],
            }
        },
    )


def test_lambda_creation(synth_nested_template):
    template = synth_nested_template
    template.resource_count_is("AWS::Lambda::Function", 1)

    template.has_resource_properties(
        "AWS::Lambda::Function",
        {
            "Handler": "index.lambda_handler",
            "MemorySize": 256,
            "Runtime": "python3.9",
            "Tags": [{"Key": "environment", "Value": Match.any_value()}],
            "Timeout": 900,
        },
    )


def test_glue_job_creation(synth_nested_template):
    template = synth_nested_template
    template.resource_count_is("AWS::Glue::Job", 1)

    # Captures pull the synthesized intrinsic values out of the template so
    # the role reference can be cross-checked below.
    role_arn = Capture()
    artifact_bucket_name = Capture()

    template.has_resource_properties(
        "AWS::Glue::Job",
        {
            "Command": {
                "Name": "glueetl",
                "PythonVersion": "3",
                "ScriptLocation": {
                    "Fn::Join": [
                        "",
                        [
                            {"Fn::Sub": artifact_bucket_name},  # "s3://${ArtifactBucketName}/"
                            {"Fn::FindInMap": ["Glue", "Script", "Filename"]},
                        ],
                    ]
                },
            },
            "Role": {"Fn::GetAtt": role_arn},
            "DefaultArguments": {
                "--job-bookmark-option": "job-bookmark-enable",
                "--job-language": "python",
                "--extra-py-files": "s3://aws-data-wrangler-public-artifacts/releases/2.14.0/awswrangler-2.14.0-py3-none-any.whl",
                "--additional-python-modules": "awswrangler==2.14.0",
                "--source_bucket": {"Fn::Sub": Match.any_value()},  # "${DataBucketName}"
                "--output_bucket": {"Fn::Sub": Match.any_value()},  # "${ArtifactBucketName}"
                "--source_key": "",
                "--pii_fields": "",
            },
            "Description": Match.any_value(),
            "ExecutionProperty": {"MaxConcurrentRuns": 2},
            "GlueVersion": "3.0",
            "MaxRetries": 0,
            "Name": {"Fn::Sub": Match.any_value()},  # "${AWS::StackName}-amc-transformation-job"
            "NumberOfWorkers": 2,
            "WorkerType": "Standard",
        },
    )

    # assert that the user role is correctly created
    assert template.to_json()["Resources"][role_arn.as_string().replace(".Arn", "")]["Type"] == "AWS::IAM::Role"
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

from pathlib import Path
from typing import Optional, List

from aws_cdk import Duration
from aws_cdk.aws_events import EventBus
from aws_cdk.aws_lambda import Tracing, Runtime, RuntimeFamily
from aws_cdk.aws_stepfunctions import IChainable, TaskInput, State
from constructs import Construct

from aws_solutions.cdk.aws_lambda.environment import Environment
from aws_solutions.cdk.aws_lambda.python.function import SolutionsPythonFunction
from aws_solutions.cdk.cfn_nag import add_cfn_nag_suppressions, CfnNagSuppression
from aws_solutions.cdk.stepfunctions.solution_fragment import SolutionFragment


class SolutionStep(Construct):
    """Base construct pairing a Python Lambda function with a Step Functions
    task fragment. Subclasses must implement _set_permissions()."""

    def __init__(
        self,  # NOSONAR (python:S107) - allow large number of method parameters
        scope: Construct,
        id: str,
        function: str = "lambda_handler",
        entrypoint: Path = None,
        input_path: str = "$",
        result_path: str = "$",
        output_path: str = "$",
        payload: Optional[TaskInput] = None,
        layers=None,
        failure_state: Optional[IChainable] = None,
        libraries: Optional[List[Path]] = None,
    ):
        """Create the step's Lambda function and remember the state-machine
        path defaults used later by state().

        :param function: handler name inside the entrypoint module
        :param entrypoint: Path to the .py file AWS Lambda invokes (required)
        :param input_path/result_path/output_path: default JSONPath settings
            for the generated Step Functions task
        :param payload: default TaskInput payload for the task
        :param layers: extra Lambda layers for the function
        :param failure_state: default state to chain to on task failure
        :param libraries: extra library Paths bundled with the function
        """
        super().__init__(scope, f"{id} Solution Step")

        self.function = self._CreateLambdaFunction(
            self,
            f"{self._snake_case(id)}_fn",
            layers=layers,
            function=function,
            entrypoint=entrypoint,
            libraries=libraries,
        )
        # X-Ray tracing requires a wildcard resource; suppress the cfn-nag
        # warning on the generated default policy.
        add_cfn_nag_suppressions(
            self.function.role.node.try_find_child("DefaultPolicy").node.find_child(
                "Resource"
            ),
            [
                CfnNagSuppression(
                    "W12", "IAM policy for AWS X-Ray requires an allow on *"
                )
            ],
        )

        self._input_path = input_path
        self._result_path = result_path
        self._output_path = output_path
        self._payload = payload
        self._failure_state = failure_state

        # Template-method hooks: subclasses add resources and permissions.
        self._create_resources()
        self._set_permissions()
        self.environment = self._set_environment()

    def state(
        self,  # NOSONAR (python:S107) - allow large number of method parameters
        scope: Construct,
        construct_id,
        payload: Optional[TaskInput] = None,
        input_path: Optional[str] = None,
        result_path: Optional[str] = None,
        result_selector: Optional[str] = None,
        output_path: Optional[str] = None,
        failure_state: Optional[State] = None,
        **kwargs,
    ):
        """Return a SolutionFragment (Step Functions task) for this step.
        Per-call arguments override the defaults captured in __init__."""
        payload = payload or self._payload
        input_path = input_path or self._input_path
        result_path = result_path or self._result_path
        output_path = output_path or self._output_path
        failure_state = failure_state or self._failure_state

        return SolutionFragment(
            scope,
            construct_id,
            function=self.function,
            payload=payload,
            input_path=input_path,
            result_path=result_path,
            output_path=output_path,
            failure_state=failure_state,
            result_selector=result_selector,
            **kwargs,
        )

    def _snake_case(self, name) -> str:
        """Convert a display name like "My Step" to "my_step"."""
        return name.replace(" ", "_").lower()

    def _set_permissions(self) -> None:
        # Abstract: subclasses grant the function its IAM permissions.
        raise NotImplementedError("please implement _set_permissions")

    def grant_put_events(self, bus: EventBus):
        """Allow the step's function to put events on `bus` and expose the
        bus ARN to the handler via the EVENT_BUS_ARN environment variable."""
        self.function.add_environment("EVENT_BUS_ARN", bus.event_bus_arn)
        bus.grant_put_events_to(self.function)

    def _create_resources(self) -> None:
        pass  # not required

    def _set_environment(self) -> Environment:
        # Wrap the function's environment so subclasses can add variables.
        return Environment(self.function)

    class _CreateLambdaFunction(SolutionsPythonFunction):
        """Internal factory applying the SolutionStep function defaults
        (tracing, 15s timeout, python3.9) on top of SolutionsPythonFunction."""

        def __init__(self, scope: Construct, construct_id: str, **kwargs):
            entrypoint = kwargs.pop("entrypoint", None)
            if not entrypoint or not entrypoint.exists():
                raise ValueError("an entrypoint (Path to a .py file) must be provided")

            libraries = kwargs.pop("libraries", None)
            if libraries and any(not l.exists() for l in libraries):
                raise ValueError(f"libraries provided, but do not exist at {libraries}")

            function = kwargs.pop("function")
            kwargs["layers"] = kwargs.get("layers", [])
            kwargs["tracing"] = Tracing.ACTIVE
            kwargs["timeout"] = Duration.seconds(15)
            kwargs["runtime"] = Runtime("python3.9", RuntimeFamily.PYTHON)

            super().__init__(
                scope,
                construct_id,
                entrypoint,
                function,
                libraries=libraries,
                **kwargs,
            )
14 | # --segment_name: the name of the specific segment/audience that the data is being uploaded for 15 | # 16 | # OUTPUT: 17 | # - Transformed data files in user-specified output bucket 18 | # 19 | # SAMPLE COMMAND-LINE USAGE: 20 | # 21 | # export JOB_NAME=mystack-GlueStack-12BSLR8H1F79M-snap-transformation-job 22 | # export SOURCE_BUCKET=mybucket 23 | # export SOURCE_KEY=mydata.json 24 | # export OUTPUT_BUCKET=mystack-etl-artifacts-zmtmhi 25 | # export PII_FIELDS='[{"column_name":"e-mail", "pii_type":"EMAIL"}, {"column_name":"phone_number", "pii_type":"PHONE"}, {"column_name":"mobile_advertiser_id", "pii_type":"MOBILE_AD_ID"}]' 26 | # export SEGMENT_NAME='myaudience' 27 | # export REGION=us-east-1 28 | # aws glue start-job-run --job-name $JOB_NAME --arguments '{"--source_bucket": "'$SOURCE_BUCKET'", "--output_bucket": "'$OUTPUT_BUCKET'", "--source_key": "'$SOURCE_KEY'", "--pii_fields": "'$PII_FIELDS'"}' --segment_name $SEGMENT_NAME --region $REGION 29 | # 30 | ############################################################################### 31 | 32 | import sys 33 | import os 34 | import json 35 | import math 36 | import hashlib 37 | import numpy as np 38 | import pandas as pd 39 | import awswrangler as wr 40 | from awsglue.utils import getResolvedOptions 41 | 42 | snap_api_limit = 100000 43 | 44 | ############################### 45 | # PARSE ARGS 46 | ############################### 47 | 48 | args = getResolvedOptions(sys.argv, ['JOB_NAME', 'source_bucket', 'source_key', 'output_bucket', 'pii_fields', 'segment_name']) 49 | print("Runtime args for job " + args['JOB_NAME'] + ":") 50 | print(args) 51 | if 'source_bucket' not in args: 52 | sys.exit("ERROR: Missing source_bucket job parameter") 53 | if 'source_key' not in args: 54 | sys.exit("ERROR: Missing source_key job parameter") 55 | if 'output_bucket' not in args: 56 | sys.exit("ERROR: Missing output_bucket job parameter") 57 | if 'segment_name' not in args: 58 | sys.exit("ERROR: Missing segment_name job 
parameter") 59 | 60 | pii_fields = [] 61 | if 'pii_fields' in args: 62 | pii_fields = json.loads(args['pii_fields']) 63 | 64 | ############################### 65 | # LOAD INPUT DATA 66 | ############################### 67 | 68 | source_bucket = args['source_bucket'] 69 | source_key = args['source_key'] 70 | output_bucket = args['output_bucket'] 71 | output_key = os.path.splitext(source_key)[0] 72 | segment_name = args['segment_name'] 73 | 74 | chunksize = 2000 75 | 76 | print('Reading input file from: ') 77 | print('s3://'+source_bucket+'/'+source_key) 78 | 79 | dfs = wr.s3.read_json(path=['s3://'+source_bucket+'/'+source_key], chunksize=chunksize, lines=True, orient='records') 80 | df = pd.DataFrame() 81 | for chunk in dfs: 82 | # Save each chunk 83 | df = pd.concat([df, chunk]) 84 | 85 | ############################### 86 | # DATA NORMALIZATION 87 | ############################### 88 | 89 | # df1 will contain integer, float, and datetime columns. This is not currently being used 90 | df1 = df.select_dtypes(exclude=[object]) 91 | # df2 will contain string columns 92 | df2 = df.select_dtypes(include=[object]) 93 | df2 = df2.apply(lambda x: x.astype(str).str.normalize('NFKD').str.strip()) 94 | 95 | for field in pii_fields: 96 | if field['pii_type'] == "PHONE": 97 | column_name = field['column_name'] 98 | df2[column_name] = df2[column_name].str.replace(r'[^0-9]+', '').str.lstrip('0') 99 | elif field['pii_type'] == "EMAIL": 100 | column_name = field['column_name'] 101 | elif field['pii_type'] == "MOBILE_AD_ID": 102 | column_name = field['column_name'] 103 | df2[column_name] = df2[column_name].str.lower() 104 | 105 | ############################### 106 | # PII HASHING 107 | ############################### 108 | 109 | for field in pii_fields: 110 | column = field['column_name'] 111 | df2[column] = df2[column].apply(lambda x: hashlib.sha256(x.encode()).hexdigest()) 112 | df2.rename(columns = {column:field['pii_type']+'_SHA256'}, inplace = True) 113 | 114 | 
############################### 115 | # SAVE OUTPUT DATA 116 | ############################### 117 | 118 | # df = pd.concat([df1, df2], axis=1) 119 | # Melt and rename dataframe to fit input of Snap Activator 120 | df2 = df2.melt() 121 | df2.rename(columns = {'variable':'schema', 'value':'hash'}, inplace = True) 122 | df2['segment_name'] = segment_name 123 | 124 | list_df = np.array_split(df2, math.ceil(df2.shape[0]/snap_api_limit)) 125 | num_file_digits = int(math.log10(len(list_df)))+1 126 | 127 | for i in range(len(list_df)): 128 | output_file = 's3://'+output_bucket+'/output/snap/'+segment_name+'/'+output_key+str(i+1).zfill(num_file_digits)+'.csv'+'.gz' 129 | wr.s3.to_csv(df=list_df[i], path=output_file, compression='gzip') 130 | -------------------------------------------------------------------------------- /source/tests/infrastructure/lib/test_web_stack.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0

import pytest
from aws_cdk.assertions import Template, Match, Capture
import aws_cdk as cdk
from lib.uploader_stack import UploaderStack

# Create the stack in a fixture - pass the fixture to each test
@pytest.fixture(scope="module")
def synth_nested_template():
    """Synthesize the nested WebStack once per module and yield its Template."""

    # Context values the CDK app would normally receive from cdk.json / CLI.
    FAKE_CONTEXT = {
        "SOLUTION_ID": "SO0226",
        "SOLUTION_VERSION": "V1.0.0",
        "BUCKET_NAME": "FAKEBUCKETNAME",
        "SOLUTION_NAME": "FAKESOLUTIONNAME",
        "APP_REGISTRY_NAME": "FAKEAPPREGISTRYNAME",
    }
    app = cdk.App(context=FAKE_CONTEXT)

    uploader_stack = UploaderStack(
        app,
        "uploader",
        stack_name=app.node.try_get_context("STACK_NAME"),
        description=f"Audience Uploader from AWS Clean Rooms Solution CDK stack",
        template_filename="audience-uploader-from-aws-clean-rooms.template",
    )
    # Assertions below target the nested WebStack, not the parent stack.
    web_stack = uploader_stack.cf_stack.get_nested_stack("WebStack")

    template = Template.from_stack(web_stack.stack)
    yield template


def test_lambda_creation(synth_nested_template):
    """WebStack defines exactly two Lambda functions backed by an IAM role."""
    template = synth_nested_template
    template.resource_count_is("AWS::Lambda::Function", 2)

    # Capture the role reference so we can assert on the role resource below.
    bucket_execution_role = Capture()
    template.has_resource_properties(
        "AWS::Lambda::Function",
        {
            "Role": {"Fn::GetAtt": bucket_execution_role},
            "Handler": "index.handler",
            "Runtime": Match.string_like_regexp("python3.*"),
        },
    )

    # make sure the role was created with the correct name
    # (the captured value is "<LogicalId>.Arn"; strip the attribute part).
    assert (
        template.to_json()["Resources"][bucket_execution_role.as_string().replace(".Arn", "")]["Type"]
        == "AWS::IAM::Role"
    )


def test_s3_bucket_creation(synth_nested_template):
    """The website bucket is encrypted and expires its log prefixes quickly."""
    template = synth_nested_template
    template.resource_count_is("AWS::S3::Bucket", 1)

    template.has_resource_properties(
        "AWS::S3::Bucket",
        {
            "AccessControl": "LogDeliveryWrite",
            "BucketEncryption": {
                "ServerSideEncryptionConfiguration": [{"ServerSideEncryptionByDefault": {"SSEAlgorithm": "AES256"}}]
            },
            "BucketName": {"Fn::GetAtt": Match.any_value()},
            # Both access_logs/ and cf_logs/ objects expire after 3 days.
            "LifecycleConfiguration": {
                "Rules": [
                    {
                        "AbortIncompleteMultipartUpload": {"DaysAfterInitiation": 1},
                        "ExpirationInDays": 3,
                        "Id": Match.any_value(),
                        "Prefix": "access_logs/",
                        "Status": "Enabled",
                    },
                    {
                        "AbortIncompleteMultipartUpload": {"DaysAfterInitiation": 1},
                        "ExpirationInDays": 3,
                        "Id": Match.any_value(),
                        "Prefix": "cf_logs/",
                        "Status": "Enabled",
                    },
                ]
            },
        },
    )


# Policy to deny all and then one to allow GetObject


def test_bucket_policy_creation(synth_nested_template):
    """The bucket policy allows GetObject and denies insecure transport."""
    template = synth_nested_template
    template.resource_count_is("AWS::S3::BucketPolicy", 1)

    # NOTE(review): website_bucket is captured from a {"Ref": ...} but then
    # reused inside {"Fn::Sub": ...} patterns, and reusing one Capture in
    # several places records only the last match — confirm this matcher
    # actually constrains what it appears to.
    website_bucket = Capture()
    # least privilege
    template.has_resource_properties(
        "AWS::S3::BucketPolicy",
        {
            "Bucket": {"Ref": website_bucket},
            "PolicyDocument": {
                "Statement": [
                    {
                        "Effect": "Allow",
                        "Action": ["s3:GetObject"],
                        "Resource": [{"Fn::Sub": website_bucket}],
                    },
                    {
                        # Deny all actions over insecure (non-TLS) transport.
                        "Effect": "Deny",
                        "Principal": "*",
                        "Action": "*",
                        "Resource": [{"Fn::Sub": website_bucket}, {"Fn::Sub": website_bucket}],
                        "Condition": {"Bool": {"aws:SecureTransport": False}},
                    },
                    {
                        "Effect": "Deny",
                        "Action": "*",
                        "Resource": [{"Fn::Sub": website_bucket}],
                    }
                ]
            },
        },
    )


def test_lambda_permissions_creation(synth_nested_template):
    """CloudFormation is the only principal allowed to invoke the function."""
    template = synth_nested_template
    template.resource_count_is("AWS::Lambda::Permission", 1)

    website_bucket_function = Capture()
    template.has_resource_properties(
        "AWS::Lambda::Permission",
        {
            "Action": "lambda:InvokeFunction",
            "FunctionName": {"Fn::GetAtt": website_bucket_function},
            "Principal": "cloudformation.amazonaws.com",
        },
    )

    # make sure the function was created with the correct name
    # (strip the ".Arn" attribute to recover the logical id).
    assert (
        template.to_json()["Resources"][website_bucket_function.as_string().replace(".Arn", "")]["Type"]
        == "AWS::Lambda::Function"
    )
See https://docs.aws.amazon.com/xray/latest/devguide/security_iam_id-based-policy-examples.html#xray-permissions-resources" 39 | } 40 | ] 41 | } 42 | }, 43 | "Properties": { 44 | "AssumeRolePolicyDocument": { 45 | "Version": "2012-10-17", 46 | "Statement": [ 47 | { 48 | "Sid": "", 49 | "Effect": "Allow", 50 | "Principal": { 51 | "Service": "lambda.amazonaws.com" 52 | }, 53 | "Action": "sts:AssumeRole" 54 | } 55 | ] 56 | }, 57 | "Policies": [ 58 | { 59 | "PolicyDocument": { 60 | "Version": "2012-10-17", 61 | "Statement": [ 62 | { 63 | "Effect": "Allow", 64 | "Action": [ 65 | "s3:GetObject" 66 | ], 67 | "Resource": { 68 | "Fn::Sub": "arn:aws:s3:::${DataBucketName}/*" 69 | } 70 | }, 71 | { 72 | "Effect": "Allow", 73 | "Action": [ 74 | "s3:ListBucket" 75 | ], 76 | "Resource": { 77 | "Fn::Sub": "arn:aws:s3:::${DataBucketName}" 78 | } 79 | }, 80 | { 81 | "Effect": "Allow", 82 | "Action": [ 83 | "glue:StartJobRun", 84 | "glue:GetJobRuns" 85 | ], 86 | "Resource": { 87 | "Fn::Sub": "arn:aws:glue:${AWS::Region}:${AWS::AccountId}:job/${AmcGlueJobName}" 88 | } 89 | }, 90 | { 91 | "Action": [ 92 | "logs:CreateLogGroup", 93 | "logs:CreateLogStream", 94 | "logs:PutLogEvents" 95 | ], 96 | "Resource": { 97 | "Fn::Sub": "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:log-group:/aws/lambda/*" 98 | }, 99 | "Effect": "Allow", 100 | "Sid": "Logging" 101 | }, 102 | { 103 | "Action": [ 104 | "xray:PutTraceSegments", 105 | "xray:PutTelemetryRecords" 106 | ], 107 | "Resource": [ 108 | "*" 109 | ], 110 | "Effect": "Allow" 111 | } 112 | ] 113 | }, 114 | "PolicyName": "ApiHandlerRolePolicy" 115 | } 116 | ] 117 | } 118 | }, 119 | "APIHandler": { 120 | "Metadata": { 121 | "cfn_nag": { 122 | "rules_to_suppress": [ 123 | { 124 | "id": "W89", 125 | "reason": "This Lambda function does not need to access any resource provisioned within a VPC." 126 | }, 127 | { 128 | "id": "W92", 129 | "reason": "This function does not require performance optimization, so the default concurrency limits suffice." 
130 | } 131 | ] 132 | } 133 | }, 134 | "Properties": { 135 | "Runtime": "python3.9", 136 | "Environment": { 137 | "Variables": { 138 | "botoConfig": { 139 | "Ref": "botoConfig" 140 | }, 141 | "VERSION": { 142 | "Ref": "Version" 143 | }, 144 | "AMC_GLUE_JOB_NAME": { 145 | "Ref": "AmcGlueJobName" 146 | } 147 | } 148 | }, 149 | "Layers": [ 150 | "arn:aws:lambda:us-east-1:336392948345:layer:AWSDataWrangler-Python39:9" 151 | ], 152 | "Role": { 153 | "Fn::GetAtt": [ 154 | "ApiHandlerRole", 155 | "Arn" 156 | ] 157 | }, 158 | "CodeUri": { 159 | "Bucket": { 160 | "Ref": "DeploymentPackageBucket" 161 | }, 162 | "Key": { 163 | "Ref": "DeploymentPackageKey" 164 | } 165 | } 166 | } 167 | } 168 | } 169 | } -------------------------------------------------------------------------------- /source/website/src/tiktokViews/Step2.vue: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | */ 5 | 6 | 92 | 93 | --------------------------------------------------------------------------------