├── source ├── tests │ ├── cdk_solution_helper │ │ ├── aws_lambda │ │ │ ├── python │ │ │ │ └── fixtures │ │ │ │ │ ├── hash_fixture │ │ │ │ │ ├── a │ │ │ │ │ │ └── z.txt │ │ │ │ │ ├── c.txt │ │ │ │ │ └── z │ │ │ │ │ │ └── a.txt │ │ │ │ │ ├── requirements.txt │ │ │ │ │ ├── Pipfile │ │ │ │ │ ├── lambda │ │ │ │ │ └── package │ │ │ │ │ │ ├── minimal │ │ │ │ │ │ └── __init__.py │ │ │ │ │ │ └── setup.py │ │ │ │ │ ├── packages │ │ │ │ │ └── package2 │ │ │ │ │ │ ├── minimal2 │ │ │ │ │ │ └── __init__.py │ │ │ │ │ │ └── setup.py │ │ │ │ │ └── pyproject.toml │ │ │ ├── java │ │ │ │ ├── fixtures │ │ │ │ │ └── java_sample │ │ │ │ │ │ ├── settings.gradle │ │ │ │ │ │ ├── gradle │ │ │ │ │ │ └── wrapper │ │ │ │ │ │ │ ├── gradle-wrapper.jar │ │ │ │ │ │ │ └── gradle-wrapper.properties │ │ │ │ │ │ ├── src │ │ │ │ │ │ ├── main │ │ │ │ │ │ │ ├── java │ │ │ │ │ │ │ │ └── example │ │ │ │ │ │ │ │ │ ├── Handler.java │ │ │ │ │ │ │ │ │ └── UserData.java │ │ │ │ │ │ │ └── main.iml │ │ │ │ │ │ └── test │ │ │ │ │ │ │ ├── java │ │ │ │ │ │ │ └── example │ │ │ │ │ │ │ │ └── HandlerTest.java │ │ │ │ │ │ │ └── test1.iml │ │ │ │ │ │ └── build.gradle │ │ │ │ └── test_java_function.py │ │ │ └── cfn_custom_resources │ │ │ │ ├── solution_metrics │ │ │ │ └── test_metrics_cdk.py │ │ │ │ ├── resource_hash │ │ │ │ ├── test_resource_name_cdk.py │ │ │ │ └── test_resource_name.py │ │ │ │ └── resource_name │ │ │ │ ├── test_resource_hash_cdk.py │ │ │ │ └── test_resource_hash.py │ │ ├── __init__.py │ │ ├── helpers │ │ │ └── test_logger.py │ │ ├── test_mappings.py │ │ ├── test_cfn_nag_suppressions.py │ │ ├── test_logging.py │ │ ├── test_helpers.py │ │ ├── test_aspects.py │ │ └── test_cdk_interfaces.py │ ├── __init__.py │ ├── aws_lambda │ │ ├── __init__.py │ │ ├── create_campaign │ │ │ └── __init__.py │ │ ├── create_config │ │ │ └── __init__.py │ │ ├── create_dataset │ │ │ └── __init__.py │ │ ├── create_filter │ │ │ └── __init__.py │ │ ├── create_schema │ │ │ ├── __init__.py │ │ │ └── create_schema_handler.py │ │ ├── create_solution │ │ │ └── __init__.py │ │ ├── create_dataset_group │ │ │ └── __init__.py │ │ ├── create_event_tracker │ │ │ └── __init__.py │ │ ├── create_recommender │ │ │ └── __init__.py │ │ ├── create_batch_inference_job │ │ │ └── __init__.py │ │ ├── create_batch_segment_job │ │ │ └── __init__.py │ │ ├── create_dataset_import_job │ │ │ └── __init__.py │ │ ├── create_solution_version │ │ │ └── __init__.py │ │ └── test_events.py │ ├── fixtures │ │ └── config │ │ │ └── users.csv │ ├── test_personalize_stack.py │ └── test_resources.py ├── aws_lambda │ ├── __init__.py │ ├── s3_event │ │ └── __init__.py │ ├── shared │ │ ├── __init__.py │ │ ├── personalize │ │ │ └── __init__.py │ │ ├── notifiers │ │ │ └── __init__.py │ │ ├── resource │ │ │ ├── schema.py │ │ │ ├── filter.py │ │ │ ├── campaign.py │ │ │ ├── event_tracker.py │ │ │ ├── batch_segment_job.py │ │ │ ├── batch_inference_job.py │ │ │ ├── dataset_import_job.py │ │ │ ├── dataset.py │ │ │ ├── solution.py │ │ │ ├── recommender.py │ │ │ ├── solution_version.py │ │ │ ├── dataset_group.py │ │ │ ├── base.py │ │ │ ├── __init__.py │ │ │ └── name.py │ │ ├── exceptions.py │ │ ├── date_helpers.py │ │ ├── s3.py │ │ └── events.py │ ├── create_config │ │ ├── __init__.py │ │ └── handler.py │ ├── create_filter │ │ ├── __init__.py │ │ └── handler.py │ ├── create_schema │ │ ├── __init__.py │ │ └── handler.py │ ├── prepare_input │ │ ├── __init__.py │ │ └── handler.py │ ├── create_campaign │ │ ├── __init__.py │ │ └── handler.py │ ├── create_dataset │ │ ├── __init__.py │ │ └── handler.py │ ├── 
create_recommender │ │ ├── __init__.py │ │ └── handler.py │ ├── create_solution │ │ ├── __init__.py │ │ └── handler.py │ ├── create_timestamp │ │ ├── __init__.py │ │ └── handler.py │ ├── sns_notification │ │ └── __init__.py │ ├── create_dataset_group │ │ ├── __init__.py │ │ └── handler.py │ ├── create_event_tracker │ │ ├── __init__.py │ │ └── handler.py │ ├── create_solution_version │ │ ├── __init__.py │ │ └── handler.py │ ├── create_batch_inference_job │ │ ├── __init__.py │ │ └── handler.py │ ├── create_batch_segment_job │ │ ├── __init__.py │ │ └── handler.py │ └── create_dataset_import_job │ │ ├── __init__.py │ │ └── handler.py ├── infrastructure │ ├── __init__.py │ ├── personalize │ │ ├── __init__.py │ │ ├── sns │ │ │ └── __init__.py │ │ ├── aws_lambda │ │ │ ├── __init__.py │ │ │ ├── layers │ │ │ │ ├── aws_solutions │ │ │ │ │ ├── __init__.py │ │ │ │ │ ├── requirements │ │ │ │ │ │ └── requirements.txt │ │ │ │ │ └── layer.py │ │ │ │ └── __init__.py │ │ │ └── functions │ │ │ │ ├── create_timestamp.py │ │ │ │ ├── prepare_input.py │ │ │ │ ├── create_scheduled_task.py │ │ │ │ ├── __init__.py │ │ │ │ ├── create_schema.py │ │ │ │ ├── environment.py │ │ │ │ ├── create_solution.py │ │ │ │ ├── create_recommender.py │ │ │ │ ├── create_solution_version.py │ │ │ │ ├── create_filter.py │ │ │ │ ├── create_campaign.py │ │ │ │ ├── create_dataset.py │ │ │ │ ├── create_config.py │ │ │ │ └── s3_event.py │ │ ├── cloudwatch │ │ │ └── __init__.py │ │ ├── step_functions │ │ │ ├── __init__.py │ │ │ ├── schedules.py │ │ │ ├── failure_fragment.py │ │ │ ├── event_tracker_fragment.py │ │ │ ├── dataset_imports_fragment.py │ │ │ └── filter_fragment.py │ │ └── s3 │ │ │ ├── data_bucket.py │ │ │ ├── __init__.py │ │ │ ├── access_logs_bucket.py │ │ │ └── utils.py │ ├── cdk.json │ ├── deploy.py │ └── setup.py ├── scheduler │ ├── cdk │ │ ├── __init__.py │ │ ├── aws_solutions │ │ │ └── scheduler │ │ │ │ └── cdk │ │ │ │ ├── __init__.py │ │ │ │ └── aws_lambda │ │ │ │ ├── scheduler │ │ │ │ ├── __init__.py │ │ │ │ ├── requirements.txt │ │ │ │ └── handler.py │ │ │ │ ├── get_next_scheduled_event │ │ │ │ ├── settings.gradle │ │ │ │ ├── gradle │ │ │ │ │ └── wrapper │ │ │ │ │ │ ├── gradle-wrapper.jar │ │ │ │ │ │ └── gradle-wrapper.properties │ │ │ │ ├── src │ │ │ │ │ ├── main │ │ │ │ │ │ └── java │ │ │ │ │ │ │ └── com │ │ │ │ │ │ │ └── amazonaws │ │ │ │ │ │ │ └── solutions │ │ │ │ │ │ │ └── schedule_sfn_task │ │ │ │ │ │ │ ├── ScheduleException.java │ │ │ │ │ │ │ ├── ScheduleEvent.java │ │ │ │ │ │ │ └── HandleScheduleEvent.java │ │ │ │ │ └── test │ │ │ │ │ │ └── java │ │ │ │ │ │ └── com │ │ │ │ │ │ └── amazonaws │ │ │ │ │ │ └── solutions │ │ │ │ │ │ └── schedule_sfn_task │ │ │ │ │ │ └── HandleScheduleEventTest.java │ │ │ │ └── build.gradle │ │ │ │ ├── __init__.py │ │ │ │ ├── read_scheduled_task.py │ │ │ │ ├── delete_scheduled_task.py │ │ │ │ ├── create_scheduled_task.py │ │ │ │ └── update_scheduled_task.py │ │ └── setup.py │ ├── common │ │ ├── __init__.py │ │ ├── aws_solutions │ │ │ └── scheduler │ │ │ │ └── common │ │ │ │ ├── scripts │ │ │ │ └── __init__.py │ │ │ │ ├── __init__.py │ │ │ │ ├── task_resource.py │ │ │ │ └── task.py │ │ └── setup.py │ └── CHANGELOG.md ├── cdk_solution_helper_py │ ├── helpers_cdk │ │ ├── aws_solutions │ │ │ └── cdk │ │ │ │ ├── aws_lambda │ │ │ │ ├── cfn_custom_resources │ │ │ │ │ ├── resource_hash │ │ │ │ │ │ ├── src │ │ │ │ │ │ │ ├── custom_resources │ │ │ │ │ │ │ │ ├── requirements.txt │ │ │ │ │ │ │ │ └── __init__.py │ │ │ │ │ │ │ └── __init__.py │ │ │ │ │ │ └── __init__.py │ │ │ │ │ ├── resource_name │ 
│ │ │ │ │ ├── src │ │ │ │ │ │ │ ├── custom_resources │ │ │ │ │ │ │ │ ├── requirements.txt │ │ │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ │ │ └── name.py │ │ │ │ │ │ │ └── __init__.py │ │ │ │ │ │ └── __init__.py │ │ │ │ │ ├── solutions_metrics │ │ │ │ │ │ ├── src │ │ │ │ │ │ │ ├── custom_resources │ │ │ │ │ │ │ │ ├── requirements.txt │ │ │ │ │ │ │ │ └── __init__.py │ │ │ │ │ │ │ └── __init__.py │ │ │ │ │ │ └── __init__.py │ │ │ │ │ └── __init__.py │ │ │ │ ├── layers │ │ │ │ │ ├── aws_lambda_powertools │ │ │ │ │ │ ├── requirements │ │ │ │ │ │ │ └── requirements.txt │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ └── layer.py │ │ │ │ │ └── __init__.py │ │ │ │ ├── __init__.py │ │ │ │ ├── java │ │ │ │ │ └── __init__.py │ │ │ │ ├── python │ │ │ │ │ ├── __init__.py │ │ │ │ │ └── hash_utils.py │ │ │ │ ├── environment_variable.py │ │ │ │ └── environment.py │ │ │ │ ├── scripts │ │ │ │ └── __init__.py │ │ │ │ ├── stepfunctions │ │ │ │ └── __init__.py │ │ │ │ ├── tools │ │ │ │ ├── __init__.py │ │ │ │ └── cleaner.py │ │ │ │ ├── helpers │ │ │ │ ├── __init__.py │ │ │ │ ├── logger.py │ │ │ │ └── copytree.py │ │ │ │ ├── aspects.py │ │ │ │ ├── __init__.py │ │ │ │ ├── mappings.py │ │ │ │ ├── cfn_nag.py │ │ │ │ └── cfn_guard.py │ │ └── setup.py │ ├── requirements-dev.txt │ ├── helpers_common │ │ ├── aws_solutions │ │ │ └── core │ │ │ │ ├── __init__.py │ │ │ │ ├── logging.py │ │ │ │ └── config.py │ │ └── setup.py │ └── CHANGELOG.md ├── images │ └── solution-architecture.png ├── .coveragerc ├── requirements-dev.txt └── pytest.ini ├── .github ├── PULL_REQUEST_TEMPLATE.md └── ISSUE_TEMPLATE │ ├── feature_request.md │ └── bug_report.md ├── CODE_OF_CONDUCT.md ├── SECURITY.md ├── .gitignore └── deployment └── run-unit-tests.sh /source/tests/cdk_solution_helper/aws_lambda/python/fixtures/hash_fixture/a/z.txt: -------------------------------------------------------------------------------- 1 | a -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/aws_lambda/python/fixtures/hash_fixture/c.txt: -------------------------------------------------------------------------------- 1 | c -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/aws_lambda/python/fixtures/hash_fixture/z/a.txt: -------------------------------------------------------------------------------- 1 | z -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/aws_lambda/python/fixtures/requirements.txt: -------------------------------------------------------------------------------- 1 | ./package -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/aws_lambda/python/fixtures/Pipfile: -------------------------------------------------------------------------------- 1 | [packages] 2 | minimal = {path = "./package"} -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/aws_lambda/java/fixtures/java_sample/settings.gradle: -------------------------------------------------------------------------------- 1 | rootProject.name = 'java_sample' 2 | 3 | -------------------------------------------------------------------------------- /source/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/aws_lambda/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/infrastructure/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/scheduler/cdk/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/aws_lambda/s3_event/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/aws_lambda/shared/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/scheduler/common/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/tests/aws_lambda/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/aws_lambda/create_config/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/aws_lambda/create_filter/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/aws_lambda/create_schema/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/aws_lambda/prepare_input/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/aws_lambda/create_campaign/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/aws_lambda/create_dataset/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/aws_lambda/create_recommender/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/aws_lambda/create_solution/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/aws_lambda/create_timestamp/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/aws_lambda/shared/personalize/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/aws_lambda/sns_notification/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_hash/src/custom_resources/requirements.txt: -------------------------------------------------------------------------------- 1 | crhelper==2.0.11 -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_name/src/custom_resources/requirements.txt: -------------------------------------------------------------------------------- 1 | crhelper==2.0.11 -------------------------------------------------------------------------------- /source/infrastructure/personalize/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/aws_lambda/create_dataset_group/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/aws_lambda/create_event_tracker/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/aws_lambda/create_solution_version/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/sns/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/tests/aws_lambda/create_campaign/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/tests/aws_lambda/create_config/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/tests/aws_lambda/create_dataset/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/tests/aws_lambda/create_filter/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/tests/aws_lambda/create_schema/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/tests/aws_lambda/create_solution/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/aws_lambda/create_batch_inference_job/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/aws_lambda/create_batch_segment_job/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/aws_lambda/create_dataset_import_job/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/aws_lambda/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/cloudwatch/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/tests/aws_lambda/create_dataset_group/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/tests/aws_lambda/create_event_tracker/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/tests/aws_lambda/create_recommender/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/step_functions/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/scheduler/cdk/aws_solutions/scheduler/cdk/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/tests/aws_lambda/create_batch_inference_job/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/tests/aws_lambda/create_batch_segment_job/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/tests/aws_lambda/create_dataset_import_job/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/tests/aws_lambda/create_solution_version/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/layers/aws_lambda_powertools/requirements/requirements.txt: -------------------------------------------------------------------------------- 1 | aws-lambda-powertools==2.15.0 2 | aws-xray-sdk==2.12.0 -------------------------------------------------------------------------------- /source/images/solution-architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-solutions/maintaining-personalized-experiences-with-machine-learning/main/source/images/solution-architecture.png -------------------------------------------------------------------------------- /source/infrastructure/personalize/aws_lambda/layers/aws_solutions/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/scheduler/common/aws_solutions/scheduler/common/scripts/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/scripts/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/scheduler/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/java/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/stepfunctions/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/solutions_metrics/src/custom_resources/requirements.txt: -------------------------------------------------------------------------------- 1 | requests==2.32.4 2 | urllib3==2.5.0 3 | crhelper==2.0.11 4 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/layers/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/python/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/aws_lambda/shared/notifiers/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from shared.notifiers.notify_eventbridge import NotifyEventBridge 5 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_hash/src/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_name/src/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/solutions_metrics/src/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | *Issue #, if available:* 2 | 3 | *Description of changes:* 4 | 5 | By submitting this pull request, I confirm that you can use, modify, copy, and redistribute this contribution, under the terms of your choice. 6 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_hash/src/custom_resources/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_name/src/custom_resources/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/tools/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from aws_solutions.cdk.tools.cleaner import Cleaner 5 | -------------------------------------------------------------------------------- /source/aws_lambda/shared/resource/schema.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from shared.resource import Resource 5 | 6 | 7 | class Schema(Resource): 8 | pass 9 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/solutions_metrics/src/custom_resources/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | -------------------------------------------------------------------------------- /source/aws_lambda/shared/resource/filter.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from shared.resource.base import Resource 5 | 6 | 7 | class Filter(Resource): 8 | pass 9 | -------------------------------------------------------------------------------- /source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/settings.gradle: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | rootProject.name = 'sfn-schedule-task' -------------------------------------------------------------------------------- /source/aws_lambda/shared/resource/campaign.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from shared.resource.base import Resource 5 | 6 | 7 | class Campaign(Resource): 8 | pass 9 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/aws_lambda/layers/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from personalize.aws_lambda.layers.aws_solutions.layer import SolutionsLayer 5 | -------------------------------------------------------------------------------- /source/.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | omit = 3 | infrastructure/setup.py 4 | infrastructure/cdk.out/* 5 | tests/* 6 | source = 7 | infrastructure 8 | aws_lambda 9 | cdk_solution_helper_py 10 | scheduler 11 | 12 | [report] 13 | fail_under = 80.0 -------------------------------------------------------------------------------- /source/aws_lambda/shared/resource/event_tracker.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from shared.resource.base import Resource 5 | 6 | 7 | class EventTracker(Resource): 8 | pass 9 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/helpers/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from aws_solutions.cdk.helpers.copytree import copytree, ignore_globs 5 | -------------------------------------------------------------------------------- /source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/scheduler/requirements.txt: -------------------------------------------------------------------------------- 1 | avro==1.11.3 2 | cronex==0.1.3.1 3 | jmespath==1.0.1 4 | parsedatetime==2.6 5 | ../../../../../../../scheduler/common 6 | ../../../../../../../cdk_solution_helper_py/helpers_common 7 | -------------------------------------------------------------------------------- /source/aws_lambda/shared/resource/batch_segment_job.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from shared.resource.base import Resource 5 | 6 | 7 | class BatchSegmentJob(Resource): 8 | pass 9 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/s3/data_bucket.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from personalize.s3.utils import SecureBucket 5 | 6 | 7 | class DataBucket(SecureBucket): 8 | pass 9 | -------------------------------------------------------------------------------- /source/aws_lambda/shared/resource/batch_inference_job.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from shared.resource.base import Resource 5 | 6 | 7 | class BatchInferenceJob(Resource): 8 | pass 9 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/aws_lambda/layers/aws_solutions/requirements/requirements.txt: -------------------------------------------------------------------------------- 1 | ../../../../../../cdk_solution_helper_py/helpers_common 2 | ../../../../../../scheduler/common 3 | avro==1.11.3 4 | cronex==0.1.3.1 5 | jmespath==1.0.1 6 | parsedatetime==2.6 7 | boto3==1.26.47 8 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/s3/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from personalize.s3.access_logs_bucket import AccessLogsBucket 5 | from personalize.s3.data_bucket import DataBucket 6 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/s3/access_logs_bucket.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from personalize.s3.utils import SecureBucket 5 | 6 | 7 | class AccessLogsBucket(SecureBucket): 8 | pass 9 | -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/aws_lambda/python/fixtures/lambda/package/minimal/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | def function_in_package(): 5 | return "hello from function_in_package" 6 | -------------------------------------------------------------------------------- /source/aws_lambda/shared/resource/dataset_import_job.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from shared.resource.base import Resource 5 | 6 | 7 | class DatasetImportJob(Resource): 8 | has_soft_limit = True 9 | -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/aws_lambda/python/fixtures/packages/package2/minimal2/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | def function_in_package(): 5 | return "hello from minimal2 function_in_package" 6 | -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/aws_lambda/java/fixtures/java_sample/gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-solutions/maintaining-personalized-experiences-with-machine-learning/main/source/tests/cdk_solution_helper/aws_lambda/java/fixtures/java_sample/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/aws_lambda/java/fixtures/java_sample/gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-7.5.1-bin.zip 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | ## Code of Conduct 2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 4 | opensource-codeofconduct@amazon.com with any additional questions or comments. -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/layers/aws_lambda_powertools/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from aws_solutions.cdk.aws_lambda.layers.aws_lambda_powertools.layer import ( 5 | PowertoolsLayer, 6 | ) 7 | -------------------------------------------------------------------------------- /source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-solutions/maintaining-personalized-experiences-with-machine-learning/main/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-7.5.1-bin.zip 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_hash/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from aws_solutions.cdk.aws_lambda.cfn_custom_resources.resource_hash.hash import ( 5 | ResourceHash, 6 | ) 7 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_name/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from aws_solutions.cdk.aws_lambda.cfn_custom_resources.resource_name.name import ( 5 | ResourceName, 6 | ) 7 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/solutions_metrics/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from aws_solutions.cdk.aws_lambda.cfn_custom_resources.solutions_metrics.metrics import ( 5 | Metrics, 6 | ) 7 | -------------------------------------------------------------------------------- /source/infrastructure/cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "python3 deploy.py", 3 | "context": { 4 | "SOLUTION_NAME": "Maintaining Personalized Experiences with Machine Learning", 5 | "SOLUTION_ID": "SO0170", 6 | "SOLUTION_VERSION": "v1.5.0", 7 | "APPLICATION_TYPE": "AWS-Solutions", 8 | "@aws-cdk/aws-s3:serverAccessLogsUseBucketPolicy": true 9 | } 10 | } -------------------------------------------------------------------------------- /source/tests/fixtures/config/users.csv: -------------------------------------------------------------------------------- 1 | USER_ID,AGE,GENDER 2 | 0,71,F 3 | 1,67,M 4 | 2,25,F 5 | 3,70,F 6 | 4,28,F 7 | 5,34,F 8 | 6,66,F 9 | 7,74,F 10 | 8,79,F 11 | 9,57,M 12 | 10,58,M 13 | 11,18,M 14 | 12,88,M 15 | 13,73,F 16 | 14,77,F 17 | 15,23,F 18 | 16,85,M 19 | 17,31,M 20 | 18,48,M 21 | 19,44,M 22 | 20,24,F 23 | 21,63,M 24 | 22,66,F 25 | 23,21,F 26 | 24,81,M 27 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/step_functions/schedules.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from dataclasses import dataclass 5 | 6 | from aws_cdk.aws_stepfunctions import StateMachineFragment 7 | 8 | 9 | @dataclass 10 | class Schedules: 11 | dataset_import: StateMachineFragment 12 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/requirements-dev.txt: -------------------------------------------------------------------------------- 1 | aws_cdk_lib==2.88.0 2 | black 3 | boto3==1.26.47 4 | requests==2.32.4 5 | crhelper==2.0.11 6 | Click==8.1.3 7 | moto==2.3.0 8 | pipenv 9 | poetry==1.6.1 10 | pytest==7.4.4 11 | pytest-cov==4.1.0 12 | pytest-mock==3.12.0 13 | tox==4.11.4 14 | tox-pyenv 15 | -e ./source/cdk_solution_helper_py/helpers_cdk 16 | -e ./source/cdk_solution_helper_py/helpers_common 17 | -------------------------------------------------------------------------------- /source/aws_lambda/shared/resource/dataset.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from shared.resource.base import Resource 5 | from shared.resource.dataset_import_job import DatasetImportJob 6 | 7 | 8 | class Dataset(Resource): 9 | children = [DatasetImportJob()] 10 | allowed_types = {"INTERACTIONS", "ITEMS", "USERS"} 11 | -------------------------------------------------------------------------------- /source/aws_lambda/shared/resource/solution.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from shared.resource.base import Resource 5 | from shared.resource.campaign import Campaign 6 | from shared.resource.solution_version import SolutionVersion 7 | 8 | 9 | class Solution(Resource): 10 | children = [Campaign(), SolutionVersion()] 11 | -------------------------------------------------------------------------------- /source/aws_lambda/shared/resource/recommender.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from shared.resource.base import Resource 5 | from shared.resource.batch_inference_job import BatchInferenceJob 6 | from shared.resource.batch_segment_job import BatchSegmentJob 7 | 8 | 9 | class Recommender(Resource): 10 | children = [BatchInferenceJob(), BatchSegmentJob()] 11 | -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/aws_lambda/python/fixtures/lambda/package/setup.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from setuptools import setup 5 | 6 | setup( 7 | name="minimal", 8 | version="0.1", 9 | description="a small package for testing", 10 | author="AWS Solutions Builders", 11 | packages=["minimal"], 12 | zip_safe=True, 13 | ) 14 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_common/aws_solutions/core/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from aws_solutions.core.config import Config 5 | 6 | config = Config() 7 | 8 | from aws_solutions.core.helpers import ( 9 | get_aws_region, 10 | get_aws_partition, 11 | get_service_client, 12 | get_service_resource, 13 | get_aws_account, 14 | ) 15 | -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/aws_lambda/python/fixtures/packages/package2/setup.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from setuptools import setup 5 | 6 | setup( 7 | name="minimal2", 8 | version="0.1", 9 | description="a second small package for testing", 10 | author="AWS Solutions Builders", 11 | packages=["minimal2"], 12 | zip_safe=True, 13 | ) 14 | -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/aws_lambda/python/fixtures/pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "python-bundling" 3 | version = "0.1.0" 4 | description = "" 5 | authors = ["AWS Solutions Builders"] 6 | 7 | [tool.poetry.dependencies] 8 | python = "^3.11" 9 | minimal = {path = "package"} 10 | 11 | [tool.poetry.dev-dependencies] 12 | pytest = "^7.4.2" 13 | 14 | [build-system] 15 | requires = ["poetry-core>=1.2.0"] 16 | build-backend = "poetry.core.masonry.api" -------------------------------------------------------------------------------- /source/tests/aws_lambda/create_schema/create_schema_handler.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import pytest 5 | from aws_lambda.create_schema.handler import CONFIG, RESOURCE, lambda_handler 6 | 7 | 8 | def test_create_schema_handler(validate_handler_config): 9 | validate_handler_config(RESOURCE, CONFIG) 10 | with pytest.raises(ValueError): 11 | lambda_handler({}, None) 12 | -------------------------------------------------------------------------------- /source/scheduler/CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Change Log 2 | All notable changes to this project will be documented in this file. 3 | 4 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 5 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 6 | 7 | ## [2.0.0] - 2022-01-31 8 | ### Changed 9 | - support for CDK 2.x added, support for CDK 1.x removed 10 | 11 | ## [1.1.0] - 2021-11-11 12 | ### Added 13 | - initial release 14 | 15 | -------------------------------------------------------------------------------- /source/aws_lambda/shared/resource/solution_version.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from shared.resource.base import Resource 5 | from shared.resource.batch_inference_job import BatchInferenceJob 6 | from shared.resource.batch_segment_job import BatchSegmentJob 7 | 8 | 9 | class SolutionVersion(Resource): 10 | children = [BatchInferenceJob(), BatchSegmentJob()] 11 | has_soft_limit = True 12 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Change Log 2 | All notable changes to this project will be documented in this file. 3 | 4 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 5 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
6 | 7 | ## [2.0.0] - 2022-01-31 8 | ### Changed 9 | - support for CDK 2.x added, support for CDK 1.x removed 10 | 11 | ## [1.0.0] - 2021-09-23 12 | ### Added 13 | - initial release 14 | 15 | -------------------------------------------------------------------------------- /source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/src/main/java/com/amazonaws/solutions/schedule_sfn_task/ScheduleException.java: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | package com.amazonaws.solutions.schedule_sfn_task; 5 | 6 | public class ScheduleException extends RuntimeException { 7 | public ScheduleException(String message) { 8 | super(message); 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /source/aws_lambda/shared/exceptions.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | class ResourcePending(Exception): 5 | pass 6 | 7 | 8 | class SolutionVersionPending(Exception): 9 | pass 10 | 11 | 12 | class ResourceFailed(Exception): 13 | pass 14 | 15 | 16 | class ResourceInvalid(Exception): 17 | pass 18 | 19 | 20 | class ResourceNeedsUpdate(Exception): 21 | pass 22 | 23 | 24 | class NotificationError(Exception): 25 | pass 26 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | ## Reporting Security Issues 2 | 3 | We take all security reports seriously. When we receive such reports, 4 | we will investigate and subsequently address any potential vulnerabilities as 5 | quickly as possible. If you discover a potential security issue in this project, 6 | please notify AWS/Amazon Security via our 7 | [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/) or directly via email 8 | to [AWS Security](mailto:aws-security@amazon.com). 9 | Please do *not* create a public GitHub issue in this project. -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/aws_lambda/java/fixtures/java_sample/src/main/java/example/Handler.java: -------------------------------------------------------------------------------- 1 | package example; 2 | 3 | import com.amazonaws.services.lambda.runtime.Context; 4 | import com.amazonaws.services.lambda.runtime.RequestHandler; 5 | 6 | public class Handler implements RequestHandler<UserData, UserData> { 7 | 8 | @Override 9 | public UserData handleRequest(UserData input, Context context) { 10 | UserData output = input; 11 | output.setGreeting("Hello there " + input.getName()); 12 | return output; 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /source/scheduler/common/aws_solutions/scheduler/common/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | TASK_PK = "name" 5 | TASK_SK = "version" 6 | CRON_ANY_WILDCARD = "?"
7 | CRON_MIN_MAX_YEAR = (1970, 2199) 8 | 9 | 10 | from aws_solutions.scheduler.common.base import Scheduler 11 | from aws_solutions.scheduler.common.schedule import Schedule, ScheduleError 12 | from aws_solutions.scheduler.common.task import Task 13 | from aws_solutions.scheduler.common.task_resource import TaskResource 14 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this solution 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the feature you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Additional context** 17 | Add any other context or screenshots about the feature request here. 18 | -------------------------------------------------------------------------------- /source/aws_lambda/shared/resource/dataset_group.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from shared.resource.base import Resource 5 | from shared.resource.dataset import Dataset 6 | from shared.resource.event_tracker import EventTracker 7 | from shared.resource.filter import Filter 8 | from shared.resource.recommender import Recommender 9 | from shared.resource.solution import Solution 10 | 11 | 12 | class DatasetGroup(Resource): 13 | children = [Dataset(), Filter(), Solution(), Recommender(), EventTracker()] 14 | -------------------------------------------------------------------------------- /source/requirements-dev.txt: -------------------------------------------------------------------------------- 1 | avro==1.11.3 2 | black==24.3.0 3 | boto3==1.26.47 4 | aws_cdk_lib==2.88.0 5 | aws_solutions_constructs.aws_lambda_sns==2.41.0 6 | cdk-nag==2.27.107 7 | requests==2.32.4 8 | crhelper==2.0.11 9 | cronex==0.1.3.1 10 | moto==2.3.0 11 | parsedatetime==2.6 12 | pytest==7.4.4 13 | pytest-cov==4.1.0 14 | pytest-env==1.1.3 15 | pytest-mock==3.12.0 16 | pyyaml==6.0.2 17 | responses==0.17.0 18 | tenacity==8.0.1 19 | -e cdk_solution_helper_py/helpers_cdk 20 | -e cdk_solution_helper_py/helpers_common 21 | -e scheduler/cdk 22 | -e scheduler/common 23 | docker==6.0.0 24 | -e infrastructure -------------------------------------------------------------------------------- /source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from aws_solutions.scheduler.cdk.aws_lambda.create_scheduled_task import ( 5 | CreateScheduledTask, 6 | ) 7 | from aws_solutions.scheduler.cdk.aws_lambda.delete_scheduled_task import ( 8 | DeleteScheduledTask, 9 | ) 10 | from aws_solutions.scheduler.cdk.aws_lambda.read_scheduled_task import ReadScheduledTask 11 | from aws_solutions.scheduler.cdk.aws_lambda.update_scheduled_task import ( 12 | UpdateScheduledTask, 13 | ) 14 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/environment_variable.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from dataclasses import dataclass, field 5 | 6 | from aws_cdk.aws_lambda import IFunction 7 | 8 | 9 | @dataclass 10 | class EnvironmentVariable: 11 | scope: IFunction 12 | name: str 13 | value: str = field(default="") 14 | 15 | def __post_init__(self): 16 | if not self.value: 17 | self.value = self.scope.node.try_get_context(self.name) 18 | self.scope.add_environment(self.name, self.value) 19 | 20 | def __str__(self): 21 | return self.value 22 | -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/aws_lambda/java/fixtures/java_sample/src/main/main.iml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 14 | -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/aws_lambda/java/fixtures/java_sample/src/test/java/example/HandlerTest.java: -------------------------------------------------------------------------------- 1 | package example; 2 | 3 | import org.junit.jupiter.api.BeforeEach; 4 | import org.junit.jupiter.api.Test; 5 | 6 | import static org.junit.jupiter.api.Assertions.*; 7 | 8 | class HandlerTest { 9 | Handler handler; 10 | UserData userData; 11 | 12 | @BeforeEach 13 | void setUp() { 14 | handler = new Handler(); 15 | userData = new UserData("AWS Solutions"); 16 | } 17 | 18 | @Test 19 | void handleRequest() { 20 | UserData result = this.handler.handleRequest(userData, null); 21 | assert result.getGreeting().equals("Hello there AWS Solutions"); 22 | } 23 | } -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/aws_lambda/java/fixtures/java_sample/src/main/java/example/UserData.java: -------------------------------------------------------------------------------- 1 | package example; 2 | 3 | public class UserData { 4 | public UserData() { 5 | } 6 | 7 | public UserData(String name) { 8 | this.name = name; 9 | } 10 | 11 | public String getName() { 12 | return name; 13 | } 14 | 15 | public void setName(String name) { 16 | this.name = name; 17 | } 18 | 19 | private String name = ""; 20 | 21 | public String getGreeting() { 22 | return greeting; 23 | } 24 | 25 | public void setGreeting(String greeting) { 26 | this.greeting = greeting; 27 | } 28 | 29 | private String greeting = ""; 30 | } 31 | -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/aws_lambda/java/fixtures/java_sample/src/test/test1.iml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 
12 | 14 | -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/helpers/test_logger.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import logging 5 | 6 | from aws_solutions.cdk.helpers.logger import Logger 7 | 8 | 9 | def test_logger(caplog): 10 | logger = Logger.get_logger("test-logger") 11 | logger.propagate = True # for test 12 | 13 | assert logger.level == logging.INFO 14 | 15 | with caplog.at_level(logging.INFO): 16 | logger.critical("CRITICAL") 17 | logger.error("ERROR") 18 | logger.warning("WARNING") 19 | logger.info("INFO") 20 | logging.debug("DEBUG") 21 | 22 | for level in "CRITICAL ERROR WARNING INFO".split(" "): 23 | assert level in caplog.text 24 | -------------------------------------------------------------------------------- /source/aws_lambda/create_timestamp/handler.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import datetime 5 | from typing import Dict, Any 6 | 7 | from aws_lambda_powertools import Logger, Tracer 8 | from aws_lambda_powertools.utilities.typing import LambdaContext 9 | 10 | logger = Logger() 11 | tracer = Tracer() 12 | 13 | 14 | @tracer.capture_lambda_handler 15 | def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> str: 16 | """Create a timestamp matching YYYY_mm_dd_HH_MM_SS 17 | :param event: AWS Lambda Event 18 | :param context: AWS Lambda Context 19 | :return: the timestamp (string) 20 | """ 21 | return datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S") 22 | -------------------------------------------------------------------------------- /source/pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | env = 3 | MOTO_ACCOUNT_ID=111111111111 4 | POWERTOOLS_TRACE_DISABLED=1 5 | SOLUTION_ID=SO0170test 6 | SOLUTION_VERSION=v99.99.99 7 | SOLUTION_NAME=Maintaining Personalized Experiences with Machine Learning 8 | APPLICATION_TYPE=AWS-Solutions 9 | AWS_REGION=us-east-1 10 | AWS_DEFAULT_REGION=us-east-1 11 | DDB_SCHEDULES_TABLE=scheduler 12 | DDB_SCHEDULER_STEPFUNCTION=arn:aws:states:us-east-1:111111111111:stateMachine:personalizestack-personalize-scheduler 13 | POWERTOOLS_SERVICE_NAME=personalize_solution_teststack 14 | POWERTOOLS_METRICS_NAMESPACE=personalize_solution_teststack 15 | norecursedirs = cdk.out* 16 | markers= 17 | no_cdk_lambda_mock: marks test that need to build AWS Lambda Functions or Layers with CDK -------------------------------------------------------------------------------- /source/aws_lambda/prepare_input/handler.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from typing import Dict, Any 5 | 6 | from aws_lambda_powertools import Logger, Tracer, Metrics 7 | from aws_lambda_powertools.utilities.typing import LambdaContext 8 | 9 | from shared.sfn_middleware import set_workflow_config 10 | 11 | logger = Logger() 12 | tracer = Tracer() 13 | metrics = Metrics() 14 | 15 | 16 | def lambda_handler(event: Dict[str, Any], _) -> Dict: 17 | """Add timeStarted to the workflowConfig of all items 18 | :param event: AWS Lambda Event 19 | :param context: AWS Lambda Context 20 | :return: the modified input 21 | """ 22 | config = set_workflow_config(event) 23 | return config 24 | -------------------------------------------------------------------------------- /source/aws_lambda/shared/date_helpers.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import datetime 5 | 6 | import parsedatetime as pdt 7 | from aws_lambda_powertools import Logger 8 | 9 | logger = Logger() 10 | 11 | 12 | def parse_datetime(tm: str) -> int: 13 | if "month" in tm: 14 | logger.warning("while months are supported, they are based off of the calendar of the start of year 1 CE") 15 | if "year" in tm: 16 | logger.warning("while years are supported, they are based off of the calendar of the start of year 1 CE") 17 | 18 | start_of_time = datetime.datetime.min 19 | cal = pdt.Calendar(version=pdt.VERSION_CONTEXT_STYLE) 20 | timedelta = cal.parseDT(tm, sourceTime=start_of_time)[0] - start_of_time 21 | return int(timedelta.total_seconds()) 22 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aspects.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import jsii 5 | from aws_cdk import CfnCondition, IAspect 6 | from constructs import IConstruct 7 | 8 | 9 | @jsii.implements(IAspect) 10 | class ConditionalResources: 11 | """Mark any CDK construct as conditional (this is useful to apply to stacks and L2+ constructs)""" 12 | 13 | def __init__(self, condition: CfnCondition): 14 | self.condition = condition 15 | 16 | def visit(self, node: IConstruct): 17 | if "is_cfn_element" in dir(node) and node.is_cfn_element(node): 18 | node.cfn_options.condition = self.condition 19 | elif "is_cfn_element" in dir(node.node.default_child): 20 | node.node.default_child.cfn_options.condition = self.condition 21 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/helpers/logger.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
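As a usage sketch for the parse_datetime helper in source/aws_lambda/shared/date_helpers.py above: the snippet below reproduces the same parsedatetime logic in a standalone form so the conversion from a natural-language interval to seconds can be tried directly. It assumes only that the parsedatetime package (pinned in requirements-dev.txt) is installed; the sample inputs and the printed values in the comments are illustrative expectations, not output captured from the solution.

import datetime

import parsedatetime as pdt


def interval_to_seconds(tm: str) -> int:
    # same approach as the shared helper: parse the phrase relative to
    # datetime.min and measure the resulting offset in whole seconds
    start_of_time = datetime.datetime.min
    cal = pdt.Calendar(version=pdt.VERSION_CONTEXT_STYLE)
    parsed, _ = cal.parseDT(tm, sourceTime=start_of_time)
    return int((parsed - start_of_time).total_seconds())


if __name__ == "__main__":
    print(interval_to_seconds("one day"))    # expected: 86400
    print(interval_to_seconds("two hours"))  # expected: 7200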
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import logging 5 | 6 | 7 | class Logger: 8 | """Set up a logger for this package""" 9 | 10 | @classmethod 11 | def get_logger(cls, name: str) -> logging.Logger: 12 | """ 13 | Gets the current logger for this package 14 | :param name: the name of the logger 15 | :return: the logger 16 | """ 17 | logger = logging.getLogger(name) 18 | if not len(logger.handlers): 19 | logger.setLevel(logging.INFO) 20 | handler = logging.StreamHandler() 21 | formatter = logging.Formatter("[%(levelname)s]\t%(name)s\t%(message)s") 22 | handler.setFormatter(formatter) 23 | logger.addHandler(handler) 24 | logger.propagate = False 25 | return logger 26 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/aws_lambda/layers/aws_solutions/layer.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from pathlib import Path 5 | 6 | from aws_cdk import Stack 7 | from constructs import Construct 8 | 9 | from aws_solutions.cdk.aws_lambda.python.layer import SolutionsPythonLayerVersion 10 | 11 | 12 | class SolutionsLayer(SolutionsPythonLayerVersion): 13 | def __init__(self, scope: Construct, construct_id: str, **kwargs): 14 | requirements_path: Path = Path(__file__).absolute().parent / "requirements" 15 | super().__init__(scope, construct_id, requirements_path, **kwargs) 16 | 17 | @staticmethod 18 | def get_or_create(scope: Construct, **kwargs): 19 | stack = Stack.of(scope) 20 | construct_id = "SolutionsLayer-DAE8E12F-3DEA-43FB-A4AA-E55AC50BD2E9" 21 | exists = stack.node.try_find_child(construct_id) 22 | if exists: 23 | return exists 24 | return SolutionsLayer(stack, construct_id, **kwargs) 25 | -------------------------------------------------------------------------------- /source/scheduler/common/aws_solutions/scheduler/common/task_resource.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import dataclasses 5 | import functools 6 | 7 | from aws_solutions.scheduler.common.schedule import Schedule 8 | from aws_solutions.scheduler.common.task import Task 9 | 10 | 11 | class TaskResource: 12 | """Used as a decorator on AWS Lambda Functions to transform the AWS Lambda Event input into a Task""" 13 | 14 | def __init__(self, func): 15 | functools.update_wrapper(self, func) 16 | self.func = func 17 | 18 | def __call__(self, *args, **kwargs): 19 | task: Task = Task(**args[0]) 20 | task: Task = self.func(task, args[1], **kwargs) 21 | 22 | if not task: 23 | return None 24 | else: 25 | # convert the schedule into a string 26 | if isinstance(task.schedule, Schedule): 27 | task.schedule = task.schedule.expression 28 | return dataclasses.asdict(task) 29 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/layers/aws_lambda_powertools/layer.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from pathlib import Path 5 | 6 | from aws_cdk import Stack 7 | from constructs import Construct 8 | 9 | from aws_solutions.cdk.aws_lambda.python.layer import SolutionsPythonLayerVersion 10 | 11 | 12 | class PowertoolsLayer(SolutionsPythonLayerVersion): 13 | def __init__(self, scope: Construct, construct_id: str, **kwargs): 14 | requirements_path: Path = Path(__file__).absolute().parent / "requirements" 15 | super().__init__(scope, construct_id, requirements_path, **kwargs) 16 | 17 | @staticmethod 18 | def get_or_create(scope: Construct, **kwargs): 19 | stack = Stack.of(scope) 20 | construct_id = "PowertoolsLayer-8E932F0F-197D-4026-A354-23D184C2A624" 21 | exists = stack.node.try_find_child(construct_id) 22 | if exists: 23 | return exists 24 | return PowertoolsLayer(stack, construct_id, **kwargs) 25 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/aws_lambda/functions/create_timestamp.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from pathlib import Path 5 | 6 | from constructs import Construct 7 | 8 | from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep 9 | from aws_solutions.cdk.cfn_guard import add_cfn_guard_suppressions 10 | 11 | 12 | class CreateTimestamp(SolutionStep): 13 | def __init__( 14 | self, 15 | scope: Construct, 16 | id: str, 17 | layers=None, 18 | ): 19 | super().__init__( 20 | scope, 21 | id, 22 | layers=layers, 23 | entrypoint=(Path(__file__).absolute().parents[4] / "aws_lambda" / "create_timestamp" / "handler.py"), 24 | ) 25 | 26 | add_cfn_guard_suppressions( 27 | self.function.role.node.try_find_child("Resource"), 28 | ["IAM_NO_INLINE_POLICY_CHECK"] 29 | ) 30 | 31 | def _set_permissions(self): 32 | pass # NOSONAR (python:S1186) - no permissions required 33 | -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/test_mappings.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import pytest 5 | from aws_cdk import App, Stack 6 | 7 | from aws_solutions.cdk.mappings import Mappings 8 | 9 | 10 | @pytest.mark.parametrize("send_data,result", [(True, "Yes"), (False, "No")]) 11 | def test_mappings(send_data, result): 12 | solution_id = "SO001" 13 | app = App() 14 | stack = Stack(app) 15 | Mappings(stack, solution_id=solution_id, send_anonymous_usage_data=send_data) 16 | 17 | template = app.synth().stacks[0].template 18 | 19 | assert template["Mappings"]["Solution"]["Data"]["ID"] == solution_id 20 | assert template["Mappings"]["Solution"]["Data"]["Version"] == "%%SOLUTION_VERSION%%" 21 | assert template["Mappings"]["Solution"]["Data"]["SendAnonymousUsageData"] == result 22 | 23 | assert template["Mappings"]["SourceCode"]["General"]["S3Bucket"] == "%%BUCKET_NAME%%" 24 | assert template["Mappings"]["SourceCode"]["General"]["KeyPrefix"] == "%%SOLUTION_NAME%%/%%SOLUTION_VERSION%%" 25 | -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/test_cfn_nag_suppressions.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from aws_cdk import CfnResource, App, Stack 5 | 6 | from aws_solutions.cdk.cfn_nag import add_cfn_nag_suppressions, CfnNagSuppression 7 | 8 | 9 | def test_cfn_nag_suppression(): 10 | rule_id = "W10" 11 | reason = "some reason" 12 | sup = CfnNagSuppression(rule_id=rule_id, reason=reason) 13 | 14 | assert sup.rule_id == rule_id 15 | assert sup.reason == reason 16 | 17 | 18 | def test_add_cfn_nag_suppression(): 19 | app = App() 20 | stack = Stack(app) 21 | resource = CfnResource(stack, "test", type="Custom::Test") 22 | 23 | add_cfn_nag_suppressions( 24 | resource, 25 | [ 26 | CfnNagSuppression(rule_id="W1", reason="reason 1"), 27 | CfnNagSuppression("W2", "reason 2"), 28 | ], 29 | ) 30 | 31 | assert resource.get_metadata("cfn_nag") == { 32 | "rules_to_suppress": [ 33 | {"id": "W1", "reason": "reason 1"}, 34 | {"id": "W2", "reason": "reason 2"}, 35 | ] 36 | } 37 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/aws_lambda/functions/prepare_input.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from pathlib import Path 5 | 6 | from constructs import Construct 7 | 8 | from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep 9 | from aws_solutions.cdk.cfn_guard import add_cfn_guard_suppressions 10 | 11 | class PrepareInput(SolutionStep): 12 | def __init__( 13 | self, 14 | scope: Construct, 15 | id: str, 16 | layers=None, 17 | ): 18 | super().__init__( 19 | scope, 20 | id, 21 | layers=layers, 22 | entrypoint=(Path(__file__).absolute().parents[4] / "aws_lambda" / "prepare_input" / "handler.py"), 23 | libraries=[Path(__file__).absolute().parents[4] / "aws_lambda" / "shared"], 24 | ) 25 | 26 | add_cfn_guard_suppressions( 27 | self.function.role.node.try_find_child("Resource"), 28 | ["IAM_NO_INLINE_POLICY_CHECK"] 29 | ) 30 | 31 | def _set_permissions(self): 32 | pass # NOSONAR (python:S1186) - no permissions required 33 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/aws_lambda/functions/create_scheduled_task.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from pathlib import Path 5 | 6 | from constructs import Construct 7 | 8 | from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep 9 | from aws_solutions.cdk.cfn_guard import add_cfn_guard_suppressions 10 | 11 | class CreateScheduledTask(SolutionStep): 12 | def __init__( 13 | self, 14 | scope: Construct, 15 | id: str, 16 | layers=None, 17 | ): 18 | super().__init__( 19 | scope, 20 | id, 21 | layers=layers, 22 | entrypoint=(Path(__file__).absolute().parents[4] / "aws_lambda" / "create_scheduled_task" / "handler.py"), 23 | libraries=[Path(__file__).absolute().parents[4] / "aws_lambda" / "shared"], 24 | ) 25 | 26 | add_cfn_guard_suppressions( 27 | self.function.role.node.try_find_child("Resource"), 28 | ["IAM_NO_INLINE_POLICY_CHECK"] 29 | ) 30 | 31 | def _set_permissions(self): 32 | pass # NOSONAR (python:S1186) - no permissions required 33 | -------------------------------------------------------------------------------- /source/aws_lambda/create_config/handler.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import json 5 | import os 6 | 7 | from aws_lambda_powertools import Logger, Tracer, Metrics 8 | from aws_lambda_powertools.metrics import MetricUnit 9 | from aws_lambda_powertools.utilities.data_classes import S3Event 10 | 11 | from shared.personalize.service_model import ServiceModel 12 | from shared.personalize_service import Personalize 13 | 14 | 15 | logger = Logger() 16 | tracer = Tracer() 17 | metrics = Metrics() 18 | 19 | 20 | @metrics.log_metrics 21 | @tracer.capture_lambda_handler 22 | def lambda_handler(event, context): 23 | """Generate and return a solution configuration file derived from the properties of a dataset group 24 | :param dict event: AWS Lambda Event (in this case, the dataset group and schedules) 25 | :param context: AWS Lambda Context 26 | :return: Dict 27 | """ 28 | dataset_group_name = event["datasetGroupName"] 29 | schedules = event.get("schedules") 30 | 31 | cli = Personalize() 32 | model = ServiceModel(cli, dataset_group_name=dataset_group_name) 33 | return model.get_config(dataset_group_name=dataset_group_name, schedules=schedules) 34 | -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/aws_lambda/java/fixtures/java_sample/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'java' 3 | } 4 | 5 | repositories { 6 | mavenCentral() 7 | } 8 | 9 | dependencies { 10 | implementation 'com.amazonaws:aws-lambda-java-core:1.2.1' 11 | implementation 'com.amazonaws:aws-lambda-java-events:3.1.0' 12 | runtimeOnly 'com.amazonaws:aws-lambda-java-log4j2:1.2.0' 13 | testImplementation 'org.junit.jupiter:junit-jupiter-api:5.7.0' 14 | testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.7.0' 15 | } 16 | 17 | test { 18 | useJUnitPlatform() 19 | } 20 | 21 | task packageFat(type: Zip) { 22 | from compileJava 23 | from processResources 24 | into('lib') { 25 | from configurations.runtimeClasspath 26 | } 27 | dirMode = 0755 28 | fileMode = 0755 29 | } 30 | 31 | task packageLibs(type: Zip) { 32 | into('java/lib') { 33 | from configurations.runtimeClasspath 34 | } 35 | dirMode = 0755 36 | fileMode = 0755 37 | } 38 | 39 | task packageSkinny(type: Zip) { 40 | from compileJava 41 | from processResources 42 | } 43 | 44 | java { 45 | 
sourceCompatibility = JavaVersion.VERSION_11 46 | targetCompatibility = JavaVersion.VERSION_11 47 | } 48 | 49 | build.dependsOn packageSkinny -------------------------------------------------------------------------------- /source/aws_lambda/shared/resource/base.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from __future__ import annotations 5 | 6 | from typing import List 7 | 8 | from aws_solutions.core import get_aws_partition, get_aws_region, get_aws_account 9 | from shared.resource.name import ResourceName 10 | 11 | 12 | class Resource: 13 | children: List[Resource] = [] 14 | has_soft_limit: bool = False 15 | 16 | def __init__(self): 17 | name = self.__class__.__name__ 18 | name = name[0].lower() + name[1:] 19 | self.name = ResourceName(name) 20 | 21 | def arn(self, name: str, **kwargs) -> str: 22 | if self.name.camel == "solutionVersion": 23 | arn_prefix = f"arn:{get_aws_partition()}:personalize:{get_aws_region()}:{get_aws_account()}" 24 | return f"{arn_prefix}:solution/{name}/{kwargs.get('sv_id', 'unknown')}" 25 | else: 26 | arn_prefix = f"arn:{get_aws_partition()}:personalize:{get_aws_region()}:{get_aws_account()}" 27 | return f"{arn_prefix}:{self.name.dash}/{name}" 28 | 29 | def __eq__(self, other): 30 | return self.name.camel == other.name.camel 31 | 32 | def __hash__(self): 33 | return hash(self.name.camel) 34 | -------------------------------------------------------------------------------- /source/tests/test_personalize_stack.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import pytest 5 | from aws_cdk import App 6 | from aws_solutions.cdk.synthesizers import SolutionStackSubstitutions 7 | from infrastructure.personalize.stack import PersonalizeStack 8 | 9 | 10 | @pytest.fixture 11 | def emails_context(): 12 | yield { 13 | "SOLUTION_NAME": "Maintaining Personalized Experiences with Machine Learning", 14 | "SOLUTION_ID": "SO0170test", 15 | "SOLUTION_VERSION": "99.99.99", 16 | "APPLICATION_TYPE": "AWS-Solutions", 17 | "@aws-cdk/aws-s3:serverAccessLogsUseBucketPolicy": True, 18 | "BUCKET_NAME": "test-solution-bucket", 19 | } 20 | 21 | 22 | def test_personalize_stack_email(solution, emails_context, monkeypatch): 23 | app = App(context=emails_context) 24 | 25 | PersonalizeStack( 26 | app, 27 | "PersonalizeStack", 28 | description="meta-stack", 29 | template_filename="maintaining-personalized-experiences-with-machine-learning-test.template", 30 | synthesizer=solution.synthesizer, 31 | ) 32 | synth = app.synth() 33 | 34 | # ensure the email parameter is present 35 | assert synth.get_stack_by_name("PersonalizeStack").template["Parameters"]["Email"] 36 | -------------------------------------------------------------------------------- /source/aws_lambda/create_schema/handler.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
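To make the ARN layout produced by Resource.arn in source/aws_lambda/shared/resource/base.py above concrete, here is a minimal standalone sketch. It assumes that ResourceName.dash renders a camelCase resource name such as datasetGroup as dataset-group (name.py is not reproduced here), and it hard-codes the partition, region, and account values that aws_solutions.core would normally resolve at runtime; the example resource name is illustrative.

def personalize_arn(
    resource_dash: str,
    name: str,
    partition: str = "aws",
    region: str = "us-east-1",
    account: str = "111111111111",
) -> str:
    # mirrors the non-solutionVersion branch of Resource.arn
    return f"arn:{partition}:personalize:{region}:{account}:{resource_dash}/{name}"


# e.g. a dataset group named "demo"
print(personalize_arn("dataset-group", "demo"))
# arn:aws:personalize:us-east-1:111111111111:dataset-group/demo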
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from typing import Dict, Any 5 | 6 | from aws_lambda_powertools import Logger, Tracer, Metrics 7 | from aws_lambda_powertools.utilities.typing import LambdaContext 8 | 9 | from shared.sfn_middleware import PersonalizeResource 10 | 11 | RESOURCE = "schema" 12 | CONFIG = { 13 | "name": { 14 | "source": "event", 15 | "path": "serviceConfig.name", 16 | }, 17 | "domain": { 18 | "source": "event", 19 | "path": "serviceConfig.domain", 20 | "default": "omit", 21 | }, 22 | "schema": {"source": "event", "path": "serviceConfig.schema", "as": "string"}, 23 | } 24 | logger = Logger() 25 | tracer = Tracer() 26 | metrics = Metrics() 27 | 28 | 29 | @metrics.log_metrics 30 | @tracer.capture_lambda_handler 31 | @PersonalizeResource( 32 | resource=RESOURCE, 33 | config=CONFIG, 34 | ) 35 | def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict: 36 | """Create a schema in Amazon Personalize based on the configuration in `event` 37 | :param event: AWS Lambda Event 38 | :param context: AWS Lambda Context 39 | :return: the configured schema 40 | """ 41 | return event.get("resource") # return the resource 42 | -------------------------------------------------------------------------------- /source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/scheduler/handler.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from typing import Dict 5 | 6 | from aws_lambda_powertools import Logger, Tracer, Metrics 7 | from aws_lambda_powertools.utilities.typing import LambdaContext 8 | 9 | from aws_solutions.scheduler.common import ( 10 | Scheduler, 11 | Task, 12 | TaskResource, 13 | ) 14 | 15 | logger = Logger() 16 | tracer = Tracer() 17 | scheduler = Scheduler() 18 | metrics = Metrics(service="Scheduler") 19 | 20 | 21 | @metrics.log_metrics 22 | @tracer.capture_lambda_handler 23 | @TaskResource 24 | def create_schedule(task: Task, _: LambdaContext) -> Dict: 25 | return scheduler.create(task) 26 | 27 | 28 | @metrics.log_metrics 29 | @tracer.capture_lambda_handler 30 | @TaskResource 31 | def read_schedule(task: Task, _: LambdaContext) -> Dict: 32 | return scheduler.read(task) 33 | 34 | 35 | @metrics.log_metrics 36 | @tracer.capture_lambda_handler 37 | @TaskResource 38 | def update_schedule(task: Task, _: LambdaContext) -> Dict: 39 | return scheduler.update(task) 40 | 41 | 42 | @metrics.log_metrics 43 | @tracer.capture_lambda_handler 44 | @TaskResource 45 | def delete_schedule(task: Task, _: LambdaContext) -> Dict: 46 | return scheduler.delete(task) 47 | -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/aws_lambda/cfn_custom_resources/solution_metrics/test_metrics_cdk.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import pytest 5 | from aws_cdk import Stack, App 6 | from constructs import Construct 7 | 8 | from aws_solutions.cdk.aws_lambda.cfn_custom_resources.solutions_metrics.metrics import ( 9 | Metrics, 10 | ) 11 | 12 | ADDITIONAL_METRICS_VALID = { 13 | "one": 1, 14 | "two": {"three": "3"}, 15 | } 16 | 17 | 18 | class SomeStack(Stack): 19 | def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: 20 | super().__init__(scope, construct_id, **kwargs) 21 | Metrics(self, construct_id, dict(**ADDITIONAL_METRICS_VALID)) 22 | 23 | 24 | @pytest.fixture 25 | def test_stack_metrics(): 26 | app = App() 27 | SomeStack(app, "some-test-metrics") 28 | yield app.synth().get_stack_by_name("some-test-metrics").template 29 | 30 | 31 | def test_metrics_valid(test_stack_metrics): 32 | metric_resource = test_stack_metrics["Resources"]["SolutionMetricsAnonymousData"] 33 | 34 | assert metric_resource["Type"] == "Custom::AnonymousData" 35 | assert all(metric_resource["Properties"][k] == v for k, v in ADDITIONAL_METRICS_VALID.items()) 36 | assert metric_resource["Properties"]["Region"]["Ref"] == "AWS::Region" 37 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior. 15 | 16 | **Expected behavior** 17 | A clear and concise description of what you expected to happen. 18 | 19 | **Please complete the following information about the solution:** 20 | - [ ] Version: [e.g. v0.0.1] 21 | 22 | To get the version of the solution, you can look at the description of the created CloudFormation stack. For example, "(SO0170) Maintaining Personalized Experiences with Machine Learning [...]". 23 | 24 | - [ ] Region: [e.g. us-east-1] 25 | - [ ] Was the solution modified from the version published on this repository? 26 | - [ ] If the answer to the previous question was yes, are the changes available on GitHub? 27 | - [ ] Have you checked your [service quotas](https://docs.aws.amazon.com/general/latest/gr/aws_service_limits.html) for the services this solution uses? 28 | - [ ] Were there any errors in the CloudWatch Logs? 29 | 30 | **Screenshots** 31 | If applicable, add screenshots to help explain your problem (please **DO NOT include sensitive information**). 32 | 33 | **Additional context** 34 | Add any other context about the problem here. 35 | -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/aws_lambda/cfn_custom_resources/resource_hash/test_resource_name_cdk.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import pytest 5 | from aws_cdk import Stack, App 6 | from constructs import Construct 7 | 8 | from aws_solutions.cdk.aws_lambda.cfn_custom_resources.resource_name.name import ( 9 | ResourceName, 10 | ) 11 | 12 | 13 | class SomeStack(Stack): 14 | def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: 15 | super().__init__(scope, construct_id, **kwargs) 16 | self.name_1 = ResourceName(self, "name_1", purpose="var_1", max_length=32) 17 | self.name_2 = ResourceName(self, "name_2", purpose="var_2", max_length=32) 18 | 19 | 20 | @pytest.fixture 21 | def resource_naming_stack(): 22 | app = App() 23 | SomeStack(app, "some-test-naming") 24 | yield app.synth().get_stack_by_name("some-test-naming").template 25 | 26 | 27 | def test_resource_service_tokens(resource_naming_stack): 28 | # There should be only one lambda function generated. 29 | service_tokens = [ 30 | resource["Properties"]["ServiceToken"] 31 | for resource in resource_naming_stack["Resources"].values() 32 | if resource["Type"] == "Custom::ResourceName" 33 | ] 34 | assert all(st == service_tokens[0] for st in service_tokens) 35 | -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/aws_lambda/cfn_custom_resources/resource_name/test_resource_hash_cdk.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import pytest 5 | from aws_cdk import Stack, App 6 | from constructs import Construct 7 | 8 | from aws_solutions.cdk.aws_lambda.cfn_custom_resources.resource_name.name import ( 9 | ResourceName, 10 | ) 11 | 12 | 13 | class SomeStack(Stack): 14 | def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: 15 | super().__init__(scope, construct_id, **kwargs) 16 | self.name_1 = ResourceName(self, "name_1", purpose="var_1", max_length=32) 17 | self.name_2 = ResourceName(self, "name_2", purpose="var_2", max_length=32) 18 | 19 | 20 | @pytest.fixture 21 | def resource_naming_stack(): 22 | app = App() 23 | SomeStack(app, "some-test-naming") 24 | yield app.synth().get_stack_by_name("some-test-naming").template 25 | 26 | 27 | def test_resource_service_tokens(resource_naming_stack): 28 | # There should be only one lambda function generated. 29 | service_tokens = [ 30 | resource["Properties"]["ServiceToken"] 31 | for resource in resource_naming_stack["Resources"].values() 32 | if resource["Type"] == "Custom::ResourceName" 33 | ] 34 | assert all(st == service_tokens[0] for st in service_tokens) 35 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from pathlib import Path 5 | 6 | from aws_solutions.cdk.context import SolutionContext 7 | from aws_solutions.cdk.stack import SolutionStack 8 | from aws_solutions.cdk.synthesizers import SolutionStackSubstitutions 9 | 10 | 11 | class CDKSolution: 12 | """ 13 | A CDKSolution stores helper utilities for building AWS Solutions using the AWS CDK in Python 14 | 15 | :type cdk_json_path: Path 16 | :param cdk_json_path: The full path to the cdk.json context for your application 17 | :type qualifier: str 18 | :param qualifier: A string that is added to all resources in the CDK bootstrap stack. The default value has no significance. 19 | """ 20 | 21 | def __init__(self, cdk_json_path: Path, qualifier="hnb659fds"): 22 | self.qualifier = qualifier 23 | self.context = SolutionContext(cdk_json_path=cdk_json_path) 24 | self.synthesizer = SolutionStackSubstitutions(qualifier=self.qualifier) 25 | 26 | def reset(self) -> None: 27 | """ 28 | Get a new synthesizer for this CDKSolution - useful for testing 29 | :return: None 30 | """ 31 | self.synthesizer = SolutionStackSubstitutions(qualifier=self.qualifier, generate_bootstrap_version_rule=False) 32 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/step_functions/failure_fragment.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from typing import List 5 | 6 | from aws_cdk.aws_stepfunctions import ( 7 | StateMachineFragment, 8 | State, 9 | INextable, 10 | Fail, 11 | TaskInput, 12 | ) 13 | from constructs import Construct 14 | 15 | from personalize.sns.notifications import Notifications 16 | 17 | 18 | class FailureFragment(StateMachineFragment): 19 | def __init__( 20 | self, 21 | scope: Construct, 22 | notifications: Notifications, 23 | construct_id: str = "Failure", 24 | ): 25 | if construct_id != "Failure": 26 | construct_id = " ".join([construct_id, "Failure"]).strip() 27 | super().__init__(scope, construct_id) 28 | 29 | self.failure_state = Fail(self, construct_id) 30 | 31 | self.notification_state = notifications.state( 32 | self, 33 | construct_id=f"Send {construct_id} Message", 34 | payload=TaskInput.from_object( 35 | { 36 | "datasetGroup.$": "$.datasetGroup.serviceConfig.name", 37 | "statesError.$": "$.statesError", 38 | } 39 | ), 40 | ).next(self.failure_state) 41 | 42 | @property 43 | def start_state(self) -> State: 44 | return self.notification_state 45 | 46 | @property 47 | def end_states(self) -> List[INextable]: 48 | return [self.failure_state] 49 | -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/test_logging.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import logging 5 | import os 6 | 7 | import pytest 8 | 9 | from aws_solutions.core.logging import get_level, get_logger 10 | 11 | 12 | @pytest.fixture(scope="function", autouse=True) 13 | def reset_logging_defaults(): 14 | """Remove any logging configuration defaults that might have existed before starting any test""" 15 | try: 16 | os.environ.pop("LOG_LEVEL") 17 | except KeyError: 18 | pass 19 | 20 | 21 | @pytest.mark.parametrize("level", ["DEBUG", "INFO", "WARNING", "ERROR"]) 22 | def test_valid_levels(level): 23 | os.environ["LOG_LEVEL"] = level 24 | assert get_level() == level 25 | 26 | 27 | def test_invalid_level(): 28 | os.environ["LOG_LEVEL"] = "TRACE" 29 | assert get_level() == "WARNING" 30 | os.environ["LOG_LEVEL"] = "INFO" 31 | 32 | 33 | def test_get_logger(): 34 | logger = get_logger(__name__) 35 | assert logger.level == logging.WARNING 36 | 37 | 38 | def test_logger_log(caplog): 39 | logger = get_logger(__name__) 40 | logger.error("This is an error") 41 | logger.warning("This is a warning") 42 | logger.info("This is an informational message") 43 | logger.debug("This is a debug message") 44 | 45 | assert "This is an error" in caplog.text 46 | assert "This is a warning" in caplog.text 47 | assert "This is an informational message" not in caplog.text 48 | assert "This is a debug message" not in caplog.text 49 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/helpers/copytree.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import os 5 | import shutil 6 | from pathlib import Path 7 | 8 | 9 | def ignore_globs(*globs): 10 | """Function that can be used as copytree() ignore parameter. 11 | 12 | Patterns is a sequence of glob-style patterns 13 | that are used to exclude files""" 14 | 15 | def _ignore_globs(path, names): 16 | ignored_names = [] 17 | paths = [Path(os.path.join(path, name)).resolve() for name in names] 18 | for pattern in globs: 19 | for i, p in enumerate(paths): 20 | if p.match(pattern): 21 | ignored_names.append(names[i]) 22 | return set(ignored_names) 23 | 24 | return _ignore_globs 25 | 26 | 27 | def copytree(src, dst, symlinks=False, ignore=None): 28 | if ignore: 29 | ignore.extend([ignored[:-2] for ignored in ignore if ignored.endswith("/*")]) 30 | else: 31 | ignore = [] 32 | 33 | if not os.path.exists(dst): 34 | os.makedirs(dst) 35 | 36 | for item in os.listdir(src): 37 | s = os.path.join(src, item) 38 | d = os.path.join(dst, item) 39 | 40 | # ignore full directories upfront 41 | if any(Path(s).match(ignored) for ignored in ignore): 42 | continue 43 | 44 | if os.path.isdir(s): 45 | shutil.copytree(s, d, symlinks, ignore=ignore_globs(*ignore)) 46 | else: 47 | shutil.copy2(s, d) 48 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/aws_lambda/functions/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
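A short usage sketch for the copytree and ignore_globs helpers in source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/helpers/copytree.py above. The import path assumes the helpers_cdk package is installed in editable mode (as requirements-dev.txt does with -e cdk_solution_helper_py/helpers_cdk); the source and destination paths are placeholders.

from aws_solutions.cdk.helpers.copytree import copytree

# copy a folder of handler code, skipping byte-compiled files and caches;
# a pattern ending in "/*" also causes the matching directory itself to be skipped
copytree(
    src="aws_lambda/create_schema",
    dst="build/aws_lambda/create_schema",
    ignore=["*.pyc", "__pycache__/*"],
)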
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from personalize.aws_lambda.functions.create_batch_inference_job import ( 5 | CreateBatchInferenceJob, 6 | ) 7 | from personalize.aws_lambda.functions.create_batch_segment_job import ( 8 | CreateBatchSegmentJob, 9 | ) 10 | from personalize.aws_lambda.functions.create_campaign import CreateCampaign 11 | from personalize.aws_lambda.functions.create_config import CreateConfig 12 | from personalize.aws_lambda.functions.create_dataset import CreateDataset 13 | from personalize.aws_lambda.functions.create_dataset_group import CreateDatasetGroup 14 | from personalize.aws_lambda.functions.create_dataset_import_job import ( 15 | CreateDatasetImportJob, 16 | ) 17 | from personalize.aws_lambda.functions.create_event_tracker import CreateEventTracker 18 | from personalize.aws_lambda.functions.create_filter import CreateFilter 19 | from personalize.aws_lambda.functions.create_recommender import CreateRecommender 20 | from personalize.aws_lambda.functions.create_scheduled_task import CreateScheduledTask 21 | from personalize.aws_lambda.functions.create_schema import CreateSchema 22 | from personalize.aws_lambda.functions.create_solution import CreateSolution 23 | from personalize.aws_lambda.functions.create_solution_version import ( 24 | CreateSolutionVersion, 25 | ) 26 | from personalize.aws_lambda.functions.create_timestamp import CreateTimestamp 27 | from personalize.aws_lambda.functions.s3_event import S3EventHandler 28 | -------------------------------------------------------------------------------- /source/infrastructure/deploy.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 4 | # SPDX-License-Identifier: Apache-2.0 5 | 6 | import logging 7 | from pathlib import Path 8 | 9 | import aws_cdk as cdk 10 | from aws_solutions.cdk import CDKSolution 11 | from cdk_nag import AwsSolutionsChecks, NagPackSuppression, NagSuppressions 12 | from personalize.stack import PersonalizeStack 13 | 14 | logger = logging.getLogger("cdk-helper") 15 | solution = CDKSolution(cdk_json_path=Path(__file__).parent.absolute() / "cdk.json") 16 | 17 | 18 | @solution.context.requires("SOLUTION_NAME") 19 | @solution.context.requires("SOLUTION_ID") 20 | @solution.context.requires("SOLUTION_VERSION") 21 | @solution.context.requires("BUCKET_NAME") 22 | def build_app(context): 23 | app = cdk.App(context=context) 24 | stack = PersonalizeStack( 25 | app, 26 | "PersonalizeStack", 27 | description=f"Maintaining Personalized Experiences with Machine Learning", 28 | template_filename="maintaining-personalized-experiences-with-machine-learning.template", 29 | synthesizer=solution.synthesizer, 30 | ) 31 | cdk.Aspects.of(app).add(AwsSolutionsChecks(verbose=True)) 32 | 33 | NagSuppressions.add_stack_suppressions( 34 | stack, 35 | [ 36 | NagPackSuppression( 37 | id="AwsSolutions-L1", reason="Python lambda runtime is maintained at version 3.11 as a stable version." 
38 | ) 39 | ], 40 | ) 41 | 42 | return app.synth() 43 | 44 | 45 | if __name__ == "__main__": 46 | build_app() 47 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # compiled output 2 | **/global-s3-assets 3 | **/regional-s3-assets 4 | **/build-s3-assets 5 | **/open-source 6 | **/tmp 7 | 8 | ### Python ### 9 | # Byte-compiled / optimized / DLL files 10 | __pycache__/ 11 | *.py[cod] 12 | *$py.class 13 | 14 | # Python Distribution / packaging 15 | *.egg-info/ 16 | *.egg 17 | 18 | # Python Virtual Environments 19 | **/venv* 20 | **/.venv* 21 | .python-version 22 | pyvenv.cfg 23 | 24 | ## Python Testing 25 | **/.pytest_cache 26 | **/.coverage 27 | **/coverage-reports/ 28 | .coverage.* 29 | 30 | # linting, scanning configurations, sonarqube 31 | .scannerwork/ 32 | 33 | ### VisualStudioCode ### 34 | .vscode/* 35 | 36 | ### IntelliJ/ PyCharm ### 37 | **/.idea/* 38 | 39 | # System Files 40 | **/.DS_Store 41 | 42 | # CDK 43 | **/cdk.out 44 | 45 | # Glue 46 | .glue/* 47 | 48 | # Generated test assets 49 | source/infrastructure/tests/assets/* 50 | !source/infrastructure/tests/assets/.keep 51 | source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/build 52 | source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/.gradle 53 | source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/.idea 54 | 55 | # gradle build files 56 | **/.gradle/* 57 | 58 | # java build files 59 | **/java/**/build 60 | 61 | # python build files 62 | source/cdk_solution_helper_py/helpers_cdk/build/* 63 | source/cdk_solution_helper_py/helpers_common/build/* 64 | source/scheduler/common/build/* 65 | source/scheduler/cdk/build/* 66 | 67 | # nightswatch test results 68 | .nightswatch/functional/test-results.xml 69 | 70 | # Brazil Build files 71 | /build -------------------------------------------------------------------------------- /source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/build.gradle: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | plugins { 5 | id 'java' 6 | id 'jacoco' 7 | id 'org.sonarqube' version '3.3' 8 | } 9 | 10 | group 'com.amazonaws.solutions' 11 | version '1.0-SNAPSHOT' 12 | 13 | repositories { 14 | mavenCentral() 15 | } 16 | 17 | dependencies { 18 | testImplementation 'org.junit.jupiter:junit-jupiter-api:5.7.0' 19 | testImplementation 'org.junit.jupiter:junit-jupiter-params:5.7.0' 20 | testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.7.0' 21 | implementation 'com.amazonaws:aws-lambda-java-core:1.2.1' 22 | implementation platform('com.amazonaws:aws-java-sdk-bom:1.11.1000') 23 | implementation 'org.quartz-scheduler:quartz:2.3.2' 24 | } 25 | 26 | java { 27 | sourceCompatibility = JavaVersion.VERSION_11 28 | targetCompatibility = JavaVersion.VERSION_11 29 | } 30 | 31 | jacocoTestReport { 32 | reports { 33 | xml.enabled true 34 | csv.enabled false 35 | html.enabled false 36 | } 37 | } 38 | 39 | test { 40 | useJUnitPlatform() 41 | } 42 | test.finalizedBy jacocoTestReport 43 | 44 | sonarqube { 45 | properties { 46 | property "sonar.sourceEncoding", "UTF-8" 47 | } 48 | } 49 | 50 | tasks.named('sonarqube').configure { 51 | dependsOn test 52 | } 53 | 54 | task buildZip(type: Zip) { 55 | from compileJava 56 | from processResources 57 | into('lib') { 58 | from configurations.runtimeClasspath 59 | } 60 | } 61 | 62 | build.dependsOn buildZip -------------------------------------------------------------------------------- /source/aws_lambda/create_event_tracker/handler.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from typing import Dict, Any 5 | 6 | from aws_lambda_powertools import Logger, Tracer, Metrics 7 | from aws_lambda_powertools.utilities.typing import LambdaContext 8 | 9 | from shared.sfn_middleware import PersonalizeResource 10 | 11 | RESOURCE = "eventTracker" 12 | STATUS = "eventTracker.status" 13 | CONFIG = { 14 | "name": { 15 | "source": "event", 16 | "path": "serviceConfig.name", 17 | }, 18 | "datasetGroupArn": { 19 | "source": "event", 20 | "path": "serviceConfig.datasetGroupArn", 21 | }, 22 | "timeStarted": { 23 | "source": "event", 24 | "path": "workflowConfig.timeStarted", 25 | "default": "omit", 26 | "as": "iso8601", 27 | }, 28 | "tags": { 29 | "source": "event", 30 | "path": "serviceConfig.tags", 31 | "default": "omit", 32 | }, 33 | } 34 | 35 | logger = Logger() 36 | tracer = Tracer() 37 | metrics = Metrics() 38 | 39 | 40 | @metrics.log_metrics 41 | @tracer.capture_lambda_handler 42 | @PersonalizeResource( 43 | resource=RESOURCE, 44 | status=STATUS, 45 | config=CONFIG, 46 | ) 47 | def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict: 48 | """Create an event tracker in Amazon Personalize based on the configuration in `event` 49 | :param event: AWS Lambda Event 50 | :param context: AWS Lambda Context 51 | :return: the configured event tracker 52 | """ 53 | return event.get("resource") # return the event tracker 54 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/mappings.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from aws_cdk import CfnMapping 5 | from constructs import Construct 6 | 7 | 8 | class Mappings: 9 | def __init__( 10 | self, 11 | parent: Construct, 12 | solution_id: str, 13 | send_anonymous_usage_data: bool = True, 14 | quicksight_template_arn: bool = False, 15 | ): 16 | self.parent = parent 17 | 18 | # Track the solution mapping (ID, version, anonymous usage data) 19 | self.solution_mapping = CfnMapping( 20 | parent, 21 | "Solution", 22 | mapping={ 23 | "Data": { 24 | "ID": solution_id, 25 | "Version": "%%SOLUTION_VERSION%%", 26 | "SendAnonymousUsageData": "Yes" if send_anonymous_usage_data else "No", 27 | "SolutionName": "%%SOLUTION_NAME%%", 28 | "ApplicationType": "AWS-Solutions", 29 | } 30 | }, 31 | lazy=False, 32 | ) 33 | 34 | # track the s3 bucket, key prefix and (optional) quicksight template source 35 | general = {"S3Bucket": "%%BUCKET_NAME%%", "KeyPrefix": "%%SOLUTION_NAME%%/%%SOLUTION_VERSION%%"} 36 | if quicksight_template_arn: 37 | general["QuickSightSourceTemplateArn"] = "%%QUICKSIGHT_SOURCE%%" 38 | 39 | self.source_mapping = CfnMapping( 40 | parent, 41 | "SourceCode", 42 | mapping={"General": general}, 43 | lazy=False, 44 | ) 45 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/cfn_nag.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from dataclasses import dataclass 5 | from typing import List 6 | 7 | import jsii 8 | from aws_cdk import CfnResource, IAspect 9 | from constructs import IConstruct 10 | 11 | 12 | @dataclass 13 | class CfnNagSuppression: 14 | rule_id: str 15 | reason: str 16 | 17 | 18 | def add_cfn_nag_suppressions(resource: CfnResource, suppressions: List[CfnNagSuppression]): 19 | resource.add_metadata( 20 | "cfn_nag", 21 | { 22 | "rules_to_suppress": [ 23 | {"id": suppression.rule_id, "reason": suppression.reason} for suppression in suppressions 24 | ] 25 | }, 26 | ) 27 | 28 | 29 | @jsii.implements(IAspect) 30 | class CfnNagSuppressAll: 31 | """Suppress certain cfn_nag warnings that can be ignored by this solution""" 32 | 33 | def __init__(self, suppress: List[CfnNagSuppression], resource_type: str): 34 | self.suppressions = suppress 35 | self.resource_type = resource_type 36 | 37 | def visit(self, node: IConstruct): 38 | if "is_cfn_element" in dir(node) and node.is_cfn_element(node): 39 | if getattr(node, "cfn_resource_type", None) == self.resource_type: 40 | add_cfn_nag_suppressions(node, self.suppressions) 41 | 42 | elif "is_cfn_element" in dir(node.node.default_child) and ( 43 | getattr(node.node.default_child, "cfn_resource_type", None) == self.resource_type 44 | ): 45 | add_cfn_nag_suppressions(node.node.default_child, self.suppressions) 46 | -------------------------------------------------------------------------------- /source/aws_lambda/create_filter/handler.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from typing import Dict, Any 5 | 6 | from aws_lambda_powertools import Logger, Tracer, Metrics 7 | from aws_lambda_powertools.utilities.typing import LambdaContext 8 | 9 | from shared.sfn_middleware import PersonalizeResource 10 | 11 | RESOURCE = "filter" 12 | STATUS = "filter.status" 13 | CONFIG = { 14 | "name": { 15 | "source": "event", 16 | "path": "serviceConfig.name", 17 | }, 18 | "datasetGroupArn": { 19 | "source": "event", 20 | "path": "serviceConfig.datasetGroupArn", 21 | }, 22 | "filterExpression": { 23 | "source": "event", 24 | "path": "serviceConfig.filterExpression", 25 | }, 26 | "timeStarted": { 27 | "source": "event", 28 | "path": "workflowConfig.timeStarted", 29 | "default": "omit", 30 | "as": "iso8601", 31 | }, 32 | "tags": { 33 | "source": "event", 34 | "path": "serviceConfig.tags", 35 | "default": "omit", 36 | }, 37 | } 38 | 39 | logger = Logger() 40 | tracer = Tracer() 41 | metrics = Metrics() 42 | 43 | 44 | @metrics.log_metrics 45 | @tracer.capture_lambda_handler 46 | @PersonalizeResource( 47 | resource=RESOURCE, 48 | status=STATUS, 49 | config=CONFIG, 50 | ) 51 | def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict: 52 | """Create a filter in Amazon Personalize based on the configuration in `event` 53 | :param event: AWS Lambda Event 54 | :param context: AWS Lambda Context 55 | :return: the configured filter 56 | """ 57 | return event.get("resource") # return the filter 58 | -------------------------------------------------------------------------------- /source/infrastructure/setup.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import json 5 | from pathlib import Path 6 | 7 | import setuptools 8 | 9 | readme_path = Path(__file__).resolve().parent.parent.parent / "README.md" 10 | with open(readme_path) as fp: 11 | long_description = fp.read() 12 | 13 | cdk_json_path = Path(__file__).resolve().parent / "cdk.json" 14 | cdk_json = json.loads(cdk_json_path.read_text()) 15 | VERSION = cdk_json["context"]["SOLUTION_VERSION"] 16 | 17 | 18 | setuptools.setup( 19 | name="infrastructure", 20 | version=VERSION, 21 | description="AWS CDK stack to deploy the AWS MLOps for Amazon Personalize solution.", 22 | long_description=long_description, 23 | long_description_content_type="text/markdown", 24 | author="AWS Solutions Builders", 25 | packages=setuptools.find_packages(), 26 | install_requires=[ 27 | "aws-cdk-lib==2.88.0", 28 | "pip>=22.3.1", 29 | ], 30 | python_requires=">=3.11", 31 | classifiers=[ 32 | "Development Status :: 4 - Beta", 33 | "Intended Audience :: Developers", 34 | "License :: OSI Approved :: Apache Software License", 35 | "Programming Language :: JavaScript", 36 | "Programming Language :: Python :: 3 :: Only", 37 | "Programming Language :: Python :: 3.7", 38 | "Programming Language :: Python :: 3.8", 39 | "Programming Language :: Python :: 3.9", 40 | "Programming Language :: Python :: 3.10", 41 | "Programming Language :: Python :: 3.11", 42 | "Topic :: Software Development :: Code Generators", 43 | "Topic :: Utilities", 44 | "Typing :: Typed", 45 | ], 46 | ) 47 | -------------------------------------------------------------------------------- /source/aws_lambda/create_dataset/handler.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates.
All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from typing import Dict, Any 5 | 6 | from aws_lambda_powertools import Logger, Tracer, Metrics 7 | from aws_lambda_powertools.utilities.typing import LambdaContext 8 | 9 | from shared.sfn_middleware import PersonalizeResource 10 | 11 | RESOURCE = "dataset" 12 | CONFIG = { 13 | "name": { 14 | "source": "event", 15 | "path": "serviceConfig.name", 16 | }, 17 | "datasetType": { 18 | "source": "event", 19 | "path": "serviceConfig.datasetType", 20 | }, 21 | "datasetGroupArn": { 22 | "source": "event", 23 | "path": "serviceConfig.datasetGroupArn", 24 | }, 25 | "schemaArn": {"source": "event", "path": "serviceConfig.schemaArn"}, 26 | "timeStarted": { 27 | "source": "event", 28 | "path": "workflowConfig.timeStarted", 29 | "default": "omit", 30 | "as": "iso8601", 31 | }, 32 | "tags": { 33 | "source": "event", 34 | "path": "serviceConfig.tags", 35 | "default": "omit", 36 | }, 37 | } 38 | 39 | logger = Logger() 40 | tracer = Tracer() 41 | metrics = Metrics() 42 | 43 | 44 | @metrics.log_metrics 45 | @tracer.capture_lambda_handler 46 | @PersonalizeResource( 47 | resource=RESOURCE, 48 | config=CONFIG, 49 | ) 50 | def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict: 51 | """Create a dataset in Amazon Personalize based on the configuration in `event` 52 | :param event: AWS Lambda Event 53 | :param context: AWS Lambda Context 54 | :return: the configured dataset 55 | """ 56 | return event.get("resource") # return the dataset 57 | -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/test_helpers.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import os 5 | 6 | import pytest 7 | from moto import mock_sts 8 | 9 | from aws_solutions.core import ( 10 | get_aws_region, 11 | get_service_client, 12 | get_aws_partition, 13 | get_aws_account, 14 | get_service_resource, 15 | ) 16 | 17 | 18 | @pytest.fixture(autouse=True, scope="module") 19 | def valid_solution_env(): 20 | os.environ["AWS_REGION"] = "us-east-1" 21 | os.environ["SOLUTION_ID"] = "SO0100" 22 | os.environ["SOLUTION_VERSION"] = "1.0.0" 23 | yield 24 | del os.environ["AWS_REGION"] 25 | del os.environ["SOLUTION_ID"] 26 | del os.environ["SOLUTION_VERSION"] 27 | 28 | 29 | def test_get_aws_region_valid(): 30 | assert get_aws_region() == "us-east-1" 31 | 32 | 33 | def test_get_service_client(): 34 | cli = get_service_client("ec2") 35 | assert cli.meta.service_model.service_name == "ec2" 36 | 37 | 38 | def test_get_service_resource(): 39 | ec2 = get_service_resource("ec2") 40 | assert ec2.meta.service_name == "ec2" 41 | 42 | 43 | @pytest.mark.parametrize( 44 | "region,partition", 45 | [ 46 | ("us-east-1", "aws"), 47 | ("us-gov-west-1", "aws-us-gov"), 48 | ("us-gov-west-2", "aws-us-gov"), 49 | ("cn-north-1", "aws-cn"), 50 | ("cn-northwest-1", "aws-cn"), 51 | ], 52 | ) 53 | def test_get_aws_partition(region, partition, mocker): 54 | mocker.patch("aws_solutions.core.helpers.get_aws_region", return_value=region) 55 | assert get_aws_partition() == partition 56 | 57 | 58 | @mock_sts 59 | def test_get_aws_account_id(mocker): 60 | assert get_aws_account() == "1" * 12 61 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/aws_lambda/functions/create_schema.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from pathlib import Path 5 | from typing import Optional 6 | 7 | import aws_cdk.aws_iam as iam 8 | from aws_cdk import Aws 9 | from aws_cdk.aws_stepfunctions import IChainable 10 | from constructs import Construct 11 | 12 | from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep 13 | from aws_solutions.cdk.cfn_guard import add_cfn_guard_suppressions 14 | 15 | class CreateSchema(SolutionStep): 16 | def __init__( 17 | self, 18 | scope: Construct, 19 | id: str, 20 | layers=None, 21 | failure_state: Optional[IChainable] = None, 22 | ): 23 | super().__init__( 24 | scope, 25 | id, 26 | layers=layers, 27 | failure_state=failure_state, 28 | entrypoint=(Path(__file__).absolute().parents[4] / "aws_lambda" / "create_schema" / "handler.py"), 29 | libraries=[Path(__file__).absolute().parents[4] / "aws_lambda" / "shared"], 30 | ) 31 | 32 | add_cfn_guard_suppressions( 33 | self.function.role.node.try_find_child("Resource"), 34 | ["IAM_NO_INLINE_POLICY_CHECK"] 35 | ) 36 | 37 | def _set_permissions(self): 38 | self.function.add_to_role_policy( 39 | statement=iam.PolicyStatement( 40 | actions=[ 41 | "personalize:DescribeSchema", 42 | "personalize:CreateSchema", 43 | ], 44 | effect=iam.Effect.ALLOW, 45 | resources=[f"arn:{Aws.PARTITION}:personalize:{Aws.REGION}:{Aws.ACCOUNT_ID}:schema/*"], 46 | ) 47 | ) 48 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/aws_lambda/functions/environment.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from dataclasses import dataclass, field 5 | 6 | from aws_cdk import Aws 7 | from aws_cdk.aws_lambda import IFunction 8 | 9 | from aws_solutions.cdk.aws_lambda.environment_variable import EnvironmentVariable 10 | 11 | 12 | @dataclass 13 | class Environment: 14 | """ 15 | Tracks environment variables common to AWS Lambda functions deployed by this solution 16 | """ 17 | 18 | scope: IFunction 19 | solution_name: EnvironmentVariable = field(init=False, repr=False) 20 | solution_id: EnvironmentVariable = field(init=False, repr=False) 21 | solution_version: EnvironmentVariable = field(init=False, repr=False) 22 | log_level: EnvironmentVariable = field(init=False, repr=False) 23 | powertools_service_name: EnvironmentVariable = field(init=False, repr=False) 24 | 25 | def __post_init__(self): 26 | cloudwatch_namespace_id = f"personalize_solution_{Aws.STACK_NAME}" 27 | cloudwatch_service_id_default = f"Workflow" 28 | 29 | self.solution_name = EnvironmentVariable(self.scope, "SOLUTION_NAME") 30 | self.solution_id = EnvironmentVariable(self.scope, "SOLUTION_ID") 31 | self.solution_version = EnvironmentVariable(self.scope, "SOLUTION_VERSION") 32 | self.log_level = EnvironmentVariable(self.scope, "LOG_LEVEL", "INFO") 33 | self.powertools_service_name = EnvironmentVariable( 34 | self.scope, "POWERTOOLS_SERVICE_NAME", cloudwatch_service_id_default 35 | ) 36 | self.powertools_metrics_namespace = EnvironmentVariable( 37 | self.scope, "POWERTOOLS_METRICS_NAMESPACE", cloudwatch_namespace_id 38 | ) 39 | -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/test_aspects.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import pytest 5 | from aws_cdk import App, Stack, Aspects, CfnCondition, Fn 6 | from aws_cdk.aws_sqs import Queue, CfnQueue 7 | from constructs import Construct 8 | 9 | from aws_solutions.cdk.aspects import ConditionalResources 10 | 11 | 12 | class SomeConstruct(Construct): 13 | def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: 14 | super().__init__(scope, construct_id, **kwargs) 15 | q1 = Queue(self, "TestQueue1") 16 | q1.node.default_child.override_logical_id("TestQueue1") 17 | q2 = Queue(self, "TestQueue2") 18 | q2.node.default_child.override_logical_id("TestQueue2") 19 | q3 = CfnQueue(self, "TestQueu3") 20 | q3.override_logical_id("TestQueue3") 21 | 22 | 23 | class SomeStack(Stack): 24 | def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: 25 | super().__init__(scope, construct_id, **kwargs) 26 | 27 | condition = CfnCondition(self, "SomeCondition", expression=Fn.condition_equals("1", "1")) 28 | queues = SomeConstruct(self, "SomeQueues") 29 | Aspects.of(queues).add(ConditionalResources(condition)) 30 | 31 | 32 | @pytest.fixture 33 | def stack_conditional(): 34 | app = App() 35 | SomeStack(app, "some-test-queues") 36 | yield app.synth().get_stack_by_name("some-test-queues").template 37 | 38 | 39 | def test_conditional_resources(stack_conditional): 40 | assert stack_conditional["Conditions"]["SomeCondition"]["Fn::Equals"] == [ 41 | "1", 42 | "1", 43 | ] 44 | for k, v in stack_conditional["Resources"].items(): 45 | assert v["Condition"] == "SomeCondition" 46 | -------------------------------------------------------------------------------- /source/aws_lambda/shared/resource/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from shared.resource.base import Resource 5 | from shared.resource.batch_inference_job import BatchInferenceJob 6 | from shared.resource.batch_segment_job import BatchSegmentJob 7 | from shared.resource.campaign import Campaign 8 | from shared.resource.dataset import Dataset 9 | from shared.resource.dataset_group import DatasetGroup 10 | from shared.resource.dataset_import_job import DatasetImportJob 11 | from shared.resource.event_tracker import EventTracker 12 | from shared.resource.filter import Filter 13 | from shared.resource.recommender import Recommender 14 | from shared.resource.schema import Schema 15 | from shared.resource.solution import Solution 16 | from shared.resource.solution_version import SolutionVersion 17 | 18 | 19 | def get_resource(resource_type: str) -> Resource: 20 | return { 21 | "datasetGroup": DatasetGroup(), 22 | "schema": Schema(), 23 | "dataset": Dataset(), 24 | "datasetImportJob": DatasetImportJob(), 25 | "solution": Solution(), 26 | "solutionVersion": SolutionVersion(), 27 | "campaign": Campaign(), 28 | "eventTracker": EventTracker(), 29 | "filter": Filter(), 30 | "batchInferenceJob": BatchInferenceJob(), 31 | "batchSegmentJob": BatchSegmentJob(), 32 | "recommender": Recommender(), 33 | }[resource_type] 34 | 35 | 36 | MANAGED_RESOURCES = [ 37 | DatasetGroup(), 38 | Schema(), 39 | Dataset(), 40 | DatasetImportJob(), 41 | Solution(), 42 | SolutionVersion(), 43 | Campaign(), 44 | EventTracker(), 45 | Filter(), 46 | BatchInferenceJob(), 47 | BatchSegmentJob(), 48 | Recommender(), 49 | ] 50 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/environment.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from dataclasses import dataclass, field 5 | 6 | from aws_cdk import Aws 7 | from aws_cdk.aws_lambda import IFunction 8 | 9 | from aws_solutions.cdk.aws_lambda.environment_variable import EnvironmentVariable 10 | 11 | 12 | @dataclass 13 | class Environment: 14 | """ 15 | Tracks environment variables common to AWS Lambda functions deployed by this solution 16 | """ 17 | 18 | scope: IFunction 19 | solution_name: EnvironmentVariable = field(init=False, repr=False) 20 | solution_id: EnvironmentVariable = field(init=False, repr=False) 21 | solution_version: EnvironmentVariable = field(init=False, repr=False) 22 | log_level: EnvironmentVariable = field(init=False, repr=False) 23 | powertools_service_name: EnvironmentVariable = field(init=False, repr=False) 24 | 25 | def __post_init__(self): 26 | cloudwatch_namespace_id = f"personalize_solution_{Aws.STACK_NAME}" 27 | cloudwatch_service_id_default = f"Workflow" 28 | 29 | self.solution_name = EnvironmentVariable(self.scope, "SOLUTION_NAME") 30 | self.solution_id = EnvironmentVariable(self.scope, "SOLUTION_ID") 31 | self.solution_version = EnvironmentVariable(self.scope, "SOLUTION_VERSION") 32 | self.log_level = EnvironmentVariable(self.scope, "LOG_LEVEL", "INFO") 33 | self.powertools_service_name = EnvironmentVariable( 34 | self.scope, "POWERTOOLS_SERVICE_NAME", cloudwatch_service_id_default 35 | ) 36 | self.powertools_metrics_namespace = EnvironmentVariable( 37 | self.scope, "POWERTOOLS_METRICS_NAMESPACE", cloudwatch_namespace_id 38 | ) 39 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_common/aws_solutions/core/logging.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import logging 5 | import os 6 | 7 | DEFAULT_LEVEL = "WARNING" 8 | 9 | 10 | def get_level(): 11 | """ 12 | Get the logging level from the LOG_LEVEL environment variable if it is valid. Otherwise set to WARNING 13 | :return: The logging level to use 14 | """ 15 | valid_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] 16 | requested_level = os.environ.get("LOG_LEVEL", DEFAULT_LEVEL) 17 | 18 | if requested_level and requested_level in valid_levels: 19 | return requested_level 20 | 21 | return DEFAULT_LEVEL 22 | 23 | 24 | def get_logger(name): 25 | """ 26 | Get a configured logger. 
Compatible with both the AWS Lambda runtime (root logger) and local execution 27 | :param name: The name of the logger (most often __name__ of the calling module) 28 | :return: The logger to use 29 | """ 30 | logger = None 31 | 32 | # first case: running as a lambda function or in pytest with conftest 33 | # second case: running a single test or locally under test 34 | if len(logging.getLogger().handlers) > 0: 35 | logger = logging.getLogger() 36 | logger.setLevel(get_level()) 37 | 38 | # overrides 39 | logging.getLogger("boto3").setLevel(logging.WARNING) 40 | logging.getLogger("botocore").setLevel(logging.WARNING) 41 | logging.getLogger("urllib3").setLevel(logging.WARNING) 42 | else: 43 | # fmt: off 44 | logging.basicConfig(level=get_level()) # NOSONAR - log level is user-specified; logs to stdout for AWS Lambda 45 | # fmt: on 46 | logger = logging.getLogger(name) 47 | 48 | return logger 49 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/step_functions/event_tracker_fragment.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from typing import List 5 | 6 | from aws_cdk import Duration 7 | from aws_cdk.aws_stepfunctions import ( 8 | StateMachineFragment, 9 | State, 10 | INextable, 11 | Choice, 12 | Pass, 13 | Condition, 14 | ) 15 | from constructs import Construct 16 | 17 | from personalize.aws_lambda.functions import ( 18 | CreateEventTracker, 19 | ) 20 | 21 | 22 | class EventTrackerFragment(StateMachineFragment): 23 | def __init__( 24 | self, 25 | scope: Construct, 26 | id: str, 27 | create_event_tracker: CreateEventTracker, 28 | ): 29 | super().__init__(scope, id) 30 | 31 | # total allowed elapsed duration ~ 11m30s 32 | retry_config = { 33 | "backoff_rate": 1.25, 34 | "interval": Duration.seconds(8), 35 | "max_attempts": 15, 36 | } 37 | 38 | self.create_event_tracker = create_event_tracker.state( 39 | self, 40 | "Create Event Tracker", 41 | **retry_config, 42 | ) 43 | self.not_required = Pass(self, "Event Tracker not Required") 44 | self.start = ( 45 | Choice(self, "Check if Event Tracker Required") 46 | .when( 47 | Condition.is_present("$.eventTracker.serviceConfig.name"), 48 | self.create_event_tracker, 49 | ) 50 | .otherwise(self.not_required) 51 | ) 52 | 53 | @property 54 | def start_state(self) -> State: 55 | return self.start.start_state 56 | 57 | @property 58 | def end_states(self) -> List[INextable]: 59 | return [self.not_required, self.create_event_tracker] 60 | -------------------------------------------------------------------------------- /source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/src/main/java/com/amazonaws/solutions/schedule_sfn_task/ScheduleEvent.java: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | package com.amazonaws.solutions.schedule_sfn_task; 5 | 6 | public class ScheduleEvent { 7 | private String schedule; 8 | private String next; 9 | 10 | public String getNext() { 11 | return next; 12 | } 13 | 14 | public String setNext(String next) { 15 | this.next = next; 16 | return next; 17 | } 18 | 19 | public void setSchedule(String schedule) { 20 | /* 21 | cron schedules have 7 fields (seconds, minutes, hours, day-of-month, month, day-of-week, and year); we use only the 22 | last 6 fields (omitting seconds). To do this, we always set seconds to 0, and keep the remainder of the provided 23 | schedule. When generating the next scheduled time, we use a random number of seconds in the minute to avoid hot 24 | spots at the start of each minute. An example string schedule provided might look like * * * * ? * (e.g. every 25 | minute) 26 | */ 27 | schedule = validateSchedule(schedule); 28 | this.schedule = "0 " + schedule; 29 | } 30 | 31 | public String getSchedule() { 32 | return schedule; 33 | } 34 | 35 | private String validateSchedule(String schedule) { 36 | schedule = schedule 37 | .replace("cron(", "") 38 | .replace(")", ""); 39 | 40 | String[] fields = schedule.split("\\s+"); 41 | 42 | if(fields.length != 6) { 43 | throw new ScheduleException("schedule " + schedule + " is not a valid schedule (requires 6 fields)"); 44 | } 45 | return schedule; 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /source/aws_lambda/create_recommender/handler.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from typing import Dict, Any 5 | 6 | from aws_lambda_powertools import Logger, Tracer, Metrics 7 | from aws_lambda_powertools.utilities.typing import LambdaContext 8 | 9 | from shared.sfn_middleware import PersonalizeResource 10 | 11 | RESOURCE = "recommender" 12 | STATUS = "recommender.status" 13 | CONFIG = { 14 | "name": { 15 | "source": "event", 16 | "path": "serviceConfig.name", 17 | }, 18 | "datasetGroupArn": { 19 | "source": "event", 20 | "path": "serviceConfig.datasetGroupArn", 21 | }, 22 | "recipeArn": {"source": "event", "path": "serviceConfig.recipeArn"}, 23 | "recommenderConfig": { 24 | "source": "event", 25 | "path": "serviceConfig.recommenderConfig", 26 | "default": "omit", 27 | }, 28 | "timeStarted": { 29 | "source": "event", 30 | "path": "workflowConfig.timeStarted", 31 | "default": "omit", 32 | "as": "iso8601", 33 | }, 34 | "tags": { 35 | "source": "event", 36 | "path": "serviceConfig.tags", 37 | "default": "omit", 38 | }, 39 | } 40 | 41 | logger = Logger() 42 | tracer = Tracer() 43 | metrics = Metrics() 44 | 45 | 46 | @metrics.log_metrics 47 | @tracer.capture_lambda_handler 48 | @PersonalizeResource( 49 | resource=RESOURCE, 50 | status=STATUS, 51 | config=CONFIG, 52 | ) 53 | def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict: 54 | """Create a recommender in Amazon Personalize based on the configuration in `event` 55 | :param event: AWS Lambda Event 56 | :param context: AWS Lambda Context 57 | :return: the configured recommender 58 | """ 59 | return event.get("resource") # return the recommender 60 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/step_functions/dataset_imports_fragment.py:
-------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from typing import List 5 | 6 | from aws_cdk.aws_stepfunctions import ( 7 | StateMachineFragment, 8 | Chain, 9 | Parallel, 10 | JsonPath, 11 | State, 12 | INextable, 13 | ) 14 | from constructs import Construct 15 | 16 | from personalize.aws_lambda.functions import ( 17 | CreateSchema, 18 | CreateDataset, 19 | CreateDatasetImportJob, 20 | ) 21 | from personalize.step_functions.dataset_import_fragment import DatasetImportFragment 22 | 23 | 24 | class DatasetImportsFragment(StateMachineFragment): 25 | def __init__( 26 | self, 27 | scope: Construct, 28 | construct_id: str, 29 | create_schema: CreateSchema, 30 | create_dataset: CreateDataset, 31 | create_dataset_import_job: CreateDatasetImportJob, 32 | ): 33 | super().__init__(scope, construct_id) 34 | 35 | dataset_management_functions = { 36 | "create_schema": create_schema, 37 | "create_dataset": create_dataset, 38 | "create_dataset_import_job": create_dataset_import_job, 39 | } 40 | 41 | self.chain = Chain.start( 42 | Parallel(self, "Create and Import Datasets", result_path=JsonPath.DISCARD) 43 | .branch(DatasetImportFragment(self, "Interactions", **dataset_management_functions)) 44 | .branch(DatasetImportFragment(self, "Users", **dataset_management_functions)) 45 | .branch(DatasetImportFragment(self, "Items", **dataset_management_functions)) 46 | ) 47 | 48 | @property 49 | def start_state(self) -> State: 50 | return self.chain.start_state 51 | 52 | @property 53 | def end_states(self) -> List[INextable]: 54 | return self.chain.end_states 55 | -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/aws_lambda/java/test_java_function.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import logging 5 | from pathlib import Path 6 | 7 | import pytest 8 | from aws_cdk import ( 9 | Stack, 10 | App, 11 | ) 12 | from constructs import Construct 13 | 14 | from aws_solutions.cdk.aws_lambda.java.function import SolutionsJavaFunction 15 | 16 | 17 | @pytest.fixture 18 | def java_function_synth(caplog): 19 | class FunctionStack(Stack): 20 | def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: 21 | super().__init__(scope, construct_id, **kwargs) 22 | 23 | project_path = Path(__file__).parent.resolve() / "fixtures" / "java_sample" 24 | distribution_path = project_path / "build" / "distributions" 25 | 26 | func = SolutionsJavaFunction( 27 | self, 28 | "TestFunction", 29 | project_path=project_path, 30 | distribution_path=distribution_path, 31 | gradle_task="packageFat", 32 | gradle_test="test", 33 | handler="example.Handler", 34 | ) 35 | func.node.default_child.override_logical_id("TestFunction") 36 | 37 | with caplog.at_level(logging.DEBUG): 38 | app = App() 39 | FunctionStack(app, "test-function-lambda") 40 | synth = app.synth() 41 | print(f"CDK synth directory: {synth.directory}") 42 | yield synth 43 | 44 | 45 | @pytest.mark.no_cdk_lambda_mock 46 | def test_java_function_synth(java_function_synth): 47 | function_stack = java_function_synth.get_stack_by_name("test-function-lambda").template 48 | func = function_stack["Resources"]["TestFunction"] 49 | 50 | assert func["Type"] == "AWS::Lambda::Function" 51 | assert func["Properties"]["Runtime"] == "java11" 52 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/cfn_guard.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from dataclasses import dataclass 5 | from typing import List 6 | 7 | import jsii 8 | from aws_cdk import CfnResource, IAspect 9 | from constructs import IConstruct 10 | 11 | 12 | def add_cfn_guard_suppressions( 13 | resource: CfnResource, suppressions: List[str] 14 | ): 15 | if resource.node.default_child: 16 | resource.node.default_child.add_metadata( 17 | "guard", 18 | { 19 | "SuppressedRules": suppressions 20 | }, 21 | ) 22 | else: 23 | resource.add_metadata( 24 | "guard", 25 | { 26 | "SuppressedRules": suppressions 27 | }, 28 | ) 29 | 30 | @jsii.implements(IAspect) 31 | class CfnGuardSuppressResourceList: 32 | """Suppress certain cfn_guard warnings that can be ignored by this solution""" 33 | 34 | def __init__(self, resource_suppressions: dict): 35 | self.resource_suppressions = resource_suppressions 36 | 37 | def visit(self, node: IConstruct): 38 | if "is_cfn_element" in dir(node) and \ 39 | node.is_cfn_element(node) and \ 40 | getattr(node, "cfn_resource_type", None) is not None and \ 41 | node.cfn_resource_type in self.resource_suppressions: 42 | add_cfn_guard_suppressions(node, self.resource_suppressions[node.cfn_resource_type]) 43 | elif "is_cfn_element" in dir(node.node.default_child) and \ 44 | getattr(node.node.default_child, "cfn_resource_type", None) is not None and \ 45 | node.node.default_child.cfn_resource_type in self.resource_suppressions: 46 | add_cfn_guard_suppressions(node.node.default_child, self.resource_suppressions[node.node.default_child.cfn_resource_type]) -------------------------------------------------------------------------------- /source/tests/test_resources.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import pytest 5 | 6 | from shared.resource import ( 7 | DatasetGroup, 8 | Schema, 9 | Dataset, 10 | DatasetImportJob, 11 | Solution, 12 | SolutionVersion, 13 | Campaign, 14 | EventTracker, 15 | BatchSegmentJob, 16 | BatchInferenceJob, 17 | ) 18 | 19 | 20 | @pytest.mark.parametrize( 21 | "klass,camel,dash,snake", 22 | [ 23 | (DatasetGroup, "datasetGroup", "dataset-group", "dataset_group"), 24 | (Schema, "schema", "schema", "schema"), 25 | (Dataset, "dataset", "dataset", "dataset"), 26 | ( 27 | DatasetImportJob, 28 | "datasetImportJob", 29 | "dataset-import-job", 30 | "dataset_import_job", 31 | ), 32 | (Solution, "solution", "solution", "solution"), 33 | (SolutionVersion, "solutionVersion", "solution-version", "solution_version"), 34 | (Campaign, "campaign", "campaign", "campaign"), 35 | (EventTracker, "eventTracker", "event-tracker", "event_tracker"), 36 | ( 37 | BatchInferenceJob, 38 | "batchInferenceJob", 39 | "batch-inference-job", 40 | "batch_inference_job", 41 | ), 42 | (BatchSegmentJob, "batchSegmentJob", "batch-segment-job", "batch_segment_job"), 43 | ], 44 | ids=[ 45 | "DatasetGroup", 46 | "Schema", 47 | "Dataset", 48 | "DatasetImportJob", 49 | "Solution", 50 | "SolutionVersion", 51 | "Campaign", 52 | "EventTracker", 53 | "BatchInferenceJob", 54 | "BatchSegmentJob", 55 | ], 56 | ) 57 | def test_resource_naming(klass, camel, dash, snake): 58 | assert klass().name.camel == camel 59 | assert klass().name.dash == dash 60 | assert klass().name.snake == snake 61 | -------------------------------------------------------------------------------- /source/aws_lambda/create_dataset_group/handler.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from typing import Dict, Any 5 | 6 | from aws_lambda_powertools import Tracer, Logger, Metrics 7 | from aws_lambda_powertools.utilities.typing import LambdaContext 8 | 9 | from shared.sfn_middleware import PersonalizeResource 10 | 11 | RESOURCE = "datasetGroup" 12 | STATUS = "datasetGroup.status" 13 | CONFIG = { 14 | "name": { 15 | "source": "event", 16 | "path": "serviceConfig.name", 17 | }, 18 | "domain": { 19 | "source": "event", 20 | "path": "serviceConfig.domain", 21 | "default": "omit", 22 | }, 23 | "roleArn": { 24 | "source": "environment", 25 | "path": "KMS_ROLE_ARN", 26 | "default": "omit", 27 | }, 28 | "kmsKeyArn": { 29 | "source": "environment", 30 | "path": "KMS_KEY_ARN", 31 | "default": "omit", 32 | }, 33 | "timeStarted": { 34 | "source": "event", 35 | "path": "workflowConfig.timeStarted", 36 | "default": "omit", 37 | "as": "iso8601", 38 | }, 39 | "tags": { 40 | "source": "event", 41 | "path": "serviceConfig.tags", 42 | "default": "omit", 43 | }, 44 | } 45 | 46 | tracer = Tracer() 47 | logger = Logger() 48 | metrics = Metrics() 49 | 50 | 51 | @metrics.log_metrics 52 | @tracer.capture_lambda_handler 53 | @PersonalizeResource( 54 | resource=RESOURCE, 55 | status=STATUS, 56 | config=CONFIG, 57 | ) 58 | def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict: 59 | """Create a dataset group in Amazon Personalize based on the configuration in `event` 60 | :param event: AWS Lambda Event 61 | :param context: AWS Lambda Context 62 | :return: the configured dataset group 63 | """ 64 | return event.get("resource") # return the dataset group 65 | -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/aws_lambda/cfn_custom_resources/resource_name/test_resource_hash.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import pytest 5 | 6 | from aws_solutions.cdk.aws_lambda.cfn_custom_resources.resource_hash.src.custom_resources.hash import ( 7 | generate_hash, 8 | get_property, 9 | helper, 10 | ) 11 | 12 | EXPECTED_DIGEST = "DCB88E2D2EC20C11929E7C2C0366FEB6" 13 | 14 | 15 | @pytest.fixture() 16 | def lambda_event(): 17 | event = { 18 | "StackId": f"arn:aws:cloudformation:us-west-2:{''.join([str(i % 10) for i in range(1,13)])}:stack/stack-name/guid", 19 | "ResourceProperties": { 20 | "Purpose": "set-me", 21 | "MaxLength": 64, 22 | }, 23 | } 24 | yield event 25 | 26 | 27 | def test_generate_hashed_name(lambda_event): 28 | generate_hash(lambda_event, None) 29 | assert helper.Data["Name"] == f"{lambda_event['ResourceProperties']['Purpose']}-{EXPECTED_DIGEST[:8]}" 30 | 31 | 32 | def test_generate_hashed_name_long(lambda_event): 33 | lambda_event["ResourceProperties"]["Purpose"] = "a" * (64 - 9) 34 | generate_hash(lambda_event, None) 35 | assert helper.Data["Name"] == f"{lambda_event['ResourceProperties']['Purpose']}-{EXPECTED_DIGEST[:8]}" 36 | 37 | 38 | def test_generate_hashed_name_too_long(lambda_event): 39 | lambda_event["ResourceProperties"]["Purpose"] = "a" * (64 - 8) 40 | with pytest.raises(ValueError): 41 | generate_hash(lambda_event, None) 42 | 43 | 44 | def test_get_property_present(lambda_event): 45 | assert get_property(lambda_event, "MaxLength") == 64 46 | 47 | 48 | def test_get_property_default(lambda_event): 49 | assert get_property(lambda_event, "MissingProperty", "DEFAULT") == "DEFAULT" 50 | 51 | 52 | def test_get_property_missing(lambda_event): 53 | with pytest.raises(ValueError): 54 | get_property(lambda_event, "MissingProperty") 55 | -------------------------------------------------------------------------------- /source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/src/test/java/com/amazonaws/solutions/schedule_sfn_task/HandleScheduleEventTest.java: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | package com.amazonaws.solutions.schedule_sfn_task; 5 | 6 | import org.junit.jupiter.api.Assertions; 7 | import org.junit.jupiter.api.BeforeEach; 8 | import org.junit.jupiter.api.DisplayName; 9 | import org.junit.jupiter.api.Test; 10 | import org.junit.jupiter.params.ParameterizedTest; 11 | import org.junit.jupiter.params.provider.ValueSource; 12 | 13 | import java.text.DateFormat; 14 | import java.text.SimpleDateFormat; 15 | 16 | 17 | class HandleScheduleEventTest { 18 | private ScheduleEvent event; 19 | private HandleScheduleEvent handler; 20 | 21 | @BeforeEach 22 | public void setUp() { 23 | event = new ScheduleEvent(); 24 | handler = new HandleScheduleEvent(); 25 | } 26 | 27 | @Test 28 | @DisplayName("returns ISO 8601 in UTC with seconds") 29 | public void testScheduleEventOutput() { 30 | this.event.setSchedule("cron(* * * * ? 
*)"); 31 | String result = handler.handleRequest(this.event, null); 32 | 33 | DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'"); 34 | Assertions.assertDoesNotThrow(() -> { 35 | sdf.parse(result); 36 | }); 37 | } 38 | 39 | @ParameterizedTest 40 | @ValueSource(strings = {"cron(1)", "* * * * * *", "* * *", "* * * * *"}) 41 | @DisplayName("com.amazonaws.solutions.schedule_sfn_task.ScheduleEvent invalid representation raises com.amazonaws.solutions.schedule_sfn_task.ScheduleException") 42 | public void testScheduleEventInvalid(String schedule) { 43 | Assertions.assertThrows(ScheduleException.class, () -> { 44 | this.event.setSchedule(schedule); 45 | handler.handleRequest(this.event, null); 46 | }); 47 | } 48 | } -------------------------------------------------------------------------------- /source/aws_lambda/create_solution_version/handler.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from typing import Dict, Any 5 | 6 | from aws_lambda_powertools import Logger, Tracer, Metrics 7 | from aws_lambda_powertools.utilities.typing import LambdaContext 8 | 9 | from shared.sfn_middleware import PersonalizeResource 10 | 11 | RESOURCE = "solutionVersion" 12 | STATUS = "solutionVersion.status" 13 | CONFIG = { 14 | "solutionArn": { 15 | "source": "event", 16 | "path": "serviceConfig.solutionArn", 17 | }, 18 | "trainingMode": { 19 | "source": "event", 20 | "path": "serviceConfig.trainingMode", 21 | "default": "omit", 22 | }, 23 | "maxAge": { 24 | "source": "event", 25 | "path": "workflowConfig.maxAge", 26 | "default": "omit", 27 | "as": "seconds", 28 | }, 29 | "solutionVersionArn": { 30 | "source": "event", 31 | "path": "workflowConfig.solutionVersionArn", 32 | "default": "omit", 33 | }, 34 | "timeStarted": { 35 | "source": "event", 36 | "path": "workflowConfig.timeStarted", 37 | "default": "omit", 38 | "as": "iso8601", 39 | }, 40 | "tags": { 41 | "source": "event", 42 | "path": "serviceConfig.tags", 43 | "default": "omit", 44 | }, 45 | } 46 | logger = Logger() 47 | tracer = Tracer() 48 | metrics = Metrics() 49 | 50 | 51 | @metrics.log_metrics 52 | @tracer.capture_lambda_handler 53 | @PersonalizeResource( 54 | resource=RESOURCE, 55 | status=STATUS, 56 | config=CONFIG, 57 | ) 58 | def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict: 59 | """Create a solution version in Amazon Personalize based on the configuration in `event` 60 | :param event: AWS Lambda Event 61 | :param context: AWS Lambda Context 62 | :return: the configured solution version 63 | """ 64 | return event.get("resource") # return the solution version 65 | -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/aws_lambda/cfn_custom_resources/resource_hash/test_resource_name.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import re 5 | 6 | import pytest 7 | 8 | from aws_solutions.cdk.aws_lambda.cfn_custom_resources.resource_name.src.custom_resources.name import ( 9 | generate_name, 10 | get_property, 11 | helper, 12 | ) 13 | 14 | 15 | @pytest.fixture() 16 | def lambda_event(): 17 | event = { 18 | "ResourceProperties": { 19 | "Id": "UniqueId", 20 | "StackName": "StackName", 21 | "Purpose": "Purpose", 22 | "MaxLength": 63, 23 | } 24 | } 25 | yield event 26 | 27 | 28 | def test_generate_name(lambda_event): 29 | generate_name(lambda_event, None) 30 | assert helper.Data["Name"] == "stackname-purpose-uniqueid" 31 | 32 | 33 | def test_generate_long_name(lambda_event): 34 | lambda_event["ResourceProperties"]["StackName"] = "a" * 63 35 | generate_name(lambda_event, None) 36 | assert helper.Data["Name"] == "purpose-uniqueid" 37 | 38 | 39 | def test_generate_invalid_name(lambda_event): 40 | lambda_event["ResourceProperties"]["Purpose"] = "a" * 630 41 | with pytest.raises(ValueError): 42 | generate_name(lambda_event, None) 43 | 44 | 45 | def test_generate_name_random_id(lambda_event): 46 | del lambda_event["ResourceProperties"]["Id"] 47 | generate_name(lambda_event, None) 48 | helper_id = helper.Data["Id"] 49 | assert len(helper_id) == 12 50 | assert re.match(r"[a-f0-9]{12}", helper_id) 51 | 52 | 53 | def test_get_property_present(lambda_event): 54 | assert get_property(lambda_event, "StackName") == "StackName" 55 | 56 | 57 | def test_get_property_default(lambda_event): 58 | assert get_property(lambda_event, "MissingProperty", "DEFAULT") == "DEFAULT" 59 | 60 | 61 | def test_get_property_missing(lambda_event): 62 | with pytest.raises(ValueError): 63 | get_property(lambda_event, "MissingProperty") 64 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/aws_lambda/functions/create_solution.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from pathlib import Path 5 | 6 | import aws_cdk.aws_iam as iam 7 | from aws_cdk import Aws 8 | from constructs import Construct 9 | 10 | from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep 11 | from aws_solutions.cdk.cfn_guard import add_cfn_guard_suppressions 12 | 13 | class CreateSolution(SolutionStep): 14 | def __init__( 15 | self, 16 | scope: Construct, 17 | id: str, 18 | layers=None, 19 | ): 20 | super().__init__( 21 | scope, 22 | id, 23 | layers=layers, 24 | entrypoint=(Path(__file__).absolute().parents[4] / "aws_lambda" / "create_solution" / "handler.py"), 25 | libraries=[Path(__file__).absolute().parents[4] / "aws_lambda" / "shared"], 26 | ) 27 | 28 | add_cfn_guard_suppressions( 29 | self.function.role.node.try_find_child("Resource"), 30 | ["IAM_NO_INLINE_POLICY_CHECK"] 31 | ) 32 | 33 | def _set_permissions(self): 34 | self.function.add_to_role_policy( 35 | statement=iam.PolicyStatement( 36 | actions=[ 37 | "personalize:DescribeSolution", 38 | "personalize:CreateSolution", 39 | "personalize:ListSolutions", 40 | "personalize:DescribeDatasetGroup", 41 | "personalize:TagResource", 42 | "personalize:ListTagsForResource", 43 | ], 44 | effect=iam.Effect.ALLOW, 45 | resources=[ 46 | f"arn:{Aws.PARTITION}:personalize:{Aws.REGION}:{Aws.ACCOUNT_ID}:solution/*", 47 | f"arn:{Aws.PARTITION}:personalize:{Aws.REGION}:{Aws.ACCOUNT_ID}:dataset-group/*", 48 | f"arn:{Aws.PARTITION}:personalize:::recipe/*" 49 | ], 50 | ) 51 | ) 52 | -------------------------------------------------------------------------------- /source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/read_scheduled_task.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from pathlib import Path 5 | from typing import Optional 6 | 7 | import aws_cdk.aws_iam as iam 8 | from aws_cdk.aws_dynamodb import ITable 9 | from aws_cdk.aws_stepfunctions import IChainable 10 | from constructs import Construct 11 | 12 | from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep 13 | from aws_solutions.cdk.cfn_guard import add_cfn_guard_suppressions 14 | 15 | 16 | class ReadScheduledTask(SolutionStep): 17 | def __init__( 18 | self, 19 | scope: Construct, 20 | id: str, 21 | layers=None, 22 | failure_state: Optional[IChainable] = None, 23 | scheduler_table: ITable = None, 24 | state_machine_arn: str = None, 25 | ): 26 | self.scheduler_table = scheduler_table 27 | self.state_machine_arn = state_machine_arn 28 | 29 | super().__init__( 30 | scope, 31 | id, 32 | layers=layers, 33 | failure_state=failure_state, 34 | function="read_schedule", 35 | entrypoint=Path(__file__).parents[1].resolve() / "aws_lambda" / "scheduler" / "handler.py", 36 | ) 37 | 38 | add_cfn_guard_suppressions( 39 | self.function.role.node.try_find_child("Resource"), 40 | ["IAM_NO_INLINE_POLICY_CHECK"] 41 | ) 42 | 43 | def _set_permissions(self): 44 | self.function.add_environment("DDB_SCHEDULER_STEPFUNCTION", self.state_machine_arn) 45 | self.function.add_to_role_policy( 46 | iam.PolicyStatement( 47 | actions=["states:StartExecution"], 48 | effect=iam.Effect.ALLOW, 49 | resources=[self.state_machine_arn], 50 | ) 51 | ) 52 | 53 | self.scheduler_table.grant_read_data(self.function) 54 | self.function.add_environment("DDB_SCHEDULES_TABLE", self.scheduler_table.table_name) 55 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/aws_lambda/functions/create_recommender.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from pathlib import Path 5 | 6 | import aws_cdk.aws_iam as iam 7 | from aws_cdk import Aws 8 | from constructs import Construct 9 | 10 | from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep 11 | from aws_solutions.cdk.cfn_guard import add_cfn_guard_suppressions 12 | 13 | 14 | 15 | class CreateRecommender(SolutionStep): 16 | def __init__( 17 | self, 18 | scope: Construct, 19 | id: str, 20 | layers=None, 21 | ): 22 | super().__init__( 23 | scope, 24 | id, 25 | layers=layers, 26 | entrypoint=(Path(__file__).absolute().parents[4] / "aws_lambda" / "create_recommender" / "handler.py"), 27 | libraries=[Path(__file__).absolute().parents[4] / "aws_lambda" / "shared"], 28 | ) 29 | 30 | add_cfn_guard_suppressions( 31 | self.function.role.node.try_find_child("Resource"), 32 | ["IAM_NO_INLINE_POLICY_CHECK"] 33 | ) 34 | 35 | def _set_permissions(self): 36 | self.function.add_to_role_policy( 37 | statement=iam.PolicyStatement( 38 | actions=[ 39 | "personalize:DescribeRecommender", 40 | "personalize:CreateRecommender", 41 | "personalize:ListRecommenders", 42 | "personalize:DescribeDatasetGroup", 43 | "personalize:TagResource", 44 | "personalize:ListTagsForResource", 45 | ], 46 | effect=iam.Effect.ALLOW, 47 | resources=[ 48 | f"arn:{Aws.PARTITION}:personalize:{Aws.REGION}:{Aws.ACCOUNT_ID}:recommender/*", 49 | f"arn:{Aws.PARTITION}:personalize:{Aws.REGION}:{Aws.ACCOUNT_ID}:dataset-group/*", 50 | f"arn:{Aws.PARTITION}:personalize:::recipe/*" 51 | ], 52 | ) 53 | ) 54 | -------------------------------------------------------------------------------- /source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/delete_scheduled_task.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from pathlib import Path 5 | from typing import Optional 6 | 7 | import aws_cdk.aws_iam as iam 8 | from aws_cdk.aws_dynamodb import ITable 9 | from aws_cdk.aws_stepfunctions import IChainable 10 | from constructs import Construct 11 | 12 | from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep 13 | from aws_solutions.cdk.cfn_guard import add_cfn_guard_suppressions 14 | 15 | 16 | class DeleteScheduledTask(SolutionStep): 17 | def __init__( 18 | self, 19 | scope: Construct, 20 | id: str, 21 | layers=None, 22 | failure_state: Optional[IChainable] = None, 23 | scheduler_table: ITable = None, 24 | state_machine_arn: str = None, 25 | ): 26 | self.scheduler_table = scheduler_table 27 | self.state_machine_arn = state_machine_arn 28 | 29 | super().__init__( 30 | scope, 31 | id, 32 | layers=layers, 33 | failure_state=failure_state, 34 | function="delete_schedule", 35 | entrypoint=Path(__file__).parents[1].resolve() / "aws_lambda" / "scheduler" / "handler.py", 36 | ) 37 | 38 | add_cfn_guard_suppressions( 39 | self.function.role.node.try_find_child("Resource"), 40 | ["IAM_NO_INLINE_POLICY_CHECK"] 41 | ) 42 | 43 | def _set_permissions(self): 44 | self.function.add_environment("DDB_SCHEDULER_STEPFUNCTION", self.state_machine_arn) 45 | self.function.add_to_role_policy( 46 | iam.PolicyStatement( 47 | actions=["states:StartExecution"], 48 | effect=iam.Effect.ALLOW, 49 | resources=[self.state_machine_arn], 50 | ) 51 | ) 52 | 53 | self.scheduler_table.grant_read_write_data(self.function) 54 | self.function.add_environment("DDB_SCHEDULES_TABLE", self.scheduler_table.table_name) 55 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/aws_lambda/functions/create_solution_version.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from pathlib import Path 5 | 6 | import aws_cdk.aws_iam as iam 7 | from aws_cdk import Aws 8 | from constructs import Construct 9 | 10 | from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep 11 | from aws_solutions.cdk.cfn_guard import add_cfn_guard_suppressions 12 | 13 | 14 | class CreateSolutionVersion(SolutionStep): 15 | def __init__( 16 | self, 17 | scope: Construct, 18 | id: str, 19 | layers=None, 20 | ): 21 | super().__init__( 22 | scope, 23 | id, 24 | layers=layers, 25 | entrypoint=(Path(__file__).absolute().parents[4] / "aws_lambda" / "create_solution_version" / "handler.py"), 26 | libraries=[Path(__file__).absolute().parents[4] / "aws_lambda" / "shared"], 27 | ) 28 | 29 | add_cfn_guard_suppressions( 30 | self.function.role.node.try_find_child("Resource"), 31 | ["IAM_NO_INLINE_POLICY_CHECK"] 32 | ) 33 | 34 | def _set_permissions(self): 35 | self.function.add_to_role_policy( 36 | statement=iam.PolicyStatement( 37 | actions=[ 38 | "personalize:DescribeSolutionVersion", 39 | "personalize:CreateSolutionVersion", 40 | "personalize:ListSolutionVersions", 41 | "personalize:DescribeSolution", 42 | "personalize:GetSolutionMetrics", 43 | "personalize:TagResource", 44 | "personalize:ListTagsForResource", 45 | ], 46 | effect=iam.Effect.ALLOW, 47 | resources=[ 48 | f"arn:{Aws.PARTITION}:personalize:{Aws.REGION}:{Aws.ACCOUNT_ID}:solution-version/*", 49 | f"arn:{Aws.PARTITION}:personalize:{Aws.REGION}:{Aws.ACCOUNT_ID}:solution/*", 50 | ], 51 | ) 52 | ) 53 | -------------------------------------------------------------------------------- /source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/src/main/java/com/amazonaws/solutions/schedule_sfn_task/HandleScheduleEvent.java: -------------------------------------------------------------------------------- 1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | package com.amazonaws.solutions.schedule_sfn_task; 5 | 6 | import com.amazonaws.services.lambda.runtime.Context; 7 | import com.amazonaws.services.lambda.runtime.RequestHandler; 8 | import org.quartz.CronExpression; 9 | 10 | import java.security.SecureRandom; 11 | import java.text.ParseException; 12 | import java.text.SimpleDateFormat; 13 | import java.time.Instant; 14 | import java.util.Date; 15 | import java.util.TimeZone; 16 | 17 | public class HandleScheduleEvent implements RequestHandler<ScheduleEvent, String> { 18 | @Override 19 | public String handleRequest(ScheduleEvent event, Context context) { 20 | try { 21 | setNextSchedule(event); 22 | } catch (ParseException e) { 23 | throw new ScheduleException(e.getMessage()); 24 | } 25 | return event.getNext(); 26 | } 27 | 28 | private ScheduleEvent setNextSchedule(ScheduleEvent event) throws ParseException { 29 | String schedule = event.getSchedule(); 30 | SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssXXX"); 31 | dateFormatter.setTimeZone(TimeZone.getTimeZone("UTC")); 32 | 33 | // create the expression (this throws a ParseException on failure) 34 | CronExpression expression = new CronExpression(schedule); 35 | 36 | // set up the next date as a string 37 | int seconds = getRandomSeconds(); 38 | Date dt = Date.from(expression.getNextValidTimeAfter(Date.from(Instant.now())).toInstant().plusSeconds(seconds)); 39 | String dtText = dateFormatter.format(dt); 40 | event.setNext(event.setNext(dtText)); 41 | 42 | return event; 43 | } 44 | 45 | private int getRandomSeconds() { 46 | SecureRandom random = new SecureRandom(); 47 | return random.nextInt(60); 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/aws_lambda/functions/create_filter.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from pathlib import Path 5 | from typing import Optional 6 | 7 | import aws_cdk.aws_iam as iam 8 | from aws_cdk import Aws 9 | from aws_cdk.aws_stepfunctions import IChainable 10 | from constructs import Construct 11 | 12 | from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep 13 | from aws_solutions.cdk.cfn_guard import add_cfn_guard_suppressions 14 | 15 | class CreateFilter(SolutionStep): 16 | def __init__( 17 | self, 18 | scope: Construct, 19 | id: str, 20 | layers=None, 21 | failure_state: Optional[IChainable] = None, 22 | ): 23 | super().__init__( 24 | scope, 25 | id, 26 | layers=layers, 27 | failure_state=failure_state, 28 | entrypoint=(Path(__file__).absolute().parents[4] / "aws_lambda" / "create_filter" / "handler.py"), 29 | libraries=[Path(__file__).absolute().parents[4] / "aws_lambda" / "shared"], 30 | ) 31 | 32 | add_cfn_guard_suppressions( 33 | self.function.role.node.try_find_child("Resource"), 34 | ["IAM_NO_INLINE_POLICY_CHECK"] 35 | ) 36 | 37 | def _set_permissions(self): 38 | self.function.add_to_role_policy( 39 | statement=iam.PolicyStatement( 40 | actions=[ 41 | "personalize:DescribeDatasetGroup", 42 | "personalize:CreateFilter", 43 | "personalize:DescribeFilter", 44 | "personalize:TagResource", 45 | "personalize:ListTagsForResource", 46 | ], 47 | effect=iam.Effect.ALLOW, 48 | resources=[ 49 | f"arn:{Aws.PARTITION}:personalize:{Aws.REGION}:{Aws.ACCOUNT_ID}:filter/*", 50 | f"arn:{Aws.PARTITION}:personalize:{Aws.REGION}:{Aws.ACCOUNT_ID}:dataset-group/*", 51 | ], 52 | ) 53 | ) 54 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_common/setup.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import re 5 | from pathlib import Path 6 | 7 | import setuptools 8 | 9 | VERSION_RE = re.compile(r"\#\# \[(?P<version>.*)\]", re.MULTILINE) # NOSONAR 10 | 11 | 12 | def get_version(): 13 | """ 14 | Detect the solution version from the changelog. Latest version on top.
15 | """ 16 | changelog = open(Path(__file__).resolve().parent.parent / "CHANGELOG.md").read() 17 | versions = VERSION_RE.findall(changelog) 18 | if not len(versions): 19 | raise ValueError("use the standard semver format in your CHANGELOG.md") 20 | build_version = versions[0] 21 | print(f"Build Version: {build_version}") 22 | return build_version 23 | 24 | 25 | setuptools.setup( 26 | name="aws-solutions-python", 27 | version=get_version(), 28 | description="Tools to make AWS Solutions deployments with CDK + Python more manageable", 29 | long_description=open("../README.md").read(), 30 | author="Amazon Web Services", 31 | url="https://aws.amazon.com/solutions/implementations", 32 | license="Apache License 2.0", 33 | packages=setuptools.find_namespace_packages(exclude=["build*"]), 34 | install_requires=[ 35 | "boto3==1.26.47", 36 | "pip>=22.3.1", 37 | ], 38 | python_requires=">=3.11", 39 | classifiers=[ 40 | "Development Status :: 4 - Beta", 41 | "Intended Audience :: Developers", 42 | "License :: OSI Approved :: Apache Software License", 43 | "Programming Language :: JavaScript", 44 | "Programming Language :: Python :: 3 :: Only", 45 | "Programming Language :: Python :: 3.7", 46 | "Programming Language :: Python :: 3.8", 47 | "Programming Language :: Python :: 3.9", 48 | "Programming Language :: Python :: 3.10", 49 | "Programming Language :: Python :: 3.11", 50 | "Topic :: Software Development :: Code Generators", 51 | "Topic :: Utilities", 52 | "Typing :: Typed", 53 | ], 54 | zip_safe=False, 55 | ) 56 | -------------------------------------------------------------------------------- /source/scheduler/cdk/setup.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import re 5 | from pathlib import Path 6 | 7 | import setuptools 8 | 9 | VERSION_RE = re.compile(r"\#\# \[(?P.*)\]", re.MULTILINE) # NOSONAR 10 | 11 | 12 | def get_version(): 13 | """ 14 | Detect the solution version from the changelog. Latest version on top. 
15 | """ 16 | changelog = open(Path(__file__).resolve().parent.parent / "CHANGELOG.md").read() 17 | versions = VERSION_RE.findall(changelog) 18 | if not len(versions): 19 | raise ValueError("use the standard semver format in your CHANGELOG.md") 20 | build_version = versions[0] 21 | print(f"Build Version: {build_version}") 22 | return build_version 23 | 24 | 25 | setuptools.setup( 26 | name="aws-solutions-scheduler-cdk", 27 | version=get_version(), 28 | description="Scheduler CDK Constructs", 29 | long_description=open("../README.md").read(), 30 | author="Amazon Web Services", 31 | url="https://aws.amazon.com/solutions/implementations", 32 | license="Apache License 2.0", 33 | packages=setuptools.find_namespace_packages(exclude=["build*"]), 34 | install_requires=[ 35 | "pip>=22.3.1", 36 | "aws_cdk_lib==2.88.0", 37 | "Click==8.1.3", 38 | "boto3==1.26.47", 39 | ], 40 | python_requires=">=3.11", 41 | classifiers=[ 42 | "Development Status :: 4 - Beta", 43 | "Intended Audience :: Developers", 44 | "License :: OSI Approved :: Apache Software License", 45 | "Programming Language :: JavaScript", 46 | "Programming Language :: Python :: 3 :: Only", 47 | "Programming Language :: Python :: 3.7", 48 | "Programming Language :: Python :: 3.8", 49 | "Programming Language :: Python :: 3.9", 50 | "Programming Language :: Python :: 3.10", 51 | "Programming Language :: Python :: 3.11", 52 | "Topic :: Software Development :: Code Generators", 53 | "Topic :: Utilities", 54 | "Typing :: Typed", 55 | ], 56 | zip_safe=False, 57 | ) 58 | -------------------------------------------------------------------------------- /source/aws_lambda/create_campaign/handler.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from typing import Dict, Any 5 | 6 | from aws_lambda_powertools import Logger, Tracer, Metrics 7 | from aws_lambda_powertools.utilities.typing import LambdaContext 8 | 9 | from shared.sfn_middleware import PersonalizeResource 10 | 11 | RESOURCE = "campaign" 12 | STATUS = "campaign.latestCampaignUpdate.status || campaign.status" 13 | CONFIG = { 14 | "name": { 15 | "source": "event", 16 | "path": "serviceConfig.name", 17 | }, 18 | "solutionVersionArn": { 19 | "source": "event", 20 | "path": "serviceConfig.solutionVersionArn", 21 | }, 22 | "minProvisionedTPS": { 23 | "source": "event", 24 | "path": "serviceConfig.minProvisionedTPS", 25 | "as": "int", 26 | }, 27 | "campaignConfig": { 28 | "source": "event", 29 | "path": "serviceConfig.campaignConfig", 30 | "default": "omit", 31 | }, 32 | "maxAge": { 33 | "source": "event", 34 | "path": "workflowConfig.maxAge", 35 | "default": "omit", 36 | "as": "seconds", 37 | }, 38 | "timeStarted": { 39 | "source": "event", 40 | "path": "workflowConfig.timeStarted", 41 | "default": "omit", 42 | "as": "iso8601", 43 | }, 44 | "tags": { 45 | "source": "event", 46 | "path": "serviceConfig.tags", 47 | "default": "omit", 48 | }, 49 | } 50 | 51 | logger = Logger() 52 | tracer = Tracer() 53 | metrics = Metrics() 54 | 55 | 56 | @metrics.log_metrics 57 | @tracer.capture_lambda_handler 58 | @PersonalizeResource( 59 | resource=RESOURCE, 60 | config=CONFIG, 61 | status=STATUS, 62 | ) 63 | def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict: 64 | """Create a campaign in Amazon Personalize based on the configuration in `event` 65 | :param event: AWS Lambda Event 66 | :param context: AWS Lambda Context 67 | :return: the configured campaign 68 | """ 69 | return event.get("resource") # return the campaign 70 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/s3/utils.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import logging 5 | from typing import List 6 | 7 | import aws_cdk.aws_iam as iam 8 | from aws_cdk import CfnResource, RemovalPolicy 9 | from aws_cdk.aws_s3 import BlockPublicAccess, Bucket, BucketEncryption 10 | from aws_solutions.cdk.cfn_nag import CfnNagSuppression, add_cfn_nag_suppressions 11 | from constructs import Construct 12 | 13 | logger = logging.getLogger("cdk-helper") 14 | 15 | 16 | class SecureBucket(Bucket): 17 | def __init__( 18 | self, 19 | scope: Construct, 20 | construct_id: str, 21 | suppress: List[CfnNagSuppression] = None, 22 | **kwargs, 23 | ): 24 | self.construct_id = construct_id 25 | 26 | kwargs = self.override_configuration(kwargs, "removal_policy", RemovalPolicy.RETAIN) 27 | kwargs = self.override_configuration(kwargs, "encryption", BucketEncryption.S3_MANAGED) 28 | kwargs = self.override_configuration(kwargs, "block_public_access", BlockPublicAccess.BLOCK_ALL) 29 | 30 | super().__init__(scope, construct_id, **kwargs) 31 | 32 | self.add_to_resource_policy( 33 | iam.PolicyStatement( 34 | sid="HttpsOnly", 35 | resources=[ 36 | self.arn_for_objects("*"), 37 | ], 38 | actions=["*"], 39 | effect=iam.Effect.DENY, 40 | principals=[iam.AnyPrincipal()], 41 | conditions={"Bool": {"aws:SecureTransport": False}}, 42 | ) 43 | ) 44 | 45 | bucket_cfn: CfnResource = self.node.default_child 46 | bucket_cfn.override_logical_id(construct_id) 47 | if suppress: 48 | add_cfn_nag_suppressions(bucket_cfn, suppress) 49 | 50 | def override_configuration(self, config, key, default=None): 51 | if not config.get(key): 52 | config[key] = default 53 | else: 54 | logger.warning(f"overriding {key} may reduce the security of the solution") 55 | return config 56 | -------------------------------------------------------------------------------- /source/tests/cdk_solution_helper/test_cdk_interfaces.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from pathlib import Path 5 | 6 | import pytest 7 | 8 | from aws_solutions.cdk.helpers import copytree 9 | 10 | 11 | @pytest.fixture(scope="function") 12 | def dir_to_copy(tmp_path): 13 | Path(tmp_path / "exists" / "sub1" / "sub2").mkdir(parents=True) 14 | Path(tmp_path / "exists" / "sub1" / "sub1_f").touch() 15 | Path(tmp_path / "exists" / "sub1" / "sub2") 16 | Path(tmp_path / "exists" / "sub1" / "sub2", "sub2_f").touch() 17 | Path(tmp_path / "exists" / "subroot_f").touch() 18 | Path(tmp_path / "other" / "sub3").mkdir(parents=True) 19 | Path(tmp_path / "other" / "sub3" / "sub3_f").touch() 20 | 21 | yield tmp_path 22 | 23 | 24 | def test_copytree_dir_exists(dir_to_copy): 25 | Path(dir_to_copy / "new").mkdir() 26 | copytree(src=dir_to_copy / "exists", dst=dir_to_copy / "new") 27 | 28 | assert Path(dir_to_copy / "new" / "sub1" / "sub1_f").exists() 29 | assert Path(dir_to_copy / "new" / "sub1" / "sub2" / "sub2_f").exists() 30 | assert Path(dir_to_copy / "new" / "subroot_f").exists() 31 | 32 | 33 | def test_copytree_dir_does_not_exist(dir_to_copy): 34 | copytree(src=dir_to_copy / "exists", dst=dir_to_copy / "new") 35 | copytree(src=dir_to_copy / "other", dst=dir_to_copy / "new") 36 | 37 | assert Path(dir_to_copy / "new" / "sub1" / "sub1_f").exists() 38 | assert Path(dir_to_copy / "new" / "sub1" / "sub2" / "sub2_f").exists() 39 | assert Path(dir_to_copy / "new" / "subroot_f").exists() 40 | assert Path(dir_to_copy / "new" / "sub3" / "sub3_f").exists() 41 | 42 | 43 | def test_copytree_globs(dir_to_copy): 44 | copytree( 45 | src=dir_to_copy / "exists", 46 | dst=dir_to_copy / "new", 47 | ignore=["**/sub2/*", "subroot_f"], 48 | ) 49 | 50 | assert not (Path(dir_to_copy) / "new" / "subroot_f").exists() 51 | assert (Path(dir_to_copy) / "new" / "sub1").exists() 52 | assert (Path(dir_to_copy) / "new" / "sub1" / "sub1_f").exists() 53 | assert not (Path(dir_to_copy) / "new" / "sub1" / "sub2").exists() 54 | -------------------------------------------------------------------------------- /source/scheduler/common/aws_solutions/scheduler/common/task.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from __future__ import annotations 5 | 6 | from dataclasses import dataclass, field 7 | from decimal import Decimal 8 | from typing import Union, Dict 9 | from uuid import uuid4 10 | 11 | from aws_solutions.scheduler.common import TASK_PK, TASK_SK 12 | from aws_solutions.scheduler.common.schedule import Schedule 13 | 14 | 15 | @dataclass 16 | class Task: 17 | """Represents a Scheduler scheduled task""" 18 | 19 | name: str 20 | schedule: Union[None, str, Schedule] = "" 21 | state_machine: Dict = field(default_factory=dict, repr=False) 22 | latest: Decimal = field(default=Decimal(0), repr=False, compare=False) 23 | version: str = field(default="v0", repr=False, compare=False) 24 | next_task_id: str = field(repr=False, compare=False, init=False) 25 | 26 | def __post_init__(self): 27 | if self.schedule: 28 | self.schedule = Schedule(self.schedule) 29 | self.next_task_id = self.get_next_task_id() 30 | 31 | def __str__(self) -> str: 32 | rv = f"{self.name}" 33 | if self.schedule: 34 | rv = f"{rv} ({self.schedule.expression})" 35 | return rv 36 | 37 | def get_next_task_id(self) -> str: 38 | """ 39 | Get a random next task ID (max 80 characters length) 40 | :return: 41 | """ 42 | return f"{self.name[:67]}-{uuid4().hex[:12]}" 43 | 44 | @staticmethod 45 | def key(task: Union[Task, str], version: int = 0) -> Dict: 46 | """ 47 | Get the dynamo db key associated with this task 48 | :param task: the full task name 49 | :param version: the task version key to request (defaults to 0, the latest task) 50 | :return: the key 51 | """ 52 | if isinstance(task, Task): 53 | task_name = task.name 54 | elif isinstance(task, str): 55 | task_name = task 56 | else: 57 | raise ValueError("task must be a string or a Task") 58 | 59 | return {TASK_PK: task_name, TASK_SK: f"v{version}"} 60 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/aws_lambda/functions/create_campaign.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from pathlib import Path 5 | 6 | import aws_cdk.aws_iam as iam 7 | from aws_cdk import Aws 8 | from constructs import Construct 9 | 10 | from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep 11 | from aws_solutions.cdk.cfn_guard import add_cfn_guard_suppressions 12 | 13 | class CreateCampaign(SolutionStep): 14 | def __init__( 15 | self, 16 | scope: Construct, 17 | id: str, 18 | layers=None, 19 | ): 20 | super().__init__( 21 | scope, 22 | id, 23 | layers=layers, 24 | entrypoint=(Path(__file__).absolute().parents[4] / "aws_lambda" / "create_campaign" / "handler.py"), 25 | libraries=[Path(__file__).absolute().parents[4] / "aws_lambda" / "shared"], 26 | ) 27 | 28 | add_cfn_guard_suppressions( 29 | self.function.role.node.try_find_child("Resource"), 30 | ["IAM_NO_INLINE_POLICY_CHECK"] 31 | ) 32 | 33 | def _set_permissions(self): 34 | self.function.add_to_role_policy( 35 | statement=iam.PolicyStatement( 36 | actions=[ 37 | "personalize:DescribeSolutionVersion", 38 | "personalize:ListSolutionVersions", 39 | "personalize:CreateCampaign", 40 | "personalize:ListCampaigns", 41 | "personalize:DescribeCampaign", 42 | "personalize:UpdateCampaign", 43 | "personalize:TagResource", 44 | "personalize:ListTagsForResource", 45 | ], 46 | effect=iam.Effect.ALLOW, 47 | resources=[ 48 | f"arn:{Aws.PARTITION}:personalize:{Aws.REGION}:{Aws.ACCOUNT_ID}:solution-version/*", 49 | f"arn:{Aws.PARTITION}:personalize:{Aws.REGION}:{Aws.ACCOUNT_ID}:solution/*", 50 | f"arn:{Aws.PARTITION}:personalize:{Aws.REGION}:{Aws.ACCOUNT_ID}:campaign/*", 51 | ], 52 | ) 53 | ) 54 | -------------------------------------------------------------------------------- /source/aws_lambda/create_solution/handler.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from typing import Dict, Any 5 | 6 | from aws_lambda_powertools import Logger, Tracer, Metrics 7 | from aws_lambda_powertools.utilities.typing import LambdaContext 8 | 9 | from shared.sfn_middleware import PersonalizeResource 10 | 11 | RESOURCE = "solution" 12 | STATUS = "solution.status" 13 | CONFIG = { 14 | "name": { 15 | "source": "event", 16 | "path": "serviceConfig.name", 17 | }, 18 | "performHPO": { 19 | "source": "event", 20 | "path": "serviceConfig.performHPO", 21 | "default": "omit", 22 | }, 23 | "recipeArn": { 24 | "source": "event", 25 | "path": "serviceConfig.recipeArn", 26 | "default": "omit", 27 | }, 28 | "datasetGroupArn": { 29 | "source": "event", 30 | "path": "serviceConfig.datasetGroupArn", 31 | }, 32 | "eventType": { 33 | "source": "event", 34 | "path": "serviceConfig.eventType", 35 | "default": "omit", 36 | }, 37 | "solutionConfig": { 38 | "source": "event", 39 | "path": "serviceConfig.solutionConfig", 40 | "default": "omit", 41 | }, 42 | "timeStarted": { 43 | "source": "event", 44 | "path": "workflowConfig.timeStarted", 45 | "default": "omit", 46 | "as": "iso8601", 47 | }, 48 | "tags": { 49 | "source": "event", 50 | "path": "serviceConfig.tags", 51 | "default": "omit", 52 | }, 53 | } 54 | logger = Logger() 55 | tracer = Tracer() 56 | metrics = Metrics() 57 | 58 | 59 | @metrics.log_metrics 60 | @tracer.capture_lambda_handler 61 | @PersonalizeResource( 62 | resource=RESOURCE, 63 | status=STATUS, 64 | config=CONFIG, 65 | ) 66 | def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict: 67 | """Create a solution in Amazon Personalize based on the configuration in `event` 68 | :param event: AWS Lambda Event 69 | :param context: AWS Lambda Context 70 | :return: the configured solution 71 | """ 72 | return event.get("resource") # return the solution 73 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_common/aws_solutions/core/config.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import os 5 | import re 6 | from typing import Dict 7 | 8 | import botocore.config 9 | 10 | from aws_solutions.core.logging import get_logger 11 | 12 | logger = get_logger(__name__) 13 | 14 | 15 | SOLUTION_ID_RE = re.compile(r"^SO(?P<id>\d+)(?P<component>[a-zA-Z]*)$")  # NOSONAR 16 | 17 | 18 | class SolutionConfigEnv: 19 | def __init__(self, env_var, default: str = "", regex: re.Pattern = None): 20 | self._env_var = env_var 21 | self._regex = regex 22 | self._value = default 23 | 24 | def _get_value_or_default(self) -> str: 25 | if self._value: 26 | return self._value 27 | return os.environ.get(self._env_var) 28 | 29 | def __get__(self, instance, owner) -> str: 30 | value = str(self._get_value_or_default()) 31 | if self._regex and not self._regex.match(value): 32 | raise ValueError( 33 | f"`{value}` received, but environment variable {self._env_var} (or default) must be set and match the pattern {self._regex.pattern}" 34 | ) 35 | return value 36 | 37 | def __set__(self, instance, value) -> None: 38 | self._value = value 39 | 40 | 41 | class Config: 42 | """Stores information about the current solution""" 43 | 44 | id = SolutionConfigEnv("SOLUTION_ID", regex=SOLUTION_ID_RE) 45 | version = SolutionConfigEnv("SOLUTION_VERSION", regex=None) 46 | _botocore_config = None 47 | 48 | @property 49 | def botocore_config(self) -> botocore.config.Config: 50 | if not self._botocore_config: 51 | self._botocore_config = botocore.config.Config(**self._botocore_config_defaults) 52 | return self._botocore_config 53 | 54 | @botocore_config.setter 55 | def botocore_config(self, other_config: botocore.config.Config): 56 | self._botocore_config = self.botocore_config.merge(other_config) 57 | 58 | @property 59 | def _botocore_config_defaults(self) -> Dict: 60 | return {"user_agent_extra": f"AwsSolution/{self.id}/{self.version}"} 61 | -------------------------------------------------------------------------------- /source/aws_lambda/shared/resource/name.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
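# [Editor's note - hedged illustration, not part of the original file] The helpers below convert between naming conventions; based on their implementations, the following should hold:
#     camel_to_snake("datasetImportJob") == "dataset_import_job"
#     snake_to_camel("dataset_import_job") == "datasetImportJob"
#     camel_to_dash("datasetGroup") == "dataset-group"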
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | def camel_to_snake(s): 5 | """ 6 | Convert a camelCasedName to a snake_cased_name 7 | :param s: the camelCasedName 8 | :return: the snake_cased_name 9 | """ 10 | return "".join(["_" + c.lower() if c.isupper() else c for c in s]).lstrip("_") 11 | 12 | 13 | def snake_to_camel(s: str): 14 | """ 15 | Convert a snake_cased_name to a camelCasedName 16 | :param s: the snake_cased_name 17 | :return: camelCasedName 18 | """ 19 | components = s.split("_") 20 | return components[0] + "".join(y.title() for y in components[1:]) 21 | 22 | 23 | def camel_to_dash(s: str): 24 | """ 25 | Convert a camelCasedName to a dash-cased-name 26 | :param s: the camelCasedName 27 | :return: the dash-cased-name 28 | """ 29 | return "".join(["-" + c.lower() if c.isupper() else c for c in s]).lstrip("-") 30 | 31 | 32 | class ResourceName: 33 | def __init__(self, name: str): 34 | self.name = self._validated_name(name) 35 | 36 | def _validated_name(self, name) -> str: 37 | """ 38 | Validate that a name is valid, raising ValueError if it is not 39 | :param name: the name to validate 40 | :return: the validated name 41 | """ 42 | if not name.isalpha(): 43 | raise ValueError("name must be camelCased") 44 | if not name[0].islower(): 45 | raise ValueError("name must start with a lower case character") 46 | return name 47 | 48 | @property 49 | def dash(self) -> str: 50 | """ 51 | Get the dash-cased-name of the resource 52 | :return: the dash-cased-name 53 | """ 54 | return camel_to_dash(self.name) 55 | 56 | @property 57 | def snake(self) -> str: 58 | """ 59 | Get the snake_cased_name of the resource 60 | :return: the snake_cased_name 61 | """ 62 | return camel_to_snake(self.name) 63 | 64 | @property 65 | def camel(self) -> str: 66 | """ 67 | Get the camelCasedName of the resource 68 | :return: the camelCasedName 69 | """ 70 | return self.name 71 | -------------------------------------------------------------------------------- /source/aws_lambda/create_dataset_import_job/handler.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from typing import Dict, Any 5 | 6 | from aws_lambda_powertools import Logger, Tracer, Metrics 7 | from aws_lambda_powertools.utilities.typing import LambdaContext 8 | 9 | from shared.sfn_middleware import PersonalizeResource 10 | 11 | RESOURCE = "datasetImportJob" 12 | STATUS = "datasetImportJob.status" 13 | CONFIG = { 14 | "jobName": { 15 | "source": "event", 16 | "path": "serviceConfig.jobName", 17 | }, 18 | "datasetArn": { 19 | "source": "event", 20 | "path": "serviceConfig.datasetArn", 21 | }, 22 | "dataSource": { 23 | "source": "event", 24 | "path": "serviceConfig.dataSource", 25 | }, 26 | "roleArn": {"source": "environment", "path": "ROLE_ARN"}, 27 | "maxAge": { 28 | "source": "event", 29 | "path": "workflowConfig.maxAge", 30 | "default": "omit", 31 | "as": "seconds", 32 | }, 33 | "timeStarted": { 34 | "source": "event", 35 | "path": "workflowConfig.timeStarted", 36 | "default": "omit", 37 | "as": "iso8601", 38 | }, 39 | "importMode": {"source": "event", "path": "serviceConfig.importMode", "default": "omit"}, 40 | "tags": { 41 | "source": "event", 42 | "path": "serviceConfig.tags", 43 | "default": "omit", 44 | }, 45 | "publishAttributionMetricsToS3": { 46 | "source": "event", 47 | "path": "serviceConfig.publishAttributionMetricsToS3", 48 | "default": "omit", 49 | }, 50 | } 51 | 52 | logger = Logger() 53 | tracer = Tracer() 54 | metrics = Metrics() 55 | 56 | 57 | @metrics.log_metrics 58 | @tracer.capture_lambda_handler 59 | @PersonalizeResource( 60 | resource=RESOURCE, 61 | status=STATUS, 62 | config=CONFIG, 63 | ) 64 | def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict: 65 | """Create a dataset import job in Amazon Personalize based on the configuration in `event` 66 | :param event: AWS Lambda Event 67 | :param context: AWS Lambda Context 68 | :return: the configured dataset import job 69 | """ 70 | return event.get("resource") # return the dataset import job 71 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/aws_lambda/functions/create_dataset.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from pathlib import Path 5 | from typing import Optional 6 | 7 | import aws_cdk.aws_iam as iam 8 | from aws_cdk import Aws 9 | from aws_cdk.aws_stepfunctions import IChainable 10 | from constructs import Construct 11 | 12 | from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep 13 | from aws_solutions.cdk.cfn_guard import add_cfn_guard_suppressions 14 | 15 | class CreateDataset(SolutionStep): 16 | def __init__( 17 | self, 18 | scope: Construct, 19 | id: str, 20 | layers=None, 21 | failure_state: Optional[IChainable] = None, 22 | ): 23 | super().__init__( 24 | scope, 25 | id, 26 | layers=layers, 27 | failure_state=failure_state, 28 | entrypoint=(Path(__file__).absolute().parents[4] / "aws_lambda" / "create_dataset" / "handler.py"), 29 | libraries=[Path(__file__).absolute().parents[4] / "aws_lambda" / "shared"], 30 | ) 31 | 32 | add_cfn_guard_suppressions( 33 | self.function.role.node.try_find_child("Resource"), 34 | ["IAM_NO_INLINE_POLICY_CHECK"] 35 | ) 36 | 37 | def _set_permissions(self): 38 | self.function.add_to_role_policy( 39 | statement=iam.PolicyStatement( 40 | actions=[ 41 | "personalize:DescribeDatasetGroup", 42 | "personalize:DescribeSchema", 43 | "personalize:CreateDataset", 44 | "personalize:DescribeDataset", 45 | "personalize:ListDatasets", 46 | "personalize:TagResource", 47 | "personalize:ListTagsForResource", 48 | ], 49 | effect=iam.Effect.ALLOW, 50 | resources=[ 51 | f"arn:{Aws.PARTITION}:personalize:{Aws.REGION}:{Aws.ACCOUNT_ID}:dataset-group/*", 52 | f"arn:{Aws.PARTITION}:personalize:{Aws.REGION}:{Aws.ACCOUNT_ID}:schema/*", 53 | f"arn:{Aws.PARTITION}:personalize:{Aws.REGION}:{Aws.ACCOUNT_ID}:dataset/*", 54 | ], 55 | ) 56 | ) 57 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/aws_lambda/functions/create_config.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from pathlib import Path 5 | 6 | import aws_cdk.aws_iam as iam 7 | from aws_cdk import Duration, Aws 8 | from aws_cdk.aws_lambda import Tracing, Runtime, RuntimeFamily 9 | from constructs import Construct 10 | 11 | from aws_solutions.cdk.aws_lambda.environment import Environment 12 | from aws_solutions.cdk.aws_lambda.python.function import SolutionsPythonFunction 13 | from aws_solutions.cdk.cfn_nag import add_cfn_nag_suppressions, CfnNagSuppression 14 | from aws_solutions.cdk.cfn_guard import add_cfn_guard_suppressions 15 | 16 | 17 | class CreateConfig(SolutionsPythonFunction): 18 | def __init__(self, scope: Construct, construct_id: str, **kwargs): 19 | entrypoint = Path(__file__).absolute().parents[4] / "aws_lambda" / "create_config" / "handler.py" 20 | function_name = "lambda_handler" 21 | kwargs["libraries"] = [Path(__file__).absolute().parents[4] / "aws_lambda" / "shared"] 22 | kwargs["tracing"] = Tracing.ACTIVE 23 | kwargs["timeout"] = Duration.seconds(90) 24 | kwargs["runtime"] = Runtime("python3.11", RuntimeFamily.PYTHON) 25 | 26 | super().__init__(scope, construct_id, entrypoint, function_name, **kwargs) 27 | 28 | self.environment = Environment(self) 29 | 30 | add_cfn_nag_suppressions( 31 | self.role.node.try_find_child("DefaultPolicy").node.find_child("Resource"), 32 | [CfnNagSuppression("W12", "IAM policy for AWS X-Ray requires an allow on *")], 33 | ) 34 | 35 | add_cfn_guard_suppressions( 36 | self.role.node.try_find_child("Resource"), 37 | ["IAM_NO_INLINE_POLICY_CHECK"] 38 | ) 39 | 40 | self._set_permissions() 41 | 42 | def _set_permissions(self): 43 | self.add_to_role_policy( 44 | statement=iam.PolicyStatement( 45 | actions=[ 46 | "personalize:Describe*", 47 | "personalize:List*", 48 | ], 49 | effect=iam.Effect.ALLOW, 50 | resources=[ 51 | f"arn:{Aws.PARTITION}:personalize:{Aws.REGION}:{Aws.ACCOUNT_ID}:*", 52 | ], 53 | ) 54 | ) 55 | -------------------------------------------------------------------------------- /source/aws_lambda/shared/s3.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
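# [Editor's note - hedged illustration, not part of the original file] The S3 helper below treats a URL ending in the expected suffix (".csv" by default) as a single-object existence check, and any other URL as a prefix search for the most recently modified matching object, so usage might look like (bucket and keys are assumptions):
#     S3("s3://my-bucket/train/users.csv").exists
#     S3("s3://my-bucket/train").last_modified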
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from functools import lru_cache 5 | from urllib.parse import urlparse 6 | 7 | import botocore.exceptions 8 | 9 | from aws_solutions.core import get_service_resource 10 | 11 | 12 | class S3: 13 | cli = get_service_resource("s3") 14 | 15 | def __init__(self, url, expected_suffix=".csv"): 16 | self.cli = get_service_resource("s3") 17 | self.expected_suffix = expected_suffix 18 | self.url = url 19 | self._last_modified = None 20 | self.bucket, self.key = self._urlparse() 21 | 22 | def _urlparse(self): 23 | parsed = urlparse(self.url, allow_fragments=False) 24 | bucket = parsed.netloc 25 | key = parsed.path.lstrip("/") 26 | return bucket, key 27 | 28 | @property 29 | @lru_cache() 30 | def exists(self): 31 | if self.url.endswith(self.expected_suffix): 32 | return self._exists_one() 33 | else: 34 | return self._exists_any() 35 | 36 | @property 37 | def last_modified(self): 38 | if self.exists: 39 | return self._last_modified 40 | 41 | def _exists_one(self): 42 | try: 43 | metadata = self.cli.Object(self.bucket, self.key) 44 | metadata.load() 45 | except botocore.exceptions.ClientError as exc: 46 | if exc.response["Error"]["Code"] == "404": 47 | return False 48 | 49 | self._last_modified = metadata.last_modified 50 | return True 51 | 52 | def _exists_any(self): 53 | latest = None 54 | try: 55 | bucket = self.cli.Bucket(self.bucket) 56 | objects = [ 57 | o 58 | for o in bucket.objects.filter(Prefix=self.key + "/", Delimiter="/") 59 | if o.key.endswith(self.expected_suffix) 60 | ] 61 | latest = next(iter(sorted(objects, key=lambda k: k.last_modified, reverse=True)), None) 62 | except botocore.exceptions.ClientError as exc: 63 | if exc.response["Error"]["Code"] == "404": 64 | return False 65 | 66 | if latest: 67 | self._last_modified = latest.last_modified 68 | return True 69 | else: 70 | return False 71 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/aws_lambda/functions/s3_event.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from pathlib import Path 5 | 6 | from aws_cdk import Duration 7 | from aws_cdk.aws_lambda import Tracing, Runtime, RuntimeFamily 8 | from aws_cdk.aws_s3 import Bucket 9 | from aws_cdk.aws_sns import Topic 10 | from aws_cdk.aws_stepfunctions import StateMachine 11 | from constructs import Construct 12 | 13 | from aws_solutions.cdk.aws_lambda.environment import Environment 14 | from aws_solutions.cdk.aws_lambda.python.function import SolutionsPythonFunction 15 | from aws_solutions.cdk.cfn_nag import add_cfn_nag_suppressions, CfnNagSuppression 16 | from aws_solutions.cdk.cfn_guard import add_cfn_guard_suppressions 17 | 18 | class S3EventHandler(SolutionsPythonFunction): 19 | def __init__( 20 | self, scope: Construct, construct_id: str, state_machine: StateMachine, bucket: Bucket, topic: Topic, **kwargs 21 | ): 22 | entrypoint = Path(__file__).absolute().parents[4] / "aws_lambda" / "s3_event" / "handler.py" 23 | function_name = "lambda_handler" 24 | kwargs["libraries"] = [Path(__file__).absolute().parents[4] / "aws_lambda" / "shared"] 25 | kwargs["tracing"] = Tracing.ACTIVE 26 | kwargs["timeout"] = Duration.seconds(15) 27 | kwargs["runtime"] = Runtime("python3.11", RuntimeFamily.PYTHON) 28 | 29 | super().__init__(scope, construct_id, entrypoint, function_name, **kwargs) 30 | 31 | self.environment = Environment(self) 32 | self.add_environment("STATE_MACHINE_ARN", state_machine.state_machine_arn) 33 | 34 | add_cfn_nag_suppressions( 35 | self.role.node.try_find_child("DefaultPolicy").node.find_child("Resource"), 36 | [CfnNagSuppression("W12", "IAM policy for AWS X-Ray requires an allow on *")], 37 | ) 38 | 39 | add_cfn_guard_suppressions( 40 | self.role.node.try_find_child("Resource"), 41 | ["IAM_NO_INLINE_POLICY_CHECK"] 42 | ) 43 | 44 | bucket.grant_read(self, objects_key_pattern="train/*") 45 | state_machine.grant_start_execution(self) 46 | 47 | self.grant_publish(topic) 48 | 49 | def grant_publish(self, topic: Topic): 50 | topic.grant_publish(self) 51 | self.add_environment("SNS_TOPIC_ARN", topic.topic_arn) 52 | -------------------------------------------------------------------------------- /source/aws_lambda/create_batch_segment_job/handler.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from typing import Dict, Any 5 | 6 | from aws_lambda_powertools import Logger, Tracer, Metrics 7 | from aws_lambda_powertools.utilities.typing import LambdaContext 8 | 9 | from shared.sfn_middleware import PersonalizeResource 10 | 11 | RESOURCE = "batchSegmentJob" 12 | STATUS = "batchSegmentJob.status" 13 | CONFIG = { 14 | "filterArn": { 15 | "source": "event", 16 | "path": "serviceConfig.filterArn", 17 | "default": "omit", 18 | }, 19 | "jobInput": { 20 | "source": "event", 21 | "path": "serviceConfig.jobInput", 22 | }, 23 | "jobName": { 24 | "source": "event", 25 | "path": "serviceConfig.jobName", 26 | }, 27 | "jobOutput": {"source": "event", "path": "serviceConfig.jobOutput"}, 28 | "solutionVersionArn": { 29 | "source": "event", 30 | "path": "serviceConfig.solutionVersionArn", 31 | }, 32 | "numResults": { 33 | "source": "event", 34 | "path": "serviceConfig.numResults", 35 | "default": "omit", 36 | }, 37 | "roleArn": {"source": "environment", "path": "ROLE_ARN"}, 38 | "maxAge": { 39 | "source": "event", 40 | "path": "workflowConfig.maxAge", 41 | "default": "omit", 42 | "as": "seconds", 43 | }, 44 | "timeStarted": { 45 | "source": "event", 46 | "path": "workflowConfig.timeStarted", 47 | "default": "omit", 48 | "as": "iso8601", 49 | }, 50 | "tags": { 51 | "source": "event", 52 | "path": "serviceConfig.tags", 53 | "default": "omit", 54 | }, 55 | } 56 | 57 | logger = Logger() 58 | tracer = Tracer() 59 | metrics = Metrics() 60 | 61 | 62 | @metrics.log_metrics 63 | @tracer.capture_lambda_handler 64 | @PersonalizeResource( 65 | resource=RESOURCE, 66 | status=STATUS, 67 | config=CONFIG, 68 | ) 69 | def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict: 70 | """Create a batch segment job in Amazon Personalize based on the configuration in `event` 71 | :param event: AWS Lambda Event 72 | :param context: AWS Lambda Context 73 | :return: the configured batch segment job 74 | """ 75 | return event.get("resource") # return the batch segment job 76 | -------------------------------------------------------------------------------- /source/aws_lambda/shared/events.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from datetime import datetime 5 | from typing import Dict, Optional 6 | 7 | from aws_lambda_powertools import Logger 8 | 9 | from shared.exceptions import ( 10 | NotificationError, 11 | SolutionVersionPending, 12 | ) 13 | from shared.notifiers import NotifyEventBridge 14 | from shared.resource import Resource 15 | 16 | logger = Logger() 17 | 18 | 19 | NOTIFY_LIST = [NotifyEventBridge()] 20 | 21 | 22 | class Notifies: 23 | """Decorates a resource creation or describe call to provide event notifications""" 24 | 25 | def __init__(self, status: str): 26 | self.status = status 27 | 28 | def __call__(self, function): 29 | def wrapper(caller, resource: Resource, **kwargs): 30 | try: 31 | result = function(caller, resource, **kwargs) 32 | except SolutionVersionPending as exc: 33 | # because of how solution versions are handled, we must manually notify and re-raise 34 | self.notify( 35 | resource=resource, 36 | result={ 37 | "solutionVersionArn": str(exc), 38 | "status": "CREATE IN_PROGRESS", 39 | }, 40 | cutoff=None, 41 | ) 42 | raise exc 43 | 44 | # run the notifier 45 | cutoff = kwargs.get("timeStarted") 46 | self.notify(resource, result, cutoff) 47 | 48 | return result 49 | 50 | return wrapper 51 | 52 | def notify(self, resource: Resource, result: Dict, cutoff: Optional[datetime]) -> None: 53 | """ 54 | Notify each target in the NOTIFY_LIST 55 | :param resource: the subject of the notification 56 | :param result: the description of the subject of the notification 57 | :param cutoff: the cutoff datetime for notifications (UTC required, timezone aware) 58 | :return: None 59 | """ 60 | for notifier in NOTIFY_LIST: 61 | notifier.set_cutoff(cutoff) 62 | try: 63 | notifier.notify(self.status, resource, result) 64 | except NotificationError as exc: 65 | logger.error(f"notifier {notifier.name} failed: {str(exc)}") # log and continue through notifiers 66 | -------------------------------------------------------------------------------- /source/tests/aws_lambda/test_events.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from datetime import datetime 5 | 6 | from dateutil.relativedelta import relativedelta 7 | from dateutil.tz import tzlocal 8 | 9 | from shared.events import Notifies 10 | from shared.resource import DatasetGroup 11 | 12 | 13 | def test_notifies_decorator_create(notifier_stubber): 14 | status = "ACTIVE" 15 | 16 | class RequiresNotification: 17 | @Notifies(status=status) 18 | def notifies_something(self, resource, **kwargs): 19 | return {"datasetGroupArn": "SOME_ARN"} 20 | 21 | rn = RequiresNotification() 22 | rn.notifies_something(DatasetGroup(), timeStarted="2021-10-10T10:00:00Z") 23 | 24 | assert notifier_stubber.creation_notifications[0] == { 25 | "resource": "datasetGroup", 26 | "status": "ACTIVE", 27 | "result": { 28 | "datasetGroupArn": "SOME_ARN", 29 | }, 30 | } 31 | assert len(notifier_stubber.creation_notifications) == 1 32 | assert len(notifier_stubber.completion_notifications) == 0 33 | 34 | 35 | def test_notifies_decorator_complete(mocker, notifier_stubber): 36 | status = "ACTIVE" 37 | 38 | created = datetime.now(tzlocal()) 39 | updated = created + relativedelta(seconds=120) 40 | 41 | class RequiresNotification: 42 | @Notifies(status=status) 43 | def notifies_something(self, resource, **kwargs): 44 | return { 45 | "datasetGroup": { 46 | "datasetGroupArn": "SOME_ARN", 47 | "creationDateTime": created, 48 | "lastUpdatedDateTime": updated, 49 | "status": "ACTIVE", 50 | } 51 | } 52 | 53 | rn = RequiresNotification() 54 | rn.notifies_something(DatasetGroup(), timeStarted=created) 55 | 56 | assert notifier_stubber.completion_notifications[0] == { 57 | "resource": "datasetGroup", 58 | "result": { 59 | "datasetGroup": { 60 | "datasetGroupArn": "SOME_ARN", 61 | "lastUpdatedDateTime": updated, 62 | "creationDateTime": created, 63 | "status": "ACTIVE", 64 | } 65 | }, 66 | "status": "ACTIVE", 67 | } 68 | assert len(notifier_stubber.creation_notifications) == 0 69 | assert len(notifier_stubber.completion_notifications) == 1 70 | -------------------------------------------------------------------------------- /source/scheduler/common/setup.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import re 5 | from pathlib import Path 6 | 7 | import setuptools 8 | 9 | VERSION_RE = re.compile(r"\#\# \[(?P<version>.*)\]", re.MULTILINE) # NOSONAR 10 | 11 | 12 | def get_version(): 13 | """ 14 | Detect the solution version from the changelog. Latest version on top.
15 | """ 16 | changelog = open(Path(__file__).resolve().parent.parent / "CHANGELOG.md").read() 17 | versions = VERSION_RE.findall(changelog) 18 | if not len(versions): 19 | raise ValueError("use the standard semver format in your CHANGELOG.md") 20 | build_version = versions[0] 21 | print(f"Build Version: {build_version}") 22 | return build_version 23 | 24 | 25 | setuptools.setup( 26 | name="aws-solutions-scheduler-common", 27 | version=get_version(), 28 | description="Scheduler shared libraries and CLI", 29 | long_description=open("../README.md").read(), 30 | author="Amazon Web Services", 31 | url="https://aws.amazon.com/solutions/implementations", 32 | license="Apache License 2.0", 33 | packages=setuptools.find_namespace_packages(exclude=["build*"]), 34 | install_requires=[ 35 | "pip>=22.3.1", 36 | "aws-lambda-powertools==2.15.0", 37 | "aws-xray-sdk==2.12.0", 38 | "aws-solutions-python==2.0.0", 39 | "click==8.1.3", 40 | "cronex==0.1.3.1", 41 | "boto3==1.26.47", 42 | "requests==2.32.4", 43 | "crhelper==2.0.11", 44 | "rich==12.6.0", 45 | ], 46 | entry_points=""" 47 | [console_scripts] 48 | aws-solutions-scheduler=aws_solutions.scheduler.common.scripts.scheduler_cli:cli 49 | """, 50 | python_requires=">=3.11", 51 | classifiers=[ 52 | "Development Status :: 4 - Beta", 53 | "Intended Audience :: Developers", 54 | "License :: OSI Approved :: Apache Software License", 55 | "Programming Language :: JavaScript", 56 | "Programming Language :: Python :: 3 :: Only", 57 | "Programming Language :: Python :: 3.7", 58 | "Programming Language :: Python :: 3.8", 59 | "Programming Language :: Python :: 3.9", 60 | "Programming Language :: Python :: 3.10", 61 | "Programming Language :: Python :: 3.11", 62 | "Topic :: Software Development :: Code Generators", 63 | "Topic :: Utilities", 64 | "Typing :: Typed", 65 | ], 66 | zip_safe=False, 67 | ) 68 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/resource_name/src/custom_resources/name.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import logging 5 | from os import getenv 6 | from uuid import uuid4 as uuid 7 | 8 | from crhelper import CfnResource 9 | 10 | logger = logging.getLogger(__name__) 11 | helper = CfnResource(log_level=getenv("LOG_LEVEL", "WARNING")) 12 | 13 | 14 | def get_property(event, property_name, property_default=None): 15 | resource_prop = event.get("ResourceProperties", {}).get(property_name, property_default) 16 | if not resource_prop: 17 | raise ValueError(f"missing required property {property_name}") 18 | return resource_prop 19 | 20 | 21 | @helper.create 22 | def generate_name(event, _): 23 | """ 24 | Generate a resource name containing the stack name and the resource purpose. This is useful 25 | when you need to associate policies that refer to a resource by name (and thus need 26 | a predictable resource name). 
This is commonly used when associating policies with buckets 27 | or other resources that might introduce a circular resource dependency 28 | 29 | :param event: The CloudFormation custom resource event 30 | :return: None 31 | """ 32 | resource_id = get_property(event, "Id", uuid().hex[0:12]) 33 | stack_name = get_property(event, "StackName") 34 | purpose = get_property(event, "Purpose") 35 | max_length = int(get_property(event, "MaxLength")) 36 | 37 | name = f"{stack_name}-{purpose}-{resource_id}".lower() 38 | if len(name) > max_length: 39 | logger.warning("cannot use stack name in bucket name - trying default") 40 | name = f"{purpose}-{resource_id}".lower() 41 | if len(name) > max_length: 42 | raise ValueError( 43 | f"the derived resource name {name} is too long ({len(name)} / {max_length}) - please use a shorter purpose or stack name" 44 | ) 45 | 46 | logger.info(f"the derived resource name is {name}") 47 | helper.Data["Name"] = name 48 | helper.Data["Id"] = resource_id 49 | 50 | 51 | @helper.update 52 | @helper.delete 53 | def no_op(_, __): 54 | pass # pragma: no cover 55 | 56 | 57 | def handler(event, _): 58 | """ 59 | Handler entrypoint - see generate_name for implementation details 60 | :param event: The CloudFormation custom resource event 61 | :return: PhysicalResourceId 62 | """ 63 | helper(event, _) # pragma: no cover 64 | -------------------------------------------------------------------------------- /deployment/run-unit-tests.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | # 6 | # This assumes all of the OS-level configuration has been completed and git repo has already been cloned 7 | # 8 | # This script should be run from the repo's deployment directory 9 | # cd deployment 10 | # ./run-unit-tests.sh 11 | # 12 | 13 | [ "$DEBUG" == 'true' ] && set -x 14 | set -e 15 | 16 | # Get reference for all important folders 17 | template_dir="$PWD" 18 | source_dir="$(cd $template_dir/../source; pwd -P)" 19 | root_dir="$template_dir/.." 
20 | 21 | echo "------------------------------------------------------------------------------" 22 | echo "[Init] Clean old folders" 23 | echo "------------------------------------------------------------------------------" 24 | 25 | cd $root_dir 26 | if [ -d ".venv" ]; then 27 | rm -rf ".venv" 28 | fi 29 | 30 | echo "------------------------------------------------------------------------------" 31 | echo "[Env] Create virtual environment and install dependencies" 32 | echo "------------------------------------------------------------------------------" 33 | 34 | #Set the python version to 3.11 35 | pip install virtualenv 36 | python3.11 -m virtualenv .venv 37 | source .venv/bin/activate 38 | 39 | cd $source_dir 40 | pip install --upgrade pip 41 | pip install -r $source_dir/requirements-dev.txt 42 | cd - 43 | 44 | echo "------------------------------------------------------------------------------" 45 | echo "[Test] Run pytest with coverage" 46 | echo "------------------------------------------------------------------------------" 47 | cd $source_dir 48 | # setup coverage report path 49 | coverage_report_path=$source_dir/tests/coverage-reports/source.coverage.xml 50 | echo "coverage report path set to $coverage_report_path" 51 | 52 | pytest --cov --cov-report=term-missing --cov-report "xml:$coverage_report_path" 53 | 54 | # The pytest --cov with its parameters and .coveragerc generates a xml cov-report with `coverage/sources` list 55 | # with absolute path for the source directories. To avoid dependencies of tools (such as SonarQube) on different 56 | # absolute paths for source directories, this substitution is used to convert each absolute source directory 57 | # path to the corresponding project relative path. The $source_dir holds the absolute path for source directory. 58 | sed -i -e "s,$source_dir,source,g" $coverage_report_path 59 | 60 | # deactivate the virtual environment 61 | deactivate 62 | 63 | cd $template_dir 64 | 65 | -------------------------------------------------------------------------------- /source/infrastructure/personalize/step_functions/filter_fragment.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from typing import List 5 | 6 | from aws_cdk import Duration 7 | from aws_cdk.aws_stepfunctions import ( 8 | StateMachineFragment, 9 | State, 10 | INextable, 11 | Choice, 12 | Pass, 13 | Condition, 14 | Map, 15 | JsonPath, 16 | ) 17 | from constructs import Construct 18 | 19 | from personalize.aws_lambda.functions import ( 20 | CreateFilter, 21 | ) 22 | 23 | 24 | class FilterFragment(StateMachineFragment): 25 | def __init__( 26 | self, 27 | scope: Construct, 28 | id: str, 29 | create_filter: CreateFilter, 30 | ): 31 | super().__init__(scope, id) 32 | 33 | # total allowed elapsed duration ~ 11m30s 34 | retry_config = { 35 | "backoff_rate": 1.25, 36 | "interval": Duration.seconds(8), 37 | "max_attempts": 15, 38 | } 39 | 40 | self.prepare_filter_input = Pass( 41 | self, 42 | "Prepare Filter Input Data", 43 | input_path="$.datasetGroupArn", 44 | result_path="$.filter.serviceConfig.datasetGroupArn", 45 | ) 46 | self.create_filter = create_filter.state( 47 | self, 48 | "Create Filter", 49 | input_path="$.filter", 50 | **retry_config, 51 | ) 52 | self.not_required = Pass(self, "Filters Not Required") 53 | self.create_filters = Map( 54 | self, 55 | "Create Filters", 56 | items_path="$.filters", 57 | parameters={ 58 | "datasetGroupArn.$": "$.datasetGroup.serviceConfig.datasetGroupArn", 59 | "filter.$": "$$.Map.Item.Value", # NOSONAR (python:S1192) - string for clarity 60 | }, 61 | result_path=JsonPath.DISCARD, 62 | ) 63 | self.start = ( 64 | Choice(self, "Check if Filters Required") 65 | .when( 66 | Condition.is_present("$.filters[0]"), 67 | self.create_filters.iterator(self.prepare_filter_input.next(self.create_filter)), 68 | ) 69 | .otherwise(self.not_required) 70 | ) 71 | 72 | @property 73 | def start_state(self) -> State: 74 | return self.start.start_state 75 | 76 | @property 77 | def end_states(self) -> List[INextable]: 78 | return [self.not_required, self.create_filters] 79 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/tools/cleaner.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import logging 5 | import os 6 | import shutil 7 | from dataclasses import dataclass 8 | from pathlib import Path 9 | 10 | logger = logging.getLogger("cdk-helper") 11 | 12 | 13 | @dataclass 14 | class Cleanable: 15 | """Encapsulates something that can be cleaned by the cleaner""" 16 | 17 | name: str 18 | file_type: str 19 | pattern: str 20 | 21 | def __post_init__(self): 22 | if self.file_type not in ("d", "f"): 23 | raise ValueError("only directories and files are allowed ('d' or 'f')") 24 | 25 | def delete(self, source_dir): 26 | source_path = Path(source_dir) 27 | 28 | for path in source_path.rglob(self.pattern): 29 | if "aws_solutions" not in str(path.name): # prevent the module from being unlinked in a dev environment 30 | if self.file_type == "d" and path.is_dir(): 31 | logger.info(f"deleting {self.name} directory {path}") 32 | shutil.rmtree(path, ignore_errors=True) 33 | if self.file_type == "f" and path.is_file(): 34 | logger.info(f"deleting {self.name} file {path}") 35 | try: 36 | path.unlink() 37 | except FileNotFoundError: 38 | pass 39 | 40 | 41 | class Cleaner: 42 | """Encapsulates functions that help clean up the build environment.""" 43 | 44 | TO_CLEAN = [ 45 | Cleanable("Python bytecode", "f", "*.py[cod]"), 46 | Cleanable("Python Coverage databases", "f", ".coverage"), 47 | Cleanable("CDK Cloud Assemblies", "d", "cdk.out"), 48 | Cleanable("Python egg", "d", "*.egg-info"), 49 | Cleanable("Python bytecode cache", "d", "__pycache__"), 50 | Cleanable("Python test cache", "d", ".pytest_cache"), 51 | ] 52 | 53 | @staticmethod 54 | def clean_dirs(*args): 55 | """Recursively remove each of its arguments, then recreate the directory""" 56 | for dir_to_remove in args: 57 | logger.info("cleaning %s" % dir_to_remove) 58 | shutil.rmtree(dir_to_remove, ignore_errors=True) 59 | os.makedirs(dir_to_remove) 60 | 61 | @staticmethod 62 | def cleanup_source(source_dir): 63 | """Cleans up all items found in TO_CLEAN""" 64 | for item in Cleaner.TO_CLEAN: 65 | item.delete(source_dir) 66 | -------------------------------------------------------------------------------- /source/aws_lambda/create_batch_inference_job/handler.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from typing import Dict, Any 5 | 6 | from aws_lambda_powertools import Logger, Tracer, Metrics 7 | from aws_lambda_powertools.utilities.typing import LambdaContext 8 | 9 | from shared.sfn_middleware import PersonalizeResource 10 | 11 | RESOURCE = "batchInferenceJob" 12 | STATUS = "batchInferenceJob.status" 13 | CONFIG = { 14 | "jobName": { 15 | "source": "event", 16 | "path": "serviceConfig.jobName", 17 | }, 18 | "solutionVersionArn": { 19 | "source": "event", 20 | "path": "serviceConfig.solutionVersionArn", 21 | }, 22 | "filterArn": { 23 | "source": "event", 24 | "path": "serviceConfig.filterArn", 25 | "default": "omit", 26 | }, 27 | "numResults": { 28 | "source": "event", 29 | "path": "serviceConfig.numResults", 30 | "default": "omit", 31 | }, 32 | "jobInput": { 33 | "source": "event", 34 | "path": "serviceConfig.jobInput", 35 | }, 36 | "jobOutput": {"source": "event", "path": "serviceConfig.jobOutput"}, 37 | "roleArn": {"source": "environment", "path": "ROLE_ARN"}, 38 | "batchInferenceJobConfig": { 39 | "source": "event", 40 | "path": "serviceConfig.batchInferenceJobConfig", 41 | "default": "omit", 42 | }, 43 | "maxAge": { 44 | "source": "event", 45 | "path": "workflowConfig.maxAge", 46 | "default": "omit", 47 | "as": "seconds", 48 | }, 49 | "timeStarted": { 50 | "source": "event", 51 | "path": "workflowConfig.timeStarted", 52 | "default": "omit", 53 | "as": "iso8601", 54 | }, 55 | "tags": { 56 | "source": "event", 57 | "path": "serviceConfig.tags", 58 | "default": "omit", 59 | }, 60 | } 61 | 62 | logger = Logger() 63 | tracer = Tracer() 64 | metrics = Metrics() 65 | 66 | 67 | @metrics.log_metrics 68 | @tracer.capture_lambda_handler 69 | @PersonalizeResource( 70 | resource=RESOURCE, 71 | status=STATUS, 72 | config=CONFIG, 73 | ) 74 | def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict: 75 | """Create a batch inference job in Amazon Personalize based on the configuration in `event` 76 | :param event: AWS Lambda Event 77 | :param context: AWS Lambda Context 78 | :return: the configured batch inference job 79 | """ 80 | return event.get("resource") # return the batch inference job 81 | -------------------------------------------------------------------------------- /source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/create_scheduled_task.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from pathlib import Path 5 | from typing import Optional 6 | 7 | import aws_cdk.aws_iam as iam 8 | from aws_cdk.aws_dynamodb import ITable 9 | from aws_cdk.aws_stepfunctions import IChainable 10 | from constructs import Construct 11 | 12 | from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep 13 | from aws_solutions.cdk.cfn_guard import add_cfn_guard_suppressions 14 | 15 | 16 | class CreateScheduledTask(SolutionStep): 17 | def __init__( 18 | self, # NOSONAR (python: S107) - allow large number of method parameters 19 | scope: Construct, 20 | id: str, 21 | layers=None, 22 | failure_state: Optional[IChainable] = None, 23 | scheduler_table: ITable = None, 24 | state_machine_arn: str = None, 25 | state_machine_executions_arn: str = None, 26 | ): 27 | self.scheduler_table = scheduler_table 28 | self.state_machine_arn = state_machine_arn 29 | self.state_machine_executions_arn = state_machine_executions_arn 30 | 31 | super().__init__( 32 | scope, 33 | id, 34 | layers=layers, 35 | failure_state=failure_state, 36 | function="create_schedule", 37 | entrypoint=Path(__file__).parents[1].resolve() / "aws_lambda" / "scheduler" / "handler.py", 38 | ) 39 | 40 | add_cfn_guard_suppressions( 41 | self.function.role.node.try_find_child("Resource"), 42 | ["IAM_NO_INLINE_POLICY_CHECK"] 43 | ) 44 | 45 | def _set_permissions(self): 46 | self.function.add_environment("DDB_SCHEDULER_STEPFUNCTION", self.state_machine_arn) 47 | self.function.add_to_role_policy( 48 | iam.PolicyStatement( 49 | actions=[ 50 | "states:StartExecution", 51 | "states:ListExecutions", 52 | "states:StopExecution", 53 | "states:DescribeExecution", 54 | ], 55 | effect=iam.Effect.ALLOW, 56 | resources=[ 57 | self.state_machine_arn, 58 | self.state_machine_executions_arn, 59 | ], 60 | ) 61 | ) 62 | 63 | self.scheduler_table.grant_read_write_data(self.function) 64 | self.function.add_environment("DDB_SCHEDULES_TABLE", self.scheduler_table.table_name) 65 | -------------------------------------------------------------------------------- /source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/update_scheduled_task.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from pathlib import Path 5 | from typing import Optional 6 | 7 | import aws_cdk.aws_iam as iam 8 | from aws_cdk.aws_dynamodb import ITable 9 | from aws_cdk.aws_stepfunctions import IChainable 10 | from constructs import Construct 11 | 12 | from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep 13 | from aws_solutions.cdk.cfn_guard import add_cfn_guard_suppressions 14 | 15 | 16 | class UpdateScheduledTask(SolutionStep): 17 | def __init__( 18 | self, # NOSONAR (python:S107) - allow large number of method parameters 19 | scope: Construct, 20 | id: str, 21 | layers=None, 22 | failure_state: Optional[IChainable] = None, 23 | scheduler_table: ITable = None, 24 | state_machine_arn: str = None, 25 | state_machine_executions_arn: str = None, 26 | ): 27 | self.scheduler_table = scheduler_table 28 | self.state_machine_arn = state_machine_arn 29 | self.state_machine_executions_arn = state_machine_executions_arn 30 | 31 | super().__init__( 32 | scope, 33 | id, 34 | layers=layers, 35 | failure_state=failure_state, 36 | function="update_schedule", 37 | entrypoint=Path(__file__).parents[1].resolve() / "aws_lambda" / "scheduler" / "handler.py", 38 | ) 39 | 40 | add_cfn_guard_suppressions( 41 | self.function.role.node.try_find_child("Resource"), 42 | ["IAM_NO_INLINE_POLICY_CHECK"] 43 | ) 44 | 45 | def _set_permissions(self): 46 | self.function.add_environment("DDB_SCHEDULER_STEPFUNCTION", self.state_machine_arn) 47 | self.function.add_to_role_policy( 48 | iam.PolicyStatement( 49 | actions=[ 50 | "states:StartExecution", 51 | "states:ListExecutions", 52 | "states:StopExecution", 53 | "states:DescribeExecution", 54 | ], 55 | effect=iam.Effect.ALLOW, 56 | resources=[ 57 | self.state_machine_arn, 58 | self.state_machine_executions_arn, 59 | ], 60 | ) 61 | ) 62 | 63 | self.scheduler_table.grant_read_write_data(self.function) 64 | self.function.add_environment("DDB_SCHEDULES_TABLE", self.scheduler_table.table_name) 65 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/setup.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import re 5 | from pathlib import Path 6 | 7 | import setuptools 8 | 9 | VERSION_RE = re.compile(r"\#\# \[(?P<version>.*)\]", re.MULTILINE) # NOSONAR 10 | 11 | 12 | def get_version(): 13 | """ 14 | Detect the solution version from the changelog. Latest version on top.
15 | """ 16 | changelog = open(Path(__file__).resolve().parent.parent / "CHANGELOG.md").read() 17 | versions = VERSION_RE.findall(changelog) 18 | if not len(versions): 19 | raise ValueError("use the standard semver format in your CHANGELOG.md") 20 | build_version = versions[0] 21 | print(f"Build Version: {build_version}") 22 | return build_version 23 | 24 | 25 | setuptools.setup( 26 | name="aws-solutions-cdk", 27 | version=get_version(), 28 | description="Tools to make AWS Solutions deployments with CDK + Python more manageable", 29 | long_description=open("../README.md").read(), 30 | author="Amazon Web Services", 31 | url="https://aws.amazon.com/solutions/implementations", 32 | license="Apache License 2.0", 33 | packages=setuptools.find_namespace_packages(exclude=["build*"]), 34 | package_data={ 35 | "": [ 36 | "requirements.txt", 37 | "Dockerfile", 38 | "__aws_solutions_bundling_version__", 39 | ] 40 | }, 41 | install_requires=[ 42 | "pip>=22.3.1", 43 | "aws_cdk_lib==2.88.0", 44 | "Click==8.1.3", 45 | "boto3==1.26.47", 46 | "requests==2.32.4", 47 | "crhelper==2.0.11", 48 | ], 49 | entry_points=""" 50 | [console_scripts] 51 | build-s3-cdk-dist=aws_solutions.cdk.scripts.build_s3_cdk_dist:cli 52 | """, 53 | python_requires=">=3.11", 54 | classifiers=[ 55 | "Development Status :: 4 - Beta", 56 | "Intended Audience :: Developers", 57 | "License :: OSI Approved :: Apache Software License", 58 | "Programming Language :: JavaScript", 59 | "Programming Language :: Python :: 3 :: Only", 60 | "Programming Language :: Python :: 3.7", 61 | "Programming Language :: Python :: 3.8", 62 | "Programming Language :: Python :: 3.9", 63 | "Programming Language :: Python :: 3.10", 64 | "Programming Language :: Python :: 3.11", 65 | "Topic :: Software Development :: Code Generators", 66 | "Topic :: Utilities", 67 | "Typing :: Typed", 68 | ], 69 | zip_safe=False, 70 | ) 71 | -------------------------------------------------------------------------------- /source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/python/hash_utils.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import hashlib 5 | import os 6 | from pathlib import Path 7 | 8 | 9 | class DirectoryHash: 10 | # fmt: off 11 | _hash = hashlib.sha1() # nosec NOSONAR - safe to hash; side-effect of collision is to create new bundle 12 | # fmt: on 13 | 14 | @classmethod 15 | def hash(cls, *directories: Path): 16 | DirectoryHash._hash = hashlib.sha1() # nosec NOSONAR - safe to hash; see above 17 | if isinstance(directories, Path): 18 | directories = [directories] 19 | for directory in sorted(directories): 20 | DirectoryHash._hash_dir(str(directory.absolute())) 21 | return DirectoryHash._hash.hexdigest() 22 | 23 | @classmethod 24 | def _hash_dir(cls, directory: Path): 25 | for path, dirs, files in os.walk(directory): 26 | for file in sorted(files): 27 | DirectoryHash._hash_file(Path(path) / file) 28 | for directory in sorted(dirs): 29 | DirectoryHash._hash_dir(str((Path(path) / directory).absolute())) 30 | break 31 | 32 | @classmethod 33 | def _hash_file(cls, file: Path): 34 | with file.open("rb") as f: 35 | while True: 36 | block = f.read(2**10) 37 | if not block: 38 | break 39 | DirectoryHash._hash.update(block) 40 | 41 | 42 | class LayerHash: 43 | @classmethod 44 | def hash(cls, requirements: Path): 45 | if not requirements.exists(): 46 | raise ValueError("requirements directory must exist") 47 | if not requirements.is_dir(): 48 | raise ValueError("requirements must be a directory") 49 | 50 | requirements_txt = requirements / "requirements.txt" 51 | if not requirements_txt.exists() or not requirements_txt.is_file(): 52 | raise ValueError("requirements.txt file must exist") 53 | 54 | # build the directories to check 55 | directories = [requirements] 56 | 57 | with open(requirements_txt, "r") as f: 58 | line = f.readline().strip() 59 | if line.startswith("-e"): 60 | raise ValueError(f"editable requirements are not allowed, so {line} is not allowed") 61 | 62 | if line and (line.startswith(".") or "/" in line): 63 | directories.append(requirements / line) 64 | 65 | return DirectoryHash.hash(*directories) 66 | --------------------------------------------------------------------------------
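Editor's addendum: a minimal, hedged usage sketch for the DirectoryHash helper above (the import path is inferred from the repository layout and the fixture directory is an assumption; this snippet is illustrative only, not part of the repository):

from pathlib import Path

from aws_solutions.cdk.aws_lambda.python.hash_utils import DirectoryHash

# hashing the same directories twice yields the same digest while their contents are unchanged
fixtures = Path("source/tests/cdk_solution_helper/aws_lambda/python/fixtures/hash_fixture")
digest = DirectoryHash.hash(fixtures / "a", fixtures / "z")
print(digest)  # 40-character sha1 hex digest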