├── deployment
│   ├── run-unit-tests.sh
│   ├── build-s3-dist.sh
│   └── amazon-personalize-optimizer-using-amazon-pinpoint-events.template
├── CODE_OF_CONDUCT.md
├── CHANGELOG.md
├── source
│   ├── 01-cleanup-lambda
│   │   ├── package.json
│   │   └── index.js
│   ├── 05-query-status-lambda
│   │   ├── package.json
│   │   └── index.js
│   ├── custom-resource-helper
│   │   ├── package.json
│   │   └── index.js
│   ├── 03-export-status-lambda
│   │   ├── package.json
│   │   └── index.js
│   ├── 06-dataset-import-lambda
│   │   ├── package.json
│   │   └── index.js
│   ├── 10-update-campaign-lambda
│   │   ├── package.json
│   │   └── index.js
│   ├── 04-query-augment-start-lambda
│   │   ├── package.json
│   │   └── index.js
│   ├── 07-dataset-import-status-lambda
│   │   ├── package.json
│   │   └── index.js
│   ├── 11-update-campaign-status-lambda
│   │   ├── package.json
│   │   └── index.js
│   ├── 08-create-solution-version-lambda
│   │   ├── package.json
│   │   └── index.js
│   ├── firehose-record-conversion-lambda
│   │   ├── package.json
│   │   └── index.js
│   ├── 02-export-pinpoint-endpoints-lambda
│   │   ├── package.json
│   │   └── index.js
│   └── 09-create-solution-version-status-lambda
│       ├── package.json
│       └── index.js
├── NOTICE.txt
├── CONTRIBUTING.md
├── README.md
└── LICENSE.txt
--------------------------------------------------------------------------------
/deployment/run-unit-tests.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | # This script should be run from the repo's deployment directory
4 | # cd deployment
5 | # ./run-unit-tests.sh
6 | 
7 | # Run unit tests
8 | echo "Unit test completed"
9 | 
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | ## Code of Conduct
2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
4 | opensource-codeofconduct@amazon.com with any additional questions or comments.
5 | 
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Change Log
2 | All notable changes to this project will be documented in this file.
3 | 
4 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
5 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
6 | 7 | 8 | ## [1.0.1] - 2020-05-19 9 | ### Updated 10 | - Updated the Role permissions for DataSetImportLambdaRole 11 | - Updated Data Lake S3 bucket policy to only allow SSL traffic 12 | 13 | 14 | ## [1.0.0] - 2020-02-10 15 | ### Added 16 | - initial repository version 17 | -------------------------------------------------------------------------------- /source/01-cleanup-lambda/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "01-cleanup-lambda", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "pretest": "npm install", 8 | "build:init": "rm -rf package-lock.json && rm -rf dist && rm -rf node_modules", 9 | "build:zip": "rm -rf package-lock.json && zip -rq --exclude=*tests* --exclude=*template.yml 01-cleanup-lambda.zip .", 10 | "build:dist": "mkdir dist && mv 01-cleanup-lambda.zip dist/", 11 | "build": "npm run build:init && npm install --production && npm run build:zip && npm run build:dist", 12 | "coverage": "nyc npm test" 13 | }, 14 | "author": { 15 | "name": "Ryan Lowe" 16 | }, 17 | "license": "Apache 2.0" 18 | } 19 | -------------------------------------------------------------------------------- /source/05-query-status-lambda/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "05-query-status-lambda", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "pretest": "npm install", 8 | "build:init": "rm -rf package-lock.json && rm -rf dist && rm -rf node_modules", 9 | "build:zip": "rm -rf package-lock.json && zip -rq --exclude=*tests* --exclude=*template.yml 05-query-status-lambda.zip .", 10 | "build:dist": "mkdir dist && mv 05-query-status-lambda.zip dist/", 11 | "build": "npm run build:init && npm install --production && npm run build:zip && npm run build:dist", 12 | "coverage": "nyc npm test" 13 | }, 14 | "author": { 15 | "name": "Ryan Lowe" 16 | }, 17 | "license": "Apache 2.0" 18 | } 19 | -------------------------------------------------------------------------------- /source/custom-resource-helper/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "custom-resource-helper", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "pretest": "npm install", 8 | "build:init": "rm -rf package-lock.json && rm -rf dist && rm -rf node_modules", 9 | "build:zip": "rm -rf package-lock.json && zip -rq --exclude=*tests* --exclude=*template.yml custom-resource-helper.zip .", 10 | "build:dist": "mkdir dist && mv custom-resource-helper.zip dist/", 11 | "build": "npm run build:init && npm install --production && npm run build:zip && npm run build:dist", 12 | "coverage": "nyc npm test" 13 | }, 14 | "author": { 15 | "name": "Ryan Lowe" 16 | }, 17 | "license": "Apache 2.0" 18 | } 19 | -------------------------------------------------------------------------------- /source/03-export-status-lambda/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "03-export-status-lambda", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "pretest": "npm install", 8 | "build:init": "rm -rf package-lock.json && rm -rf dist && rm -rf node_modules", 9 | "build:zip": "rm -rf package-lock.json && zip -rq --exclude=*tests* --exclude=*template.yml 03-export-status-lambda.zip .", 10 | "build:dist": "mkdir dist && mv 
03-export-status-lambda.zip dist/", 11 | "build": "npm run build:init && npm install --production && npm run build:zip && npm run build:dist", 12 | "coverage": "nyc npm test" 13 | }, 14 | "author": { 15 | "name": "Ryan Lowe" 16 | }, 17 | "license": "Apache 2.0" 18 | } 19 | -------------------------------------------------------------------------------- /source/06-dataset-import-lambda/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "06-dataset-import-lambda", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "pretest": "npm install", 8 | "build:init": "rm -rf package-lock.json && rm -rf dist && rm -rf node_modules", 9 | "build:zip": "rm -rf package-lock.json && zip -rq --exclude=*tests* --exclude=*template.yml 06-dataset-import-lambda.zip .", 10 | "build:dist": "mkdir dist && mv 06-dataset-import-lambda.zip dist/", 11 | "build": "npm run build:init && npm install --production && npm run build:zip && npm run build:dist", 12 | "coverage": "nyc npm test" 13 | }, 14 | "author": { 15 | "name": "Ryan Lowe" 16 | }, 17 | "license": "Apache 2.0" 18 | } 19 | -------------------------------------------------------------------------------- /source/10-update-campaign-lambda/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "10-update-campaign-lambda", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "pretest": "npm install", 8 | "build:init": "rm -rf package-lock.json && rm -rf dist && rm -rf node_modules", 9 | "build:zip": "rm -rf package-lock.json && zip -rq --exclude=*tests* --exclude=*template.yml 10-update-campaign-lambda.zip .", 10 | "build:dist": "mkdir dist && mv 10-update-campaign-lambda.zip dist/", 11 | "build": "npm run build:init && npm install --production && npm run build:zip && npm run build:dist", 12 | "coverage": "nyc npm test" 13 | }, 14 | "author": { 15 | "name": "Ryan Lowe" 16 | }, 17 | "license": "Apache 2.0" 18 | } 19 | -------------------------------------------------------------------------------- /source/04-query-augment-start-lambda/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "04-query-augment-start-lambda", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "pretest": "npm install", 8 | "build:init": "rm -rf package-lock.json && rm -rf dist && rm -rf node_modules", 9 | "build:zip": "rm -rf package-lock.json && zip -rq --exclude=*tests* --exclude=*template.yml 04-query-augment-start-lambda.zip .", 10 | "build:dist": "mkdir dist && mv 04-query-augment-start-lambda.zip dist/", 11 | "build": "npm run build:init && npm install --production && npm run build:zip && npm run build:dist", 12 | "coverage": "nyc npm test" 13 | }, 14 | "author": { 15 | "name": "Ryan Lowe" 16 | }, 17 | "license": "Apache 2.0" 18 | } 19 | -------------------------------------------------------------------------------- /source/07-dataset-import-status-lambda/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "07-dataset-import-status-lambda", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "pretest": "npm install", 8 | "build:init": "rm -rf package-lock.json && rm -rf dist && rm -rf node_modules", 9 | "build:zip": "rm -rf package-lock.json && zip -rq --exclude=*tests* 
--exclude=*template.yml 07-dataset-import-status-lambda.zip .", 10 | "build:dist": "mkdir dist && mv 07-dataset-import-status-lambda.zip dist/", 11 | "build": "npm run build:init && npm install --production && npm run build:zip && npm run build:dist", 12 | "coverage": "nyc npm test" 13 | }, 14 | "author": { 15 | "name": "Ryan Lowe" 16 | }, 17 | "license": "Apache 2.0" 18 | } 19 | -------------------------------------------------------------------------------- /source/11-update-campaign-status-lambda/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "11-update-campaign-status-lambda", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "pretest": "npm install", 8 | "build:init": "rm -rf package-lock.json && rm -rf dist && rm -rf node_modules", 9 | "build:zip": "rm -rf package-lock.json && zip -rq --exclude=*tests* --exclude=*template.yml 11-update-campaign-status-lambda.zip .", 10 | "build:dist": "mkdir dist && mv 11-update-campaign-status-lambda.zip dist/", 11 | "build": "npm run build:init && npm install --production && npm run build:zip && npm run build:dist", 12 | "coverage": "nyc npm test" 13 | }, 14 | "author": { 15 | "name": "Ryan Lowe" 16 | }, 17 | "license": "Apache 2.0" 18 | } 19 | -------------------------------------------------------------------------------- /source/08-create-solution-version-lambda/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "08-create-solution-version-lambda", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "pretest": "npm install", 8 | "build:init": "rm -rf package-lock.json && rm -rf dist && rm -rf node_modules", 9 | "build:zip": "rm -rf package-lock.json && zip -rq --exclude=*tests* --exclude=*template.yml 08-create-solution-version-lambda.zip .", 10 | "build:dist": "mkdir dist && mv 08-create-solution-version-lambda.zip dist/", 11 | "build": "npm run build:init && npm install --production && npm run build:zip && npm run build:dist", 12 | "coverage": "nyc npm test" 13 | }, 14 | "author": { 15 | "name": "Ryan Lowe" 16 | }, 17 | "license": "Apache 2.0" 18 | } 19 | -------------------------------------------------------------------------------- /source/firehose-record-conversion-lambda/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "firehose-record-conversion-lambda", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "pretest": "npm install", 8 | "build:init": "rm -rf package-lock.json && rm -rf dist && rm -rf node_modules", 9 | "build:zip": "rm -rf package-lock.json && zip -rq --exclude=*tests* --exclude=*template.yml firehose-record-conversion-lambda.zip .", 10 | "build:dist": "mkdir dist && mv firehose-record-conversion-lambda.zip dist/", 11 | "build": "npm run build:init && npm install --production && npm run build:zip && npm run build:dist", 12 | "coverage": "nyc npm test" 13 | }, 14 | "author": { 15 | "name": "Ryan Lowe" 16 | }, 17 | "license": "Apache 2.0" 18 | } 19 | -------------------------------------------------------------------------------- /NOTICE.txt: -------------------------------------------------------------------------------- 1 | Optimize Amazon Personalize Campaigns using Amazon Pinpoint Events 2 | Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | Licensed under the Apache License Version 2.0 (the "License"). You may not use this file except 4 | in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/ 5 | or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, 6 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or implied. See the License for the 7 | specific language governing permissions and limitations under the License. 8 | 9 | ********************** 10 | THIRD PARTY COMPONENTS 11 | ********************** 12 | This software includes third party software subject to the following copyrights: 13 | 14 | AWS SDK under the Apache License Version 2.0 15 | -------------------------------------------------------------------------------- /source/02-export-pinpoint-endpoints-lambda/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "02-export-pinpoint-endpoints-lambda", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "pretest": "npm install", 8 | "build:init": "rm -rf package-lock.json && rm -rf dist && rm -rf node_modules", 9 | "build:zip": "rm -rf package-lock.json && zip -rq --exclude=*tests* --exclude=*template.yml 02-export-pinpoint-endpoints-lambda.zip .", 10 | "build:dist": "mkdir dist && mv 02-export-pinpoint-endpoints-lambda.zip dist/", 11 | "build": "npm run build:init && npm install --production && npm run build:zip && npm run build:dist", 12 | "coverage": "nyc npm test" 13 | }, 14 | "author": { 15 | "name": "Ryan Lowe" 16 | }, 17 | "license": "Apache 2.0" 18 | } 19 | -------------------------------------------------------------------------------- /source/09-create-solution-version-status-lambda/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "09-create-solution-version-status-lambda", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "pretest": "npm install", 8 | "build:init": "rm -rf package-lock.json && rm -rf dist && rm -rf node_modules", 9 | "build:zip": "rm -rf package-lock.json && zip -rq --exclude=*tests* --exclude=*template.yml 09-create-solution-version-status-lambda.zip .", 10 | "build:dist": "mkdir dist && mv 09-create-solution-version-status-lambda.zip dist/", 11 | "build": "npm run build:init && npm install --production && npm run build:zip && npm run build:dist", 12 | "coverage": "nyc npm test" 13 | }, 14 | "author": { 15 | "name": "Ryan Lowe" 16 | }, 17 | "license": "Apache 2.0" 18 | } 19 | -------------------------------------------------------------------------------- /source/11-update-campaign-status-lambda/index.js: -------------------------------------------------------------------------------- 1 | /********************************************************************************************************************* 2 | * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. * 3 | * * 4 | * Licensed under the Apache License Version 2.0 (the 'License'). You may not use this file except in compliance * 5 | * with the License. A copy of the License is located at * 6 | * * 7 | * http://www.apache.org/licenses/ * 8 | * * 9 | * or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES * 10 | * OR CONDITIONS OF ANY KIND, express or implied. 
See the License for the specific language governing permissions * 11 | * and limitations under the License. * 12 | *********************************************************************************************************************/ 13 | 14 | const AWS = require('aws-sdk'); 15 | AWS.config.update({ 16 | region: process.env.AWS_REGION 17 | }); 18 | const personalize = new AWS.Personalize(); 19 | exports.handler = async (event) => { 20 | return personalize.describeCampaign({ 21 | campaignArn: process.env.CAMPAIGN_ARN 22 | }).promise() 23 | .catch((err) => { 24 | console.error('Encountered Error calling describeCampaign with campaignArn: ' + process.env.CAMPAIGN_ARN + ', error: ' + JSON.stringify(err)); 25 | return Promise.reject(err); 26 | }) 27 | .then((response) => { 28 | return { 29 | CampaignStatus: response.campaign.status 30 | }; 31 | }); 32 | }; 33 | -------------------------------------------------------------------------------- /source/08-create-solution-version-lambda/index.js: -------------------------------------------------------------------------------- 1 | /********************************************************************************************************************* 2 | * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. * 3 | * * 4 | * Licensed under the Apache License Version 2.0 (the 'License'). You may not use this file except in compliance * 5 | * with the License. A copy of the License is located at * 6 | * * 7 | * http://www.apache.org/licenses/ * 8 | * * 9 | * or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES * 10 | * OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions * 11 | * and limitations under the License. * 12 | *********************************************************************************************************************/ 13 | 14 | const AWS = require('aws-sdk'); 15 | AWS.config.update({ 16 | region: process.env.AWS_REGION 17 | }); 18 | const personalize = new AWS.Personalize(); 19 | exports.handler = async (event) => { 20 | return personalize.createSolutionVersion({ 21 | solutionArn: process.env.SOLUTION_ARN 22 | }).promise() 23 | .catch((err) => { 24 | console.error('Encountered Error calling createSolutionVersion with solutionArn: ' + process.env.SOLUTION_ARN + ', error: ' + JSON.stringify(err)); 25 | return Promise.reject(err); 26 | }) 27 | .then((response) => { 28 | return { 29 | SolutionVersionArn: response.solutionVersionArn 30 | }; 31 | }); 32 | }; 33 | -------------------------------------------------------------------------------- /source/09-create-solution-version-status-lambda/index.js: -------------------------------------------------------------------------------- 1 | /********************************************************************************************************************* 2 | * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. * 3 | * * 4 | * Licensed under the Apache License Version 2.0 (the 'License'). You may not use this file except in compliance * 5 | * with the License. A copy of the License is located at * 6 | * * 7 | * http://www.apache.org/licenses/ * 8 | * * 9 | * or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES * 10 | * OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions * 11 | * and limitations under the License. 
*
12 | *********************************************************************************************************************/
13 | 
14 | const AWS = require('aws-sdk');
15 | AWS.config.update({
16 |     region: process.env.AWS_REGION
17 | });
18 | const personalize = new AWS.Personalize();
19 | exports.handler = async (event) => {
20 |     return personalize.describeSolutionVersion({
21 |         solutionVersionArn: event.SolutionVersionArn
22 |     }).promise()
23 |     .catch((err) => {
24 |         console.error('Encountered Error calling describeSolutionVersion with solutionVersionArn: ' + event.SolutionVersionArn + ', error: ' + JSON.stringify(err));
25 |         return Promise.reject(err);
26 |     })
27 |     .then((response) => {
28 |         return {
29 |             SolutionVersionArn: event.SolutionVersionArn,
30 |             SolutionVersionStatus: response.solutionVersion.status
31 |         };
32 |     });
33 | };
34 | 
--------------------------------------------------------------------------------
/source/07-dataset-import-status-lambda/index.js:
--------------------------------------------------------------------------------
1 | /*********************************************************************************************************************
2 |  * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. *
3 |  * *
4 |  * Licensed under the Apache License Version 2.0 (the 'License'). You may not use this file except in compliance *
5 |  * with the License. A copy of the License is located at *
6 |  * *
7 |  * http://www.apache.org/licenses/ *
8 |  * *
9 |  * or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES *
10 |  * OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions *
11 |  * and limitations under the License. *
12 |  *********************************************************************************************************************/
13 | 
14 | const AWS = require('aws-sdk');
15 | AWS.config.update({
16 |     region: process.env.AWS_REGION
17 | });
18 | const personalize = new AWS.Personalize();
19 | exports.handler = async (event) => {
20 |     return personalize.describeDatasetImportJob({
21 |         datasetImportJobArn: event.DatasetImportJobArn
22 |     }).promise()
23 |     .catch((err) => {
24 |         console.error('Encountered Error calling describeDatasetImportJob with datasetImportJobArn: ' + event.DatasetImportJobArn + ', error: ' + JSON.stringify(err));
25 |         return Promise.reject(err);
26 |     })
27 |     .then((response) => {
28 |         return {
29 |             DatasetImportJobArn: event.DatasetImportJobArn,
30 |             DatasetImportJobStatus: response.datasetImportJob.status
31 |         };
32 |     });
33 | };
34 | 
--------------------------------------------------------------------------------
/source/03-export-status-lambda/index.js:
--------------------------------------------------------------------------------
1 | /*********************************************************************************************************************
2 |  * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. *
3 |  * *
4 |  * Licensed under the Apache License Version 2.0 (the 'License'). You may not use this file except in compliance *
5 |  * with the License. A copy of the License is located at *
6 |  * *
7 |  * http://www.apache.org/licenses/ *
8 |  * *
9 |  * or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES *
10 |  * OR CONDITIONS OF ANY KIND, express or implied.
See the License for the specific language governing permissions * 11 | * and limitations under the License. * 12 | *********************************************************************************************************************/ 13 | 14 | const AWS = require('aws-sdk'); 15 | AWS.config.update({ 16 | region: process.env.AWS_REGION 17 | }); 18 | const pinpoint = new AWS.Pinpoint(); 19 | exports.handler = async (event) => { 20 | 21 | const params = { 22 | ApplicationId: process.env.PINPOINT_APPLICATION_ID, 23 | JobId: event.ExportJobId 24 | }; 25 | 26 | return pinpoint.getExportJob(params).promise() 27 | .catch((err) => { 28 | console.error('Encountered Error calling getExportJob with parameters: ' + JSON.stringify(params) + ', error: ' + JSON.stringify(err)); 29 | return Promise.reject(err); 30 | }) 31 | .then((data) => { 32 | return { 33 | ExportJobStatus: data.ExportJobResponse.JobStatus, 34 | ExportJobId: data.ExportJobResponse.Id 35 | }; 36 | }); 37 | }; 38 | -------------------------------------------------------------------------------- /source/10-update-campaign-lambda/index.js: -------------------------------------------------------------------------------- 1 | /********************************************************************************************************************* 2 | * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. * 3 | * * 4 | * Licensed under the Apache License Version 2.0 (the 'License'). You may not use this file except in compliance * 5 | * with the License. A copy of the License is located at * 6 | * * 7 | * http://www.apache.org/licenses/ * 8 | * * 9 | * or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES * 10 | * OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions * 11 | * and limitations under the License. * 12 | *********************************************************************************************************************/ 13 | 14 | const AWS = require('aws-sdk'); 15 | AWS.config.update({ 16 | region: process.env.AWS_REGION 17 | }); 18 | const personalize = new AWS.Personalize(); 19 | exports.handler = async (event) => { 20 | 21 | const params = { 22 | campaignArn: process.env.CAMPAIGN_ARN, 23 | solutionVersionArn: event.SolutionVersionArn, 24 | minProvisionedTPS: parseInt(process.env.MIN_PROVISIONED_TPS) 25 | }; 26 | 27 | return personalize.updateCampaign(params).promise() 28 | .catch((err) => { 29 | console.error('Encountered Error calling updateCampaign with parameters: ' + JSON.stringify(params) + ', error: ' + JSON.stringify(err)); 30 | return Promise.reject(err); 31 | }) 32 | .then((response) => { 33 | return { 34 | Success: 'true' 35 | }; 36 | }); 37 | }; 38 | -------------------------------------------------------------------------------- /source/05-query-status-lambda/index.js: -------------------------------------------------------------------------------- 1 | /********************************************************************************************************************* 2 | * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. * 3 | * * 4 | * Licensed under the Apache License Version 2.0 (the 'License'). You may not use this file except in compliance * 5 | * with the License. A copy of the License is located at * 6 | * * 7 | * http://www.apache.org/licenses/ * 8 | * * 9 | * or in the 'license' file accompanying this file. 
This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES * 10 | * OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions * 11 | * and limitations under the License. * 12 | *********************************************************************************************************************/ 13 | 14 | const AWS = require('aws-sdk'); 15 | AWS.config.update({ 16 | region: process.env.AWS_REGION 17 | }); 18 | const athena = new AWS.Athena(); 19 | exports.handler = async (event) => { 20 | return athena.getQueryExecution({QueryExecutionId: event.QueryExecutionId}).promise() 21 | .catch((err) => { 22 | console.error('Encountered Error calling getQueryExecution with QueryExecutionId: ' + event.QueryExecutionId + ', error: ' + JSON.stringify(err)); 23 | return Promise.reject(err); 24 | }) 25 | .then((data) => { 26 | console.log(JSON.stringify(data)); 27 | return { 28 | QueryExecutionId: data.QueryExecution.QueryExecutionId, 29 | OutputLocation: data.QueryExecution.ResultConfiguration.OutputLocation, 30 | Status: data.QueryExecution.Status.State 31 | }; 32 | }); 33 | }; 34 | -------------------------------------------------------------------------------- /source/06-dataset-import-lambda/index.js: -------------------------------------------------------------------------------- 1 | /********************************************************************************************************************* 2 | * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. * 3 | * * 4 | * Licensed under the Apache License Version 2.0 (the 'License'). You may not use this file except in compliance * 5 | * with the License. A copy of the License is located at * 6 | * * 7 | * http://www.apache.org/licenses/ * 8 | * * 9 | * or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES * 10 | * OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions * 11 | * and limitations under the License. * 12 | *********************************************************************************************************************/ 13 | 14 | const AWS = require('aws-sdk'); 15 | AWS.config.update({ 16 | region: process.env.AWS_REGION 17 | }); 18 | const personalize = new AWS.Personalize(); 19 | exports.handler = async (event) => { 20 | 21 | const params = { 22 | dataSource: { 23 | dataLocation: event.OutputLocation 24 | }, 25 | datasetArn: process.env.DATA_SET_ARN, 26 | jobName: 'interactions-importjob-' + event.QueryExecutionId, 27 | roleArn: process.env.IMPORT_ROLE 28 | }; 29 | 30 | return personalize.createDatasetImportJob(params).promise() 31 | .catch((err) => { 32 | console.error('Encountered Error calling createDatasetImportJob with parameters: ' + JSON.stringify(params) + ', error: ' + JSON.stringify(err)); 33 | return Promise.reject(err); 34 | }) 35 | .then((response) => { 36 | return { 37 | DatasetImportJobArn: response.datasetImportJobArn 38 | }; 39 | }); 40 | }; 41 | -------------------------------------------------------------------------------- /source/02-export-pinpoint-endpoints-lambda/index.js: -------------------------------------------------------------------------------- 1 | /********************************************************************************************************************* 2 | * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
* 3 | * * 4 | * Licensed under the Apache License Version 2.0 (the 'License'). You may not use this file except in compliance * 5 | * with the License. A copy of the License is located at * 6 | * * 7 | * http://www.apache.org/licenses/ * 8 | * * 9 | * or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES * 10 | * OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions * 11 | * and limitations under the License. * 12 | *********************************************************************************************************************/ 13 | 14 | const AWS = require('aws-sdk'); 15 | AWS.config.update({ 16 | region: process.env.AWS_REGION 17 | }); 18 | const pinpoint = new AWS.Pinpoint(); 19 | exports.handler = async (event) => { 20 | 21 | const d = new Date(); 22 | const bucketPrefix = `endpoint_exports/${d.getUTCFullYear()}/${d.getUTCMonth() + 1}/${d.getUTCDate()}`; 23 | const bucket = `s3://${process.env.S3_BUCKET}/${bucketPrefix}`; 24 | 25 | const params = { 26 | ApplicationId: process.env.PINPOINT_APPLICATION_ID, 27 | ExportJobRequest: { 28 | RoleArn: process.env.ROLE_ARN, 29 | S3UrlPrefix: bucket 30 | } 31 | }; 32 | 33 | return pinpoint.createExportJob(params).promise() 34 | .catch((err) => { 35 | console.error('Encountered Error calling createExportJob with parameters: ' + JSON.stringify(params) + ', error: ' + JSON.stringify(err)); 36 | return Promise.reject(err); 37 | }) 38 | .then((data) => { 39 | console.log(JSON.stringify(data)); 40 | return { 41 | ExportJobId: data.ExportJobResponse.Id 42 | }; 43 | }); 44 | 45 | }; 46 | -------------------------------------------------------------------------------- /source/04-query-augment-start-lambda/index.js: -------------------------------------------------------------------------------- 1 | /********************************************************************************************************************* 2 | * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. * 3 | * * 4 | * Licensed under the Apache License Version 2.0 (the 'License'). You may not use this file except in compliance * 5 | * with the License. A copy of the License is located at * 6 | * * 7 | * http://www.apache.org/licenses/ * 8 | * * 9 | * or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES * 10 | * OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions * 11 | * and limitations under the License. 
* 12 | *********************************************************************************************************************/ 13 | 14 | const AWS = require('aws-sdk'); 15 | AWS.config.update({ 16 | region: process.env.AWS_REGION 17 | }); 18 | const athena = new AWS.Athena(); 19 | exports.handler = async (event) => { 20 | return athena.getNamedQuery({NamedQueryId: process.env.NAMED_QUERY}).promise() 21 | .catch((err) => { 22 | console.error('Encountered Error calling getNamedQuery with NamedQueryId: ' + process.env.NAMED_QUERY + ', error: ' + JSON.stringify(err)); 23 | return Promise.reject(err); 24 | }) 25 | .then((data) => { 26 | 27 | const params = { 28 | QueryString: data.NamedQuery.QueryString, 29 | QueryExecutionContext: { 30 | Database: data.NamedQuery.Database 31 | }, 32 | ResultConfiguration: { 33 | OutputLocation: `s3://${process.env.S3_BUCKET}/optimizer-daily-query-results/` 34 | } 35 | }; 36 | 37 | return athena.startQueryExecution(params).promise() 38 | .catch((err) => { 39 | console.error('Encountered Error calling startQueryExecution with parameters: ' + JSON.stringify(params) + ', error: ' + JSON.stringify(err)); 40 | return Promise.reject(err); 41 | }); 42 | }) 43 | .then((data) => { 44 | console.log(JSON.stringify(data)); 45 | return { 46 | QueryExecutionId: data.QueryExecutionId 47 | }; 48 | }); 49 | }; 50 | -------------------------------------------------------------------------------- /source/firehose-record-conversion-lambda/index.js: -------------------------------------------------------------------------------- 1 | /********************************************************************************************************************* 2 | * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. * 3 | * * 4 | * Licensed under the Apache License Version 2.0 (the 'License'). You may not use this file except in compliance * 5 | * with the License. A copy of the License is located at * 6 | * * 7 | * http://www.apache.org/licenses/ * 8 | * * 9 | * or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES * 10 | * OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions * 11 | * and limitations under the License. 
* 12 | *********************************************************************************************************************/ 13 | 14 | exports.handler = async (event) => { 15 | 16 | const output = []; 17 | 18 | event.records.forEach(record => { 19 | 20 | try { 21 | // Decode the base64 message 22 | const decoded = JSON.parse(Buffer.from(record.data, 'base64').toString('ascii')); 23 | 24 | // Filter out Test Messages 25 | if (decoded.event_type === '_test.event_stream') { 26 | output.push({ 27 | data: record.data, 28 | recordId: record.recordId, 29 | result: 'Dropped' 30 | }); 31 | 32 | } else { 33 | 34 | // Trim off millisecond precision 35 | decoded.arrival_timestamp = Math.round(decoded.arrival_timestamp / 1000); 36 | decoded.event_timestamp = Math.round(decoded.event_timestamp / 1000); 37 | 38 | output.push({ 39 | // Add a linebreak for easier Glue crawling 40 | data: Buffer.from(JSON.stringify(decoded) + '\n').toString('base64'), 41 | recordId: record.recordId, 42 | result: 'Ok' 43 | }); 44 | } 45 | } 46 | catch(err) { 47 | console.error('Encountered Error when processing Kinesis event record for RecordId: ' + record.recordId + ', error: ' + JSON.stringify(err)); 48 | output.push({ 49 | data: record.data, 50 | recordId: record.recordId, 51 | result: 'Dropped' 52 | }); 53 | } 54 | 55 | }); 56 | 57 | return {records: output}; 58 | }; 59 | -------------------------------------------------------------------------------- /source/01-cleanup-lambda/index.js: -------------------------------------------------------------------------------- 1 | /********************************************************************************************************************* 2 | * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. * 3 | * * 4 | * Licensed under the Apache License Version 2.0 (the 'License'). You may not use this file except in compliance * 5 | * with the License. A copy of the License is located at * 6 | * * 7 | * http://www.apache.org/licenses/ * 8 | * * 9 | * or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES * 10 | * OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions * 11 | * and limitations under the License. 
* 12 | *********************************************************************************************************************/ 13 | 14 | const AWS = require('aws-sdk'); 15 | AWS.config.update({ 16 | region: process.env.AWS_REGION 17 | }); 18 | 19 | const s3 = new AWS.S3(); 20 | exports.handler = async (event) => { 21 | 22 | console.log(event); 23 | 24 | const bucketPrefix1 = 'endpoint_exports'; 25 | const promise1 = s3.listObjects({ 26 | Bucket: process.env.S3_BUCKET, 27 | Prefix: bucketPrefix1 28 | }).promise() 29 | .catch((err) => { 30 | console.error('Encountered Error calling listObjects on endpoint exports prefix for Prefix: ' + bucketPrefix1 + ', error: ' + JSON.stringify(err)); 31 | return Promise.reject(err); 32 | }) 33 | .then((data) => { 34 | 35 | var toDelete = data.Contents.filter((i) => i.Key.startsWith('endpoint_exports/20')).map((i) => {return {Key: i.Key};}); 36 | console.log('Cleaning up and Deleting Objects: ' + JSON.stringify(toDelete)); 37 | 38 | return deleteKeys(toDelete); 39 | }); 40 | 41 | const bucketPrefix2 = 'optimizer-daily-query-results'; 42 | const promise2 = s3.listObjects({ 43 | Bucket: process.env.S3_BUCKET, 44 | Prefix: bucketPrefix2 45 | }).promise() 46 | .catch((err) => { 47 | console.error('Encountered Error calling listObjects on query results prefix for Prefix: ' + bucketPrefix2 + ', error: ' + JSON.stringify(err)); 48 | return Promise.reject(err); 49 | }) 50 | .then((data) => { 51 | 52 | var toDelete = data.Contents.map((i) => {return {Key: i.Key};}); 53 | console.log('Cleaning up and Deleting Objects: ' + JSON.stringify(toDelete)); 54 | 55 | return deleteKeys(toDelete); 56 | }); 57 | 58 | 59 | return Promise.all([promise1, promise2]); 60 | }; 61 | 62 | const deleteKeys = function(toDelete) { 63 | if (toDelete.length > 0) { 64 | return s3.deleteObjects({ 65 | Bucket: process.env.S3_BUCKET, 66 | Delete: { 67 | Objects: toDelete 68 | } 69 | }).promise() 70 | .catch((err) => { 71 | console.error('Encountered Error calling deleteObjects with toDelete: ' + JSON.stringify(toDelete) + ', error: ' + JSON.stringify(err)); 72 | return Promise.reject(err); 73 | }) 74 | .then((ret) => { 75 | return { 76 | Done: true 77 | }; 78 | }); 79 | } else { 80 | return { 81 | Done: true 82 | }; 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing Guidelines 2 | 3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional 4 | documentation, we greatly value feedback and contributions from our community. 5 | 6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary 7 | information to effectively respond to your bug report or contribution. 8 | 9 | 10 | ## Reporting Bugs/Feature Requests 11 | 12 | We welcome you to use the GitHub issue tracker to report bugs or suggest features. 13 | 14 | When filing an issue, please check [existing open](https://github.com/awslabs/optimize-amazon-personalize-campaigns-using-amazon-pinpoint-events/issues), or [recently closed](https://github.com/awslabs/optimize-amazon-personalize-campaigns-using-amazon-pinpoint-events/issues?utf8=%E2%9C%93&q=is%3Aissue%20is%3Aclosed%20), issues to make sure somebody else hasn't already 15 | reported the issue. Please try to include as much information as you can. 
Details like these are incredibly useful:
16 | 
17 | * A reproducible test case or series of steps
18 | * The version of our code being used
19 | * Any modifications you've made relevant to the bug
20 | * Anything unusual about your environment or deployment
21 | 
22 | 
23 | ## Contributing via Pull Requests
24 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that:
25 | 
26 | 1. You are working against the latest source on the *master* branch.
27 | 2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already.
28 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted.
29 | 
30 | To send us a pull request, please:
31 | 
32 | 1. Fork the repository.
33 | 2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change.
34 | 3. Ensure local tests pass.
35 | 4. Commit to your fork using clear commit messages.
36 | 5. Send us a pull request, answering any default questions in the pull request interface.
37 | 6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation.
38 | 
39 | GitHub provides additional documentation on [forking a repository](https://help.github.com/articles/fork-a-repo/) and
40 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/).
41 | 
42 | 
43 | ## Finding contributions to work on
44 | Looking at the existing issues is a great way to find something to contribute to. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any ['help wanted'](https://github.com/awslabs/optimize-amazon-personalize-campaigns-using-amazon-pinpoint-events/labels/help%20wanted) issues is a great place to start.
45 | 
46 | 
47 | ## Code of Conduct
48 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
49 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
50 | opensource-codeofconduct@amazon.com with any additional questions or comments.
51 | 
52 | 
53 | ## Security issue notifications
54 | If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public GitHub issue.
55 | 
56 | 
57 | ## Licensing
58 | 
59 | See the [LICENSE](https://github.com/awslabs/optimize-amazon-personalize-campaigns-using-amazon-pinpoint-events/blob/master/LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution.
60 | 
61 | We may ask you to sign a [Contributor License Agreement (CLA)](http://en.wikipedia.org/wiki/Contributor_License_Agreement) for larger changes.
62 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Amazon Personalize Optimizer Using Amazon Pinpoint Events
2 | This solution automates the process of using the events generated by Amazon Pinpoint campaigns to rapidly train and optimize Amazon Personalize campaigns.
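To customize or rebuild the solution, start from a local copy of the source. The repository URL below is taken from the issue and license links in CONTRIBUTING.md:

```
git clone https://github.com/awslabs/optimize-amazon-personalize-campaigns-using-amazon-pinpoint-events.git
cd optimize-amazon-personalize-campaigns-using-amazon-pinpoint-events
```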
3 | 
4 | ## Running unit tests for customization
5 | * Clone the repository, then make the desired code changes
6 | * Next, run unit tests to make sure the added customization passes the tests
7 | ```
8 | cd ./deployment
9 | chmod +x ./run-unit-tests.sh
10 | ./run-unit-tests.sh
11 | ```
12 | 
13 | ## Building distributable for customization
14 | * Configure the bucket name of your target Amazon S3 distribution bucket
15 | ```
16 | export DIST_OUTPUT_BUCKET=my-bucket-name # bucket where customized code will reside
17 | export SOLUTION_NAME=my-solution-name
18 | export VERSION=my-version # version number for the customized code
19 | ```
20 | _Note:_ You would have to create an S3 bucket with the prefix 'my-bucket-name-<aws_region>'; aws_region is where you are testing the customized solution. Also, the assets in the bucket should be publicly accessible.
21 | 
22 | * Now build the distributable:
23 | ```
24 | chmod +x ./build-s3-dist.sh
25 | ./build-s3-dist.sh $DIST_OUTPUT_BUCKET $SOLUTION_NAME $VERSION
26 | ```
27 | 
28 | * Deploy the distributable to an Amazon S3 bucket in your account. _Note:_ you must have the AWS Command Line Interface installed.
29 | ```
30 | aws s3 cp ./dist/ s3://my-bucket-name-<aws_region>/$SOLUTION_NAME/$VERSION/ --recursive --acl bucket-owner-full-control --profile aws-cred-profile-name
31 | ```
32 | 
33 | * Get the link of the solution template uploaded to your Amazon S3 bucket.
34 | * Deploy the solution to your account by launching a new AWS CloudFormation stack using the link of the solution template in Amazon S3.
35 | 
36 | ***
37 | 
38 | ## File Structure
39 | 
40 | ```
41 | |-deployment/
42 |   |-build-s3-dist.sh                         [ shell script for packaging distribution assets ]
43 |   |-run-unit-tests.sh                        [ shell script for executing unit tests ]
44 |   |-amazon-personalize-optimizer-using-amazon-pinpoint-events.yaml [ solution CloudFormation deployment template ]
45 | |-source/
46 |   |-01-cleanup-lambda                        [ microservice to clean up temp files ]
47 |   |-02-export-pinpoint-endpoints-lambda      [ microservice to initiate an Amazon Pinpoint export ]
48 |   |-03-export-status-lambda                  [ microservice to check on the export status ]
49 |   |-04-query-augment-start-lambda            [ microservice to initiate an Amazon Athena query ]
50 |   |-05-query-status-lambda                   [ microservice to check on the query status ]
51 |   |-06-dataset-import-lambda                 [ microservice to initiate an Amazon Personalize dataset import ]
52 |   |-07-dataset-import-status-lambda          [ microservice to check on the import status ]
53 |   |-08-create-solution-version-lambda        [ microservice to initiate an Amazon Personalize solution version ]
54 |   |-09-create-solution-version-status-lambda [ microservice to check on the solution version ]
55 |   |-10-update-campaign-lambda                [ microservice to initiate an Amazon Personalize campaign update ]
56 |   |-11-update-campaign-status-lambda         [ microservice to check on the campaign update status ]
57 |   |-custom-resource-helper                   [ microservice to help deploy queries during CF deployment ]
58 |   |-firehose-record-conversion-lambda        [ microservice to transform the Amazon Pinpoint event stream events ]
59 | ```
60 | 
61 | Each microservice follows the structure of:
62 | 
63 | ```
64 | |-service-name/
65 |   |-index.js [injection point for microservice]
66 |   |-package.json
67 | ```
68 | 
69 | ***
70 | 
71 | 
72 | Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
73 | 
74 | Licensed under the Apache License Version 2.0 (the "License"). You may not use this file except in compliance with the License.
A copy of the License is located at
75 | 
76 | http://www.apache.org/licenses/
77 | 
78 | or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions and limitations under the License.
79 | 
--------------------------------------------------------------------------------
/deployment/build-s3-dist.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | #
3 | # This assumes all of the OS-level configuration has been completed and the git repo has already been cloned
4 | #
5 | # This script should be run from the repo's deployment directory
6 | # cd deployment
7 | # ./build-s3-dist.sh source-bucket-base-name solution-name version-code
8 | #
9 | # Parameters:
10 | #  - source-bucket-base-name: Name for the S3 bucket location where the template will source the Lambda
11 | #    code from. The template will append '-[region_name]' to this bucket name.
12 | #    For example: ./build-s3-dist.sh solutions my-solution v1.0.0
13 | #    The template will then expect the source code to be located in the solutions-[region_name] bucket
14 | #
15 | #  - solution-name: name of the solution for consistency
16 | #
17 | #  - version-code: version of the package
18 | 
19 | # Check to see if input has been provided:
20 | if [ -z "$1" ] || [ -z "$2" ] || [ -z "$3" ]; then
21 |     echo "Please provide the base source bucket name, trademark-approved solution name and version where the lambda code will eventually reside."
22 |     echo "For example: ./build-s3-dist.sh solutions trademarked-solution-name v1.0.0"
23 |     exit 1
24 | fi
25 | 
26 | # Get reference for all important folders
27 | template_dir="$PWD"
28 | template_dist_dir="$template_dir/global-s3-assets"
29 | build_dist_dir="$template_dir/regional-s3-assets"
30 | source_dir="$template_dir/../source"
31 | 
32 | echo "------------------------------------------------------------------------------"
33 | echo "[Init] Clean old dist, node_modules and bower_components folders"
34 | echo "------------------------------------------------------------------------------"
35 | echo "rm -rf $template_dist_dir"
36 | rm -rf $template_dist_dir
37 | echo "mkdir -p $template_dist_dir"
38 | mkdir -p $template_dist_dir
39 | echo "rm -rf $build_dist_dir"
40 | rm -rf $build_dist_dir
41 | echo "mkdir -p $build_dist_dir"
42 | mkdir -p $build_dist_dir
43 | 
44 | echo "------------------------------------------------------------------------------"
45 | echo "[Packing] Templates"
46 | echo "------------------------------------------------------------------------------"
47 | echo "cp $template_dir/*.template $template_dist_dir/"
48 | cp $template_dir/*.template $template_dist_dir/
49 | echo "copy yaml templates and rename"
50 | cp $template_dir/*.yaml $template_dist_dir/
51 | cd $template_dist_dir
52 | # Rename all *.yaml to *.template
53 | for f in *.yaml; do
54 |     mv -- "$f" "${f%.yaml}.template"
55 | done
56 | 
57 | cd ..
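# Portability note: the `sed -i '' -e` invocations below use the BSD/macOS form
# of in-place editing (an explicit empty backup-suffix argument). GNU sed, which
# is typical on Linux, does not accept the empty string as a separate argument;
# there you would instead run e.g. `sed -i -e $replace $template_dist_dir/*.template`.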
58 | echo "Updating code source bucket in template with $1" 59 | replace="s/%%BUCKET_NAME%%/$1/g" 60 | echo "sed -i '' -e $replace $template_dist_dir/*.template" 61 | sed -i '' -e $replace $template_dist_dir/*.template 62 | replace="s/%%SOLUTION_NAME%%/$2/g" 63 | echo "sed -i '' -e $replace $template_dist_dir/*.template" 64 | sed -i '' -e $replace $template_dist_dir/*.template 65 | replace="s/%%VERSION%%/$3/g" 66 | echo "sed -i '' -e $replace $template_dist_dir/*.template" 67 | sed -i '' -e $replace $template_dist_dir/*.template 68 | 69 | echo "------------------------------------------------------------------------------" 70 | echo "Build Functions" 71 | echo "------------------------------------------------------------------------------" 72 | echo "Building Function 01-cleanup-lambda" 73 | cd $source_dir/01-cleanup-lambda 74 | npm run build 75 | cp ./dist/01-cleanup-lambda.zip $build_dist_dir/01-cleanup-lambda.zip 76 | 77 | echo "Building Function 02-export-pinpoint-endpoints-lambda" 78 | cd $source_dir/02-export-pinpoint-endpoints-lambda 79 | npm run build 80 | cp ./dist/02-export-pinpoint-endpoints-lambda.zip $build_dist_dir/02-export-pinpoint-endpoints-lambda.zip 81 | 82 | echo "Building Function 03-export-status-lambda" 83 | cd $source_dir/03-export-status-lambda 84 | npm run build 85 | cp ./dist/03-export-status-lambda.zip $build_dist_dir/03-export-status-lambda.zip 86 | 87 | echo "Building Function 04-query-augment-start-lambda" 88 | cd $source_dir/04-query-augment-start-lambda 89 | npm run build 90 | cp ./dist/04-query-augment-start-lambda.zip $build_dist_dir/04-query-augment-start-lambda.zip 91 | 92 | echo "Building Function 05-query-status-lambda" 93 | cd $source_dir/05-query-status-lambda 94 | npm run build 95 | cp ./dist/05-query-status-lambda.zip $build_dist_dir/05-query-status-lambda.zip 96 | 97 | echo "Building Function 06-dataset-import-lambda" 98 | cd $source_dir/06-dataset-import-lambda 99 | npm run build 100 | cp ./dist/06-dataset-import-lambda.zip $build_dist_dir/06-dataset-import-lambda.zip 101 | 102 | echo "Building Function 07-dataset-import-status-lambda" 103 | cd $source_dir/07-dataset-import-status-lambda 104 | npm run build 105 | cp ./dist/07-dataset-import-status-lambda.zip $build_dist_dir/07-dataset-import-status-lambda.zip 106 | 107 | echo "Building Function 08-create-solution-version-lambda" 108 | cd $source_dir/08-create-solution-version-lambda 109 | npm run build 110 | cp ./dist/08-create-solution-version-lambda.zip $build_dist_dir/08-create-solution-version-lambda.zip 111 | 112 | echo "Building Function 09-create-solution-version-status-lambda" 113 | cd $source_dir/09-create-solution-version-status-lambda 114 | npm run build 115 | cp ./dist/09-create-solution-version-status-lambda.zip $build_dist_dir/09-create-solution-version-status-lambda.zip 116 | 117 | echo "Building Function 10-update-campaign-lambda" 118 | cd $source_dir/10-update-campaign-lambda 119 | npm run build 120 | cp ./dist/10-update-campaign-lambda.zip $build_dist_dir/10-update-campaign-lambda.zip 121 | 122 | echo "Building Function 11-update-campaign-status-lambda" 123 | cd $source_dir/11-update-campaign-status-lambda 124 | npm run build 125 | cp ./dist/11-update-campaign-status-lambda.zip $build_dist_dir/11-update-campaign-status-lambda.zip 126 | 127 | echo "Building Function firehose-record-conversion-lambda" 128 | cd $source_dir/firehose-record-conversion-lambda 129 | npm run build 130 | cp ./dist/firehose-record-conversion-lambda.zip 
$build_dist_dir/firehose-record-conversion-lambda.zip
131 | 
132 | echo "Building Function custom-resource-helper"
133 | cd $source_dir/custom-resource-helper
134 | npm run build
135 | cp ./dist/custom-resource-helper.zip $build_dist_dir/custom-resource-helper.zip
136 | 
137 | # Finish Up
138 | echo "Completed building distribution"
139 | cd $template_dir
140 | 
--------------------------------------------------------------------------------
/source/custom-resource-helper/index.js:
--------------------------------------------------------------------------------
1 | /*********************************************************************************************************************
2 |  * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. *
3 |  * *
4 |  * Licensed under the Apache License Version 2.0 (the 'License'). You may not use this file except in compliance *
5 |  * with the License. A copy of the License is located at *
6 |  * *
7 |  * http://www.apache.org/licenses/ *
8 |  * *
9 |  * or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES *
10 |  * OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions *
11 |  * and limitations under the License. *
12 |  *********************************************************************************************************************/
13 | 
14 | const AWS = require('aws-sdk');
15 | const fs = require("fs");
16 | const https = require("https");
17 | const stream = require('stream');
18 | const url = require('url');
19 | const crypto = require("crypto");
20 | 
21 | AWS.config.update({
22 |     region: process.env.AWS_REGION
23 | });
24 | 
25 | const s3 = new AWS.S3();
26 | const athena = new AWS.Athena();
27 | 
28 | const uuidv4 = function() { // v4 UUID via crypto.randomBytes, which is available on all Node.js Lambda runtimes
29 |     return ([1e7]+-1e3+-4e3+-8e3+-1e11).replace(/[018]/g, c =>
30 |         (c ^ crypto.randomBytes(1)[0] & 15 >> c / 4).toString(16)
31 |     );
32 | };
33 | 
34 | const setupEventRules = function() {
35 |     const event_types = process.env.EVENT_TYPES.split(',');
36 |     const event_values = process.env.EVENT_VALUES.split(',');
37 | 
38 |     let csv = 'EVENT_TYPE,EVENT_VALUE\n';
39 |     for (var i = 0; i < event_types.length; i++) {
40 |         csv += `${event_types[i]},${event_values[i]}\n`;
41 |     }
42 | 
43 |     const params = {
44 |         Bucket: process.env.S3_DATA_BUCKET,
45 |         Key: 'event_values/values.csv',
46 |         Body: Buffer.from(csv)
47 |     };
48 | 
49 |     return s3.putObject(params).promise()
50 |         .catch((err) => {
51 |             console.error('Encountered Error calling putObject with parameters: ' + JSON.stringify(params) + ', error: ' + JSON.stringify(err));
52 |             return Promise.reject(err);
53 |         })
54 | 
55 | }
56 | 
57 | const runNamedQuery = function(namedQuery) {
58 |     return athena.getNamedQuery({NamedQueryId: namedQuery}).promise()
59 |         .then((data) => {
60 | 
61 |             const params = {
62 |                 QueryString: data.NamedQuery.QueryString,
63 |                 ResultConfiguration: {
64 |                     OutputLocation: `s3://${process.env.S3_DATA_BUCKET}/temp/`
65 |                 }
66 |             };
67 | 
68 |             return athena.startQueryExecution(params).promise()
69 |                 .catch((err) => {
70 |                     console.error('Encountered Error calling startQueryExecution with parameters: ' + JSON.stringify(params) + ', error: ' + JSON.stringify(err));
71 |                     return Promise.reject(err);
72 |                 })
73 |         });
74 | }
75 | 
76 | exports.handler = (event, context, callback) => {
77 | 
78 |     let responseStatus = 'FAILED';
79 |     let responseData = {};
80 | 
81 |     if (event.ResourceProperties.CustomResourceAction ===
'GenerateUUID') { 82 | 83 | responseStatus = 'SUCCESS'; 84 | responseData = { 85 | UUID: uuidv4() 86 | }; 87 | return sendResponse(event, callback, context.logStreamName, responseStatus, responseData); 88 | 89 | } else if (event.ResourceProperties.CustomResourceAction === 'SetupQueries') { 90 | 91 | return setupEventRules() 92 | .then((results) => { 93 | 94 | return Promise.all([ 95 | runNamedQuery(process.env.EVENT_NAMED_QUERY), 96 | runNamedQuery(process.env.EXPORT_NAMED_QUERY), 97 | runNamedQuery(process.env.VALUE_NAMED_QUERY) 98 | ]); 99 | 100 | }) 101 | .then((results) => { 102 | responseStatus = 'SUCCESS'; 103 | responseData = { 104 | success: true 105 | }; 106 | return sendResponse(event, callback, context.logStreamName, responseStatus, responseData); 107 | 108 | }) 109 | .catch((results) => { 110 | 111 | console.log('Received Error: ' + JSON.stringify(results)); 112 | 113 | responseStatus = 'FAILED'; 114 | responseData = { 115 | success: false 116 | }; 117 | return sendResponse(event, callback, context.logStreamName, responseStatus, responseData); 118 | 119 | }); 120 | } 121 | }; 122 | 123 | /** 124 | * Sends a response to the pre-signed S3 URL 125 | */ 126 | let sendResponse = function(event, callback, logStreamName, responseStatus, responseData) { 127 | return new Promise((resolve, reject) => { 128 | try { 129 | const responseBody = JSON.stringify({ 130 | Status: responseStatus, 131 | Reason: `See the details in CloudWatch Log Stream: ${logStreamName}`, 132 | PhysicalResourceId: logStreamName, 133 | StackId: event.StackId, 134 | RequestId: event.RequestId, 135 | LogicalResourceId: event.LogicalResourceId, 136 | Data: responseData, 137 | }); 138 | 139 | console.log('RESPONSE BODY:\n', responseBody); 140 | const parsedUrl = url.parse(event.ResponseURL); 141 | const options = { 142 | hostname: parsedUrl.hostname, 143 | port: 443, 144 | path: parsedUrl.path, 145 | method: 'PUT', 146 | headers: { 147 | 'Content-Type': '', 148 | 'Content-Length': responseBody.length, 149 | } 150 | }; 151 | 152 | const req = https.request(options, (res) => { 153 | console.log('STATUS:', res.statusCode); 154 | console.log('HEADERS:', JSON.stringify(res.headers)); 155 | resolve('Successfully sent stack response!'); 156 | }); 157 | 158 | req.on('error', (err) => { 159 | console.log('sendResponse Error:\n', err); 160 | reject(err); 161 | }); 162 | 163 | req.write(responseBody); 164 | req.end(); 165 | 166 | } catch(err) { 167 | console.log('GOT ERROR'); 168 | console.log(err); 169 | reject(err); 170 | } 171 | }); 172 | }; 173 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. 
For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. 
Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 
135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. -------------------------------------------------------------------------------- /deployment/amazon-personalize-optimizer-using-amazon-pinpoint-events.template: -------------------------------------------------------------------------------- 1 | # Amazon Personalize Optimizer Using Amazon Pinpoint Events Solution 2 | # 3 | # template for amazon-personalize-optimizer-using-amazon-pinpoint-events 4 | # **DO NOT DELETE** 5 | # 6 | # author: rjlowe@ 7 | AWSTemplateFormatVersion: 2010-09-09 8 | 9 | Description: (SO0095) - Amazon Personalize Optimizer Using Amazon Pinpoint Events - %%VERSION%% 11 | 12 | Parameters: 13 | EventTypes: 13 | Description: "Select which Amazon Pinpoint event types you wish to submit to Amazon Personalize for Interaction retraining. 
Full list of Pinpoint events can be found here: https://docs.aws.amazon.com/pinpoint/latest/developerguide/event-streams.html" 14 | Default: "_campaign.opened_notification, _email.open, _email.click, _email.unsubscribe" 15 | Type: CommaDelimitedList 16 | EventValues: 17 | Description: List of Event Values that correspond, one for one, to the list of Event Types submitted to Amazon Personalize for Interaction retraining 18 | Default: 100, 50, 100, -200 19 | Type: CommaDelimitedList 20 | PersonalizeCampaignArn: 21 | Description: Amazon Personalize Campaign ARN to use 22 | Type: String 23 | PersonalizeSolutionArn: 24 | Description: Amazon Personalize Solution ARN used in the Personalize Campaign 25 | Type: String 26 | PersonalizeInteractionDatasetArn: 27 | Description: Amazon Personalize Interaction Dataset ARN 28 | Type: String 29 | PinpointProjectId: 30 | Description: Amazon Pinpoint Project ID to use 31 | Type: String 32 | InteractionsQueryDateScope: 33 | Description: How many days of interactions to query; use -1 for all time 34 | Default: -1 35 | Type: Number 36 | 37 | 38 | Metadata: 39 | AWS::CloudFormation::Interface: 40 | ParameterGroups: 41 | - 42 | Label: 43 | default: "Personalize Configuration" 44 | Parameters: 45 | - PersonalizeCampaignArn 46 | - PersonalizeSolutionArn 47 | - PersonalizeInteractionDatasetArn 48 | - 49 | Label: 50 | default: "Pinpoint and Event Configuration" 51 | Parameters: 52 | - PinpointProjectId 53 | # - EndpointOrUser 54 | - EventTypes 55 | - EventValues 56 | - InteractionsQueryDateScope 57 | ParameterLabels: 58 | PinpointProjectId: 59 | default: Pinpoint Project ID 60 | EventTypes: 61 | default: Pinpoint Event Types 62 | EventValues: 63 | default: Pinpoint Event Type Interaction Values 64 | PersonalizeCampaignArn: 65 | default: Amazon Personalize Campaign ARN 66 | PersonalizeSolutionArn: 67 | default: Amazon Personalize Solution ARN 68 | PersonalizeInteractionDatasetArn: 69 | default: Amazon Personalize Interaction Dataset ARN 70 | InteractionsQueryDateScope: 71 | default: Interaction History Date Scope 72 | 73 | Mappings: 74 | MetricsMap: 75 | Send-Data: 76 | SendAnonymousData: "Yes" 77 | 78 | SourceCode: 79 | General: 80 | S3Bucket: "%%BUCKET_NAME%%" 81 | KeyPrefix: "%%SOLUTION_NAME%%/%%VERSION%%" 82 | 83 | 84 | Conditions: 85 | NeedsPersonalizeCampaignArn: !Equals 86 | - '' 87 | - !Ref PersonalizeCampaignArn 88 | 89 | Resources: 90 | 91 | ### Buckets 92 | DataS3Bucket: 93 | Type: AWS::S3::Bucket 94 | DeletionPolicy: Retain 95 | Metadata: 96 | cfn_nag: 97 | rules_to_suppress: 98 | - id: W51 99 | reason: Not public facing. 
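# Data bucket layout, as used elsewhere in this template and in the custom resource helper:
#   events/           - Pinpoint event stream records delivered by Kinesis Firehose
#   endpoint_exports/ - output of the Pinpoint endpoint export jobs
#   event_values/     - the EVENT_TYPE/EVENT_VALUE scoring CSV written at deploy time
#   temp/             - staging location for Athena query results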
100 | Properties: 101 | PublicAccessBlockConfiguration: 102 | BlockPublicAcls: True 103 | BlockPublicPolicy: True 104 | IgnorePublicAcls: True 105 | RestrictPublicBuckets: True 106 | BucketEncryption: 107 | ServerSideEncryptionConfiguration: 108 | - ServerSideEncryptionByDefault: 109 | SSEAlgorithm: AES256 110 | LoggingConfiguration: 111 | DestinationBucketName: !Ref LogBucket 112 | LogFilePrefix: optimize-personalize-campaigns-data/ 113 | 114 | DataS3BucketPolicy: 115 | Type: AWS::S3::BucketPolicy 116 | Properties: 117 | Bucket: !Ref DataS3Bucket 118 | PolicyDocument: 119 | Version: 2012-10-17 120 | Statement: 121 | - Sid: AllowPersonalizeAccess 122 | Effect: "Allow" 123 | Principal: 124 | Service: "personalize.amazonaws.com" 125 | Action: 126 | - "s3:GetObject" 127 | - "s3:ListBucket" 128 | Resource: 129 | - !Sub "arn:aws:s3:::${DataS3Bucket}" 130 | - !Sub "arn:aws:s3:::${DataS3Bucket}/*" 131 | - Sid: AllowSSLRequestsOnly 132 | Effect: Deny 133 | Principal: "*" 134 | Action: "s3:*" 135 | Resource: 136 | - !Sub "arn:aws:s3:::${DataS3Bucket}/*" 137 | - !Sub "arn:aws:s3:::${DataS3Bucket}" 138 | Condition: 139 | Bool: 140 | "aws:SecureTransport": "false" 141 | 142 | LogBucket: 143 | Type: AWS::S3::Bucket 144 | DeletionPolicy: Retain 145 | Metadata: 146 | cfn_nag: 147 | rules_to_suppress: 148 | - id: W35 149 | reason: This is the log bucket. 150 | Properties: 151 | AccessControl: LogDeliveryWrite 152 | PublicAccessBlockConfiguration: 153 | BlockPublicAcls: True 154 | BlockPublicPolicy: True 155 | IgnorePublicAcls: True 156 | RestrictPublicBuckets: True 157 | BucketEncryption: 158 | ServerSideEncryptionConfiguration: 159 | - ServerSideEncryptionByDefault: 160 | SSEAlgorithm: AES256 161 | LogBucketPolicy: 162 | Type: AWS::S3::BucketPolicy 163 | Properties: 164 | Bucket: !Ref LogBucket 165 | PolicyDocument: 166 | Version: 2012-10-17 167 | Statement: 168 | - Sid: AWSCloudTrailAclCheck 169 | Effect: Allow 170 | Principal: 171 | Service: cloudtrail.amazonaws.com 172 | Action: "s3:GetBucketAcl" 173 | Resource: !Sub arn:aws:s3:::${LogBucket} 174 | - Sid: AWSCloudTrailWrite 175 | Effect: Allow 176 | Principal: 177 | Service: cloudtrail.amazonaws.com 178 | Action: "s3:PutObject" 179 | Resource: !Sub arn:aws:s3:::${LogBucket}/AWSLogs/${AWS::AccountId}/* 180 | Condition: 181 | StringEquals: 182 | "s3:x-amz-acl": "bucket-owner-full-control" 183 | 184 | ### Event Stream 185 | PinpointEventStream: 186 | Type: AWS::Pinpoint::EventStream 187 | Properties: 188 | ApplicationId: !Ref PinpointProjectId 189 | DestinationStreamArn: !GetAtt PinpointEventFirehose.Arn 190 | RoleArn: !GetAtt PinpointKinesisStreamRole.Arn 191 | 192 | PinpointEventFirehose: 193 | Type: AWS::KinesisFirehose::DeliveryStream 194 | Properties: 195 | DeliveryStreamType: "DirectPut" 196 | ExtendedS3DestinationConfiguration: 197 | BucketARN: !Sub "arn:aws:s3:::${DataS3Bucket}" 198 | BufferingHints: 199 | IntervalInSeconds: 300 200 | SizeInMBs: 5 201 | CompressionFormat: "UNCOMPRESSED" 202 | Prefix: "events/" 203 | RoleARN: !GetAtt PinpointKinesisFirehoseRole.Arn 204 | ProcessingConfiguration: 205 | Enabled: TRUE 206 | Processors: 207 | - 208 | Parameters: 209 | - 210 | ParameterName: "LambdaArn" 211 | ParameterValue: !GetAtt FirehoseRecordConversionLambda.Arn 212 | - 213 | ParameterName: "BufferIntervalInSeconds" 214 | ParameterValue: 60 215 | - 216 | ParameterName: "BufferSizeInMBs" 217 | ParameterValue: 3 218 | - 219 | ParameterName: "NumberOfRetries" 220 | ParameterValue: 3 221 | - 222 | ParameterName: "RoleArn" 223 | ParameterValue: !GetAtt 
PinpointKinesisFirehoseRole.Arn 224 | Type: "Lambda" 225 | 226 | FirehoseRecordConversionLambda: 227 | Type: AWS::Lambda::Function 228 | Properties: 229 | Handler: index.handler 230 | Role: !GetAtt FirehoseRecordConversionLambdaRole.Arn 231 | Runtime: "nodejs12.x" 232 | Timeout: 60 233 | Code: 234 | S3Bucket: !Join ["-", [!FindInMap ["SourceCode", "General", "S3Bucket"], Ref: "AWS::Region" ]] 235 | S3Key: !Join ["/", [!FindInMap ["SourceCode", "General", "KeyPrefix"], "firehose-record-conversion-lambda.zip"]] 236 | 237 | 238 | 239 | PinpointKinesisStreamRole: 240 | Type: AWS::IAM::Role 241 | Properties: 242 | AssumeRolePolicyDocument: 243 | Version: 2012-10-17 244 | Statement: 245 | - Effect: Allow 246 | Principal: 247 | Service: 248 | - pinpoint.amazonaws.com 249 | Action: 250 | - 'sts:AssumeRole' 251 | Path: "/" 252 | Policies: 253 | - 254 | PolicyName: "root" 255 | PolicyDocument: 256 | Version: "2012-10-17" 257 | Statement: 258 | - 259 | Effect: "Allow" 260 | Action: 261 | - "firehose:PutRecordBatch" 262 | - "firehose:DescribeDeliveryStream" 263 | Resource: !GetAtt PinpointEventFirehose.Arn 264 | 265 | PinpointKinesisFirehoseRole: 266 | Type: AWS::IAM::Role 267 | Properties: 268 | AssumeRolePolicyDocument: 269 | Version: "2012-10-17" 270 | Statement: 271 | - Effect: Allow 272 | Principal: 273 | Service: 274 | - firehose.amazonaws.com 275 | Action: 276 | - 'sts:AssumeRole' 277 | Path: "/" 278 | Policies: 279 | - 280 | PolicyName: "root" 281 | PolicyDocument: 282 | Version: "2012-10-17" 283 | Statement: 284 | - 285 | Effect: "Allow" 286 | Action: 287 | - "s3:AbortMultipartUpload" 288 | - "s3:GetBucketLocation" 289 | - "s3:GetObject" 290 | - "s3:ListBucket" 291 | - "s3:ListBucketMultipartUploads" 292 | - "s3:PutObject" 293 | Resource: 294 | - !Sub "arn:aws:s3:::${DataS3Bucket}*" 295 | - !Sub "arn:aws:s3:::${DataS3Bucket}" 296 | - 297 | Effect: "Allow" 298 | Action: 299 | - "lambda:InvokeFunction" 300 | - "lambda:GetFunctionConfiguration" 301 | Resource: !GetAtt FirehoseRecordConversionLambda.Arn 302 | - 303 | Effect: "Allow" 304 | Action: "logs:PutLogEvents" 305 | Resource: !Sub "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:log-group:/aws/kinesisfirehose/*:log-stream:*" 306 | 307 | FirehoseRecordConversionLambdaRole: 308 | Type: AWS::IAM::Role 309 | Properties: 310 | AssumeRolePolicyDocument: 311 | Version: 2012-10-17 312 | Statement: 313 | - Effect: Allow 314 | Principal: 315 | Service: 316 | - lambda.amazonaws.com 317 | Action: 318 | - 'sts:AssumeRole' 319 | Path: "/" 320 | Policies: 321 | - 322 | PolicyName: "root" 323 | PolicyDocument: 324 | Version: "2012-10-17" 325 | Statement: 326 | - 327 | Effect: "Allow" 328 | Action: 329 | - "logs:CreateLogGroup" 330 | - "logs:CreateLogStream" 331 | - "logs:PutLogEvents" 332 | Resource: !Sub "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:*" 333 | 334 | ### Glue 335 | PinpointEventDatabase: 336 | Type: AWS::Glue::Database 337 | Properties: 338 | CatalogId: !Ref AWS::AccountId 339 | DatabaseInput: 340 | Name: !Sub "${AWS::StackName}-pinpoint-events" 341 | Description: "Pinpoint Streaming Event Database" 342 | 343 | ### State Machine & Lambdas 344 | 345 | CleanupLambda: 346 | Type: AWS::Lambda::Function 347 | Properties: 348 | Handler: index.handler 349 | Role: !GetAtt CleanupLambdaRole.Arn 350 | Runtime: "nodejs12.x" 351 | Timeout: 60 352 | Environment: 353 | Variables: 354 | S3_BUCKET: !Ref DataS3Bucket 355 | Code: 356 | S3Bucket: !Join ["-", [!FindInMap ["SourceCode", "General", "S3Bucket"], Ref: "AWS::Region" ]] 357 | S3Key: !Join ["/", 
[!FindInMap ["SourceCode", "General", "KeyPrefix"], "01-cleanup-lambda.zip"]] 358 | 359 | ExportPinpointEndpointsLambda: 360 | Type: AWS::Lambda::Function 361 | Properties: 362 | Handler: index.handler 363 | Role: !GetAtt ExportPinpointEndpointsLambdaRole.Arn 364 | Runtime: "nodejs12.x" 365 | Timeout: 60 366 | Environment: 367 | Variables: 368 | PINPOINT_APPLICATION_ID: !Ref PinpointProjectId 369 | S3_BUCKET: !Ref DataS3Bucket 370 | ROLE_ARN: !GetAtt PinpointExportRole.Arn 371 | Code: 372 | S3Bucket: !Join ["-", [!FindInMap ["SourceCode", "General", "S3Bucket"], Ref: "AWS::Region" ]] 373 | S3Key: !Join ["/", [!FindInMap ["SourceCode", "General", "KeyPrefix"], "02-export-pinpoint-endpoints-lambda.zip"]] 374 | 375 | 376 | ExportStatusLambda: 377 | Type: AWS::Lambda::Function 378 | Properties: 379 | Handler: index.handler 380 | Role: !GetAtt ExportStatusLambdaRole.Arn 381 | Runtime: "nodejs12.x" 382 | Timeout: 60 383 | Environment: 384 | Variables: 385 | PINPOINT_APPLICATION_ID: !Ref PinpointProjectId 386 | Code: 387 | S3Bucket: !Join ["-", [!FindInMap ["SourceCode", "General", "S3Bucket"], Ref: "AWS::Region" ]] 388 | S3Key: !Join ["/", [!FindInMap ["SourceCode", "General", "KeyPrefix"], "03-export-status-lambda.zip"]] 389 | 390 | QueryAugmentStartLambda: 391 | Type: AWS::Lambda::Function 392 | Properties: 393 | Handler: index.handler 394 | Role: !GetAtt QueryAugmentStartLambdaRole.Arn 395 | Runtime: "nodejs12.x" 396 | Timeout: 60 397 | Environment: 398 | Variables: 399 | S3_BUCKET: !Ref DataS3Bucket 400 | NAMED_QUERY: !Ref RetrieveInteractionsNamedQuery 401 | Code: 402 | S3Bucket: !Join ["-", [!FindInMap ["SourceCode", "General", "S3Bucket"], Ref: "AWS::Region" ]] 403 | S3Key: !Join ["/", [!FindInMap ["SourceCode", "General", "KeyPrefix"], "04-query-augment-start-lambda.zip"]] 404 | 405 | 406 | QueryStatusLambda: 407 | Type: AWS::Lambda::Function 408 | Properties: 409 | Handler: index.handler 410 | Role: !GetAtt QueryStatusLambdaRole.Arn 411 | Runtime: "nodejs12.x" 412 | Timeout: 60 413 | Code: 414 | S3Bucket: !Join ["-", [!FindInMap ["SourceCode", "General", "S3Bucket"], Ref: "AWS::Region" ]] 415 | S3Key: !Join ["/", [!FindInMap ["SourceCode", "General", "KeyPrefix"], "05-query-status-lambda.zip"]] 416 | 417 | DataSetImportLambda: 418 | Type: AWS::Lambda::Function 419 | Properties: 420 | Handler: index.handler 421 | Role: !GetAtt DataSetImportLambdaRole.Arn 422 | Runtime: "nodejs12.x" 423 | Timeout: 60 424 | Environment: 425 | Variables: 426 | IMPORT_ROLE: !GetAtt PersonalizeDatasetImportRole.Arn 427 | DATA_SET_ARN: !Ref PersonalizeInteractionDatasetArn 428 | Code: 429 | S3Bucket: !Join ["-", [!FindInMap ["SourceCode", "General", "S3Bucket"], Ref: "AWS::Region" ]] 430 | S3Key: !Join ["/", [!FindInMap ["SourceCode", "General", "KeyPrefix"], "06-dataset-import-lambda.zip"]] 431 | 432 | DataSetImportStatusLambda: 433 | Type: AWS::Lambda::Function 434 | Properties: 435 | Handler: index.handler 436 | Role: !GetAtt DataSetImportStatusLambdaRole.Arn 437 | Runtime: "nodejs12.x" 438 | Timeout: 60 439 | Code: 440 | S3Bucket: !Join ["-", [!FindInMap ["SourceCode", "General", "S3Bucket"], Ref: "AWS::Region" ]] 441 | S3Key: !Join ["/", [!FindInMap ["SourceCode", "General", "KeyPrefix"], "07-dataset-import-status-lambda.zip"]] 442 | 443 | CreateSolutionVersionLambda: 444 | Type: AWS::Lambda::Function 445 | Properties: 446 | Handler: index.handler 447 | Role: !GetAtt CreateSolutionVersionLambdaRole.Arn 448 | Runtime: "nodejs12.x" 449 | Timeout: 60 450 | Environment: 451 | Variables: 452 | SOLUTION_ARN: 
!Ref PersonalizeSolutionArn 453 | Code: 454 | S3Bucket: !Join ["-", [!FindInMap ["SourceCode", "General", "S3Bucket"], Ref: "AWS::Region" ]] 455 | S3Key: !Join ["/", [!FindInMap ["SourceCode", "General", "KeyPrefix"], "08-create-solution-version-lambda.zip"]] 456 | 457 | CreateSolutionVersionStatusLambda: 458 | Type: AWS::Lambda::Function 459 | Properties: 460 | Handler: index.handler 461 | Role: !GetAtt CreateSolutionVersionStatusLambdaRole.Arn 462 | Runtime: "nodejs12.x" 463 | Timeout: 60 464 | Code: 465 | S3Bucket: !Join ["-", [!FindInMap ["SourceCode", "General", "S3Bucket"], Ref: "AWS::Region" ]] 466 | S3Key: !Join ["/", [!FindInMap ["SourceCode", "General", "KeyPrefix"], "09-create-solution-version-status-lambda.zip"]] 467 | 468 | UpdateCampaignLambda: 469 | Type: AWS::Lambda::Function 470 | Properties: 471 | Handler: index.handler 472 | Role: !GetAtt UpdateCampaignLambdaRole.Arn 473 | Runtime: "nodejs12.x" 474 | Timeout: 60 475 | Environment: 476 | Variables: 477 | MIN_PROVISIONED_TPS: 1 478 | CAMPAIGN_ARN: !Ref PersonalizeCampaignArn 479 | Code: 480 | S3Bucket: !Join ["-", [!FindInMap ["SourceCode", "General", "S3Bucket"], Ref: "AWS::Region" ]] 481 | S3Key: !Join ["/", [!FindInMap ["SourceCode", "General", "KeyPrefix"], "10-update-campaign-lambda.zip"]] 482 | 483 | UpdateCampaignStatusLambda: 484 | Type: AWS::Lambda::Function 485 | Properties: 486 | Handler: index.handler 487 | Role: !GetAtt UpdateCampaignStatusLambdaRole.Arn 488 | Runtime: "nodejs12.x" 489 | Timeout: 60 490 | Environment: 491 | Variables: 492 | CAMPAIGN_ARN: !Ref PersonalizeCampaignArn 493 | Code: 494 | S3Bucket: !Join ["-", [!FindInMap ["SourceCode", "General", "S3Bucket"], Ref: "AWS::Region" ]] 495 | S3Key: !Join ["/", [!FindInMap ["SourceCode", "General", "KeyPrefix"], "11-update-campaign-status-lambda.zip"]] 496 | 497 | DailyRetrainingStateMachine: 498 | Type: AWS::StepFunctions::StateMachine 499 | DependsOn: CustomResourceHelper 500 | Properties: 501 | RoleArn: !GetAtt DailyRetrainingStateMachineRole.Arn 502 | DefinitionString: 503 | !Sub 504 | - |- 505 | { 506 | "StartAt": "CleanStart", 507 | "States": { 508 | "CleanStart": { 509 | "Type": "Task", 510 | "Resource": "${CleanupArn}", 511 | "Next": "ExportPinpointEndpoints" 512 | }, 513 | "ExportPinpointEndpoints": { 514 | "Type": "Task", 515 | "Resource": "${ExportPinpointEndpointsArn}", 516 | "Next": "ExportWait" 517 | }, 518 | "ExportWait": { 519 | "Type": "Wait", 520 | "Seconds": 30, 521 | "Next": "ExportStatus" 522 | }, 523 | "ExportStatus": { 524 | "Type": "Task", 525 | "Resource": "${ExportStatusArn}", 526 | "Next": "IsExportFinished" 527 | }, 528 | "IsExportFinished": { 529 | "Type": "Choice", 530 | "Default": "ExportWait", 531 | "Choices": [ 532 | { 533 | "Variable": "$.ExportJobStatus", 534 | "StringEquals": "FAILED", 535 | "Next": "ExportFailed" 536 | }, 537 | { 538 | "Variable": "$.ExportJobStatus", 539 | "StringEquals": "COMPLETED", 540 | "Next": "QueryAugmentStart" 541 | } 542 | ] 543 | }, 544 | "ExportFailed": { 545 | "Type": "Fail", 546 | "Cause": "Pinpoint Export failed", 547 | "Error": "Pinpoint Export failed" 548 | }, 549 | "QueryAugmentStart": { 550 | "Type": "Task", 551 | "Resource": "${QueryAugmentStartArn}", 552 | "Next": "QueryWait" 553 | }, 554 | "QueryWait" : { 555 | "Type": "Wait", 556 | "Seconds": 5, 557 | "Next": "QueryStatus" 558 | }, 559 | "QueryStatus": { 560 | "Type": "Task", 561 | "Resource": "${QueryStatusArn}", 562 | "Next": "IsQueryFinished" 563 | }, 564 | "IsQueryFinished":{ 565 | "Type": "Choice", 566 | "Default": 
"QueryWait", 567 | "Choices": [{ 568 | "Variable": "$.Status", 569 | "StringEquals": "FAILED", 570 | "Next": "QueryFailed" 571 | },{ 572 | "Variable": "$.Status", 573 | "StringEquals": "SUCCEEDED", 574 | "Next": "InteractionDatasetImport" 575 | }] 576 | }, 577 | "QueryFailed": { 578 | "Type": "Fail", 579 | "Cause": "Athena Query failed", 580 | "Error": "Athena Query failed" 581 | }, 582 | "InteractionDatasetImport": { 583 | "Type": "Task", 584 | "Resource": "${DataSetImportLambdaArn}", 585 | "Next": "InteractionImportWait" 586 | }, 587 | "InteractionImportWait": { 588 | "Type": "Wait", 589 | "Seconds": 150, 590 | "Next": "InteractionImportStatus" 591 | }, 592 | "InteractionImportStatus": { 593 | "Type": "Task", 594 | "Resource": "${DataSetImportStatusLambdaArn}", 595 | "Next": "IsInteractionImportFinished" 596 | }, 597 | "IsInteractionImportFinished": { 598 | "Type": "Choice", 599 | "Default": "InteractionImportWait", 600 | "Choices": [ 601 | { 602 | "Variable": "$.DatasetImportJobStatus", 603 | "StringEquals": "CREATE FAILED", 604 | "Next": "InteractionImportFailed" 605 | }, 606 | { 607 | "Variable": "$.DatasetImportJobStatus", 608 | "StringEquals": "ACTIVE", 609 | "Next": "CreateSolutionVersion" 610 | } 611 | ] 612 | }, 613 | "InteractionImportFailed": { 614 | "Type": "Fail", 615 | "Cause": "Interaction Dataset Import failed", 616 | "Error": "Interaction Dataset Import failed" 617 | }, 618 | 619 | "CreateSolutionVersion": { 620 | "Type": "Task", 621 | "Resource": "${CreateSolutionVersionLambdaArn}", 622 | "Next": "SolutionVersionWait" 623 | }, 624 | "SolutionVersionWait": { 625 | "Type": "Wait", 626 | "Seconds": 200, 627 | "Next": "SolutionVersionStatus" 628 | }, 629 | "SolutionVersionStatus": { 630 | "Type": "Task", 631 | "Resource": "${CreateSolutionVersionStatusLambdaArn}", 632 | "Next": "IsSolutionVersionFinished" 633 | }, 634 | "IsSolutionVersionFinished": { 635 | "Type": "Choice", 636 | "Default": "SolutionVersionWait", 637 | "Choices": [ 638 | { 639 | "Variable": "$.SolutionVersionStatus", 640 | "StringEquals": "CREATE FAILED", 641 | "Next": "SolutionVersionFailed" 642 | }, 643 | { 644 | "Variable": "$.SolutionVersionStatus", 645 | "StringEquals": "ACTIVE", 646 | "Next": "SolutionCampaign" 647 | } 648 | ] 649 | }, 650 | "SolutionVersionFailed": { 651 | "Type": "Fail", 652 | "Cause": "Solution Version failed", 653 | "Error": "Solution Version failed" 654 | }, 655 | 656 | 657 | "SolutionCampaign": { 658 | "Type": "Task", 659 | "Resource": "${UpdateCampaignLambdaArn}", 660 | "Next": "CampaignWait" 661 | }, 662 | "CampaignWait": { 663 | "Type": "Wait", 664 | "Seconds": 150, 665 | "Next": "CampaignStatus" 666 | }, 667 | "CampaignStatus": { 668 | "Type": "Task", 669 | "Resource": "${UpdateCampaignStatusLambdaArn}", 670 | "Next": "IsCampaignFinished" 671 | }, 672 | "IsCampaignFinished": { 673 | "Type": "Choice", 674 | "Default": "CampaignWait", 675 | "Choices": [ 676 | { 677 | "Variable": "$.CampaignStatus", 678 | "StringEquals": "CREATE FAILED", 679 | "Next": "CampaignFailed" 680 | }, 681 | { 682 | "Variable": "$.CampaignStatus", 683 | "StringEquals": "ACTIVE", 684 | "Next": "CampaignEnd" 685 | } 686 | ] 687 | }, 688 | "CampaignFailed": { 689 | "Type": "Fail", 690 | "Cause": "Campaign failed", 691 | "Error": "Campaign failed" 692 | }, 693 | "CampaignEnd": { 694 | "Type": "Pass", 695 | "End": true 696 | } 697 | } 698 | } 699 | - {CleanupArn: !GetAtt CleanupLambda.Arn, ExportPinpointEndpointsArn: !GetAtt ExportPinpointEndpointsLambda.Arn, ExportStatusArn: !GetAtt ExportStatusLambda.Arn, 
QueryAugmentStartArn: !GetAtt QueryAugmentStartLambda.Arn, QueryStatusArn: !GetAtt QueryStatusLambda.Arn, DataSetImportLambdaArn: !GetAtt DataSetImportLambda.Arn, DataSetImportStatusLambdaArn: !GetAtt DataSetImportStatusLambda.Arn, CreateSolutionVersionLambdaArn: !GetAtt CreateSolutionVersionLambda.Arn, CreateSolutionVersionStatusLambdaArn: !GetAtt CreateSolutionVersionStatusLambda.Arn, UpdateCampaignLambdaArn: !GetAtt UpdateCampaignLambda.Arn, UpdateCampaignStatusLambdaArn: !GetAtt UpdateCampaignStatusLambda.Arn} 700 | 701 | DailyRetrainingCloudWatchEvent: 702 | Type: AWS::Events::Rule 703 | Properties: 704 | Description: "Run the DailyRetrainingStateMachine Daily" 705 | ScheduleExpression: "cron(0 2 * * ? *)" 706 | State: "ENABLED" 707 | RoleArn: !GetAtt DailyRetrainingCloudWatchEventRole.Arn 708 | Targets: 709 | - 710 | Arn: !Ref DailyRetrainingStateMachine 711 | Id: "DailyRetrainingStateMachine" 712 | RoleArn: !GetAtt DailyRetrainingCloudWatchEventRole.Arn 713 | 714 | DailyRetrainingCloudWatchEventRole: 715 | Type: AWS::IAM::Role 716 | Properties: 717 | AssumeRolePolicyDocument: 718 | Version: 2012-10-17 719 | Statement: 720 | - Effect: Allow 721 | Principal: 722 | Service: 723 | - "events.amazonaws.com" 724 | Action: 725 | - 'sts:AssumeRole' 726 | Path: "/" 727 | Policies: 728 | - 729 | PolicyName: "root" 730 | PolicyDocument: 731 | Version: "2012-10-17" 732 | Statement: 733 | - 734 | Effect: "Allow" 735 | Action: "states:StartExecution" 736 | Resource: !Ref DailyRetrainingStateMachine 737 | 738 | DailyRetrainingStateMachineRole: 739 | Type: AWS::IAM::Role 740 | Metadata: 741 | cfn_nag: 742 | rules_to_suppress: 743 | - id: W76 744 | reason: Suppressing SPCM for IAM policy document 745 | Properties: 746 | AssumeRolePolicyDocument: 747 | Version: "2012-10-17" 748 | Statement: 749 | - 750 | Effect: "Allow" 751 | Principal: 752 | Service: 753 | - "states.amazonaws.com" 754 | Action: 755 | - "sts:AssumeRole" 756 | Path: "/" 757 | ManagedPolicyArns: 758 | - "arn:aws:iam::aws:policy/service-role/AWSGlueServiceRole" 759 | Policies: 760 | - 761 | PolicyName: "root" 762 | PolicyDocument: 763 | Version: "2012-10-17" 764 | Statement: 765 | - 766 | Effect: "Allow" 767 | Action: "lambda:InvokeFunction" 768 | Resource: 769 | - !GetAtt CleanupLambda.Arn 770 | - !GetAtt ExportPinpointEndpointsLambda.Arn 771 | - !GetAtt ExportStatusLambda.Arn 772 | - !GetAtt QueryAugmentStartLambda.Arn 773 | - !GetAtt QueryStatusLambda.Arn 774 | - !GetAtt DataSetImportLambda.Arn 775 | - !GetAtt DataSetImportStatusLambda.Arn 776 | - !GetAtt CreateSolutionVersionLambda.Arn 777 | - !GetAtt CreateSolutionVersionStatusLambda.Arn 778 | - !GetAtt UpdateCampaignLambda.Arn 779 | - !GetAtt UpdateCampaignStatusLambda.Arn 780 | 781 | CleanupLambdaRole: 782 | Type: AWS::IAM::Role 783 | Properties: 784 | AssumeRolePolicyDocument: 785 | Version: 2012-10-17 786 | Statement: 787 | - Effect: Allow 788 | Principal: 789 | Service: 790 | - lambda.amazonaws.com 791 | Action: 792 | - 'sts:AssumeRole' 793 | Path: "/" 794 | Policies: 795 | - 796 | PolicyName: "root" 797 | PolicyDocument: 798 | Version: "2012-10-17" 799 | Statement: 800 | - 801 | Effect: "Allow" 802 | Action: 803 | - "logs:CreateLogGroup" 804 | - "logs:CreateLogStream" 805 | - "logs:PutLogEvents" 806 | Resource: !Sub "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:*" 807 | - 808 | Effect: "Allow" 809 | Action: 810 | - "s3:ListBucket" 811 | - "s3:DeleteObject" 812 | Resource: 813 | - !Sub "arn:aws:s3:::${DataS3Bucket}/*" 814 | - !Sub "arn:aws:s3:::${DataS3Bucket}" 815 | 816 | 
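# Each state-machine Lambda above is paired with its own narrowly scoped role below:
# CloudWatch Logs access plus only the specific Pinpoint, Athena, Glue, or Personalize
# actions that the function actually calls.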
ExportPinpointEndpointsLambdaRole: 817 | Type: AWS::IAM::Role 818 | Properties: 819 | AssumeRolePolicyDocument: 820 | Version: 2012-10-17 821 | Statement: 822 | - Effect: Allow 823 | Principal: 824 | Service: 825 | - lambda.amazonaws.com 826 | Action: 827 | - 'sts:AssumeRole' 828 | Path: "/" 829 | Policies: 830 | - 831 | PolicyName: "root" 832 | PolicyDocument: 833 | Version: "2012-10-17" 834 | Statement: 835 | - 836 | Effect: "Allow" 837 | Action: 838 | - "logs:CreateLogGroup" 839 | - "logs:CreateLogStream" 840 | - "logs:PutLogEvents" 841 | Resource: !Sub "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:*" 842 | - 843 | Effect: "Allow" 844 | Action: "iam:PassRole" 845 | Resource: 846 | - !GetAtt PinpointExportRole.Arn 847 | - 848 | Effect: "Allow" 849 | Action: 850 | - "mobiletargeting:CreateExportJob" 851 | Resource: 852 | - !Sub "arn:aws:mobiletargeting:${AWS::Region}:${AWS::AccountId}:apps/${PinpointProjectId}/jobs/export" 853 | - !Sub "arn:aws:mobiletargeting:${AWS::Region}:${AWS::AccountId}:apps/${PinpointProjectId}" 854 | 855 | PinpointExportRole: 856 | Type: AWS::IAM::Role 857 | Properties: 858 | AssumeRolePolicyDocument: 859 | Version: 2012-10-17 860 | Statement: 861 | - Effect: Allow 862 | Principal: 863 | Service: 864 | - pinpoint.amazonaws.com 865 | Action: 866 | - 'sts:AssumeRole' 867 | Path: "/" 868 | Policies: 869 | - 870 | PolicyName: "root" 871 | PolicyDocument: 872 | Version: "2012-10-17" 873 | Statement: 874 | - 875 | Effect: "Allow" 876 | Action: 877 | - "s3:PutObject" 878 | - "s3:GetObjectAcl" 879 | - "s3:GetObject" 880 | - "s3:DeleteObjectVersion" 881 | - "s3:GetObjectTagging" 882 | - "s3:DeleteObject" 883 | - "s3:GetObjectVersion" 884 | Resource: 885 | - !Sub "arn:aws:s3:::${DataS3Bucket}*" 886 | - !Sub "arn:aws:s3:::${DataS3Bucket}" 887 | - !Sub "arn:aws:s3:::${DataS3Bucket}/" 888 | - !Sub "arn:aws:s3:::${DataS3Bucket}/*" 889 | - 890 | Effect: "Allow" 891 | Action: 892 | - "s3:ListAllMyBuckets" 893 | - "s3:GetBucketLocation" 894 | Resource: 895 | - !Sub "arn:aws:s3:::*" 896 | 897 | ExportStatusLambdaRole: 898 | Type: AWS::IAM::Role 899 | Properties: 900 | AssumeRolePolicyDocument: 901 | Version: 2012-10-17 902 | Statement: 903 | - Effect: Allow 904 | Principal: 905 | Service: 906 | - lambda.amazonaws.com 907 | Action: 908 | - 'sts:AssumeRole' 909 | Path: "/" 910 | Policies: 911 | - 912 | PolicyName: "root" 913 | PolicyDocument: 914 | Version: "2012-10-17" 915 | Statement: 916 | - 917 | Effect: "Allow" 918 | Action: 919 | - "logs:CreateLogGroup" 920 | - "logs:CreateLogStream" 921 | - "logs:PutLogEvents" 922 | Resource: !Sub "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:*" 923 | - 924 | Effect: "Allow" 925 | Action: 926 | - "mobiletargeting:GetExportJob" 927 | Resource: 928 | - !Sub "arn:aws:mobiletargeting:${AWS::Region}:${AWS::AccountId}:apps/${PinpointProjectId}/jobs/export/*" 929 | 930 | QueryAugmentStartLambdaRole: 931 | Type: AWS::IAM::Role 932 | Properties: 933 | AssumeRolePolicyDocument: 934 | Version: 2012-10-17 935 | Statement: 936 | - Effect: Allow 937 | Principal: 938 | Service: 939 | - lambda.amazonaws.com 940 | Action: 941 | - 'sts:AssumeRole' 942 | Path: "/" 943 | Policies: 944 | - 945 | PolicyName: "root" 946 | PolicyDocument: 947 | Version: "2012-10-17" 948 | Statement: 949 | - 950 | Effect: "Allow" 951 | Action: 952 | - "logs:CreateLogGroup" 953 | - "logs:CreateLogStream" 954 | - "logs:PutLogEvents" 955 | Resource: !Sub "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:*" 956 | - 957 | Effect: "Allow" 958 | Action: 959 | - "s3:GetBucketLocation" 960 | - 
"s3:GetObject" 961 | - "s3:ListBucket" 962 | - "s3:ListBucketMultipartUploads" 963 | - "s3:ListMultipartUploadParts" 964 | - "s3:AbortMultipartUpload" 965 | - "s3:CreateBucket" 966 | - "s3:PutObject" 967 | Resource: 968 | - !Sub "arn:aws:s3:::${DataS3Bucket}/*" 969 | - !Sub "arn:aws:s3:::${DataS3Bucket}" 970 | - 971 | Effect: "Allow" 972 | Action: 973 | - "athena:StartQueryExecution" 974 | - "athena:GetNamedQuery" 975 | Resource: !Sub "arn:aws:athena:${AWS::Region}:${AWS::AccountId}:workgroup/*" 976 | - 977 | Effect: "Allow" 978 | Action: 979 | - "glue:GetDatabase" 980 | - "glue:GetDatabases" 981 | - "glue:GetTable" 982 | - "glue:GetTables" 983 | - "glue:GetPartition" 984 | - "glue:GetPartitions" 985 | Resource: 986 | - !Sub "arn:aws:glue:${AWS::Region}:${AWS::AccountId}:table/${PinpointEventDatabase}/*" 987 | - !Sub "arn:aws:glue:${AWS::Region}:${AWS::AccountId}:database/${PinpointEventDatabase}" 988 | - !Sub "arn:aws:glue:${AWS::Region}:${AWS::AccountId}:catalog" 989 | 990 | QueryStatusLambdaRole: 991 | Type: AWS::IAM::Role 992 | Properties: 993 | AssumeRolePolicyDocument: 994 | Version: 2012-10-17 995 | Statement: 996 | - Effect: Allow 997 | Principal: 998 | Service: 999 | - lambda.amazonaws.com 1000 | Action: 1001 | - 'sts:AssumeRole' 1002 | Path: "/" 1003 | Policies: 1004 | - 1005 | PolicyName: "root" 1006 | PolicyDocument: 1007 | Version: "2012-10-17" 1008 | Statement: 1009 | - 1010 | Effect: "Allow" 1011 | Action: 1012 | - "logs:CreateLogGroup" 1013 | - "logs:CreateLogStream" 1014 | - "logs:PutLogEvents" 1015 | Resource: !Sub "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:*" 1016 | - 1017 | Effect: "Allow" 1018 | Action: 1019 | - "athena:GetQueryExecution" 1020 | Resource: !Sub "arn:aws:athena:${AWS::Region}:${AWS::AccountId}:workgroup/*" 1021 | 1022 | DataSetImportLambdaRole: 1023 | Type: AWS::IAM::Role 1024 | Properties: 1025 | AssumeRolePolicyDocument: 1026 | Version: 2012-10-17 1027 | Statement: 1028 | - Effect: Allow 1029 | Principal: 1030 | Service: 1031 | - lambda.amazonaws.com 1032 | Action: 1033 | - 'sts:AssumeRole' 1034 | Path: "/" 1035 | Policies: 1036 | - 1037 | PolicyName: "root" 1038 | PolicyDocument: 1039 | Version: "2012-10-17" 1040 | Statement: 1041 | - 1042 | Effect: "Allow" 1043 | Action: 1044 | - "logs:CreateLogGroup" 1045 | - "logs:CreateLogStream" 1046 | - "logs:PutLogEvents" 1047 | Resource: !Sub "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:*" 1048 | - 1049 | Effect: "Allow" 1050 | Action: 1051 | - "personalize:CreateDatasetImportJob" 1052 | Resource: 1053 | - !Sub "arn:aws:personalize:${AWS::Region}:${AWS::AccountId}:dataset/*" 1054 | - !Sub "arn:aws:personalize:${AWS::Region}:${AWS::AccountId}:dataset-group/*" 1055 | - !Sub "arn:aws:personalize:${AWS::Region}:${AWS::AccountId}:dataset-import-job/*" 1056 | - 1057 | Effect: "Allow" 1058 | Action: "iam:PassRole" 1059 | Resource: 1060 | - !GetAtt PersonalizeDatasetImportRole.Arn 1061 | 1062 | DataSetImportStatusLambdaRole: 1063 | Type: AWS::IAM::Role 1064 | Properties: 1065 | AssumeRolePolicyDocument: 1066 | Version: 2012-10-17 1067 | Statement: 1068 | - Effect: Allow 1069 | Principal: 1070 | Service: 1071 | - lambda.amazonaws.com 1072 | Action: 1073 | - 'sts:AssumeRole' 1074 | Path: "/" 1075 | Policies: 1076 | - 1077 | PolicyName: "root" 1078 | PolicyDocument: 1079 | Version: "2012-10-17" 1080 | Statement: 1081 | - 1082 | Effect: "Allow" 1083 | Action: 1084 | - "logs:CreateLogGroup" 1085 | - "logs:CreateLogStream" 1086 | - "logs:PutLogEvents" 1087 | Resource: !Sub 
"arn:aws:logs:${AWS::Region}:${AWS::AccountId}:*" 1088 | - 1089 | Effect: "Allow" 1090 | Action: 1091 | - "personalize:DescribeDatasetImportJob" 1092 | Resource: !Sub "arn:aws:personalize:${AWS::Region}:${AWS::AccountId}:dataset-import-job/*" 1093 | 1094 | CreateSolutionVersionLambdaRole: 1095 | Type: AWS::IAM::Role 1096 | Properties: 1097 | AssumeRolePolicyDocument: 1098 | Version: 2012-10-17 1099 | Statement: 1100 | - Effect: Allow 1101 | Principal: 1102 | Service: 1103 | - lambda.amazonaws.com 1104 | Action: 1105 | - 'sts:AssumeRole' 1106 | Path: "/" 1107 | Policies: 1108 | - 1109 | PolicyName: "root" 1110 | PolicyDocument: 1111 | Version: "2012-10-17" 1112 | Statement: 1113 | - 1114 | Effect: "Allow" 1115 | Action: 1116 | - "logs:CreateLogGroup" 1117 | - "logs:CreateLogStream" 1118 | - "logs:PutLogEvents" 1119 | Resource: !Sub "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:*" 1120 | - 1121 | Effect: "Allow" 1122 | Action: 1123 | - "personalize:CreateSolutionVersion" 1124 | Resource: !Sub "arn:aws:personalize:${AWS::Region}:${AWS::AccountId}:solution/*" 1125 | 1126 | CreateSolutionVersionStatusLambdaRole: 1127 | Type: AWS::IAM::Role 1128 | Properties: 1129 | AssumeRolePolicyDocument: 1130 | Version: 2012-10-17 1131 | Statement: 1132 | - Effect: Allow 1133 | Principal: 1134 | Service: 1135 | - lambda.amazonaws.com 1136 | Action: 1137 | - 'sts:AssumeRole' 1138 | Path: "/" 1139 | Policies: 1140 | - 1141 | PolicyName: "root" 1142 | PolicyDocument: 1143 | Version: "2012-10-17" 1144 | Statement: 1145 | - 1146 | Effect: "Allow" 1147 | Action: 1148 | - "logs:CreateLogGroup" 1149 | - "logs:CreateLogStream" 1150 | - "logs:PutLogEvents" 1151 | Resource: !Sub "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:*" 1152 | - 1153 | Effect: "Allow" 1154 | Action: 1155 | - "personalize:DescribeSolutionVersion" 1156 | Resource: 1157 | - !Sub "arn:aws:personalize:${AWS::Region}:${AWS::AccountId}:solution-version/*" 1158 | - !Sub "arn:aws:personalize:${AWS::Region}:${AWS::AccountId}:solution/*" 1159 | 1160 | UpdateCampaignLambdaRole: 1161 | Type: AWS::IAM::Role 1162 | Properties: 1163 | AssumeRolePolicyDocument: 1164 | Version: 2012-10-17 1165 | Statement: 1166 | - Effect: Allow 1167 | Principal: 1168 | Service: 1169 | - lambda.amazonaws.com 1170 | Action: 1171 | - 'sts:AssumeRole' 1172 | Path: "/" 1173 | Policies: 1174 | - 1175 | PolicyName: "root" 1176 | PolicyDocument: 1177 | Version: "2012-10-17" 1178 | Statement: 1179 | - 1180 | Effect: "Allow" 1181 | Action: 1182 | - "logs:CreateLogGroup" 1183 | - "logs:CreateLogStream" 1184 | - "logs:PutLogEvents" 1185 | Resource: !Sub "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:*" 1186 | - 1187 | Effect: "Allow" 1188 | Action: 1189 | - "personalize:UpdateCampaign" 1190 | Resource: 1191 | - !Sub "arn:aws:personalize:${AWS::Region}:${AWS::AccountId}:solution-version/*" 1192 | - !Sub "arn:aws:personalize:${AWS::Region}:${AWS::AccountId}:solution/*" 1193 | - !Sub "arn:aws:personalize:${AWS::Region}:${AWS::AccountId}:campaign/*" 1194 | 1195 | UpdateCampaignStatusLambdaRole: 1196 | Type: AWS::IAM::Role 1197 | Properties: 1198 | AssumeRolePolicyDocument: 1199 | Version: 2012-10-17 1200 | Statement: 1201 | - Effect: Allow 1202 | Principal: 1203 | Service: 1204 | - lambda.amazonaws.com 1205 | Action: 1206 | - 'sts:AssumeRole' 1207 | Path: "/" 1208 | Policies: 1209 | - 1210 | PolicyName: "root" 1211 | PolicyDocument: 1212 | Version: "2012-10-17" 1213 | Statement: 1214 | - 1215 | Effect: "Allow" 1216 | Action: 1217 | - "logs:CreateLogGroup" 1218 | - "logs:CreateLogStream" 1219 
| - "logs:PutLogEvents" 1220 | Resource: !Sub "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:*" 1221 | - 1222 | Effect: "Allow" 1223 | Action: 1224 | - "personalize:DescribeCampaign" 1225 | Resource: 1226 | - !Sub "arn:aws:personalize:${AWS::Region}:${AWS::AccountId}:solution-version/*" 1227 | - !Sub "arn:aws:personalize:${AWS::Region}:${AWS::AccountId}:solution/*" 1228 | - !Sub "arn:aws:personalize:${AWS::Region}:${AWS::AccountId}:campaign/*" 1229 | 1230 | 1231 | PersonalizeDatasetImportRole: 1232 | Type: AWS::IAM::Role 1233 | Properties: 1234 | AssumeRolePolicyDocument: 1235 | Version: 2012-10-17 1236 | Statement: 1237 | - Effect: Allow 1238 | Principal: 1239 | Service: 1240 | - personalize.amazonaws.com 1241 | Action: 1242 | - 'sts:AssumeRole' 1243 | Path: "/" 1244 | Policies: 1245 | - 1246 | PolicyName: "root" 1247 | PolicyDocument: 1248 | Version: "2012-10-17" 1249 | Statement: 1250 | - 1251 | Effect: "Allow" 1252 | Action: 1253 | - s3:GetObject 1254 | - s3:ListBucket 1255 | Resource: 1256 | - !Sub "arn:aws:s3:::${DataS3Bucket}" 1257 | - !Sub "arn:aws:s3:::${DataS3Bucket}/*" 1258 | 1259 | 1260 | ### Athena Named Queries 1261 | 1262 | RetrieveInteractionsNamedQuery: 1263 | Type: AWS::Athena::NamedQuery 1264 | DeletionPolicy: Delete 1265 | Properties: 1266 | Database: !Ref PinpointEventDatabase 1267 | Description: "Query to select events from recommendations for Amazon Personalize." 1268 | QueryString: 1269 | !Sub 1270 | | 1271 | WITH evs AS ( 1272 | SELECT 1273 | client.client_id as endpoint_id, 1274 | attributes.campaign_id as campaign_id, 1275 | event_type, 1276 | arrival_timestamp 1277 | FROM event 1278 | WHERE 1279 | ( 1280 | ${InteractionsQueryDateScope} > 0 1281 | AND arrival_timestamp >= date_add('day', -1, CURRENT_DATE) 1282 | AND arrival_timestamp < CURRENT_DATE 1283 | ) OR ( 1284 | ${InteractionsQueryDateScope} = -1 1285 | ) 1286 | AND 1287 | event_type != '_custom.recommender' 1288 | ), 1289 | recs AS ( 1290 | SELECT 1291 | attributes.personalize_user_id as personalize_user_id, 1292 | client.client_id as endpoint_id, 1293 | attributes.campaign_id as campaign_id, 1294 | attributes.item_id as item_id, 1295 | event_type, 1296 | arrival_timestamp 1297 | FROM event 1298 | WHERE 1299 | ( 1300 | ${InteractionsQueryDateScope} > 0 1301 | AND arrival_timestamp >= date_add('day', -1, CURRENT_DATE) 1302 | AND arrival_timestamp < CURRENT_DATE 1303 | ) OR ( 1304 | ${InteractionsQueryDateScope} = -1 1305 | ) 1306 | AND 1307 | event_type = '_custom.recommender' 1308 | ) 1309 | SELECT 1310 | r.personalize_user_id as USER_ID, 1311 | r.item_id AS ITEM_ID, 1312 | b.event_type AS EVENT_TYPE, 1313 | v.EVENT_VALUE, 1314 | CAST(to_unixtime(b.arrival_timestamp) AS BIGINT) AS TIMESTAMP 1315 | FROM endpoint_export a 1316 | INNER JOIN recs r 1317 | ON a.id = r.endpoint_id 1318 | INNER JOIN evs b 1319 | ON a.id = b.endpoint_id AND r.campaign_id = b.campaign_id 1320 | INNER JOIN event_value v 1321 | ON b.event_type = v.event_type 1322 | 1323 | 1324 | 1325 | AthenaCreateTableEventsNamedQuery: 1326 | Type: AWS::Athena::NamedQuery 1327 | DeletionPolicy: Delete 1328 | Properties: 1329 | Database: !Ref PinpointEventDatabase 1330 | Description: "Create the events table from S3." 
1331 | QueryString: 1332 | !Sub 1333 | | 1334 | CREATE EXTERNAL TABLE IF NOT EXISTS `${PinpointEventDatabase}`.event ( 1335 | client struct<client_id:string>, 1336 | attributes struct<campaign_id:string,personalize_user_id:string,item_id:string>, 1337 | event_type string, 1338 | arrival_timestamp timestamp 1339 | ) 1340 | ROW FORMAT SERDE 'org.openx.data.jsonserde.JsonSerDe' 1341 | WITH SERDEPROPERTIES ( 1342 | 'serialization.format' = '1' 1343 | ) LOCATION 's3://${DataS3Bucket}/events/' 1344 | TBLPROPERTIES ('has_encrypted_data'='false'); 1345 | 1346 | AthenaCreateTableEndpointExportsNamedQuery: 1347 | Type: AWS::Athena::NamedQuery 1348 | DeletionPolicy: Delete 1349 | Properties: 1350 | Database: !Ref PinpointEventDatabase 1351 | Description: "Create the Endpoint Exports table from S3." 1352 | QueryString: 1353 | !Sub 1354 | | 1355 | CREATE EXTERNAL TABLE IF NOT EXISTS `${PinpointEventDatabase}`.endpoint_export ( 1356 | id string, 1357 | channeltype string, 1358 | address string, 1359 | endpointstatus string, 1360 | optout string, 1361 | effectivedate string 1362 | ) 1363 | ROW FORMAT SERDE 'org.openx.data.jsonserde.JsonSerDe' 1364 | WITH SERDEPROPERTIES ( 1365 | 'serialization.format' = '1' 1366 | ) LOCATION 's3://${DataS3Bucket}/endpoint_exports/' 1367 | TBLPROPERTIES ('has_encrypted_data'='false'); 1368 | 1369 | AthenaCreateTableEventValueNamedQuery: 1370 | Type: AWS::Athena::NamedQuery 1371 | DeletionPolicy: Delete 1372 | Properties: 1373 | Database: !Ref PinpointEventDatabase 1374 | Description: "Create the Event Values table from S3." 1375 | QueryString: 1376 | !Sub 1377 | | 1378 | CREATE EXTERNAL TABLE IF NOT EXISTS `${PinpointEventDatabase}`.event_value ( 1379 | event_type string, 1380 | event_value double 1381 | ) 1382 | ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 1383 | WITH SERDEPROPERTIES ( 1384 | 'serialization.format' = ',', 1385 | 'field.delim' = ',' 1386 | ) LOCATION 's3://${DataS3Bucket}/event_values/' 1387 | TBLPROPERTIES ('has_encrypted_data'='false', 'skip.header.line.count'='1'); 1388 | 1389 | 1390 | 1391 | 1392 | 1393 | ### Deployment Helper 1394 | SetupQueries: 1395 | Type: Custom::LoadLambda 1396 | Properties: 1397 | ServiceToken: !GetAtt CustomResourceHelper.Arn 1398 | CustomResourceAction: SetupQueries 1399 | 1400 | CustomResourceHelper: 1401 | Type: AWS::Lambda::Function 1402 | DependsOn: 1403 | - PinpointEventDatabase 1404 | - AthenaCreateTableEventsNamedQuery 1405 | - AthenaCreateTableEndpointExportsNamedQuery 1406 | - AthenaCreateTableEventValueNamedQuery 1407 | Properties: 1408 | Environment: 1409 | Variables: 1410 | SOLUTION_BUCKET: !Join 1411 | - "-" 1412 | - - !FindInMap ["SourceCode", "General", "S3Bucket"] 1413 | - !Ref AWS::Region 1414 | SOLUTION_S3KEYPREFIX: !FindInMap ["SourceCode", "General", "KeyPrefix"] 1415 | S3_DATA_BUCKET: !Ref DataS3Bucket 1416 | EVENT_TYPES: !Join [ ",", !Ref EventTypes] 1417 | EVENT_VALUES: !Join [ ",", !Ref EventValues] 1418 | EVENT_NAMED_QUERY: !Ref AthenaCreateTableEventsNamedQuery 1419 | EXPORT_NAMED_QUERY: !Ref AthenaCreateTableEndpointExportsNamedQuery 1420 | VALUE_NAMED_QUERY: !Ref AthenaCreateTableEventValueNamedQuery 1421 | Description: Helps set up the Optimize Amazon Personalize Campaigns using Amazon Pinpoint Events solution. 
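# On a SetupQueries request this function writes event_values/values.csv from the
# EVENT_TYPES/EVENT_VALUES parameters and then runs the three CREATE TABLE named
# queries above; it also answers GenerateUUID requests
# (see source/custom-resource-helper/index.js).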
1422 | Handler: index.handler 1423 | MemorySize: 256 1424 | Role: !GetAtt CustomResourceHelperRole.Arn 1425 | Runtime: nodejs12.x 1426 | Timeout: 300 1427 | Code: 1428 | S3Bucket: !Join ["-", [!FindInMap ["SourceCode", "General", "S3Bucket"], Ref: "AWS::Region" ]] 1429 | S3Key: !Join ["/", [!FindInMap ["SourceCode", "General", "KeyPrefix"], "custom-resource-helper.zip"]] 1430 | 1431 | 1432 | CustomResourceHelperRole: 1433 | Type: AWS::IAM::Role 1434 | Properties: 1435 | AssumeRolePolicyDocument: 1436 | Version: '2012-10-17' 1437 | Statement: 1438 | - Effect: Allow 1439 | Principal: 1440 | Service: 1441 | - lambda.amazonaws.com 1442 | Action: 1443 | - sts:AssumeRole 1444 | Path: / 1445 | Policies: 1446 | - PolicyName: root 1447 | PolicyDocument: 1448 | Version: '2012-10-17' 1449 | Statement: 1450 | - 1451 | Effect: Allow 1452 | Action: 1453 | - "s3:GetObject" 1454 | Resource: 1455 | - !Join 1456 | - "" 1457 | - - "arn:aws:s3:::" 1458 | - !FindInMap ["SourceCode", "General", "S3Bucket"] 1459 | - "-" 1460 | - !Ref AWS::Region 1461 | - !Join 1462 | - "" 1463 | - - "arn:aws:s3:::" 1464 | - !FindInMap ["SourceCode", "General", "S3Bucket"] 1465 | - "-" 1466 | - !Ref AWS::Region 1467 | - "/*" 1468 | - 1469 | Effect: Allow 1470 | Action: 1471 | - "s3:GetBucketLocation" 1472 | - "s3:GetObject" 1473 | - "s3:ListBucket" 1474 | - "s3:ListBucketMultipartUploads" 1475 | - "s3:ListMultipartUploadParts" 1476 | - "s3:AbortMultipartUpload" 1477 | - "s3:CreateBucket" 1478 | - "s3:PutObject" 1479 | Resource: 1480 | - !Sub "arn:aws:s3:::${DataS3Bucket}" 1481 | - !Sub "arn:aws:s3:::${DataS3Bucket}/*" 1482 | - 1483 | Effect: Allow 1484 | Action: 1485 | - logs:CreateLogGroup 1486 | - logs:CreateLogStream 1487 | - logs:PutDestination 1488 | - logs:PutLogEvents 1489 | Resource: 1490 | - !Sub "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:*" 1491 | - 1492 | Effect: "Allow" 1493 | Action: 1494 | - "athena:StartQueryExecution" 1495 | - "athena:GetNamedQuery" 1496 | Resource: !Sub "arn:aws:athena:${AWS::Region}:${AWS::AccountId}:workgroup/*" 1497 | - 1498 | Effect: "Allow" 1499 | Action: 1500 | - "glue:GetDatabase" 1501 | - "glue:GetDatabases" 1502 | - "glue:GetTable" 1503 | - "glue:GetTables" 1504 | - "glue:GetPartition" 1505 | - "glue:GetPartitions" 1506 | - "glue:CreateTable" 1507 | Resource: 1508 | - !Sub "arn:aws:glue:${AWS::Region}:${AWS::AccountId}:table/${PinpointEventDatabase}/*" 1509 | - !Sub "arn:aws:glue:${AWS::Region}:${AWS::AccountId}:database/${PinpointEventDatabase}" 1510 | - !Sub "arn:aws:glue:${AWS::Region}:${AWS::AccountId}:catalog" 1511 | 1512 | Outputs: 1513 | DataS3BucketName: 1514 | Description: S3 Bucket Name where the scoring model, events, and export data are stored 1515 | Value: !Ref DataS3Bucket 1516 | PathToScoringModel: 1517 | Description: Path to the Scoring Model CSV file 1518 | Value: !Sub "s3://${DataS3Bucket}/event_values/values.csv" 1519 | --------------------------------------------------------------------------------