├── .gitignore
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── Config
├── LICENSE
├── LICENSE.txt
├── NOTICE.txt
├── README.md
├── Using AI & ML in Customer Engagement
├── AWS Lambda
│ ├── LexIntegration.zip
│ └── RE_Order_Validation.zip
├── CloudFormation
└── Pinpoint_Import_Segment.csv
├── cloudformation
├── Advanced_Segmentation_S3.yaml
├── Amazon_S3_triggered_import.yaml
├── AppFlow_Salesforce.yaml
├── Combining_import_validate_campaign.yaml
├── Connect_as_a_Channel.yaml
├── Create_campaign.yaml
├── Event_Based_Segmentation.yaml
├── Event_dashboard.yaml
├── External_Attributes.yaml
├── External_Templates.yaml
├── Federated_Segmentation.yaml
├── Phone_number_validate.yaml
├── Pinpoint_Campaing_Journey_Segment_DB
│ ├── Assets
│ │ ├── Architecture-Diagram.PNG
│ │ ├── DynamoDB-Campaign-Journey.PNG
│ │ └── DynamoDB-Segments.PNG
│ ├── CF-PinpointCampaignJourneySegmentDB.yaml
│ └── README.md
├── Pinpoint_Custom_Channel_Attachment
│ ├── README.md
│ ├── assets
│ │ ├── architecture-nn.PNG
│ │ ├── attachment-scenarios.PNG
│ │ ├── custom-channel-cost-usd-nn.PNG
│ │ └── pinpoint-journey.png
│ ├── email_templates
│ │ ├── EmailAttachedFile.html
│ │ ├── EmailNoAttachment.html
│ │ └── EmailS3URL.html
│ ├── functions
│ │ └── custom-channel-attachments
│ │ │ └── index.py
│ └── template.yaml
├── Pinpoint_Event_Processing.yaml
├── Pinpoint_Journey_Copy_Mechanism
│ ├── PinpointJourneyCopingMechanismCF.yaml
│ ├── PinpointJourneyCopyProcess.png
│ └── README.md
├── Pinpoint_SMS_Event_DB
│ ├── README.md
│ ├── SMS-event-db-architecture.PNG
│ └── SMS-events-database.yaml
├── S3_triggered_import.yaml
├── SES_Auto_Reply.yaml
├── SES_Event_DB
│ ├── README.md
│ ├── email-event-db-architecture.PNG
│ ├── example-data
│ │ ├── email_status_view.csv
│ │ └── ses_events_table.csv
│ └── ses-events-db.yaml
├── SES_Event_Processing.yaml
├── SES_Pinpoint_Messages_Queuing
│ ├── LICENSE
│ ├── README.md
│ ├── SES_Pinpoint_Messages_Queuing.yaml
│ ├── aws-lambda-code
│ │ ├── sqs_message_poller.zip
│ │ └── sqs_message_publisher.zip
│ └── images
│ │ ├── ArchDiagram.PNG
│ │ ├── CloudWatch-Dashboard-Metrics.PNG
│ │ └── queuing-logic.PNG
├── SES_Scheduled_Export_VDM_Report
│ └── SES_Scheduled_Export_VDM_Report.yaml
├── SMS-Retry
│ ├── FirehoseEventStreamConfigured.yaml
│ ├── KinesisEventStreamConfigured.yaml
│ ├── NoEventStreamConfigured.yaml
│ └── parent.yaml
├── SMS_S3_drop.yaml
├── Self-managed_Opt_Outs.yaml
├── Simple_CMS.yaml
└── ses_bounce_logging_blog.yml
├── examples
├── CustomChannels
│ ├── Nodejs
│ │ ├── LICENSE
│ │ ├── README.md
│ │ ├── customchannel
│ │ │ ├── .npmignore
│ │ │ ├── app.js
│ │ │ ├── package.json
│ │ │ └── tests
│ │ │ │ └── unit
│ │ │ │ └── test-handler.js
│ │ ├── events
│ │ │ └── event.json
│ │ └── template.yaml
│ └── Python
│ │ ├── LICENSE
│ │ ├── README.md
│ │ ├── customchannel
│ │ ├── __init__.py
│ │ ├── app.py
│ │ └── requirements.txt
│ │ ├── events
│ │ └── event.json
│ │ ├── template.yaml
│ │ └── tests
│ │ └── unit
│ │ ├── __init__.py
│ │ ├── __pycache__
│ │ ├── __init__.cpython-37.pyc
│ │ └── test_handler.cpython-37-pytest-7.1.2.pyc
│ │ └── test_handler.py
├── Global SMS Planning Sheet.xlsx
├── Outbound_calling.json
├── multi-region
│ ├── README.md
│ └── images
│ │ ├── endpoint-imports-api.png
│ │ ├── endpoint-imports-s3-active-active.png
│ │ ├── endpoint-imports-s3-active-passive.png
│ │ ├── picture1.png
│ │ ├── pinpoint-active-active.png
│ │ ├── pinpoint-active-passive.png
│ │ ├── ses-event-based-list-sync.png
│ │ └── ses-list-replication.png
├── sample_data_S3_triggered_import.csv
└── sample_file.csv
├── images
├── Advanced_Segmentation_S3.png
├── AppFlow_Salesforce.png
├── Connect_as_a_Channel.png
├── Create_campaign.png
├── DUE-engageEvents-dashboardauto_x2.jpg
├── EventDB_ArchDiagram.png
├── Event_Based_Segmentation.png
├── External_Attributes.png
├── External_Templates.png
├── Federated_Segmentation.png
├── Message_Archiver.png
├── Phone_number_validate_statemachine.png
├── Pinpoint_Event_Processing.png
├── S3_Data_Lake.png
├── S3_triggered_import.png
├── SES_Auto_Reply.png
├── SES_Event_Processing.png
├── SMS-retry-mechanism.png
├── SMS_S3_drop.png
├── Scheduled-VDM-stats-export.png
├── Self-managed_Opt_Outs.png
└── Simple_CMS.png
├── integrations
└── amplitude-sam
│ ├── .gitignore
│ ├── Architecture.png
│ ├── README.md
│ ├── __tests__
│ ├── integration
│ │ └── test-sns-logger.js
│ └── unit
│ │ └── handlers
│ │ └── sqs-payload-logger.test.js
│ ├── buildspec.yml
│ ├── events
│ └── event-sqs.json
│ ├── package.json
│ ├── src
│ └── handlers
│ │ ├── s3-trigger-processor
│ │ ├── index.js
│ │ └── package.json
│ │ └── sqs-queue-processor
│ │ ├── index.js
│ │ └── package.json
│ └── template.yml
├── lambda
└── Message_Archiver
│ ├── archiver
│ ├── app.js
│ ├── lib
│ │ ├── archiver.js
│ │ ├── archiver.spec.js
│ │ ├── index.js
│ │ ├── index.spec.js
│ │ ├── pinpoint.js
│ │ ├── pinpoint.spec.js
│ │ ├── renderer.js
│ │ ├── renderer.spec.js
│ │ └── test-setup.spec.js
│ ├── package.json
│ └── tests
│ │ └── event.json
│ └── queuer
│ ├── app.js
│ ├── lib
│ ├── index.js
│ ├── processor.js
│ ├── processor.spec.js
│ ├── queuer.js
│ ├── s3Path.js
│ ├── s3Path.spec.js
│ └── test-setup.spec.js
│ ├── package.json
│ └── tests
│ └── event.json
└── solutions
├── README.md
└── architectures
├── amazon-pinpoint-preference-center-arch-diagram.27719954c8638a8569a88b5448edab04d932b953.png
├── digital-user-engagement-events-database-architecture-diagram.b1f4423b5b7e11c22879e599ee5b085b29ea16e9.png
├── iot-channel-using-amazon-pinpoint-architecture.61f861b69135996efd52ec971a9f352d7e2786d7.png
├── location-based-notifications-pinpoint-ra.a89caa813efd5e212ea1295bfec42561c25f32a3.png
├── predictive-segmentation-using-amazon-pinpoint-and-amazon-sagemaker-architecture.b6341ce6d26ce5a90d4984f1060c27d17d3b7f95.png
└── predictive-user-engagement-architecture.7e3bdd3b55e962e74e3c638f68fc0e88beb98c3a.png
/.gitignore:
--------------------------------------------------------------------------------
1 | # compiled output
2 | **/dist
3 | **/global-s3-assets
4 | **/regional-s3-assets
5 | **/open-source
6 | **/*.zip
7 | **/tmp
8 | **/out-tsc
9 |
10 | # dependencies
11 | **/node_modules
12 |
13 | file/
14 |
15 | # e2e
16 | **/e2e/*.js
17 | **/e2e/*.map
18 |
19 | # misc
20 | **/npm-debug.log
21 | **/testem.log
22 | **/package-lock.json
23 | **/.vscode/settings.json
24 |
25 | # System Files
26 | **/.DS_Store
27 | **/.vscode
28 |
29 | images/Thumbs.db
30 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | ## Code of Conduct
2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
4 | opensource-codeofconduct@amazon.com with any additional questions or comments.
5 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing Guidelines
2 |
3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional
4 | documentation, we greatly value feedback and contributions from our community.
5 |
6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary
7 | information to effectively respond to your bug report or contribution.
8 |
9 |
10 | ## Reporting Bugs/Feature Requests
11 |
12 | We welcome you to use the GitHub issue tracker to report bugs or suggest features.
13 |
14 | When filing an issue, please check existing open, or recently closed, issues to make sure somebody else hasn't already
15 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful:
16 |
17 | * A reproducible test case or series of steps
18 | * The version of our code being used
19 | * Any modifications you've made relevant to the bug
20 | * Anything unusual about your environment or deployment
21 |
22 |
23 | ## Contributing via Pull Requests
24 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that:
25 |
26 | 1. You are working against the latest source on the *master* branch.
27 | 2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already.
28 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted.
29 |
30 | To send us a pull request, please:
31 |
32 | 1. Fork the repository.
33 | 2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change.
34 | 3. Ensure local tests pass.
35 | 4. Commit to your fork using clear commit messages.
36 | 5. Send us a pull request, answering any default questions in the pull request interface.
37 | 6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation.
38 |
39 | GitHub provides additional documentation on [forking a repository](https://help.github.com/articles/fork-a-repo/) and
40 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/).
41 |
42 |
43 | ## Finding contributions to work on
44 | Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any 'help wanted' issues is a great place to start.
45 |
46 |
47 | ## Code of Conduct
48 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
49 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
50 | opensource-codeofconduct@amazon.com with any additional questions or comments.
51 |
52 |
53 | ## Security issue notifications
54 | If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue.
55 |
56 |
57 | ## Licensing
58 |
59 | See the [LICENSE](https://github.com/aws-samples/digital-user-engagement-reference-architectures/blob/master/LICENSE.txt) file for our project's licensing. We will ask you to confirm the licensing of your contribution.
60 |
61 | We may ask you to sign a [Contributor License Agreement (CLA)](http://en.wikipedia.org/wiki/Contributor_License_Agreement) for larger changes.
62 |
--------------------------------------------------------------------------------
/Config:
--------------------------------------------------------------------------------
1 | package.Digital-user-engagement-reference-architectures = {
2 | interfaces = (1.0);
3 |
4 | # Use NoOpBuild. See https://w.amazon.com/index.php/BrazilBuildSystem/NoOpBuild
5 | build-system = no-op;
6 | build-tools = {
7 | 1.0 = {
8 | NoOpBuild = 1.0;
9 | };
10 | };
11 |
12 | # Use runtime-dependencies for when you want to bring in additional
13 | # packages when deploying.
14 | # Use dependencies instead if you intend for these dependencies to
15 | # be exported to other packages that build against you.
16 | dependencies = {
17 | 1.0 = {
18 | };
19 | };
20 |
21 | runtime-dependencies = {
22 | 1.0 = {
23 | };
24 | };
25 |
26 | };
27 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy of
4 | this software and associated documentation files (the "Software"), to deal in
5 | the Software without restriction, including without limitation the rights to
6 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
7 | the Software, and to permit persons to whom the Software is furnished to do so.
8 |
9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
10 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
11 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
12 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
13 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
14 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
15 |
--------------------------------------------------------------------------------
/NOTICE.txt:
--------------------------------------------------------------------------------
1 | Optimize Amazon Personalize Campaigns using Amazon Pinpoint Events
2 | Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
3 | Licensed under the Apache License Version 2.0 (the "License"). You may not use this file except
4 | in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/
5 | or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
6 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or implied. See the License for the
7 | specific language governing permissions and limitations under the License.
8 |
9 | **********************
10 | THIRD PARTY COMPONENTS
11 | **********************
12 | This software includes third party software subject to the following copyrights:
13 |
14 | AWS SDK under the Apache License Version 2.0
15 |
--------------------------------------------------------------------------------
/Using AI & ML in Customer Engagement/AWS Lambda/LexIntegration.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/Using AI & ML in Customer Engagement/AWS Lambda/LexIntegration.zip
--------------------------------------------------------------------------------
/Using AI & ML in Customer Engagement/AWS Lambda/RE_Order_Validation.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/Using AI & ML in Customer Engagement/AWS Lambda/RE_Order_Validation.zip
--------------------------------------------------------------------------------
/Using AI & ML in Customer Engagement/Pinpoint_Import_Segment.csv:
--------------------------------------------------------------------------------
1 | Id,ChannelType,Address,User.UserId
2 | 111,SMS,+1214......2,user1
3 | 222,Email,abc@gmail.com,user1
4 |
--------------------------------------------------------------------------------
/cloudformation/Connect_as_a_Channel.yaml:
--------------------------------------------------------------------------------
1 | AWSTemplateFormatVersion: 2010-09-09
2 | Description: Amazon Connect as an Amazon Pinpoint Channel
3 |
4 | Parameters:
5 | PinpointProjectId:
6 | Type: String
7 | Description: Amazon Pinpoint Project ID if one already exists, blank to create one
8 | PinpointProjectName:
9 | Type: String
10 | Default: "My Pinpoint Project"
11 | Description: "If no PinpointProjectId provided, name to be used to create the Pinpoint project"
12 | ConnectContactFlowId:
13 | Type: String
14 | Description: Amazon Connect Contact Flow ID to use for the outbound call
15 | ConnectInstanceId:
16 | Type: String
17 | Description: Amazon Connect Instance ID to use for the outbound call
18 | ConnectQueueId:
19 | Type: String
20 | Description: Amazon Connect Queue ID to use for the outbound call
21 |
22 | Conditions:
23 | NeedsPinpointProjectId: !Equals
24 | - ''
25 | - !Ref PinpointProjectId
26 |
27 | Resources:
28 | PinpointApplication:
29 | Type: AWS::Pinpoint::App
30 | Condition: NeedsPinpointProjectId
31 | Properties:
32 | Name: !Ref PinpointProjectName
33 |
34 | PinpointApplicationSettings:
35 | Type: AWS::Pinpoint::ApplicationSettings
36 | Properties:
37 | ApplicationId: !If
38 | - NeedsPinpointProjectId
39 | - !Ref PinpointApplication
40 | - !Ref PinpointProjectId
41 | CampaignHook:
42 | LambdaFunctionName: !GetAtt DeliveryCampaignHookLambdaFunction.Arn
43 | Mode: 'DELIVERY'
44 | DependsOn: LambdaPermission
45 |
46 | LambdaPermission:
47 | Type: AWS::Lambda::Permission
48 | Properties:
49 | Action: 'lambda:InvokeFunction'
50 | FunctionName: !GetAtt DeliveryCampaignHookLambdaFunction.Arn
51 | Principal: !Sub 'pinpoint.${AWS::Region}.amazonaws.com'
52 | SourceArn:
53 | !Sub
54 | - 'arn:aws:mobiletargeting:${AWS::Region}:${AWS::AccountId}:/apps/${ProjectId}*'
55 | - {ProjectId: !If [NeedsPinpointProjectId, !Ref PinpointApplication, !Ref PinpointProjectId] }
56 |
57 | DeliveryCampaignHookLambdaFunction:
58 | Type: AWS::Lambda::Function
59 | Properties:
60 | Handler: index.lambda_handler
61 | Role: !GetAtt DeliveryCampaignHookLambdaFunctionRole.Arn
62 | Runtime: "python3.7"
63 | Timeout: 60
64 | MemorySize: 1024
65 | Environment:
66 | Variables:
67 | CONNECT_CONTACT_FLOW_ID: !Ref ConnectContactFlowId
68 | CONNECT_INSTANCE_ID: !Ref ConnectInstanceId
69 | CONNECT_QUEUE_ID: !Ref ConnectQueueId
70 | Code:
71 | ZipFile: |
72 | import json
73 | import logging
74 | import boto3
75 | import os
76 |
77 | connect = boto3.client('connect')
78 |
79 | def lambda_handler(event, context):
80 | logging.getLogger().setLevel('DEBUG')
81 | logging.debug(json.dumps(event))
82 |
83 | message = event['Message']['smsmessage']['body']
84 |
85 | # Loop over each incoming Endpoint
86 | for endpointId,endpoint in event['Endpoints'].items():
87 |
88 | if endpoint['ChannelType'] == 'SMS':
89 |
90 | # initiate outbound voice call
91 | response = connect.start_outbound_voice_contact(
92 | DestinationPhoneNumber=endpoint['Address'],
93 | ContactFlowId=os.environ['CONNECT_CONTACT_FLOW_ID'],
94 | InstanceId=os.environ['CONNECT_INSTANCE_ID'],
95 | QueueId=os.environ['CONNECT_QUEUE_ID'],
96 | Attributes={
97 | 'Message': message
98 | }
99 | )
100 |
101 | logging.info(json.dumps(response))
102 |
103 |
104 | DeliveryCampaignHookLambdaFunctionRole:
105 | Type: AWS::IAM::Role
106 | Properties:
107 | AssumeRolePolicyDocument:
108 | Version: "2012-10-17"
109 | Statement:
110 | -
111 | Effect: "Allow"
112 | Principal:
113 | Service:
114 | - "lambda.amazonaws.com"
115 | Action:
116 | - "sts:AssumeRole"
117 | Policies:
118 | -
119 | PolicyName: "LambdaExecutionPolicy"
120 | PolicyDocument:
121 | Version: "2012-10-17"
122 | Statement:
123 | -
124 | Effect: "Allow"
125 | Action:
126 | - "connect:StartOutboundVoiceContact"
127 | Resource: !Sub "arn:aws:connect:${AWS::Region}:${AWS::AccountId}:instance/${ConnectInstanceId}/contact/*"
128 | -
129 | Effect: "Allow"
130 | Action:
131 | - "logs:CreateLogGroup"
132 | - "logs:CreateLogStream"
133 | - "logs:PutLogEvents"
134 | Resource: !Sub "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:*"
135 |
--------------------------------------------------------------------------------
/cloudformation/Event_Based_Segmentation.yaml:
--------------------------------------------------------------------------------
1 | AWSTemplateFormatVersion: 2010-09-09
2 | Description: Event Based Segmentation
3 |
4 | Parameters:
5 | PinpointProjectId:
6 | Type: String
7 | Description: Amazon Pinpoint Project ID if one already exists, blank to create one
8 | PinpointProjectName:
9 | Type: String
10 | Default: "My Pinpoint Project"
11 | Description: "If no PinpointProjectId provided, name to be used to create the Pinpoint project"
12 |
13 | Conditions:
14 | NeedsPinpointProjectId: !Equals
15 | - ''
16 | - !Ref PinpointProjectId
17 |
18 | Resources:
19 | PinpointApplication:
20 | Type: AWS::Pinpoint::App
21 | Condition: NeedsPinpointProjectId
22 | Properties:
23 | Name: !Ref PinpointProjectName
24 |
25 |
26 | PinpointEventStream:
27 | Type: AWS::Pinpoint::EventStream
28 | Properties:
29 | ApplicationId: !If
30 | - NeedsPinpointProjectId
31 | - !Ref PinpointApplication
32 | - !Ref PinpointProjectId
33 | DestinationStreamArn: !GetAtt PinpointEventKinesis.Arn
34 | RoleArn: !GetAtt PinpointKinesisStreamRole.Arn
35 |
36 | PinpointEventKinesis:
37 | Type: AWS::Kinesis::Stream
38 | Properties:
39 | ShardCount: 1
40 | StreamEncryption:
41 | EncryptionType: KMS
42 | KeyId: alias/aws/kinesis
43 |
44 |
45 |
46 | PinpointKinesisStreamRole:
47 | Type: AWS::IAM::Role
48 | Properties:
49 | AssumeRolePolicyDocument:
50 | Version: 2012-10-17
51 | Statement:
52 | - Effect: Allow
53 | Principal:
54 | Service:
55 | - pinpoint.amazonaws.com
56 | Action:
57 | - 'sts:AssumeRole'
58 | Path: "/"
59 | Policies:
60 | -
61 | PolicyName: "root"
62 | PolicyDocument:
63 | Version: "2012-10-17"
64 | Statement:
65 | -
66 | Effect: "Allow"
67 | Action:
68 | - "kinesis:PutRecords"
69 | - "kinesis:DescribeStream"
70 | Resource: !GetAtt PinpointEventKinesis.Arn
71 |
72 | EventProcessingLambda:
73 | Type: AWS::Lambda::Function
74 | Properties:
75 | Handler: index.lambda_handler
76 | Role: !GetAtt EventProcessingLambdaRole.Arn
77 | Runtime: "python3.7"
78 | Timeout: 60
79 | Environment:
80 | Variables:
81 | LOG_LEVEL: "INFO"
82 | PINPOINT_PROJECT_ID: !If
83 | - NeedsPinpointProjectId
84 | - !Ref PinpointApplication
85 | - !Ref PinpointProjectId
86 | Code:
87 | ZipFile: |
88 | import boto3
89 | import logging
90 | import os
91 | import json
92 | import base64
93 |
94 | pinpoint = boto3.client('pinpoint')
95 |
96 | def lambda_handler(event, context):
97 |
98 | global log_level
99 | log_level = str(os.environ.get('LOG_LEVEL')).upper()
100 | if log_level not in [
101 | 'DEBUG', 'INFO',
102 | 'WARNING', 'ERROR',
103 | 'CRITICAL'
104 | ]:
105 | log_level = 'ERROR'
106 | logging.getLogger().setLevel(log_level)
107 |
108 | logging.info(event)
109 |
110 | for record in event['Records']:
111 | #Kinesis data is base64 encoded so decode here
112 | try:
113 | payload=base64.b64decode(record["kinesis"]["data"])
114 |
115 | logging.info('Found Event: %s', payload)
116 |
117 | pinpoint_event = json.loads(payload)
118 |
119 | if pinpoint_event['event_type'] == '_email.open':
120 |
121 | # We have an email open event, update the endpoint with the attribute "opened_email" = "true"
122 |
123 | pinpoint.update_endpoint(
124 | ApplicationId=os.environ['PINPOINT_PROJECT_ID'],
125 | EndpointId=pinpoint_event['client']['client_id'], #this is true for campaign sends
126 | EndpointRequest={
127 | 'Attributes': {
128 | 'opened_email': [
129 | 'true'
130 | ]
131 | }
132 | }
133 | )
134 |
135 | elif pinpoint_event['event_type'] == '_custom.registered_for_webinar5':
136 |
137 | # We have a custom event via the Pinpoint PutEvents API, update attribute
138 |
139 | pinpoint.update_endpoint(
140 | ApplicationId=os.environ['PINPOINT_PROJECT_ID'],
141 | EndpointId=pinpoint_event['client']['client_id'],
142 | EndpointRequest={
143 | 'Attributes': {
144 | 'webinar_registration': [
145 | 'webinar5'
146 | ]
147 | }
148 | }
149 | )
150 |
151 |
152 | except Exception as e:
153 | logging.error('Received Error while processing Kinesis record: %s', e)
154 |
155 |
156 |
157 |
158 | EventProcessingLambdaRole:
159 | Type: AWS::IAM::Role
160 | Properties:
161 | AssumeRolePolicyDocument:
162 | Version: 2012-10-17
163 | Statement:
164 | - Effect: Allow
165 | Principal:
166 | Service:
167 | - lambda.amazonaws.com
168 | Action:
169 | - "sts:AssumeRole"
170 | Path: "/"
171 | ManagedPolicyArns:
172 | - "arn:aws:iam::aws:policy/service-role/AWSLambdaKinesisExecutionRole"
173 | Policies:
174 | -
175 | PolicyName: "LambdaExecutionPolicy"
176 | PolicyDocument:
177 | Version: "2012-10-17"
178 | Statement:
179 | -
180 | Effect: "Allow"
181 | Action:
182 | - "mobiletargeting:UpdateEndpoint"
183 | Resource: !Sub
184 | - 'arn:aws:mobiletargeting:${AWS::Region}:${AWS::AccountId}:apps/${ProjectId}*'
185 | - {ProjectId: !If [NeedsPinpointProjectId, !Ref PinpointApplication, !Ref PinpointProjectId] }
186 | -
187 | Effect: "Allow"
188 | Action:
189 | - "logs:CreateLogGroup"
190 | - "logs:CreateLogStream"
191 | - "logs:PutLogEvents"
192 | Resource: !Sub "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:*"
193 |
194 | EventProcessingLambdaTrigger:
195 | Type: AWS::Lambda::EventSourceMapping
196 | Properties:
197 | Enabled: true
198 | EventSourceArn: !GetAtt PinpointEventKinesis.Arn
199 | FunctionName: !GetAtt EventProcessingLambda.Arn
200 | StartingPosition: 'TRIM_HORIZON'
201 |
--------------------------------------------------------------------------------
/cloudformation/External_Attributes.yaml:
--------------------------------------------------------------------------------
1 | AWSTemplateFormatVersion: 2010-09-09
2 | Description: Send-Time Amazon Pinpoint Campaign Attributes
3 |
4 | Parameters:
5 | PinpointProjectId:
6 | Type: String
7 | Description: Amazon Pinpoint Project ID if one already exists, blank to create one
8 | PinpointProjectName:
9 | Type: String
10 | Default: "My Pinpoint Project"
11 | Description: "If no PinpointProjectId provided, name to be used to create the Pinpoint project"
12 |
13 | Conditions:
14 | NeedsPinpointProjectId: !Equals
15 | - ''
16 | - !Ref PinpointProjectId
17 |
18 | Resources:
19 | PinpointApplication:
20 | Type: AWS::Pinpoint::App
21 | Condition: NeedsPinpointProjectId
22 | Properties:
23 | Name: !Ref PinpointProjectName
24 |
25 | PinpointApplicationSettings:
26 | Type: AWS::Pinpoint::ApplicationSettings
27 | Properties:
28 | ApplicationId: !If
29 | - NeedsPinpointProjectId
30 | - !Ref PinpointApplication
31 | - !Ref PinpointProjectId
32 | CampaignHook:
33 | LambdaFunctionName: !GetAtt FilterCampaignHookLambdaFunction.Arn
34 | Mode: 'FILTER'
35 | DependsOn: LambdaPermission
36 |
37 | LambdaPermission:
38 | Type: AWS::Lambda::Permission
39 | Properties:
40 | Action: 'lambda:InvokeFunction'
41 | FunctionName: !GetAtt FilterCampaignHookLambdaFunction.Arn
42 | Principal: !Sub 'pinpoint.${AWS::Region}.amazonaws.com'
43 | SourceArn:
44 | !Sub
45 | - 'arn:aws:mobiletargeting:${AWS::Region}:${AWS::AccountId}:/apps/${ProjectId}*'
46 | - {ProjectId: !If [NeedsPinpointProjectId, !Ref PinpointApplication, !Ref PinpointProjectId] }
47 |
48 | FilterCampaignHookLambdaFunction:
49 | Type: AWS::Lambda::Function
50 | Properties:
51 | Handler: index.lambda_handler
52 | Role: !GetAtt FilterCampaignHookLambdaFunctionRole.Arn
53 | Runtime: "python3.7"
54 | Timeout: 60
55 | MemorySize: 1024
56 | Code:
57 | ZipFile: |
58 | import json
59 | import logging
60 |
61 | def lambda_handler(event, context):
62 | logging.getLogger().setLevel('DEBUG')
63 | logging.debug(json.dumps(event))
64 |
65 | # Loop over each incoming Endpoint
66 | for endpointId,endpoint in event['Endpoints'].items():
67 |
68 | # Mock fetch an offer for the current endpoint
69 | data = mock_call_to_get_offer(endpointId)
70 |
71 | # Add the data to the Endpoint's Attributes
72 | endpoint['Attributes']['Offer'] = [data]
73 |
74 | # Return the Mutated Endpoints
75 | return event['Endpoints']
76 |
77 |
78 | def mock_call_to_get_offer(endpoint_id):
79 |
80 | #TODO - call a webservice, look up a value from a database, call a CRM API to retrieve an offer for this endpoint
81 | return 'some offer for endpointId: ' + endpoint_id;
82 |
83 |
84 | FilterCampaignHookLambdaFunctionRole:
85 | Type: AWS::IAM::Role
86 | Properties:
87 | AssumeRolePolicyDocument:
88 | Version: "2012-10-17"
89 | Statement:
90 | -
91 | Effect: "Allow"
92 | Principal:
93 | Service:
94 | - "lambda.amazonaws.com"
95 | Action:
96 | - "sts:AssumeRole"
97 | Policies:
98 | -
99 | PolicyName: "LambdaExecutionPolicy"
100 | PolicyDocument:
101 | Version: "2012-10-17"
102 | Statement:
103 | -
104 | Effect: "Allow"
105 | Action:
106 | - "logs:CreateLogGroup"
107 | - "logs:CreateLogStream"
108 | - "logs:PutLogEvents"
109 | Resource: !Sub "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:*"
110 |
--------------------------------------------------------------------------------
/cloudformation/External_Templates.yaml:
--------------------------------------------------------------------------------
1 | AWSTemplateFormatVersion: 2010-09-09
2 | Description: External Amazon Pinpoint Campaign Templates
3 |
4 | Parameters:
5 | PinpointProjectId:
6 | Type: String
7 | Description: Amazon Pinpoint Project ID if one already exists, blank to create one
8 | PinpointProjectName:
9 | Type: String
10 | Default: "My Pinpoint Project"
11 | Description: "If no PinpointProjectId provided, name to be used to create the Pinpoint project"
12 |
13 | Conditions:
14 | NeedsPinpointProjectId: !Equals
15 | - ''
16 | - !Ref PinpointProjectId
17 |
18 | Resources:
19 | PinpointApplication:
20 | Type: AWS::Pinpoint::App
21 | Condition: NeedsPinpointProjectId
22 | Properties:
23 | Name: !Ref PinpointProjectName
24 |
25 | PinpointApplicationSettings:
26 | Type: AWS::Pinpoint::ApplicationSettings
27 | Properties:
28 | ApplicationId: !If
29 | - NeedsPinpointProjectId
30 | - !Ref PinpointApplication
31 | - !Ref PinpointProjectId
32 | CampaignHook:
33 | LambdaFunctionName: !GetAtt FilterCampaignHookLambdaFunction.Arn
34 | Mode: 'FILTER'
35 | DependsOn: LambdaPermission
36 |
37 | LambdaPermission:
38 | Type: AWS::Lambda::Permission
39 | Properties:
40 | Action: 'lambda:InvokeFunction'
41 | FunctionName: !GetAtt FilterCampaignHookLambdaFunction.Arn
42 | Principal: !Sub 'pinpoint.${AWS::Region}.amazonaws.com'
43 | SourceArn:
44 | !Sub
45 | - 'arn:aws:mobiletargeting:${AWS::Region}:${AWS::AccountId}:/apps/${ProjectId}*'
46 | - {ProjectId: !If [NeedsPinpointProjectId, !Ref PinpointApplication, !Ref PinpointProjectId] }
47 |
48 | FilterCampaignHookLambdaFunction:
49 | Type: AWS::Lambda::Function
50 | Properties:
51 | Handler: index.lambda_handler
52 | Role: !GetAtt FilterCampaignHookLambdaFunctionRole.Arn
53 | Runtime: "python3.7"
54 | Timeout: 60
55 | MemorySize: 1024
56 | Code:
57 | ZipFile: |
58 | import json
59 | import logging
60 |
61 | def lambda_handler(event, context):
62 | logging.getLogger().setLevel('DEBUG')
63 | logging.debug(json.dumps(event))
64 |
65 | # Loop over each incoming Endpoint
66 | for endpointId,endpoint in event['Endpoints'].items():
67 |
68 | # Mock fetch html and subject
69 | subject = mock_call_to_get_subjectline(endpointId)
70 | html = mock_call_to_get_htmlbody(endpointId)
71 |
72 | # Add subject and html to the Endpoint's Attributes
73 | endpoint['Attributes']['subject'] = [subject]
74 | endpoint['Attributes']['html'] = [html]
75 |
76 | # Return the Mutated Endpoints
77 | return event['Endpoints']
78 |
79 |
80 | def mock_call_to_get_subjectline(endpoint_id):
81 | #TODO - call a webservice, content management service, S3, to get a subject line for the endpoint
82 | return 'Here is my subject line for endpointId: ' + endpoint_id
83 |
84 | def mock_call_to_get_htmlbody(endpoint_id):
85 | #TODO - call a webservice, content management service, S3, to get the html body for the endpoint
86 | return '<html><body>Here is my html for EndpointID: ' + endpoint_id + '</body></html>';
87 |
88 |
89 | FilterCampaignHookLambdaFunctionRole:
90 | Type: AWS::IAM::Role
91 | Properties:
92 | AssumeRolePolicyDocument:
93 | Version: "2012-10-17"
94 | Statement:
95 | -
96 | Effect: "Allow"
97 | Principal:
98 | Service:
99 | - "lambda.amazonaws.com"
100 | Action:
101 | - "sts:AssumeRole"
102 | Policies:
103 | -
104 | PolicyName: "LambdaExecutionPolicy"
105 | PolicyDocument:
106 | Version: "2012-10-17"
107 | Statement:
108 | -
109 | Effect: "Allow"
110 | Action:
111 | - "logs:CreateLogGroup"
112 | - "logs:CreateLogStream"
113 | - "logs:PutLogEvents"
114 | Resource: !Sub "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:*"
115 |
--------------------------------------------------------------------------------
/cloudformation/Pinpoint_Campaing_Journey_Segment_DB/Assets/Architecture-Diagram.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/cloudformation/Pinpoint_Campaing_Journey_Segment_DB/Assets/Architecture-Diagram.PNG
--------------------------------------------------------------------------------
/cloudformation/Pinpoint_Campaing_Journey_Segment_DB/Assets/DynamoDB-Campaign-Journey.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/cloudformation/Pinpoint_Campaing_Journey_Segment_DB/Assets/DynamoDB-Campaign-Journey.PNG
--------------------------------------------------------------------------------
/cloudformation/Pinpoint_Campaing_Journey_Segment_DB/Assets/DynamoDB-Segments.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/cloudformation/Pinpoint_Campaing_Journey_Segment_DB/Assets/DynamoDB-Segments.PNG
--------------------------------------------------------------------------------
/cloudformation/Pinpoint_Campaing_Journey_Segment_DB/README.md:
--------------------------------------------------------------------------------
1 | # Amazon Pinpoint Campaigns/Journeys/Segments DB
2 | Amazon Pinpoint [event streaming capability](https://docs.aws.amazon.com/pinpoint/latest/developerguide/event-streams.html), utilizes [Amazon Kinesis Firehose or Data Streams](https://aws.amazon.com/kinesis/) to stream the raw customer engagement events to an AWS service for further processing or storage. Amazon Pinpoint customers can use these data points to create custom dashboards using [Amazon QuickSight](https://aws.amazon.com/quicksight/) or a 3rd party business intelligence tool.
3 |
4 | The Amazon Pinpoint streamed events include the Campaign, Journey and Segment Ids but they don't include their names making it challenging for users to identify them when building a custom report.
5 |
6 | ### Solution & Architecture
7 | This solution deploys a series of AWS services using [AWS CloudFormation](https://aws.amazon.com/cloudformation/) creating two [Amazon DynamoDB tables](https://aws.amazon.com/dynamodb/), one stores the mapping between Campaign/Journey Ids and their names while the second one stores the mapping between Segment Ids and their names. To query the Amazon DynamoDB data with [Amazon Athena](https://aws.amazon.com/athena/), you can use [this connector](https://docs.aws.amazon.com/athena/latest/ug/connectors-dynamodb.html).
8 |
9 | **Note**: The solution will create the mappings for all existing **imported** and **dynamic** segments. However, only new **dynamic** segments will be added in the segment mapping table and not imported.
10 |
11 | [AWS CloudTrail](https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-user-guide.html) logs are used to process Amazon Pinpoint management events regarding Campaigns, Journeys and Segments. These events include Campaigns/Journeys/Segments created/deleted or updated in 5 minutes intervals (fixed interval from AWS CloudTrail).
12 |
13 | **Note**: You can create up to five trails per AWS Region. A trail that logs activity from all Regions counts as one trail per Region. Read more on [Create multiple trails](https://docs.aws.amazon.com/awscloudtrail/latest/userguide/create-multiple-trails.html).
14 |
15 | The AWS CloudTrail log files are processed using [AWS Lambda](https://aws.amazon.com/lambda/), which gets the Campaign/Journey names using Amazon Pinpoint's respective API operations and accordingly creates or updates the respective items in Amazon DynamoDB. See below the Amazon DynamoDB tables' preview where **id** is the **Key** and refers to the Amazon Pinpoint Campaign, Journey or Segment Id:
16 |
17 | **Campaigns & Journeys table:**
18 | Fields:
19 | 1. **Id:** The Campaign/Journey Id.
20 | 2. **Deleted:** False if it still exists and True if it has been deleted.
21 | 3. **Event_name:** If it's not an event based Campaign/Journey the value will be **null**.
22 | 4. **Name:** The Campaign/Journey name.
23 | 5. **Segment_id:** The segment_id used for this Campaign/Journey. A Campaign always has a segment whereas a journey might have **null**.
24 | 6. **Type:** It's either **campaign** or **journey**.
25 |
26 | 
27 |
28 | **Segments table:**
29 | Fields:
30 | 1. **Id:** The segment Id.
31 | 2. **Deleted:** False if it still exists and True if it has been deleted.
32 | 3. **Name:** The segment name.
33 |
34 | 
35 |
36 | Upon deployment of the AWS CloudFormation template, a custom resource (AWS Lambda) performs the following two actions:
37 | 1. Creates Amazon DynamoDB items for all existing Amazon Pinpoint Campaigns, Journeys and Segments.
38 | 2. Creates an Amazon S3 event notification so upon creation of a AWS CloudTrail log file, an AWS Lambda function gets invoked for processing.
39 |
40 | 
41 |
42 | ### Solution logic
43 | 1. Journeys whose state is **DRAFT** aren't being created in the Amazon DynamoDB table.
44 | 2. If a Campaign, Journey or Segment gets created and deleted in under 5 minutes, it won't show on the Amazon DynamoDB tables. This is because AWS CloudTrail generates logs in 5 minute intervals and the AWS Lambda cannot perform the respective GET API operation to the Amazon Pinpoint resource that got deleted.
45 | 3. When deleting a Campaign, Journey or Segment, the Amazon DynamoDB items get updated to reflect the new status under the attribute **Deleted**.
46 | 4. AWS CloudTrail logs are stored in an Amazon S3 bucket and they are automatically deleted after 1 day. You can change this by configuring the bucket's [lifecycle](https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-lifecycle-mgmt.html).
47 |
48 | ### Prerequisites
49 | 1) [AWS account](https://aws.amazon.com/premiumsupport/knowledge-center/create-and-activate-aws-account/).
50 | 2) An Amazon Pinpoint project – [How to create an Amazon Pinpoint project](https://catalog.workshops.aws/amazon-pinpoint-customer-experience/en-US/prerequisites/create-a-project).
51 | 3) Deploy using AWS CloudFormation the [digital user engagement events database](https://github.com/awslabs/digital-user-engagement-events-database).
52 | 4) Deploy using AWS CloudFormation the [Amazon Athena data source connector](https://docs.aws.amazon.com/athena/latest/ug/connect-to-a-data-source-lambda-deploying.html) for [Amazon DynamoDB](https://docs.aws.amazon.com/athena/latest/ug/connectors-dynamodb.html). You can skip this step if you already have an Amazon DynamoDB - Amazon Athena data source connector.
53 |
54 | ### Implementation
55 | 1. Download the [AWS CloudFormation template](https://github.com/Pioank/pinpoint-campaign-journey-db/blob/main/CF-PinpointCampaignJourneyDB.yaml) and navigate to the AWS CloudFormation console in the AWS region you want to deploy the solution.
56 | 2. Select **Create stack and With new resources**. Choose **Template is ready** as **Prerequisite – Prepare template** and **Upload a template file** as **Specify template**. Upload the template downloaded in **step 1**.
57 | 3. Fill the **AWS CloudFormation parameters**. For **PinpointProjectId** copy and paste your Amazon Pinpoint's project Id.
58 | 4. Once the AWS CloudFormation stack has been successfully deployed, navigate to Amazon Athena, for **Data source** select the data source connector created for Amazon DynamoDB (see Prerequisites step 4). From there select the table with name structure **--** and perform any queries you want using Amazon Athena's query editor. **Note:** If you don't have any pre-existing Amazon Pinpoint Campaigns/Journeys in your Amazon Pinpoint project, first create some and wait for 5 - 10 minutes otherwise the Amazon Athena table will be empty.
59 |
--------------------------------------------------------------------------------
/cloudformation/Pinpoint_Custom_Channel_Attachment/assets/architecture-nn.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/cloudformation/Pinpoint_Custom_Channel_Attachment/assets/architecture-nn.PNG
--------------------------------------------------------------------------------
/cloudformation/Pinpoint_Custom_Channel_Attachment/assets/attachment-scenarios.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/cloudformation/Pinpoint_Custom_Channel_Attachment/assets/attachment-scenarios.PNG
--------------------------------------------------------------------------------
/cloudformation/Pinpoint_Custom_Channel_Attachment/assets/custom-channel-cost-usd-nn.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/cloudformation/Pinpoint_Custom_Channel_Attachment/assets/custom-channel-cost-usd-nn.PNG
--------------------------------------------------------------------------------
/cloudformation/Pinpoint_Custom_Channel_Attachment/assets/pinpoint-journey.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/cloudformation/Pinpoint_Custom_Channel_Attachment/assets/pinpoint-journey.png
--------------------------------------------------------------------------------
/cloudformation/Pinpoint_Custom_Channel_Attachment/functions/custom-channel-attachments/index.py:
--------------------------------------------------------------------------------
1 | import os
2 | import json
3 | import boto3
4 | from email.mime.multipart import MIMEMultipart
5 | from email.mime.text import MIMEText
6 | from email.mime.application import MIMEApplication
7 | from botocore.exceptions import ClientError
8 | pinpoint_client = boto3.client('pinpoint')
9 | s3_client = boto3.client('s3')
10 | ses_client = boto3.client('ses')
11 | application_id = os.environ['PINPOINT_APP_ID']
12 | s3_bucket_name = os.environ['BUCKET_NAME']
13 | s3_expiration = os.environ['EXPIRATION']
14 | filetype = os.environ['FILE_TYPE']
15 |
def lambda_handler(event, context):
    """Entry point for the Pinpoint custom channel.

    event['Data'] is a comma-separated configuration string:
    FriendlyName, EmailTemplate, SenderEmail, AttachmentMode (NO/ONEPER/ONEALL),
    and — only when attachments are enabled — AttachmentPrefix and
    AttachmentType (URL/FILE). Dispatches to the matching send_* helper.
    """
    print(event)
    config = event['Data'].split(",")
    friendly_name = str(config[0])
    template_name = config[1]
    endpoint_map = event['Endpoints']
    journey_id = str(event['JourneyId'])

    # Build the From address, optionally with a friendly display name.
    if friendly_name == "NA":
        sender = str(config[2])
    else:
        sender = friendly_name + " <" + str(config[2]) + ">"

    attachment_mode = str(config[3])  # Values NO, ONEPER, ONEALL
    if attachment_mode == "NO":
        send_email(sender, template_name, endpoint_map, journey_id)
        return

    # Attachment fields are only present when attachments are enabled.
    prefix = str(config[4])    # S3 file prefix
    delivery = str(config[5])  # URL => presigned link; FILE => real attachment
    if delivery == "URL":
        send_email_s3URL(sender, template_name, s3_bucket_name, prefix,
                         s3_expiration, filetype, endpoint_map, journey_id,
                         attachment_mode)
    else:
        send_email_attach(s3_bucket_name, prefix, endpoint_map, filetype,
                          sender, template_name, journey_id, attachment_mode)
39 |
# Email with no attachment
def send_email(SenderAddress,EmailTemplate,endpoints,JourneyId):
    """Send the latest version of the Pinpoint email template to every
    endpoint — one send_messages call per endpoint — tagging each message
    with the JourneyId as TraceId."""
    for endpoint_id in endpoints:
        message_request = {
            'Endpoints': {
                endpoint_id: {}
            },
            'MessageConfiguration': {
                'EmailMessage': {
                    'FromAddress': SenderAddress
                }
            },
            'TemplateConfiguration': {
                'EmailTemplate': {
                    'Name': EmailTemplate,
                    'Version': 'latest'
                }
            },
            'TraceId': JourneyId
        }
        try:
            response = pinpoint_client.send_messages(
                ApplicationId=application_id,
                MessageRequest=message_request
            )
        except ClientError as e:
            # Log and continue with the remaining endpoints.
            print(e.response['Error']['Message'])
        else:
            print("Email sent!")
            print(response)
69 |
# Email with S3 presigned URL
def send_email_s3URL(SenderAddress,EmailTemplate,s3_bucket_name,AttachmentPrefix,s3_expiration,filetype,endpoints,JourneyId,Attachment):
    """Send the Pinpoint email template to each endpoint, substituting a
    presigned S3 URL under the template variable "S3URL".

    Attachment == "ONEPER": one object per endpoint, keyed
    "<prefix>_<endpointId><filetype>"; any other value: a single shared
    object keyed "<prefix><filetype>".
    """
    # Bug fix: ExpiresIn must be an int, but the value arrives as a string
    # from the EXPIRATION environment variable — cast it once up front.
    expires_in = int(s3_expiration)
    for endpoint in endpoints:
        if Attachment == "ONEPER":
            object_name = AttachmentPrefix + "_" + endpoint + filetype
        else:
            object_name = AttachmentPrefix + filetype
        s3_url = s3_client.generate_presigned_url(
            'get_object',
            Params={'Bucket': s3_bucket_name, 'Key': object_name},
            ExpiresIn=expires_in)

        try:
            response = pinpoint_client.send_messages(
                ApplicationId = application_id,
                MessageRequest = {
                    'Endpoints': {
                        endpoint: {'Substitutions': {"S3URL": [s3_url]}}
                    },
                    'MessageConfiguration': {
                        'EmailMessage': {
                            'FromAddress': SenderAddress
                        }
                    },
                    'TemplateConfiguration': {
                        'EmailTemplate': {
                            'Name': EmailTemplate,
                            'Version': 'latest'
                        }
                    },
                    'TraceId': JourneyId
                }
            )
        except ClientError as e:
            # Log and continue with the remaining endpoints.
            print(e.response['Error']['Message'])
        else:
            print("Email sent!")
            print(response)
105 |
# Email with attachment logic
def send_email_attach(s3_bucket_name,AttachmentPrefix,endpoints,filetype,SenderAddress,EmailTemplate,JourneyId,Attachment):
    """Pull the Pinpoint template's subject/HTML and send via SES raw email
    with a file attachment downloaded from S3.

    Attachment == "ONEPER": one personalized object and one email per
    endpoint; any other value: a single shared object sent once to all
    recipient addresses.
    """
    HTMLTemplate = pinpoint_client.get_email_template(TemplateName=EmailTemplate)
    SUBJECT = HTMLTemplate['EmailTemplateResponse']['Subject']
    BODY_HTML = HTMLTemplate['EmailTemplateResponse']['HtmlPart']
    if Attachment == "ONEPER":
        for endpoint in endpoints:
            # Bug fix: the per-endpoint key must be built inside the loop —
            # the original referenced `endpoint` before the loop defined it,
            # raising NameError on the ONEPER path.
            key = AttachmentPrefix + "_" + endpoint + filetype
            EndpointData = endpoints[endpoint]
            ToAddress = [EndpointData['Address']]
            s3_object = s3_client.get_object(Bucket=s3_bucket_name, Key=key)
            attachment = s3_object['Body'].read()
            sendraw(SUBJECT, BODY_HTML, SenderAddress, ToAddress, attachment, key, JourneyId)
    else:
        key = AttachmentPrefix + filetype
        # One email to all recipients, sharing a single attachment.
        ToAddress = [endpoints[endpoint]['Address'] for endpoint in endpoints]
        s3_object = s3_client.get_object(Bucket=s3_bucket_name, Key=key)
        attachment = s3_object['Body'].read()
        sendraw(SUBJECT, BODY_HTML, SenderAddress, ToAddress, attachment, key, JourneyId)
128 |
# Email with attachment action
def sendraw(SUBJECT,BODY_HTML,SenderAddress,ToAddress,attachment,key,JourneyId):
    """Build a multipart MIME message (HTML body + one file attachment) and
    send it with ses_client.send_raw_email.

    SUBJECT, BODY_HTML: subject and HTML part pulled from the Pinpoint template.
    SenderAddress: From address (may include a friendly display name).
    ToAddress: list of recipient email addresses.
    attachment: raw bytes of the file read from S3.
    key: S3 object key; also used as the attachment's filename.
    JourneyId: Pinpoint journey id, attached as an SES message tag.
    """
    CHARSET = "utf-8"
    # Top-level container is 'mixed' so the body and attachment can coexist.
    msg = MIMEMultipart('mixed')
    msg['Subject'] = SUBJECT
    msg['From'] = SenderAddress
    #msg['To'] = ToAddress
    # 'alternative' sub-part holds the HTML rendering of the body.
    msg_body = MIMEMultipart('alternative')
    htmlpart = MIMEText(BODY_HTML.encode(CHARSET), 'html', CHARSET)
    msg_body.attach(htmlpart)
    att = MIMEApplication(attachment)
    # The S3 key doubles as the filename shown to the recipient.
    att.add_header('Content-Disposition','attachment',filename=key)
    TAGS = "JourneyId=" + JourneyId
    # SES message tags surface in event destinations for journey tracking.
    msg.add_header('X-SES-MESSAGE-TAGS', TAGS)
    msg.attach(msg_body)
    msg.attach(att)
    try:
        response = ses_client.send_raw_email(
            Source=SenderAddress,
            Destinations=ToAddress,
            RawMessage={
                'Data':msg.as_string(),
            }
        )
    except ClientError as e:
        print(e.response['Error']['Message'])
    else:
        print("Email sent! Message ID: " + response['MessageId'])
        print(response)
--------------------------------------------------------------------------------
/cloudformation/Pinpoint_Custom_Channel_Attachment/template.yaml:
--------------------------------------------------------------------------------
1 | AWSTemplateFormatVersion: "2010-09-09"
2 | Transform: AWS::Serverless-2016-10-31
3 | Description: >
4 | pinpoint-custom-channel-attachments
5 |
6 | SAM Template for Amazon Pinpoint attachments custom channel.
7 |
8 | Parameters:
9 | PinpointAppId:
10 | Type: String
11 | Description: "The Amazon Pinpoint application or project id."
12 | MaxLength: 50
13 |
14 | AttachmentsBucketName:
15 | Type: String
16 | Description: "The S3 bucket where attachments will be stored."
17 |
18 | S3URLExpiration:
19 | Type: Number
20 | Default: 3600
21 | Description: "The S3 presigned URL expiration time in seconds."
22 |
23 | FileType:
24 | Type: String
25 | Default: ".pdf"
26 | Description: "The file type that will be used for the attachments starting with . e.g. .csv or .pdf."
27 |
28 | Resources:
29 |
# Lambda invoked by Pinpoint as a custom channel; reads its configuration
# from the journey's Data string and environment variables.
PinpointCustomAttachmentLambda:
  Type: AWS::Serverless::Function
  Metadata:
    cfn_nag:
      rules_to_suppress:
        - id: W89
          reason: Not public facing.
  Properties:
    ReservedConcurrentExecutions: 1
    CodeUri: functions/custom-channel-attachments/
    Handler: index.lambda_handler
    Runtime: python3.9
    Timeout: 150
    MemorySize: 512
    Architectures:
      - arm64
    Environment:
      Variables:
        PINPOINT_APP_ID: !Ref PinpointAppId
        BUCKET_NAME: !Ref AttachmentsBucketName
        EXPIRATION: !Ref S3URLExpiration
        FILE_TYPE: !Ref FileType
    Policies:
      - Version: 2012-10-17
        Statement:
          - Effect: Allow
            Action:
              - mobiletargeting:SendMessages
              - mobiletargeting:GetEmailTemplate
            # NOTE(review): wildcard over all Pinpoint apps in the account;
            # could be narrowed to the ${PinpointAppId} app resource.
            Resource: !Sub "arn:aws:mobiletargeting:${AWS::Region}:${AWS::AccountId}:*"
      - Version: 2012-10-17
        Statement:
          - Effect: Allow
            Action:
              - ses:SendRawEmail
            Resource: !Sub "arn:aws:ses:${AWS::Region}:${AWS::AccountId}:*"
      - Version: 2012-10-17
        Statement:
          - Effect: Allow
            Action:
              # Read-only access to the attachments bucket's objects.
              - s3:GetObject
            Resource: !Sub "arn:aws:s3:::${AttachmentsBucketName}/*"
      # NOTE(review): CloudWatchLogsFullAccess is broader than needed; a
      # scoped logs:CreateLogGroup/CreateLogStream/PutLogEvents statement
      # would suffice.
      - CloudWatchLogsFullAccess
73 |
74 | Outputs:
75 | AWSLambda:
76 | Description: "Name of AWS Lambda function created."
77 | Value: !Ref PinpointCustomAttachmentLambda
78 |
--------------------------------------------------------------------------------
/cloudformation/Pinpoint_Journey_Copy_Mechanism/PinpointJourneyCopyProcess.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/cloudformation/Pinpoint_Journey_Copy_Mechanism/PinpointJourneyCopyProcess.png
--------------------------------------------------------------------------------
/cloudformation/Pinpoint_Journey_Copy_Mechanism/README.md:
--------------------------------------------------------------------------------
1 | # Automating the Amazon Pinpoint Journey copying
2 |
3 | ## Description
4 |
5 | Amazon Pinpoint Campaigns, Journeys and Segments are Project specific resources and cannot be copied between Projects. This can become a blocker when Amazon Pinpoint is used from multiple teams who would like to share existing resources or when migrating between projects or AWS Regions.
6 |
7 | To achieve the above Amazon Pinpoint users need to re-create these assets in the new Amazon Pinpoint Project. This is a manual task that takes time and it is prone to human error.
8 |
9 | ## Solution
10 |
11 | 
12 |
13 | The solution presented in this repository, utilizes AWS CloudFormation that executes an AWS Lambda function upon deployment and copies the specified Journeys from one Amazon Pinpoint Project to another. The solution can also copy journeys between AWS regions and have them created either in an existing Amazon Pinpoint Project or a new one.
14 |
15 | **Solution key features:**
16 | 1. Programmatically obtain existing Journeys
17 | 2. Cross AWS Region copying
18 | 3. Optional creation of new Amazon Pinpoint project to paste the Journeys if you don't have one
19 | 4. Optional deletion of all Journeys created when deleting the CloudFormation stack
20 |
21 | **IMPORTANT**:
22 | - The solution will reset the **Starting** and **End** dates of the copied Journeys. This is done because these dates might be in the past, something that isn't allowed across both Campaigns and Journeys.
23 | - The **Status** of all newly created Journeys is updated to **DRAFT**, which means that they are not live and they need to be published.
24 | - If the CloudFormation creates a new Pinpoint Project, that Project won't be deleted when deleting the CloudFormation stack
25 |
26 | ## Implementation
27 |
28 | 1. Navigate to the AWS CloudFormation console under the AWS Region that you want to paste the copied Journeys.
29 | 2. Create a Stack from **New Resources** and select the [AWS CloudFormation template](https://github.com/aws-samples/communication-developer-services-reference-architectures/blob/master/cloudformation/Pinpoint_Journey_Copy_Mechanism/PinpointJourneyCopingMechanismCF.yaml) from this repository.
30 | 3. Fill the template parameters as shown below:
31 | 1. **Stack name**: Provide a name for your AWS CloudFormation stack.
32 | 2. **AWSRegionFrom**: Select from the list the AWS Region where you want to copy the existing Pinpoint journeys from.
33 | 3. **PinpointProjectIdFrom**: Provide the Amazon Pinpoint Project Id for the project that hosts the Pinpoint journeys.
34 | 4. **PinpointJourneyIds**: Type the Pinpoint Journey Ids that you want to copy separated by comma "," and no spaces.
35 | 5. **PinpointProjectId**: Type the Pinpoint Project Id if you already have one where you want the Journeys to be pasted otherwise leave it empty.
36 | 6. **NewPInpointProjectName**: If you don't have an existing Pinpoint Project then type a name to create one.
37 | 7. **DeleteAll**: If Yes is selected then all Pinpoint Journeys will be deleted. Note that if you create a Pinpoint Project as part of this CloudFormation template, it won't be deleted.
38 | 4. Create the Stack.
39 |
--------------------------------------------------------------------------------
/cloudformation/Pinpoint_SMS_Event_DB/README.md:
--------------------------------------------------------------------------------
1 | # Amazon Pinpoint SMS events' DB
2 |
3 | ## Description
4 |
5 | This solution is relevant for anyone planning to use or using [Amazon Pinpoint's SMS and Voice v2 API](https://docs.aws.amazon.com/pinpoint/latest/apireference_smsvoicev2/Welcome.html) to send SMS. It enables you to store SMS events in Amazon S3 and perform SQL queries using Amazon Athena.
6 |
7 | ## Solution
8 |
9 | 
10 |
11 | The solution presented in this repository, utilizes AWS CloudFormation to deploy an Amazon Kinesis Firehose, an Amazon S3 bucket, an AWS Glue database and an Amazon Athena table for streaming, storing and querying SMS engagement events respectively.
12 |
13 | Some information regarding the AWS services used in the solution:
14 | - **Amazon Kinesis Data Firehose** is an extract, transform, and load (ETL) service that reliably captures, transforms, and delivers streaming data to data lakes, data stores, and analytics services.
15 | - **Amazon S3** provides object storage through a web service interface.
16 | - **AWS Glue** is a serverless data integration service that makes it easier to discover, prepare, move, and integrate data from multiple sources for analytics, machine learning (ML), and application development.
17 | - **Amazon Athena** is a serverless, interactive analytics service that provides a simplified and flexible way to analyze petabytes of data where it lives.
18 |
19 | The solution creates one table with all SMS events and one Amazon Athena view, which contains only the latest event per **message_id** and can be used to check the SMS delivery status.
20 |
21 | ## Prerequisites
22 |
23 | 1. Access to Amazon Pinpoint, AWS CloudShell and IAM policies assigned to your AWS user that allow you to deploy an AWS CloudFormation templates, manage Amazon Athena, Amazon S3, Amazon Kinesis Firehose and AWS Glue resources.
24 | 2. An Amazon Pinpoint SMS [configuration set](https://docs.aws.amazon.com/sms-voice/latest/userguide/configuration-sets.html).
25 |
26 | ## Implementation
27 |
28 | 1. Navigate to the AWS CloudShell in the AWS region you want to deploy the solution. If AWS CloudShell isn't available in the AWS region you want to use, then use the [AWS CLI locally](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html).
29 | 2. Execute the command below to copy the AWS CloudFormation template in the local storage:
30 |
31 | ```
32 | wget https://github.com/aws-samples/communication-developer-services-reference-architectures/blob/master/cloudformation/Pinpoint_SMS_Event_DB/SMS-events-database.yaml
33 | ```
34 | 3. The Amazon S3 bucket name needs to be unique, thus the commands below will create a unique name using a static string, your AWS account Id and a random five-character string.
35 |
36 | ```
37 | # Get the AWS account ID
38 | ACCOUNT_ID=$(aws sts get-caller-identity --query 'Account' --output text)
39 |
40 | # Generate a random ID (lowercase, at least 5 characters)
41 | RANDOM_ID=$(LC_CTYPE=C tr -dc 'a-z0-9' < /dev/urandom | fold -w 5 | head -n 1)
42 |
43 | # Ensure RANDOM_ID is at least 5 characters
44 | while [ ${#RANDOM_ID} -lt 5 ]; do
45 | RANDOM_ID="${RANDOM_ID}$(LC_CTYPE=C tr -dc 'a-z0-9' < /dev/urandom | fold -w 1 | head -n 1)"
46 | done
47 |
48 | # Create an S3 bucket with a unique name (lowercase)
49 | BUCKET_NAME="sms-db-${ACCOUNT_ID}-${RANDOM_ID}"
50 | echo "S3 Bucket name: ${BUCKET_NAME}"
51 | ```
52 |
53 | 4. To deploy the AWS CloudFormation stack execute the AWS CLI command [deploy](https://docs.aws.amazon.com/cli/latest/reference/cloudformation/deploy/) below. This AWS CloudFormation template includes two parameters:
54 | - **EventAthenaDatabaseName:** The name of the AWS Glue database that will be created as part of this solution.
55 | - **CreateBucketName:** The Amazon S3 bucket, where all SMS events will be stored.
56 |
57 | :warning: **Note:** The AWS CloudFormation template deployment time should be between 4 - 6 minutes.
58 |
59 | ```
60 | aws cloudformation deploy \
61 | --template-file "SMS-events-database.yaml" \
62 | --stack-name Pinpoint-SMS-Database \
63 | --parameter-overrides EventAthenaDatabaseName="sms_event_db" CreateBucketName="${BUCKET_NAME}" \
64 | --capabilities CAPABILITY_NAMED_IAM \
65 | --output table
66 | ```
67 |
68 | 5. The AWS CloudFormation deployed has two outputs: the Amazon Kinesis Firehose ARN and Amazon Kinesis Firehose IAM role ARN. Copy the **OutputValue** for both **KinesisFirehose** and **PinpointSMSFirehoseRole** as they will be needed to create an SMS event destination and send a test SMS respectively. Execute the command below in AWS CloudShell to obtain the Amazon Kinesis Firehose ARN and Amazon Kinesis Firehose IAM role ARN.
69 |
70 | ```
71 | aws cloudformation describe-stacks --stack-name Pinpoint-SMS-Database --query "Stacks[].Outputs"
72 | ```
73 |
74 | You can use the `jq` tool to parse the JSON output and extract the values of each `OutputValue`. Here's how you can save the values into variables using a Bash script:
75 |
76 | ```
77 | # Run the AWS CLI command and store the JSON output in a variable
78 | STACK_OUTPUTS=$(aws cloudformation describe-stacks --stack-name Pinpoint-SMS-Database --query "Stacks[].Outputs")
79 |
80 | # Use jq to extract the OutputValue for each key
81 | KINESIS_FIREHOSE_ARN=$(echo "$STACK_OUTPUTS" | jq -r '.[0][] | select(.OutputKey == "KinesisFirehose") | .OutputValue')
82 | KINESIS_IAM_ROLE_ARN=$(echo "$STACK_OUTPUTS" | jq -r '.[0][] | select(.OutputKey == "PinpointSMSFirehoseRole") | .OutputValue')
83 |
84 | # Print the values or use them as needed
85 | echo "Kinesis Firehose Arn: $KINESIS_FIREHOSE_ARN"
86 | echo "Kinesis Firehose IAM Role Arn: $KINESIS_IAM_ROLE_ARN"
87 | ```
88 |
89 | 6. Create a new event destination that will stream all SMS events to the Amazon S3 bucket created by the solution. Before executing the AWS CLI command below, make sure you have replaced the placeholders for **CONFIG_NAME** with the name of the configuration set you will be using to send SMS.
90 |
91 | ```
92 | aws pinpoint-sms-voice-v2 create-event-destination \
93 | --configuration-set-name CONFIG_NAME \
94 | --event-destination-name "SMSallEventDB" \
95 | --matching-event-types TEXT_ALL \
96 | --kinesis-firehose-destination IamRoleArn="${KINESIS_IAM_ROLE_ARN}",DeliveryStreamArn="${KINESIS_FIREHOSE_ARN}"
97 | ```
98 |
99 | 7. Send 4 - 5 SMS, wait 2 minutes and navigate to the Amazon Athena console, where you can preview the table and Athena view under the database with name **sms_event_db**.
100 |
--------------------------------------------------------------------------------
/cloudformation/Pinpoint_SMS_Event_DB/SMS-event-db-architecture.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/cloudformation/Pinpoint_SMS_Event_DB/SMS-event-db-architecture.PNG
--------------------------------------------------------------------------------
/cloudformation/SES_Auto_Reply.yaml:
--------------------------------------------------------------------------------
1 | AWSTemplateFormatVersion: 2010-09-09
2 | Description: Automatic Responder (aka NoReply)
3 |
4 | Parameters:
5 | SESRuleSet:
6 | Type: String
7 | Description: "Existing Amazon Simple Email Service (SES) rule set to store the configuration under. Usually default-rule-set is the active rule set."
8 | Default: "default-rule-set"
9 | MinLength: 1
10 | SESRecipients:
11 | Type: String
12 | Default: ""
13 | Description: "Comma-separated recipients (or domains) that the automatic response will apply to. Leave blank to apply to _all_."
14 | PinpointTemplateName:
15 | Type: String
16 | Description: "Amazon Pinpoint template name to use for the automatic response. Note, to use Pinpoint (!) template here, not the SES one."
17 | MinLength: 1
18 | DefaultEmail:
19 | Type: String
20 | Default: ""
21 | Description: "Default outgoing email address to send auto-responder emails from. Leave blank if you want the auto-reply to come from the original email address to which the triggering email was sent."
22 |
23 | Conditions:
24 | # SESRuleSetNotSet: !Equals ["", !Ref SESRuleSet]
25 | RecipientsNotSet: !Equals ["", !Ref SESRecipients]
26 | # DefaultEmailNotSet: !Equals ["", !Ref DefaultEmail]
27 | # NeedsPinpointTemplate: !Equals
28 | # - ''
29 | # - !Ref PinpointTemplateName
30 |
31 | Resources:
32 | # PinpointTemplate:
33 | # Type: AWS::Pinpoint::EmailTemplate
34 | # Condition: NeedsPinpointTemplate
35 | # Properties:
36 | # Name: !Ref PinpointTemplateName
# SES receipt rule registered in the existing rule set: for the configured
# recipients (or all recipients when SESRecipients is blank) it invokes the
# auto-responder Lambda asynchronously, then stops rule-set processing.
SESEmailReceivedRule:
  Type: "AWS::SES::ReceiptRule"
  Properties:
    RuleSetName: !Ref SESRuleSet
    Rule:
      # NOTE(review): the rule name reuses the Pinpoint template name, so a
      # rule set can hold one auto-reply rule per template.
      Name: !Ref PinpointTemplateName
      Enabled: true
      # Omit Recipients entirely when the parameter is blank so the rule
      # matches all incoming addresses.
      Recipients: !If
        - RecipientsNotSet
        - !Ref AWS::NoValue
        - [!Ref SESRecipients]
      Actions:
        # Fire-and-forget invocation; the Lambda builds and sends the reply.
        - LambdaAction:
            FunctionArn: !GetAtt AutoResponderLambda.Arn
            InvocationType: Event
        # Stop evaluating any further rules in this rule set.
        - StopAction:
            Scope: RuleSet
54 |
# Auto-responder Lambda: triggered by the SES receipt rule, it pulls a
# Pinpoint email template and replies to the original sender via SES.
AutoResponderLambda:
  Type: AWS::Lambda::Function
  Properties:
    Handler: index.handler
    Role: !GetAtt AutoResponderLambdaRole.Arn
    # Fix: nodejs12.x reached end-of-life and can no longer be created or
    # updated in Lambda. nodejs16.x is the newest runtime that still bundles
    # the AWS SDK for JavaScript v2 ("aws-sdk"), which this inline code uses.
    Runtime: "nodejs16.x"
    Timeout: 10
    MemorySize: 128
    Environment:
      Variables:
        TemplateName: !Ref PinpointTemplateName
        DefaultEmail: !Ref DefaultEmail
    Code:
      ZipFile: |
        var AWS = require('aws-sdk');

        exports.handler = function (event, context, callback) {
            console.log('Auto-Reply invoked as Lambda Action -- this function is designed to run directly off the receipt - incoming email body is disregarded');

            var sesNotification = event.Records[0].ses;
            console.log("SES Notification:\n", JSON.stringify(sesNotification));

            var pinpoint = new AWS.Pinpoint();
            var params = {
                TemplateName: process.env.TemplateName
            };
            pinpoint.getEmailTemplate(params, function (err, data) {
                if (err) {
                    console.log(err.message);
                }
                else {
                    console.log("Template pulled: ", data.EmailTemplateResponse.Subject);

                    var from = sesNotification.mail.commonHeaders.from[0];
                    var to = sesNotification.mail.commonHeaders.to[0];
                    var subject = data.EmailTemplateResponse.Subject + sesNotification.mail.commonHeaders.subject;
                    var messageId = sesNotification.mail.commonHeaders.messageId.replace("@amazon.com", "");
                    // Reply from DefaultEmail when set, otherwise from the address the
                    // triggering email was sent to.
                    var sender = (process.env.DefaultEmail!=null && process.env.DefaultEmail != "") ? process.env.DefaultEmail : to;
                    var recipient = from;
                    //const configuration_set = "ConfigSet";
                    //var body_text = data.EmailTemplateResponse.TextPart;
                    var body_html = data.EmailTemplateResponse.HtmlPart;
                    body_html = body_html.replace("%%NAME%%", from).replace("%%ID%%", messageId);
                    var charset = "UTF-8";

                    var ses = new AWS.SES();
                    var params = {
                        Source: sender,
                        Destination: {
                            ToAddresses: [
                                recipient
                            ],
                        },
                        Message: {
                            Subject: {
                                Data: subject,
                                Charset: charset
                            },
                            Body: {
                                /*Text: {
                                    Data: body_text,
                                    Charset: charset
                                },*/
                                Html: {
                                    Data: body_html,
                                    Charset: charset
                                }
                            }
                        },
                        //ConfigurationSetName: configuration_set
                    };

                    ses.sendEmail(params, function (err, data) {
                        if (err) {
                            console.log(err.message);
                        }
                        else {
                            console.log("Email sent! Message ID: ", data.MessageId);
                        }
                    });
                }
            });
            console.log('Responding with all clear - in case of sync invocation, etc.');
            callback(null, null);
        };
140 |
  # Execution role for the auto-responder: read Pinpoint templates, send via
  # SES, and write its own CloudWatch logs.
  AutoResponderLambdaRole:
    Type: AWS::IAM::Role
    Properties:
      AssumeRolePolicyDocument:
        Version: "2012-10-17"
        Statement:
          - Effect: "Allow"
            Principal:
              Service:
                - "lambda.amazonaws.com"
            Action:
              - "sts:AssumeRole"
      Policies:
        - PolicyName: "LambdaExecutionPolicy"
          PolicyDocument:
            Version: "2012-10-17"
            Statement:
              # Read any Pinpoint template in this account/region (the
              # template name is configurable, so the ARN is wildcarded).
              - Effect: "Allow"
                Action:
                  - "mobiletargeting:GetEmailTemplate"
                Resource: !Sub "arn:aws:mobiletargeting:${AWS::Region}:${AWS::AccountId}:templates/*/*"
              # Send email from any verified SES identity.
              - Effect: "Allow"
                Action:
                  - "ses:SendEmail"
                Resource: "arn:aws:ses:*:*:identity/*"
              - Effect: "Allow"
                Action:
                  - "logs:CreateLogGroup"
                  - "logs:CreateLogStream"
                  - "logs:PutLogEvents"
                Resource: !Sub "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:*"
172 |
  # Allows SES to invoke the auto-responder Lambda. SourceAccount restricts
  # the permission to receipt rules in this account (confused-deputy guard).
  LambdaInvokePermission:
    Type: AWS::Lambda::Permission
    Properties:
      Action: "lambda:InvokeFunction"
      Principal: "ses.amazonaws.com"
      SourceAccount: !Sub ${AWS::AccountId}
      FunctionName: !GetAtt AutoResponderLambda.Arn
180 |
--------------------------------------------------------------------------------
/cloudformation/SES_Event_DB/email-event-db-architecture.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/cloudformation/SES_Event_DB/email-event-db-architecture.PNG
--------------------------------------------------------------------------------
/cloudformation/SES_Event_DB/example-data/email_status_view.csv:
--------------------------------------------------------------------------------
1 | "message_id","subject","destination","time_sent","time_delivered","time_clicked","time_opened","time_bounced","bounce_type","bounce_subtype","time_complained","complaint_feedbacktype"
2 | "0101018f7bc571e6-4ceaf257-3b22-48b0-975e-83d69b6c71aa-000000","Complaint1","[complaint@simulator.amazonses.com]","2024-05-15T10:21:28.422Z","2024-05-15T10:21:29.566Z",,"2024-05-15T10:21:47.754Z",,,,"2024-05-15T10:21:30.466Z","abuse"
3 | "0101018f7bc4a0fc-8ee5df2d-ab9b-4032-93ce-6cb7ba201f59-000000","Subject2","[email@example.com]","2024-05-15T10:20:34.940Z","2024-05-15T10:20:35.894Z",,"2024-05-15T10:21:40.343Z",,,,,
4 | "0101018f7bc4d418-5c567434-5100-4efa-9c5f-bb9e620bf5ee-000000","Subject3","[email@example.com]","2024-05-15T10:20:48.024Z","2024-05-15T10:20:48.736Z",,"2024-05-15T10:21:43.755Z",,,,,
5 | "0101018f7bc5451b-b60b87ea-33fc-4dab-a4d3-50cdfb0eab72-000000","Bounce1","[bounce@simulator.amazonses.com]","2024-05-15T10:21:16.955Z",,,,"2024-05-15T10:21:17.610Z","Permanent","General",,
6 | "0101018f7c9dbf22-58e5cc54-4bf5-44a9-b329-1bca093e5f3a-000000","Subject","[success@simulator.amazonses.com]","2024-05-15T14:17:43.970Z","2024-05-15T14:17:44.511Z",,,,,,,
7 |
--------------------------------------------------------------------------------
/cloudformation/SES_Pinpoint_Messages_Queuing/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2022 Pavlos Ioannou
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/cloudformation/SES_Pinpoint_Messages_Queuing/README.md:
--------------------------------------------------------------------------------
1 | # SES_Pinpoint_Messages_Queuing
2 |
3 | ## Description
4 |
Amazon SES and Amazon Pinpoint API operations for sending messages don't have a queuing mechanism. If your application exceeds the allocated throughput limits, then the API will return a throttling error message **Maximum sending rate exceeded**. This means that queuing messages should take place before calling the Amazon Pinpoint or SES API.
6 |
7 | ## Architecture
8 |
9 | 
10 |
The solution utilizes Amazon SQS for queuing, AWS Lambda for consuming the SQS messages and Amazon CloudWatch for monitoring.
12 |
13 | For testing purposes, this solution deploys an AWS Lambda function (publisher), which is invoked every minute by an [Amazon EventBridge rule](https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-rules.html). The **publisher** writes X messages to an SQS Standard Queue with each message being an email. You can change the number of messages sent to SQS when deploying the solution. By default the EventBridge rule is **DISABLED** and you will need to enable it by navigating to the **EventBridge > Rules > TriggerSQSPublishMessagesLambda**.
14 |
15 | The **poller** AWS Lambda function sends dummy emails via the SES API and uses the simulator email address success@simulator.amazonses.com. For more information regarding SES simulator visit this [link](https://docs.aws.amazon.com/ses/latest/dg/send-an-email-from-console.html).
16 |
17 | The above resources allow you to test, monitor and configure your email sending throughput before integrating with your application.
18 |
19 | ## Mechanics
20 |
Both Amazon Pinpoint and Amazon SES have a **Maximum rate per limit**, which is also known as throughput or emails sent per second. When you start sending emails, your SES / Pinpoint account is in Sandbox allowing you to send only one email per second. Follow the links below to get out of the Sandbox and increase sending limits [SES](https://docs.aws.amazon.com/ses/latest/dg/request-production-access.html) & [Pinpoint](https://docs.aws.amazon.com/pinpoint/latest/userguide/channels-email-setup-production-access.html).
22 |
Sending an email via API or SDK using SES / Pinpoint takes approximately 90 - 120 ms. This depends on the AWS region and you can monitor it from the CloudWatch dashboard deployed as part of this solution once you start sending. Considering that an API call takes on average 100 ms, an AWS Lambda function can send roughly 9 emails per second (you should always factor the possibility that some API calls might take longer).
24 |
To reach e.g. 20 emails per second throughput, you would need more than one AWS Lambda function to process the SQS messages at the same time. To achieve that you can increase the AWS Lambda reserved concurrency to 2. This will result in an estimated throughput of 18 emails per second.
26 |
27 | In case the SES or Pinpoint API returns a throttling error, the **poller** AWS Lambda function will write that message back to the SQS Standard Queue. If any other error is returned, the message will be placed to a Dead Letter Queue (DLQ), which is deployed as part of this solution.
28 |
29 | 
30 |
31 | ## CloudWatch-Dashboard
32 |
33 | Monitoring is important and helpful for configuring properly the SQS batch and AWS Lambda concurrency so that you can achieve the maximum throughput.
34 |
35 | 
36 |
37 | This solution deploys an Amazon CloudWatch dashboard from where you can monitor:
38 | - Number of messages written to the SQS
- Number of emails sent & delivered (SES)
40 | - Number of messages processed by Lambda (send & throttled) **CUSTOM METRIC**
41 | - SES throttling errors **CUSTOM METRIC**
42 | - Email throughput (average number of SES message deliveries against your SES throughput - provided when deploying the solution)
43 | - Number of visible SQS messages
44 | - Duration (ms) - AWS Lambda for sending messages
45 | - SES API response time (ms) - AWS Lambda for sending messages **CUSTOM METRIC**
46 | - Concurrent executions average - AWS Lambda for sending messages
47 | - Errors - AWS Lambda for sending messages
48 |
49 | ## Deployment
50 |
51 | **IMPORTANT**:
52 | - This solution by default uses SES API to send emails. To use Amazon Pinpoint, you will need to amend the code in the AWS Lambda function **sqs_message_poller > lambda_function** and un-comment the function that sends the message via Pinpoint. Furthermore the CloudWatch dashboard email metrics are for SES, thus the respective changes will need to be done there as well.
- The AWS Lambda function **sqs_message_poller > lambda_function** contains a **sleep** function in case your SES / Pinpoint account is in Sandbox and the sending throughput is 1. To avoid receiving constant throttling errors, the **sleep** function keeps the AWS Lambda function running up to 1 second e.g. 1 SES / Pinpoint send_message API call is 100 ms so the **sleep** function is 800 - 900 ms. To use the sleep function you will need to un-comment it.
54 |
55 | **IMPLEMENTATION:**
56 | - Create an S3 bucket and upload the two zip files in the folder [aws-lambda-code](https://github.com/aws-samples/communication-developer-services-reference-architectures/tree/master/cloudformation/SES_Pinpoint_Messages_Queuing/aws-lambda-code)
57 | - Navigate to the AWS CloudFormation console and deploy the stack using existing resources [YAML template attached](https://github.com/aws-samples/communication-developer-services-reference-architectures/blob/master/cloudformation/SES_Pinpoint_Messages_Queuing/SES_Pinpoint_Messages_Queuing.yaml)
58 | - Fill the fields as per the instructions below:
59 | - Sub **DashboardName:** Provide a name for the CloudWatch dashboard that will be created
60 | - Sub **EmailFrom:** The email address that you will use to send emails for testing purposes. This email address will be used from the solution to start sending messages, thus it needs to be [verified first](https://docs.aws.amazon.com/ses/latest/dg/creating-identities.html)
61 | - Sub **EmailThroughput:** This is your SES or Pinpoint email sending throughput. If your account is in sandbox your email sending throughput is 1 email per second
62 | - Sub **LambdaCodeS3BucketName:** The name of the S3 bucket that you created in step 1
63 | - Sub **NoOfMessagesSQS:** The number of messages the AWS Lambda function **publisher** will write to the SQS Standard Queue every minute. This is for testing purposes allowing you to monitor how SQS and AWS Lambda function **poller** behave based on the volume of emails and respective configuration (Lambda reserved concurrency & SQS batch size)
64 | - Sub **ReservedLambdaConcurrency:** Considering that the average SES / Pinpoint API call to send an email is 90 - 120 ms, an AWS Lambda function should be able to send 9 emails per second. If your throughput is higher than 9 - 10 then the Lambda reserved concurrency should be greater than 1
    - Sub **SQSBatchSize:** This specifies how many messages an SQS batch includes. An AWS Lambda function processes one batch per invocation
66 | - Once the CloudFormation stack has been deployed, navigate to the EventBridge console and enable the Rule **TriggerSQSPublishMessagesLambda**
67 | - Navigate to the CloudWatch dashboards, select the dashboard you created and monitor the metrics
68 |
69 |
70 |
71 |
--------------------------------------------------------------------------------
/cloudformation/SES_Pinpoint_Messages_Queuing/aws-lambda-code/sqs_message_poller.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/cloudformation/SES_Pinpoint_Messages_Queuing/aws-lambda-code/sqs_message_poller.zip
--------------------------------------------------------------------------------
/cloudformation/SES_Pinpoint_Messages_Queuing/aws-lambda-code/sqs_message_publisher.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/cloudformation/SES_Pinpoint_Messages_Queuing/aws-lambda-code/sqs_message_publisher.zip
--------------------------------------------------------------------------------
/cloudformation/SES_Pinpoint_Messages_Queuing/images/ArchDiagram.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/cloudformation/SES_Pinpoint_Messages_Queuing/images/ArchDiagram.PNG
--------------------------------------------------------------------------------
/cloudformation/SES_Pinpoint_Messages_Queuing/images/CloudWatch-Dashboard-Metrics.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/cloudformation/SES_Pinpoint_Messages_Queuing/images/CloudWatch-Dashboard-Metrics.PNG
--------------------------------------------------------------------------------
/cloudformation/SES_Pinpoint_Messages_Queuing/images/queuing-logic.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/cloudformation/SES_Pinpoint_Messages_Queuing/images/queuing-logic.PNG
--------------------------------------------------------------------------------
/cloudformation/SMS-Retry/parent.yaml:
--------------------------------------------------------------------------------
# Parent template: selects one of three nested stacks that add an SMS-retry
# pipeline (Kinesis Data Firehose + transformation Lambda) reacting to
# _SMS.FAILURE events, depending on the project's existing event-stream setup.
Description: This template deploys Amazon Kinesis Data Firehose with Transformation Lambda to trigger SMS retry on _SMS.FAILURE event for UNREACHABLE, UNKNOWN, CARRIER_UNREACHABLE & TTL_EXPIRED record status

Parameters:

  PinpointApplicationId:
    Description: The ID of your Pinpoint Application
    Type: String
    # NOTE(review): this default looks like a sample project id left in from
    # testing -- deployers should override it with their own application id.
    Default: 9ee8c2377b124e3db32a93fc01cd8fbd

  EventStreamConfiguration:
    Description: Choose the way in which your Event stream is configured for Pinpoint project.
    Default: No existing event stream setup
    Type: String
    AllowedValues:
      - No existing event stream setup
      - Event stream setup with Amazon Kinesis Data Stream
      - Event stream setup with Amazon Kinesis Firehose
    ConstraintDescription: must specify from the drop-down.

Conditions:

  # Exactly one of the three conditions below holds, picking the nested stack.
  FirehoseEventStreamConfigured: !Equals
    - !Ref EventStreamConfiguration
    - Event stream setup with Amazon Kinesis Firehose

  KinesisEventStreamConfigured: !Equals
    - !Ref EventStreamConfiguration
    - Event stream setup with Amazon Kinesis Data Stream

  NoEventStreamConfigured: !Equals
    - !Ref EventStreamConfiguration
    - No existing event stream setup

Resources:

  # Nested-stack templates are hosted on the AWS blog artifact CDN.
  NestedStackNoEventStream:
    Type: 'AWS::CloudFormation::Stack'
    Condition: NoEventStreamConfigured
    Properties:
      TemplateURL: >-
        https://d2908q01vomqb2.cloudfront.net/artifacts/BusinessApplications/2022/08/sim-D30841386/NoEventStreamConfigured.yaml
      Parameters:
        PinpointApplicationId: !Ref PinpointApplicationId

  NestedStackKinesisEventStream:
    Type: 'AWS::CloudFormation::Stack'
    Condition: KinesisEventStreamConfigured
    Properties:
      TemplateURL: >-
        https://d2908q01vomqb2.cloudfront.net/artifacts/BusinessApplications/2022/08/sim-D30841386/KinesisEventStreamConfigured.yaml
      Parameters:
        PinpointApplicationId: !Ref PinpointApplicationId

  NestedStackFirehoseEventStream:
    Type: 'AWS::CloudFormation::Stack'
    Condition: FirehoseEventStreamConfigured
    Properties:
      TemplateURL: >-
        https://d2908q01vomqb2.cloudfront.net/artifacts/BusinessApplications/2022/08/sim-D30841386/FirehoseEventStreamConfigured.yaml
      Parameters:
        PinpointApplicationId: !Ref PinpointApplicationId
--------------------------------------------------------------------------------
/cloudformation/Self-managed_Opt_Outs.yaml:
--------------------------------------------------------------------------------
AWSTemplateFormatVersion: 2010-09-09
Description: Self-Managed Opt Outs in Pinpoint Channel

Parameters:
  PinpointProjectId:
    Type: String
    Description: Amazon Pinpoint Project ID if one already exists, blank to create one
  PinpointProjectName:
    Type: String
    Default: "My Pinpoint Project"
    Description: "If no PinpointProjectId provided, name to be used to create the Pinpoint project"

Conditions:
  # True when no project id was supplied; the template then creates one.
  NeedsPinpointProjectId: !Equals
    - ''
    - !Ref PinpointProjectId

Resources:
  # Created only when the user did not supply an existing project id.
  PinpointApplication:
    Type: AWS::Pinpoint::App
    Condition: NeedsPinpointProjectId
    Properties:
      Name: !Ref PinpointProjectName
24 |
  # SNS topic for Pinpoint two-way SMS: inbound SMS notifications are
  # published here and delivered to the opt-out Lambda below.
  TwoWaySNSTopic:
    Type: AWS::SNS::Topic
    Properties:
      DisplayName: 'Two Way SMS'
      # Encrypt messages at rest with the AWS-managed SNS key.
      KmsMasterKeyId: alias/aws/sns
      Subscription:
        -
          Endpoint: !GetAtt TwoWaySMSLambda.Arn
          Protocol: "lambda"
34 |
35 |
36 | TwoWaySMSLambda:
37 | Type: AWS::Lambda::Function
38 | Properties:
39 | Handler: index.lambda_handler
40 | Role: !GetAtt TwoWaySMSLambdaRole.Arn
41 | Runtime: "python3.7"
42 | Timeout: 60
43 | MemorySize: 1024
44 | Environment:
45 | Variables:
46 | PINPOINT_PROJECT_ID: !If
47 | - NeedsPinpointProjectId
48 | - !Ref PinpointApplication
49 | - !Ref PinpointProjectId
50 | Code:
51 | ZipFile: |
52 | import json
53 | import logging
54 | import boto3
55 | import os
56 |
57 | pinpoint = boto3.client('pinpoint')
58 |
59 | def lambda_handler(event, context):
60 | logging.getLogger().setLevel('DEBUG')
61 | logging.debug(json.dumps(event))
62 |
63 | for record in event['Records']:
64 | messagejson = record['Sns']['Message']
65 | logging.debug('Found Message %s', messagejson)
66 |
67 | try:
68 | message = json.loads(messagejson)
69 |
70 | txt_response = message['messageBody']
71 |
72 | if (
73 | 'ARRET' in txt_response.upper() or
74 | 'CANCEL' in txt_response.upper() or
75 | 'END' in txt_response.upper() or
76 | 'OPT-OUT' in txt_response.upper() or
77 | 'OPTOUT' in txt_response.upper() or
78 | 'QUIT' in txt_response.upper() or
79 | 'REMOVE' in txt_response.upper() or
80 | 'STOP' in txt_response.upper() or
81 | 'TD' in txt_response.upper() or
82 | 'UNSUBSCRIBE'in txt_response.upper()
83 | ):
84 |
85 | num = message['originationNumber']
86 |
87 | # CUSTOMER TODO - assumes endpoint id is the same as the SMS address minus the "+" at the beginning
88 | # Replace with call to database to lookup endpoint id by phone number otherwise
89 | endpointId = num[1:]
90 |
91 | logging.info('Opting out: %s', endpointId)
92 |
93 | addresses = {}
94 | addresses[num] = {
95 | 'ChannelType': 'SMS'
96 | }
97 |
98 | pinpoint.send_messages(
99 | ApplicationId=os.environ['PINPOINT_PROJECT_ID'],
100 | MessageRequest={
101 | 'Addresses': addresses,
102 | 'MessageConfiguration': {
103 | 'SMSMessage': {
104 | 'Body': 'You have successfully been removed.'
105 | }
106 | }
107 | }
108 | )
109 |
110 | pinpoint.update_endpoint(
111 | ApplicationId=os.environ['PINPOINT_PROJECT_ID'],
112 | EndpointId=endpointId,
113 | EndpointRequest={
114 | 'Address': num,
115 | 'ChannelType': 'SMS',
116 | 'OptOut': 'ALL'
117 | }
118 | )
119 |
120 | except Exception as error:
121 | logging.error('Found Error: %s', error)
122 |
  # Execution role for the opt-out Lambda: update endpoints / send messages
  # on the target Pinpoint project, plus CloudWatch logging.
  TwoWaySMSLambdaRole:
    Type: AWS::IAM::Role
    Properties:
      AssumeRolePolicyDocument:
        Version: "2012-10-17"
        Statement:
          -
            Effect: "Allow"
            Principal:
              Service:
                - "lambda.amazonaws.com"
            Action:
              - "sts:AssumeRole"
      Policies:
        -
          PolicyName: "LambdaExecutionPolicy"
          PolicyDocument:
            Version: "2012-10-17"
            Statement:
              -
                Effect: "Allow"
                Action:
                  - "mobiletargeting:UpdateEndpoint"
                  - "mobiletargeting:SendMessages"
                # Scoped to the created or supplied Pinpoint project.
                Resource:
                  !Sub
                    - 'arn:aws:mobiletargeting:${AWS::Region}:${AWS::AccountId}:apps/${ProjectId}*'
                    - {ProjectId: !If [NeedsPinpointProjectId, !Ref PinpointApplication, !Ref PinpointProjectId] }
              -
                Effect: "Allow"
                Action:
                  - "logs:CreateLogGroup"
                  - "logs:CreateLogStream"
                  - "logs:PutLogEvents"
                Resource: !Sub "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:*"
158 |
  # Allows SNS (this topic only, via SourceArn) to invoke the opt-out Lambda.
  LambdaInvokePermission:
    Type: AWS::Lambda::Permission
    Properties:
      Action: "lambda:InvokeFunction"
      Principal: "sns.amazonaws.com"
      SourceArn: !Ref TwoWaySNSTopic
      FunctionName: !GetAtt TwoWaySMSLambda.Arn
166 |
--------------------------------------------------------------------------------
/cloudformation/Simple_CMS.yaml:
--------------------------------------------------------------------------------
AWSTemplateFormatVersion: 2010-09-09
# FIX: corrected typos in the user-visible stack description
# ("Automaticaly" and the stray trailing "..").
Description: Automatically provision and configure the AWS services necessary to deploy
  an S3 bucket along with CloudFront Distribution to allow for simple hosting of images
  and attachments for Pinpoint emails or other uses.
Resources:
  # Private, versioned, SSE-S3-encrypted bucket holding the hosted assets;
  # readable only through the CloudFront origin access identity below.
  StaticFiles:
    Type: AWS::S3::Bucket
    Properties:
      AccessControl: Private
      PublicAccessBlockConfiguration:
        BlockPublicAcls: true
        BlockPublicPolicy: true
        IgnorePublicAcls: true
        RestrictPublicBuckets: true
      VersioningConfiguration:
        Status: Enabled
      BucketEncryption:
        ServerSideEncryptionConfiguration:
          - ServerSideEncryptionByDefault:
              SSEAlgorithm: AES256
      # Server-access logs go to the dedicated log bucket.
      LoggingConfiguration:
        DestinationBucketName:
          Ref: LogBucket
        LogFilePrefix: simple-cms-s3/
  # Destination for S3 server-access and CloudFront logs. Retained on stack
  # deletion so audit logs are not lost.
  LogBucket:
    Type: AWS::S3::Bucket
    DeletionPolicy: Retain
    Metadata:
      cfn_nag:
        rules_to_suppress:
          - id: W35
            reason: This is the log bucket.
    Properties:
      # LogDeliveryWrite lets the S3 log-delivery group write access logs.
      AccessControl: LogDeliveryWrite
      PublicAccessBlockConfiguration:
        BlockPublicAcls: true
        BlockPublicPolicy: true
        IgnorePublicAcls: true
        RestrictPublicBuckets: true
      BucketEncryption:
        ServerSideEncryptionConfiguration:
          - ServerSideEncryptionByDefault:
              SSEAlgorithm: AES256
      VersioningConfiguration:
        Status: Enabled
  # Log-bucket policy: permits CloudTrail delivery and denies all non-TLS
  # access to the bucket and its objects.
  LogBucketPolicy:
    Type: AWS::S3::BucketPolicy
    Properties:
      Bucket:
        Ref: LogBucket
      PolicyDocument:
        Version: 2012-10-17
        Statement:
          - Sid: AWSCloudTrailAclCheck
            Effect: Allow
            Principal:
              Service: cloudtrail.amazonaws.com
            Action: s3:GetBucketAcl
            Resource:
              Fn::Sub: arn:aws:s3:::${LogBucket}
          - Sid: AWSCloudTrailWrite
            Effect: Allow
            Principal:
              Service: cloudtrail.amazonaws.com
            Action: s3:PutObject
            Resource:
              Fn::Sub: arn:aws:s3:::${LogBucket}/AWSLogs/${AWS::AccountId}/*
            # CloudTrail must grant the bucket owner full control of objects.
            Condition:
              StringEquals:
                s3:x-amz-acl: bucket-owner-full-control
          # Deny any request made over plain HTTP.
          - Sid: LogBucketAllowSSLRequestsOnly
            Effect: Deny
            Principal: '*'
            Action: s3:*
            Resource:
              - Fn::Sub: arn:aws:s3:::${LogBucket}/*
              - Fn::Sub: arn:aws:s3:::${LogBucket}
            Condition:
              Bool:
                aws:SecureTransport: 'false'
  # Grants the CloudFront origin access identity read access to the asset
  # bucket, so objects are reachable only through the distribution.
  ReadPolicy:
    Type: AWS::S3::BucketPolicy
    Properties:
      Bucket:
        Ref: StaticFiles
      PolicyDocument:
        Statement:
          - Action: s3:GetObject
            Effect: Allow
            Resource:
              Fn::Sub: arn:aws:s3:::${StaticFiles}/*
            Principal:
              CanonicalUser:
                Fn::GetAtt:
                  - CloudFrontOriginAccessIdentity
                  - S3CanonicalUserId
  # NOTE(review): origin access identity (OAI) is the legacy mechanism;
  # CloudFront Origin Access Control (OAC) is the newer replacement --
  # consider migrating when next touching this template.
  CloudFrontOriginAccessIdentity:
    Type: AWS::CloudFront::CloudFrontOriginAccessIdentity
    Properties:
      CloudFrontOriginAccessIdentityConfig:
        Comment:
          Fn::GetAtt:
            - StaticFiles
            - RegionalDomainName
  # CloudFront distribution fronting the asset bucket: HTTPS-redirect,
  # read-only methods, compression, and access logging to the log bucket.
  CloudFrontDistribution:
    Type: AWS::CloudFront::Distribution
    DependsOn:
      - LogBucket
      - CloudFrontOriginAccessIdentity
    Metadata:
      cfn_nag:
        rules_to_suppress:
          - id: W70
            reason: Using CloudFront Provided Cert which defaults this to TLS1. Hoping
              to avoid customer needing to provision cert just to deploy solution.
    Properties:
      DistributionConfig:
        Origins:
          # Single S3 origin, accessed via the OAI (bucket stays private).
          - DomainName:
              Fn::GetAtt:
                - StaticFiles
                - RegionalDomainName
            Id:
              Fn::GetAtt:
                - StaticFiles
                - RegionalDomainName
            S3OriginConfig:
              OriginAccessIdentity:
                Fn::Sub: origin-access-identity/cloudfront/${CloudFrontOriginAccessIdentity}
        DefaultCacheBehavior:
          AllowedMethods:
            - GET
            - HEAD
            - OPTIONS
          CachedMethods:
            - GET
            - HEAD
            - OPTIONS
          Compress: true
          # Cache for 60s by default, up to a day.
          DefaultTTL: 60
          ForwardedValues:
            Cookies:
              Forward: none
            QueryString: false
          MaxTTL: 86400
          MinTTL: 0
          SmoothStreaming: false
          TargetOriginId:
            Fn::GetAtt:
              - StaticFiles
              - RegionalDomainName
          ViewerProtocolPolicy: redirect-to-https
        Comment: ''
        PriceClass: PriceClass_All
        Enabled: true
        # Default CloudFront certificate; see cfn_nag suppression above for
        # why the minimum protocol version is effectively ignored here.
        ViewerCertificate:
          CloudFrontDefaultCertificate: true
          MinimumProtocolVersion: TLSv1.2_2018
        Restrictions:
          GeoRestriction:
            RestrictionType: none
        HttpVersion: http2
        IPV6Enabled: true
        DefaultRootObject: index.html
        Logging:
          Bucket:
            Fn::GetAtt:
              - LogBucket
              - DomainName
          IncludeCookies: true
          Prefix: simple-cms-cloudfront
Outputs:
  # CloudFront domain to prefix asset URLs with.
  Domain:
    Description: Cloudfront Domain
    Value:
      Fn::GetAtt:
        - CloudFrontDistribution
        - DomainName
  S3Bucket:
    Description: The S3 Bucket used to store images and attachments
    Value:
      Ref: StaticFiles
  SimpleCMSURL:
    Description: Use this link to prefix your images and attachments
    Value:
      Fn::Sub:
        - https://${CFDomain}/
        - CFDomain:
            Fn::GetAtt:
              - CloudFrontDistribution
              - DomainName
192 |
--------------------------------------------------------------------------------
/cloudformation/ses_bounce_logging_blog.yml:
--------------------------------------------------------------------------------
AWSTemplateFormatVersion: '2010-09-09'
Description: 'Template to create Lambda function using Cloudformation'
Parameters:
  CloudWatchGroupName:
    Description: Cloudwatch Group Name for bounce notifications.
    Default: /aws/ses/bounce_logs
    Type: String
    AllowedPattern: .+
    ConstraintDescription: Cloudwatch Group Name for bounce notifications.
  SNSTopicARN:
    Description: Add SNS Topic ARN.
    Type: String
    AllowedPattern: .+
    ConstraintDescription: Add SNS Topic ARN.
  # Which SES notification type gets written to the log group.
  EventType:
    Description: AWS SES Event Type to log to the CloudWatchGroupName
    Type: String
    Default: Bounce
    AllowedValues:
      - Bounce
      - Complaint
      - Delivery
Resources:
  # Execution role: write access limited to /aws/ses/* log groups.
  LambdaRole:
    Type: 'AWS::IAM::Role'
    Properties:
      AssumeRolePolicyDocument: {Version: '2012-10-17', Statement: [{Effect: Allow, Principal: {Service: [lambda.amazonaws.com]}, Action: ['sts:AssumeRole']}]}
      Policies:
        - PolicyName: cloudwatch_write_policy
          PolicyDocument: {Version: '2012-10-17', Statement: [{Effect: Allow, Action: ['logs:CreateLogGroup','logs:CreateLogStream','logs:PutLogEvents','logs:DescribeLogStreams'], "Resource" :['arn:aws:logs:*:*:log-group:/aws/ses/*']}]}
      Path: /
  # Subscribes the Lambda to the (pre-existing) SES notification topic.
  SnsSubscription:
    Type: AWS::SNS::Subscription
    Properties:
      Protocol: lambda
      Endpoint: !GetAtt LambdaFunction.Arn
      TopicArn: !Ref SNSTopicARN
  # Allows that topic (SourceArn-scoped) to invoke the Lambda.
  LambdaInvokePermission:
    Type: AWS::Lambda::Permission
    Properties:
      Action: lambda:InvokeFunction
      Principal: sns.amazonaws.com
      SourceArn: !Ref SNSTopicARN
      FunctionName: !Ref LambdaFunction
45 | LambdaFunction:
46 | Type: 'AWS::Lambda::Function'
47 | DependsOn: LambdaRole
48 | Properties:
49 | Environment: {Variables: {group_name: !Ref CloudWatchGroupName, event_type: !Ref EventType, LOG_LEVEL: 'INFO'}}
50 | Role: !GetAtt LambdaRole.Arn
51 | Timeout: 60
52 | Handler: index.lambda_handler
53 | Runtime: python3.12
54 | MemorySize: 128
55 | Code:
56 | ZipFile: |
57 | import boto3
58 | import time
59 | import json
60 | import sys
61 | import secrets
62 | import os
63 | import logging
64 |
65 | client = boto3.client('logs')
66 |
67 | log_group = os.getenv("group_name")
68 | event_type = os.getenv("event_type")
69 |
70 | def lambda_handler(event, context):
71 | global log_level
72 | log_level = str(os.environ.get('LOG_LEVEL')).upper()
73 | if log_level not in [
74 | 'DEBUG', 'INFO',
75 | 'WARNING', 'ERROR',
76 | 'CRITICAL'
77 | ]:
78 | log_level = 'ERROR'
79 | logging.getLogger().setLevel(log_level)
80 |
81 | logging.info(event)
82 |
83 | for record in event['Records']:
84 | logs = record['Sns']['Message']
85 | logs_data = json.loads(logs)
86 | notification_type=logs_data['notificationType']
87 | if(notification_type==event_type):
88 | LOG_GROUP= log_group
89 | else:
90 | sys.exit()
91 | LOG_STREAM= '{}{}{}'.format(time.strftime('%Y/%m/%d'),'[$LATEST]',secrets.token_hex(16))
92 | try:
93 | client.create_log_group(logGroupName=LOG_GROUP)
94 | except client.exceptions.ResourceAlreadyExistsException:
95 | pass
96 | try:
97 | client.create_log_stream(logGroupName=LOG_GROUP, logStreamName=LOG_STREAM)
98 | except client.exceptions.ResourceAlreadyExistsException:
99 | pass
100 | response = client.describe_log_streams(
101 | logGroupName=LOG_GROUP,
102 | logStreamNamePrefix=LOG_STREAM
103 | )
104 | event_log = {
105 | 'logGroupName': LOG_GROUP,
106 | 'logStreamName': LOG_STREAM,
107 | 'logEvents': [
108 | {
109 | 'timestamp': int(round(time.time() * 1000)),
110 | 'message': logs
111 | }
112 | ],
113 | }
114 | if 'uploadSequenceToken' in response['logStreams'][0]:
115 | event_log.update({'sequenceToken': response['logStreams'][0] ['uploadSequenceToken']})
116 | response = client.put_log_events(**event_log)
117 |
118 | logging.info(response)
119 |
--------------------------------------------------------------------------------
/examples/CustomChannels/Nodejs/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this
4 | software and associated documentation files (the "Software"), to deal in the Software
5 | without restriction, including without limitation the rights to use, copy, modify,
6 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
7 | permit persons to whom the Software is furnished to do so.
8 |
9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
10 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
11 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
12 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
13 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
14 | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
--------------------------------------------------------------------------------
/examples/CustomChannels/Nodejs/README.md:
--------------------------------------------------------------------------------
1 | # Pinpoint Nodejs Hello World Custom Channel
2 |
3 | This application is a Hello World Node.js example of a custom channel which can be used in integration with Amazon Pinpoint Campaigns and Journeys.
4 | ## SAM Details
5 |
6 | This project contains source code and supporting files for a serverless application that you can deploy with the SAM CLI. It includes the following files and folders.
7 |
8 | - app.js - Code for the application's Lambda function.
9 | - events.json - Invocation events that you can use to invoke the function.
10 | - tests - Unit tests for the application code.
11 | - template.yaml - A template that defines the application's AWS resources.
12 |
13 | The application uses a singular Lambda function that can be invoked by the Amazon pinpoint service. This resource is defined in the `template.yaml` file in this project. You can update the template to add AWS resources through the same deployment process that updates your application code.
14 |
15 | If you prefer to use an integrated development environment (IDE) to build and test your application, you can use the AWS Toolkit.
16 | The AWS Toolkit is an open source plug-in for popular IDEs that uses the SAM CLI to build and deploy serverless applications on AWS. The AWS Toolkit also adds a simplified step-through debugging experience for Lambda function code. See the following links to get started.
17 |
18 | * [PyCharm](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html)
19 | * [IntelliJ](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html)
20 | * [VS Code](https://docs.aws.amazon.com/toolkit-for-vscode/latest/userguide/welcome.html)
21 | * [Visual Studio](https://docs.aws.amazon.com/toolkit-for-visual-studio/latest/user-guide/welcome.html)
22 |
23 | ## Deploy the sample application
24 |
25 | The Serverless Application Model Command Line Interface (SAM CLI) is an extension of the AWS CLI that adds functionality for building and testing Lambda applications. It uses Docker to run your functions in an Amazon Linux environment that matches Lambda. It can also emulate your application's build environment and API.
26 |
27 | To use the SAM CLI, you need the following tools.
28 |
29 | * SAM CLI - [Install the SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html)
30 | * [Python 3 installed](https://www.python.org/downloads/)
31 | * Docker - [Install Docker community edition](https://hub.docker.com/search/?type=edition&offering=community)
32 |
33 | To build and deploy your application for the first time, run the following in your shell:
34 |
35 | ```bash
36 | sam build
37 | sam deploy --guided
38 | ```
39 |
40 | The first command will build the source of your application. The second command will package and deploy your application to AWS, with a series of prompts:
41 |
42 | * **Stack Name**: The name of the stack to deploy to CloudFormation. This should be unique to your account and region, and a good starting point would be something matching your project name.
43 | * **AWS Region**: The AWS region you want to deploy your app to
44 | * **Parameter PinpointProjectId [*]**: Amazon Pinpoint Project ID if one already exists, leave blank to provide permissions to all Pinpoint Applications.
45 | * **Parameter CustomChannelFunctionName []**: Lambda function Name(Optional)
46 | * **Confirm changes before deploy**: If set to yes, any change sets will be shown to you before execution for manual review. If set to no, the AWS SAM CLI will automatically deploy application changes.
47 | * **Allow SAM CLI IAM role creation**: Many AWS SAM templates, including this example, create AWS IAM roles required for the AWS Lambda function(s) included to access AWS services. By default, these are scoped down to minimum required permissions. To deploy an AWS CloudFormation stack which creates or modified IAM roles, the `CAPABILITY_IAM` value for `capabilities` must be provided. If permission isn't provided through this prompt, to deploy this example you must explicitly pass `--capabilities CAPABILITY_IAM` to the `sam deploy` command.
48 | * **Disable rollback** : Specify whether to rollback your AWS CloudFormation stack if an error occurs during a deployment. By default, your AWS CloudFormation stack rolls back to the last stable state if there's an error during a deployment. If you specify 'N' and an error occurs during a deployment, resources that have been created or updated before the error occurs aren't rolled back.
49 | * **Save arguments to samconfig.toml**: If set to yes, your choices will be saved to a configuration file inside the project, so that in the future you can just re-run `sam deploy` without parameters to deploy changes to your application.
50 | * **SAM configuration file** :The path and file name of the configuration file containing default parameter values to use.
51 | * **SAM configuration environment** : The environment name specifying the default parameter values in the configuration file to use. The default value is default
52 |
53 | ## Use the SAM CLI to build and test locally
54 |
55 | Build your application with the `sam build` command.
56 |
57 | ```bash
58 | $ sam build
59 | ```
60 |
61 | The SAM CLI installs dependencies defined in `customchannel/package.json`, creates a deployment package, and saves it in the `.aws-sam/build` folder.
62 |
63 | Test a single function by invoking it directly with a test event. An event is a JSON document that represents the input that the function receives from the event source. Test events are included in the `events` folder in this project.
64 |
65 | Run functions locally and invoke them with the `sam local invoke` command.
66 |
67 | ```bash
68 | $ sam local invoke CustomChannelFunction --event events/event.json
69 | ```
70 |
71 |
72 | ## Add a resource to your application
73 | The application template uses AWS Serverless Application Model (AWS SAM) to define application resources. AWS SAM is an extension of AWS CloudFormation with a simpler syntax for configuring common serverless application resources such as functions, triggers, and APIs. For resources not included in [the SAM specification](https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md), you can use standard [AWS CloudFormation](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-template-resource-type-ref.html) resource types.
74 |
75 | ## Fetch, tail, and filter Lambda function logs
76 |
77 | To simplify troubleshooting, SAM CLI has a command called `sam logs`. `sam logs` lets you fetch logs generated by your deployed Lambda function from the command line. In addition to printing the logs on the terminal, this command has several nifty features to help you quickly find the bug.
78 |
79 | `NOTE`: This command works for all AWS Lambda functions; not just the ones you deploy using SAM.
80 |
81 | ```bash
82 | $ sam logs -n CustomChannelFunction --stack-name <stack-name> --tail
83 | ```
84 |
85 | You can find more information and examples about filtering Lambda function logs in the [SAM CLI Documentation](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-logging.html).
86 |
87 | ## Unit tests
88 |
89 | Tests are defined in the `tests` folder in this project. Use NPM to install the Mocha test framework (https://mochajs.org/) and run the unit tests.
90 |
91 | ```bash
92 | $ npm install
93 | $ npm run test
94 | ```
95 |
96 | ## Cleanup
97 |
98 | To delete the sample application that you created, use the AWS CLI. Assuming you used your project name for the stack name, you can run the following:
99 |
100 | ```bash
101 | aws cloudformation delete-stack --stack-name <stack-name>
102 | ```
103 |
104 | ## Resources
105 |
106 | See the [AWS SAM developer guide](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/what-is-sam.html) for an introduction to SAM specification, the SAM CLI, and serverless application concepts.
107 |
108 | Next, you can use AWS Serverless Application Repository to deploy ready to use Apps that go beyond hello world samples and learn how authors developed their applications: [AWS Serverless Application Repository main page](https://aws.amazon.com/serverless/serverlessrepo/)
109 |
--------------------------------------------------------------------------------
/examples/CustomChannels/Nodejs/customchannel/.npmignore:
--------------------------------------------------------------------------------
1 | tests/*
2 |
--------------------------------------------------------------------------------
/examples/CustomChannels/Nodejs/customchannel/app.js:
--------------------------------------------------------------------------------
1 | exports.handler = async (event, context) => {
2 |     console.log(JSON.stringify(event)) // log the full invocation payload for inspection in CloudWatch Logs
3 |     return "Hello World!" // placeholder result for this hello-world custom-channel example
4 | 
5 | }
6 |
7 |
--------------------------------------------------------------------------------
/examples/CustomChannels/Nodejs/customchannel/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "customchannel",
3 | "version": "1.0.0",
4 | "description": "A function invoked by an Amazon Pinpoint Campaign or Journey",
5 | "main": "app.js",
6 | "scripts": {
7 | "test": "mocha tests/unit/"
8 | },
9 | "author": "",
10 | "license": "MIT",
11 | "dependencies": {
12 | "chai": "^4.3.6",
13 | "mocha": "^10.0.0"
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/examples/CustomChannels/Nodejs/customchannel/tests/unit/test-handler.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | const app = require('../../app.js');
4 | const chai = require('chai');
5 | const expect = chai.expect;
6 | var event, context;
7 |
8 | describe('Tests index', function () {
9 |     it('verifies successful response', async () => {
10 |         const result = await app.handler(event, context) // event and context are intentionally left undefined here
11 |         expect(result).to.be.an('string');
12 |         expect(result).to.be.equal("Hello World!");
13 |     });
14 | });
15 |
--------------------------------------------------------------------------------
/examples/CustomChannels/Nodejs/events/event.json:
--------------------------------------------------------------------------------
1 | {
2 | "Message": {},
3 | "ApplicationId": "71b0f21869ac444eb0185d43539b97ea",
4 | "CampaignId": "54115c33de414441b604a71f59a2ccc3",
5 | "TreatmentId": "0",
6 | "ActivityId": "ecf06111556d4c1ca09b1b197469a61a",
7 | "ScheduledTime": "2020-04-19T00:33:24.609Z",
8 | "Endpoints": {
9 | "EndpointId-1234": {
10 | "ChannelType": "CUSTOM",
11 | "Address": "+14255555555",
12 | "EndpointStatus": "ACTIVE",
13 | "OptOut": "NONE",
14 | "Location": {
15 | "Country": "USA"
16 | },
17 | "Demographic": {
18 | "Make": "Apple",
19 | "Platform": "ios"
20 | },
21 | "EffectiveDate": "2020-04-03T22:23:23.597Z",
22 | "Attributes": {
23 | "FirstName": [
24 | "Test"
25 | ]
26 | },
27 | "User": {
28 | "UserId": "user1"
29 | },
30 | "CreationDate": "2020-04-03T22:23:23.597Z"
31 | }
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/examples/CustomChannels/Nodejs/template.yaml:
--------------------------------------------------------------------------------
1 | AWSTemplateFormatVersion: '2010-09-09'
2 | Description: Amazon Pinpoint Custom Channel Hello World AWS lambda function written in nodejs.
3 | Transform: AWS::Serverless-2016-10-31
4 |
5 | Parameters:
6 | PinpointProjectId:
7 | Type: String
8 | Description: Amazon Pinpoint Project ID if one already exists, leave blank to provide permissions to all Pinpoint Applications.
9 | AllowedPattern: '^[a-zA-Z0-9*]*$'
10 | Default: '*'
11 |
12 | CustomChannelFunctionName:
13 | Type: String
14 | Description: Amazon Pinpoint custom channel lambda function name.
15 |
16 |
17 | Conditions:
18 | KeyNameExist: !Not [!Equals [!Ref CustomChannelFunctionName,""]]
19 |
20 | Resources:
21 |   CustomChannelFunction:
22 |     Type: AWS::Serverless::Function
23 |     Properties:
24 |       FunctionName : !If [KeyNameExist, !Ref CustomChannelFunctionName, !Ref "AWS::NoValue"]
25 |       CodeUri: customchannel/
26 |       Handler: app.handler
27 |       Runtime: nodejs14.x # NOTE(review): nodejs14.x has reached end of support in Lambda; move to a supported Node.js runtime
28 |       Timeout: 60
29 |
30 | PinpointInvokePermission:
31 | Type: AWS::Lambda::Permission
32 | Properties:
33 | Action: lambda:InvokeFunction
34 | FunctionName: !Ref CustomChannelFunction
35 | Principal: !Sub pinpoint.${AWS::Region}.amazonaws.com
36 | SourceArn: !Sub
37 | - arn:${AWS::Partition}:mobiletargeting:${AWS::Region}:${AWS::AccountId}:apps/${AppID}/*
38 | - AppID: !Ref PinpointProjectId
39 |
40 | Outputs:
41 | CustomChannelFunctionArn:
42 | Description: "A function invoked by an Amazon Pinpoint Campaign or Journey."
43 | Value: !GetAtt CustomChannelFunction.Arn
44 |
--------------------------------------------------------------------------------
/examples/CustomChannels/Python/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this
4 | software and associated documentation files (the "Software"), to deal in the Software
5 | without restriction, including without limitation the rights to use, copy, modify,
6 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
7 | permit persons to whom the Software is furnished to do so.
8 |
9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
10 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
11 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
12 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
13 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
14 | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
--------------------------------------------------------------------------------
/examples/CustomChannels/Python/README.md:
--------------------------------------------------------------------------------
1 | # Pinpoint Python Hello World Custom Channel
2 |
3 | This application is a Hello World Python example of a custom channel which can be used in integration with Amazon Pinpoint Campaigns and Journeys.
4 |
5 | ## SAM Details
6 |
7 | This project contains source code and supporting files for a serverless application that you can deploy with the SAM CLI. It includes the following files and folders.
8 |
9 | - app.py - Code for the application's Lambda function.
10 | - events.json - Invocation events that you can use to invoke the function.
11 | - tests - Unit tests for the application code.
12 | - template.yaml - A template that defines the application's AWS resources.
13 |
14 | The application uses a singular Lambda function that can be invoked by the Amazon pinpoint service. This resource is defined in the `template.yaml` file in this project. You can update the template to add AWS resources through the same deployment process that updates your application code.
15 |
16 | If you prefer to use an integrated development environment (IDE) to build and test your application, you can use the AWS Toolkit.
17 | The AWS Toolkit is an open source plug-in for popular IDEs that uses the SAM CLI to build and deploy serverless applications on AWS. The AWS Toolkit also adds a simplified step-through debugging experience for Lambda function code. See the following links to get started.
18 |
19 | * [PyCharm](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html)
20 | * [IntelliJ](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html)
21 | * [VS Code](https://docs.aws.amazon.com/toolkit-for-vscode/latest/userguide/welcome.html)
22 | * [Visual Studio](https://docs.aws.amazon.com/toolkit-for-visual-studio/latest/user-guide/welcome.html)
23 |
24 | ## Deploy the sample application
25 |
26 | The Serverless Application Model Command Line Interface (SAM CLI) is an extension of the AWS CLI that adds functionality for building and testing Lambda applications. It uses Docker to run your functions in an Amazon Linux environment that matches Lambda. It can also emulate your application's build environment and API.
27 |
28 | To use the SAM CLI, you need the following tools.
29 |
30 | * SAM CLI - [Install the SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html)
31 | * [Python 3 installed](https://www.python.org/downloads/)
32 | * Docker - [Install Docker community edition](https://hub.docker.com/search/?type=edition&offering=community)
33 |
34 | To build and deploy your application for the first time, run the following in your shell:
35 |
36 | ```bash
37 | sam build --use-container
38 | sam deploy --guided
39 | ```
40 |
41 | The first command will build the source of your application. The second command will package and deploy your application to AWS, with a series of prompts:
42 |
43 | * **Stack Name**: The name of the stack to deploy to CloudFormation. This should be unique to your account and region, and a good starting point would be something matching your project name.
44 | * **AWS Region**: The AWS region you want to deploy your app to
45 | * **Parameter PinpointProjectId [*]**: Amazon Pinpoint Project ID if one already exists, leave blank to provide permissions to all Pinpoint Applications.
46 | * **Parameter CustomChannelFunctionName []**: Lambda function Name(Optional)
47 | * **Confirm changes before deploy**: If set to yes, any change sets will be shown to you before execution for manual review. If set to no, the AWS SAM CLI will automatically deploy application changes.
48 | * **Allow SAM CLI IAM role creation**: Many AWS SAM templates, including this example, create AWS IAM roles required for the AWS Lambda function(s) included to access AWS services. By default, these are scoped down to minimum required permissions. To deploy an AWS CloudFormation stack which creates or modified IAM roles, the `CAPABILITY_IAM` value for `capabilities` must be provided. If permission isn't provided through this prompt, to deploy this example you must explicitly pass `--capabilities CAPABILITY_IAM` to the `sam deploy` command.
49 | * **Disable rollback** : Specify whether to rollback your AWS CloudFormation stack if an error occurs during a deployment. By default, your AWS CloudFormation stack rolls back to the last stable state if there's an error during a deployment. If you specify 'N' and an error occurs during a deployment, resources that have been created or updated before the error occurs aren't rolled back.
50 | * **Save arguments to samconfig.toml**: If set to yes, your choices will be saved to a configuration file inside the project, so that in the future you can just re-run `sam deploy` without parameters to deploy changes to your application.
51 | * **SAM configuration file** :The path and file name of the configuration file containing default parameter values to use.
52 | * **SAM configuration environment** : The environment name specifying the default parameter values in the configuration file to use. The default value is default
53 |
54 | ## Use the SAM CLI to build and test locally
55 |
56 | Build your application with the `sam build --use-container` command.
57 |
58 | ```bash
59 | $ sam build --use-container
60 | ```
61 |
62 | The SAM CLI installs dependencies defined in `customchannel/requirements.txt`, creates a deployment package, and saves it in the `.aws-sam/build` folder.
63 |
64 | Test a single function by invoking it directly with a test event. An event is a JSON document that represents the input that the function receives from the event source. Test events are included in the `events` folder in this project.
65 |
66 | Run functions locally and invoke them with the `sam local invoke` command.
67 |
68 | ```bash
69 | $ sam local invoke CustomChannelFunction --event events/event.json
70 | ```
71 |
72 | ## Add a resource to your application
73 | The application template uses AWS Serverless Application Model (AWS SAM) to define application resources. AWS SAM is an extension of AWS CloudFormation with a simpler syntax for configuring common serverless application resources such as functions, triggers, and APIs. For resources not included in [the SAM specification](https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md), you can use standard [AWS CloudFormation](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-template-resource-type-ref.html) resource types.
74 |
75 | ## Fetch, tail, and filter Lambda function logs
76 |
77 | To simplify troubleshooting, SAM CLI has a command called `sam logs`. `sam logs` lets you fetch logs generated by your deployed Lambda function from the command line. In addition to printing the logs on the terminal, this command has several nifty features to help you quickly find the bug.
78 |
79 | `NOTE`: This command works for all AWS Lambda functions; not just the ones you deploy using SAM.
80 |
81 | ```bash
82 | $ sam logs -n CustomChannelFunction --stack-name <stack-name> --tail
83 | ```
84 |
85 | You can find more information and examples about filtering Lambda function logs in the [SAM CLI Documentation](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-logging.html).
86 |
87 | ## Unit tests
88 |
89 | Tests are defined in the `tests` folder in this project. Use PIP to install [pytest](https://docs.pytest.org/en/latest/) and pytest-mock, then run the unit tests.
90 |
91 | ```bash
92 | $ pip install pytest pytest-mock --user
93 | $ python -m pytest tests/ -v
94 | ```
95 |
96 | ## Cleanup
97 |
98 | To delete the sample application that you created, use the AWS CLI. Assuming you used your project name for the stack name, you can run the following:
99 |
100 | ```bash
101 | aws cloudformation delete-stack --stack-name <stack-name>
102 | ```
103 |
104 | ## Resources
105 |
106 | See the [AWS SAM developer guide](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/what-is-sam.html) for an introduction to SAM specification, the SAM CLI, and serverless application concepts.
107 |
108 | Next, you can use AWS Serverless Application Repository to deploy ready to use Apps that go beyond hello world samples and learn how authors developed their applications: [AWS Serverless Application Repository main page](https://aws.amazon.com/serverless/serverlessrepo/)
109 |
--------------------------------------------------------------------------------
/examples/CustomChannels/Python/customchannel/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/examples/CustomChannels/Python/customchannel/__init__.py
--------------------------------------------------------------------------------
/examples/CustomChannels/Python/customchannel/app.py:
--------------------------------------------------------------------------------
1 |
2 | # This function can be used within an Amazon Pinpoint Campaign or Amazon Pinpoint Journey.
3 |
4 | def lambda_handler(event, context):
5 | 
6 |     # print the payload the Lambda was invoked with
7 |     print(event)
8 |     return "Hello World!"  # placeholder response for this hello-world custom-channel example
9 |
--------------------------------------------------------------------------------
/examples/CustomChannels/Python/customchannel/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/examples/CustomChannels/Python/customchannel/requirements.txt
--------------------------------------------------------------------------------
/examples/CustomChannels/Python/events/event.json:
--------------------------------------------------------------------------------
1 | {
2 | "Message": {},
3 | "ApplicationId": "71b0f21869ac444eb0185d43539b97ea",
4 | "CampaignId": "54115c33de414441b604a71f59a2ccc3",
5 | "TreatmentId": "0",
6 | "ActivityId": "ecf06111556d4c1ca09b1b197469a61a",
7 | "ScheduledTime": "2020-04-19T00:33:24.609Z",
8 | "Endpoints": {
9 | "EndpointId-1234": {
10 | "ChannelType": "CUSTOM",
11 | "Address": "+14255555555",
12 | "EndpointStatus": "ACTIVE",
13 | "OptOut": "NONE",
14 | "Location": {
15 | "Country": "USA"
16 | },
17 | "Demographic": {
18 | "Make": "Apple",
19 | "Platform": "ios"
20 | },
21 | "EffectiveDate": "2020-04-03T22:23:23.597Z",
22 | "Attributes": {
23 | "FirstName": [
24 | "Test"
25 | ]
26 | },
27 | "User": {
28 | "UserId": "user1"
29 | },
30 | "CreationDate": "2020-04-03T22:23:23.597Z"
31 | }
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/examples/CustomChannels/Python/template.yaml:
--------------------------------------------------------------------------------
1 | AWSTemplateFormatVersion: '2010-09-09'
2 | Description: Amazon Pinpoint Custom Channel Hello World AWS lambda function written in Python.
3 | Transform: AWS::Serverless-2016-10-31
4 |
5 | Parameters:
6 | PinpointProjectId:
7 | Type: String
8 | Description: Amazon Pinpoint Project ID if one already exists, leave blank to provide permissions to all Pinpoint Applications.
9 | AllowedPattern: '^[a-zA-Z0-9*]*$'
10 | Default: '*'
11 |
12 | CustomChannelFunctionName:
13 | Type: String
14 | Description: Amazon Pinpoint custom channel lambda function name.
15 |
16 | Conditions:
17 | KeyNameExist: !Not [!Equals [!Ref CustomChannelFunctionName,""]]
18 |
19 | Resources:
20 |   CustomChannelFunction:
21 |     Type: AWS::Serverless::Function
22 |     Properties:
23 |       FunctionName: !If [KeyNameExist, !Ref CustomChannelFunctionName, !Ref "AWS::NoValue"]
24 |       CodeUri: customchannel/
25 |       Handler: app.lambda_handler
26 |       Runtime: python3.7 # NOTE(review): python3.7 has reached end of support in Lambda; move to a supported Python runtime
27 |       Timeout: 60
28 |
29 | PinpointInvokePermission:
30 | Type: AWS::Lambda::Permission
31 | Properties:
32 | Action: lambda:InvokeFunction
33 | FunctionName: !Ref CustomChannelFunction
34 | Principal: !Sub pinpoint.${AWS::Region}.amazonaws.com
35 | SourceArn: !Sub
36 | - arn:${AWS::Partition}:mobiletargeting:${AWS::Region}:${AWS::AccountId}:apps/${AppID}/*
37 | - AppID: !Ref PinpointProjectId
38 |
39 | Outputs:
40 | CustomChannelFunctionArn:
41 | Description: "A function invoked by an Amazon Pinpoint Campaign or Journey."
42 | Value: !GetAtt CustomChannelFunction.Arn
43 |
--------------------------------------------------------------------------------
/examples/CustomChannels/Python/tests/unit/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/examples/CustomChannels/Python/tests/unit/__init__.py
--------------------------------------------------------------------------------
/examples/CustomChannels/Python/tests/unit/__pycache__/__init__.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/examples/CustomChannels/Python/tests/unit/__pycache__/__init__.cpython-37.pyc
--------------------------------------------------------------------------------
/examples/CustomChannels/Python/tests/unit/__pycache__/test_handler.cpython-37-pytest-7.1.2.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/examples/CustomChannels/Python/tests/unit/__pycache__/test_handler.cpython-37-pytest-7.1.2.pyc
--------------------------------------------------------------------------------
/examples/CustomChannels/Python/tests/unit/test_handler.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from customchannel import app
4 |
5 | @pytest.fixture()
6 | def pinpoint_event():
7 | """ Generates A Pinpoint Event"""
8 |
9 | return {
10 | "Message": {},
11 | "ApplicationId": "71b0f21869ac444eb0185d43539b97ea",
12 | "CampaignId": "54115c33de414441b604a71f59a2ccc3",
13 | "TreatmentId": "0",
14 | "ActivityId": "ecf06111556d4c1ca09b1b197469a61a",
15 | "ScheduledTime": "2020-04-19T00:33:24.609Z",
16 | "Endpoints": {
17 | "EndpointId-1234": {
18 | "ChannelType": "CUSTOM",
19 | "Address": "+14255555555",
20 | "EndpointStatus": "ACTIVE",
21 | "OptOut": "NONE",
22 | "Location": {
23 | "Country": "USA"
24 | },
25 | "Demographic": {
26 | "Make": "Apple",
27 | "Platform": "ios"
28 | },
29 | "EffectiveDate": "2020-04-03T22:23:23.597Z",
30 | "Attributes": {
31 | "FirstName": [
32 | "Test"
33 | ]
34 | },
35 | "User": {
36 | "UserId": "austin52789"
37 | },
38 | "CreationDate": "2020-04-03T22:23:23.597Z"
39 | }
40 | }
41 | }
42 |
43 |
44 | def test_lambda_handler(pinpoint_event):
45 | 
46 |     return_value = app.lambda_handler(pinpoint_event, "")  # empty string stands in for the unused Lambda context
47 | 
48 |     assert return_value == "Hello World!"
49 |
--------------------------------------------------------------------------------
/examples/Global SMS Planning Sheet.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/examples/Global SMS Planning Sheet.xlsx
--------------------------------------------------------------------------------
/examples/Outbound_calling.json:
--------------------------------------------------------------------------------
1 | {
2 | "modules": [
3 | {
4 | "id": "36116af3-289a-4aa8-bf87-deb98e24d070",
5 | "type": "Disconnect",
6 | "branches": [],
7 | "parameters": [],
8 | "metadata": {
9 | "position": {
10 | "x": 711.5,
11 | "y": 61.5
12 | }
13 | }
14 | },
15 | {
16 | "id": "7dd279f6-d10c-400d-b7dd-0852eecef283",
17 | "type": "PlayPrompt",
18 | "branches": [
19 | {
20 | "condition": "Success",
21 | "transition": "36116af3-289a-4aa8-bf87-deb98e24d070"
22 | }
23 | ],
24 | "parameters": [
25 | {
26 | "name": "Text",
27 | "value": "$.Attributes.Message",
28 | "namespace": "External"
29 | },
30 | {
31 | "name": "TextToSpeechType",
32 | "value": "text"
33 | }
34 | ],
35 | "metadata": {
36 | "position": {
37 | "x": 269,
38 | "y": 24
39 | },
40 | "useDynamic": true
41 | }
42 | }
43 | ],
44 | "version": "1",
45 | "type": "contactFlow",
46 | "start": "7dd279f6-d10c-400d-b7dd-0852eecef283",
47 | "metadata": {
48 | "entryPointPosition": {
49 | "x": 15.5,
50 | "y": 15.5
51 | },
52 | "snapToGrid": false,
53 | "name": "Outbound calling",
54 | "description": null,
55 | "type": "contactFlow",
56 | "status": "published",
57 | "hash": "74160b39a2f0bec4e7bb5de9ab28314712ce983d4772ea119beb08b156871d7d"
58 | }
59 | }
60 |
--------------------------------------------------------------------------------
/examples/multi-region/images/endpoint-imports-api.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/examples/multi-region/images/endpoint-imports-api.png
--------------------------------------------------------------------------------
/examples/multi-region/images/endpoint-imports-s3-active-active.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/examples/multi-region/images/endpoint-imports-s3-active-active.png
--------------------------------------------------------------------------------
/examples/multi-region/images/endpoint-imports-s3-active-passive.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/examples/multi-region/images/endpoint-imports-s3-active-passive.png
--------------------------------------------------------------------------------
/examples/multi-region/images/picture1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/examples/multi-region/images/picture1.png
--------------------------------------------------------------------------------
/examples/multi-region/images/pinpoint-active-active.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/examples/multi-region/images/pinpoint-active-active.png
--------------------------------------------------------------------------------
/examples/multi-region/images/pinpoint-active-passive.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/examples/multi-region/images/pinpoint-active-passive.png
--------------------------------------------------------------------------------
/examples/multi-region/images/ses-event-based-list-sync.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/examples/multi-region/images/ses-event-based-list-sync.png
--------------------------------------------------------------------------------
/examples/multi-region/images/ses-list-replication.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/examples/multi-region/images/ses-list-replication.png
--------------------------------------------------------------------------------
/examples/sample_data_S3_triggered_import.csv:
--------------------------------------------------------------------------------
1 | ChannelType,Address,FirstName,LastName,Timezone
2 | SMS,14256791901,John,Smith,CST
3 | SMS,(202) 555-0191,Ivan,Popov,PST
4 | SMS,605-555-0198,Mary,Loweless,EST
5 | SMS,14256791901,Aaron,Slew,CST
6 | SMS,(+351) 282 43 50 50,Tim,Doe,IST
7 |
--------------------------------------------------------------------------------
/examples/sample_file.csv:
--------------------------------------------------------------------------------
1 | sms_number,message,external_campaign_id,short_code,message_type,id_1,id_2
2 | +1[CUSTOMER_NUMBER_HERE],"Here is a long message, with a comma",12345,[SHORT_OR_LONG_CODE_HERE],TRANSACTIONAL,tracking 1, tracking2
3 | +1[CUSTOMER_NUMBER_HERE],"Here is a long message, with a comma",12345,[SHORT_OR_LONG_CODE_HERE],TRANSACTIONAL,tracking 1, tracking2
4 |
--------------------------------------------------------------------------------
/images/Advanced_Segmentation_S3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/images/Advanced_Segmentation_S3.png
--------------------------------------------------------------------------------
/images/AppFlow_Salesforce.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/images/AppFlow_Salesforce.png
--------------------------------------------------------------------------------
/images/Connect_as_a_Channel.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/images/Connect_as_a_Channel.png
--------------------------------------------------------------------------------
/images/Create_campaign.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/images/Create_campaign.png
--------------------------------------------------------------------------------
/images/DUE-engageEvents-dashboardauto_x2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/images/DUE-engageEvents-dashboardauto_x2.jpg
--------------------------------------------------------------------------------
/images/EventDB_ArchDiagram.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/images/EventDB_ArchDiagram.png
--------------------------------------------------------------------------------
/images/Event_Based_Segmentation.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/images/Event_Based_Segmentation.png
--------------------------------------------------------------------------------
/images/External_Attributes.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/images/External_Attributes.png
--------------------------------------------------------------------------------
/images/External_Templates.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/images/External_Templates.png
--------------------------------------------------------------------------------
/images/Federated_Segmentation.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/images/Federated_Segmentation.png
--------------------------------------------------------------------------------
/images/Message_Archiver.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/images/Message_Archiver.png
--------------------------------------------------------------------------------
/images/Phone_number_validate_statemachine.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/images/Phone_number_validate_statemachine.png
--------------------------------------------------------------------------------
/images/Pinpoint_Event_Processing.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/images/Pinpoint_Event_Processing.png
--------------------------------------------------------------------------------
/images/S3_Data_Lake.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/images/S3_Data_Lake.png
--------------------------------------------------------------------------------
/images/S3_triggered_import.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/images/S3_triggered_import.png
--------------------------------------------------------------------------------
/images/SES_Auto_Reply.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/images/SES_Auto_Reply.png
--------------------------------------------------------------------------------
/images/SES_Event_Processing.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/images/SES_Event_Processing.png
--------------------------------------------------------------------------------
/images/SMS-retry-mechanism.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/images/SMS-retry-mechanism.png
--------------------------------------------------------------------------------
/images/SMS_S3_drop.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/images/SMS_S3_drop.png
--------------------------------------------------------------------------------
/images/Scheduled-VDM-stats-export.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/images/Scheduled-VDM-stats-export.png
--------------------------------------------------------------------------------
/images/Self-managed_Opt_Outs.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/images/Self-managed_Opt_Outs.png
--------------------------------------------------------------------------------
/images/Simple_CMS.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/images/Simple_CMS.png
--------------------------------------------------------------------------------
/integrations/amplitude-sam/.gitignore:
--------------------------------------------------------------------------------
1 | # Created by https://www.gitignore.io/api/osx,node,visualstudiocode
2 | # Edit at https://www.gitignore.io/?templates=osx,node,visualstudiocode
3 |
4 | ### Node ###
5 | # Logs
6 | logs
7 | *.log
8 | npm-debug.log*
9 | yarn-debug.log*
10 | yarn-error.log*
11 | lerna-debug.log*
12 |
13 | # Diagnostic reports (https://nodejs.org/api/report.html)
14 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
15 |
16 | # Runtime data
17 | pids
18 | *.pid
19 | *.seed
20 | *.pid.lock
21 |
22 | # Directory for instrumented libs generated by jscoverage/JSCover
23 | lib-cov
24 |
25 | # Coverage directory used by tools like istanbul
26 | coverage
27 | *.lcov
28 |
29 | # nyc test coverage
30 | .nyc_output
31 |
32 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
33 | .grunt
34 |
35 | # Bower dependency directory (https://bower.io/)
36 | bower_components
37 |
38 | # node-waf configuration
39 | .lock-wscript
40 |
41 | # Compiled binary addons (https://nodejs.org/api/addons.html)
42 | build/Release
43 |
44 | # Dependency directories
45 | node_modules/
46 | jspm_packages/
47 |
48 | # TypeScript v1 declaration files
49 | typings/
50 |
51 | # TypeScript cache
52 | *.tsbuildinfo
53 |
54 | # Optional npm cache directory
55 | .npm
56 |
57 | # Optional eslint cache
58 | .eslintcache
59 |
60 | # Optional REPL history
61 | .node_repl_history
62 |
63 | # Output of 'npm pack'
64 | *.tgz
65 |
66 | # Yarn Integrity file
67 | .yarn-integrity
68 |
69 | # dotenv environment variables file
70 | .env
71 | .env.test
72 |
73 | # parcel-bundler cache (https://parceljs.org/)
74 | .cache
75 |
76 | # next.js build output
77 | .next
78 |
79 | # nuxt.js build output
80 | .nuxt
81 |
82 | # rollup.js default build output
83 | dist/
84 |
85 | # Uncomment the public line if your project uses Gatsby
86 | # https://nextjs.org/blog/next-9-1#public-directory-support
87 | # https://create-react-app.dev/docs/using-the-public-folder/#docsNav
88 | # public
89 |
90 | # Storybook build outputs
91 | .out
92 | .storybook-out
93 |
94 | # vuepress build output
95 | .vuepress/dist
96 |
97 | # Serverless directories
98 | .serverless/
99 |
100 | # FuseBox cache
101 | .fusebox/
102 |
103 | # DynamoDB Local files
104 | .dynamodb/
105 |
106 | # Temporary folders
107 | tmp/
108 | temp/
109 |
110 | ### OSX ###
111 | # General
112 | .DS_Store
113 | .AppleDouble
114 | .LSOverride
115 |
116 | # Icon must end with two \r
117 | Icon
118 |
119 | # Thumbnails
120 | ._*
121 |
122 | # Files that might appear in the root of a volume
123 | .DocumentRevisions-V100
124 | .fseventsd
125 | .Spotlight-V100
126 | .TemporaryItems
127 | .Trashes
128 | .VolumeIcon.icns
129 | .com.apple.timemachine.donotpresent
130 |
131 | # Directories potentially created on remote AFP share
132 | .AppleDB
133 | .AppleDesktop
134 | Network Trash Folder
135 | Temporary Items
136 | .apdisk
137 |
138 | ### VisualStudioCode ###
139 | .vscode/*
140 | !.vscode/settings.json
141 | !.vscode/tasks.json
142 | !.vscode/launch.json
143 | !.vscode/extensions.json
144 |
145 | ### VisualStudioCode Patch ###
146 | # Ignore all local history of files
147 | .history
148 |
149 | # End of https://www.gitignore.io/api/osx,node,visualstudiocode
150 |
151 | env.json
152 | samconfig.toml
153 | function.zip
154 |
155 | # Build folder
156 |
157 | */build/*
158 | template-export.yml
--------------------------------------------------------------------------------
/integrations/amplitude-sam/Architecture.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/integrations/amplitude-sam/Architecture.png
--------------------------------------------------------------------------------
/integrations/amplitude-sam/README.md:
--------------------------------------------------------------------------------
1 | # Amplitude and Pinpoint Cohorts Integration Application
2 |
 3 | This SAM application provides infrastructure to take Amplitude user cohort export files from an S3 bucket and import them into a Pinpoint project.
4 |
5 | ## Architecture
6 |
7 | 
8 |
9 | ## Data Retention Considerations
10 | Note that the files written to S3 in the architecture would remain indefinitely. It's recommended that you implement S3 Lifecycle policies to remove these files based on your particular information security and data retention requirements. See [Managing your storage lifecycle](https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-lifecycle-mgmt.html) for more information.
11 |
12 | ## Prerequisites
13 | * [A Pinpoint Project](https://docs.aws.amazon.com/pinpoint/latest/userguide/gettingstarted-create-project.html)
14 | * AWS CLI - [Install the AWS CLI](https://aws.amazon.com/cli/).
15 | * Node.js - [Install Node.js 12](https://nodejs.org/en/), including the npm package management tool.
16 | * S3 Bucket for Build Deployments
17 |
18 | # Setup Steps
19 |
20 | ## 1. Set up a Pinpoint Project
21 |
22 | If you do not have a Pinpoint project, create one in the AWS console. Instructions are [here](https://docs.aws.amazon.com/pinpoint/latest/userguide/projects-manage.html). Once you have your project set up, get its project ID, you will need it in step 2.
23 |
24 | ## 2. Deploy this Integration
25 |
26 | * Clone the Github repo: git clone `https://github.com/aws-samples/communication-developer-services-reference-architectures.git`
27 | * Change into this directory: `cd communication-developer-services-reference-architectures/integrations/amplitude-sam`
28 | * Package the integration SAM package to an S3 bucket in your account: `aws cloudformation package --template template.yml --s3-bucket [S3 Deployment Bucket] --output-template template-export.yml` (Note that the package bucket should be created in the AWS region in which you plan to run your integration)
29 | * Deploy the integration via AWS CLI: `aws --region [your region] cloudformation deploy --template-file template-export.yml --stack-name amplitude-sam --capabilities CAPABILITY_IAM --parameter-overrides PinpointProjectId=[Pinpoint Project/Application ID] FileDropS3Bucket=[S3 Bucket Name]`
30 |
31 | ## 3. Sync your cohorts to Amazon S3 using the S3 integration in Amplitude.
32 |
33 | Amplitude cohort instructions are [here](https://help.amplitude.com/hc/en-us/articles/360051952812-Sync-Amplitude-cohorts-to-Amazon-S3).
34 |
35 | * You will need to configure the sync to go to the bucket you specified above
36 | * Your Amplitude CSV file will need to contain at least a name and email field; entries in the export without these fields will be ignored
37 | * As Amplitude updates your cohort and exports it to S3, the integration Lambda will update your segment in Pinpoint
38 |
--------------------------------------------------------------------------------
/integrations/amplitude-sam/__tests__/integration/test-sns-logger.js:
--------------------------------------------------------------------------------
1 | const AWS = require("aws-sdk");
2 | const uuid = require("uuid");
3 |
// Resolve after the given number of seconds.
const sleep = (secs) =>
  new Promise((resolve) => {
    setTimeout(resolve, secs * 1000);
  });
6 |
/**
 * Resolve the CloudFormation stack name from the AWS_SAM_STACK_NAME
 * environment variable and verify via the CloudFormation API that the
 * stack exists. Throws when the variable is unset or the lookup fails.
 */
const getAndVerifyStackName = async () => {
  const stackName = process.env["AWS_SAM_STACK_NAME"];
  if (!stackName) {
    throw new Error(
      "Cannot find env var AWS_SAM_STACK_NAME.\n" +
        "Please setup this environment variable with the stack name where we are running integration tests."
    );
  }

  const cfn = new AWS.CloudFormation();
  try {
    await cfn.describeStacks({ StackName: stackName }).promise();
  } catch (e) {
    throw new Error(
      `Cannot find stack ${stackName}: ${e.message}\n` +
        `Please make sure stack with the name "${stackName}" exists.`
    );
  }

  return stackName;
};
36 |
/**
 * Integration test: publishes a test message to the SNS topic and verifies
 * that a corresponding log entry appears in the Lambda's CloudWatch log group.
 */
describe("Test SNS Logger", function () {
  let functionName, topicArn;

  /**
   * Based on the provided stack name, use the CloudFormation API to resolve
   * the physical IDs of the SNSPayloadLogger function and the SimpleTopic.
   */
  beforeAll(async () => {
    const stackName = await getAndVerifyStackName();

    const client = new AWS.CloudFormation();
    const response = await client
      .listStackResources({
        StackName: stackName,
      })
      .promise();

    const resources = response.StackResourceSummaries;

    const functionResource = resources.find(
      (resource) => resource.LogicalResourceId === "SNSPayloadLogger"
    );
    expect(functionResource).not.toBe(undefined);

    const topicResource = resources.find(
      (resource) => resource.LogicalResourceId === "SimpleTopic"
    );
    expect(topicResource).not.toBe(undefined);

    functionName = functionResource.PhysicalResourceId;
    topicArn = topicResource.PhysicalResourceId;
  });

  /**
   * Publish a SNS message so that a log entry containing the integTestId is
   * sent to CloudWatch.
   */
  const publishMessage = (integTestId) => {
    const client = new AWS.SNS();
    return client
      .publish({
        Subject: `IntegTest: ${integTestId}`,
        Message: `integ-test-${integTestId}`,
        TopicArn: topicArn,
      })
      .promise();
  };

  /**
   * Find the latest log stream name; returns null when the log group does
   * not exist yet or contains no streams.
   */
  const getLatestLogStreamName = async (logGroupName) => {
    const client = new AWS.CloudWatchLogs();
    try {
      const response = await client
        .describeLogStreams({
          logGroupName,
          orderBy: "LastEventTime",
          descending: true,
        })
        .promise();
      // BUGFIX: an existing-but-empty log group previously threw a
      // TypeError on logStreams[0]; treat it the same as a missing group.
      if (!response.logStreams || response.logStreams.length === 0) {
        return null;
      }
      return response.logStreams[0].logStreamName;
    } catch (e) {
      if (e.code == "ResourceNotFoundException") {
        return null;
      }
      throw e;
    }
  };

  /**
   * Poll the log group's latest log stream, passing once any event message
   * contains the integTestId string; throws after the retries are used up.
   */
  const checkCloudwatchLogRecorded = async (integTestId) => {
    const logGroupName = `/aws/lambda/${functionName}`;
    const client = new AWS.CloudWatchLogs();

    let retries = 5;
    const startTime = Date.now() - 60 * 1000; // we only look for log entries since 1 min ago
    while (retries >= 0) {
      const logStreamName = await getLatestLogStreamName(logGroupName);
      if (!logStreamName) {
        // BUGFIX: log the group name (logStreamName is null on this path)
        // and consume a retry so a missing log group cannot loop forever.
        console.warn(`Cannot find log group ${logGroupName}, waiting`);
        retries -= 1;
        await sleep(5);
        continue;
      }

      const response = await client
        .getLogEvents({
          logGroupName,
          logStreamName,
          startTime,
          endTime: Date.now(),
          startFromHead: true,
        })
        .promise();

      const matchEvents = response.events.find(
        (event) => event.message && event.message.includes(integTestId)
      );

      if (matchEvents) {
        // event found, pass
        return;
      } else {
        console.warn(
          `Cannot find matching events containing integration test id ${integTestId}, waiting`
        );
        retries -= 1;
        await sleep(5);
      }
    }

    throw new Error(
      `Cannot find matching events containing integration test id ${integTestId} after 5 retries`
    );
  };

  /**
   * Publish a message containing a unique string to the SNS topic using AWS API and
   * check the corresponding log is inserted into cloudwatch
   */
  it("When SNSPayloadLogger called, cloudwatch should have logs recorded", async () => {
    console.info("function name:", functionName, "sns topic:", topicArn);

    // we will use this uuid to verify the recorded log entry is init from this test
    const integTestId = uuid.v4();

    await publishMessage(integTestId);
    await checkCloudwatchLogRecorded(integTestId);
  }, 60000); // timeout 60 secs, it takes some time for cloudwatch log to show up
});
172 |
--------------------------------------------------------------------------------
/integrations/amplitude-sam/__tests__/unit/handlers/sqs-payload-logger.test.js:
--------------------------------------------------------------------------------
1 | // Import all functions from sqs-payload-logger.js
2 | const sqsPayloadLogger = require('../../../src/handlers/sqs-payload-logger.js');
3 |
describe('Test for sqs-payload-logger', function () {
    // Invokes the sqs-payload-logger handler and checks that the incoming
    // payload is echoed to console.info.
    it('Verifies the payload is logged', async () => {
        // Replace console.info with a jest mock so the call can be inspected:
        // https://jestjs.io/docs/en/mock-functions.html
        console.info = jest.fn()

        // Sample event shaped like an SQS message.
        const payload = {
            DelaySeconds: 10,
            MessageAttributes: {
                "Sender": {
                    DataType: "String",
                    StringValue: "sqs-payload-logger"
                }
            },
            MessageBody: "This message was sent by the sqs-payload-logger Lambda function",
            QueueUrl: "SQS_QUEUE_URL"
        }

        await sqsPayloadLogger.sqsPayloadLoggerHandler(payload, null)

        // The handler must log the serialized payload exactly once.
        expect(console.info).toHaveBeenCalledWith(JSON.stringify(payload))
    });
});
30 |
--------------------------------------------------------------------------------
/integrations/amplitude-sam/buildspec.yml:
--------------------------------------------------------------------------------
1 | version: 0.2
2 |
3 | phases:
4 | install:
5 | commands:
6 | # Install all dependencies (including dependencies for running tests)
7 | - npm install
8 | pre_build:
9 | commands:
10 | # Discover and run unit tests in the '__tests__' directory
11 | - npm run test
12 | # Remove all unit tests to reduce the size of the package that will be ultimately uploaded to Lambda
13 | - rm -rf ./__tests__
14 | # Remove all dependencies not needed for the Lambda deployment package (the packages from devDependencies in package.json)
15 | - npm prune --production
16 | build:
17 | commands:
18 | # Use AWS SAM to package the application by using AWS CloudFormation
19 | - aws cloudformation package --template template.yml --s3-bucket $S3_BUCKET --output-template template-export.yml
20 | artifacts:
21 | type: zip
22 | files:
23 | - template-export.yml
24 |
--------------------------------------------------------------------------------
/integrations/amplitude-sam/events/event-sqs.json:
--------------------------------------------------------------------------------
1 | {
2 | "DelaySeconds": 10,
3 | "MessageAttributes": {
4 | "Sender": {
5 | "DataType": "String",
6 | "StringValue": "sqs-payload-logger"
7 | }
8 | },
9 | "MessageBody": "This message was sent by the sqs-payload-logger Lambda function",
10 | "QueueUrl": "SQS_QUEUE_URL"
11 | }
12 |
--------------------------------------------------------------------------------
/integrations/amplitude-sam/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "replaced-by-user-input",
3 | "description": "replaced-by-user-input",
4 | "version": "0.0.1",
5 | "private": true,
6 | "dependencies": {},
7 | "devDependencies": {
8 | "aws-sdk": "^2.815.0",
9 | "jest": "^26.6.3",
10 | "uuid": "^8.3.2"
11 | },
12 | "scripts": {
13 | "test": "jest --roots __tests__/unit",
14 | "integ-test": "jest --roots __tests__/integration"
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/integrations/amplitude-sam/src/handlers/s3-trigger-processor/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "s3-trigger-processor",
3 | "version": "1.0.0",
4 | "description": "",
5 | "main": "index.js",
6 | "scripts": {
7 | "test": "echo \"Error: no test specified\" && exit 1"
8 | },
9 | "author": "",
10 | "license": "MIT",
11 | "dependencies": {
12 | "aws-sdk": "^2.936.0",
13 | "csvtojson": "^2.0.10",
14 | "decompress": "^4.2.1",
15 | "loglevel": "^1.7.1",
16 | "uuid": "^8.3.2"
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/integrations/amplitude-sam/src/handlers/sqs-queue-processor/index.js:
--------------------------------------------------------------------------------
1 | const AWS = require('aws-sdk');
2 | const pinpoint = new AWS.Pinpoint()
3 | const log = require('loglevel');
4 | log.setLevel(process.env.LOG_LEVEL || 'info');
5 |
// Record fields that are handled explicitly elsewhere and must not be copied
// into the Pinpoint endpoint's custom user attributes.
const attributesToIgnore = ['amplitude_id', 'user_id', 'email', 'applicationId', 'cohort', 'action']
14 |
/**
 * Fetch all Pinpoint endpoints registered for the given user.
 * Returns the endpoint list, or undefined when the API call fails
 * (the error is logged rather than rethrown).
 */
async function getUserEndpoints(applicationId, userId) {
    const request = {
        ApplicationId: applicationId,
        UserId: userId
    };

    try {
        const response = await pinpoint.getUserEndpoints(request).promise();
        log.trace(response)
        return response.EndpointsResponse.Item;
    }
    catch (err){
        log.error(err, err.stack);
    }
}
30 |
31 | async function upsertEndpoints(records) {
32 | var endpoints = []
33 |
34 | //"amplitude_id","user_id","amplitude_id","name","a_prop","persona","username","email"
35 | for(var i=0; i {
91 | // All log statements are written to CloudWatch by default. For more information, see
92 | // https://docs.aws.amazon.com/lambda/latest/dg/nodejs-prog-model-logging.html
93 | log.trace(JSON.stringify(event));
94 | let result = await upsertEndpoints(event.Records);
95 | log.info(`Successfully processed ${event.Records.length} endpoints`);
96 | }
97 |
--------------------------------------------------------------------------------
/integrations/amplitude-sam/src/handlers/sqs-queue-processor/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "sqs-queue-processor",
3 | "version": "1.0.0",
4 | "description": "",
5 | "main": "index.js",
6 | "scripts": {
7 | "test": "echo \"Error: no test specified\" && exit 1"
8 | },
9 | "author": "",
10 | "license": "MIT",
11 | "dependencies": {
12 | "aws-sdk": "^2.936.0",
13 | "loglevel": "^1.7.1"
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/lambda/Message_Archiver/archiver/app.js:
--------------------------------------------------------------------------------
1 | /*********************************************************************************************************************
2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. *
3 | * *
4 | * Licensed under the Apache License Version 2.0 (the 'License'). You may not use this file except in compliance *
5 | * with the License. A copy of the License is located at *
6 | * *
7 | * http://www.apache.org/licenses/ *
8 | * *
9 | * or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES *
10 | * OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions *
11 | * and limitations under the License. *
12 | *********************************************************************************************************************/
13 |
14 | /**
15 | * @author rjlowe
16 | */
17 |
18 | 'use strict';
19 |
// Processing library (renderer/archiver pipeline) for this Lambda.
let lib = require('./lib');
const { createLogger, format, transports } = require('winston');
const { combine, timestamp, label, printf } = format;
// Single-line log format: timestamp, Lambda request id, emitting module, level, message.
const myFormat = printf(({ level, message, label, requestId, module, timestamp }) => {
  return `${timestamp} [RequestId: ${requestId}] [${module}] ${level}: ${message}`;
});
// Root logger; the handler derives a per-request child logger from it.
const mainLogger = createLogger({
  format: combine(
    label({ label: 'Amazon Pinpoint Message Archiver - Archiver' }),
    timestamp(),
    myFormat
  ),
  transports: [new transports.Console()],
  // NOTE(review): 'notice' is not one of winston's default npm levels
  // (error/warn/info/http/verbose/debug/silly); with the default level set,
  // an unrecognized level may suppress output entirely — confirm the
  // intended fallback (likely 'info') when LOG_LEVEL is unset.
  level: process.env.LOG_LEVEL || 'notice'
});
35 |
36 | exports.handler = async (event, context) => {
37 |
38 | const logger = mainLogger.child({requestId: context.awsRequestId});
39 |
40 |
41 | // Load the message passed into the Lambda function into a JSON object
42 | const eventText = JSON.stringify(event);
43 | logger.log({
44 | level: 'info',
45 | message: eventText,
46 | module: 'app.js'
47 | });
48 |
49 | try {
50 | const resp = await lib.process(event.Records, {
51 | logger
52 | });
53 | return Promise.resolve(resp);
54 | } catch (err) {
55 | logger.log({
56 | level: 'error',
57 | message: JSON.stringify(err),
58 | module: 'app.js'
59 | });
60 | return Promise.reject(err);
61 | }
62 | };
63 |
--------------------------------------------------------------------------------
/lambda/Message_Archiver/archiver/lib/archiver.js:
--------------------------------------------------------------------------------
1 | /*********************************************************************************************************************
2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. *
3 | * *
4 | * Licensed under the Apache License Version 2.0 (the 'License'). You may not use this file except in compliance *
5 | * with the License. A copy of the License is located at *
6 | * *
7 | * http://www.apache.org/licenses/ *
8 | * *
9 | * or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES *
10 | * OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions *
11 | * and limitations under the License. *
12 | *********************************************************************************************************************/
13 | /**
14 | * @author rjlowe
15 | */
16 | const AWS = require('aws-sdk');
17 | const mimemessage = require('mimemessage');
18 | const parse = require('url').parse;
19 |
20 | AWS.config.update({
21 | region: process.env.AWS_REGION
22 | });
23 |
24 |
25 | class Archiver {
26 |
27 | /**
28 | * @class Renderer
29 | * @constructor
30 | */
31 | constructor(options) {
32 | this.options = {}
33 | this.options.logger = options.logger.child({module: 'lib/archiver.js'});
34 | this.s3 = new AWS.S3;
35 | }
36 |
37 |
38 | archive(rendered, endpointId, config, messageArchiveLocation) {
39 |
40 | if (rendered.length === 0) {return Promise.resolve('success');}
41 |
42 | const pathResult = this.parseS3FilePath(messageArchiveLocation);
43 | const html = this.generateMimeBody(rendered, endpointId, config);
44 |
45 | this.options.logger.log({
46 | level: 'info',
47 | message: JSON.stringify(pathResult)
48 | });
49 |
50 | return this.s3.putObject({
51 | Body: html,
52 | Bucket: pathResult.bucket,
53 | Key: pathResult.key
54 | }).promise()
55 | .then((results) => {
56 | this.options.logger.log({
57 | level: 'info',
58 | message: JSON.stringify(results)
59 | });
60 | return 'success';
61 | })
62 | };
63 |
64 | parseS3FilePath(path) {
65 | const uri = parse(path);
66 | uri.pathname = decodeURIComponent(uri.pathname || '');
67 | return {
68 | bucket: uri.hostname,
69 | key: uri.pathname.slice(1)
70 | };
71 | }
72 |
73 | generateMimeBody(rendered, endpointId, config) {
74 | const msg = mimemessage.factory({
75 | contentType: 'multipart/mixed',
76 | body: []
77 | });
78 |
79 | // Add Mime Headers
80 | msg.header('EndpointId', endpointId);
81 | msg.header('ApplicationId', config.applicationId);
82 | msg.header('EventTimestamp', config.eventTimestamp);
83 | if (config.campaignId) {
84 | msg.header('CampaignId', config.campaignId);
85 | msg.header('TreatmentId', config.treatmentId);
86 | } else if (config.journeyId) {
87 | msg.header('JourneyId', config.journeyId);
88 | msg.header('JourneyActivityId', config.journeyActivityId);
89 | }
90 |
91 | msg.header('Channel', rendered[0].channel);
92 |
93 | // const title = rendered.find(x => x.pieceType === 'TITLE');
94 | // if (title) {
95 | // msg.header('Subject_Title', title.html);
96 | // }
97 |
98 | // rendered.filter(x => x.pieceType !== 'TITLE').forEach((rendering) => {
99 | rendered.forEach((rendering) => {
100 |
101 | const piece = mimemessage.factory({
102 | contentType: 'multipart/mixed',
103 | body: rendering.html
104 | });
105 | piece.header('Content-Piece-Type', rendering.pieceType);
106 | msg.body.push(piece);
107 | });
108 |
109 | return msg.toString();
110 |
111 | }
112 | }
113 |
114 | module.exports = Archiver;
115 |
--------------------------------------------------------------------------------
/lambda/Message_Archiver/archiver/lib/archiver.spec.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | const sinon = require('sinon');
4 | const assert = require('chai').assert;
5 | const expect = require('chai').expect;
6 | const path = require('path');
7 | const mimemessage = require('mimemessage');
8 | const AWS = require('aws-sdk-mock');
9 | AWS.setSDK(path.resolve('./node_modules/aws-sdk'));
10 |
11 | const ArchiverLib = require('./archiver.js');
12 |
13 | let sandbox;
14 |
15 | const options = {
16 | logger: {
17 | log: function(m) {
18 | console.log(m);
19 | },
20 | child: function(m) {
21 | return options.logger;
22 | }
23 | }
24 | }
25 |
26 | describe('Archiver', function() {
27 |
28 | beforeEach(function() {
29 | sandbox = sinon.createSandbox();
30 | AWS.mock('S3', 'putObject', (params, callback) => { callback(null, 'success'); });
31 | });
32 |
33 | afterEach(function() {
34 | sandbox.restore();
35 | });
36 |
37 | it('archive should return successfully with rendered content', function(done) {
38 |
39 | const _rendered = [{pieceType: 'TITLE', html: 'html goes here', channel: 'SMS'}];
40 | const _endpointId = 'fake_endpoint_id';
41 | const _config = {
42 | applicationId: 'appId', eventTimestamp: 1591996341698, campaignId: 'campaignId', treatmentId: 'treatmentId', journeyId: 'journeyId', journeyActivityId: 'journeyActivityId'
43 | };
44 | const _messageArchiveLocation = 's3://fake_bucket/path/to/location/random_file_name';
45 |
46 | const archiver = new ArchiverLib(options);
47 |
48 | archiver.archive(_rendered, _endpointId, _config, _messageArchiveLocation).then((resp) => {
49 | expect(resp).to.equal('success');
50 | done();
51 | }).catch((err) => {
52 | done(err);
53 | });
54 | });
55 |
56 | it('archive should return successfully with no content', function(done) {
57 |
58 | const _rendered = [];
59 | const _endpointId = 'fake_endpoint_id';
60 | const _config = {
61 | applicationId: 'appId', eventTimestamp: 1591996341698, campaignId: 'campaignId', treatmentId: 'treatmentId', journeyId: 'journeyId', journeyActivityId: 'journeyActivityId'
62 | };
63 | const _messageArchiveLocation = 's3://fake_bucket/path/to/location/random_file_name';
64 |
65 | const archiver = new ArchiverLib(options);
66 |
67 | archiver.archive(_rendered, _endpointId, _config, _messageArchiveLocation).then((resp) => {
68 | expect(resp).to.equal('success');
69 | done();
70 | }).catch((err) => {
71 | done(err);
72 | });
73 | });
74 |
75 | it('generateMimeBody should archive an SMS rendering', function(done) {
76 |
77 | const _rendered = [{pieceType: 'TITLE', html: 'html goes here', channel: 'SMS'}];
78 | const _endpointId = 'fake_endpoint_id';
79 | const _config = {
80 | applicationId: 'appId', eventTimestamp: 1591996341698, campaignId: 'campaignId', treatmentId: 'treatmentId'
81 | };
82 |
83 | const archiver = new ArchiverLib(options);
84 |
85 | try {
86 | const resp = archiver.generateMimeBody(_rendered, _endpointId, _config);
87 | console.log(resp);
88 | expect(resp).to.be.a('string');
89 |
90 | const msg = mimemessage.parse(resp);
91 | expect(msg).to.not.equal(false);
92 | expect(msg.isMultiPart()).to.equal(true);
93 | expect(msg.contentType().type).to.equal('multipart');
94 | expect(msg.contentType().subtype).to.equal('mixed');
95 | expect(msg.contentType().fulltype).to.equal('multipart/mixed');
96 | expect(msg.header('EndpointId')).to.equal('fake_endpoint_id');
97 | expect(msg.header('ApplicationId')).to.equal('appId');
98 | expect(msg.header('EventTimestamp')).to.equal('1591996341698');
99 | expect(msg.header('CampaignId')).to.equal('campaignId');
100 | expect(msg.header('TreatmentId')).to.equal('treatmentId');
101 | expect(msg.header('JourneyId')).to.equal(undefined);
102 | expect(msg.header('JourneyActivityId')).to.equal(undefined);
103 | expect(msg.header('Channel')).to.equal('SMS');
104 |
105 | expect(msg.body[0].body).to.equal('html goes here');
106 | expect(msg.body[0].header('Content-Piece-Type')).to.equal('TITLE');
107 |
108 | done();
109 | } catch (err) {
110 | done(err);
111 | };
112 | });
113 |
114 | it('generateMimeBody should archive an Email rendering', function(done) {
115 |
116 | const _rendered = [
117 | {pieceType: 'TITLE', html: 'Subject goes here', channel: 'EMAIL'},
118 | {pieceType: 'HTML', html: 'HTML Body Goes here
', channel: 'EMAIL'},
119 | {pieceType: 'TEXT', html: 'Text Body', channel: 'EMAIL'},
120 | ];
121 | const _endpointId = 'fake_endpoint_id';
122 | const _config = {
123 | applicationId: 'appId', eventTimestamp: 1591996341698, journeyId: 'journeyId', journeyActivityId: 'journeyActivityId'
124 | };
125 |
126 | const archiver = new ArchiverLib(options);
127 |
128 | try {
129 | const resp = archiver.generateMimeBody(_rendered, _endpointId, _config);
130 | console.log(resp);
131 | expect(resp).to.be.a('string');
132 |
133 | const msg = mimemessage.parse(resp);
134 | expect(msg).to.not.equal(false);
135 | expect(msg.isMultiPart()).to.equal(true);
136 | expect(msg.contentType().type).to.equal('multipart');
137 | expect(msg.contentType().subtype).to.equal('mixed');
138 | expect(msg.contentType().fulltype).to.equal('multipart/mixed');
139 | expect(msg.header('EndpointId')).to.equal('fake_endpoint_id');
140 | expect(msg.header('ApplicationId')).to.equal('appId');
141 | expect(msg.header('EventTimestamp')).to.equal('1591996341698');
142 | expect(msg.header('CampaignId')).to.equal(undefined);
143 | expect(msg.header('TreatmentId')).to.equal(undefined);
144 | expect(msg.header('JourneyId')).to.equal('journeyId');
145 | expect(msg.header('JourneyActivityId')).to.equal('journeyActivityId');
146 | expect(msg.header('Channel')).to.equal('EMAIL');
147 |
148 | expect(msg.body[0].body).to.equal('Subject goes here');
149 | expect(msg.body[0].header('Content-Piece-Type')).to.equal('TITLE');
150 | expect(msg.body[1].body).to.equal('HTML Body Goes here
');
151 | expect(msg.body[1].header('Content-Piece-Type')).to.equal('HTML');
152 | expect(msg.body[2].body).to.equal('Text Body');
153 | expect(msg.body[2].header('Content-Piece-Type')).to.equal('TEXT');
154 |
155 | done();
156 | } catch (err) {
157 | done(err);
158 | };
159 | });
160 |
161 | });
162 |
--------------------------------------------------------------------------------
/lambda/Message_Archiver/archiver/lib/index.js:
--------------------------------------------------------------------------------
1 | /*********************************************************************************************************************
2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. *
3 | * *
4 | * Licensed under the Apache License Version 2.0 (the 'License'). You may not use this file except in compliance *
5 | * with the License. A copy of the License is located at *
6 | * *
7 | * http://www.apache.org/licenses/ *
8 | * *
9 | * or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES *
10 | * OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions *
11 | * and limitations under the License. *
12 | *********************************************************************************************************************/
13 | /**
14 | * @author rjlowe
15 | */
16 |
17 | 'use strict';
18 |
19 | const PinpointLib = require('./pinpoint.js');
20 | const RendererLib = require('./renderer.js');
21 | const ArchiverLib = require('./archiver.js');
22 |
23 | const process = async (records, options) => {
24 |
25 | const logger = options.logger.child({module: 'lib/index.js'});
26 | try {
27 |
28 | const pinpoint = new PinpointLib(options);
29 | const renderer = new RendererLib(options);
30 | const archiver = new ArchiverLib(options);
31 |
32 |
33 | // Loop over the SNS records
34 | return Promise.all(records.map((record, i) => {
35 | const body = record.body;
36 | const payload = JSON.parse(body);
37 |
38 | // Get variables from the JSON SNS payload
39 | const applicationId = payload.application.app_id;
40 | const eventTimestamp = payload.event_timestamp;
41 | const campaignId = payload.attributes.campaign_id;
42 | const treatmentId = payload.attributes.treatment_id;
43 | const journeyId = payload.attributes.journey_id;
44 | const journeyActivityId = payload.attributes.journey_activity_id;
45 | const endpointId = payload.client.client_id;
46 | const endpoint = JSON.parse(payload.client_context.custom.endpoint);
47 | const messageArchiveLocation = payload.client_context.custom.message_archive_location;
48 |
49 | const config = {applicationId, eventTimestamp, campaignId, treatmentId, journeyId, journeyActivityId};
50 |
51 | logger.log({
52 | level: 'info',
53 | message: JSON.stringify(config)
54 | });
55 |
56 | // Get the Content from Pinpoint
57 | return pinpoint.getContentParts(config)
58 | .then((content) => {
59 | return renderer.render(content, endpoint, endpointId, config);
60 | })
61 | .then((rendered) => {
62 | logger.log({
63 | level: 'info',
64 | message: JSON.stringify(rendered)
65 | });
66 | return archiver.archive(rendered, endpointId, config, messageArchiveLocation)
67 | });
68 | }))
69 | .then((results) => {
70 | return 'success';
71 | });
72 |
73 |
74 | } catch (err) {
75 | logger.log({
76 | level: 'error',
77 | message: JSON.stringify(err)
78 | });
79 | return Promise.reject(err);
80 | }
81 |
82 |
83 | };
84 |
85 |
86 | module.exports = {
87 | process
88 | };
89 |
--------------------------------------------------------------------------------
/lambda/Message_Archiver/archiver/lib/index.spec.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | const sinon = require('sinon');
4 | const assert = require('chai').assert;
5 | const expect = require('chai').expect;
6 | const path = require('path');
7 | const AWS = require('aws-sdk-mock');
8 | AWS.setSDK(path.resolve('./node_modules/aws-sdk'));
9 |
10 | const { process } = require('./index.js');
11 | const PinpointLib = require('./pinpoint.js');
12 | const RendererLib = require('./renderer.js');
13 | const ArchiverLib = require('./archiver.js');
14 |
15 | let sandbox;
16 |
17 | const options = {
18 | logger: {
19 | log: function(m) {
20 | console.log(m);
21 | },
22 | child: function(m) {
23 | return options.logger;
24 | }
25 | }
26 | }
27 |
28 | describe('Index', function() {
29 |
30 | beforeEach(function() {
31 | sandbox = sinon.createSandbox();
32 | });
33 |
34 | afterEach(function() {
35 | sandbox.restore();
36 | });
37 |
38 | it('process should be successful with a valid record', function(done) {
39 |
40 | const _records = [
41 | {
42 | "messageId": "c6578ec3-4044-466d-9596-db37e9224451",
43 | "receiptHandle": "AQEBLPolk9cvjH07V9au5DGPgfdhWxs7lMx2WhYDWgZxsoxjzIVblOvhakDAk5hvzYCkKlYDUkLOTRykjEYdHKQEljkWWyRsisgRys9G+zM1zgKjWubllr+SvBkxeWNfbsRzlHbuP/42/a1ipC8wOdVbqtug5EOy7105uNaecUoEjwl9nZ1Kud3bETieO1RvMJdc/adtFHClMUA8LTRFodTs10Nz3Vh7sannU1qInnP2DOI8JHKkwn+FRlrMOwa1AgaRmw349SxPuG0wbmjRqIlo0KZDANia2diZKpIEiC0Lbq3t66qI7YkuN8+Z6rxyvvYIMJW/u8/tSPIqMgdMdrJoYCqyXo91JK/FK0OMlAplrCG/3R6klfrV8v4Wt+WMBBBhMN3mWyIugAK6DHNdDu1wPzDZ8CFqD3sIRxR4HPu9GXA=",
44 | "body": "{\"event_type\":\"_campaign.send\",\"event_timestamp\":1591996341698,\"arrival_timestamp\":1591996342338,\"event_version\":\"3.1\",\"application\":{\"app_id\":\"xxxx\",\"sdk\":{}},\"client\":{\"client_id\":\"updatemesms3\"},\"device\":{\"platform\":{}},\"session\":{},\"attributes\":{\"campaign_activity_id\":\"e307c199592a41a280a73df802299d81\",\"campaign_send_status\":\"SUCCESS\",\"campaign_type\":null,\"treatment_id\":\"0\",\"campaign_id\":\"bc3727c0ee2c428b876489de58230c52\"},\"client_context\":{\"custom\":{\"endpoint\":\"{\\\"ChannelType\\\":\\\"SMS\\\",\\\"EndpointStatus\\\":\\\"ACTIVE\\\",\\\"OptOut\\\":\\\"NONE\\\",\\\"EffectiveDate\\\":\\\"2020-06-12T20:42:30.309Z\\\",\\\"Attributes\\\":{\\\"Item\\\":[\\\"Taco\\\"],\\\"PricePaid\\\":[\\\"23\\\"]},\\\"User\\\":{\\\"UserId\\\":\\\"User3\\\"}}\",\"message_archive_location\":\"s3://archiver001-messagearchives3bucket-8wgin3ndjsm8/archive/updatemesms3/2020/06/12/21/100548fc-4394-476b-a94f-07bbf7e81219\"}},\"awsAccountId\":\"xxxx\"}",
45 | "attributes": {
46 | "ApproximateReceiveCount": "1",
47 | "SentTimestamp": "1592416212754",
48 | "SenderId": "AROA6BBJYL7GBNFRX6TWJ:Archiver001-QuererLambda-1J7I5KNW1Z5T5",
49 | "ApproximateFirstReceiveTimestamp": "1592416212833"
50 | },
51 | "messageAttributes": {},
52 | "md5OfBody": "f26c094eb2751765b8c80b06430027ab",
53 | "eventSource": "aws:sqs",
54 | "eventSourceARN": "arn:aws:sqs:xxx",
55 | "awsRegion": "xxx"
56 | }
57 | ];
58 |
59 | sandbox.stub(PinpointLib.prototype, 'getContentParts').resolves(Promise.resolve([{pieceType: 'TITLE', html: 'html goes here', channel: 'SMS'}]));
60 | sandbox.stub(RendererLib.prototype, 'render').resolves(Promise.resolve([{pieceType: 'TITLE', html: 'html goes here', channel: 'SMS'}]));
61 | sandbox.stub(ArchiverLib.prototype, 'archive').resolves(Promise.resolve());
62 |
63 | process(_records, options).then((resp) => {
64 | expect(resp).to.equal('success');
65 | done();
66 | }).catch((err) => {
67 | done(err);
68 | });
69 | });
70 |
71 | it('process should error with an invalid record', function(done) {
72 |
73 | const _records = [
74 | {
75 | "messageId": "c6578ec3-4044-466d-9596-db37e9224451",
76 | "receiptHandle": "AQEBLPolk9cvjH07V9au5DGPgfdhWxs7lMx2WhYDWgZxsoxjzIVblOvhakDAk5hvzYCkKlYDUkLOTRykjEYdHKQEljkWWyRsisgRys9G+zM1zgKjWubllr+SvBkxeWNfbsRzlHbuP/42/a1ipC8wOdVbqtug5EOy7105uNaecUoEjwl9nZ1Kud3bETieO1RvMJdc/adtFHClMUA8LTRFodTs10Nz3Vh7sannU1qInnP2DOI8JHKkwn+FRlrMOwa1AgaRmw349SxPuG0wbmjRqIlo0KZDANia2diZKpIEiC0Lbq3t66qI7YkuN8+Z6rxyvvYIMJW/u8/tSPIqMgdMdrJoYCqyXo91JK/FK0OMlAplrCG/3R6klfrV8v4Wt+WMBBBhMN3mWyIugAK6DHNdDu1wPzDZ8CFqD3sIRxR4HPu9GXA=",
77 | "body": "NOTJSON-SHOULDERROR",
78 | "attributes": {
79 | "ApproximateReceiveCount": "1",
80 | "SentTimestamp": "1592416212754",
81 | "SenderId": "AROA6BBJYL7GBNFRX6TWJ:Archiver001-QuererLambda-1J7I5KNW1Z5T5",
82 | "ApproximateFirstReceiveTimestamp": "1592416212833"
83 | },
84 | "messageAttributes": {},
85 | "md5OfBody": "f26c094eb2751765b8c80b06430027ab",
86 | "eventSource": "aws:sqs",
87 | "eventSourceARN": "arn:aws:sqs:xxx",
88 | "awsRegion": "xxx"
89 | }
90 | ];
91 |
92 | sandbox.stub(PinpointLib.prototype, 'getContentParts').resolves(Promise.resolve([{pieceType: 'TITLE', html: 'html goes here', channel: 'SMS'}]));
93 | sandbox.stub(RendererLib.prototype, 'render').resolves(Promise.resolve([{pieceType: 'TITLE', html: 'html goes here', channel: 'SMS'}]));
94 | sandbox.stub(ArchiverLib.prototype, 'archive').resolves(Promise.resolve());
95 |
96 | process(_records, options).then((resp) => {
97 | done('ShouldError');
98 | }).catch((err) => {
99 | done();
100 | });
101 | });
102 |
103 | });
104 |
--------------------------------------------------------------------------------
/lambda/Message_Archiver/archiver/lib/pinpoint.spec.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | const sinon = require('sinon');
4 | const assert = require('chai').assert;
5 | const expect = require('chai').expect;
6 | const path = require('path');
7 | const AWS = require('aws-sdk-mock');
8 | AWS.setSDK(path.resolve('./node_modules/aws-sdk'));
9 |
10 | const PinpointLib = require('./pinpoint.js');
11 |
12 | let sandbox;
13 |
14 | const options = {
15 | logger: {
16 | log: function(m) {
17 | console.log(m);
18 | },
19 | child: function(m) {
20 | return options.logger;
21 | }
22 | }
23 | }
24 |
25 | describe('Pinpoint', function() {
26 |
27 | beforeEach(function() {
28 | sandbox = sinon.createSandbox();
29 | });
30 |
31 | afterEach(function() {
32 | sandbox.restore();
33 | });
34 |
35 |
36 | it('getContentParts should return successfully for Journeys', function(done) {
37 |
38 | const _options = {
39 | applicationId: 'fake_application_id', eventTimestamp: 1591996341698, campaignId: undefined, treatmentId: undefined, journeyId: 'journeyId', journeyActivityId: 'lmtZdYWNYR', campaignId: undefined, treatmentId: undefined
40 | };
41 |
42 | AWS.mock('Pinpoint', 'getJourney', {JourneyResponse: _journeyResponse});
43 | AWS.mock('Pinpoint', 'getEmailTemplate', {EmailTemplateResponse: _emailTemplateResponse});
44 |
45 | const pinpoint = new PinpointLib(options);
46 |
47 | pinpoint.getContentParts(_options).then((resp) => {
48 | expect(resp.length).to.equal(3);
49 | expect(resp[0].channel).to.equal('EMAIL');
50 | expect(resp[1].channel).to.equal('EMAIL');
51 | expect(resp[2].channel).to.equal('EMAIL');
52 |
53 | resp.sort((a,b) => a.pieceType < b.pieceType ? -1 : 1);
54 | expect(resp[0].html).to.equal("\n \n \n \n\n\nGreat Job! Purchase Status {{Attributes.Purchase}}\n\n");
55 | expect(resp[1].html).to.equal("Plain text here");
56 | expect(resp[2].html).to.equal("Thank you for making a purchase!");
57 |
58 | expect(resp[0].defaultSubs).to.equal(undefined);
59 | expect(resp[1].defaultSubs).to.equal(undefined);
60 | expect(resp[2].defaultSubs).to.equal(undefined);
61 |
62 | AWS.restore('Pinpoint', 'getJourney');
63 | AWS.restore('Pinpoint', 'getEmailTemplate');
64 |
65 | done();
66 | }).catch((err) => {
67 | done(err);
68 | });
69 | });
70 |
71 | it('getContentParts should return successfully for Journeys and DefaultSubstitutions', function(done) {
72 | const _options = {
73 | applicationId: 'fake_application_id', eventTimestamp: 1591996341698, campaignId: undefined, treatmentId: undefined, journeyId: 'journeyId', journeyActivityId: 'lmtZdYWNYR', campaignId: undefined, treatmentId: undefined
74 | };
75 |
76 | AWS.mock('Pinpoint', 'getJourney', {JourneyResponse: _journeyResponseWithDefaultSubstitutions});
77 | AWS.mock('Pinpoint', 'getEmailTemplate', {EmailTemplateResponse: _emailTemplateResponseWithDefaultSubstitutions});
78 |
79 | const pinpoint = new PinpointLib(options);
80 |
81 | pinpoint.getContentParts(_options).then((resp) => {
82 |
83 | console.log(resp);
84 |
85 | expect(resp.length).to.equal(3);
86 | expect(resp[0].channel).to.equal('EMAIL');
87 | expect(resp[1].channel).to.equal('EMAIL');
88 | expect(resp[2].channel).to.equal('EMAIL');
89 |
90 | resp.sort((a,b) => a.pieceType < b.pieceType ? -1 : 1);
91 | expect(resp[0].html).to.equal("\n \n \n \n\n\nGreat Job! Purchase Status {{Attributes.Purchase}}\n\n");
92 | expect(resp[1].html).to.equal("Plain text here");
93 | expect(resp[2].html).to.equal("Thank you for making a purchase!");
94 |
95 | expect(resp[0].defaultSubs).to.equal('{"Attributes":{"Purchase":"Test"}}');
96 | expect(resp[1].defaultSubs).to.equal('{"Attributes":{"Purchase":"Test"}}');
97 | expect(resp[2].defaultSubs).to.equal('{"Attributes":{"Purchase":"Test"}}');
98 |
99 | AWS.restore('Pinpoint', 'getJourney');
100 | AWS.restore('Pinpoint', 'getEmailTemplate');
101 |
102 | done();
103 | }).catch((err) => {
104 | done(err);
105 | });
106 | });
107 |
108 | });
109 |
110 |
111 | const _journeyResponse = {
112 | "Activities": {
113 | "DZaQ0CESSG": {},
114 | "WxOKnCmKpT": {},
115 | "lmtZdYWNYR": {
116 | "EMAIL": {
117 | "NextActivity": "8EyBzMMeDa",
118 | "TemplateName": "Email_Template_name"
119 | }
120 | },
121 | "8EyBzMMeDa": {}
122 | },
123 | "ApplicationId": "fake_application_id",
124 | "CreationDate": "2020-05-28T23:27:09.860Z",
125 | "Id": "fcd3044b0f12446eb62b61892f8c0c3a",
126 | "LastModifiedDate": "2020-06-10T18:26:09.021Z",
127 | "LocalTime": false,
128 | "Name": "Journey Name",
129 | "Schedule": {
130 | "Timezone": "UTC-07"
131 | },
132 | "StartActivity": "DZaQ0CESSG",
133 | "State": "DRAFT"
134 | };
135 |
136 | const _journeyResponseWithDefaultSubstitutions = {
137 | "Activities": {
138 | "DZaQ0CESSG": {},
139 | "WxOKnCmKpT": {},
140 | "lmtZdYWNYR": {
141 | "EMAIL": {
142 | "NextActivity": "8EyBzMMeDa",
143 | "TemplateName": "Email_Template_name_with_default"
144 | }
145 | },
146 | "8EyBzMMeDa": {}
147 | },
148 | "ApplicationId": "fake_application_id",
149 | "CreationDate": "2020-05-28T23:27:09.860Z",
150 | "Id": "fcd3044b0f12446eb62b61892f8c0c3a",
151 | "LastModifiedDate": "2020-06-10T18:26:09.021Z",
152 | "LocalTime": false,
153 | "Name": "Journey Name",
154 | "Schedule": {
155 | "Timezone": "UTC-07"
156 | },
157 | "StartActivity": "DZaQ0CESSG",
158 | "State": "DRAFT"
159 | };
160 |
161 |
162 | const _emailTemplateResponse = {
163 | "Arn": "arn:aws:mobiletargeting:xxx",
164 | "CreationDate": "2019-11-22T15:49:02.572Z",
165 | "HtmlPart": "\n \n \n \n\n\nGreat Job! Purchase Status {{Attributes.Purchase}}\n\n",
166 | "LastModifiedDate": "2019-11-22T15:49:02.572Z",
167 | "Subject": "Thank you for making a purchase!",
168 | "tags": {},
169 | "TemplateName": "ThanksForPurchasing",
170 | "TemplateType": "EMAIL",
171 | "TextPart": "Plain text here",
172 | "Version": "1"
173 | };
174 |
175 | const _emailTemplateResponseWithDefaultSubstitutions = {
176 | "Arn": "arn:aws:mobiletargeting:xxx",
177 | "CreationDate": "2019-11-22T15:49:02.572Z",
178 | "DefaultSubstitutions": "{\"Attributes\":{\"Purchase\":\"Test\"}}",
179 | "HtmlPart": "\n \n \n \n\n\nGreat Job! Purchase Status {{Attributes.Purchase}}\n\n",
180 | "LastModifiedDate": "2019-11-22T15:49:02.572Z",
181 | "Subject": "Thank you for making a purchase!",
182 | "tags": {},
183 | "TemplateName": "ThanksForPurchasing",
184 | "TemplateType": "EMAIL",
185 | "TextPart": "Plain text here",
186 | "Version": "1"
187 | };
188 |
--------------------------------------------------------------------------------
/lambda/Message_Archiver/archiver/lib/renderer.js:
--------------------------------------------------------------------------------
1 | /*********************************************************************************************************************
2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. *
3 | * *
4 | * Licensed under the Apache License Version 2.0 (the 'License'). You may not use this file except in compliance *
5 | * with the License. A copy of the License is located at *
6 | * *
7 | * http://www.apache.org/licenses/ *
8 | * *
9 | * or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES *
10 | * OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions *
11 | * and limitations under the License. *
12 | *********************************************************************************************************************/
13 | /**
14 | * @author rjlowe
15 | */
16 | const promisedHandlebars = require('promised-handlebars');
17 | const Q = require('q');
18 | const Handlebars = promisedHandlebars(require('handlebars'), { Promise: Q.Promise });
19 |
class Renderer {

  /**
   * @class Renderer
   * @constructor
   * @param {Object} options - must supply options.logger (winston-style).
   */
  constructor(options) {
    this.options = {};
    this.options.logger = options.logger.child({module: 'lib/renderer.js'});
    // Compiled Handlebars templates cached per `${campaignId}_${journeyId}`.
    this.cacheCompilers = {};
  }

  /**
   * Render every content piece against the endpoint record.
   * @param {Array} content - [{pieceType, html, defaultSubs, channel}, ...].
   * @param {Object} endpoint - Pinpoint endpoint record used as template context.
   * @param {String} endpointId - injected into the context as Id.
   * @param {Object} config - supplies campaignId/journeyId for the compiler cache key.
   * @returns {Promise<Array<{pieceType, html, channel}>>}
   */
  async render(content, endpoint, endpointId, config) {

    // Fixed: build the template context on a copy instead of mutating the
    // caller's endpoint object (the original wrote Id/Address into it).
    const endpointContext = Object.assign({}, endpoint, {
      Id: endpointId,
      Address: 'XXXXXXXX' // mask the real address in archived output
    });
    const key = `${config.campaignId}_${config.journeyId}`;
    const compilers = this.getCompilers(key, content);

    const renderedContentPromises = compilers.map((compiler) => {

      // Template defaultSubstitutions (if present) override endpoint fields,
      // matching the original Object.assign ordering.
      const compileContext = Object.assign({}, endpointContext, compiler.defaultSubs ? JSON.parse(compiler.defaultSubs) : {});

      this.options.logger.log({
        level: 'info',
        message: JSON.stringify(compileContext)
      });

      return compiler.pieceCompiler(compileContext)
        .then((html) => {
          return {pieceType: compiler.pieceType, html, channel: compiler.channel};
        });
    });

    return Promise.all(renderedContentPromises);

  }

  /**
   * Compile (or fetch from cache) the Handlebars compilers for the content.
   * Pieces without an html body are skipped.
   * NOTE(review): the cache is keyed only by campaign/journey id and assumes
   * the content for a given key never changes within one Lambda container —
   * confirm templates are immutable per campaign/journey.
   * @returns {Array<{pieceType, pieceCompiler, defaultSubs, channel}>}
   */
  getCompilers(key, content) {

    if (!content) return [];

    if (this.cacheCompilers[key]) {
      return this.cacheCompilers[key];
    }

    // {pieceType: APNS.Title, html: 'blob', defaultSubs: '{json}'}
    const compilers = content.filter(piece => piece.html).map((piece) => {
      return {
        pieceType: piece.pieceType,
        pieceCompiler: Handlebars.compile(piece.html),
        defaultSubs: piece.defaultSubs,
        channel: piece.channel
      };
    });

    this.cacheCompilers[key] = compilers;

    return compilers;

  }

}

module.exports = Renderer;
86 |
--------------------------------------------------------------------------------
/lambda/Message_Archiver/archiver/lib/test-setup.spec.js:
--------------------------------------------------------------------------------
// Shared mocha setup for all spec files: registers sinon-chai with chai once,
// and gives every test a fresh sinon sandbox, restored after each test.
const sinon = require('sinon');
const chai = require('chai');
const sinonChai = require('sinon-chai');

// Wire sinon-chai assertions in before any suite runs.
before(function() {
  chai.use(sinonChai);
});

// Fresh sandbox per test, available to specs as this.sandbox.
beforeEach(function() {
  this.sandbox = sinon.createSandbox();
});

// Undo every stub/spy created during the test.
afterEach(function() {
  this.sandbox.restore();
});
--------------------------------------------------------------------------------
/lambda/Message_Archiver/archiver/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "amazon-pinpoint-message-archiver-queuer",
3 | "version": "1.0.0",
4 | "main": "app.js",
5 | "scripts": {
6 | "pretest": "npm install",
7 | "test": "mocha lib/*.spec.js",
8 | "build:init": "rm -rf package-lock.json && rm -rf dist && rm -rf node_modules",
9 | "build:zip": "rm -rf package-lock.json && zip -rq --exclude=*tests* --exclude=*template.yml archiver.zip .",
10 | "build:dist": "mkdir dist && mv archiver.zip dist/",
11 | "build": "npm run build:init && npm install --production && npm run build:zip && npm run build:dist",
12 | "coverage": "nyc npm test",
13 | "local:test": "sam local invoke 'ExampleService' -e ./tests/event.json"
14 | },
15 | "author": "",
16 | "license": "ISC",
17 | "dependencies": {
18 | "handlebars": "^4.7.6",
19 | "mimemessage": "^1.0.5",
20 | "promised-handlebars": "^2.0.1",
21 | "q": "^1.5.1",
22 | "winston": "^3.2.1"
23 | },
24 | "devDependencies": {
25 | "aws-sdk": "*",
26 | "aws-sdk-mock": "*",
27 | "chai": "*",
28 | "mocha": "*",
29 | "nyc": "*",
30 | "sinon": "*",
31 | "sinon-chai": "*"
32 | },
33 | "description": ""
34 | }
35 |
--------------------------------------------------------------------------------
/lambda/Message_Archiver/archiver/tests/event.json:
--------------------------------------------------------------------------------
1 | {
2 | "Records": [
3 | {
4 | "messageId": "c6578ec3-4044-466d-9596-db37e9224451",
5 | "receiptHandle": "AQEBLPolk9cvjH07V9au5DGPgfdhWxs7lMx2WhYDWgZxsoxjzIVblOvhakDAk5hvzYCkKlYDUkLOTRykjEYdHKQEljkWWyRsisgRys9G+zM1zgKjWubllr+SvBkxeWNfbsRzlHbuP/42/a1ipC8wOdVbqtug5EOy7105uNaecUoEjwl9nZ1Kud3bETieO1RvMJdc/adtFHClMUA8LTRFodTs10Nz3Vh7sannU1qInnP2DOI8JHKkwn+FRlrMOwa1AgaRmw349SxPuG0wbmjRqIlo0KZDANia2diZKpIEiC0Lbq3t66qI7YkuN8+Z6rxyvvYIMJW/u8/tSPIqMgdMdrJoYCqyXo91JK/FK0OMlAplrCG/3R6klfrV8v4Wt+WMBBBhMN3mWyIugAK6DHNdDu1wPzDZ8CFqD3sIRxR4HPu9GXA=",
6 | "body": "{\"event_type\":\"_campaign.send\",\"event_timestamp\":1591996341698,\"arrival_timestamp\":1591996342338,\"event_version\":\"3.1\",\"application\":{\"app_id\":\"xxxx\",\"sdk\":{}},\"client\":{\"client_id\":\"updatemesms3\"},\"device\":{\"platform\":{}},\"session\":{},\"attributes\":{\"campaign_activity_id\":\"e307c199592a41a280a73df802299d81\",\"campaign_send_status\":\"SUCCESS\",\"campaign_type\":null,\"treatment_id\":\"0\",\"campaign_id\":\"bc3727c0ee2c428b876489de58230c52\"},\"client_context\":{\"custom\":{\"endpoint\":\"{\\\"ChannelType\\\":\\\"SMS\\\",\\\"EndpointStatus\\\":\\\"ACTIVE\\\",\\\"OptOut\\\":\\\"NONE\\\",\\\"EffectiveDate\\\":\\\"2020-06-12T20:42:30.309Z\\\",\\\"Attributes\\\":{\\\"Item\\\":[\\\"Taco\\\"],\\\"PricePaid\\\":[\\\"23\\\"]},\\\"User\\\":{\\\"UserId\\\":\\\"User3\\\"}}\",\"message_archive_location\":\"s3://archiver001-messagearchives3bucket-8wgin3ndjsm8/archive/updatemesms3/2020/06/12/21/100548fc-4394-476b-a94f-07bbf7e81219\"}},\"awsAccountId\":\"xxxx\"}",
7 | "attributes": {
8 | "ApproximateReceiveCount": "1",
9 | "SentTimestamp": "1592416212754",
10 | "SenderId": "AROA6BBJYL7GBNFRX6TWJ:Archiver001-QuererLambda-1J7I5KNW1Z5T5",
11 | "ApproximateFirstReceiveTimestamp": "1592416212833"
12 | },
13 | "messageAttributes": {},
14 | "md5OfBody": "f26c094eb2751765b8c80b06430027ab",
15 | "eventSource": "aws:sqs",
16 | "eventSourceARN": "arn:aws:sqs:xxx",
17 | "awsRegion": "xxx"
18 | }
19 | ]
20 | }
21 |
--------------------------------------------------------------------------------
/lambda/Message_Archiver/queuer/app.js:
--------------------------------------------------------------------------------
1 | /*********************************************************************************************************************
2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. *
3 | * *
4 | * Licensed under the Apache License Version 2.0 (the 'License'). You may not use this file except in compliance *
5 | * with the License. A copy of the License is located at *
6 | * *
7 | * http://www.apache.org/licenses/ *
8 | * *
9 | * or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES *
10 | * OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions *
11 | * and limitations under the License. *
12 | *********************************************************************************************************************/
13 | /**
14 | * @author rjlowe
15 | */
16 |
const lib = require('./lib');

const { createLogger, format, transports } = require('winston');
const { combine, timestamp, label, printf } = format;

// Single-line log format: timestamp, Lambda request id, emitting module, level, message.
const myFormat = printf(({ level, message, label, requestId, module, timestamp }) => {
  return `${timestamp} [RequestId: ${requestId}] [${module}] ${level}: ${message}`;
});

// Root logger; per-invocation children attach the request id (see handler).
// NOTE: winston's default (npm) levels are error/warn/info/http/verbose/debug/silly —
// 'notice' is not among them, so the previous fallback silently suppressed ALL
// logging unless LOG_LEVEL was explicitly set. 'info' is the closest valid level.
const mainLogger = createLogger({
  format: combine(
    label({ label: 'Amazon Pinpoint Message Archiver - Queuer' }),
    timestamp(),
    myFormat
  ),
  transports: [new transports.Console()],
  level: process.env.LOG_LEVEL || 'info'
});
33 |
34 | // Lambda Entry Point
35 | exports.handler = async (event, context) => {
36 |
37 | const logger = mainLogger.child({requestId: context.awsRequestId});
38 |
39 | const eventText = JSON.stringify(event);
40 | logger.log({
41 | level: 'info',
42 | message: eventText,
43 | module: 'app.js'
44 | });
45 |
46 | try {
47 |
48 | return lib.process(event.records, {logger});
49 |
50 | } catch (err) {
51 | logger.log({
52 | level: 'error',
53 | message: JSON.stringify(err),
54 | module: 'app.js'
55 | });
56 | return Promise.reject(err);
57 | }
58 | };
59 |
--------------------------------------------------------------------------------
/lambda/Message_Archiver/queuer/lib/index.js:
--------------------------------------------------------------------------------
1 | /*********************************************************************************************************************
2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. *
3 | * *
4 | * Licensed under the Apache License Version 2.0 (the 'License'). You may not use this file except in compliance *
5 | * with the License. A copy of the License is located at *
6 | * *
7 | * http://www.apache.org/licenses/ *
8 | * *
9 | * or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES *
10 | * OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions *
11 | * and limitations under the License. *
12 | *********************************************************************************************************************/
13 | /**
14 | * @author rjlowe
15 | */
16 |
17 | 'use strict';
18 |
19 | const EventProcessor = require('./processor.js');
20 |
21 | const process = async (records, options) => {
22 |
23 | const logger = options.logger.child({
24 | module: 'lib/index.js'
25 | });
26 |
27 | const processor = new EventProcessor(options);
28 |
29 | try {
30 |
31 | return processor.processRecords(records)
32 | .then((mutated_records) => {
33 | return { records: mutated_records };
34 | });
35 |
36 | } catch (err) {
37 | logger.log({
38 | level: 'error',
39 | message: JSON.stringify(err)
40 | });
41 | return Promise.reject(err);
42 | }
43 | }
44 |
45 | module.exports = {
46 | process
47 | };
48 |
--------------------------------------------------------------------------------
/lambda/Message_Archiver/queuer/lib/processor.js:
--------------------------------------------------------------------------------
1 | /*********************************************************************************************************************
2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. *
3 | * *
4 | * Licensed under the Apache License Version 2.0 (the 'License'). You may not use this file except in compliance *
5 | * with the License. A copy of the License is located at *
6 | * *
7 | * http://www.apache.org/licenses/ *
8 | * *
9 | * or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES *
10 | * OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions *
11 | * and limitations under the License. *
12 | *********************************************************************************************************************/
13 | /**
14 | * @author rjlowe
15 | */
16 |
17 | 'use strict';
18 |
19 | const Queuer = require('./queuer.js');
20 | const { generateFilePath } = require('./s3Path.js');
21 |
class EventProcessor {

    /**
     * Transforms Kinesis Firehose records containing Amazon Pinpoint events.
     * Campaign/journey send events are stamped with a pre-computed S3 archive
     * location and queued for archiving; every other event passes through
     * unmodified.
     * @class EventProcessor
     * @constructor
     * @param {Object} options - must contain a winston-style `logger`.
     */
    constructor(options) {
        this.options = {};
        this.options.logger = options.logger.child({module: 'lib/processor.js'});
        // Only these Pinpoint event types carry a message to archive.
        this.journeyCampaignEventTypes = ['_campaign.send', '_journey.send'];
        this.queuer = new Queuer(options);
    }

    /**
     * Processes records strictly in sequence (each record waits for the
     * previous one) and resolves with the transformed records in order.
     * @param {Object[]} records - Firehose records ({recordId, data, ...}).
     * @returns {Promise<Object[]>}
     */
    processRecords(records) {
        return records.reduce((p, record) => {
            return p.then((out) => {
                return this.promiseFromRecord(record)
                    .then((currentOut) => {
                        out.push(currentOut);
                        return out;
                    });
            });
        }, Promise.resolve([]));
    }

    /**
     * Transforms one Firehose record into the {data, recordId, result}
     * shape required by the Firehose transformation contract.
     * @param {Object} record - Firehose record with base64 `data`.
     * @returns {Promise<Object>}
     */
    promiseFromRecord(record) {

        // Decode the base64 message. Use 'utf8' (not 'ascii'): the payload is
        // JSON and may contain multi-byte characters in endpoint attributes;
        // 'ascii' decoding strips the high bit and corrupts them before parsing.
        const decoded = Buffer.from(record.data, 'base64').toString('utf8');
        this.options.logger.log({
            level: 'info',
            message: decoded
        });

        const pinpointEvent = JSON.parse(decoded);

        const p = this.journeyCampaignEventTypes.includes(pinpointEvent.event_type)
            ? this.journeyCampaignEvent(pinpointEvent)
            : this.defaultEvent(record);

        return p.then((data) => {
            return {
                data,
                recordId: record.recordId,
                result: 'Ok'
            };
        });
    }

    /**
     * Mutates a campaign/journey send event with the S3 location where the
     * rendered message will be archived, queues it for archiving, and returns
     * the mutated event re-encoded as base64 for Firehose delivery.
     * @param {Object} pinpointEvent - parsed Pinpoint event.
     * @returns {Promise<string>} base64-encoded mutated event.
     */
    journeyCampaignEvent(pinpointEvent) {

        // Pre-compute where the archived message will be stored in S3 so that
        // we can mutate the pinpointEvent object for reporting.
        // Defensive init: send events are expected to carry client_context.custom
        // (see tests/event.json), but a missing object must not fail the batch.
        pinpointEvent.client_context = pinpointEvent.client_context || {};
        pinpointEvent.client_context.custom = pinpointEvent.client_context.custom || {};
        pinpointEvent.client_context.custom.message_archive_location =
            generateFilePath(pinpointEvent.client.client_id, pinpointEvent.event_timestamp);

        const mutatedEvent = JSON.stringify(pinpointEvent);

        return this.queuer.sendEventForAchiving(mutatedEvent)
            .then(() => {
                return Buffer.from(mutatedEvent).toString('base64');
            });
    }

    // Non-send events pass through unmodified (data is already base64).
    defaultEvent(record) {
        return Promise.resolve(record.data);
    }
}
90 |
91 | module.exports = EventProcessor;
92 |
--------------------------------------------------------------------------------
/lambda/Message_Archiver/queuer/lib/queuer.js:
--------------------------------------------------------------------------------
1 | /*********************************************************************************************************************
2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. *
3 | * *
4 | * Licensed under the Apache License Version 2.0 (the 'License'). You may not use this file except in compliance *
5 | * with the License. A copy of the License is located at *
6 | * *
7 | * http://www.apache.org/licenses/ *
8 | * *
9 | * or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES *
10 | * OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions *
11 | * and limitations under the License. *
12 | *********************************************************************************************************************/
13 | /**
14 | * @author rjlowe
15 | */
16 |
'use strict';

const AWS = require('aws-sdk');
// Pin the SDK to the function's own region; AWS_REGION is set by the
// Lambda runtime.
AWS.config.update({
    region: process.env.AWS_REGION
});

// Resolved once per container. SQS_QUEUE_URL is the archiver queue URL
// injected via the function's environment configuration.
const queueUrl = process.env.SQS_QUEUE_URL;
const sqs = new AWS.SQS();
26 |
class Queuer {

    /**
     * Thin wrapper around SQS that enqueues mutated Pinpoint events so the
     * downstream archiver Lambda can render and persist them to S3.
     * @class Queuer
     * @constructor
     * @param {Object} options - must contain a winston-style `logger`.
     */
    constructor(options) {
        this.options = {}
        this.options.logger = options.logger.child({module: 'lib/queuer.js'});
    }

    /**
     * Sends the serialized event to the archiver SQS queue.
     * NOTE(review): method name carries a typo ("Achiving"); the caller in
     * lib/processor.js uses the same spelling, so any rename must be
     * coordinated across both files.
     * @param {string} mutatedEvent - JSON string of the mutated Pinpoint event.
     * @returns {Promise} resolves with the SQS sendMessage response.
     */
    sendEventForAchiving(mutatedEvent) {
        this.options.logger.log({
            level: 'info',
            message: mutatedEvent
        });

        return sqs.sendMessage({
            MessageBody: mutatedEvent,
            QueueUrl: queueUrl
        }).promise();
    }
}
50 |
51 |
52 | module.exports = Queuer;
53 |
--------------------------------------------------------------------------------
/lambda/Message_Archiver/queuer/lib/s3Path.js:
--------------------------------------------------------------------------------
1 | /*********************************************************************************************************************
2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. *
3 | * *
4 | * Licensed under the Apache License Version 2.0 (the 'License'). You may not use this file except in compliance *
5 | * with the License. A copy of the License is located at *
6 | * *
7 | * http://www.apache.org/licenses/ *
8 | * *
9 | * or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES *
10 | * OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions *
11 | * and limitations under the License. *
12 | *********************************************************************************************************************/
13 | /**
14 | * @author rjlowe
15 | */
16 |
17 |
18 |
19 | const generateFilePath = function(endpointId, eventTimestamp) {
20 |
21 | const prefix = process.env.S3_PREFIX || 'archive';
22 | const s3Bucket = process.env.S3_BUCKET;
23 |
24 | const d = new Date(eventTimestamp);
25 | return 's3://' + s3Bucket + '/' + prefix + '/' + endpointId
26 | + '/' + d.getUTCFullYear() + '/' + pad(d.getUTCMonth() + 1)
27 | + '/' + pad(d.getUTCDate()) + '/' + pad(d.getUTCHours())
28 | + '/' + uuidv4();
29 | }
30 |
31 | function pad(n){return n<10 ? '0'+n : n}
32 |
33 | function uuidv4() {
34 | return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
35 | var r = Math.random() * 16 | 0, v = c == 'x' ? r : (r & 0x3 | 0x8);
36 | return v.toString(16);
37 | });
38 | }
39 |
40 |
41 | module.exports = {generateFilePath};
42 |
--------------------------------------------------------------------------------
/lambda/Message_Archiver/queuer/lib/s3Path.spec.js:
--------------------------------------------------------------------------------
'use strict';

// Unit tests for lib/s3Path.js (generateFilePath).
const sinon = require('sinon');
const assert = require('chai').assert;
const expect = require('chai').expect;
const path = require('path');
const parse = require('url').parse;
const AWS = require('aws-sdk-mock');
AWS.setSDK(path.resolve('./node_modules/aws-sdk'));


const {generateFilePath} = require('./s3Path.js');

let sandbox;

describe('S3Path', function() {

  beforeEach(function() {
    sandbox = sinon.createSandbox();
  });

  afterEach(function() {
    sandbox.restore();
  });

  it('generateFilePath returns an appropriate S3 URL with UTC time codes', function(done) {

    // generateFilePath reads the bucket name from the environment.
    process.env.S3_BUCKET = 'test_bucket_name';

    const _endpointId = 'test_endpoint_id';
    // 1591996342301 ms = 2020-06-12T21:12:22.301Z, matching the expected
    // /2020/06/12/21/ path segments asserted below.
    const _eventTimestamp = 1591996342301;

    const _resp = generateFilePath(_endpointId, _eventTimestamp);
    console.log(_resp);
    expect(_resp).to.be.a('string');
    expect(_resp.substr(0, 5)).to.equal('s3://');

    // Parse the s3:// URI: hostname is the bucket, pathname the object key.
    const uri = parse(_resp);
    expect(uri.hostname).to.equal('test_bucket_name');
    expect(decodeURIComponent(uri.pathname).substring(0, 40)).to.equal('/archive/test_endpoint_id/2020/06/12/21/');

    done();

  });

});
47 |
--------------------------------------------------------------------------------
/lambda/Message_Archiver/queuer/lib/test-setup.spec.js:
--------------------------------------------------------------------------------
const sinon = require('sinon');
const chai = require('chai');
const sinonChai = require('sinon-chai');

// Register the sinon-chai matchers (calledWith, calledOnce, ...) once
// before the whole mocha suite runs.
before(function() {
    chai.use(sinonChai);
});

// Give each test a fresh sinon sandbox, exposed on the mocha context
// as this.sandbox so specs can create stubs/spies through it.
beforeEach(function() {
    this.sandbox = sinon.createSandbox();
});

// Restore every stub/spy created during the test so state never leaks
// between tests.
afterEach(function() {
    this.sandbox.restore();
});
--------------------------------------------------------------------------------
/lambda/Message_Archiver/queuer/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "amazon-pinpoint-message-archiver-queuer",
3 | "version": "1.0.0",
4 | "description": "",
5 | "main": "app.js",
6 | "scripts": {
7 | "pretest": "npm install",
8 | "test": "mocha lib/*.spec.js",
9 | "build:init": "rm -rf package-lock.json && rm -rf dist && rm -rf node_modules",
10 | "build:zip": "rm -rf package-lock.json && zip -rq --exclude=*tests* --exclude=*template.yml queuer.zip .",
11 | "build:dist": "mkdir dist && mv queuer.zip dist/",
12 | "build": "npm run build:init && npm install --production && npm run build:zip && npm run build:dist",
13 | "coverage": "nyc npm test",
14 | "local:test": "sam local invoke 'ExampleService' -e ./tests/event.json"
15 | },
16 | "author": "",
17 | "license": "ISC",
18 | "dependencies": {
19 | "winston": "^3.2.1"
20 | },
21 | "devDependencies": {
22 | "aws-sdk-mock": "*",
23 | "aws-sdk": "*",
24 | "chai": "*",
25 | "mocha": "*",
26 | "nyc": "*",
27 | "sinon": "*",
28 | "sinon-chai": "*"
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/lambda/Message_Archiver/queuer/tests/event.json:
--------------------------------------------------------------------------------
1 | {
2 | "invocationId": "639a70d3-54db-455b-9d55-1fabbaeab169",
3 | "deliveryStreamArn": "arn:aws:firehose:xxxx",
4 | "region": "xxx",
5 | "records": [
6 | {
7 | "recordId": "49607899387440210186580631207602122343817916558121893890000000",
8 | "approximateArrivalTimestamp": 1591996344859,
9 | "data": "eyJldmVudF90eXBlIjoiX2NhbXBhaWduLnNlbmQiLCJldmVudF90aW1lc3RhbXAiOjE1OTE5OTYzNDE2OTcsImFycml2YWxfdGltZXN0YW1wIjoxNTkxOTk2MzQyMzM3LCJldmVudF92ZXJzaW9uIjoiMy4xIiwiYXBwbGljYXRpb24iOnsiYXBwX2lkIjoieHh4eCIsInNkayI6e319LCJjbGllbnQiOnsiY2xpZW50X2lkIjoidXBkYXRlbWVzbXMyIn0sImRldmljZSI6eyJwbGF0Zm9ybSI6e319LCJzZXNzaW9uIjp7fSwiYXR0cmlidXRlcyI6eyJjYW1wYWlnbl9hY3Rpdml0eV9pZCI6ImUzMDdjMTk5NTkyYTQxYTI4MGE3M2RmODAyMjk5ZDgxIiwiY2FtcGFpZ25fc2VuZF9zdGF0dXMiOiJTVUNDRVNTIiwiY2FtcGFpZ25fdHlwZSI6bnVsbCwidHJlYXRtZW50X2lkIjoiMCIsImNhbXBhaWduX2lkIjoiYmMzNzI3YzBlZTJjNDI4Yjg3NjQ4OWRlNTgyMzBjNTIifSwiY2xpZW50X2NvbnRleHQiOnsiY3VzdG9tIjp7ImVuZHBvaW50Ijoie1wiQ2hhbm5lbFR5cGVcIjpcIlNNU1wiLFwiRW5kcG9pbnRTdGF0dXNcIjpcIkFDVElWRVwiLFwiT3B0T3V0XCI6XCJOT05FXCIsXCJFZmZlY3RpdmVEYXRlXCI6XCIyMDIwLTA2LTEyVDIwOjQyOjMwLjMwOVpcIixcIkF0dHJpYnV0ZXNcIjp7XCJJdGVtXCI6W1wiUGxhbnRcIl0sXCJQcmljZVBhaWRcIjpbXCIzNFwiXX0sXCJVc2VyXCI6e1wiVXNlcklkXCI6XCJVc2VyMlwifX0ifX0sImF3c0FjY291bnRJZCI6Inh4eHgifQo="
10 | },
11 | {
12 | "recordId": "49607899387440210186580631207603331269637531187296600066000000",
13 | "approximateArrivalTimestamp": 1591996344862,
14 | "data": "eyJldmVudF90eXBlIjoiX2NhbXBhaWduLnNlbmQiLCJldmVudF90aW1lc3RhbXAiOjE1OTE5OTYzNDE2OTgsImFycml2YWxfdGltZXN0YW1wIjoxNTkxOTk2MzQyMzM4LCJldmVudF92ZXJzaW9uIjoiMy4xIiwiYXBwbGljYXRpb24iOnsiYXBwX2lkIjoieHh4eCIsInNkayI6e319LCJjbGllbnQiOnsiY2xpZW50X2lkIjoidXBkYXRlbWVzbXMzIn0sImRldmljZSI6eyJwbGF0Zm9ybSI6e319LCJzZXNzaW9uIjp7fSwiYXR0cmlidXRlcyI6eyJjYW1wYWlnbl9hY3Rpdml0eV9pZCI6ImUzMDdjMTk5NTkyYTQxYTI4MGE3M2RmODAyMjk5ZDgxIiwiY2FtcGFpZ25fc2VuZF9zdGF0dXMiOiJTVUNDRVNTIiwiY2FtcGFpZ25fdHlwZSI6bnVsbCwidHJlYXRtZW50X2lkIjoiMCIsImNhbXBhaWduX2lkIjoiYmMzNzI3YzBlZTJjNDI4Yjg3NjQ4OWRlNTgyMzBjNTIifSwiY2xpZW50X2NvbnRleHQiOnsiY3VzdG9tIjp7ImVuZHBvaW50Ijoie1wiQ2hhbm5lbFR5cGVcIjpcIlNNU1wiLFwiRW5kcG9pbnRTdGF0dXNcIjpcIkFDVElWRVwiLFwiT3B0T3V0XCI6XCJOT05FXCIsXCJFZmZlY3RpdmVEYXRlXCI6XCIyMDIwLTA2LTEyVDIwOjQyOjMwLjMwOVpcIixcIkF0dHJpYnV0ZXNcIjp7XCJJdGVtXCI6W1wiVGFjb1wiXSxcIlByaWNlUGFpZFwiOltcIjIzXCJdfSxcIlVzZXJcIjp7XCJVc2VySWRcIjpcIlVzZXIzXCJ9fSJ9fSwiYXdzQWNjb3VudElkIjoieHh4eCJ9Cg=="
15 | },
16 | {
17 | "recordId": "49607899387440210186580631207604540195457145953910259714000000",
18 | "approximateArrivalTimestamp": 1591996347027,
19 | "data": "eyJldmVudF90eXBlIjoiX1NNUy5CVUZGRVJFRCIsImV2ZW50X3RpbWVzdGFtcCI6MTU5MTk5NjM0MjA3OCwiYXJyaXZhbF90aW1lc3RhbXAiOjE1OTE5OTYzNDE3MTksImV2ZW50X3ZlcnNpb24iOiIzLjEiLCJhcHBsaWNhdGlvbiI6eyJhcHBfaWQiOiJ4eHh4Iiwic2RrIjp7fX0sImNsaWVudCI6eyJjbGllbnRfaWQiOiJ1cGRhdGVtZXNtczIifSwiZGV2aWNlIjp7InBsYXRmb3JtIjp7fX0sInNlc3Npb24iOnt9LCJhdHRyaWJ1dGVzIjp7InNlbmRlcl9yZXF1ZXN0X2lkIjoiOWRiZWY5YmMtYjgyZS00MzIwLWEzNjUteHh4IiwiY2FtcGFpZ25fYWN0aXZpdHlfaWQiOiJlMzA3YzE5OTU5MmE0MWEyODBhNzNkZjgwMjI5OWQ4MSIsImRlc3RpbmF0aW9uX3Bob25lX251bWJlciI6IisxeHh4eCIsInJlY29yZF9zdGF0dXMiOiJTVUNDRVNTRlVMIiwiaXNvX2NvdW50cnlfY29kZSI6IlVTIiwidHJlYXRtZW50X2lkIjoiMCIsIm51bWJlcl9vZl9tZXNzYWdlX3BhcnRzIjoiMSIsIm1lc3NhZ2VfaWQiOiJ4eHh4IiwibWVzc2FnZV90eXBlIjoiVHJhbnNhY3Rpb25hbCIsImNhbXBhaWduX2lkIjoiYmMzNzI3YzBlZTJjNDI4Yjg3NjQ4OWRlNTgyMzBjNTIiLCJjdXN0b21lcl9jb250ZXh0Ijoie1widXNlcklkXCI6XCJVc2VyMlwifSJ9LCJtZXRyaWNzIjp7InByaWNlX2luX21pbGxpY2VudHNfdXNkIjo2NDUuMH0sImF3c0FjY291bnRJZCI6Inh4eHgifQo="
20 | },
21 | {
22 | "recordId": "49607899387440210186580631207605749121276760583084965890000000",
23 | "approximateArrivalTimestamp": 1591996347030,
24 | "data": "eyJldmVudF90eXBlIjoiX1NNUy5CVUZGRVJFRCIsImV2ZW50X3RpbWVzdGFtcCI6MTU5MTk5NjM0MjMwMSwiYXJyaXZhbF90aW1lc3RhbXAiOjE1OTE5OTYzNDE3NDcsImV2ZW50X3ZlcnNpb24iOiIzLjEiLCJhcHBsaWNhdGlvbiI6eyJhcHBfaWQiOiJ4eHh4Iiwic2RrIjp7fX0sImNsaWVudCI6eyJjbGllbnRfaWQiOiJ1cGRhdGVtZXNtczMifSwiZGV2aWNlIjp7InBsYXRmb3JtIjp7fX0sInNlc3Npb24iOnt9LCJhdHRyaWJ1dGVzIjp7InNlbmRlcl9yZXF1ZXN0X2lkIjoiOGY4MDg1ZDMtMTVkZC00MjJjLTliNTMteHh4eHgiLCJjYW1wYWlnbl9hY3Rpdml0eV9pZCI6ImUzMDdjMTk5NTkyYTQxYTI4MGE3M2RmODAyMjk5ZDgxIiwiZGVzdGluYXRpb25fcGhvbmVfbnVtYmVyIjoiKzF4eHh4IiwicmVjb3JkX3N0YXR1cyI6IlNVQ0NFU1NGVUwiLCJpc29fY291bnRyeV9jb2RlIjoiVVMiLCJ0cmVhdG1lbnRfaWQiOiIwIiwibnVtYmVyX29mX21lc3NhZ2VfcGFydHMiOiIxIiwibWVzc2FnZV9pZCI6Inh4eHgiLCJtZXNzYWdlX3R5cGUiOiJUcmFuc2FjdGlvbmFsIiwiY2FtcGFpZ25faWQiOiJiYzM3MjdjMGVlMmM0MjhiODc2NDg5ZGU1ODIzMGM1MiIsImN1c3RvbWVyX2NvbnRleHQiOiJ7XCJ1c2VySWRcIjpcIlVzZXIzXCJ9In0sIm1ldHJpY3MiOnsicHJpY2VfaW5fbWlsbGljZW50c191c2QiOjY0NS4wfSwiYXdzQWNjb3VudElkIjoieHh4eCJ9Cg=="
25 | }
26 | ]
27 | }
28 |
--------------------------------------------------------------------------------
/solutions/architectures/amazon-pinpoint-preference-center-arch-diagram.27719954c8638a8569a88b5448edab04d932b953.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/solutions/architectures/amazon-pinpoint-preference-center-arch-diagram.27719954c8638a8569a88b5448edab04d932b953.png
--------------------------------------------------------------------------------
/solutions/architectures/digital-user-engagement-events-database-architecture-diagram.b1f4423b5b7e11c22879e599ee5b085b29ea16e9.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/solutions/architectures/digital-user-engagement-events-database-architecture-diagram.b1f4423b5b7e11c22879e599ee5b085b29ea16e9.png
--------------------------------------------------------------------------------
/solutions/architectures/iot-channel-using-amazon-pinpoint-architecture.61f861b69135996efd52ec971a9f352d7e2786d7.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/solutions/architectures/iot-channel-using-amazon-pinpoint-architecture.61f861b69135996efd52ec971a9f352d7e2786d7.png
--------------------------------------------------------------------------------
/solutions/architectures/location-based-notifications-pinpoint-ra.a89caa813efd5e212ea1295bfec42561c25f32a3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/solutions/architectures/location-based-notifications-pinpoint-ra.a89caa813efd5e212ea1295bfec42561c25f32a3.png
--------------------------------------------------------------------------------
/solutions/architectures/predictive-segmentation-using-amazon-pinpoint-and-amazon-sagemaker-architecture.b6341ce6d26ce5a90d4984f1060c27d17d3b7f95.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/solutions/architectures/predictive-segmentation-using-amazon-pinpoint-and-amazon-sagemaker-architecture.b6341ce6d26ce5a90d4984f1060c27d17d3b7f95.png
--------------------------------------------------------------------------------
/solutions/architectures/predictive-user-engagement-architecture.7e3bdd3b55e962e74e3c638f68fc0e88beb98c3a.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/communication-developer-services-reference-architectures/bc10a06610f9e49af6492352733e9aa3423e1a91/solutions/architectures/predictive-user-engagement-architecture.7e3bdd3b55e962e74e3c638f68fc0e88beb98c3a.png
--------------------------------------------------------------------------------