├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.md
│   │   └── feature_request.md
│   └── PULL_REQUEST_TEMPLATE.md
├── .gitignore
├── CHANGELOG.md
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE.txt
├── NOTICE.txt
├── README.md
├── SECURITY.md
├── deployment
│   ├── amazon-marketing-cloud-uploader-from-aws.yaml
│   ├── auth.yaml
│   ├── build-s3-dist.sh
│   ├── glue.yaml
│   ├── lambda_layer_factory
│   │   ├── Dockerfile
│   │   ├── build-lambda-layer.sh
│   │   ├── docker-entrypoint.sh
│   │   └── requirements.txt
│   ├── run-unit-tests.sh
│   └── web.yaml
├── docs
│   ├── images
│   │   ├── Settings.png
│   │   ├── Step1.png
│   │   ├── Step2.png
│   │   ├── Step3.png
│   │   ├── Step4.png
│   │   ├── Step5.png
│   │   ├── Step6.png
│   │   ├── architecture.png
│   │   ├── create_user01.png
│   │   ├── create_user02.png
│   │   ├── create_user03.png
│   │   └── launch-stack.png
│   └── sample_data
│       ├── sample_dimension.csv
│       ├── sample_dimension.json
│       └── sample_fact.json
├── solution-manifest.yaml
└── source
    ├── .coveragerc
    ├── amc_uploader
    │   ├── __init__.py
    │   ├── amc_uploader.py
    │   ├── lib
    │   │   ├── __init__.py
    │   │   └── tasks.py
    │   ├── requirements.txt
    │   └── setup.py
    ├── anonymous_data_logger
    │   ├── __init__.py
    │   ├── anonymous_data_logger.py
    │   ├── anonymous_lib
    │   │   ├── __init__.py
    │   │   ├── cfnresponse.py
    │   │   └── metrics.py
    │   └── requirements.txt
    ├── api
    │   ├── .chalice
    │   │   ├── config.json
    │   │   └── dev-app-policy.json
    │   ├── .gitignore
    │   ├── __init__.py
    │   ├── app.py
    │   ├── chalicelib
    │   │   ├── __init__.py
    │   │   └── tasks.py
    │   ├── external_resources.json
    │   └── requirements.txt
    ├── cognito_hosted_ui_resource
    │   ├── __init__.py
    │   ├── amcufa-logo.png
    │   ├── cognito_hosted_ui_resource.py
    │   ├── login.css
    │   └── requirements.txt
    ├── glue
    │   ├── __init__.py
    │   ├── amc_transformations.py
    │   └── library
    │       ├── __init__.py
    │       ├── address_map_helper.json
    │       ├── address_normalizer.py
    │       ├── city_normalizer.py
    │       ├── default_normalizer.py
    │       ├── email_normalizer.py
    │       ├── phone_normalizer.py
    │       ├── read_write.py
    │       ├── state_normalizer.py
    │       ├── transform.py
    │       └── zip_normalizer.py
    ├── helper
    │   ├── cf_helper.py
    │   ├── config_helper.py
    │   └── website_helper.py
    ├── pytest.ini
    ├── requirements-dev.txt
    ├── share
    │   ├── __init__.py
    │   └── tasks.py
    ├── tests
    │   ├── README.md
    │   ├── __init__.py
    │   ├── e2e
    │   │   ├── __init__.py
    │   │   ├── conftest.py
    │   │   ├── requirements.txt
    │   │   ├── run_e2e.sh
    │   │   └── test_app.py
    │   ├── integration_test
    │   │   ├── __init__.py
    │   │   └── test_api_integration.py
    │   ├── requirements-test.txt
    │   ├── run_test.sh
    │   └── unit_test
    │       ├── __init__.py
    │       ├── amc_transformation
    │       │   ├── __init__.py
    │       │   ├── sample_data
    │       │   │   ├── README.md
    │       │   │   ├── test_ca
    │       │   │   │   ├── ca_check.json
    │       │   │   │   └── ca_raw.json
    │       │   │   ├── test_de
    │       │   │   │   ├── de_check.json
    │       │   │   │   └── de_raw.json
    │       │   │   ├── test_es
    │       │   │   │   ├── es_check.json
    │       │   │   │   └── es_raw.json
    │       │   │   ├── test_fr
    │       │   │   │   ├── fr_check.json
    │       │   │   │   └── fr_raw.json
    │       │   │   ├── test_gb
    │       │   │   │   ├── gb_check.json
    │       │   │   │   └── gb_raw.json
    │       │   │   ├── test_in
    │       │   │   │   ├── in_check.json
    │       │   │   │   └── in_raw.json
    │       │   │   ├── test_it
    │       │   │   │   ├── it_check.json
    │       │   │   │   └── it_raw.json
    │       │   │   ├── test_jp
    │       │   │   │   ├── jp_check.json
    │       │   │   │   └── jp_raw.json
    │       │   │   └── test_us
    │       │   │       ├── us_check.json
    │       │   │       └── us_raw.json
    │       │   └── test_amc_transformation.py
    │       ├── amc_uploader
    │       │   ├── __init__.py
    │       │   └── test_amc_uploader.py
    │       ├── conftest.py
    │       ├── test_anonymous_data_logger.py
    │       ├── test_api.py
    │       ├── test_cognito_hosted_ui_resource.py
    │       ├── test_helper.py
    │       └── test_tasks.py
    └── website
        ├── babel.config.js
        ├── package-lock.json
        ├── package.json
        ├── public
        │   ├── .well-known
        │   │   └── security.txt
        │   ├── index.html
        │   └── robots.txt
        ├── src
        │   ├── App.vue
        │   ├── components
        │   │   ├── Header.vue
        │   │   ├── Sidebar.vue
        │   │   ├── VoerroTagsInput.css
        │   │   └── VoerroTagsInput.vue
        │   ├── main.js
        │   ├── registerServiceWorker.js
        │   ├── router.js
        │   ├── store
        │   │   ├── actions.js
        │   │   ├── index.js
        │   │   ├── mutations.js
        │   │   └── state.js
        │   └── views
        │       ├── Redirect.vue
        │       ├── Settings.vue
        │       ├── Step1.vue
        │       ├── Step2.vue
        │       ├── Step3.vue
        │       ├── Step4.vue
        │       ├── Step5.vue
        │       └── Step6.vue
        └── vue.config.js
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: bug
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the bug**
11 | A clear and concise description of what the bug is.
12 |
13 | **To Reproduce**
14 | Steps to reproduce the behavior.
15 |
16 | **Expected behavior**
17 | A clear and concise description of what you expected to happen.
18 |
19 | **Please complete the following information about the solution:**
20 | - [ ] Version: [e.g. v1.0.0]
21 |
22 | To get the version of the solution, you can look at the description of the created CloudFormation stack. For example, "_(SO0222) - amcufa **v1.0.0**. This is the base AWS CloudFormation template that defines resources and nested stacks for this solution._". If the description does not contain the version information, you can look at the mappings section of the template:
23 |
24 | ```yaml
25 | Mappings:
26 | Application:
27 | Solution:
28 | Id: "SO0222"
29 | Name: "amcufa"
30 | Version: "v1.0.0"
31 | ```
32 |
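Alternatively, the stack description (which includes the version) can be retrieved with the AWS CLI, for example (substitute your own stack name):

```sh
aws cloudformation describe-stacks \
  --stack-name <your-stack-name> \
  --query "Stacks[0].Description" \
  --output text
```
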
33 | - [ ] Region: [e.g. us-east-1]
34 | - [ ] Was the solution modified from the version published on this repository?
35 | - [ ] If the answer to the previous question was yes, are the changes available on GitHub?
36 | - [ ] Have you checked your [service quotas](https://docs.aws.amazon.com/general/latest/gr/aws_service_limits.html) for the services this solution uses?
37 | - [ ] Were there any errors in the CloudWatch Logs?
38 |
39 | **Screenshots**
40 | If applicable, add screenshots to help explain your problem (please **DO NOT include sensitive information**).
41 |
42 | **Additional context**
43 | Add any other context about the problem here.
44 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this solution
4 | title: ''
5 | labels: enhancement
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the feature you'd like**
14 | A clear and concise description of what you want to happen.
15 |
16 | **Additional context**
17 | Add any other context or screenshots about the feature request here.
18 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | *Issue #, if available:*
2 |
3 | *Description of changes:*
4 |
5 | By submitting this pull request, I confirm that you can use, modify, copy, and redistribute this contribution, under the terms of your choice.
6 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | source/website/public/runtimeConfig.json
3 | */.DS_Store
4 | .DS_Store
5 | .idea/*
6 | node_modules
7 | package
8 | deployment/regional-s3-assets
9 | deployment/global-s3-assets
10 | dist
11 | webapp-manifest.json
12 | *.zip
13 | source/.coverage
14 | source/amc_uploader/amc_uploader.egg-info/
15 | source/tests/coverage-reports/
16 | deployment/open-source
17 | __pycache__
18 | .scannerwork/*
19 | source/tests/.DS_Store
20 | source/tests/amc_transformation/test_results
21 | source/tests/amc_transformation/test_results/*
22 | source/tests/.coverage
23 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | All notable changes to this project will be documented in this file.
4 |
5 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
6 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
7 |
8 | ## [3.0.15] - 2025-09-22
9 |
10 | ### Security
11 |
12 | - Updated axios to fix [CVE-2025-58754](https://nvd.nist.gov/vuln/detail/CVE-2025-58754)
13 | - Updated npm dependencies
14 |
15 | ## [3.0.14] - 2025-08-27
16 |
17 | ### Security
18 |
19 | - Upgraded urllib3 and requests packages
20 |
21 | ## [3.0.13] - 2025-07-31
22 |
23 | ### Security
24 |
25 | - Upgraded form-data dependency to version 4.0.4
26 |
27 | ## [3.0.12] - 2025-06-11
28 |
29 | ### Fixed
30 |
31 | - Corrected some normalization bugs for street address
32 |
33 | ## [3.0.11] - 2025-04-10
34 |
35 | ### Security
36 |
37 | - Updated npm dependencies
38 |
39 | ## [3.0.10] - 2025-03-14
40 |
41 | ### Security
42 |
43 | - Updated npm dependencies
44 |
45 | ## [3.0.9] - 2025-03-03
46 |
47 | ### Security
48 |
49 | - Updated python and npm dependencies
50 |
51 | ## [3.0.8] - 2024-11-25
52 |
53 | ### Security
54 |
55 | - Updated npm dependencies
56 |
57 | ## [3.0.7] - 2024-11-01
58 |
59 | ### Changed
60 |
61 | - Set awswrangler to version 3.9.1 for compatibility
62 |
63 | ## [3.0.6] - 2024-10-30
64 |
65 | ### Security
66 |
67 | - Vulnerability patches to address CVE-2024-21536.
68 |
69 | ## [3.0.5] - 2024-09-17
70 |
71 | ### Security
72 |
73 | - Vulnerability patches to address CVE-2024-45296, CVE-2024-43788, CVE-2024-4067, and CVE-2024-43799.
74 |
75 | ## [3.0.4] - 2024-08-20
76 |
77 | ### Security
78 |
79 | - Update axios to version 1.7.4 to address vulnerability CVE-2024-39338.
80 |
81 | ## [3.0.3] - 2024-08-02
82 |
83 | ### Security
84 |
85 | - Update fast-xml-parser to version 4.4.1 to address vulnerability CVE-2024-41818.
86 |
87 | ## [3.0.2] - 2024-07-26
88 |
89 | ### Changed
90 |
91 | - Remove Android (AAID) and iOS (IDFA) options for Mobile Ad Id (MAID) because MAID now supersedes AAID and IDFA.
92 |
93 | ### Fixed
94 |
95 | - Avoid dropping Country Code when LiveRamp, Experian, or Mobile Ad IDs are used for identity resolution instead of hashed PII.
96 | - Upload with manifest files rather than individual files so that the partitions of large files do not overwrite each other when using the FULL_REPLACE update strategy.
97 | - Fix error parsing the Glue ETL parameter for timestamp_column when country_code parameter is unspecified.
98 |
99 | ## [3.0.1] - 2024-06-21
100 |
101 | ### Security
102 |
103 | - Updated npm and python dependencies
104 |
105 | ## [3.0.0] - 2024-05-29
106 |
107 | ### Added
108 |
109 | - Added a link to the front-end URL to the welcome email.
110 |
111 | ### Changed
112 |
113 | - Migrated all AMC requests to use OAuth and the AMC API provided by Amazon Ads instead of instance-level APIs.
114 | - Migrated the front-end to align with the AMC API provided by Amazon Ads.
115 | - Migrated the front-end to use the Amazon Cognito hosted user interface for login instead of AWS Amplify.
116 | - Replaced time-series based file partitioning (which the AMC API no longer requires) with a strategy based on file size so that pseudonymized files will not exceed 500MB (compressed).
117 |
118 | ### Security
119 |
120 | - Updated npm dependencies
121 |
122 | ## [2.3.1] - 2024-03-28
123 |
124 | ### Security
125 |
126 | - Update npm dependencies for vue-cli
127 |
128 | ## [2.3.0] - 2024-02-29
129 |
130 | ### Changed
131 |
132 | - Provide country code when creating dataset in AMC.
133 |
134 | ### Security
135 |
136 | - Update npm dependencies for awswrangler, aws-amplify, bootstrap, and Vue.js
137 |
138 | ## [2.2.2] - 2024-01-09
139 |
140 | ### Fixed
141 |
142 | - Resolve a defect that causes Glue ETL dependencies to be deleted after updating the CloudFormation stack from v2.0.0, v2.1.0, v2.2.0, or v2.2.1 to a newer version.
143 |
144 | ### Changed
145 |
146 | - The Artifact Bucket resource is no longer automatically removed when the stack is removed. Customers will need to remove this bucket manually after removing the stack.
147 |
148 | ## [2.2.1] - 2023-12-07
149 |
150 | ### Added
151 |
152 | - Added the option for users to specify CSV/JSON file format when uploading to existing datasets.
153 |
154 | ### Fixed
155 |
156 | - Resolved an error that occurs when uploading files with an unexpected content type from a CloudFormation stack that was updated from v2.1.0.
157 | - Resolved a defect that prevented users from being able to upload Dimension datasets to multiple AMC instances.
158 |
159 | ## [2.2.0] - 2023-11-01
160 |
161 | ### Added
162 |
163 | - Added the option for users to specify CSV/JSON file format in the dataset definition web form.
164 | - Added an optional parameter to API resources /get_data_columns and /start_amc_transformation that allows users to specify CSV/JSON file format.
165 |
166 | ### Changed
167 |
168 | - Resolve code quality issues identified by SonarQube in the front-end.
169 | - Remove Autodetect and PT1M options from the dataset definition web form.
170 |
171 | ## [2.1.1] - 2023-09-11
172 |
173 | ### Fixed
174 |
175 | - Resolve a defect in the reporting of anonymous metrics that prevents CloudFormation events from being properly recorded.
176 |
177 | ## [2.1.0] - 2023-06-01
178 |
179 | ### Added
180 |
181 | - Added instructions for automating uploads via S3 trigger to Step 5 in the front-end.
182 | - Added support for Mobile Ad ID column types.
183 |
184 | ### Changed
185 |
186 | - Alphabetize the country code list shown in dataset definition web form
187 | - Enhance the protection of S3 access logs. (#232)
188 |
189 | ### Fixed
190 |
191 | - Allow stack names to contain upper case characters
192 | - Avoid redirecting / web requests to /step5
193 |
194 | ### Security
195 |
196 | - Update npm dependencies for vuejs, vue-cli, aws-amplify, bootstrap, webpack-subresource-integrity, and eslint.
197 | - Removed the eslint package in order to avoid a vulnerability in one of its dependencies
198 |
199 | ## [2.0.0] - 2023-04-17
200 |
201 | ### Added
202 |
203 | - Support for eu-west-1 [#45]
204 | - Upload to multiple AMC instances [#133, #145, #150, #172, #180, #183]
205 | - Upload to existing datasets [#73]
206 | - Upload multiple files at once [#41]
207 | - Upload gzipped JSON and CSV files [#159]
208 | - Improved data normalization for US, UK, JP, IN, IT, ES, CA, DE, FR [#61, #63, #72, #108, #109]
209 | - Glue ETL performance metrics [#52]
210 | - FACT partition size option [#52, #138]
211 | - Country code option [#83, #132, #155, #157]
212 | - LiveRamp support [#40, #85]
213 | - Import/Export dataset schema [#102]
214 | - Show API errors in front-end [#104]
215 | - Retry on AMC request timeouts. [#117, #160]
216 | - Custom lambda layer for aws-xray-sdk [#37, #68, #172]
217 | - Automated integration tests [#34, #42, #173]
218 | - Automated unit tests [#105, #120]
219 | - AWS Solutions quality bar [#47, #64, #91, #103, #113, #153, #154, #156, #162]
220 | - AWS Solutions pipeline integration [#81, #96]
221 | - Add Pre-commit, fix SonarQube/SonarLint issues [#16]
222 | - Add Unit Tests, combine coverage report for source/api/tests and source/tests. [#100]
223 | - API documentation [#152]
224 | - Show upload failure messages when provided by AMC [#221]
225 |
226 | ### Changed
227 |
228 | - Prefix S3 bucket names with CF stack name to help organize S3 resources [#29]
229 | - Allocate more time to HelperFunction for removing stack resources [#58]
230 | - Remove unused lambda layer [#37]
231 | - Architecture diagram [#177]
232 | - Set s3 object ownership in order to maintain compatibility with s3 access logging [#229]
233 |
234 | ### Fixed
235 |
236 | - Redundant log group [#168]
237 | - Sorting of front-end tables [#148, #151]
238 | - UX/UI issues for dataset schema [#93]
239 | - Dropping first record [#23]
240 |
241 | ### Security
242 |
243 | - Enable integrity checks for front-end assets [#39]
244 | - Migrate python 3.9 to python 3.10 in build script [#33, #95]
245 |
246 | ## [1.0.0] - 2023-01-05
247 |
248 | ### Added
249 |
250 | - Initial release.
251 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | ## Code of Conduct
2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
4 | opensource-codeofconduct@amazon.com with any additional questions or comments.
5 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing Guidelines
2 |
3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional
4 | documentation, we greatly value feedback and contributions from our community.
5 |
6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary
7 | information to effectively respond to your bug report or contribution.
8 |
9 | ## Reporting Bugs/Feature Requests
10 |
11 | We welcome you to use the GitHub issue tracker to report bugs or suggest features.
12 |
13 | When filing an issue, please check [existing open](https://github.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/issues), or [recently closed](https://github.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/issues?utf8=%E2%9C%93&q=is%3Aissue%20is%3Aclosed%20), issues to make sure somebody else hasn't already
14 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful:
15 |
16 | - A reproducible test case or series of steps
17 | - The version of our code being used
18 | - Any modifications you've made relevant to the bug
19 | - Anything unusual about your environment or deployment
20 |
21 | ## Contributing via Pull Requests
22 |
23 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that:
24 |
25 | 1. You are working against the latest source on the _main_ branch.
26 | 2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already.
27 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted.
28 |
29 | To send us a pull request, please:
30 |
31 | 1. Fork the repository.
32 | 2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change.
33 | 3. Ensure local tests pass.
34 | 4. Commit to your fork using clear commit messages.
35 | 5. Send us a pull request, answering any default questions in the pull request interface.
36 | 6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation.
37 |
38 | GitHub provides additional documentation on [forking a repository](https://help.github.com/articles/fork-a-repo/) and
39 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/).
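For reference, a typical command-line flow looks something like this (the fork URL and branch name below are placeholders):

```sh
# Clone your fork and create a working branch
git clone https://github.com/<your-username>/amazon-marketing-cloud-uploader-from-aws.git
cd amazon-marketing-cloud-uploader-from-aws
git checkout -b my-fix

# Make your change, run the local unit tests, then commit and push
(cd deployment && ./run-unit-tests.sh)
git commit -am "Describe the change"
git push origin my-fix
# Then open a pull request against the main branch on GitHub
```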
40 |
41 | ## Finding contributions to work on
42 |
43 | Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any ['help wanted'](https://github.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/labels/help%20wanted) issues is a great place to start.
44 |
45 | ## Code of Conduct
46 |
47 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
48 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
49 | opensource-codeofconduct@amazon.com with any additional questions or comments.
50 |
51 | ## Security issue notifications
52 |
53 | If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue.
54 |
55 | ## Licensing
56 |
57 | See the [LICENSE](https://github.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/blob/main/LICENSE.txt) file for our project's licensing. We will ask you to confirm the licensing of your contribution.
58 |
59 | We may ask you to sign a [Contributor License Agreement (CLA)](https://en.wikipedia.org/wiki/Contributor_License_Agreement) for larger changes.
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 | # Reporting Security Issues
2 |
3 | We take all security reports seriously.
4 | When we receive such reports,
5 | we will investigate and subsequently address
6 | any potential vulnerabilities as quickly as possible.
7 | If you discover a potential security issue in this project,
8 | please notify AWS/Amazon Security via our
9 | [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/)
10 | or directly via email to [AWS Security](mailto:aws-security@amazon.com).
11 | Please do *not* create a public GitHub issue in this project.
12 |
--------------------------------------------------------------------------------
/deployment/auth.yaml:
--------------------------------------------------------------------------------
1 | AWSTemplateFormatVersion: "2010-09-09"
2 | Description: "(SO0222auth) - Amazon Marketing Cloud Uploader from AWS. This AWS CloudFormation template defines Cognito resources. %%VERSION%%"
3 |
4 | Parameters:
5 | ParentStackName:
6 | Description: Parent stack name
7 | Type: String
8 | AdminEmail:
9 | Description: Email address of the solution administrator
10 | Type: String
11 | DataBucketName:
12 | Description: Name of the first-party data source bucket
13 | Type: String
14 | RestApiId:
15 | Description: REST API ID
16 | Type: String
17 | CognitoUISubdomain:
18 | Description: Prefix for Cognito hosted UI
19 | Type: String
20 | CognitoCallbackURL:
21 | Description: Redirect URL after successful sign-in
22 | Type: String
23 | CognitoLogoutURL:
24 | Description: Redirect URL after successful logout
25 | Type: String
26 |
27 | Resources:
28 | UserPool:
29 | Type: AWS::Cognito::UserPool
30 | Properties:
31 | MfaConfiguration: OPTIONAL
32 | UserPoolAddOns:
33 | AdvancedSecurityMode: "ENFORCED"
34 | EnabledMfas:
35 | - SOFTWARE_TOKEN_MFA
36 | AdminCreateUserConfig:
37 | AllowAdminCreateUserOnly: True
38 | InviteMessageTemplate:
39 | EmailMessage: !Join ["", [
40 | "\nPlease use the credentials below to login to the AMC Uploader console.\n",
41 | "Console: ",
42 | !Ref CognitoCallbackURL,
43 | "\n",
44 | "Username: {username}\n",
45 | "Password: {####}\n",
46 | "Stack name: ",
47 | !Ref ParentStackName,
48 | "\n"
49 | ]]
50 | EmailSubject: "Welcome to Amazon Marketing Cloud uploader from AWS"
51 | EmailConfiguration:
52 | EmailSendingAccount: 'COGNITO_DEFAULT'
53 | AutoVerifiedAttributes: ['email']
54 |
55 | UserPoolDomain:
56 | Type: AWS::Cognito::UserPoolDomain
57 | Properties:
58 | Domain: !Ref CognitoUISubdomain
59 | UserPoolId: !Ref UserPool
60 |
61 | UserPoolRiskConfiguration:
62 | Type: AWS::Cognito::UserPoolRiskConfigurationAttachment
63 | Properties:
64 | UserPoolId: !Ref UserPool
65 | ClientId: "ALL"
66 | AccountTakeoverRiskConfiguration:
67 | Actions:
68 | HighAction:
69 | EventAction: "MFA_REQUIRED"
70 | Notify: False
71 | MediumAction:
72 | EventAction: "MFA_IF_CONFIGURED"
73 | Notify: False
74 | LowAction:
75 | EventAction: "MFA_IF_CONFIGURED"
76 | Notify: False
77 |
78 | WebAppClient:
79 | Type: AWS::Cognito::UserPoolClient
80 | Properties:
81 | AllowedOAuthFlows:
82 | - code
83 | AllowedOAuthFlowsUserPoolClient: true
84 | AllowedOAuthScopes:
85 | - openid
86 | - profile
87 | UserPoolId: !Ref UserPool
88 | CallbackURLs:
89 | - !Ref CognitoCallbackURL
90 | LogoutURLs:
91 | - !Ref CognitoLogoutURL
92 | SupportedIdentityProviders:
93 | - COGNITO
94 |
95 | # Service - cognito / security infrastructure
96 |
97 | # CognitoRoleMappingTransformer is a hack meant to workaround
98 | # Cognito's (current) lack of CF support. References:
99 | # https://forums.aws.amazon.com/message.jspa?messageID=790437#790437
100 | # https://stackoverflow.com/questions/53131052/aws-cloudformation-can-not-create-stack-when-awscognitoidentitypoolroleattac
101 |
102 | CognitoRoleMappingTransformer:
103 | Type: AWS::Lambda::Function
104 | Metadata:
105 | cfn_nag:
106 | rules_to_suppress:
107 | - id: W58
108 | reason: "The role includes permission to write to CloudWatch Logs"
109 | - id: W89
110 | reason: "This resource does not need to access any other resource provisioned within a VPC."
111 | - id: W92
112 | reason: "This function does not require performance optimization, so the default concurrency limits suffice."
113 | Properties:
114 | Code:
115 | ZipFile: |
116 | import json
117 | import cfnresponse
118 |
119 | def handler(event, context):
120 | print("Event: %s" % json.dumps(event))
121 | resourceProperties = event["ResourceProperties"]
122 | responseData = {
123 | "RoleMapping": {
124 | resourceProperties["IdentityProvider"]: {
125 | "Type": resourceProperties["Type"]
126 | }
127 | }
128 | }
129 | if resourceProperties["AmbiguousRoleResolution"]:
130 | responseData["RoleMapping"][resourceProperties["IdentityProvider"]]["AmbiguousRoleResolution"] = \
131 | resourceProperties["AmbiguousRoleResolution"]
132 |
133 | print(responseData)
134 | cfnresponse.send(event, context, cfnresponse.SUCCESS, responseData)
135 | Handler: !Join
136 | - ''
137 | - - index
138 | - .handler
139 | Role: !GetAtt CognitoRoleMapperLambdaExecutionRole.Arn
140 | Runtime: python3.12
141 | Timeout: 30
142 |
143 | CognitoRoleMappingTransformerLogGroup:
144 | Type: AWS::Logs::LogGroup
145 | Metadata:
146 | cfn_nag:
147 | rules_to_suppress:
148 | - id: W84
149 | reason: "The data generated via this role does not need to be encrypted."
150 | Properties:
151 | LogGroupName: !Join ['/', ['/aws/lambda', !Ref CognitoRoleMappingTransformer]]
152 | RetentionInDays: 3653
153 |
154 | CognitoRoleMapperLambdaExecutionRole:
155 | Type: 'AWS::IAM::Role'
156 | Properties:
157 | AssumeRolePolicyDocument:
158 | Version: 2012-10-17
159 | Statement:
160 | - Effect: Allow
161 | Principal:
162 | Service:
163 | - lambda.amazonaws.com
164 | Action:
165 | - 'sts:AssumeRole'
166 | Path: /
167 | Policies:
168 | - PolicyName: root
169 | PolicyDocument:
170 | Version: 2012-10-17
171 | Statement:
172 | - Effect: Allow
173 | Action:
174 | - 'logs:CreateLogStream'
175 | - 'logs:PutLogEvents'
176 | Resource: 'arn:aws:logs:*:*:*'
177 | Metadata:
178 | guard:
179 | SuppressedRules:
180 | - IAM_NO_INLINE_POLICY_CHECK
181 |
182 | IdentityPool:
183 | Type: AWS::Cognito::IdentityPool
184 | Properties:
185 | AllowUnauthenticatedIdentities: False
186 | CognitoIdentityProviders:
187 | - ClientId: !Ref WebAppClient
188 | ProviderName: !GetAtt UserPool.ProviderName
189 |
190 | CognitoStandardAuthDefaultRole:
191 | Type: "AWS::IAM::Role"
192 | Metadata:
193 | cfn_nag:
194 | rules_to_suppress:
195 | - id: F38
196 | reason: "The wildcard is used for a deny action, not an allow action."
197 | guard:
198 | SuppressedRules:
199 | - IAM_NO_INLINE_POLICY_CHECK
200 | Properties:
201 | AssumeRolePolicyDocument:
202 | Version: "2012-10-17"
203 | Statement:
204 | - Effect: "Allow"
205 | Principal:
206 | Federated: "cognito-identity.amazonaws.com"
207 | Action:
208 | - "sts:AssumeRoleWithWebIdentity"
209 | Condition:
210 | StringEquals:
211 | "cognito-identity.amazonaws.com:aud": !Ref IdentityPool
212 | "ForAnyValue:StringEquals":
213 | "cognito-identity.amazonaws.com:amr": authenticated
214 | Policies:
215 | - PolicyName: !Sub "${AWS::StackName}-AuthNoGroup"
216 | PolicyDocument:
217 | Version: "2012-10-17"
218 | Statement:
219 | - Action: "*"
220 | Resource: "*"
221 | Effect: "Deny"
222 |
223 | CognitoStandardUnauthDefaultRole:
224 | Type: "AWS::IAM::Role"
225 | Properties:
226 | AssumeRolePolicyDocument:
227 | Version: "2012-10-17"
228 | Statement:
229 | - Effect: "Allow"
230 | Principal:
231 | Federated: "cognito-identity.amazonaws.com"
232 | Action:
233 | - "sts:AssumeRoleWithWebIdentity"
234 | Condition:
235 | StringEquals:
236 | "cognito-identity.amazonaws.com:aud": !Ref IdentityPool
237 | "ForAnyValue:StringEquals":
238 | "cognito-identity.amazonaws.com:amr": unauthenticated
239 |
240 | IdentityPoolRoleMapping:
241 | Type: AWS::Cognito::IdentityPoolRoleAttachment
242 | Properties:
243 | IdentityPoolId: !Ref IdentityPool
244 | RoleMappings:
245 | TransformedRoleMapping:
246 | IdentityProvider:
247 | 'Fn::Join':
248 | - ':'
249 | - - 'Fn::GetAtt':
250 | - UserPool
251 | - ProviderName
252 | - Ref: WebAppClient
253 | AmbiguousRoleResolution: Deny
254 | Type: Token
255 | Roles:
256 | authenticated: !GetAtt CognitoStandardAuthDefaultRole.Arn
257 | unauthenticated: !GetAtt CognitoStandardUnauthDefaultRole.Arn
258 |
259 | AdminGroup:
260 | Type: AWS::Cognito::UserPoolGroup
261 | Properties:
262 | Description: 'User group for solution administrators'
263 | RoleArn: !GetAtt AdminRole.Arn
264 | UserPoolId: !Ref UserPool
265 | GroupName: !Sub "${AWS::StackName}-Admins"
266 |
267 | AdminAccount:
268 | Type: AWS::Cognito::UserPoolUser
269 | Properties:
270 | DesiredDeliveryMediums:
271 | - EMAIL
272 | UserAttributes: [{"Name": "email", "Value": !Ref AdminEmail}]
273 | Username: !Ref AdminEmail
274 | UserPoolId: !Ref UserPool
275 |
276 | AdminRole:
277 | Type: "AWS::IAM::Role"
278 | Metadata:
279 | guard:
280 | SuppressedRules:
281 | - IAM_NO_INLINE_POLICY_CHECK
282 | Properties:
283 | AssumeRolePolicyDocument:
284 | Version: "2012-10-17"
285 | Statement:
286 | - Effect: "Allow"
287 | Principal:
288 | Federated: "cognito-identity.amazonaws.com"
289 | Action:
290 | - "sts:AssumeRoleWithWebIdentity"
291 | Condition:
292 | StringEquals:
293 | "cognito-identity.amazonaws.com:aud": !Ref IdentityPool
294 | "ForAnyValue:StringEquals":
295 | "cognito-identity.amazonaws.com:amr": authenticated
296 | Policies:
297 | - PolicyName: !Sub "${AWS::StackName}-AdminPolicy"
298 | PolicyDocument: !Sub
299 | - |-
300 | {
301 | "Version": "2012-10-17",
302 | "Statement": [
303 | {
304 | "Action": [
305 | "execute-api:Invoke"
306 | ],
307 | "Effect": "Allow",
308 | "Resource": [
309 | "arn:aws:execute-api:${region}:${account}:${restApi}/*"
310 | ]
311 | },
312 | {
313 | "Action": [
314 | "s3:PutObject"
315 | ],
316 | "Effect": "Allow",
317 | "Resource": [
318 | "arn:aws:s3:::${DataBucketName}/public/*"
319 | ]
320 | },
321 | {
322 | "Action": [
323 | "s3:ListBucket"
324 | ],
325 | "Effect": "Allow",
326 | "Resource": "arn:aws:s3:::${DataBucketName}"
327 | }
328 | ]
329 | }
330 | - {
331 | region: !Ref "AWS::Region",
332 | account: !Ref "AWS::AccountId",
333 | restApi: !Ref RestApiId,
334 | DataBucketName: !Ref DataBucketName
335 | }
336 |
337 | AddAdminUserToAdminGroup:
338 | DependsOn: AdminAccount
339 | Type: AWS::Cognito::UserPoolUserToGroupAttachment
340 | Properties:
341 | GroupName: !Ref AdminGroup
342 | Username: !Ref AdminEmail
343 | UserPoolId: !Ref UserPool
344 |
345 | Outputs:
346 | AdminRoleArn:
347 | Value: !GetAtt AdminRole.Arn
348 | UserPoolId:
349 | Value: !Ref UserPool
350 | IdentityPoolId:
351 | Value: !Ref IdentityPool
352 | UserPoolClientId:
353 | Value: !Ref WebAppClient
354 | HostedUIDomain:
355 | Value: !Sub "${CognitoUISubdomain}.auth.${AWS::Region}.amazoncognito.com"
356 |
--------------------------------------------------------------------------------
/deployment/lambda_layer_factory/Dockerfile:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | FROM public.ecr.aws/docker/library/alpine:3.20
5 |
6 | RUN apk add --no-cache --upgrade bash
7 | RUN apk add zip
8 | RUN apk add python3=~3.12
9 | RUN apk add --update py-pip
10 |
11 | # Install Python packages and build zip files at runtime
12 | WORKDIR /
13 | RUN mkdir -p /packages/lambda_layer-python-3.12/python/lib/python3.12/site-packages
14 | COPY ./docker-entrypoint.sh /
15 |
16 | ENTRYPOINT ["sh", "docker-entrypoint.sh"]
17 |
--------------------------------------------------------------------------------
/deployment/lambda_layer_factory/build-lambda-layer.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | #############################################################################
3 | # PURPOSE: Build a Lambda layer for specified Python libraries.
4 | #
5 | # PREREQUISITES:
6 | # docker, aws cli
7 | #
8 | # USAGE:
9 | # Save the python libraries you want in the lambda layer in
10 | # requirements.txt, then run like this:
11 | #
12 | # ./build-lambda-layer.sh <path to requirements.txt>
13 | #
14 | #############################################################################
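#
# Example invocation, plus an optional manual publish of the resulting zip
# for testing (the layer name below is illustrative, not part of this repo):
#
#   ./build-lambda-layer.sh requirements.txt
#   aws lambda publish-layer-version \
#     --layer-name amcufa-aws-xray-sdk \
#     --zip-file fileb://lambda_layer_python3.12.zip \
#     --compatible-runtimes python3.12
#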
15 |
16 | # Check to see if input has been provided:
17 | if [ -z "$1" ]; then
18 | echo "USAGE: ./build-lambda-layer.sh <path to requirements.txt>"
19 | exit 1
20 | fi
21 |
22 | REQUIREMENTS_FILE="$1"
23 |
24 | # Check to see if requirements.txt file exists
25 |
26 | if [ ! -f "$REQUIREMENTS_FILE" ]; then
27 | echo "$REQUIREMENTS_FILE does not exist"
28 | exit 1
29 | fi
30 |
31 | # Check to see if AWS CLI and Docker are installed
32 | docker --version
33 | if [ $? -ne 0 ]; then
34 | echo "ERROR: install Docker before running this script."
35 | exit 1
36 | else
37 | docker ps > /dev/null
38 | if [ $? -ne 0 ]; then
39 | echo "ERROR: start Docker before running this script."
40 | exit 1
41 | fi
42 | fi
43 |
44 | echo "------------------------------------------------------------------------------"
45 | echo "Building Lambda Layer zip file"
46 | echo "------------------------------------------------------------------------------"
47 |
48 | rm -rf ./lambda_layer_python-3.12/
49 | rm -f ./lambda_layer_python3.12.zip
50 | docker logout public.ecr.aws
51 | docker build --tag=lambda_layer_factory:latest . 2>&1 > /dev/null
52 | if [ $? -eq 0 ]; then
53 | docker run --rm -v "$PWD":/packages lambda_layer_factory
54 | fi
55 | if [[ ! -f ./lambda_layer_python3.12.zip ]]; then
56 | echo "ERROR: Failed to build lambda layer zip file."
57 | exit 1
58 | fi
59 | echo "------------------------------------------------------------------------------"
60 | echo "Verifying the Lambda layer meets AWS size limits"
61 | echo "------------------------------------------------------------------------------"
62 | # See https://docs.aws.amazon.com/lambda/latest/dg/limits.html
63 |
64 | unzip -q -d lambda_layer_python-3.12 ./lambda_layer_python3.12.zip
65 | ZIPPED_LIMIT=50
66 | UNZIPPED_LIMIT=250
67 | UNZIPPED_SIZE_312=$(du -sm ./lambda_layer_python-3.12/ | cut -f 1)
68 | ZIPPED_SIZE_312=$(du -sm ./lambda_layer_python3.12.zip | cut -f 1)
69 |
70 | rm -rf ./lambda_layer-python-3.12/
71 | if (($UNZIPPED_SIZE_312 > $UNZIPPED_LIMIT || $ZIPPED_SIZE_312 > $ZIPPED_LIMIT)); then
72 | echo "ERROR: Deployment package exceeds AWS Lambda layer size limits.";
73 | exit 1
74 | fi
75 | echo "Lambda layers have been saved to ./lambda_layer_python3.12.zip."
76 |
77 | echo "------------------------------------------------------------------------------"
78 | echo "Done"
79 | echo "------------------------------------------------------------------------------"
80 |
--------------------------------------------------------------------------------
/deployment/lambda_layer_factory/docker-entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | echo "================================================================================"
4 | echo "Installing the packages listed in requirements.txt:"
5 | echo "================================================================================"
6 | cat /packages/requirements.txt
7 | pip3.12 install -q -r /packages/requirements.txt -t /packages/lambda_layer_python-3.12/python/lib/python3.12/site-packages
8 |
9 |
10 | echo "================================================================================"
11 | echo "Creating zip files for Lambda layers"
12 | echo "================================================================================"
13 | cd /packages/lambda_layer_python-3.12/
14 | zip -q -r /packages/lambda_layer_python3.12.zip .
15 |
16 |
17 | # Clean up build environment
18 | cd /packages/
19 | rm -rf /packages/lambda_layer_python-3.12/
20 |
--------------------------------------------------------------------------------
/deployment/lambda_layer_factory/requirements.txt:
--------------------------------------------------------------------------------
1 | aws-xray-sdk==2.13.0
2 |
--------------------------------------------------------------------------------
/deployment/run-unit-tests.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | ###############################################################################
3 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
4 | # SPDX-License-Identifier: Apache-2.0
5 | #
6 | # PURPOSE:
7 | # Verify that pytest scripts achieve a minimum threshold for code coverage.
8 | #
9 | # USAGE:
10 | # ./run-unit-tests.sh [-h] [-v]
11 | #
12 | # The following options are available:
13 | #
14 | # -h | --help Print usage
15 | # -v | --verbose Print script debug info
16 | #
17 | ###############################################################################
18 |
19 | trap cleanup_and_die SIGINT SIGTERM ERR
20 |
21 | usage() {
22 | msg "$msg"
23 | cat <<EOF
59 | msg() {
60 | echo >&2 -e "${1-}"
61 | }
62 |
63 | die() {
64 | local msg=$1
65 | local code=${2-1} # default exit status 1
66 | msg "$msg"
67 | exit "$code"
68 | }
69 |
70 | parse_params() {
71 | # default values of variables set from params
72 | flag=0
73 | param=''
74 | use_solution_builder_pipeline=false
75 |
76 | while :; do
77 | case "${1-}" in
78 | -h | --help) usage ;;
79 | -v | --verbose) set -x ;;
80 | -?*) die "Unknown option: $1" ;;
81 | *) break ;;
82 | esac
83 | shift
84 | done
85 |
86 | args=("$@")
87 |
88 | return 0
89 | }
90 |
91 | parse_params "$@"
92 |
93 | # Get reference for all important folders
94 | template_dir="$PWD"
95 | source_dir="$(cd $template_dir/../source; pwd -P)"
96 | root_dir="$template_dir/.."
97 |
98 | # Create and activate a temporary Python environment for this script.
99 | echo "------------------------------------------------------------------------------"
100 | echo "Creating a temporary Python virtualenv for this script"
101 | echo "------------------------------------------------------------------------------"
102 | if [[ "$VIRTUAL_ENV" != "" ]]; then
103 | echo "ERROR: Do not run this script inside Virtualenv. Type \`deactivate\` and run again.";
104 | exit 1;
105 | fi
106 | command -v python3
107 | if [ $? -ne 0 ]; then
108 | echo "ERROR: install Python3 before running this script"
109 | exit 1
110 | fi
111 | echo "Using virtual python environment:"
112 | VENV=$(mktemp -d) && echo "$VENV"
113 | command -v python3 > /dev/null
114 | if [ $? -ne 0 ]; then
115 | echo "ERROR: install Python3 before running this script"
116 | exit 1
117 | fi
118 | python3 -m venv "$VENV"
119 | source "$VENV"/bin/activate
120 |
121 | # configure the environment
122 | cd $source_dir
123 | pip install --upgrade pip
124 | pip install -q -r requirements-dev.txt
125 | pip install -q -r tests/requirements-test.txt
126 |
127 | # env variables
128 | export PYTHONDONTWRITEBYTECODE=1
129 | export AMC_API_ROLE_ARN="arn:aws:iam::999999999999:role/SomeTestRole"
130 | export SOLUTION_NAME="amcufa test"
131 | export ARTIFACT_BUCKET="test_bucket"
132 | export SYSTEM_TABLE_NAME="test_table"
133 | export UPLOAD_FAILURES_TABLE_NAME="upload_failures_test_table"
134 | export VERSION="0.0.0"
135 | export botoConfig='{"region_name": "us-east-1"}'
136 | export AWS_XRAY_SDK_ENABLED=false
137 | export AMC_GLUE_JOB_NAME="some-GlueStack-123-amc-transformation-job"
138 | export CUSTOMER_MANAGED_KEY="test_customer_managed_key"
139 | export AWS_REGION="us-east-1"
140 | export SOLUTION_VERSION="0.0.0"
141 | export CLIENT_ID="123456sdgdg"
142 | export CLIENT_SECRET="fdvaed4535gd"
143 | export STACK_NAME="amcufa-stack-name"
144 |
145 | echo "------------------------------------------------------------------------------"
146 | echo "[Test] Run pytest with coverage"
147 | echo "------------------------------------------------------------------------------"
148 | cd $source_dir
149 | # setup coverage report path
150 | coverage_report_path=$source_dir/tests/coverage-reports/source.coverage.xml
151 | echo "coverage report path set to $coverage_report_path"
152 | cd tests
153 | # set PYTHONPATH to enable importing modules from ./glue/library,/anonymous_data_logger
154 | export PYTHONPATH=$PYTHONPATH:../anonymous_data_logger:../api:../glue:../helper:../cognito_hosted_ui_resource
155 | pytest unit_test/. --cov=$source_dir/glue/ --cov=$source_dir/helper/ --cov=$source_dir/amc_uploader/ --cov=$source_dir/anonymous_data_logger/ --cov=$source_dir/api/ --cov=$source_dir/share/ --cov=$source_dir/cognito_hosted_ui_resource/ --cov-report term-missing --cov-report term --cov-report "xml:$coverage_report_path" --cov-config=$source_dir/.coveragerc -vv
156 | cd ..
157 |
158 | # The pytest --cov with its parameters and .coveragerc generates a xml cov-report with `coverage/sources` list
159 | # with absolute path for the source directories. To avoid dependencies of tools (such as SonarQube) on different
160 | # absolute paths for source directories, this substitution is used to convert each absolute source directory
161 | # path to the corresponding project relative path. The $source_dir holds the absolute path for source directory.
162 | sed -i -e "s,$source_dir,source,g" $coverage_report_path
163 |
164 | cleanup
165 | echo "Done"
166 | exit 0
167 |
--------------------------------------------------------------------------------
/docs/images/Settings.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/docs/images/Settings.png
--------------------------------------------------------------------------------
/docs/images/Step1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/docs/images/Step1.png
--------------------------------------------------------------------------------
/docs/images/Step2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/docs/images/Step2.png
--------------------------------------------------------------------------------
/docs/images/Step3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/docs/images/Step3.png
--------------------------------------------------------------------------------
/docs/images/Step4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/docs/images/Step4.png
--------------------------------------------------------------------------------
/docs/images/Step5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/docs/images/Step5.png
--------------------------------------------------------------------------------
/docs/images/Step6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/docs/images/Step6.png
--------------------------------------------------------------------------------
/docs/images/architecture.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/docs/images/architecture.png
--------------------------------------------------------------------------------
/docs/images/create_user01.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/docs/images/create_user01.png
--------------------------------------------------------------------------------
/docs/images/create_user02.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/docs/images/create_user02.png
--------------------------------------------------------------------------------
/docs/images/create_user03.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/docs/images/create_user03.png
--------------------------------------------------------------------------------
/docs/images/launch-stack.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/docs/images/launch-stack.png
--------------------------------------------------------------------------------
/docs/sample_data/sample_dimension.csv:
--------------------------------------------------------------------------------
1 | first_name,last_name,gender,street_address,first_visit
2 | John,Skinner,FEMALE,63 Old Nectar Ridge,06/25/2013
3 | David,Reynolds,FEMALE,1415 Blue Hills Heights,05/17/2013
4 | Rodney,Calloway,MALE,97209 Round Chestnut Swale,07/12/2013
5 | Jose,Mclemore,FEMALE,51814 Tawny Nectar Edge,06/30/2013
6 | Thomas,Smith,FEMALE,31135 Old Robin Vale,05/27/2013
7 | Edward,Yokley,FEMALE,67078 Thunder Mountain Manor,02/09/2013
8 | Shelley,Mcnichols,MALE,19263 Pied Blossom Orchard,04/07/2013
9 | Josette,Christian,MALE,22174 Gentle Beacon Hollow,07/23/2013
10 | Beverly,Gumbs,MALE,59879 Grand Leaf Bluff,06/11/2013
11 | Jose,Hopkins,MALE,91014 Emerald View Orchard,03/05/2013
12 | Michael,Healey,MALE,4785 Heather Autumn Place,05/17/2013
13 | Rodney,Vincent,FEMALE,5041 Blue Butterfly Run,04/24/2013
14 | Thomas,Hollingsworth,MALE,276 Foggy Zephyr Pike,07/02/2013
15 | Tyson,Baker,FEMALE,37869 Foggy Deer Road,06/19/2013
16 | Grace,Moseley,FEMALE,62209 Easy Pebble Orchard,07/23/2013
17 | Jonnie,Potter,FEMALE,69251 Square Quail Isle,04/04/2013
18 | Jennifer,Coates,FEMALE,68048 Heather Axe Run,06/18/2013
19 | Dale,Baker,FEMALE,14831 Quiet Axe Bayou,06/25/2013
20 | Lloyd,Brown,FEMALE,35599 Pleasant Shadow Saunter,11/12/2012
21 | Adam,Spencer,FEMALE,3927 Jagged Lake Flats,07/04/2013
22 | Benjamin,Mccallum,MALE,2372 Cold Holly Lane,10/13/2012
23 | Olga,Meyer,FEMALE,22594 Pied Lake Bank,06/05/2013
24 | Juana,Smith,MALE,14361 Lone Stone Quay,08/04/2012
25 | Aretha,Hunter,FEMALE,11768 Cotton Fox Turn,03/30/2013
26 | Gregory,Glaze,MALE,22489 Rose Bird Flats,06/20/2013
27 | Bernardo,Burley,FEMALE,8111 Sleepy Dale Dale,03/12/2013
28 | Oscar,Ward,MALE,27440 Iron Elk Pass,03/19/2013
29 | James,Huff,MALE,33944 Amber River Lace,05/25/2013
30 | Clarence,Dahle,MALE,97606 Quiet Blossom Chase,07/08/2013
31 | Nicholas,Tolbert,MALE,24845 Square Goat Drive,06/19/2013
32 | James,Clemente,MALE,2499 Cinder Elk Villa,07/02/2013
33 | Mildred,Somers,FEMALE,25383 Grand Butternut Haven,04/30/2013
34 | Nicholas,Harris,FEMALE,45238 Lazy Elm Wander,01/14/2013
35 | Thomas,Whalen,FEMALE,88962 Big Beaver Wander,01/14/2013
36 | Marvin,Cook,MALE,66521 Heather Pine Wander,07/21/2013
37 | Victor,Mitchell,MALE,13567 Lone Bird Place,04/25/2013
38 | Ruby,Roberts,MALE,30731 Indian Anchor Estates,07/04/2013
39 | Shirley,Thompson,MALE,25733 Windy Goat Summit,04/16/2013
40 | Jordan,Harvey,FEMALE,47475 Sleepy River Twist,04/20/2013
41 | Anthony,White,MALE,66490 Honey Treasure Road,02/09/2013
42 | Robert,Mingo,FEMALE,4899 Honey Squirrel Street,07/06/2013
43 | Linda,Ramsey,FEMALE,3621 Pleasant Lamb Wander,07/05/2013
44 | Mark,Wiley,MALE,10218 Ivory Apple Glen,01/11/2013
45 | James,Husband,FEMALE,72090 Gentle Leaf Meadow,07/28/2013
46 | Eric,Lapointe,FEMALE,246 Rose Gate Bluff,04/13/2013
47 | Frederick,Mathews,FEMALE,3799 Noble Willow Wander,06/02/2013
48 | David,Smith,MALE,60565 Pied Branch Hollow,05/06/2013
49 | Mark,Keyes,MALE,87701 Silver Squirrel Niche,07/29/2013
50 | William,Hall,MALE,45067 Lost Rock Run,09/27/2012
51 | Eugene,Norcross,MALE,7543 Still Barn Stroll,04/28/2013
52 | Dottie,Roller,FEMALE,29519 Umber Bird Flats,07/19/2013
53 | Salvador,Fairbanks,FEMALE,1565 Long Spring Promenade,07/14/2013
54 | Thomas,Mccray,MALE,75837 Emerald Harbor Bayou,05/24/2013
55 | Rosa,Mccombs,FEMALE,32669 Burning Blossom Meadow,06/25/2013
56 | Mark,Fowler,FEMALE,224 Red Leaf Turn,02/07/2013
57 | Allen,Hoke,MALE,6565 White Crow Isle,04/27/2013
58 | Susan,Shattuck,OTHER,14572 Rustic Chestnut Ramble,07/21/2013
59 | Julie,Lopez,FEMALE,66261 Velvet Zephyr Chase,06/11/2013
60 | Amy,Bowen,MALE,93770 Windy Wagon Arbor,04/30/2013
61 | Isaac,Hatfield,MALE,8024 Heather Bridge Niche,04/21/2013
62 | Jean,Cross,MALE,0 Strong Castle Flats,07/19/2013
63 | Nicholas,Luton,MALE,69152 Red Rise Ledge,06/24/2013
64 | Thomas,Cherry,FEMALE,66525 High Wagon Path,05/30/2013
65 | Guillermina,Hall,MALE,94669 Sandy Oak Avenue,07/09/2013
66 | Charles,Banks,MALE,86122 Lazy Prairie Bend,07/18/2013
67 | Yolanda,Saunders,OTHER,88181 Stony Autumn Falls,07/15/2013
68 | Willie,Gaines,FEMALE,9471 Blue Willow Ridge,07/24/2013
69 | Lori,Sharp,FEMALE,96904 Sunny Pumpkin Jetty,06/14/2013
70 | Benjamin,Baker,MALE,14276 Golden Nest Trail,06/16/2013
71 | Marcelino,Hodge,MALE,6 Old Rise Crest,05/10/2013
72 | Marcela,Martin,MALE,2124 Hush Anchor Point,05/30/2013
73 | Verna,Harris,FEMALE,95242 Cotton Nest Lagoon,07/24/2013
74 | Frank,Gibson,FEMALE,17375 Umber Creek Round,07/08/2013
75 | Kandis,Wilson,MALE,10479 Misty Butternut Meadow,06/12/2013
76 | Ronald,Barksdale,OTHER,7479 Silver Brook Saunter,06/29/2013
77 | Melinda,Migues,FEMALE,26413 Merry Log Heights,03/04/2013
78 | Ouida,Curtis,MALE,12226 Small Mill Bank,06/20/2013
79 | Salina,Waugh,MALE,69927 Calm Pumpkin Race,07/13/2013
80 | Albert,Murry,MALE,1077 Lazy Berry Isle,07/02/2013
81 | Henry,Marra,MALE,91696 Shady Branch Ledge,06/29/2013
82 | Douglas,Cobb,MALE,27714 Old Crow Stroll,05/02/2013
83 | Virginia,Mann,MALE,50944 Hazy Prairie Ridge,01/29/2013
84 | Tequila,Kalb,OTHER,12190 Thunder Nectar Valley,03/26/2012
85 | Reba,Navarro,FEMALE,57687 Blue Beaver Stead,05/25/2013
86 | Sarah,Cline,MALE,66891 White Leaf Haven,04/09/2013
87 | Mae,Brewster,MALE,85773 Tender Castle Avenue,07/24/2013
88 | Lissette,Delbosque,FEMALE,672 Dusty Diamond Canyon,06/26/2013
89 | Erica,Sevin,MALE,1865 Harvest Embers Lagoon,07/22/2013
90 | Shauna,Stroud,MALE,83578 Noble Quail Round,09/14/2012
91 | David,Cruz,FEMALE,35644 Pearl Forest Edge,05/02/2013
92 | Carla,Hamm,FEMALE,40539 Iron Rise Trace,07/23/2013
93 | Justin,Bridgeman,MALE,43012 White Snake Vale,05/26/2013
94 | Barbara,Kane,MALE,67277 Quaking Crow Woods,04/09/2013
95 | Russell,Garcia,MALE,4780 Hazy Shadow Knoll,06/07/2013
96 | Joseph,Vandenbosch,MALE,25638 Crystal Fox Way,07/31/2013
97 | Kathryn,Donnelly,MALE,14896 Misty Stone Vale,07/30/2013
98 | Harley,Llewellyn,FEMALE,110 Pearl Blossom Edge,06/19/2013
99 | Alba,Cummings,TRANS,24587 Small Maple Glade,04/29/2013
100 | Valerie,Dandrea,FEMALE,4187 Stony Anchor Run,12/07/2012
101 | Lillian,Ewing,MALE,49842 Cotton Brook Bank,04/29/2013
102 |
--------------------------------------------------------------------------------
/solution-manifest.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | id: SO0222 # Solution Id
3 | name: amazon-marketing-cloud-uploader-from-aws # trademarked name
4 | version: v3.0.15 # current version of the solution. Used to verify template headers
5 | cloudformation_templates: # This list should match with AWS CloudFormation templates section of IG
6 | - template: amazon-marketing-cloud-uploader-from-aws.template
7 | main_template: true
8 | build_environment:
9 | build_image: 'aws/codebuild/standard:7.0' # Options include: 'aws/codebuild/standard:5.0','aws/codebuild/standard:6.0','aws/codebuild/standard:7.0','aws/codebuild/amazonlinux2-x86_64-standard:4.0','aws/codebuild/amazonlinux2-x86_64-standard:5.0'
10 |
--------------------------------------------------------------------------------
/source/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit =
3 | tests/*
4 | api/tests/*
5 |
6 | source =
7 | api
8 | glue
9 | helper
10 | amc_uploader
11 | anonymous_data_logger
12 |
13 | [report]
14 | fail_under = 82
15 |
--------------------------------------------------------------------------------
/source/amc_uploader/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/source/amc_uploader/__init__.py
--------------------------------------------------------------------------------
/source/amc_uploader/amc_uploader.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | #
4 | # ###############################################################################
5 | # PURPOSE:
6 | # Upload data files to AMC from a .txt manifest file that points to multiple
7 | # distinct S3 objects.
8 | # Data files must be in the format required by AMC.
9 | #
10 | # USAGE:
11 | # Start this Lambda with an S3 CreateObject trigger on the bucket where
12 | # transformed data files are saved.
13 | #
14 | # REQUIREMENTS:
15 | # Input files are expected to be in the following s3 key pattern:
16 | # s3://[bucket_name]/amc/[dataset_id]/[update_strategy]/[file_format]/[country_code]/[instance_id|user_id]/[manifest].txt
17 | ###############################################################################
18 |
19 | import json
20 | import logging
21 | import os
22 | import urllib.parse
23 | from datetime import datetime
24 |
25 | import boto3
26 |
27 | # Patch libraries to instrument downstream calls
28 | from aws_xray_sdk.core import patch_all
29 | from boto3.dynamodb.conditions import Key
30 | from botocore import config
31 | from lib.tasks import tasks
32 |
33 | patch_all()
34 |
35 | # Environment variables
36 | solution_config = json.loads(os.environ["botoConfig"])
37 | config = config.Config(**solution_config)
38 | UPLOAD_FAILURES_TABLE_NAME = os.environ["UPLOAD_FAILURES_TABLE_NAME"]
39 | SYSTEM_TABLE_NAME = os.environ["SYSTEM_TABLE_NAME"]
40 |
41 | # format log messages like this:
42 | formatter = logging.Formatter(
43 | "{%(pathname)s:%(lineno)d} %(levelname)s - %(message)s"
44 | )
45 | handler = logging.StreamHandler()
46 | handler.setFormatter(formatter)
47 |
48 | # Remove the default logger in order to avoid duplicate log messages
49 | # after we attach our custom logging handler.
50 | logging.getLogger().handlers.clear()
51 | logger = logging.getLogger()
52 | logger.setLevel(logging.INFO)
53 | logger.addHandler(handler)
54 |
55 |
56 | def lambda_handler(event, context):
57 | logger.info("We got the following event:\n")
58 | logger.info("event:\n {s}".format(s=event))
59 | logger.info("context:\n {s}".format(s=context))
60 | bucket = event["Records"][0]["s3"]["bucket"]["name"]
61 | key = urllib.parse.unquote_plus(event["Records"][0]["s3"]["object"]["key"])
62 | if _is_manifest(key):
63 | upload_res_info = _start_upload(bucket=bucket, key=key)
64 | logger.debug(upload_res_info)
65 | else:
66 | message = f"The key '{key}' is not a .txt manifest file. Exiting."
67 | logger.info(message)
68 | return {"Status": "Warning", "Message": message}
69 |
70 | def _is_manifest(key):
71 | return key.endswith(".txt")
72 |
73 |
74 | def get_dynamo_table(table_name):
75 | dynamo_resource = boto3.resource(
76 | "dynamodb", region_name=os.environ["AWS_REGION"]
77 | )
78 | return dynamo_resource.Table(table_name), dynamo_resource
79 |
80 |
81 | def get_amc_instance(instance_id):
82 | system_table, _ = get_dynamo_table(SYSTEM_TABLE_NAME)
83 | response = system_table.query(
84 | KeyConditionExpression=Key("Name").eq("AmcInstances")
85 | )
86 |
87 | for item in response["Items"]:
88 | for instance in item["Value"]:
89 | if instance.get("instance_id") == instance_id:
90 | if not (instance.get("marketplace_id") and instance.get("advertiser_id")):
91 | raise ValueError(
92 | f"AMC instances: marketplace_id and advertiser_id required for {instance_id}."
93 | )
94 | return instance
95 | raise ValueError(f"AMC instances: {instance_id} not found.")
96 |
97 |
98 | def verify_amc_request(**kwargs):
99 | # Verify AMC requests
100 | ads_kwargs = tasks.get_ads_token(**kwargs, redirect_uri="None")
101 | if ads_kwargs.get("authorize_url"):
102 | raise RuntimeError("Unauthorized AMC request.")
103 | return ads_kwargs
104 |
105 |
106 | def update_upload_failures_table(response, dataset_id, instance_id):
107 | logger.info(f"Response code: {response.status_code}\n")
108 | logger.info("Response: " + response.text)
109 | upload_failures_table, dynamo_resource = get_dynamo_table(
110 | UPLOAD_FAILURES_TABLE_NAME
111 | )
112 | item_key = {"dataset_id": dataset_id, "instance_id": instance_id}
113 | # Clear previously recorded failure item.
114 | try:
115 | upload_failures_table.delete_item(Key=item_key)
116 | except dynamo_resource.meta.client.exceptions.ConditionalCheckFailedException:
117 | pass
118 | # If this upload failed then record that failure.
119 | if response.status_code != 200:
120 | item = item_key
121 | item["Value"] = response.text
122 | upload_failures_table.put_item(Item=item)
123 |
124 | def safe_json_loads(val):
125 | try:
126 | return json.loads(val)
127 | except Exception:
128 | return val
129 |
130 |
131 | def _start_upload(**kwargs):
132 | try:
133 | logger.info("Uploading dataset")
134 | bucket = kwargs["bucket"]
135 | key = kwargs["key"]
136 |
137 | # s3Key must be in the following format:
138 |         # amc/[dataset_id]/[update_strategy]/[file_format]/[country_code]/[instance_id|user_id]/[manifest].txt
139 |
140 | _, dataset_id, update_strategy, file_format, country_code, instance_id_user_id, filename_quoted = key.split('/')
141 | instance_id, user_id = instance_id_user_id.split("|")
142 | filename = urllib.parse.unquote_plus(filename_quoted)
143 | ads_kwargs = verify_amc_request(**kwargs, user_id=user_id)
144 | amc_instance = get_amc_instance(instance_id=instance_id)
145 | kwargs["marketplace_id"] = amc_instance["marketplace_id"]
146 | kwargs["advertiser_id"] = amc_instance["advertiser_id"]
147 | kwargs["instance_id"] = instance_id
148 | kwargs["user_id"] = user_id
149 |
150 | logger.info("key: " + key)
151 | logger.info("dataset_id: " + dataset_id)
152 | logger.info("update_strategy: " + update_strategy)
153 | logger.info("country_code: " + country_code)
154 | logger.info("filename: " + filename)
155 | logger.info("instance_id: " + instance_id)
156 | logger.info(
157 | "Uploading s3://"
158 | + bucket
159 | + "/"
160 | + key
161 | + " to dataSetId "
162 | + dataset_id
163 | )
164 | data = {
165 | "countryCode": safe_json_loads(country_code),
166 | "updateStrategy": update_strategy,
167 | "compressionFormat": "GZIP",
168 | "dataSource": {
169 | "sourceS3Bucket": bucket,
170 | "sourceManifestS3Key": key,
171 | }
172 | }
173 | if file_format == "CSV":
174 | data["fileFormat"] = {
175 | "csvDataFormat": {
176 | "fieldDelimiter": ","
177 | }
178 | }
179 | elif file_format == "JSON":
180 | data["fileFormat"] = {
181 | "jsonDataFormat": "LINES"
182 | }
183 |
184 | path = f"/uploads/{dataset_id}"
185 | amc_request = tasks.AMCRequests(
186 | amc_path=path,
187 | http_method="POST",
188 | payload=json.dumps(data)
189 | )
190 | response = amc_request.process_request(**kwargs, **ads_kwargs)
191 | update_upload_failures_table(response, dataset_id, instance_id)
192 | return response.text
193 |
194 | except Exception as ex:
195 | logger.error(ex)
196 |         return {"Status": "Error", "Message": str(ex)}
197 |
198 |
199 |
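A minimal sketch of how the manifest key layout documented above maps to upload parameters; the dataset, instance, and strategy values are hypothetical:

    import urllib.parse

    # Hypothetical manifest key following the documented pattern:
    # amc/[dataset_id]/[update_strategy]/[file_format]/[country_code]/[instance_id|user_id]/[manifest].txt
    sample_key = "amc/mydataset/FULL_REPLACE/CSV/US/amc12345|user-1/part-00000-manifest.txt"

    _, dataset_id, update_strategy, file_format, country_code, instance_user, manifest = sample_key.split("/")
    instance_id, user_id = instance_user.split("|")
    manifest = urllib.parse.unquote_plus(manifest)

    assert (dataset_id, update_strategy, file_format, country_code) == ("mydataset", "FULL_REPLACE", "CSV", "US")
    assert (instance_id, user_id) == ("amc12345", "user-1")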
--------------------------------------------------------------------------------
/source/amc_uploader/lib/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/source/amc_uploader/lib/__init__.py
--------------------------------------------------------------------------------
/source/amc_uploader/lib/tasks.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | #
4 | # ###########################################################################
5 | # This module loads the shared functions for constructing sigv4 signed HTTP requests.
6 | # Reference:
7 | # http://docs.aws.amazon.com/general/latest/gr/signature-v4-examples.html#signature-v4-examples-python
8 | #
9 | # The requests package is not included in the default AWS Lambda env,
10 | # so be sure that it has been provided in a Lambda layer.
11 | #
12 | ##########################################################################
13 |
14 | import importlib
15 | import sys
16 |
17 | sys.path.insert(0, "./share/tasks.py")
18 | tasks = importlib.import_module("share.tasks")
19 |
--------------------------------------------------------------------------------
/source/amc_uploader/requirements.txt:
--------------------------------------------------------------------------------
1 | requests>=2.32.4
2 | chalice==1.31.0
3 |
--------------------------------------------------------------------------------
/source/amc_uploader/setup.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import setuptools
5 |
6 | setuptools.setup(
7 | name="amc_uploader",
8 | version="1.0.0",
9 | description="AMC - Uploader Functions",
10 | author="AWS Solutions Builders",
11 | packages=setuptools.find_packages(exclude=("shared",)),
12 | package_data={"": ["*.json", "*.yaml"]},
13 | include_package_data=True,
14 | python_requires=">=3.7",
15 | classifiers=[
16 | "Development Status :: 4 - Beta",
17 | "Intended Audience :: Developers",
18 | "License :: OSI Approved :: Apache Software License",
19 | "Programming Language :: JavaScript",
20 | "Programming Language :: Python :: 3 :: Only",
21 | "Programming Language :: Python :: 3.7",
22 | "Programming Language :: Python :: 3.8",
23 | "Programming Language :: Python :: 3.9",
24 | "Topic :: Software Development :: Code Generators",
25 | "Topic :: Utilities",
26 | "Typing :: Typed",
27 | ],
28 | )
29 |
--------------------------------------------------------------------------------
/source/anonymous_data_logger/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/source/anonymous_data_logger/__init__.py
--------------------------------------------------------------------------------
/source/anonymous_data_logger/anonymous_data_logger.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
3 | # SPDX-License-Identifier: Apache-2.0
4 | ##############################################################################
5 | #
6 | # PURPOSE:
7 | # This function sends anonymous performance data to the AWS
8 | # Solutions metrics API. This information is anonymous and helps improve the
9 | # quality of the solution.
10 | #
11 | ##############################################################################
12 |
13 | import logging
14 | import uuid
15 |
16 | import anonymous_lib.cfnresponse as cfn
17 | import anonymous_lib.metrics as Metrics
18 |
19 | # format log messages like this:
20 | formatter = logging.Formatter(
21 | "{%(pathname)s:%(lineno)d} %(levelname)s - %(message)s"
22 | )
23 | handler = logging.StreamHandler()
24 | handler.setFormatter(formatter)
25 |
26 | # Remove the default logger in order to avoid duplicate log messages
27 | # after we attach our custom logging handler.
28 | logging.getLogger().handlers.clear()
29 | logger = logging.getLogger()
30 | logger.setLevel(logging.INFO)
31 | logger.addHandler(handler)
32 |
33 |
34 | def handler(event, context):
35 | logger.info(f"We got this event: {event}\n")
36 | # Each resource returns a promise with a json object to return cloudformation.
37 | try:
38 | request_type = event["RequestType"]
39 | if request_type in ("Create", "Update"):
40 | # Here we handle the CloudFormation CREATE and UPDATE events
41 | # sent by the AnonymousMetric custom resource.
42 | resource = event["ResourceProperties"]["Resource"]
43 | config = event["ResourceProperties"]
44 | # Remove ServiceToken (lambda arn) to avoid sending AccountId
45 | config.pop("ServiceToken", None)
46 | config.pop("Resource", None)
47 | # Add some useful fields related to stack change
48 | config["CFTemplate"] = (
49 | request_type + "d"
50 | ) # Created, Updated, or Deleted
51 | response_data = {}
52 | logger.info(
53 | "Request::{} Resource::{}".format(request_type, resource)
54 | )
55 | if resource == "UUID":
56 | response_data = {"UUID": str(uuid.uuid4())}
57 | response_uuid = response_data["UUID"]
58 | cfn.send(
59 | event, context, "SUCCESS", response_data, response_uuid
60 | )
61 | elif resource == "AnonymousMetric":
62 | Metrics.send_metrics(config)
63 | response_uuid = "Metrics Sent"
64 | cfn.send(
65 | event, context, "SUCCESS", response_data, response_uuid
66 | )
67 | else:
68 | logger.error(
69 | "Create failed, {} not defined in the Custom Resource".format(
70 | resource
71 | )
72 | )
73 | cfn.send(event, context, "FAILED", {}, context.log_stream_name)
74 | elif request_type == "Delete":
75 | # Here we handle the CloudFormation DELETE event
76 | # sent by the AnonymousMetric custom resource.
77 | resource = event["ResourceProperties"]["Resource"]
78 | logger.info(
79 | "RESPONSE:: {}: Not required to report data for delete request.".format(
80 | resource
81 | )
82 | )
83 | cfn.send(event, context, "SUCCESS", {})
84 | elif request_type == "Workload":
85 | # Here we handle the performance metrics reported by the Glue ETL job.
86 | metrics = event["Metrics"]
87 | logger.info("Workload metrics:")
88 | logger.info(metrics)
89 | Metrics.send_metrics(metrics)
90 | else:
91 | # If we get any other type of event, we handle that here.
92 | logger.error("RESPONSE:: {} Not supported".format(request_type))
93 | except Exception as e:
94 | cfn.send(event, context, "FAILED", {}, context.log_stream_name)
95 | logger.error(e)
96 |
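For reference, a hypothetical "Workload" invocation payload matching the branch handled above; every field value is illustrative:

    workload_event = {
        "RequestType": "Workload",
        "Metrics": {
            "SolutionId": "SO0222",
            "UUID": "00000000-0000-0000-0000-000000000000",
            "RecordsProcessed": 12345,
        },
    }
    # handler(workload_event, context) forwards event["Metrics"] to Metrics.send_metrics().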
--------------------------------------------------------------------------------
/source/anonymous_data_logger/anonymous_lib/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/source/anonymous_data_logger/anonymous_lib/__init__.py
--------------------------------------------------------------------------------
/source/anonymous_data_logger/anonymous_lib/cfnresponse.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
3 | # SPDX-License-Identifier: Apache-2.0
4 |
5 | import json
6 |
7 | import requests
8 |
9 |
10 | def send(
11 | event,
12 | context,
13 | response_status,
14 | response_data,
15 | physical_resource_id=None,
16 | no_echo=False,
17 | ):
18 | response_url = event["ResponseURL"]
19 |
20 | response_body = {}
21 | response_body["Status"] = response_status
22 | response_body["Reason"] = (
23 | "See the details in CloudWatch Log Stream: " + context.log_stream_name
24 | )
25 | response_body["PhysicalResourceId"] = (
26 | physical_resource_id or context.log_stream_name
27 | )
28 | response_body["StackId"] = event["StackId"]
29 | response_body["RequestId"] = event["RequestId"]
30 | response_body["LogicalResourceId"] = event["LogicalResourceId"]
31 | response_body["NoEcho"] = no_echo
32 | response_body["Data"] = response_data
33 |
34 | json_response_body = json.dumps(response_body)
35 |
36 | headers = {
37 | "content-type": "",
38 | "content-length": str(len(json_response_body)),
39 | }
40 |
41 | try:
42 | response = requests.put(
43 | response_url, data=json_response_body, headers=headers, timeout=20
44 | )
45 |         print("Status code: " + str(response.status_code))
46 | except Exception as e:
47 | print("send(..) failed executing requests.put(..): " + str(e))
48 |
--------------------------------------------------------------------------------
/source/anonymous_data_logger/anonymous_lib/metrics.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
3 | # SPDX-License-Identifier: Apache-2.0
4 |
5 | import datetime
6 | import json
7 | import requests
8 |
9 |
10 | def send_metrics(config):
11 | metrics = {}
12 | # move Solution ID and UUID to the root JSON level
13 | metrics["Solution"] = config.pop("SolutionId", None)
14 | metrics["UUID"] = config.pop("UUID", None)
15 | metrics["TimeStamp"] = str(datetime.datetime.utcnow().isoformat())
16 | metrics["Data"] = config
17 | url = "https://metrics.awssolutionsbuilder.com/generic"
18 | data = json.dumps(metrics).encode("utf8")
19 | headers = {"content-type": "application/json"}
20 | req = requests.post(url, headers=headers, data=data, timeout=15)
21 |     print("RESPONSE CODE:: {}".format(req.status_code))
22 | print("METRICS SENT:: {}".format(metrics))
23 |
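A sketch of a send_metrics call and the resulting payload shape, assuming placeholder values:

    send_metrics({
        "SolutionId": "SO0222",
        "UUID": "00000000-0000-0000-0000-000000000000",
        "CFTemplate": "Created",
        "Version": "v3.0.15",
    })
    # POSTs roughly this body: SolutionId and UUID are lifted to the top level,
    # and the remaining keys are nested under "Data":
    # {"Solution": "SO0222", "UUID": "...", "TimeStamp": "...",
    #  "Data": {"CFTemplate": "Created", "Version": "v3.0.15"}}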
--------------------------------------------------------------------------------
/source/anonymous_data_logger/requirements.txt:
--------------------------------------------------------------------------------
1 | requests>=2.32.4
2 | urllib3>=2.5.0
3 |
--------------------------------------------------------------------------------
/source/api/.chalice/config.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": "1.0",
3 | "app_name": "amcufa_api",
4 | "environment_variables": {
5 | "botoConfig": "{}",
6 | "VERSION": "",
7 | "SOLUTION_NAME": "",
8 | "AMC_API_ROLE_ARN": "",
9 | "AMC_GLUE_JOB_NAME": "",
10 | "ARTIFACT_BUCKET": "",
11 | "SYSTEM_TABLE_NAME": "",
12 | "UPLOAD_FAILURES_TABLE_NAME": "",
13 | "CUSTOMER_MANAGED_KEY": ""
14 | },
15 | "stages": {
16 | "dev": {
17 | "api_gateway_stage": "api",
18 | "lambda_memory_size": 2048,
19 | "lambda_timeout": 600,
20 | "autogen_policy": false,
21 | "iam_policy_file": "dev-app-policy.json",
22 | "xray": true,
23 | "tags": {
24 | "environment": "amcufa"
25 | }
26 | }
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/source/api/.chalice/dev-app-policy.json:
--------------------------------------------------------------------------------
1 | {
2 | "Version": "2012-10-17",
3 | "Statement": [
4 | {
5 | "Action": [
6 | "logs:CreateLogGroup",
7 | "logs:CreateLogStream",
8 | "logs:PutLogEvents"
9 | ],
10 | "Resource": "*",
11 | "Effect": "Allow",
12 | "Sid": "Logging"
13 | },
14 | {
15 | "Effect": "Allow",
16 | "Action": "lambda:InvokeFunction",
17 | "Resource": "*"
18 | },
19 | {
20 | "Effect": "Allow",
21 | "Action": [
22 | "iam:PassRole"
23 | ],
24 | "Resource": [
25 | "*"
26 | ]
27 | }
28 | ]
29 | }
30 |
--------------------------------------------------------------------------------
/source/api/.gitignore:
--------------------------------------------------------------------------------
1 | .chalice/deployments/
2 | .chalice/venv/
3 | __pycache__/
4 |
--------------------------------------------------------------------------------
/source/api/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/source/api/__init__.py
--------------------------------------------------------------------------------
/source/api/chalicelib/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/source/api/chalicelib/__init__.py
--------------------------------------------------------------------------------
/source/api/chalicelib/tasks.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | #
4 | # ###########################################################################
5 | # This file loads the shared functions for constructing sigv4 signed HTTP requests.
6 | # Reference:
7 | # http://docs.aws.amazon.com/general/latest/gr/signature-v4-examples.html#signature-v4-examples-python
8 | #
9 | # The requests package is not included in the default AWS Lambda env,
10 | # so be sure that it has been provided in a Lambda layer.
11 | #
12 | ##########################################################################
13 |
14 | import importlib
15 | import sys
16 |
17 | sys.path.insert(0, "..")
18 | tasks = importlib.import_module("share.tasks")
19 |
--------------------------------------------------------------------------------
/source/api/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/source/api/requirements.txt
--------------------------------------------------------------------------------
/source/cognito_hosted_ui_resource/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/source/cognito_hosted_ui_resource/__init__.py
--------------------------------------------------------------------------------
/source/cognito_hosted_ui_resource/amcufa-logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/source/cognito_hosted_ui_resource/amcufa-logo.png
--------------------------------------------------------------------------------
/source/cognito_hosted_ui_resource/cognito_hosted_ui_resource.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import os
5 | import json
6 | import logging
7 |
8 | import boto3
9 | import cfnresponse
10 |
11 |
12 |
13 | logger = logging.getLogger()
14 | logger.setLevel(logging.INFO)
15 |
16 | USER_POOL_ID = os.environ["USER_POOL_ID"]
17 | DIR_PATH = os.path.dirname(os.path.realpath(__file__))
18 |
19 | def handler(event, context):
20 | logger.info(f"Event {event}")
21 |
22 | try:
23 | client = boto3.client('cognito-idp')
24 | response = client.set_ui_customization(
25 | UserPoolId=USER_POOL_ID,
26 | CSS=get_file(f"{DIR_PATH}/login.css", "r"),
27 | ImageFile=get_file(f"{DIR_PATH}/amcufa-logo.png", "rb")
28 | )
29 | logger.info(response)
30 | except Exception as error:
31 | logger.error(error)
32 |         cfnresponse.send(event, context, cfnresponse.FAILED, {"error": str(error)})
33 | return
34 |
35 | cfnresponse.send(event, context, cfnresponse.SUCCESS, {"response": response})
36 |
37 | def get_file(file_path, mode):
38 | with open(file_path, mode) as file:
39 | return file.read()
--------------------------------------------------------------------------------
/source/cognito_hosted_ui_resource/login.css:
--------------------------------------------------------------------------------
1 |
2 | .logo-customizable {
3 | max-width: 100%;
4 | max-height: 100%;
5 | }
6 |
7 | .banner-customizable {
8 | padding: 15px 0px 15px 0px;
9 | background-color: #343a40 !important;
10 | }
11 |
12 | .label-customizable {
13 | font-weight: 400;
14 | }
15 |
16 | .textDescription-customizable {
17 | padding-top: 10px;
18 | padding-bottom: 10px;
19 | display: block;
20 | font-size: 16px;
21 | }
22 |
23 | .idpDescription-customizable {
24 | padding-top: 10px;
25 | padding-bottom: 10px;
26 | display: block;
27 | font-size: 16px;
28 | }
29 |
30 | .legalText-customizable {
31 | color: #747474;
32 | font-size: 11px;
33 | }
34 |
35 | .submitButton-customizable {
36 | font-size: 14px;
37 | font-weight: bold;
38 | margin: 20px 0px 10px 0px;
39 | height: 40px;
40 | width: 100%;
41 | color: #fff;
42 | background-color: #007bff;
43 | border-color: #007bff;
44 | }
45 |
46 | .submitButton-customizable:hover {
47 | color: #fff;
48 | background-color: #286090;
49 | }
50 |
51 | .errorMessage-customizable {
52 | padding: 5px;
53 | font-size: 14px;
54 | width: 100%;
55 | background: #F5F5F5;
56 | border: 2px solid #D64958;
57 | color: #D64958;
58 | }
59 |
60 | .inputField-customizable {
61 | width: 100%;
62 | height: 34px;
63 | color: #555;
64 | background-color: #fff;
65 | border: 1px solid #ccc;
66 | }
67 |
68 | .inputField-customizable:focus {
69 | border-color: #66afe9;
70 | outline: 0;
71 | }
72 |
73 | .idpButton-customizable {
74 | height: 40px;
75 | width: 100%;
76 | text-align: center;
77 | margin-bottom: 15px;
78 | color: #fff;
79 | background-color: #007bff;
80 | border-color: #007bff;
81 | }
82 |
83 | .idpButton-customizable:hover {
84 | color: #fff;
85 | background-color: #31b0d5;
86 | }
87 |
88 | .socialButton-customizable {
89 | border-radius: 2px;
90 | height: 40px;
91 | margin-bottom: 15px;
92 | padding: 1px;
93 | text-align: left;
94 | width: 100%;
95 | }
96 |
97 | .redirect-customizable {
98 | text-align: center;
99 | }
100 |
101 | .passwordCheck-notValid-customizable {
102 | color: #DF3312;
103 | }
104 |
105 | .passwordCheck-valid-customizable {
106 | color: #19BF00;
107 | }
108 |
109 | .background-customizable {
110 | background-color: #fff;
111 | }
112 |
--------------------------------------------------------------------------------
/source/cognito_hosted_ui_resource/requirements.txt:
--------------------------------------------------------------------------------
1 | cfnresponse==1.1.4
--------------------------------------------------------------------------------
/source/glue/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/source/glue/__init__.py
--------------------------------------------------------------------------------
/source/glue/amc_transformations.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | # ###############################################################################
4 | # PURPOSE:
5 | # Normalize and hash clear-text PII, and partition time series datasets for AMC.
6 | #
7 | # PREREQUISITES:
8 | # Timestamp columns must be formatted according to ISO 8601.
9 | #
10 | # INPUT:
11 | # --source_bucket: S3 bucket containing input file
12 | # --output_bucket: S3 bucket for output data
13 | # --source_key: S3 key of input file.
14 | # --timestamp_column: Column name containing timestamps for time series datasets (e.g. FACT). Leave blank for datasets that are not time series (e.g. DIMENSION).
15 | # --pii_fields: json formatted array containing column names that need to be hashed and the PII type of their data. The type must be FIRST_NAME, LAST_NAME, PHONE, ADDRESS, CITY, STATE, ZIP, or EMAIL.
16 | # --deleted_fields: array of strings indicating the names of columns which the user requested to be dropped from the dataset prior to uploading to AMC.
17 | # --dataset_id: name of dataset, used as the prefix folder for the output s3key.
18 | # --country_code: country-specific normalization to apply to all rows in the dataset (2-digit ISO country code).
19 | # --amc_instances: List of AMC instances to receive uploads
20 | #
21 | # OUTPUT:
22 | # - Transformed data files in user-specified output bucket,
23 | # partitioned according to AMC spec.
24 | #
25 | # SAMPLE COMMAND-LINE USAGE:
26 | #
27 | # export JOB_NAME=mystack-GlueStack-12BSLR8H1F79M-amc-transformation-job
28 | # export SOURCE_BUCKET=mybucket
29 | # export SOURCE_KEY=mydata.json
30 | # export OUTPUT_BUCKET=mystack-etl-artifacts-zmtmhi
31 | # export TIMESTAMP_COLUMN=timestamp
32 | # export PII_FIELDS='[{\"column_name\": \"first_name\",\"pii_type\": \"FIRST_NAME\"},{\"column_name\": \"last_name\",\"pii_type\": \"LAST_NAME\"},{\"column_name\": \"address\",\"pii_type\": \"ADDRESS\"}]'
33 | # export DELETED_FIELDS='[\"customer_id\",\"purchase_id\"]'
34 | # export DATASET_ID='mytest123'
35 | # export REGION=us-east-1
36 | # aws glue start-job-run --job-name $JOB_NAME --arguments '{"--source_bucket": "'$SOURCE_BUCKET'", "--output_bucket": "'$OUTPUT_BUCKET'", "--source_key": "'$SOURCE_KEY'", "--pii_fields": "'$PII_FIELDS'",
37 | # "--deleted_fields": "'$DELETED_FIELDS'", "--timestamp_column": "'$TIMESTAMP_COLUMN'", "--dataset_id": "'$DATASET_ID'", "--country_code": "US"}' --region $REGION
38 | #
39 | ###############################################################################
40 |
41 | import sys
42 |
43 | from awsglue.utils import GlueArgumentError, getResolvedOptions
44 | from library import read_write as rw
45 | from library import transform
46 |
47 | REQUIRED_PARAMS = [
48 | "JOB_NAME",
49 | "solution_id",
50 | "uuid",
51 | "enable_anonymous_data",
52 | "anonymous_data_logger",
53 | "source_bucket",
54 | "source_key",
55 | "output_bucket",
56 | "pii_fields",
57 | "deleted_fields",
58 | "dataset_id",
59 | "user_id",
60 | "file_format",
61 | "amc_instances",
62 | "update_strategy"
63 | ]
64 | OPTIONAL_PARAMS = ["timestamp_column", "country_code"]
65 |
66 |
67 | def check_params(required: list, optional: list) -> dict:
68 | # assign required params
69 | try:
70 | args = getResolvedOptions(sys.argv, required)
71 | except GlueArgumentError as e:
72 | # If any required parameter is missing then stop execution.
73 | print(e)
74 | sys.exit(1)
75 |
76 | # assign optional params
77 | for parameter in optional:
78 | try:
79 | args.update(getResolvedOptions(sys.argv, [parameter]))
80 |         except GlueArgumentError:
81 | # Continue execution if any optional parameter is missing.
82 | pass
83 |
84 | # strip whitespace on applicable fields
85 | for i in ("dataset_id", "timestamp_column"):
86 | if i in args.keys():
87 | args[i] = args[i].strip()
88 | if args.get("country_code") and args.get("country_code") not in (
89 | "US",
90 | "GB",
91 | "JP",
92 | "IN",
93 | "IT",
94 | "ES",
95 | "CA",
96 | "DE",
97 | "FR",
98 | ):
99 | print("ERROR: Invalid user-defined value for country:")
100 | print(args["country_code"])
101 | sys.exit(1)
102 | if args["file_format"] not in (
103 | "JSON",
104 | "CSV"
105 | ):
106 | print("ERROR: Invalid file format for input files:")
107 | print(args["file_format"])
108 | sys.exit(1)
109 | if len(args["amc_instances"]) == 0:
110 | print("amc_instances cannot be empty")
111 | sys.exit(1)
112 | return args
113 |
114 |
115 | params = check_params(required=REQUIRED_PARAMS, optional=OPTIONAL_PARAMS)
116 |
117 | print("Runtime args:")
118 | print(params)
119 |
120 | file = rw.DataFile(args=params)
121 |
122 | file.read_bucket()
123 | file.load_input_data()
124 | file.remove_deleted_fields()
125 |
126 | if file.country_code:
127 | file.data = transform.transform_data(
128 | data=file.data, pii_fields=file.pii_fields, country_code=file.country_code
129 | )
130 | file.data = transform.hash_data(data=file.data, pii_fields=file.pii_fields)
131 |
132 | if file.timestamp_column:
133 | file.timestamp_transform()
134 |
135 | file.save_output()
136 |
137 | if params.get("enable_anonymous_data", "false") == "true":
138 | file.save_performance_metrics()
139 |
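For reference, a sketch of the JSON-valued job arguments as they are parsed; the column names and values are hypothetical and mirror the SAMPLE COMMAND-LINE USAGE in the header above:

    import json

    # --pii_fields: columns to normalize and hash, with their PII type
    pii_fields = json.loads(
        '[{"column_name": "first_name", "pii_type": "FIRST_NAME"},'
        ' {"column_name": "email", "pii_type": "EMAIL"}]'
    )
    # --deleted_fields: columns dropped before the upload
    deleted_fields = json.loads('["customer_id", "purchase_id"]')
    # --timestamp_column is optional; omit it for DIMENSION datasets (see check_params).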
--------------------------------------------------------------------------------
/source/glue/library/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/source/glue/library/__init__.py
--------------------------------------------------------------------------------
/source/glue/library/address_normalizer.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | ###############################################################################
4 | #
5 | # PURPOSE:
6 | # Normalize addresses according to the normalization standard used by Amazon
7 | # Ads for privacy enhanced identity resolution and data collaboration.
8 | #
9 | # USAGE:
10 | # 1. Import AddressNormalizer
11 | # 2. Instantiate AddressNormalizer with a country code, like this:
12 | # addressNormalizer = AddressNormalizer('FR')
13 | # 3. Normalize text, like this:
14 | #    addressNormalizer.normalize(text).normalized_record
15 | #
16 | # REFERENCE:
17 | # https://github.com/amzn/amazon-ads-advertiser-audience-normalization-sdk-py
18 | #
19 | ###############################################################################
20 | import json
21 | import os
22 | import re
23 | import tempfile
24 | from re import finditer
25 | from zipfile import ZipFile
26 |
27 |
28 | def load_address_map_helper():
29 | try:
30 | __location__ = os.path.realpath(
31 | os.path.join(os.getcwd(), os.path.dirname(__file__))
32 | )
33 | with open(
34 | os.path.join(__location__, "address_map_helper.json"),
35 | "r",
36 | encoding="utf-8",
37 | ) as file:
38 | return json.load(file)
39 | except Exception:
40 |         # The Glue job ships the library as a zip archive, so read the file from it instead.
41 | with ZipFile("library.zip", "r") as zipFile:
42 | with tempfile.TemporaryDirectory() as tempdir:
43 | zipFile.extractall(path=tempdir)
44 | print(os.listdir(tempdir))
45 | with open(
46 | os.path.join(f"{tempdir}/library/address_map_helper.json"),
47 | "r",
48 | encoding="utf-8",
49 | ) as file:
50 | return json.load(file)
51 |
52 |
53 | address_map = load_address_map_helper()
54 |
55 | NumberIndicators = address_map["NumberIndicators"]
56 | DirectionalWords = address_map["DirectionalWords"]
57 | DefaultStreetSuffixes = address_map["DefaultStreetSuffixes"]
58 | USStreetSuffixes = address_map["USStreetSuffixes"]
59 | USSubBuildingDesignator = address_map["USSubBuildingDesignator"]
60 | ITStreetPrefixes = address_map["ITStreetPrefixes"]
61 | FRStreetDesignator = address_map["FRStreetDesignator"]
62 | ESStreetPrefixes = address_map["ESStreetPrefixes"]
63 | UKOrganizationSuffixes = address_map["UKOrganizationSuffixes"]
64 | UKStreetSuffixes = address_map["UKStreetSuffixes"]
65 | UKSubBuildingDesignator = address_map["UKSubBuildingDesignator"]
66 |
67 | DASH_STRING = "-"
68 |
69 | POUND_REGEX = "([A-Z]*)#([0-9A-Z-/]*)"
70 | POUND_STRING = "#"
71 |
72 | DELIMITER_PATTERN_MAP = {
73 | "COMMA": "(\\s?,\\s?)+",
74 | "CO": "\\bC/O\\b",
75 | "EXCLAMATION_MARK": "!",
76 | "OPEN_BRACKET": "[\\[\\{\\(]",
77 | "CLOSE_BRACKET": "[\\]\\}\\)]",
78 | "SPACE": "\\s+",
79 | "DOT": "\\.",
80 | }
81 |
82 |
83 | class Delimiter:
84 | def __init__(self, text="", start=0, del_type=None) -> None:
85 | self.text = text
86 | self.start = start
87 | self.end = start + len(text)
88 | self.del_type = del_type
89 |
90 | def parse(self, text, start=None):
91 | print("Running parse")
92 | if start is None:
93 | start = 0
94 | delimiters = []
95 | a = list(DELIMITER_PATTERN_MAP.keys())
96 | for i in range(0, len(a)):
97 | delimiter_type = a[i]
98 | new_found_delimiters = self.find_delimiters(
99 | text, start, delimiters, delimiter_type
100 | )
101 | delimiters.extend(new_found_delimiters)
102 | delimiters.sort(key=lambda x: x.start, reverse=False)
103 | return delimiters
104 |
105 | def find_delimiters(self, text, start, delimiters, del_type):
106 | print("Running find_delimiters")
107 | result = []
108 | text_start = start
109 | delimiters_1 = delimiters
110 |
111 | for i in range(0, len(delimiters_1)):
112 | cur_delimiter = delimiters_1[i]
113 | text_end = cur_delimiter.start
114 | if text_end == text_start:
115 | text_start = cur_delimiter.end
116 | continue
117 | search_string = text[text_start - start : text_end - start]
118 | regexp = re.compile(DELIMITER_PATTERN_MAP[del_type], re.IGNORECASE)
119 | match_result = 0
120 | for match_result in finditer(regexp, search_string):
121 | delimiter = Delimiter(
122 | match_result.group(),
123 | text_start + match_result.span()[0],
124 | del_type,
125 | )
126 | result.append(delimiter)
127 | text_start = cur_delimiter.end
128 |
129 | if text_start < start + len(text):
130 | search_string = text[text_start - start :]
131 | regexp = re.compile(DELIMITER_PATTERN_MAP[del_type], re.IGNORECASE)
132 | match_result = 0
133 | for match_result in finditer(regexp, search_string):
134 | index = match_result.span()[0]
135 | delimiter_string = match_result.group()
136 | delimiter = Delimiter(
137 | delimiter_string, text_start + index, del_type
138 | )
139 | result.append(delimiter)
140 | return result
141 |
142 |
143 | class NormalizedAddress:
144 | def __init__(self, full_address):
145 | self.address_tokens = []
146 | self.full_address = full_address
147 |
148 | def generate_tokens(self):
149 | delimiters = Delimiter().parse(text=self.full_address)
150 | tokens = []
151 | start = 0
152 | address = self.full_address
153 | text_start = start
154 |
155 | delimiters_1 = delimiters
156 |
157 | for i in range(0, len(delimiters_1)):
158 | delimiter = delimiters_1[i]
159 | text_end = delimiter.start
160 | if text_end != text_start:
161 | address_token = address[text_start - start : text_end - start]
162 | tokens.append(address_token)
163 | text_start = delimiter.end
164 | if text_start < start + len(address):
165 | address_token = address[text_start - start :]
166 | tokens.append(address_token)
167 |
168 | self.address_tokens = tokens
169 |
170 | def update_address_tokens(self, index, **kwargs):
171 | rest = []
172 |
173 | for _, value in kwargs.items():
174 | rest.append(value)
175 |
176 | self.address_tokens.pop(index)
177 | for i in range(0, len(rest)):
178 | self.address_tokens.insert(index + i, rest[i])
179 |
180 |
181 | class Dash:
182 | def apply(self, normalized_address):
183 | for i in range(0, len(normalized_address.address_tokens)):
184 | word = normalized_address.address_tokens[i]
185 | index = word.rfind(DASH_STRING)
186 | if index > 0 and index < len(word) - 1:
187 | first_part = word[0:index]
188 | second_part = word[index + 1 :]
189 | if not second_part.isnumeric() and first_part.isnumeric():
190 | normalized_address.update_address_tokens(
191 | i, first_part=first_part, second_part=second_part
192 | )
193 |
194 |
195 | class Pound:
196 | def apply(self, normalized_address):
197 | for i in range(0, len(normalized_address.address_tokens)):
198 | word = normalized_address.address_tokens[i]
199 | regexp = re.compile(POUND_REGEX)
200 | match_result = 0
201 | for match_result in finditer(regexp, word):
202 | first_part = match_result.group(1)
203 | second_part = match_result.group(2)
204 |
205 | if first_part == "" and second_part == "":
206 | continue
207 |
208 | if first_part == "":
209 | normalized_address.update_address_tokens(
210 | i,
211 | pound_string=POUND_STRING,
212 | second_part=second_part,
213 | )
214 | i += 1
215 | elif second_part == "":
216 | normalized_address.update_address_tokens(
217 | i, first_part=first_part, pound_string=POUND_STRING
218 | )
219 | i += 1
220 | else:
221 | normalized_address.update_address_tokens(
222 | i,
223 | first_part=first_part,
224 | pound_string=POUND_STRING,
225 | second_part=second_part,
226 | )
227 | i += 2
228 |
229 |
230 | pre_proccess_rules = [Dash(), Pound()]
231 |
232 |
233 | class AddressNormalizer:
234 | def __init__(self, country_code):
235 | self.street_word_maps = []
236 | self.street_word_maps.extend(NumberIndicators)
237 | self.street_word_maps.extend(DirectionalWords)
238 |         self.normalized_record = None
239 |
240 | if country_code == "US":
241 | self.street_word_maps.extend(USStreetSuffixes)
242 | self.street_word_maps.extend(USSubBuildingDesignator)
243 |
244 | elif country_code == "CA":
245 | self.street_word_maps.extend(DefaultStreetSuffixes)
246 |
247 | elif country_code == "GB":
248 | self.street_word_maps.extend(UKOrganizationSuffixes)
249 | self.street_word_maps.extend(UKStreetSuffixes)
250 | self.street_word_maps.extend(UKSubBuildingDesignator)
251 |
252 | elif country_code == "FR":
253 | self.street_word_maps.extend(FRStreetDesignator)
254 | self.street_word_maps.extend(DefaultStreetSuffixes)
255 |
256 | elif country_code == "DE":
257 | self.street_word_maps.extend(DefaultStreetSuffixes)
258 |
259 | elif country_code == "ES":
260 | self.street_word_maps.extend(DefaultStreetSuffixes)
261 | self.street_word_maps.extend(ESStreetPrefixes)
262 |
263 | elif country_code == "IT":
264 | self.street_word_maps.extend(DefaultStreetSuffixes)
265 | self.street_word_maps.extend(ITStreetPrefixes)
266 |
267 | elif country_code == "JP":
268 | self.street_word_maps.extend(DefaultStreetSuffixes)
269 |
270 | elif country_code == "IN":
271 | self.street_word_maps.extend(DefaultStreetSuffixes)
272 |
273 | else:
274 | raise ValueError("The country code provided is not yet supported")
275 |
276 | self.pre_proccess_rules = pre_proccess_rules
277 |
278 | def normalize(self, record):
279 | record = record.strip().upper()
280 |
281 | normalized_address = NormalizedAddress(record)
282 | normalized_address.generate_tokens()
283 |
284 | a = self.pre_proccess_rules
285 | for i in range(0, len(a)):
286 | rule = a[i]
287 | rule.apply(normalized_address)
288 |
289 | for i in range(0, len(normalized_address.address_tokens)):
290 | word = normalized_address.address_tokens[i]
291 | for j in range(0, len(self.street_word_maps)):
292 | if word in self.street_word_maps[j]:
293 | normalized_address.update_address_tokens(
294 | i, first_part=self.street_word_maps[j].get(word)
295 | )
296 |
297 | self.normalized_record = "".join(
298 | normalized_address.address_tokens
299 | ).lower()
300 |
301 | return self
302 |
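An illustrative usage sketch; the address is made up and the exact output depends on the mappings in address_map_helper.json:

    from library.address_normalizer import AddressNormalizer

    normalizer = AddressNormalizer("US")
    result = normalizer.normalize("123 North Main Street Apt 4").normalized_record
    # result is the lower-cased address with delimiters removed and common street
    # words (directionals, suffixes, unit designators) replaced by their standard
    # abbreviations from address_map_helper.json.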
--------------------------------------------------------------------------------
/source/glue/library/city_normalizer.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | import re
4 |
5 |
6 | class CityNormalizer:
7 | def normalize(self, record):
8 | self.normalized_record = record.lower()
9 | self.normalized_record = re.sub(
10 | r"[^a-zA-Z0-9]+", "", self.normalized_record
11 | )
12 |
13 | return self
14 |
--------------------------------------------------------------------------------
/source/glue/library/default_normalizer.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | import re
4 |
5 |
6 | class DefaultNormalizer:
7 | def normalize(self, record):
8 | self.normalized_record = record.lower()
9 |
10 | # convert characters ß, ä, ö, ü, ø, æ
11 | self.normalized_record = self.normalized_record.replace("ß", "ss")
12 | self.normalized_record = self.normalized_record.replace("ä", "ae")
13 | self.normalized_record = self.normalized_record.replace("ö", "oe")
14 | self.normalized_record = self.normalized_record.replace("ü", "ue")
15 | self.normalized_record = self.normalized_record.replace("ø", "o")
16 | self.normalized_record = self.normalized_record.replace("æ", "ae")
17 |
18 | # remove all symbols and whitespace
19 | self.normalized_record = re.sub(
20 | r"[^a-z0-9]", "", self.normalized_record
21 | )
22 |
23 | return self
24 |
--------------------------------------------------------------------------------
/source/glue/library/email_normalizer.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | import re
4 |
5 |
6 | def is_valid_email(email):
7 | try:
8 | wrd_str = r"\w"
9 | re_str = f"([{wrd_str}._-]+@[{wrd_str}._-]+)"
10 | return bool(email and re.match(re_str, email))
11 | except Exception:
12 | return False
13 |
14 |
15 | class EmailNormalizer:
16 | def normalize(self, record):
17 | self.normalized_record = record.lower()
18 | self.normalized_record = re.sub(
19 | r"[^\w.@-]+", "", self.normalized_record
20 | )
21 |
22 | if not is_valid_email(self.normalized_record):
23 | self.normalized_record = ""
24 |
25 | return self
26 |
--------------------------------------------------------------------------------
/source/glue/library/phone_normalizer.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | import phonenumbers
4 |
5 |
6 | class PhoneNormalizer:
7 | def normalize(self, record):
8 | # Amazon Ads normalization spec requires that phone numbers begin with a country code.
9 | # Prepend a '+' to the number unless it already begins with '+'
10 | # because the phonenumbers library requires that leading plus sign
11 | # in order to correctly parse the country code.
12 | if record.startswith('+') is False:
13 | record = "+" + record
14 |
15 | try:
16 | parsed_number = phonenumbers.parse(record, None)
17 | except phonenumbers.phonenumberutil.NumberParseException:
18 | self.normalized_record = ""
19 | else:
20 | is_possible = phonenumbers.is_possible_number(parsed_number)
21 | if is_possible:
22 | # Amazon Ads spec expects the phone number in E.164 format
23 | # but without a leading plus sign.
24 | self.normalized_record = phonenumbers.format_number(
25 | parsed_number, phonenumbers.PhoneNumberFormat.E164
26 | ).replace("+", "")
27 | else:
28 | self.normalized_record = ""
29 | return self
30 |
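An illustrative usage sketch with a fictitious number; the country code must be present for the number to parse correctly:

    from library.phone_normalizer import PhoneNormalizer

    normalizer = PhoneNormalizer()
    # "1-212-555-0100" includes the US country code, so this is expected to yield
    # the E.164 digits without the plus sign, e.g. "12125550100".
    print(normalizer.normalize("1-212-555-0100").normalized_record)
    # A number without a country code may fail to parse or be rejected as not
    # possible, in which case normalized_record is "".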
--------------------------------------------------------------------------------
/source/glue/library/state_normalizer.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | import re
4 |
5 | USStateAbbreviation = {
6 | "ALABAMA": "AL",
7 | "ALASKA": "AK",
8 | "ARIZONA": "AZ",
9 | "ARKANSAS": "AR",
10 | "CALIFORNIA": "CA",
11 | "COLORADO": "CO",
12 | "CONNECTICUT": "CT",
13 | "DELAWARE": "DE",
14 | "FLORIDA": "FL",
15 | "GEORGIA": "GA",
16 | "HAWAII": "HI",
17 | "IDAHO": "ID",
18 | "ILLINOIS": "IL",
19 | "INDIANA": "IN",
20 | "IOWA": "IA",
21 | "KANSAS": "KS",
22 | "KENTUCKY": "KY",
23 | "LOUISIANA": "LA",
24 | "MAINE": "ME",
25 | "MARYLAND": "MD",
26 | "MASSACHUSETTS": "MA",
27 | "MICHIGAN": "MI",
28 | "MINNESOTA": "MN",
29 | "MISSISSIPPI": "MS",
30 | "MISSOURI": "MO",
31 | "MONTANA": "MT",
32 | "NEBRASKA": "NE",
33 | "NEVADA": "NV",
34 | "NEWHAMPSHIRE": "NH",
35 | "NEWJERSEY": "NJ",
36 | "NEWMEXICO": "NM",
37 | "NEWYORK": "NY",
38 | "NORTHCAROLINA": "NC",
39 | "NORTHDAKOTA": "ND",
40 | "OHIO": "OH",
41 | "OKLAHOMA": "OK",
42 | "OREGON": "OR",
43 | "PENNSYLVANIA": "PA",
44 | "RHODEISLAND": "RI",
45 | "SOUTHCAROLINA": "SC",
46 | "SOUTHDAKOTA": "SD",
47 | "TENNESSEE": "TN",
48 | "TEXAS": "TX",
49 | "UTAH": "UT",
50 | "VERMONT": "VT",
51 | "VIRGINIA": "VA",
52 | "WASHINGTON": "WA",
53 | "WESTVIRGINIA": "WV",
54 | "WISCONSIN": "WI",
55 | "WYOMING": "WY",
56 | }
57 |
58 | FRStateAbbreviation = {
59 | "ALSACE": "AA",
60 | "AQUITAINE": "AQ",
61 | "AUVERGNE": "AU",
62 | "BRITTANY": "BT",
63 | "BURGUNDY": "BG",
64 | "CENTRE": "CN",
65 | "CHAMPAGNEARDENNE": "CG",
66 | "FRANCHECOMTE": "FC",
67 | "ILEDEFRANCE": "IF",
68 | "LANGUEDOCROUSSILLON": "LU",
69 | "LIMOUSIN": "LM",
70 | "LORRAINE": "LE",
71 | "LOWERNORMANDY": "BN",
72 | "MIDIPYRENEES": "MP",
73 | "NORDPASDECALAIS": "NP",
74 | "PAYSDELALOIRE": "PL",
75 | "PICARDY": "PI",
76 | "POITOUCHARENTES": "PT",
77 | "PROVENCEALPESCOTEDAZUR": "PR",
78 | "RHONEALPES": "RA",
79 | "UPPERNORMANDY": "HT",
80 | "CORSICA": "CE",
81 | }
82 |
83 | CAStateAbbreviation = {
84 | "ALBERTA": "AB",
85 | "BRITISHCOLUMBIA": "BC",
86 | "MANITOBA": "MB",
87 | "NEWBRUNSWICK": "NB",
88 | "NEWFOUNDLANDANDLABRADOR": "NL",
89 | "NORTHWESTTERRITORIES": "NT",
90 | "NOVASCOTIA": "NS",
91 | "NUNAVUT": "NU",
92 | "ONTARIO": "ON",
93 | "PRINCEEDWARDISLAND": "PE",
94 | "QUEBEC": "QC",
95 | "SASKATCHEWAN": "SK",
96 | "YUKON": "YT",
97 | }
98 |
99 | DEStateAbbreviation = {
100 | "BADENWUERTTEMBERG": "BW",
101 | "BAVARIA": "BY",
102 | "BERLIN": "BE",
103 | "BRANDENBURG": "BB",
104 | "BREMEN": "HB",
105 | "HAMBURG": "HH",
106 | "HESSE": "HE",
107 | "LOWERSAXONY": "NI",
108 | "MECKLENBURGVORPOMMERN": "MV",
109 | "NORTHRHINEWESTPHALIA": "NW",
110 | "RHINELANDPALATINATE": "RP",
111 | "SAARLAND": "SL",
112 | "SAXONY": "SN",
113 | "SAXONYANHALT": "ST",
114 | "SCHLESWIGHOLSTEIN": "SH",
115 | "THURINGIA": "TH",
116 | }
117 |
118 | ITStateAbbreviation = {
119 | "AGRIGENTO": "AG",
120 | "ALESSANDRIA": "AL",
121 | "ANCONA": "AN",
122 | "AOSTA": "AO",
123 | "AREZZO": "AR",
124 | "ASCOLIPICENO": "AP",
125 | "ASTI": "AT",
126 | "AVELLINO": "AV",
127 | "BARI": "BA",
128 | "BARLETTAANDRIATRANI": "BT",
129 | "BELLUNO": "BL",
130 | "BENEVENTO": "BN",
131 | "BERGAMO": "BG",
132 | "BIELLA": "BI",
133 | "BOLOGNA": "BO",
134 | "SOUTHTYROL": "BZ",
135 | "BRESCIA": "BS",
136 | "BRINDISI": "BR",
137 | "CAGLIARI": "CA",
138 | "CALTANISSETTA": "CL",
139 | "CAMPOBASSO": "CB",
140 | "CARBONIAIGLESIAS": "CI",
141 | "CASERTA": "CE",
142 | "CATANIA": "CT",
143 | "CATANZARO": "CZ",
144 | "CHIETI": "CH",
145 | "COMO": "CO",
146 | "COSENZA": "CS",
147 | "CREMONA": "CR",
148 | "CROTONE": "KR",
149 | "CUNEO": "CN",
150 | "ENNA": "EN",
151 | "FERMO": "FM",
152 | "FERRARA": "FE",
153 | "FLORENCE": "FI",
154 | "FOGGIA": "FG",
155 | "FORLICESENA": "FC",
156 | "FROSINONE": "FR",
157 | "GENOA": "GE",
158 | "GORIZIA": "GO",
159 | "GROSSETO": "GR",
160 | "IMPERIA": "IM",
161 | "ISERNIA": "IS",
162 | "LASPEZIA": "SP",
163 | "LAQUILA": "AQ",
164 | "LATINA": "LT",
165 | "LECCE": "LE",
166 | "LECCO": "LC",
167 | "LIVORNO": "LI",
168 | "LODI": "LO",
169 | "LUCCA": "LU",
170 | "MACERATA": "MC",
171 | "MANTUA": "MN",
172 | "MASSAANDCARRARA": "MS",
173 | "MATERA": "MT",
174 | "MEDIOCAMPIDANO": "VS",
175 | "MESSINA": "ME",
176 | "MILAN": "MI",
177 | "MODENA": "MO",
178 | "MONZAANDBRIANZA": "MB",
179 | "NAPLES": "NA",
180 | "NOVARA": "NO",
181 | "NUORO": "NU",
182 | "OGLIASTRA": "OG",
183 | "OLBIATEMPIO": "OT",
184 | "ORISTANO": "OR",
185 | "PADUA": "PD",
186 | "PALERMO": "PA",
187 | "PARMA": "PR",
188 | "PAVIA": "PV",
189 | "PERUGIA": "PG",
190 | "PESAROANDURBINO": "PU",
191 | "PESCARA": "PE",
192 | "PIACENZA": "PC",
193 | "PISA": "PI",
194 | "PISTOIA": "PT",
195 | "PORDENONE": "PN",
196 | "POTENZA": "PZ",
197 | "PRATO": "PO",
198 | "RAGUSA": "RG",
199 | "RAVENNA": "RA",
200 | "REGGIOCALABRIA": "RC",
201 | "REGGIOEMILIA": "RE",
202 | "RIETI": "RI",
203 | "RIMINI": "RN",
204 | "ROME": "RM",
205 | "ROVIGO": "RO",
206 | "SALERNO": "SA",
207 | "SASSARI": "SS",
208 | "SAVONA": "SV",
209 | "SIENA": "SI",
210 | "SONDRIO": "SO",
211 | "SYRACUSE": "SR",
212 | "TARANTO": "TA",
213 | "TERAMO": "TE",
214 | "TERNI": "TR",
215 | "TRAPANI": "TP",
216 | "TRENTINO": "TN",
217 | "TREVISO": "TV",
218 | "TRIESTE": "TS",
219 | "TURIN": "TO",
220 | "UDINE": "UD",
221 | "VARESE": "VA",
222 | "VENICE": "VE",
223 | "VERBANOCUSIOOSSOLA": "VB",
224 | "VERCELLI": "VC",
225 | "VERONA": "VR",
226 | "VIBOVALENTIA": "VV",
227 | "VICENZA": "VI",
228 | "VITERBO": "VT",
229 | }
230 |
231 | ESStateAbbreviation = {
232 | "ALICANTE": "A",
233 | "ALACANT": "A",
234 | "ALBACETE": "AB",
235 | "ALMERIA": "AL",
236 | "AVILA": "AV",
237 | "BARCELONA": "B",
238 | "BADAJOZ": "BA",
239 | "VIZCAYA": "BI",
240 | "BIZKAIA": "BI",
241 | "BURGOS": "BU",
242 | "LACORUNA": "C",
243 | "ACORUNA": "C",
244 | "CADIZ": "CA",
245 | "CACERES": "CC",
246 | "CEUTA": "CE",
247 | "CORDOBA": "CO",
248 | "CIUDADREAL": "CR",
249 | "CASTELLON": "CS",
250 | "CASTELLO": "CS",
251 | "CUENCA": "CU",
252 | "LASPALMAS": "GC",
253 | "GIRONA": "GI",
254 | "GERONA": "GI",
255 | "GRANADA": "GR",
256 | "GUADALAJARA": "GU",
257 | "HUELVA": "H",
258 | "HUESCA": "HU",
259 | "JAEN": "J",
260 | "LERIDA": "L",
261 | "LLEIDA": "L",
262 | "LEON": "LE",
263 | "LARIOJA": "LO",
264 | "LUGO": "LU",
265 | "MADRID": "M",
266 | "MALAGA": "MA",
267 | "MELILLA": "ML",
268 | "MURCIA": "MU",
269 | "NAVARRA": "NA",
270 | "NAFARROA": "NA",
271 | "ASTURIAS": "O",
272 | "ORENSE": "OR",
273 | "OURENSE": "OR",
274 | "PALENCIA": "P",
275 | "BALEARES": "PM",
276 | "BALEARS": "PM",
277 | "PONTEVEDRA": "PO",
278 | "CANTABRIA": "S",
279 | "SALAMANCA": "SA",
280 | "SEVILLA": "SE",
281 | "SEGOVIA": "SG",
282 | "SORIA": "SO",
283 | "GUIPUZCOA": "SS",
284 | "GIPUZKOA": "SS",
285 | "TARRAGONA": "T",
286 | "TERUEL": "TE",
287 | "SANTACRUZDETENERIFE": "TF",
288 | "TOLEDO": "TO",
289 | "VALENCIA": "V",
290 | "VALLADOLID": "VA",
291 | "ALAVA": "VI",
292 | "ARABA": "VI",
293 | "ZARAGOZA": "Z",
294 | "ZAMORA": "ZA",
295 | }
296 |
297 |
298 | class StateNormalizer:
299 | def __init__(self, country_code):
300 | if country_code == "US":
301 | self.state_abbreviation_map = USStateAbbreviation
302 | elif country_code == "FR":
303 | self.state_abbreviation_map = FRStateAbbreviation
304 | elif country_code == "CA":
305 | self.state_abbreviation_map = CAStateAbbreviation
306 | elif country_code == "DE":
307 | self.state_abbreviation_map = DEStateAbbreviation
308 | elif country_code == "ES":
309 | self.state_abbreviation_map = ESStateAbbreviation
310 | elif country_code == "IT":
311 | self.state_abbreviation_map = ITStateAbbreviation
312 | # Countries without specific state abbreviations
313 | else:
314 | self.state_abbreviation_map = {}
315 |
316 | def normalize(self, record):
317 | self.normalized_record = record.upper()
318 | self.normalized_record = re.sub(r"[^A-Z]", "", self.normalized_record)
319 |
320 | if self.normalized_record in self.state_abbreviation_map:
321 | self.normalized_record = self.state_abbreviation_map.get(
322 | self.normalized_record
323 | )
324 | elif len(record) > 2:
325 | self.normalized_record = self.normalized_record[:2]
326 |
327 | self.normalized_record = self.normalized_record.lower()
328 |
329 | return self
330 |
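A minimal usage sketch (not part of the repository) of how `StateNormalizer` behaves: full state or province names are mapped to their standard abbreviations, already-short or unmapped values are truncated to two characters, and the result is lowercased. It assumes `source/glue` is on `PYTHONPATH` so the `library` package resolves.

```python
from library.state_normalizer import StateNormalizer

us = StateNormalizer("US")
print(us.normalize("New York").normalized_record)     # "ny"  (mapped via USStateAbbreviation)
print(us.normalize("NY").normalized_record)           # "ny"  (already an abbreviation)
print(us.normalize("Puerto Rico").normalized_record)  # "pu"  (unmapped names truncate to 2 chars)

# Country codes without a map fall back to plain truncation.
jp = StateNormalizer("JP")
print(jp.normalize("Tokyo").normalized_record)        # "to"
```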
--------------------------------------------------------------------------------
/source/glue/library/transform.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | import hashlib
4 |
5 | import pandas as pd
6 | import regex as re
7 | from library.address_normalizer import AddressNormalizer
8 | from library.city_normalizer import CityNormalizer
9 | from library.default_normalizer import DefaultNormalizer
10 | from library.email_normalizer import EmailNormalizer
11 | from library.phone_normalizer import PhoneNormalizer
12 | from library.state_normalizer import StateNormalizer
13 | from library.zip_normalizer import ZipNormalizer
14 |
15 | ###############################
16 | # HELPER FUNCTIONS
17 | ###############################
18 |
19 |
20 | # Use this function to flag records that are null or already hashed
21 | # These records will skip normalization/hashing
22 | def skip_record_flag(text):
23 | # This regular expression matches a SHA-256 hash value.
24 | # SHA-256 digests are 64 consecutive hexadecimal digits, a-f and 0-9.
25 | sha256_pattern = "^[a-f0-9]{64}$"
26 | if pd.isnull(text) or re.match(sha256_pattern, text):
27 | return True
28 |
29 |
30 | class NormalizationPatterns:
31 | def __init__(self, field, country_code):
32 | field_map = {
33 | "ADDRESS": AddressNormalizer(country_code),
34 | "STATE": StateNormalizer(country_code),
35 | "ZIP": ZipNormalizer(country_code),
36 | "PHONE": PhoneNormalizer(),
37 | "EMAIL": EmailNormalizer(),
38 | "CITY": CityNormalizer(),
39 | }
40 | self.normalizer = field_map.get(field, DefaultNormalizer())
41 |
42 | def text_transformations(self, text):
43 | text = self.normalizer.normalize(text).normalized_record
44 | return text
45 |
46 |
47 | ###############################
48 | # DATA NORMALIZATION
49 | ###############################
50 |
51 |
52 | def transform_data(
53 | data: pd.DataFrame, pii_fields: list, country_code: str
54 | ) -> pd.DataFrame:
55 | for field in pii_fields:
56 | column_name = field["column_name"]
57 | pii_type = field["pii_type"]
58 | field_normalizer = NormalizationPatterns(
59 | field=pii_type, country_code=country_code
60 | )
61 | data[column_name] = (
62 | data[column_name]
63 | .copy()
64 | .apply(
65 | lambda x, field_normalizer=field_normalizer: x
66 | if skip_record_flag(x)
67 | else field_normalizer.text_transformations(text=x)
68 | )
69 | )
70 | return data
71 |
72 |
73 | ###############################
74 | # PII HASHING
75 | ###############################
76 |
77 |
78 | def hash_data(data: pd.DataFrame, pii_fields: list) -> pd.DataFrame:
79 | for field in pii_fields:
80 | column_name = field["column_name"]
81 | data[column_name] = (
82 | data[column_name]
83 | .copy()
84 | .apply(
85 | lambda x: x
86 | if skip_record_flag(x)
87 | else hashlib.sha256(x.encode()).hexdigest()
88 | )
89 | )
90 | return data
91 |
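A short sketch (not part of the repository) of the two entry points above: `transform_data` normalizes each configured PII column with the matching normalizer, and `hash_data` replaces the normalized values with SHA-256 hex digests; both skip null values and values that are already 64-character hashes. Note that `pii_fields` is an iterable of dicts with `column_name` and `pii_type` keys. The column names and sample values below are illustrative, and the example assumes `source/glue` is on `PYTHONPATH`.

```python
import pandas as pd
from library.transform import hash_data, transform_data

df = pd.DataFrame(
    {
        "email": ["User@Example.com", None],
        "state": ["New York", "ca"],
    }
)
pii_fields = [
    {"column_name": "email", "pii_type": "EMAIL"},
    {"column_name": "state", "pii_type": "STATE"},
]

normalized = transform_data(df, pii_fields, country_code="US")
hashed = hash_data(normalized, pii_fields)
print(hashed)  # nulls pass through untouched; other values become SHA-256 hex digests
```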
--------------------------------------------------------------------------------
/source/glue/library/zip_normalizer.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | import re
4 |
5 |
6 | class ZipNormalizer:
7 | def __init__(self, country_code):
8 | # CA ZIP, A1A1A1
9 | if country_code == "CA":
10 | self.normalize_regex = r"[^0-9A-Za-z]"
11 | self.zip_length = 6
12 | self.regex = re.compile(r"^([A-Za-z]\d){3}$")
13 | # UK Zip, A11AA A111AA AA11AA AA111AA | A1A1AA AA1A1AA
14 | elif country_code == "GB":
15 | self.normalize_regex = r"[^0-9A-Za-z]"
16 | self.zip_length = 7
17 | self.regex = re.compile(
18 | r"^(([A-Za-z]{1,2}\d{2,3})|([A-Za-z]{1,2}\d[A-Za-z]\d))[A-Za-z]{2}$"
19 | )
20 | # IN ZIP, 6 digits
21 | elif country_code == "IN":
22 | self.normalize_regex = r"[^\d]"
23 | self.zip_length = 6
24 | self.regex = re.compile(r"\d{6}")
25 | # JP ZIP, 7 digits
26 | elif country_code == "JP":
27 | self.normalize_regex = r"[^0-9]"
28 | self.zip_length = 7
29 | self.regex = re.compile(r"\d{7}")
30 | # ZIP, 5 digits
31 | else:
32 | self.normalize_regex = r"[^0-9]"
33 | self.zip_length = 5
34 | self.regex = re.compile(r"\d{5}")
35 |
36 | def normalize(self, record):
37 | self.normalized_record = re.sub(self.normalize_regex, "", record)
38 |
39 | if len(self.normalized_record) > self.zip_length:
40 | self.normalized_record = self.normalized_record[: self.zip_length]
41 |
42 | if not re.match(self.regex, self.normalized_record):
43 | self.normalized_record = ""
44 |
45 | return self
46 |
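A quick sketch (not part of the repository) of `ZipNormalizer`: disallowed characters are stripped, the value is truncated to the country's expected length, and anything that still fails the country pattern is blanked out. Assumes `source/glue` is on `PYTHONPATH`.

```python
from library.zip_normalizer import ZipNormalizer

us = ZipNormalizer("US")
print(us.normalize("98109-1234").normalized_record)  # "98109" (truncated to 5 digits)
print(us.normalize("ABCDE").normalized_record)       # ""      (no digits left, so invalid)

ca = ZipNormalizer("CA")
print(ca.normalize("K1A 0B1").normalized_record)     # "K1A0B1"
```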
--------------------------------------------------------------------------------
/source/helper/cf_helper.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import json
5 | import logging
6 | from urllib.request import HTTPHandler, Request, build_opener
7 |
8 |
9 | def send_response(event, context, response_status, response_data) -> None:
10 | """
11 | Send a resource manipulation status response to CloudFormation
12 | """
13 | logger = logging.getLogger()
14 | logger.setLevel(logging.INFO)
15 | response_body = json.dumps(
16 | {
17 | "Status": response_status,
18 | "Reason": f"See the details in CloudWatch Log Stream: {context.log_stream_name}",
19 | "PhysicalResourceId": context.log_stream_name,
20 | "StackId": event["StackId"],
21 | "RequestId": event["RequestId"],
22 | "LogicalResourceId": event["LogicalResourceId"],
23 | "Data": response_data,
24 | }
25 | )
26 |
27 | logger.info(f"ResponseURL: {event['ResponseURL']}")
28 | logger.info(f"ResponseBody: {response_body}")
29 | opener = build_opener(HTTPHandler)
30 | request = Request(event["ResponseURL"], data=response_body.encode("utf-8"))
31 | request.add_header("Content-Type", "")
32 | request.add_header("Content-Length", str(len(response_body)))
33 | # Override get_method with a callable so that urllib sends this request
34 | # as an HTTP PUT, which is what the CloudFormation response URL expects.
35 | request.get_method = lambda: "PUT"
36 | response = opener.open(request)
37 | logger.info(f"Status code: {response.getcode}")
38 | logger.info(f"Status message: {response.msg}")
39 |
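For reference, a sketch of the JSON body that `send_response` PUTs back to the pre-signed `ResponseURL`. The field names come from the code above; every value shown here is an illustrative placeholder.

```python
import json

response_body = {
    "Status": "SUCCESS",  # or "FAILED"
    "Reason": "See the details in CloudWatch Log Stream: 2024/01/01/[$LATEST]abc123",
    "PhysicalResourceId": "2024/01/01/[$LATEST]abc123",
    "StackId": "arn:aws:cloudformation:us-east-1:111122223333:stack/example-stack/uuid",
    "RequestId": "example-request-id",
    "LogicalResourceId": "ExampleCustomResource",
    "Data": {"Message": "Operation successful"},
}
print(json.dumps(response_body, indent=2))
```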
--------------------------------------------------------------------------------
/source/helper/config_helper.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | """
5 | This custom resource creates and writes the configuration file for the web application after
6 | most other resources have been created.
7 |
8 | For 'Create' and 'Update' events, this function combines configuration parameters (including
9 | AWS Cognito and S3 resource details) into a JSON structure and uploads it to an S3 bucket.
10 | For 'Delete' events, it removes the file.
11 |
12 | Parameters:
13 | - event (Dict): Contains CloudFormation event data, including request type and resource properties.
14 | - context (Any): Provides AWS Lambda runtime information.
15 |
16 | The event data must include specific properties such as API_ENDPOINT, AWS_REGION, USER_POOL_ID,
17 | and others relevant to the AWS resources in use. This function assumes IAM permissions to write
18 | to the specified S3 bucket and prefix are in place.
19 | """
20 |
21 | import json
22 | import logging
23 | from typing import Any, Dict
24 |
25 | import boto3
26 | from cf_helper import send_response
27 |
28 | WEB_RUNTIME_CONFIG = "runtimeConfig.json"
29 |
30 |
31 | def handler(event: Dict[str, Any], context: Any) -> None:
32 | """Handles CloudFormation custom resource requests."""
33 | logger = logging.getLogger()
34 | logger.setLevel(logging.INFO)
35 |
36 | try:
37 | request_type = event["RequestType"]
38 | properties = event["ResourceProperties"]
39 | response_data = {"Message": ""}
40 | logger.info(
41 | f"Resource properties: {json.dumps(properties, default=str)}, request_type: {request_type}"
42 | )
43 | s3 = boto3.client("s3")
44 | if request_type in ["Create", "Update"]:
45 | data = {
46 | k: properties[k]
47 | for k in [
48 | "API_ENDPOINT",
49 | "AWS_REGION",
50 | "USER_POOL_ID",
51 | "USER_POOL_CLIENT_ID",
52 | "IDENTITY_POOL_ID",
53 | "DATA_BUCKET_NAME",
54 | "HOSTED_UI_DOMAIN",
55 | "COGNITO_CALLBACK_URL",
56 | "COGNITO_LOGOUT_URL",
57 | "ARTIFACT_BUCKET_NAME",
58 | "ENCRYPTION_MODE"
59 | ]
60 | }
61 | s3.put_object(
62 | Bucket=properties["WEBSITE_BUCKET"],
63 | Key=WEB_RUNTIME_CONFIG,
64 | Body=json.dumps(data, indent=4),
65 | )
66 | response_data = {"Message": f"Put {WEB_RUNTIME_CONFIG}"}
67 |
68 | # Send success response back to CloudFormation for Create/Update and Delete
69 | send_response(event, context, "SUCCESS", response_data)
70 |
71 | except Exception as handler_exception:
72 | logger.exception(handler_exception)
73 | send_response(event, context, "FAILED", {"Message": str(handler_exception)})
74 |
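An illustrative sketch of the `runtimeConfig.json` object the handler writes to the website bucket on Create and Update. The keys match the list in the code above; all values are placeholders.

```python
import json

runtime_config = {
    "API_ENDPOINT": "https://example.execute-api.us-east-1.amazonaws.com/api/",
    "AWS_REGION": "us-east-1",
    "USER_POOL_ID": "us-east-1_EXAMPLE",
    "USER_POOL_CLIENT_ID": "example-client-id",
    "IDENTITY_POOL_ID": "us-east-1:example-identity-pool-id",
    "DATA_BUCKET_NAME": "example-data-bucket",
    "HOSTED_UI_DOMAIN": "example.auth.us-east-1.amazoncognito.com",
    "COGNITO_CALLBACK_URL": "https://example.cloudfront.net/",
    "COGNITO_LOGOUT_URL": "https://example.cloudfront.net/",
    "ARTIFACT_BUCKET_NAME": "example-artifact-bucket",
    "ENCRYPTION_MODE": "default",
}
print(json.dumps(runtime_config, indent=4))  # mirrors Body=json.dumps(data, indent=4)
```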
--------------------------------------------------------------------------------
/source/helper/website_helper.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | #
4 |
5 | """
6 | PURPOSE:
7 | * Copy files for the website from a source build bucket to a deployment bucket.
8 | * This function starts as a CloudFormation custom resource in deployment/web.yaml.
9 | """
10 |
11 |
12 | import json
13 | import logging
14 |
15 | import boto3
16 | from cf_helper import send_response
17 |
18 | logger = logging.getLogger()
19 | logger.setLevel(logging.INFO)
20 |
21 | s3 = boto3.resource("s3")
22 | s3_client = boto3.client("s3")
23 |
24 |
25 | def copy_source(event):
26 | """
27 | Copies source files for a web application from a source S3 bucket to a deployment S3 bucket.
28 |
29 | Parameters:
30 | - event (dict): The event triggering this function, expected to contain `ResourceProperties` with keys
31 | `WebsiteCodeBucket` (source S3 bucket), `WebsiteCodePrefix` (prefix in source bucket), and
32 | `DeploymentBucket` (deployment S3 bucket, domain name format).
33 | - context: The context in which the function is called, not used in this function.
34 |
35 | Notes:
36 | - The deployment bucket name is derived by stripping the domain part from the `DeploymentBucket` value.
37 | - It assumes that the `webapp-manifest.json` file is present in the root directory where the function is executed
38 | and contains a JSON object with keys representing the file paths to be copied.
39 | """
40 |
41 | source_bucket = event["ResourceProperties"]["WebsiteCodeBucket"]
42 | source_key = event["ResourceProperties"]["WebsiteCodePrefix"]
43 | # Assuming 'DeploymentBucket' always has a domain part to strip off.
44 | website_bucket = event["ResourceProperties"]["DeploymentBucket"].split(
45 | "."
46 | )[0]
47 |
48 | with open("./webapp-manifest.json", encoding="utf-8") as file:
49 | manifest = json.load(file)
50 | logger.info("UPLOADING FILES:")
51 | for key in manifest:
52 | logger.info(f"s3://{source_bucket}/{source_key}/{key}")
53 | s3.meta.client.copy(
54 | {"Bucket": source_bucket, "Key": f"{source_key}/{key}"},
55 | website_bucket,
56 | key,
57 | )
58 |
59 |
60 | def lambda_handler(event, context):
61 | """
62 | Processes events from AWS Lambda, routing them based on request type.
63 |
64 | Parameters:
65 | - event (dict): The event dictionary received from AWS Lambda, containing details
66 | about the specific request, such as the request type and resource properties.
67 | - context: The context object provided by AWS Lambda, containing metadata about
68 | the invocation, function, and execution environment.
69 |
70 | The function logs both the received event and context for debugging purposes and
71 | ensures a response is sent back to the CloudFormation service, indicating the
72 | outcome of the operation.
73 | """
74 | try:
75 | # Log the received event and context for debugging.
76 | logger.info(f"REQUEST RECEIVED:\n {event}")
77 | logger.info(f"CONTEXT RECEIVED:\n {context}")
78 |
79 | # Determine the request type from the event and call the appropriate function.
80 | request_type = event["RequestType"]
81 | if request_type in ["Create", "Update"]:
82 | copy_source(event)
83 |
84 | # On successful execution, send a success response back.
85 | send_response(
86 | event, context, "SUCCESS", {"Message": "Operation successful"}
87 | )
88 | except Exception as handler_exception:
89 | # Log the exception and send a failure response back in case of errors.
90 | logger.exception(handler_exception)
91 | send_response(
92 | event,
93 | context,
94 | "FAILED",
95 | {"Message": f"Exception during processing: {handler_exception}"},
96 | )
97 |
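A small sketch (placeholder names only) of the event shape `copy_source` expects, highlighting how the deployment bucket name is derived by keeping only the part of `DeploymentBucket` before the first dot.

```python
event = {
    "ResourceProperties": {
        "WebsiteCodeBucket": "example-build-bucket",
        "WebsiteCodePrefix": "amcufa/website",
        "DeploymentBucket": "example-website-bucket.s3.amazonaws.com",
    }
}
website_bucket = event["ResourceProperties"]["DeploymentBucket"].split(".")[0]
print(website_bucket)  # "example-website-bucket"
```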
--------------------------------------------------------------------------------
/source/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | minversion = 6.0
3 | addopts = -ra -q
4 | testpaths =
5 | tests
6 |
--------------------------------------------------------------------------------
/source/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | -e amc_uploader
2 | aws-xray-sdk==2.13.0
3 | awswrangler==3.9.1
4 | boto3==1.34.101
5 | botocore==1.34.101
6 | moto[s3,cognitoidp]==5.0.6
7 | pandas==2.2.2
8 | phonenumbers==8.13.36
9 | pytest==8.2.0
10 | pytest-cov==5.0.0
11 | regex==2024.4.28
12 | requests>=2.32.4
13 | requests_aws4auth==1.2.3
14 | selenium==4.10.0
15 | webdriver-manager==4.0.1
16 |
--------------------------------------------------------------------------------
/source/share/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/source/share/__init__.py
--------------------------------------------------------------------------------
/source/tests/README.md:
--------------------------------------------------------------------------------
1 | ## Instructions for Running Unit Tests
2 | ---
3 |
4 | The following steps can be done to run the unit tests contained in the `tests` directory:
5 |
6 | 1. Open a terminal window and navigate to the project `/deployment` directory.
7 | 2. Run this command in the terminal:
8 | ```shell
9 | $ sh run-unit-tests.sh
10 | ```
11 | 3. The script creates a new virtual environment, sets the test environment variables, and then runs the tests.
12 | 4. A coverage report will be generated for SonarQube and can be viewed in the `tests/coverage-reports` directory.
13 |
14 |
15 | #### From the project test directory
16 |
17 | Unit Test
18 | ```shell
19 | $ ./run_test.sh --run_unit_test
20 | ----
21 | $ ./run_test.sh -h
22 | -rut, --run_unit_test Run Unit Test.
23 | [--test-file-name TEST_FILE_NAME] (e.g. `test_api.py`, or `test_api.py::test_get_etl_jobs` for a single test.)
24 | [--aws-region AWS_REGION] (Optional, Default is us-east-1.)
25 | ```
26 |
27 | Integration Test
28 | ```shell
29 | $ ./run_test.sh --run_integ_test
30 | -------
31 | $ ./run_test.sh -h
32 | -rit, --run_integ_test Run Integ Test.
33 | [--stack-name STACK_NAME] (An existing deployed stack with code changes/version to run integration test on.)
34 | [--aws-region AWS_REGION]
35 | [--aws-default-profile AWS_DEFAULT_PROFILE] (AWS default profiles with creds) (Required if --aws-access-key-id and --aws-secret-access-key is not provided)
36 | [--aws-access-key-id AWS_ACCESS_KEY_ID] [--aws-secret-access-key AWS_SECRET_ACCESS_KEY] (Required if --aws-default-profile is not provided)
37 | [--data-bucket-name DATA_BUCKET_NAME] (Optional if --test-params-secret-name is provided )
38 | [--amc-instance-id AMC_INSTANCE_ID] (Optional if --test-params-secret-name is provided )
39 | [--amc-advertiser-id AMC_ADVERTISER_ID] (Optional if --test-params-secret-name is provided )
40 | [--amc-marketplace-id AMC_MARKETPLACE_ID] (Optional if --test-params-secret-name is provided )
41 | [--auth-code AUTH_CODE] (Amazon API auth code) (Optional if --refresh-token is provided) (Optional, but not eligible with --test-params-secret-name as refresh token is required )
42 | [--client-id CLIENT_ID] (Optional if --test-params-secret-name is provided )
43 | [--client-secret CLIENT_SECRET] (Optional if --test-params-secret-name is provided )
44 | [--refresh-token REFRESH_TOKEN] (Amazon API Refresh token) (Required, if --auth-code is not provided.) (Optional if --test-params-secret-name is provided )
45 | [--test-data-upload-account-id TEST_DATA_UPLOAD_ACCOUNT_ID] (Optional if --test-params-secret-name is provided )
46 | [--test-user-arn TEST_USER_ARN] (Optional, if not provided the '/root' user of the stack account will be used.) (It also assumes the user has admin privileges.)
47 | [--aws-xray-sdk-enabled] (Optional, Default is false)
48 | [--boto-config] (Optional, Default is '{"region_name": "AWS_REGION"}')
49 | [--version] (Optional, Default is 0.0.0)
50 | [--solution-name] (Optional, Default is Amcufa Integration Test)
51 | [--test-params-secret-name] (Optional, Run the integ test with variables stored in AWS Secrets Manager in the stack account.)
52 | ## secret-id amcufa_integ_test_secret
53 | ## secret-value sample, all variables are required.
54 | {
55 | "instance_id": "abcd",
56 | "advertiser_id": "ABCD12345",
57 | "marketplace_id": "ABCD",
58 | "data_upload_account_id": "1234567889",
59 | "client_id": "amzn1.XXXXXXXXXX",
60 | "client_secret": "amzn1.XXXXXXX",
61 | "refresh_token": "Atzr|XXXXXXXXX",
62 | "data_bucket_name": "s3-source-bucket",
63 | "amc_endpoint_url": "https://some-api-endpoint.us-east-1.amazonawa.com/beta",
64 | }
65 | [--test-params-secret-name-region] (Optional, Default to us-east-1.)
66 | [--deep-test] (Optional, Default to false.) (100% test coverage, but set to false for tests optimization to prevent timeouts.)
67 | ```
68 |
--------------------------------------------------------------------------------
/source/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/source/tests/__init__.py
--------------------------------------------------------------------------------
/source/tests/e2e/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/source/tests/e2e/__init__.py
--------------------------------------------------------------------------------
/source/tests/e2e/conftest.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import logging
5 | import os
6 |
7 | import boto3
8 | import pytest
9 |
10 | # Fixture for retrieving env variables
11 |
12 |
13 | @pytest.fixture(scope="session")
14 | def test_environment():
15 | print("Setting variables for tests")
16 | try:
17 | test_env_vars = {
18 | "REGION": os.environ["REGION"],
19 | "STACK_NAME": os.environ["STACK_NAME"],
20 | "ACCESS_KEY": os.environ["AWS_ACCESS_KEY_ID"],
21 | "SECRET_KEY": os.environ["AWS_SECRET_ACCESS_KEY"],
22 | "EMAIL": os.environ["EMAIL"],
23 | "PASSWORD": os.environ["PASSWORD"],
24 | "DATA_BUCKET_NAME": os.environ["DATA_BUCKET_NAME"],
25 | "LOCALHOST_URL": os.environ.get("LOCALHOST_URL"),
26 | }
27 |
28 | except KeyError as e:
29 | logging.error(
30 | "ERROR: Missing a required environment variable for testing: {variable}".format(
31 | variable=e
32 | )
33 | )
34 | raise Exception(e)
35 | else:
36 | return test_env_vars
37 |
38 |
39 | # Fixture for stack resources
40 | @pytest.fixture(scope="session")
41 | def stack_resources(test_environment):
42 | print("Getting stack outputs")
43 | resources = {}
44 | client = boto3.client(
45 | "cloudformation", region_name=test_environment["REGION"]
46 | )
47 | response = client.describe_stacks(StackName=test_environment["STACK_NAME"])
48 | outputs = response["Stacks"][0]["Outputs"]
49 | for output in outputs:
50 | resources[output["OutputKey"]] = output["OutputValue"]
51 | return resources
52 |
--------------------------------------------------------------------------------
/source/tests/e2e/requirements.txt:
--------------------------------------------------------------------------------
1 | -r ../../requirements-dev.txt
2 | pandas==2.0.2
3 | phonenumbers==8.13.14
4 | regex==2023.6.3
5 | requests_aws4auth==1.2.3
6 | selenium==4.20.0
7 | webdriver-manager==3.8.6
8 |
--------------------------------------------------------------------------------
/source/tests/e2e/run_e2e.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | ###############################################################################
3 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
4 | # SPDX-License-Identifier: Apache-2.0
5 | #
6 | # PURPOSE:
7 | # Run selenium tests.
8 | #
9 | # PRELIMINARY:
10 | # Deploy the solution before running this script and set the following environment variables:
11 | # - AWS_ACCESS_KEY_ID - To authorize AWS CLI commands
12 | # - AWS_SECRET_ACCESS_KEY - To authorize AWS CLI commands
13 | # - EMAIL - To log into the webui
14 | # - PASSWORD - To log into the webui
15 | # - DATA_BUCKET_NAME - From which to upload data files for testing
16 | #
17 | # USAGE:
18 | # ./run_e2e.sh [-h] [-v] --stack-name {STACK_NAME} --region {REGION} [--profile {PROFILE}]
19 | # STACK_NAME name of the Cloudformation stack where the solution is running.
20 | # REGION needs to be in a format like us-east-1
21 | # PROFILE is optional. It's the profile that you have setup in ~/.aws/credentials
22 | # that you want to use for AWS CLI commands.
23 | #
24 | # The following options are available:
25 | #
26 | # -h | --help Print usage
27 | # -v | --verbose Print script debug info
28 | #
29 | ###############################################################################
30 |
31 | trap cleanup_and_die SIGINT SIGTERM ERR
32 |
33 | usage() {
34 | msg "$msg"
35 | cat <&2 -e "${1-}"
71 | }
72 |
73 | die() {
74 | local msg=$1
75 | local code=${2-1} # default exit status 1
76 | msg "$msg"
77 | exit "$code"
78 | }
79 |
80 | parse_params() {
81 | # default values of variables set from params
82 | flag=0
83 | param=''
84 |
85 | while :; do
86 | case "${1-}" in
87 | -h | --help) usage ;;
88 | -v | --verbose) set -x ;;
89 | --stack-name)
90 | stack_name="${2}"
91 | shift
92 | ;;
93 | --region)
94 | region="${2}"
95 | shift
96 | ;;
97 | --profile)
98 | profile="${2}"
99 | shift
100 | ;;
101 | -?*) die "Unknown option: $1" ;;
102 | *) break ;;
103 | esac
104 | shift
105 | done
106 |
107 | args=("$@")
108 |
109 | # check required params and arguments
110 | [[ -z "${stack_name}" ]] && usage "Missing required parameter: stack-name"
111 | [[ -z "${region}" ]] && usage "Missing required parameter: region"
112 |
113 | return 0
114 | }
115 |
116 | parse_params "$@"
117 | msg "Parameters:"
118 | msg "- Stack name: ${stack_name}"
119 | msg "- Region: ${region}"
120 | [[ ! -z "${profile}" ]] && msg "- Profile: ${profile}"
121 |
122 | echo ""
123 | sleep 3
124 |
125 | # Make sure aws cli is installed
126 | if [[ ! -x "$(command -v aws)" ]]; then
127 | echo "ERROR: This script requires the AWS CLI to be installed. Please install it then run again."
128 | exit 1
129 | fi
130 |
131 | # Make sure aws cli is authorized
132 | if [ -z $AWS_ACCESS_KEY_ID ]
133 | then
134 | echo "ERROR: You must set the env variable 'AWS_ACCESS_KEY_ID' with a valid IAM access key id."
135 | exit 1
136 | fi
137 |
138 | if [ -z $AWS_SECRET_ACCESS_KEY ]
139 | then
140 | echo "ERROR: You must set the env variable 'AWS_SECRET_ACCESS_KEY' with a valid IAM secret access key."
141 | exit 1
142 | fi
143 |
144 | # Make sure aws cli is authorized
145 | if [ -z $EMAIL ]
146 | then
147 | echo "ERROR: You must set the env variable 'EMAIL' to log into the webui."
148 | exit 1
149 | fi
150 |
151 | if [ -z $PASSWORD ]
152 | then
153 | echo "ERROR: You must set the env variable 'PASSWORD' to log into the webui."
154 | exit 1
155 | fi
156 |
157 | if [ -z $DATA_BUCKET_NAME ]
158 | then
159 | echo "ERROR: You must set the env variable 'DATA_BUCKET_NAME' from which to upload test data files."
160 | exit 1
161 | fi
162 |
163 | # Create and activate a temporary Python environment for this script.
164 | echo "------------------------------------------------------------------------------"
165 | echo "Creating a temporary Python virtualenv for this script"
166 | echo "------------------------------------------------------------------------------"
167 | python3 -c "import os; print (os.getenv('VIRTUAL_ENV'))" | grep -q None
168 | if [ $? -ne 0 ]; then
169 | echo "ERROR: Do not run this script inside Virtualenv. Type \`deactivate\` and run again.";
170 | exit 1;
171 | fi
172 | command -v python3
173 | if [ $? -ne 0 ]; then
174 | echo "ERROR: install Python3 before running this script"
175 | exit 1
176 | fi
177 | echo "Using virtual python environment:"
178 | VENV=$(mktemp -d) && echo "$VENV"
179 | command -v python3 > /dev/null
180 | if [ $? -ne 0 ]; then
181 | echo "ERROR: install Python3 before running this script"
182 | exit 1
183 | fi
184 | python3 -m venv "$VENV"
185 | source "$VENV"/bin/activate
186 | pip3 install wheel
187 | pip3 install --quiet -r requirements.txt
188 |
189 | echo "------------------------------------------------------------------------------"
190 | echo "Running pytest"
191 | echo "------------------------------------------------------------------------------"
192 |
193 | export REGION=$region
194 | export STACK_NAME=$stack_name
195 | export EMAIL=$EMAIL
196 | export PASSWORD=$PASSWORD
197 | export DATA_BUCKET_NAME=$DATA_BUCKET_NAME
198 | pytest -s -W ignore::DeprecationWarning -p no:cacheprovider
199 |
200 | if [ $? -ne 0 ]; then
201 | die 1
202 | fi
203 |
204 | cleanup
205 | echo "Done"
206 | exit 0
207 |
--------------------------------------------------------------------------------
/source/tests/e2e/test_app.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import pytest
5 | from selenium import webdriver
6 | from selenium.webdriver.chrome.options import Options
7 | from selenium.webdriver.common.by import By
8 | from selenium.webdriver.support import expected_conditions as EC
9 | from selenium.webdriver.support.ui import Select, WebDriverWait
10 | from webdriver_manager.chrome import ChromeDriverManager
11 |
12 |
13 | @pytest.fixture
14 | def browser():
15 | chrome_options = Options()
16 | # Make sure the window is large enough in headless mode so that all
17 | # the elements on the page are visible
18 | chrome_options.add_argument("--headless")
19 | chrome_options.add_argument("--window-size=1920,1080")
20 | chrome_options.add_argument("--start-maximized")
21 |
22 | # Selenium 4.10+ no longer accepts a driver path or the chrome_options
23 | # kwarg here; Selenium Manager locates the driver, so only options is passed.
24 | browser = webdriver.Chrome(options=chrome_options)
25 | return browser
26 |
27 |
28 | def test_everything(browser, test_environment, stack_resources):
29 | browser.implicitly_wait(5)
30 | browser.get(
31 | test_environment.get("LOCALHOST_URL")
32 | or stack_resources["UserInterface"]
33 | )
34 | wait = WebDriverWait(browser, 30)
35 | # Login
36 | username_field = browser.find_element(
37 | "xpath", "/html/body/div/div/div/div/div[2]/div[1]/div/input"
38 | )
39 | username_field.send_keys(test_environment["EMAIL"])
40 | password_field = browser.find_element(
41 | "xpath", "/html/body/div/div/div/div/div[2]/div[2]/input"
42 | )
43 | password_field.send_keys(test_environment["PASSWORD"])
44 | browser.find_element(
45 | "xpath", "/html/body/div/div/div/div/div[3]/span[1]/button"
46 | ).click()
47 | # Validate navbar brand
48 | xpath = "/html/body/div/div/div/div[1]/nav/a"
49 | wait.until(EC.presence_of_element_located((By.XPATH, xpath)))
50 | navbar_brand = browser.find_elements("xpath", xpath)[0].get_attribute(
51 | "innerText"
52 | )
53 | assert navbar_brand == "Amazon Marketing Cloud uploader from AWS"
54 |
55 | # open Step 1
56 | browser.find_element(By.ID, "step1").click()
57 | # validate the value shown for s3 bucket
58 | element_id = "bucket-input"
59 | wait.until(EC.presence_of_element_located((By.ID, element_id)))
60 | bucket_value = browser.find_element(By.ID, element_id).get_attribute(
61 | "placeholder"
62 | )
63 | assert bucket_value == test_environment["DATA_BUCKET_NAME"]
64 | # validate the s3 bucket table
65 | xpath = "/html/body/div/div/div/div[2]/div/div[2]/div[2]/div/table/tbody/tr[1]/td[2]"
66 | wait.until(EC.presence_of_element_located((By.XPATH, xpath)))
67 | xpath = (
68 | "/html/body/div/div/div/div[2]/div/div[2]/div[2]/div/table/tbody/tr"
69 | )
70 | rows = browser.find_elements("xpath", xpath)
71 | assert len(rows) > 0
72 |
73 | # validate key selection when clicking top table row
74 | browser.find_element(
75 | "xpath",
76 | "/html/body/div/div/div/div[2]/div/div[2]/div[2]/div/table/tbody/tr[1]/td[1]",
77 | ).click()
78 |
79 | key_input_field = browser.find_element(
80 | "xpath",
81 | "/html/body/div/div/div/div[2]/div/div[2]/div[1]/div/div[1]/div[2]/div/input",
82 | )
83 | key_input_text = key_input_field.get_attribute("value")
84 |
85 | key_table_field1 = browser.find_element(
86 | "xpath",
87 | "/html/body/div/div/div/div[2]/div/div[2]/div[2]/div/table/tbody/tr[1]/td[2]",
88 | )
89 | key_table_text1 = key_table_field1.text
90 | assert key_input_text == key_table_text1
91 |
92 | # validate multiple key selection, click second row
93 | browser.find_element(
94 | "xpath",
95 | "/html/body/div/div/div/div[2]/div/div[2]/div[2]/div/table/tbody/tr[2]/td[1]",
96 | ).click()
97 |
98 | multiple_key_text = key_input_field.get_attribute("value")
99 |
100 | key_table_field2 = browser.find_element(
101 | "xpath",
102 | "/html/body/div/div/div/div[2]/div/div[2]/div[2]/div/table/tbody/tr[2]/td[2]",
103 | )
104 | key_table_text2 = key_table_field2.text
105 |
106 | keys = key_table_text1 + ", " + key_table_text2
107 | assert multiple_key_text == keys
108 |
109 | # open Step 2
110 | browser.find_element(
111 | "xpath",
112 | "/html/body/div/div/div/div[2]/div/div[2]/div[1]/div/div[2]/button",
113 | ).click()
114 |
115 | # validate if keys stored, s3 key field matches in step 2 from step 1
116 | s3_key_field = browser.find_element(
117 | "xpath",
118 | "/html/body/div/div/div/div[2]/div/div[2]/div[1]/div[2]/div/input",
119 | )
120 | s3_key_text = s3_key_field.get_attribute("value")
121 |
122 | assert s3_key_text == keys
123 |
124 | # validate add to existing dataset button triggers select data dropdown
125 | browser.find_element(
126 | "xpath",
127 | "/html/body/div/div/div/div[2]/div/div[2]/div[1]/div[3]/div[2]/label/span",
128 | ).click()
129 | assert browser.find_element(
130 | "xpath",
131 | "/html/body/div/div/div/div[2]/div/div[2]/div[1]/div[4]/select",
132 | )
133 |
134 | # validate create dataset button triggers name form field
135 | browser.find_element(
136 | "xpath",
137 | "/html/body/div/div/div/div[2]/div/div[2]/div[1]/div[3]/div[1]/label/span",
138 | ).click()
139 | assert browser.find_element(
140 | "xpath",
141 | "/html/body/div/div/div/div[2]/div/div[2]/div[1]/div[4]/div[1]/div/input",
142 | )
143 |
144 | # open Step 2
145 | browser.find_element(By.ID, "step2").click()
146 | # Time period options should not be visible until we click FACT
147 | assert not len(browser.find_elements(By.ID, "time_period_options"))
148 | # select FACT dataset type
149 | browser.find_element(
150 | "xpath",
151 | "/html/body/div/div/div/div[2]/div/div[2]/div[1]/div[4]/div[3]/div[1]/fieldset/div/div/div[1]/label/span",
152 | ).click()
153 | # Time period options should now be visible
154 | # and the first option should be selected by default
155 | assert len(browser.find_elements(By.ID, "time_period_options"))
156 | assert browser.find_element(
157 | By.XPATH, '//*[@id="time_period_options_BV_option_0"]'
158 | ).is_selected()
159 | # Country dropdown should be visible
160 | assert browser.find_element(
161 | By.ID, "country-code-dropdown"
162 | )
163 | # select US as country and check that the value updates
164 | cc_dropdown = browser.find_element(By.ID, "country-code-dropdown")
165 | Select(cc_dropdown).select_by_value("US")
166 | assert cc_dropdown.get_attribute("value") == "US"
167 |
168 | # Sign out
169 | browser.find_element(
170 | "xpath", "/html/body/div/div/div/div[1]/nav/div/ul/li/a"
171 | ).click()
172 |
--------------------------------------------------------------------------------
/source/tests/integration_test/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/source/tests/integration_test/__init__.py
--------------------------------------------------------------------------------
/source/tests/requirements-test.txt:
--------------------------------------------------------------------------------
1 | aws-xray-sdk>=2.13.0
2 | awswrangler==3.9.1
3 | boto3==1.34.101
4 | chalice==1.31.0
5 | moto[s3,cognitoidp]==5.0.6
6 | pyparsing==3.1.2
7 | pytest==8.2.0
8 | pytest-mock==3.14.0
9 | pytest-ordering==0.6
10 | pytest-dependency==0.6.0
11 | requests>=2.32.4
12 | cfnresponse==1.1.4
13 | requests-mock==1.12.1
14 |
--------------------------------------------------------------------------------
/source/tests/unit_test/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/source/tests/unit_test/__init__.py
--------------------------------------------------------------------------------
/source/tests/unit_test/amc_transformation/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/source/tests/unit_test/amc_transformation/__init__.py
--------------------------------------------------------------------------------
/source/tests/unit_test/amc_transformation/sample_data/README.md:
--------------------------------------------------------------------------------
1 | There is no PII contained in this directory.
2 | All sample data is generated using https://www.mockaroo.com/
3 |
--------------------------------------------------------------------------------
/source/tests/unit_test/amc_uploader/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/0cbf8c1f74d87f8e26cd1900ffe0cf36a64a7cbc/source/tests/unit_test/amc_uploader/__init__.py
--------------------------------------------------------------------------------
/source/tests/unit_test/conftest.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import os
5 | import pytest
6 |
7 |
8 | @pytest.fixture(scope="session", autouse=True)
9 | def aws_credentials():
10 | """Mocked AWS Credentials for moto."""
11 | os.environ["AWS_ACCESS_KEY_ID"] = "123456789"
12 | os.environ["AWS_SECRET_ACCESS_KEY"] = "987654321"
13 | os.environ["AWS_SECURITY_TOKEN"] = "test_securitytoken"
14 | os.environ["AWS_SESSION_TOKEN"] = "test_session_token"
15 | os.environ["AWS_REGION"] = os.environ.get("AWS_REGION", "us-east-1")
16 | os.environ["AWS_DEFAULT_REGION"] = os.environ["AWS_REGION"]
17 |
--------------------------------------------------------------------------------
/source/tests/unit_test/test_anonymous_data_logger.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | #
4 | # ###############################################################################
5 | # PURPOSE:
6 | # * Regression test for anonymous_data_logger/.
7 | # USAGE:
8 | # ./run_test.sh --run_unit_test --test-file-name test_anonymous_data_logger.py
9 | ###############################################################################
10 |
11 | from unittest.mock import MagicMock, patch
12 |
13 | import pytest
14 |
15 |
16 | @pytest.fixture
17 | def fake_event():
18 | return {
19 | "ResponseURL": "https://test.com/test",
20 | "StackId": 12345,
21 | "RequestId": 67890,
22 | "LogicalResourceId": 1112131415,
23 | "ResourceProperties": {
24 | "Resource": None,
25 | },
26 | }
27 |
28 |
29 | @pytest.fixture
30 | def fake_context():
31 | return MagicMock(log_stream_name="fake_log_stream")
32 |
33 |
34 | @patch("requests.put")
35 | @patch("urllib.request.urlopen")
36 | def test_handler(
37 | mock_response_open, mock_response_put, fake_event, fake_context
38 | ):
39 | from anonymous_data_logger.anonymous_data_logger import handler
40 |
41 | fake_event["RequestType"] = "Create"
42 | fake_event["ResourceProperties"]["Resource"] = "UUID"
43 | fake_event["ResourceProperties"]["ServiceToken"] = "some arn"
44 |
45 | mock_response_open.return_value.getcode.return_value = 200
46 | mock_response_put.return_value = MagicMock(reason="200")
47 |
48 | handler(
49 | event=fake_event,
50 | context=fake_context,
51 | )
52 |
53 | fake_event["RequestType"] = "Update"
54 | handler(
55 | event=fake_event,
56 | context=fake_context,
57 | )
58 |
59 | fake_event["ResourceProperties"]["Resource"] = "AnonymousMetric"
60 | fake_event["RequestType"] = "Create"
61 | handler(
62 | event=fake_event,
63 | context=fake_context,
64 | )
65 |
66 | fake_event["RequestType"] = "Update"
67 | handler(
68 | event=fake_event,
69 | context=fake_context,
70 | )
71 |
72 | with patch("logging.Logger.info") as log_mock:
73 | fake_resource = "AnonymousMetric"
74 | fake_event["RequestType"] = "Delete"
75 | fake_event["ResourceProperties"]["Resource"] = fake_resource
76 | handler(
77 | event=fake_event,
78 | context=fake_context,
79 | )
80 | log_mock.assert_called_with(
81 | "RESPONSE:: {}: Not required to report data for delete request.".format(
82 | fake_resource
83 | )
84 | )
85 |
86 | fake_event["RequestType"] = "Workload"
87 | fake_event["Metrics"] = "some metrics"
88 | handler(
89 | event=fake_event,
90 | context=fake_context,
91 | )
92 | log_mock.assert_called_with("some metrics")
93 |
94 | with patch("logging.Logger.error") as log_mock:
95 | fake_resource = "FakeAnonymousMetric"
96 | fake_event["ResourceProperties"]["Resource"] = fake_resource
97 | fake_event["RequestType"] = "Create"
98 | handler(
99 | event=fake_event,
100 | context=fake_context,
101 | )
102 | log_mock.assert_called_with(
103 | "Create failed, {} not defined in the Custom Resource".format(
104 | fake_resource
105 | )
106 | )
107 |
108 | fake_event["ResourceProperties"]["Resource"] = fake_resource
109 | fake_event["RequestType"] = "Update"
110 | handler(
111 | event=fake_event,
112 | context=fake_context,
113 | )
114 | log_mock.assert_called_with(
115 | "Create failed, {} not defined in the Custom Resource".format(
116 | fake_resource
117 | )
118 | )
119 |
120 | fake_event["ResourceProperties"]["Resource"] = fake_resource
121 | fake_event["RequestType"] = "DOES NOT EXIST"
122 | handler(
123 | event=fake_event,
124 | context=fake_context,
125 | )
126 | log_mock.assert_called_with(
127 | "RESPONSE:: {} Not supported".format(fake_event["RequestType"])
128 | )
129 |
--------------------------------------------------------------------------------
/source/tests/unit_test/test_cognito_hosted_ui_resource.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | #
4 | # ###############################################################################
5 | # PURPOSE:
6 | # * Regression test for cognito_hosted_ui_resource/.
7 | # USAGE:
8 | # ./run_test.sh --run_unit_test --test-file-name test_cognito_hosted_ui_resource.py
9 | ###############################################################################
10 |
11 | import os
12 | from unittest.mock import patch
13 |
14 | import pytest
15 |
16 |
17 | @pytest.fixture
18 | def mock_env_variable():
19 | os.environ["USER_POOL_ID"] = "123456"
20 |
21 |
22 | @patch("cognito_hosted_ui_resource.cognito_hosted_ui_resource.boto3.client")
23 | @patch("cognito_hosted_ui_resource.cognito_hosted_ui_resource.cfnresponse")
24 | def test_handler(mock_cfnresponse, mock_boto3_client, mock_env_variable):
25 |
26 | from cognito_hosted_ui_resource.cognito_hosted_ui_resource import handler, get_file, DIR_PATH
27 |
28 | handler({}, None)
29 | expected_test_args = {
30 | "UserPoolId": os.environ["USER_POOL_ID"],
31 | "CSS": get_file(f"{DIR_PATH}/login.css", "r"),
32 | "ImageFile": get_file(f"{DIR_PATH}/amcufa-logo.png", "rb")
33 | }
34 | mock_boto3_client.assert_called_once_with("cognito-idp")
35 | mock_boto3_client("cognito-idp").set_ui_customization.assert_called_once_with(**expected_test_args)
36 | mock_cfnresponse.send.assert_called_once_with({}, None, mock_cfnresponse.SUCCESS, {"response": mock_boto3_client("cognito-idp").set_ui_customization(**expected_test_args)})
--------------------------------------------------------------------------------
/source/tests/unit_test/test_helper.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | #
4 | # ###############################################################################
5 | # PURPOSE:
6 | # * Regression test for helper.
7 | # USAGE:
8 | # ./run_test.sh --run_unit_test --test-file-name test_helper.py
9 | ###############################################################################
10 |
11 |
12 | import json
13 | import os
14 | from unittest.mock import MagicMock, mock_open, patch
15 |
16 | import boto3
17 | import botocore
18 | import pytest
19 | from moto import mock_aws
20 |
21 |
22 | @pytest.fixture
23 | def test_configs():
24 | return {
25 | "response_status": "SUCCESS",
26 | "s3_bucket": "fake_s3_bucket",
27 | "s3_key": "some_file.json",
28 | "s3_prefix": "some_prefix",
29 | "s3_artifact_bucket": "fake_s3_artifact_bucket",
30 | "api_endpoint": "https://test_end_point_url.com/test",
31 | "content_type": "application/json",
32 | }
33 |
34 |
35 | @pytest.fixture
36 | def fake_event(test_configs):
37 | return {
38 | "StackId": 12345,
39 | "RequestId": 67890,
40 | "LogicalResourceId": 1112131415,
41 | "ResponseURL": "https://test.com/test",
42 | "ResourceProperties": {
43 | "WebsiteCodeBucket": test_configs["s3_bucket"],
44 | "WebsiteCodePrefix": test_configs["s3_prefix"],
45 | "DeploymentBucket": f"{test_configs['s3_bucket']}.some_bucket",
46 | },
47 | }
48 |
49 |
50 | @pytest.fixture
51 | def fake_config_event(test_configs):
52 | return {
53 | "StackId": 12345,
54 | "RequestId": 67890,
55 | "LogicalResourceId": 1112131415,
56 | "ResponseURL": "https://test.com/test",
57 | "ResourceProperties": {
58 | "API_ENDPOINT": "API_ENDPOINT Value",
59 | "AWS_REGION": "AWS_REGION Value",
60 | "USER_POOL_ID": "USER_POOL_ID Value",
61 | "USER_POOL_CLIENT_ID": "USER_POOL_CLIENT_ID Value",
62 | "IDENTITY_POOL_ID": "IDENTITY_POOL_ID Value",
63 | "DATA_BUCKET_NAME": "DATA_BUCKET_NAME Value",
64 | "ARTIFACT_BUCKET_NAME": "ARTIFACT_BUCKET_NAME Value",
65 | "ENCRYPTION_MODE": "ENCRYPTION_MODE Value",
66 | "HOSTED_UI_DOMAIN": "HOSTED_UI_DOMAIN Value",
67 | "COGNITO_CALLBACK_URL": "COGNITO_CALLBACK_URL Value",
68 | "COGNITO_LOGOUT_URL": "COGNITO_LOGOUT_URL Value",
69 | "WEBSITE_BUCKET": test_configs["s3_bucket"]
70 | },
71 | }
72 |
73 |
74 | @pytest.fixture
75 | def fake_context():
76 | return MagicMock(log_stream_name="fake_log_stream")
77 |
78 |
79 | @pytest.fixture
80 | def mock_env_variables(test_configs):
81 | os.environ["UserPoolId"] = "3333"
82 | os.environ["PoolClientId"] = "4444"
83 | os.environ["IdentityPoolId"] = "2222"
84 | os.environ["ApiEndpoint"] = test_configs["api_endpoint"]
85 | os.environ["DataBucketName"] = test_configs["s3_bucket"]
86 | os.environ["ArtifactBucketName"] = test_configs["s3_artifact_bucket"]
87 | os.environ["EncryptionMode"] = "Secured"
88 |
89 |
90 | @mock_aws
91 | @patch("urllib.request.build_opener")
92 | def test_send_response(mock_response, fake_event, fake_context, test_configs):
93 | from helper.website_helper import send_response
94 |
95 | send_response(
96 | event=fake_event,
97 | context=fake_context,
98 | response_status=test_configs["response_status"],
99 | response_data={},
100 | )
101 |
102 |
103 | @mock_aws
104 | @patch("urllib.request.build_opener")
105 | def test_copy_source(mock_response, mock_env_variables, fake_event, fake_context, test_configs):
106 | s3 = boto3.client("s3", region_name="us-east-1")
107 | s3.create_bucket(Bucket=test_configs["s3_bucket"])
108 | s3 = boto3.resource("s3", region_name="us-east-1")
109 | s3_object = s3.Object(
110 | test_configs["s3_bucket"],
111 | f'{test_configs["s3_prefix"]}/{test_configs["s3_key"]}',
112 | )
113 | s3_object.put(Body="{}", ContentType=test_configs["content_type"])
114 | file_loc = "./webapp-manifest.json"
115 | from helper.website_helper import lambda_handler
116 |
117 | manifest_data = [test_configs["s3_key"]]
118 |
119 | with patch(
120 | "builtins.open", mock_open(read_data=json.dumps(manifest_data))
121 | ) as mock_file:
122 | from helper.website_helper import copy_source
123 | copy_source(event=fake_event)
124 | mock_file.assert_called_with(file_loc, encoding="utf-8")
125 |
126 |
127 | @patch("urllib.request.build_opener")
128 | def test_lambda_handler(mock_response, fake_event, fake_context, test_configs):
129 | with mock_aws():
130 | s3 = boto3.client("s3", region_name="us-east-1")
131 | s3.create_bucket(Bucket=test_configs["s3_bucket"])
132 | s3 = boto3.resource("s3", region_name="us-east-1")
133 | s3_object = s3.Object(
134 | test_configs["s3_bucket"],
135 | f'{test_configs["s3_prefix"]}/{test_configs["s3_key"]}',
136 | )
137 | s3_object.put(Body="{}", ContentType=test_configs["content_type"])
138 | file_loc = "./webapp-manifest.json"
139 | from helper.website_helper import lambda_handler
140 |
141 | manifest_data = [test_configs["s3_key"]]
142 |
143 | with patch(
144 | "builtins.open", mock_open(read_data=json.dumps(manifest_data))
145 | ) as mock_file:
146 | fake_event["RequestType"] = "Create"
147 | lambda_handler(event=fake_event, context=fake_context)
148 |
149 | fake_event["RequestType"] = "Update"
150 | lambda_handler(event=fake_event, context=fake_context)
151 |
152 | mock_file.assert_called_with(file_loc, encoding="utf-8")
153 | fake_event["RequestType"] = "Delete"
154 | lambda_handler(event=fake_event, context=fake_context)
155 |
156 |
157 | @patch("urllib.request.build_opener")
158 | def test_config_lambda_handler(mock_response, fake_config_event, fake_context, test_configs):
159 | with mock_aws():
160 | s3 = boto3.client("s3", region_name="us-east-1")
161 | s3.create_bucket(Bucket=test_configs["s3_bucket"])
162 | file = "runtimeConfig.json"
163 | from helper.config_helper import handler
164 |
165 | fake_config_event["RequestType"] = "Create"
166 | handler(event=fake_config_event, context=fake_context)
167 | s3.head_object(Bucket=test_configs["s3_bucket"], Key=file)
168 |
169 | fake_config_event["RequestType"] = "Update"
170 | handler(event=fake_config_event, context=fake_context)
171 | s3.head_object(Bucket=test_configs["s3_bucket"], Key=file)
172 |
173 | fake_config_event["RequestType"] = "Delete"
174 | handler(event=fake_config_event, context=fake_context)
175 | s3.head_object(Bucket=test_configs["s3_bucket"], Key=file)
176 |
--------------------------------------------------------------------------------
/source/tests/unit_test/test_tasks.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | #
4 | # ###############################################################################
5 | # PURPOSE:
6 | # * Unit test for project api endpoints and workflow.
7 | #
8 | # USAGE:
9 | # ./run_test.sh --run_unit_test --test-file-name test_tasks.py
10 | ###############################################################################
11 |
12 | import json
13 | import os
14 |
15 | import pytest
16 | from moto import mock_aws
17 |
18 |
19 | def test_safe_json():
20 | from share.tasks import safe_json_loads
21 |
22 | assert safe_json_loads("test") == "test"
23 | assert safe_json_loads(json.dumps({"test": "123"})) == {"test": "123"}
24 |
25 |
26 | @mock_aws
27 | def test_create_update_secret_exception():
28 | from share.tasks import create_update_secret
29 |
30 | with pytest.raises(Exception):
31 | create_update_secret(secret_id=123, secret_string="test_string")
32 |
33 |
34 | def test_get_client_id_secret_env():
35 | from share.tasks import get_client_id_secret_env
36 |
37 | client_id = os.environ["CLIENT_ID"]
38 | client_secret = os.environ["CLIENT_SECRET"]
39 |
40 | assert get_client_id_secret_env() == (client_id, client_secret)
41 |
42 | os.environ["CLIENT_ID"] = ""
43 | os.environ["CLIENT_SECRET"] = ""
44 |
45 | assert get_client_id_secret_env() == ('', '')
46 |
47 | # Reapply env client_id and secrets.
48 | os.environ["CLIENT_ID"] = client_id
49 | os.environ["CLIENT_SECRET"] = client_secret
50 |
--------------------------------------------------------------------------------
/source/website/babel.config.js:
--------------------------------------------------------------------------------
1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | // SPDX-License-Identifier: Apache-2.0
3 |
4 | module.exports = {
5 | presets: [
6 | '@vue/app'
7 | ]
8 | }
9 |
--------------------------------------------------------------------------------
/source/website/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "amcufa",
3 | "version": "3.0.15",
4 | "private": true,
5 | "scripts": {
6 | "serve": "vue-cli-service serve",
7 | "build": "vue-cli-service build",
8 | "lint": "vue-cli-service lint --no-fix"
9 | },
10 | "dependencies": {
11 | "@babel/core": "^7.24.5",
12 | "@vue/compat": "^3.5.13",
13 | "aws-amplify": "^5.3.15",
14 | "bootstrap": "^4.6.2",
15 | "bootstrap-vue": "^2.23.1",
16 | "register-service-worker": "^1.7.2",
17 | "vue": "^3.5.13",
18 | "vue-router": "^3.6.5",
19 | "vuex": "^4.1.0",
20 | "webpack-subresource-integrity": "^5.2.0-rc.1"
21 | },
22 | "devDependencies": {
23 | "@babel/cli": "^7.24.7",
24 | "@vue/cli-plugin-eslint": "^5.0.8",
25 | "@vue/cli-service": "^5.0.8",
26 | "@vue/compiler-sfc": "^3.5.0-alpha.2",
27 | "eslint-plugin-vue": "^9.26.0"
28 | },
29 | "eslintConfig": {
30 | "root": true,
31 | "env": {
32 | "node": true
33 | },
34 | "extends": [
35 | "plugin:vue/essential"
36 | ],
37 | "rules": {
38 | "vue/multi-word-component-names": 0,
39 | "vue/no-reserved-component-names": 0
40 | }
41 | },
42 | "overrides": {
43 | "fast-xml-parser": "4.4.1",
44 | "ip": "^2.0.1",
45 | "postcss": "^8.4.38",
46 | "semver": "7.6.2",
47 | "cross-spawn": "^7.0.6",
48 | "cookie": "^0.7.0",
49 | "vue": "^3.5.13",
50 | "form-data": "4.0.4"
51 | },
52 | "resolutions": {
53 | "fast-xml-parser": "4.4.1",
54 | "semver": "7.6.2",
55 | "form-data": "4.0.4"
56 | },
57 | "browserslist": [
58 | "> 1%",
59 | "last 2 versions",
60 | "not dead"
61 | ],
62 | "description": "This application provides a utility to upload first-party data to the Amazon Marketing Cloud",
63 | "main": "babel.config.js",
64 | "directories": {
65 | "doc": "doc",
66 | "test": "../../test"
67 | },
68 | "repository": {
69 | "type": "git",
70 | "url": "git+https://github.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws"
71 | },
72 | "keywords": [],
73 | "author": {
74 | "name": "Amazon Web Services",
75 | "url": "https://aws.amazon.com/solutions"
76 | },
77 | "license": "Apache-2.0",
78 | "bugs": {
79 | "url": "https://github.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws/issues"
80 | },
81 | "homepage": "https://github.com/aws-solutions/amazon-marketing-cloud-uploader-from-aws#readme"
82 | }
83 |
--------------------------------------------------------------------------------
/source/website/public/.well-known/security.txt:
--------------------------------------------------------------------------------
1 | # For security issues related to Amazon Web Services (AWS), please see our security policy
2 | Policy: https://aws.amazon.com/security/vulnerability-reporting/
3 |
4 | # To contact AWS regarding a vulnerability
5 | Contact: mailto:aws-security@amazon.com
6 | Preferred-Languages: en
7 |
8 | # We support PGP encryption
9 | Encryption: https://aws.amazon.com/security/aws-pgp-public-key/
10 |
11 | # This file expires every 365 days
12 | Expires: 2025-05-01T00:00:00z
13 |
14 | # We're hiring - join Amazon Security!
15 | Hiring: https://www.amazon.jobs/en/business_categories/amazon-security
16 |
--------------------------------------------------------------------------------
/source/website/public/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 | Amazon Marketing Cloud uploader from AWS
9 |
10 |
11 |
12 |
13 | We're sorry but this application doesn't work properly without JavaScript enabled. Please enable it to
14 | continue.
15 |
16 |
17 |
18 |
19 |
20 |
--------------------------------------------------------------------------------
/source/website/public/robots.txt:
--------------------------------------------------------------------------------
1 | User-agent: *
2 | Disallow:
3 |
--------------------------------------------------------------------------------
/source/website/src/App.vue:
--------------------------------------------------------------------------------
1 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
22 |
--------------------------------------------------------------------------------
/source/website/src/components/Header.vue:
--------------------------------------------------------------------------------
1 |
5 |
6 |
7 |
8 |
13 |
14 | Amazon Marketing Cloud Uploader from AWS
15 |
16 |
17 |
18 |
22 |
23 |
24 |
27 | Sign Out
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
59 |
60 |
67 |
--------------------------------------------------------------------------------
/source/website/src/components/Sidebar.vue:
--------------------------------------------------------------------------------
1 |
5 |
6 |
7 |
8 |
9 |
10 | Step 1
11 |
16 |
17 | Select file
18 |
19 |
20 | Step 2
21 |
26 |
27 | Select destinations
28 |
29 |
30 | Step 3
31 |
36 |
37 | Define dataset
38 |
39 |
40 | Step 4
41 |
46 |
47 | Define columns
48 |
49 |
50 | Step 5
51 |
56 |
57 | Confirm details
58 |
59 |
60 | Step 6
61 |
66 |
67 | Monitor uploads
68 |
69 |
70 | Settings
71 |
76 |
77 | AMC Instances
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
93 |
94 |
99 |
--------------------------------------------------------------------------------
/source/website/src/components/VoerroTagsInput.css:
--------------------------------------------------------------------------------
1 | /* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. */
2 | /* SPDX-License-Identifier: Apache-2.0 */
3 |
4 | /* The input */
5 | .tags-input {
6 | display: flex;
7 | flex-wrap: wrap;
8 | align-items: center;
9 | }
10 |
11 | .tags-input input {
12 | flex: 1;
13 | background: transparent;
14 | border: none;
15 | }
16 |
17 | .tags-input input:focus {
18 | outline: none;
19 | }
20 |
21 | .tags-input input[type="text"] {
22 | color: #495057;
23 | }
24 |
25 | .tags-input-wrapper-default {
26 | padding: .5em .25em;
27 |
28 | background: #fff;
29 |
30 | border: 1px solid transparent;
31 | border-radius: .25em;
32 | border-color: #dbdbdb;
33 | }
34 |
35 | .tags-input-wrapper-default.active {
36 | border: 1px solid #8bbafe;
37 | box-shadow: 0 0 0 0.2em rgba(13, 110, 253, 0.25);
38 | outline: 0 none;
39 | }
40 |
41 | /* The tag badges & the remove icon */
42 | .tags-input span {
43 | margin-right: 0.3em;
44 | }
45 |
46 | .tags-input-remove {
47 | cursor: pointer;
48 | position: absolute;
49 | display: inline-block;
50 | right: 0.3em;
51 | top: 0.3em;
52 | padding: 0.5em;
53 | overflow: hidden;
54 | }
55 |
56 | .tags-input-remove:focus {
57 | outline: none;
58 | }
59 |
60 | .tags-input-remove:before, .tags-input-remove:after {
61 | content: '';
62 | position: absolute;
63 | width: 75%;
64 | left: 0.15em;
65 | background: #5dc282;
66 |
67 | height: 2px;
68 | margin-top: -1px;
69 | }
70 |
71 | .tags-input-remove:before {
72 | transform: rotate(45deg);
73 | }
74 | .tags-input-remove:after {
75 | transform: rotate(-45deg);
76 | }
77 |
78 | /* Tag badge styles */
79 | .tags-input-badge {
80 | position: relative;
81 | display: inline-block;
82 | padding: 0.25em 0.4em;
83 | font-size: 75%;
84 | font-weight: 700;
85 | line-height: 1;
86 | text-align: center;
87 | white-space: nowrap;
88 | vertical-align: baseline;
89 | border-radius: 0.25em;
90 | overflow: hidden;
91 | text-overflow: ellipsis;
92 | }
93 |
94 | .tags-input-badge-pill {
95 | padding-right: 1.25em;
96 | padding-left: 0.6em;
97 | border-radius: 10em;
98 | }
99 | .tags-input-badge-pill.disabled {
100 | padding-right: 0.6em;
101 | }
102 |
103 | .tags-input-badge-selected-default {
104 | color: #212529;
105 | background-color: #f0f1f2;
106 | }
107 |
108 | /* Typeahead */
109 | .typeahead-hide-btn {
110 | color: #999 !important;
111 | font-style: italic;
112 | }
113 |
114 | /* Typeahead - badges */
115 | .typeahead-badges > span {
116 | margin-top: .5em;
117 | cursor: pointer;
118 | margin-right: 0.3em;
119 | }
120 |
121 | /* Typeahead - dropdown */
122 | .typeahead-dropdown {
123 | list-style-type: none;
124 | padding: 0;
125 | margin: 0;
126 | position: absolute;
127 | width: 100%;
128 | z-index: 1000;
129 | }
130 |
131 | .typeahead-dropdown li {
132 | padding: .25em 1em;
133 | cursor: pointer;
134 | }
135 |
136 | /* Typeahead elements style/theme */
137 | .tags-input-typeahead-item-default {
138 | color: #fff;
139 | background-color: #343a40;
140 | }
141 |
142 | .tags-input-typeahead-item-highlighted-default {
143 | color: #fff;
144 | background-color: #007bff !important;
145 | }
146 |
--------------------------------------------------------------------------------
/source/website/src/main.js:
--------------------------------------------------------------------------------
1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | // SPDX-License-Identifier: Apache-2.0
3 |
4 | console.warn = () => {}
5 |
6 | import * as AmplifyModules from "aws-amplify"
7 | import { Amplify, Auth, Hub } from "aws-amplify"
8 | import { BIconClipboard, BIconExclamationTriangleFill, BIconPlusSquare, BIconQuestionCircleFill, BIconXCircle, BootstrapVue } from 'bootstrap-vue'
9 | import 'bootstrap-vue/dist/bootstrap-vue.css'
10 | import 'bootstrap/dist/css/bootstrap.css'
11 | import { createApp } from 'vue'
12 | import App from './App.vue'
13 | import router from './router.js'
14 | import store from './store'
15 |
16 | const getRuntimeConfig = async () => {
17 | const runtimeConfig = await fetch("/runtimeConfig.json");
18 | return runtimeConfig.json()
19 | };
20 |
21 | const syncCurrentSession = async () => {
22 | try {
23 | const currentUserSession = await Auth.currentAuthenticatedUser({ bypassCache: true });
24 | return currentUserSession;
25 | } catch (error) {
26 | console.log(error)
27 | return {}
28 | }
29 | };
30 |
31 | const checkAMCRedirect = () => {
32 | // Return true if this is an auth redirect from AMC/Amazon Login
33 | // Cognito redirects to / (root)
34 | // Amazon Login redirects to /redirect
35 | const urlParams = new URLSearchParams(window.location.search);
36 | return urlParams.has('code') && urlParams.has('state') && window.location.pathname.endsWith('/redirect');
37 | }
38 |
39 | const updateAndRedirect = () => {
40 | // Rename the code and state parameters to prevent Amplify from processing them
41 | const currentUrl = new URL(window.location.href);
42 | const codeValue = currentUrl.searchParams.get('code');
43 | const stateValue = currentUrl.searchParams.get('state');
44 | if (codeValue) {
45 | currentUrl.searchParams.set('amz-code', codeValue);
46 | currentUrl.searchParams.delete('code');
47 | }
48 | if (stateValue) {
49 | currentUrl.searchParams.set('amz-state', stateValue);
50 | currentUrl.searchParams.delete('state');
51 | }
52 | if (codeValue || stateValue) {
53 | // force a refresh
54 | window.location.href = currentUrl.href;
55 | }
56 | }
57 |
58 | // rename the parameters and refresh the page if we're returning from AMC/Amazon Login
59 | if (checkAMCRedirect()) {
60 | console.log('Redirect from AMC/Amazon Login detected');
61 | updateAndRedirect();
62 | }
63 |
64 | Promise.all([getRuntimeConfig, syncCurrentSession]).then((promiseObj) => {
65 | promiseObj[0]().then(function (json) {
66 | const awsconfig = {
67 | Auth: {
68 | region: json.AWS_REGION,
69 | userPoolId: json.USER_POOL_ID,
70 | userPoolWebClientId: json.USER_POOL_CLIENT_ID,
71 | identityPoolId: json.IDENTITY_POOL_ID,
72 | oauth: {
73 | domain: json.HOSTED_UI_DOMAIN,
74 | scope: ["profile", "openid"],
75 | redirectSignIn: json.COGNITO_CALLBACK_URL,
76 | redirectSignOut: json.COGNITO_LOGOUT_URL,
77 | responseType: "code"
78 | }
79 | },
80 | Storage: {
81 | AWSS3: {
82 | region: json.AWS_REGION
83 | }
84 | },
85 | API: {
86 | endpoints: [
87 | {
88 | name: "amcufa-api",
89 | endpoint: json.API_ENDPOINT,
90 | service: "execute-api",
91 | region: json.AWS_REGION
92 | },
93 | ]
94 | }
95 | };
96 |
97 | console.log("Runtime config: " + JSON.stringify(json));
98 | Amplify.configure(awsconfig);
99 |
100 | promiseObj[1]().then(function (data) {
101 | const app = createApp({
102 | router,
103 | ...App
104 | })
105 |
106 | app.use(AmplifyModules)
107 | app.use(BootstrapVue);
108 | app.use(store);
109 | app.component('BIconClipboard', BIconClipboard)
110 | app.component('BIconQuestionCircleFill', BIconQuestionCircleFill)
111 | app.component('BIconXCircle', BIconXCircle)
112 | app.component('BIconPlusSquare', BIconPlusSquare)
113 | app.component('BIconExclamationTriangleFill', BIconExclamationTriangleFill)
114 |
115 | app.mixin({
116 | data() {
117 | return {
118 | // Distribute runtime configs into every Vue component
119 | AWS_REGION: json.AWS_REGION,
120 | API_ENDPOINT: json.API_ENDPOINT,
121 | DATA_BUCKET_NAME: json.DATA_BUCKET_NAME,
122 | ARTIFACT_BUCKET_NAME: json.ARTIFACT_BUCKET_NAME,
123 | ENCRYPTION_MODE: json.ENCRYPTION_MODE,
124 | USER_POOL_ID: json.USER_POOL_ID
125 | }
126 | },
127 | });
128 |
129 | router.beforeResolve(async (to, from, next) => {
130 | if (to.matched.some(record => record.meta.requiresAuth)) {
131 | try {
132 | await Auth.currentAuthenticatedUser();
133 | next();
134 | } catch (e) {
135 | console.error(e);
136 | Auth.federatedSignIn();
137 | }
138 | }
139 | else {
140 | console.log(next);
141 | next();
142 | }
143 | });
144 |
145 | Hub.listen("auth", (data) => {
146 | console.log(`Hub event ${data.payload.event}`)
147 | switch (data.payload.event) {
148 | case "signIn":
149 | console.log("user signed in");
150 | router.push({ path: "/step6" });
151 | break;
152 | case "signOut":
153 | console.log("user signed out");
154 | Auth.federatedSignIn();
155 | break;
156 | }
157 | });
158 |
159 | app.mount("#app")
160 | })
161 | })
162 | })
163 |
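The bootstrap above passes the two helper function references to Promise.all and then invokes them sequentially inside the callbacks. A minimal sketch, assuming the same getRuntimeConfig and syncCurrentSession helpers defined earlier in this file, of awaiting both calls concurrently instead:

    (async () => {
      // Invoke the helpers so Promise.all awaits real promises; the runtime-config
      // fetch and the session lookup then run concurrently rather than back to back.
      const [json, user] = await Promise.all([getRuntimeConfig(), syncCurrentSession()]);
      console.log("Runtime config region:", json.AWS_REGION, "user:", user);
      // Amplify.configure(...), app creation, and app.mount("#app") would follow here,
      // unchanged from the flow above.
    })();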
--------------------------------------------------------------------------------
/source/website/src/registerServiceWorker.js:
--------------------------------------------------------------------------------
1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | // SPDX-License-Identifier: Apache-2.0
3 |
4 | /* eslint-disable no-console */
5 |
6 | import { register } from 'register-service-worker'
7 |
8 | if (process.env.NODE_ENV === 'production') {
9 | register(`${process.env.BASE_URL}service-worker.js`, {
10 | ready () {
11 | console.log(
12 | 'App is being served from cache by a service worker.\n' +
13 | 'For more details, visit https://goo.gl/AFskqB'
14 | )
15 | },
16 | registered () {
17 | console.log('Service worker has been registered.')
18 | },
19 | cached () {
20 | console.log('Content has been cached for offline use.')
21 | },
22 | updatefound () {
23 | console.log('New content is downloading.')
24 | },
25 | updated () {
26 | console.log('New content is available; please refresh.')
27 | },
28 | offline () {
29 | console.log('No internet connection found. App is running in offline mode.')
30 | },
31 | error (error) {
32 | console.error('Error during service worker registration:', error)
33 | }
34 | })
35 | }
36 |
--------------------------------------------------------------------------------
/source/website/src/router.js:
--------------------------------------------------------------------------------
1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | // SPDX-License-Identifier: Apache-2.0
3 |
4 | import Vue from 'vue'
5 | import VueRouter from 'vue-router'
6 | import Step1 from '@/views/Step1.vue'
7 | import Step2 from '@/views/Step2.vue'
8 | import Step3 from '@/views/Step3.vue'
9 | import Step4 from '@/views/Step4.vue'
10 | import Step5 from '@/views/Step5.vue'
11 | import Step6 from '@/views/Step6.vue'
12 | import Settings from '@/views/Settings.vue'
13 | import Redirect from '@/views/Redirect.vue'
14 |
15 | Vue.use(VueRouter);
16 |
17 | const router = new VueRouter({
18 | mode: 'history',
19 | base: process.env.BASE_URL,
20 | routes: [
21 | {
22 | path: '/step1',
23 | name: 'Step1',
24 | component: Step1,
25 | meta: { requiresAuth: true }
26 | },
27 | {
28 | path: '/step2',
29 | name: 'Step2',
30 | component: Step2,
31 | meta: { requiresAuth: true }
32 | },
33 | {
34 | path: '/step3',
35 | name: 'Step3',
36 | component: Step3,
37 | meta: { requiresAuth: true }
38 | },
39 | {
40 | path: '/step4',
41 | name: 'Step4',
42 | component: Step4,
43 | meta: { requiresAuth: true }
44 | },
45 | {
46 | path: '/step5',
47 | name: 'Step5',
48 | component: Step5,
49 | meta: { requiresAuth: true }
50 | },
51 | {
52 | path: '/step6',
53 | alias: '/',
54 | name: 'Step6',
55 | component: Step6,
56 | meta: { requiresAuth: true }
57 | },
58 | {
59 | path: "/settings",
60 | name: "Settings",
61 | component: Settings,
62 | meta: {requiresAuth: true}
63 | },
64 | {
65 | path: "/redirect",
66 | name: "Redirect",
67 | component: Redirect,
68 | meta: {requiresAuth: true}
69 | }
70 | ]
71 | });
72 |
73 | export default router;
74 |
--------------------------------------------------------------------------------
/source/website/src/store/actions.js:
--------------------------------------------------------------------------------
1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | // SPDX-License-Identifier: Apache-2.0
3 |
4 | export default {
5 | }
6 |
--------------------------------------------------------------------------------
/source/website/src/store/index.js:
--------------------------------------------------------------------------------
1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | // SPDX-License-Identifier: Apache-2.0
3 |
4 | import { createStore as _createStore } from 'vuex'
5 | import state from './state'
6 | import mutations from './mutations'
7 | import actions from './actions'
8 |
     9 | export default _createStore({
10 | state,
11 | mutations,
12 | actions
13 | })
14 |
--------------------------------------------------------------------------------
/source/website/src/store/mutations.js:
--------------------------------------------------------------------------------
1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | // SPDX-License-Identifier: Apache-2.0
3 |
4 | export default {
5 | updateDeletedColumns (state, value) {
6 | state.deleted_columns = value
7 | },
8 | updateDatasetDefinition (state, value) {
9 | state.dataset_definition = value
10 | },
11 | saveStep3FormInput (state, value) {
12 | state.step3_form_input = value
13 | },
14 | updateS3key (state, value) {
15 | state.s3key = value
16 | },
17 | updateSelectedDataset (state, value) {
18 | state.selected_dataset = value
19 | },
20 | updateSelectedAmcInstances (state, value) {
21 | state.amc_instances_selected = value
22 | },
23 | updateAmcMonitor (state, value) {
24 | state.amc_monitor = value
25 | },
26 | updateAmcSelectorVisibility (state, value) {
27 | state.amc_selector_visible_state = value
28 | }
29 | }
30 |
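These mutations would typically be committed from the wizard views through the injected store; a brief usage sketch (the key and payload values below are illustrative examples, not taken from the views):

    // Inside a component method, commit a mutation by name with its payload.
    this.$store.commit('updateS3key', 'uploads/sample_fact.json')       // example object key
    this.$store.commit('updateSelectedAmcInstances', selectedInstances) // hypothetical local variable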
--------------------------------------------------------------------------------
/source/website/src/store/state.js:
--------------------------------------------------------------------------------
1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | // SPDX-License-Identifier: Apache-2.0
3 |
4 | export default {
5 | s3key: '',
6 | deleted_columns: [],
7 | dataset_definition: {},
8 | step3_form_input: {},
9 | selected_dataset: null,
10 | amc_instances_selected: [],
11 | amc_monitor: '',
12 | amc_selector_visible_state: true
13 | }
14 |
--------------------------------------------------------------------------------
/source/website/src/views/Redirect.vue:
--------------------------------------------------------------------------------
1 |
5 |
6 |
7 |
8 |
42 |
43 |
44 |
168 |
--------------------------------------------------------------------------------
/source/website/src/views/Step1.vue:
--------------------------------------------------------------------------------
1 |
5 |
6 |
7 |
8 |
100 |
101 |
102 |
103 |
182 |
--------------------------------------------------------------------------------
/source/website/src/views/Step2.vue:
--------------------------------------------------------------------------------
1 |
5 |
6 |
7 |
8 |
169 |
170 |
171 |
172 |
308 |
309 |
314 |
--------------------------------------------------------------------------------
/source/website/vue.config.js:
--------------------------------------------------------------------------------
1 | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | // SPDX-License-Identifier: Apache-2.0
3 |
4 | const { SubresourceIntegrityPlugin } = require('webpack-subresource-integrity');
5 |
6 | module.exports = {
7 | devServer: {
8 | client: {
9 | overlay: false
10 | }
11 | },
12 | chainWebpack: (config) => {
13 | config.resolve.alias.set('vue', '@vue/compat')
14 |
15 | config.module
16 | .rule('vue')
17 | .use('vue-loader')
18 | .tap((options) => {
19 | return {
20 | ...options,
21 | compilerOptions: {
22 | compatConfig: {
23 | MODE: 2
24 | }
25 | }
26 | }
27 | })
28 | }
29 | }
30 |
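SubresourceIntegrityPlugin is required at the top of this file but does not appear to be referenced in the exported config, so this file as written would not emit SRI hashes. A minimal sketch of how the plugin is typically enabled in a Vue CLI project, assuming webpack-subresource-integrity v5 (the sha384 hash choice is an illustrative assumption):

    // Merged into the exported config alongside devServer/chainWebpack:
    configureWebpack: {
      // Async chunks need crossorigin loading for their integrity attributes to be verified.
      output: { crossOriginLoading: 'anonymous' },
      plugins: [new SubresourceIntegrityPlugin({ hashFuncNames: ['sha384'] })]
    },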
--------------------------------------------------------------------------------