├── .devcontainer └── devcontainer.json ├── .github ├── solutionid_validator.sh └── workflows │ └── maintainer_workflows.yml ├── .gitignore ├── CODEOWNERS ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── build.py ├── business-logic ├── analytics-processing │ ├── app.sql │ ├── build.py │ ├── index.js │ ├── lib │ │ ├── cloudwatch.js │ │ └── process.js │ └── package.json ├── api │ ├── admin │ │ ├── build.py │ │ ├── index.js │ │ ├── lib │ │ │ ├── admin.js │ │ │ └── app.js │ │ └── package.json │ ├── api-definitions │ │ ├── game-analytics-api copy.yaml │ │ └── game-analytics-api.yaml │ ├── build.py │ └── lambda-authorizer │ │ ├── build.py │ │ ├── index.js │ │ └── package.json ├── build.py ├── data-lake │ ├── build.py │ ├── glue-partition-creator │ │ ├── build.py │ │ ├── index.js │ │ └── package.json │ └── glue-scripts │ │ └── game_events_etl.py ├── events-processing │ ├── build.py │ ├── config │ │ └── event_schema.json │ ├── index.js │ ├── lib │ │ ├── event.js │ │ └── index.js │ └── package.json ├── publish-data │ ├── handler.py │ └── requirements.txt └── solution-helper │ ├── build.py │ ├── index.js │ ├── lib │ ├── athena-helper.js │ ├── cloudwatch-helper.js │ ├── dynamodb-helper.js │ ├── glue-helper.js │ ├── kinesis-helper.js │ ├── lambda-helper.js │ ├── metrics-helper.js │ └── s3-helper.js │ └── package.json ├── docs └── architecture.png ├── infrastructure ├── .gitignore ├── .npmignore ├── README.md ├── build.py ├── cdk.json ├── config.yaml.TEMPLATE ├── package.json ├── src │ ├── app-stack.ts │ ├── app.ts │ ├── constructs │ │ ├── api-construct.ts │ │ ├── data-lake-construct.ts │ │ ├── lambda-construct.ts │ │ ├── metrics-construct.ts │ │ ├── streaming-analytics.ts │ │ └── streaming-ingestion-construct.ts │ ├── helpers │ │ ├── config-types.ts │ │ └── stack-config-loader.ts │ └── pipeline-stack.ts └── tsconfig.json ├── package.json └── tsconfig.json /.devcontainer/devcontainer.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "name": "AWS CDK", 3 | "image": "mcr.microsoft.com/devcontainers/python:1-3.11-bullseye", 4 | "remoteUser": "vscode", 5 | "features": { 6 | "ghcr.io/devcontainers/features/aws-cli:1": { 7 | "version": "latest" 8 | }, 9 | "ghcr.io/devcontainers/features/docker-in-docker:2": { 10 | "moby": true, 11 | "installDockerBuildx": true, 12 | "version": "latest", 13 | "dockerDashComposeVersion": "v1" 14 | }, 15 | "ghcr.io/devcontainers-contrib/features/aws-cdk:2": { 16 | "version": "2.92" 17 | } 18 | }, 19 | "customizations": { 20 | "vscode": { 21 | "extensions": [ 22 | "ms-python.python", 23 | "streetsidesoftware.code-spell-checker", 24 | "amazonwebservices.aws-toolkit-vscode", 25 | "AmazonWebServices.amazon-q-vscode" 26 | ] 27 | } 28 | } 29 | } -------------------------------------------------------------------------------- /.github/solutionid_validator.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | #set -e 3 | 4 | echo "checking solution id $1" 5 | echo "grep -nr --exclude-dir='.github' "$1" ./.." 6 | result=$(grep -nr --exclude-dir='.github' "$1" ./..) 7 | if [ $? 
-eq 0 ] 8 | then 9 | echo "Solution ID $1 found\n" 10 | echo "$result" 11 | exit 0 12 | else 13 | echo "Solution ID $1 not found" 14 | exit 1 15 | fi 16 | 17 | export result 18 | -------------------------------------------------------------------------------- /.github/workflows/maintainer_workflows.yml: -------------------------------------------------------------------------------- 1 | # Workflows managed by aws-solutions-library-samples maintainers 2 | name: Maintainer Workflows 3 | on: 4 | # Triggers the workflow on push or pull request events but only for the "main" branch 5 | push: 6 | branches: [ "main" ] 7 | pull_request: 8 | branches: [ "main" ] 9 | types: [opened, reopened, edited] 10 | 11 | jobs: 12 | CheckSolutionId: 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: actions/checkout@v4 16 | - name: Run solutionid validator 17 | run: | 18 | chmod u+x ./.github/solutionid_validator.sh 19 | ./.github/solutionid_validator.sh ${{ vars.SOLUTIONID }} -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Environment 2 | *.swp 3 | # package-lock.json 4 | __pycache__ 5 | .pytest_cache 6 | .venv 7 | *.egg-info 8 | 9 | # CDK asset staging directory 10 | .cdk.staging 11 | cdk.out 12 | 13 | # macOS 14 | .DS_Store 15 | 16 | # PyCharm 17 | .idea/ 18 | 19 | # pyenv 20 | .python-version 21 | 22 | # VSCode 23 | .vscode 24 | .history/ 25 | *.vsix 26 | 27 | node_modules/ 28 | -------------------------------------------------------------------------------- /CODEOWNERS: -------------------------------------------------------------------------------- 1 | CODEOWNERS @aws-solutions-library-samples/maintainers 2 | /.github/workflows/maintainer_workflows.yml @aws-solutions-library-samples/maintainers 3 | /.github/solutionid_validator.sh @aws-solutions-library-samples/maintainers 4 | 
-------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | ## Code of Conduct 2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 4 | opensource-codeofconduct@amazon.com with any additional questions or comments. 5 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing Guidelines 2 | 3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional 4 | documentation, we greatly value feedback and contributions from our community. 5 | 6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary 7 | information to effectively respond to your bug report or contribution. 8 | 9 | 10 | ## Reporting Bugs/Feature Requests 11 | 12 | We welcome you to use the GitHub issue tracker to report bugs or suggest features. 13 | 14 | When filing an issue, please check existing open, or recently closed, issues to make sure somebody else hasn't already 15 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful: 16 | 17 | * A reproducible test case or series of steps 18 | * The version of our code being used 19 | * Any modifications you've made relevant to the bug 20 | * Anything unusual about your environment or deployment 21 | 22 | 23 | ## Contributing via Pull Requests 24 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that: 25 | 26 | 1. You are working against the latest source on the *main* branch. 27 | 2. 
You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already. 28 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted. 29 | 30 | To send us a pull request, please: 31 | 32 | 1. Fork the repository. 33 | 2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change. 34 | 3. Ensure local tests pass. 35 | 4. Commit to your fork using clear commit messages. 36 | 5. Send us a pull request, answering any default questions in the pull request interface. 37 | 6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation. 38 | 39 | GitHub provides additional document on [forking a repository](https://help.github.com/articles/fork-a-repo/) and 40 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/). 41 | 42 | 43 | ## Finding contributions to work on 44 | Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any 'help wanted' issues is a great place to start. 45 | 46 | 47 | ## Code of Conduct 48 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 49 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 50 | opensource-codeofconduct@amazon.com with any additional questions or comments. 51 | 52 | 53 | ## Security issue notifications 54 | If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue. 
55 | 56 | 57 | ## Licensing 58 | 59 | See the [LICENSE](LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution. 60 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT No Attribution 2 | 3 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of 6 | this software and associated documentation files (the "Software"), to deal in 7 | the Software without restriction, including without limitation the rights to 8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software is furnished to do so. 10 | 11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 12 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 13 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 14 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 15 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 16 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | 18 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Game Analytics Pipeline on AWS 2 | 3 | >[!IMPORTANT] 4 | >This Guidance requires the use of AWS [CodeCommit](https://docs.aws.amazon.com/codecommit/latest/userguide/welcome.html), which is no longer available to new customers. Existing customers of AWS CodeCommit can continue using and deploying this Guidance as normal. 
5 | 6 | 7 | ## Table of Content 8 | - [Game Analytics Pipeline on AWS](#game-analytics-pipeline-on-aws) 9 | - [Table of Content](#table-of-content) 10 | - [Overview](#overview) 11 | - [Prerequisites](#prerequisites) 12 | - [Sample Code Configuration and Customization](#sample-code-configuration-and-customization) 13 | - [Configuration Setup](#configuration-setup) 14 | - [Custom Settings](#custom-settings) 15 | - [Sample Code Deployment](#sample-code-deployment) 16 | - [Deployed Infrastructure](#deployed-infrastructure) 17 | - [CI/CD Toolchain](#cicd-toolchain) 18 | - [Next Steps](#next-steps) 19 | - [Cleanup](#cleanup) 20 | - [Security](#security) 21 | - [License](#license) 22 | 23 | 24 | ## Overview 25 | 26 | The games industry is increasing adoption of the Games-as-a-Service operating model, where games have become more like a service than a product, and recurring revenue is frequently generated through in-app purchases, subscriptions, and other techniques. With this change, it is critical to develop a deeper understanding of how players use the features of games and related services. This understanding allows game developers to continually adapt, and make the necessary changes to keep players engaged. 27 | 28 | The Game Analytics Pipeline guidance helps game developers to apply a flexible, and scalable DataOps methodology to their games. Allowing them to continuously integrate, and continuously deploy (CI/CD) a scalable serverless data pipeline for ingesting, storing, and analyzing telemetry data generated from games, and services. The guidance supports streaming ingestion of data, allowing users to gain critical insights from their games, and other applications in near real-time, allowing them to focus on expanding, and improving game experience almost immediately, instead of managing the underlying infrastructure operations. 
Since the guidance has been codified as a CDK application, game developers can determine the best modules, or components that fit their use case, allowing them to test, and QA the best architecture before deploying into production. This modular system allows for additional AWS capabilities, such as AI/ML models, to be integrated into the architecture in order to further support real-time decision making, and automated LiveOps using AIOps, to further enhance player engagement. Essentially allowing developers to focus on expanding game functionality, rather than managing the underlying infrastructure operations. 29 | 30 | ![Architecture](./docs/architecture.png) 31 | 32 | ## Prerequisites 33 | 34 | Before deploying the sample code, ensure that the following required tools have been installed: 35 | 36 | - **[GitHub Account](https://docs.github.com/en/get-started/start-your-journey/creating-an-account-on-github)** 37 | - **[Visual Studio Code](https://code.visualstudio.com/Download)** 38 | - **[Docker Desktop (local)](https://www.docker.com/products/docker-desktop/)** 39 | - **AWS Cloud Development Kit (CDK) 2.92** 40 | - **Python >=3.8** 41 | - **NodeJS >= 20.0.0** 42 | 43 | >__NOTE:__ A Visual Studio Code [dev container](https://docs.github.com/en/codespaces/setting-up-your-project-for-codespaces/adding-a-dev-container-configuration/introduction-to-dev-containers) configuration has been provided for you. This image contains the necessary *Python*, *NodeJS*, and the *AWS CDK* versions needed to implement this guidance. It is **recommended** that you use the pre-configured [environment](https://code.visualstudio.com/docs/devcontainers/containers) as your development environment. 44 | 45 | ## Sample Code Configuration and Customization 46 | 47 | Before deploying the sample code, it needs to be customized to suit your specific usage requirements.
Guidance configuration, and customization, is managed using a `config.yaml` file, located in the `infrastructure` folder of the repository. 48 | 49 | ### Configuration Setup 50 | 51 | The following steps will walk you through how to customize the sample code configuration to suit your usage requirements: 52 | 53 | 1. Log into your GitHub account, and [fork this repository](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/fork-a-repo) into your GitHub account. 54 | 55 | 2. Follow the instructions on how to [Create a connection to GitHub](https://docs.aws.amazon.com/dtconsole/latest/userguide/connections-create-github.html#connections-create-github-console), to connect AWS CodePipeline to the forked copy of this repository. Once the connection has been created, make a note of the Amazon Resource Name (ARN) for the connection. 56 | 57 | 3. A configuration template file, called `config.yaml.TEMPLATE` has been provided as a reference for use case customizations. Using the provided Visual Studio Code devcontainer environment, run the following command to create a usable copy of this file: 58 | 59 | ```bash 60 | cp ./infrastructure/config.yaml.TEMPLATE ./infrastructure/config.yaml 61 | ``` 62 | 63 | 4. Open the `./infrastructure/config.yaml` file for editing. 64 | 65 | ### Custom Settings 66 | 67 | The following settings can be adjusted to suit your use case: 68 | 69 | - `WORKLOAD_NAME` 70 | - *Description:* The name of the workload that will be deployed. This name will be used as a prefix for any component deployed into your AWS Account. 71 | - *Type:* String 72 | - *Example:* `"GameAnalyticsPipeline"` 73 | - `CDK_VERSION` 74 | - *Description:* The version of the CDK installed in your environment. To see the current version of the CDK, run the `cdk --version` command. The guidance has been tested using CDK version `2.92.0` of the CDK.
If you are using a different version of the CDK, ensure that this version is also reflected in the `./infrastructure/package.json` file. 75 | - *Type:* String 76 | - *Example:* `"2.92.0"` 77 | - `NODE_VERSION` 78 | - *Description:* The version of NodeJS being used. The default value is set to `"latest"`, and should only be changed if you require a specific version. 79 | - *Type:* String 80 | - *Example:* `"latest"` 81 | - `PYTHON_VESION` 82 | - *Description:* The version of Python being used. The default value is set to `"3.8"`, and should only be changed if you require a specific version. 83 | - *Type:* String 84 | - *Example:* `"3.8"` 85 | - `DEV_MODE` 86 | - *Description:* Whether or not to enable developer mode. This mode will ensure synthetic data, and shorter retention times are enabled. It is recommended that you set the value to `true` when first deploying the sample code for testing, as this setting will enable S3 versioning, and won't delete S3 buckets on teardown. This setting can be changed at a later time, and the infrastructure re-deployed through CI/CD. 87 | - *Type:* Boolean 88 | - *Example:* `true` 89 | - `ENABLE_STREAMING_ANALYTICS` 90 | - *Description:* Whether or not to enable the [Kinesis Data Analytics](https://aws.amazon.com/kinesis/data-analytics/) component/module of the guidance. It is recommended to set this value to `true` when first deploying this sample code for testing, as this setting will allow you to verify if streaming analytics is required for your use case. This setting can be changed at a later time, and the guidance re-deployed through CI/CD. 91 | - *Type:* Boolean 92 | - *Example:* `true` 93 | - `STREAM_SHARD_COUNT` 94 | - *Description:* The number of Kinesis shards, or sequence of data records, to use for the data stream. The default value has been set to `1` for initial deployment, and testing purposes. This value can be changed at a later time, and the guidance re-deployed through CI/CD.
For information about determining the shards required for your use case, refer to [Amazon Kinesis Data Streams Terminology and Concepts](https://docs.aws.amazon.com/streams/latest/dev/key-concepts.html) in the *Amazon Kinesis Data Streams Developer Guide*. 95 | - *Type:* Integer 96 | - *Example:* `1` 97 | - `CODECOMMIT_REPO` 98 | - *Description:* The name of the [AWS CodeCommit](https://aws.amazon.com/codecommit/) repository used as source control for the codified infrastructure, and CI/CD pipeline. 99 | - *Type:* String 100 | - *Example:* `"game-analytics-pipeline"` 101 | - `RAW_EVENTS_PREFIX` 102 | - *Description:* The prefix for new/raw data files stored in S3. 103 | - *Type:* String 104 | - *Example:* `"raw_events"` 105 | - `PROCESSED_EVENTS_PREFIX` 106 | - *Description:* The prefix for processed data files stored in S3. 107 | - *Type:* String 108 | - *Example:* `"processed_events"` 109 | - `RAW_EVENTS_TABLE` 110 | - *Description:* The name of the [AWS Glue table](https://docs.aws.amazon.com/glue/latest/dg/tables-described.html) within which all new/raw data is cataloged. 111 | - *Type:* String 112 | - *Example:* `"raw_events"` 113 | - `GLUE_TMP_PREFIX` 114 | - *Description:* The name of the temporary data store for AWS Glue. 115 | - *Type:* String 116 | - *Example:* `"glueetl-tmp"` 117 | - `S3_BACKUP_MODE` 118 | - *Description:* Whether or not to enable [Kinesis Data Firehose](https://aws.amazon.com/kinesis/data-firehose/) to send a backup of new/raw data to S3. The default value has been set to `false` for initial deployment, and testing purposes. This value can be changed at a later time, and the guidance re-deployed through CI/CD. 119 | - *Type:* Boolean 120 | - *Example:* `false` 121 | - `CLOUDWATCH_RETENTION_DAYS` 122 | - *Description:* The default number of days in which [Amazon CloudWatch](https://aws.amazon.com/cloudwatch/) stores all the logs. The default value has been set to `30` for initial deployment, and testing purposes.
This value can be changed at a later time, and the guidance re-deployed through CI/CD. 123 | - *Type:* Integer 124 | - *Example:* `30` 125 | - `API_STAGE_NAME` 126 | - *Description:* The name of the REST API [stage](https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-stages.html) for the [Amazon API Gateway](https://aws.amazon.com/api-gateway/) configuration endpoint for sending telemetry data to the pipeline. This provides an integration option for applications that cannot integrate with Amazon Kinesis directly. The API also provides configuration endpoints for admins to use for registering their game applications with the guidance, and generating API keys for developers to use when sending events to the REST API. The default value is set to `live`. 127 | - *Type:* String 128 | - *Example:* `"live"` 129 | - `EMAIL_ADDRESS` 130 | - *Description:* The email address to receive operational notifications, and delivered by CloudWatch. 131 | - *Type:* String 132 | - *Example:* `"user@example.com"` 133 | - `GITHUB_USERNAME` 134 | - *Description:* The user name for the Github account, into which the guidance has been forked. 135 | - *Type:* String 136 | - `GITHUB_REPO_NAME` 137 | - *Description:* The repository name of the fork in your GitHub account. 138 | - *Type:* String 139 | - *Example:* `"guidance-for-game-analytics-pipeline-on-aws"` 140 | - `CONNECTION_ARN` 141 | - *Description:* The ARN for the GitHub connection, created during the [Configuration Setup](#configuration-setup) section. 142 | - *Type* String 143 | - *Example:* `"arn:aws:codeconnections:us-east-1:123456789123:connection/6506b29d-429e-4bf3-8ab4-78cb2fc011b3"` 144 | - `accounts` 145 | - *Description:* Leverages CDK Cross-account, Cross-region capabilities for deploying separate CI/CD pipeline stages to separate AWS Accounts, AWS Regions. 
For more information on Cross-account CI/CD pipelines, using the CDK, refer to the [Building a Cross-account CI/CD Pipeline](https://catalog.us-east-1.prod.workshops.aws/workshops/00bc829e-fd7c-4204-9da1-faea3cf8bd88/en-US/introduction) workshop. 146 | - *Example:* 147 | ```yaml 148 | accounts: 149 | - NAME: "QA" 150 | ACCOUNT: "" 151 | REGION: "" 152 | - NAME: "PROD" 153 | ACCOUNT: "" 154 | REGION: "" 155 | ``` 156 | >__NOTE:__ It is recommended that you use the same AWS Account, as well as the same AWS Region, for both the `QA`, and `PROD` stages, when first deploying the guidance. 157 | 158 | ## Sample Code Deployment 159 | 160 | Once you have added your own custom configuration settings, and saved the `config.yaml` file, the following steps can be used to deploy the CI/CD pipeline: 161 | 162 | 1. Build the sample code dependencies, by running the following command: 163 | ```bash 164 | npm run build 165 | ``` 166 | 2. Bootstrap the sample code, by running the following command: 167 | ```bash 168 | npm run deploy.bootstrap 169 | ``` 170 | 3. Deploy the sample code, by running the following command: 171 | ```bash 172 | npm run deploy 173 | ``` 174 | 175 | After the sample code has been deployed, two CloudFormation stacks are created within your AWS Account, and AWS Region: 176 | 177 | 1. `PROD-`: The deployed version of the guidance infrastructure. 178 | 2. `-Toolchain`: The CI/CD Pipeline for the guidance. 179 | 180 | ### Deployed Infrastructure 181 | 182 | The stack hosts the deployed production version of the AWS resources for you to validate, and further optimize the guidance for your use case. 183 | 184 | ### CI/CD Toolchain 185 | 186 | Once the deployed infrastructure has been validated, or further optimized for your use case, you can trigger the continuous deployment, by committing any updated source code into the newly created CodeCommit repository, using the following steps: 187 | 188 | 1.
Copy the URL for cloning the CodeCommit repository that you specified in the `config.yaml` file. See the **View repository details (console)** section of the [AWS CodeCommit User Guide](https://docs.aws.amazon.com/codecommit/latest/userguide/how-to-view-repository-details.html) for more information on how to view the *Clone URL* for the repository. 189 | 2. Create a new Git repository, by running the following command: 190 | ```bash 191 | rm -rf .git 192 | git init --initial-branch=main 193 | ``` 194 | 3. Add the CodeCommit repository as the origin, using the following command: 195 | ```bash 196 | git remote add origin 197 | ``` 198 | 4. Commit the code to trigger the CI/CD process, by running the following commands: 199 | ```bash 200 | git add -A 201 | git commit -m "Initial commit" 202 | git push --set-upstream origin 203 | ``` 204 | 205 | ## Next Steps 206 | 207 | Make any code changes to subsequently optimize the guidance for your use case. Committing these changes will trigger a subsequent continuous integration, and deployment of the deployed production stack, `PROD-`. 208 | 209 | ## Cleanup 210 | 211 | To clean up any of the deployed resources, you can either delete the stack through the AWS CloudFormation console, or run the `cdk destroy` command. 212 | 213 | >__NOTE:__ Deleting the deployed resources will not delete the Amazon S3 bucket, in order to protect any game data already ingested, and stored with the data lake. The Amazon S3 Bucket, and data, can be deleted from Amazon S3 using the Amazon S3 console, AWS SDKs, AWS Command Line Interface (AWS CLI), or REST API. See the [Deleting Amazon S3 objects](https://docs.aws.amazon.com/AmazonS3/latest/userguide/DeletingObjects.html) section of the user guide for more information. 214 | 215 | --- 216 | # Security 217 | 218 | See [CONTRIBUTING](CONTRIBUTING.md#security-issue-notifications) for more information. 219 | 220 | --- 221 | # License 222 | 223 | This library is licensed under the MIT-0 License.
See the LICENSE file. 224 | 225 | -------------------------------------------------------------------------------- /build.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2021 Amazon.com, Inc. and its affiliates. All Rights Reserved. 3 | 4 | Licensed under the Amazon Software License (the "License"). 5 | You may not use this file except in compliance with the License. 6 | A copy of the License is located at 7 | 8 | http://aws.amazon.com/asl/ 9 | 10 | or in the "license" file accompanying this file. This file is distributed 11 | on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 12 | express or implied. See the License for the specific language governing 13 | permissions and limitations under the License. 14 | """ 15 | 16 | import os 17 | import subprocess 18 | import sys 19 | import argparse 20 | 21 | 22 | def exit_on_failure(exit_code, msg): 23 | if exit_code != 0: 24 | print(msg) 25 | exit(exit_code) 26 | 27 | 28 | def change_dir_with_return(dir): 29 | current_dir = os.getcwd() 30 | os.chdir(dir) 31 | return lambda: os.chdir(current_dir) 32 | 33 | 34 | def build_infrastructure(): 35 | 36 | return_dir = change_dir_with_return("./infrastructure") 37 | 38 | cmd = [sys.executable, "build.py"] 39 | proc = subprocess.run(cmd, stderr=subprocess.STDOUT, shell=False) 40 | exit_on_failure(proc.returncode, "Infrastructure build failed") 41 | 42 | return_dir() 43 | 44 | 45 | def build_web_app(): 46 | 47 | return_dir = change_dir_with_return("./web-app") 48 | cmd = [sys.executable, "build.py"] 49 | proc = subprocess.run(cmd, stderr=subprocess.STDOUT, shell=False) 50 | exit_on_failure(proc.returncode, "Web app build failed") 51 | 52 | return_dir() 53 | 54 | 55 | def build_logic(): 56 | 57 | return_dir = change_dir_with_return("./business-logic") 58 | 59 | cmd = [sys.executable, "build.py"] 60 | proc = subprocess.run(cmd, stderr=subprocess.STDOUT, shell=False) 61 | exit_on_failure(proc.returncode, 
"Business Logic build failed") 62 | 63 | return_dir() 64 | 65 | 66 | def main(): 67 | parser = argparse.ArgumentParser( 68 | description="Builds parts or all of the solution. If no arguments are passed then all builds are run" 69 | ) 70 | parser.add_argument("--infrastructure", 71 | action="store_true", help="builds infrastructure") 72 | parser.add_argument("--business_logic", 73 | action="store_true", help="builds business logic") 74 | args = parser.parse_args() 75 | 76 | if len(sys.argv) == 1: 77 | # build_web_app() 78 | build_logic() 79 | build_infrastructure() 80 | # needs to be last to ensure the dependencies are built before the CDK deployment can build/run 81 | else: 82 | if args.business_logic: 83 | build_logic() 84 | if args.infrastructure: 85 | build_infrastructure() 86 | 87 | 88 | if __name__ == "__main__": 89 | main() 90 | -------------------------------------------------------------------------------- /business-logic/analytics-processing/app.sql: -------------------------------------------------------------------------------- 1 | /* 2 | This application generates real-time metrics that are processed by Lambda. 3 | Query outputs should adhere to the schema defined in DESTINATION_STREAM required by the Lambda function that processes output. 4 | Additional in-application streams can be pumped into the DESTINATION_STREAM table for consumption and processing by Lambda. 5 | Refer to the Game Analytics Pipeline Developer Guide for more information. 
6 | */ 7 | 8 | CREATE STREAM "DESTINATION_STREAM"( 9 | METRIC_NAME VARCHAR(1024), 10 | METRIC_TIMESTAMP BIGINT, 11 | METRIC_UNIT_VALUE_INT BIGINT, 12 | METRIC_UNIT VARCHAR(1024), 13 | DIMENSION_APPLICATION_ID VARCHAR(1024), 14 | DIMENSION_APP_VERSION VARCHAR(1024), 15 | DIMENSION_COUNTRY_ID VARCHAR(1024), 16 | DIMENSION_CURRENCY_TYPE VARCHAR (1024), 17 | DIMENSION_SPELL_ID VARCHAR (1024), 18 | DIMENSION_MISSION_ID VARCHAR (1024), 19 | DIMENSION_ITEM_ID VARCHAR (1024), 20 | OUTPUT_TYPE VARCHAR(1024)); 21 | 22 | -- Total Events 23 | -- Count of Total Events within period 24 | CREATE OR REPLACE PUMP "TOTAL_EVENTS_PUMP" AS 25 | INSERT INTO "DESTINATION_STREAM" (METRIC_NAME, METRIC_TIMESTAMP, METRIC_UNIT_VALUE_INT, METRIC_UNIT, DIMENSION_APPLICATION_ID, DIMENSION_APP_VERSION, OUTPUT_TYPE) 26 | SELECT STREAM 'TotalEvents', UNIX_TIMESTAMP(TIME_WINDOW), COUNT(distinct_stream.event_id) AS unique_count, 'Count', distinct_stream.application_id, distinct_stream.app_version, 'metrics' 27 | FROM ( 28 | SELECT STREAM DISTINCT 29 | rowtime as window_time, 30 | "AnalyticsApp_001"."event_id" as event_id, 31 | "AnalyticsApp_001"."application_id" as application_id, 32 | "AnalyticsApp_001"."app_version" as app_version, 33 | STEP("AnalyticsApp_001".rowtime BY INTERVAL '1' MINUTE) as TIME_WINDOW 34 | FROM "AnalyticsApp_001" 35 | ) as distinct_stream 36 | GROUP BY 37 | application_id, 38 | app_version, 39 | TIME_WINDOW, 40 | STEP(distinct_stream.window_time BY INTERVAL '1' MINUTE); 41 | 42 | -- Total Logins 43 | -- Count of logins within period 44 | CREATE OR REPLACE PUMP "LOGIN_PUMP" AS 45 | INSERT INTO "DESTINATION_STREAM" (METRIC_NAME, METRIC_TIMESTAMP, METRIC_UNIT_VALUE_INT, METRIC_UNIT, DIMENSION_APPLICATION_ID, DIMENSION_APP_VERSION, OUTPUT_TYPE) 46 | SELECT STREAM 'TotalLogins', UNIX_TIMESTAMP(TIME_WINDOW), COUNT(distinct_stream.login_count) AS unique_count, 'Count', distinct_stream.application_id, distinct_stream.app_version, 'metrics' 47 | FROM ( 48 | SELECT STREAM DISTINCT 49 | 
rowtime as window_time, 50 | "AnalyticsApp_001"."event_id" as login_count, 51 | "AnalyticsApp_001"."application_id" as application_id, 52 | "AnalyticsApp_001"."app_version" as app_version, 53 | STEP("AnalyticsApp_001".rowtime BY INTERVAL '1' MINUTE) as TIME_WINDOW 54 | FROM "AnalyticsApp_001" 55 | WHERE "AnalyticsApp_001"."event_type" = 'login' 56 | ) as distinct_stream 57 | GROUP BY 58 | application_id, 59 | app_version, 60 | TIME_WINDOW, 61 | STEP(distinct_stream.window_time BY INTERVAL '1' MINUTE); 62 | 63 | -- Knockouts By Spells 64 | -- Get the number of knockouts by each spell used in a knockout in the period 65 | CREATE OR REPLACE PUMP "KNOCKOUTS_BY_SPELL_PUMP" AS 66 | INSERT INTO "DESTINATION_STREAM" (METRIC_NAME, METRIC_TIMESTAMP, METRIC_UNIT_VALUE_INT, METRIC_UNIT, DIMENSION_SPELL_ID, DIMENSION_APPLICATION_ID, DIMENSION_APP_VERSION, OUTPUT_TYPE) 67 | SELECT STREAM 'KnockoutsBySpell', UNIX_TIMESTAMP(TIME_WINDOW), SPELL_COUNT, 'Count', SPELL_ID, application_id, app_version, 'metrics' 68 | FROM ( 69 | SELECT STREAM 70 | events."spell_id" as SPELL_ID, 71 | events."application_id" as application_id, 72 | events."app_version" as app_version, 73 | count(*) as SPELL_COUNT, 74 | STEP(events.rowtime BY INTERVAL '1' MINUTE) as TIME_WINDOW 75 | FROM "AnalyticsApp_001" events 76 | WHERE events."spell_id" is not NULL 77 | AND events."event_type" = 'user_knockout' 78 | GROUP BY 79 | STEP (events.ROWTIME BY INTERVAL '1' MINUTE), 80 | events."spell_id", 81 | events."application_id", 82 | events."app_version" 83 | HAVING count(*) > 1 84 | ORDER BY STEP (events.ROWTIME BY INTERVAL '1' MINUTE), SPELL_COUNT desc 85 | ); 86 | 87 | -- Purchases 88 | -- Get all purchases grouped by country over the period 89 | CREATE OR REPLACE PUMP "PURCHASES_PER_CURRENCY_PUMP" AS 90 | INSERT INTO "DESTINATION_STREAM" (METRIC_NAME, METRIC_TIMESTAMP, METRIC_UNIT_VALUE_INT, METRIC_UNIT, DIMENSION_CURRENCY_TYPE, DIMENSION_APPLICATION_ID, DIMENSION_APP_VERSION, OUTPUT_TYPE) 91 | SELECT 'Purchases', 
UNIX_TIMESTAMP(TIME_WINDOW), PURCHASE_COUNT, 'Count', CURRENCY_TYPE, application_id, app_version, 'metrics' FROM ( 92 | SELECT STREAM 93 | events."currency_type" as CURRENCY_TYPE, 94 | events."application_id" as application_id, 95 | events."app_version" as app_version, 96 | count(*) as PURCHASE_COUNT, 97 | STEP(events.rowtime BY INTERVAL '1' MINUTE) as TIME_WINDOW 98 | FROM "AnalyticsApp_001" events 99 | WHERE events."currency_type" is not NULL 100 | AND events."event_type" = 'iap_transaction' 101 | GROUP BY 102 | STEP (events.ROWTIME BY INTERVAL '1' MINUTE), 103 | events."currency_type", 104 | events."application_id", 105 | events."app_version" 106 | HAVING count(*) > 1 107 | ORDER BY STEP (events.ROWTIME BY INTERVAL '1' MINUTE), PURCHASE_COUNT desc 108 | ); -------------------------------------------------------------------------------- /business-logic/analytics-processing/build.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2021 Amazon.com, Inc. and its affiliates. All Rights Reserved. 3 | 4 | Licensed under the Amazon Software License (the "License"). 5 | You may not use this file except in compliance with the License. 6 | A copy of the License is located at 7 | 8 | http://aws.amazon.com/asl/ 9 | 10 | or in the "license" file accompanying this file. This file is distributed 11 | on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 12 | express or implied. See the License for the specific language governing 13 | permissions and limitations under the License. 
"""Build helper: installs the npm dependencies for this Lambda function."""

import os
import subprocess
import sys
import shutil


def exit_on_failure(exit_code, msg):
    """Print ``msg`` and terminate with ``exit_code`` when it is non-zero."""
    if exit_code != 0:
        print(msg)
        exit(exit_code)


def main():
    """Run ``npm install`` against this function's source directory."""
    dir_path = os.path.dirname(os.path.realpath(__file__))

    npm_cmd = shutil.which("npm")
    # shutil.which returns None when npm is not on PATH; fail fast with a
    # clear message instead of passing None to subprocess.run (TypeError).
    if npm_cmd is None:
        exit_on_failure(1, "npm was not found on PATH; install Node.js/npm first")

    cmd = [npm_cmd, "install", "--prefix", dir_path]
    proc = subprocess.run(cmd, stderr=subprocess.STDOUT, shell=False)
    # The old message said "Web app npm install failed", which was a
    # copy-paste from another project; report the actual target directory.
    exit_on_failure(proc.returncode, "npm install failed for " + dir_path)


if __name__ == "__main__":
    main()
17 | */ 18 | 'use strict'; 19 | 20 | /** 21 | * Lib 22 | */ 23 | 24 | let lib = require('./lib/process.js'); 25 | 26 | exports.handler = function(event, context, callback) { 27 | console.log(`Analytics Processing service received event`); 28 | 29 | lib 30 | .respond(event) 31 | .then(data => { 32 | return callback(null, data); 33 | }) 34 | .catch(err => { 35 | return callback(err, null); 36 | }); 37 | }; 38 | -------------------------------------------------------------------------------- /business-logic/analytics-processing/lib/cloudwatch.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | * SPDX-License-Identifier: MIT-0 4 | * 5 | * Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | * software and associated documentation files (the "Software"), to deal in the Software 7 | * without restriction, including without limitation the rights to use, copy, modify, 8 | * merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | * permit persons to whom the Software is furnished to do so. 10 | * 11 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | * PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
'use strict';
const moment = require('moment');
const AWS = require('aws-sdk');
const creds = new AWS.EnvironmentCredentials('AWS'); // Lambda provided credentials
const cloudwatchConfig = {
    credentials: creds,
    region: process.env.AWS_REGION
};
// Fix: pass the config to the client so the Lambda-provided credentials and
// region are actually used. Previously the config object was built but never
// supplied to the constructor, making it dead configuration.
const cloudwatch = new AWS.CloudWatch(cloudwatchConfig);

/**
 * Process Kinesis Analytics output and publish to CloudWatch
 * @class CloudWatchMetrics
 */
class CloudWatchMetrics {
    constructor() {
        this.cloudwatchConfig = cloudwatchConfig;
    }

    /**
     * Publish a single metric datum to CloudWatch Metrics.
     * @param {JSON} metric - the payload to send to Cloudwatch
     * @returns {Promise} resolves with the PutMetricData response, rejects on error
     */
    async publishMetric(metric) {
        let namespace = `${process.env.CW_NAMESPACE}`;
        console.log(`Publishing metric: ${JSON.stringify(metric)}`);
        const params = {
            'MetricData': [metric],
            'Namespace': namespace
        };
        let data;
        try {
            data = await cloudwatch.putMetricData(params).promise();
            console.log(`cw response: ${JSON.stringify(data)}`);
        } catch (err) {
            console.log(`${JSON.stringify(err)}`);
            return Promise.reject(err);
        }
        return Promise.resolve(data);
    }

    /**
     * Convert a Kinesis Data Analytics output metric record into CloudWatch
     * Metric format.
     * @param {JSON} payload - input metric data record to be transformed
     * @returns {Promise} resolves with the CloudWatch-formatted metric
     */
    async buildMetric(payload) {
        let metric = {
            MetricName: payload.METRIC_NAME,
            // NOTE(review): assumes METRIC_TIMESTAMP is an epoch value that
            // moment() can parse -- confirm against the producing SQL app.
            Timestamp: moment(payload.METRIC_TIMESTAMP).unix(),
            Value: payload.METRIC_UNIT_VALUE_INT,
            Unit: payload.METRIC_UNIT || 'None'
        };

        // Extract dimensions from input, populate dimensions array in the
        // format required by CloudWatch, stripping the DIMENSION_ prefix.
        // Fix: use a prefix check (startsWith) instead of includes() so only
        // real DIMENSION_* columns are matched, and slice() so the prefix is
        // removed exactly once instead of split(...).pop().
        const prefix = 'DIMENSION_';
        let dimensions = [];
        for (var key in payload) {
            if (key.startsWith(prefix) && (payload[key] !== null && payload[key] != "" && payload[key] != "null")) {
                dimensions.push({
                    'Name': key.slice(prefix.length),
                    'Value': payload[key]
                });
            }
        }
        if (dimensions.length > 0) {
            metric.Dimensions = dimensions;
        }
        return Promise.resolve(metric);
    }
}

module.exports = CloudWatchMetrics;
23 | */ 24 | const AWS = require('aws-sdk'); 25 | const CloudwatchMetrics = require('./cloudwatch.js'); 26 | 27 | const respond = async event => { 28 | let success = 0; 29 | let failure = 0; 30 | let kinesisAnalyticsErrors = 0; 31 | let cloudwatch = new CloudwatchMetrics(); 32 | let results = []; 33 | for (const record of event.records) { 34 | try { 35 | const payload = JSON.parse(Buffer.from(record.data, 'base64')); 36 | if (payload.OUTPUT_TYPE === 'metrics') { 37 | let metric = await cloudwatch.buildMetric(payload); 38 | await cloudwatch.publishMetric(metric) 39 | .then(data => { 40 | success++; 41 | results.push({ 42 | recordId: record.recordId, 43 | result: 'Ok' 44 | }); 45 | }); 46 | 47 | } else if (payload.ERROR_NAME) { 48 | // Log errors from Kinesis Analytics error_stream - https://docs.aws.amazon.com/kinesisanalytics/latest/dev/error-handling.html 49 | // Treat as successfully handled record. Alarming on KDA errors can be handled in Cloudwatch with metric filter on kinesisAnalyticsErrors 50 | console.log(`Kinesis Data Analytics Error: ${JSON.stringify(payload)}`); 51 | kinesisAnalyticsErrors++; 52 | success++; 53 | results.push({ 54 | recordId: record.recordId, 55 | result: 'Ok' 56 | }); 57 | } else { 58 | // Records that are not tagged "metrics" are treated as delivery errors 59 | console.log(`Record does not contain OUTPUT_TYPE of metric`); 60 | failure++; 61 | results.push({ 62 | recordId: record.recordId, 63 | result: 'Ok' 64 | }); 65 | } 66 | } catch (err) { 67 | console.log(JSON.stringify(err)); 68 | failure++; 69 | results.push({ 70 | recordId: record.recordId, 71 | result: 'DeliveryFailed' 72 | }); 73 | } 74 | 75 | } 76 | 77 | console.log(JSON.stringify({ 78 | 'SuccessfulRecords': success, 79 | 'FailedRecords': failure, 80 | 'KinesisAnalyticsErrors': kinesisAnalyticsErrors 81 | })); 82 | return Promise.resolve({ 83 | records: results 84 | }); 85 | }; 86 | 87 | module.exports = { 88 | respond 89 | }; 90 | 91 | 92 | 93 | 
-------------------------------------------------------------------------------- /business-logic/analytics-processing/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "game-analytics-pipeline-analytics-processing", 3 | "version": "0.0.1", 4 | "description": "Lambda Function for processing output from Kinesis Data Analytics SQL Application", 5 | "private": true, 6 | "main": "index.js", 7 | "dependencies": { 8 | "aws-sdk": "*", 9 | "moment": "*" 10 | }, 11 | "devDependencies": { 12 | "npm-run-all": "*" 13 | }, 14 | "scripts": { 15 | "build:init": "rm -rf package-lock.json && rm -rf dist && rm -rf node_modules", 16 | "build:zip": "zip -rq analytics-processing.zip .", 17 | "build:dist": "mkdir dist && mv analytics-processing.zip dist/", 18 | "build": "npm run build:init && npm install --production && npm run build:zip && npm run build:dist" 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /business-logic/api/admin/build.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2021 Amazon.com, Inc. and its affiliates. All Rights Reserved. 3 | 4 | Licensed under the Amazon Software License (the "License"). 5 | You may not use this file except in compliance with the License. 6 | A copy of the License is located at 7 | 8 | http://aws.amazon.com/asl/ 9 | 10 | or in the "license" file accompanying this file. This file is distributed 11 | on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 12 | express or implied. See the License for the specific language governing 13 | permissions and limitations under the License. 
"""Build helper: installs the npm dependencies for this Lambda function."""

import os
import subprocess
import sys
import shutil


def exit_on_failure(exit_code, msg):
    """Print ``msg`` and terminate with ``exit_code`` when it is non-zero."""
    if exit_code != 0:
        print(msg)
        exit(exit_code)


def main():
    """Run ``npm install`` against this function's source directory."""
    dir_path = os.path.dirname(os.path.realpath(__file__))

    npm_cmd = shutil.which("npm")
    # shutil.which returns None when npm is not on PATH; fail fast with a
    # clear message instead of passing None to subprocess.run (TypeError).
    if npm_cmd is None:
        exit_on_failure(1, "npm was not found on PATH; install Node.js/npm first")

    cmd = [npm_cmd, "install", "--prefix", dir_path]
    proc = subprocess.run(cmd, stderr=subprocess.STDOUT, shell=False)
    # The old message said "Web app npm install failed", which was a
    # copy-paste from another project; report the actual target directory.
    exit_on_failure(proc.returncode, "npm install failed for " + dir_path)


if __name__ == "__main__":
    main()
17 | */ 18 | 19 | 'use strict'; 20 | 21 | const awsServerlessExpress = require('aws-serverless-express'); 22 | let app = require('./lib/app.js'); 23 | 24 | const server = awsServerlessExpress.createServer(app); 25 | exports.handler = (event, context) => { 26 | console.log(`Application admin service received event. ${JSON.stringify(event)}`); 27 | awsServerlessExpress.proxy(server, event, context); 28 | }; -------------------------------------------------------------------------------- /business-logic/api/admin/lib/app.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | * SPDX-License-Identifier: MIT-0 4 | * 5 | * Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | * software and associated documentation files (the "Software"), to deal in the Software 7 | * without restriction, including without limitation the rights to use, copy, modify, 8 | * merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | * permit persons to whom the Software is furnished to do so. 10 | * 11 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | * PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
'use strict';

/**
 * Express application exposing the admin API for managing registered game
 * applications and their API-key authorizations.
 */
const Application = require('./admin.js');
const express = require('express');
const bodyParser = require('body-parser');
const cors = require('cors');
const awsServerlessExpressMiddleware = require('aws-serverless-express/middleware');
const app = express();
const router = express.Router();

// declare a new express app
router.use(cors());
// Wrap the JSON body parser so malformed JSON is returned to the client as
// a 400 BadRequest instead of surfacing as an unhandled error.
router.use((req, res, next) => {
    bodyParser.json()(req, res, err => {
        if (err) {
            return res.status(400).json({
                'error': 'BadRequest',
                'error_detail': err.message
            });
        }
        next();
    });
});
router.use(bodyParser.urlencoded({extended: true}));
router.use(awsServerlessExpressMiddleware.eventContext());

// Translate an internal error into an HTTP error response.
// Fix: the handlers previously called res.status(err.code) directly, which
// makes Express throw when err.code is missing or a non-numeric AWS error
// code string; default to 500 in that case.
const sendError = (res, err) => {
    console.log(JSON.stringify(err));
    const status = Number.isInteger(err.code) ? err.code : 500;
    return res.status(status).json({
        'error': err.error || 'InternalServerError',
        'error_detail': err.message
    });
};

// Validate a 400 response for a missing/invalid string field; returns true
// when a response was sent (i.e. validation failed).
const rejectInvalidString = (res, value, required, detail) => {
    if ((required && !value) || (value && typeof value !== "string")) {
        console.log(detail);
        res.status(400).json({
            'error': 'InvalidParameterException',
            'error_detail': detail
        });
        return true;
    }
    return false;
};

// List applications
const listApplications = async (req, res) => {
    console.log(`Attempting to retrieve registered applications`);
    const _application = new Application();
    try {
        res.json(await _application.listApplications());
    } catch (err) {
        return sendError(res, err);
    }
};

// Get detail for an application
const getApplicationDetail = async (req, res) => {
    console.log(`Attempting to retrieve application detail information.`);
    const _application = new Application();
    try {
        res.json(await _application.getApplicationDetail(req.params.applicationId));
    } catch (err) {
        return sendError(res, err);
    }
};

// Creates a new application
const createApplication = async (req, res) => {
    console.log(`Attempting to create new registered application`);
    const body = req.body;
    const _application = new Application();
    try {
        if (rejectInvalidString(res, body.Name, true, 'Name is required and must be string')) {
            return;
        }
        if (rejectInvalidString(res, body.Description, false, 'Description must be string')) {
            return;
        }
        res.json(await _application.createApplication(body));
    } catch (err) {
        return sendError(res, err);
    }
};

// Deletes a registered application
const deleteApplication = async (req, res) => {
    console.log(`Attempting to delete registered application`);
    const _application = new Application();
    try {
        res.json(await _application.deleteApplication(req.params.applicationId));
    } catch (err) {
        return sendError(res, err);
    }
};

// Creates events api authorization (an API key bound to an application)
const createAuthorization = async (req, res) => {
    console.log(`Attempting to create authorization`);
    const _application = new Application();
    try {
        const body = req.body;
        const apiKeyName = body.Name;
        const apiKeyDescription = body.Description;
        const applicationId = req.params.applicationId;
        if (rejectInvalidString(res, apiKeyName, true, 'Name is required and must be string')) {
            return;
        }
        if (rejectInvalidString(res, apiKeyDescription, false, 'Description must be string')) {
            return;
        }

        // First check that provided application is valid (throws otherwise)
        await _application.getApplicationDetail(applicationId);
        const apiKey = await _application.createApiKey(apiKeyName, apiKeyDescription);
        const result = await _application.createAuthorization(apiKey.value, applicationId, apiKeyName, apiKeyDescription, apiKey.id);
        res.json(result);
    } catch (err) {
        return sendError(res, err);
    }
};

// Delete authorization.
// NOTE(review): this handler is also routed without :apiKeyId, in which case
// req.params.apiKeyId is undefined -- confirm Application.deleteAuthorization
// handles that shape.
const deleteAuthorization = async (req, res) => {
    console.log(`Attempting to delete authorization`);
    const _application = new Application();
    try {
        res.json(await _application.deleteAuthorization(req.params.apiKeyId, req.params.applicationId));
    } catch (err) {
        return sendError(res, err);
    }
};

// Modify authorization (enable/disable an API key)
const modifyAuthorization = async (req, res) => {
    console.log(`Attempting to modify authorization`);
    const applicationId = req.params.applicationId;
    const apiKeyId = req.params.apiKeyId;
    const _application = new Application();
    try {
        const enabled = req.body.Enabled;
        if (typeof enabled === "boolean") {
            res.json(await _application.modifyAuthorization(apiKeyId, applicationId, enabled));
        } else {
            console.log(`Enabled is required and must be of type boolean`);
            return res.status(400).json({
                'error': 'InvalidParameterException',
                'error_detail': 'Enabled field is required and must be boolean value'
            });
        }
    } catch (err) {
        return sendError(res, err);
    }
};

// List authorizations for an application
const listAuthorizations = async (req, res) => {
    console.log(`Attempting to list authorizations`);
    const _application = new Application();
    try {
        res.json(await _application.listApplicationAuthorizations(req.params.applicationId));
    } catch (err) {
        return sendError(res, err);
    }
};

// Get detail for an authorization
const getAuthorizationDetail = async (req, res) => {
    console.log(`Attempting to retrieve authorization detail information.`);
    const _application = new Application();
    try {
        res.json(await _application.getAuthorizationDetail(req.params.apiKeyId, req.params.applicationId));
    } catch (err) {
        return sendError(res, err);
    }
};

/****************************
 *      Event methods       *
 ****************************/

router.get('/applications', listApplications);
router.post('/applications', createApplication);

router.get('/applications/:applicationId', getApplicationDetail);
router.delete('/applications/:applicationId', deleteApplication);

router.post('/applications/:applicationId/authorizations', createAuthorization);
router.get('/applications/:applicationId/authorizations', listAuthorizations);
router.get('/applications/:applicationId/authorizations/:apiKeyId', getAuthorizationDetail);
router.put('/applications/:applicationId/authorizations/:apiKeyId', modifyAuthorization);
router.delete('/applications/:applicationId/authorizations', deleteAuthorization);
router.delete('/applications/:applicationId/authorizations/:apiKeyId', deleteAuthorization);
//router.put('/registrations/:registration_name', updateRegistration);

app.use('/', router);

// Export the app object. When executing the application locally this does
// nothing; the Lambda wrapper (index.js) loads the app from this file.
module.exports = app;
However, 272 | // to port it to AWS Lambda we will create a wrapper around that will load the app from 273 | // this file 274 | module.exports = app; 275 | -------------------------------------------------------------------------------- /business-logic/api/admin/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "game-analytics-application-admin-service", 3 | "description": "The application admin microservice for the game analytics pipeline solution", 4 | "main": "index.js", 5 | "version": "0.0.1", 6 | "private": true, 7 | "dependencies": { 8 | "aws-sdk": "*", 9 | "aws-serverless-express": "*", 10 | "body-parser": "*", 11 | "cors": "*", 12 | "express": "*", 13 | "moment": "*", 14 | "underscore": "*", 15 | "uuid": "*" 16 | }, 17 | "devDependencies": { 18 | "aws-sdk": "*", 19 | "npm-run-all": "*" 20 | }, 21 | "scripts": { 22 | "build:init": "rm -rf package-lock.json && rm -rf dist && rm -rf node_modules", 23 | "build:zip": "zip -rq admin.zip .", 24 | "build:dist": "mkdir dist && mv admin.zip dist/", 25 | "build": "npm run build:init && npm install --production && npm run build:zip && npm run build:dist" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /business-logic/api/build.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2021 Amazon.com, Inc. and its affiliates. All Rights Reserved. 3 | 4 | Licensed under the Amazon Software License (the "License"). 5 | You may not use this file except in compliance with the License. 6 | A copy of the License is located at 7 | 8 | http://aws.amazon.com/asl/ 9 | 10 | or in the "license" file accompanying this file. This file is distributed 11 | on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 12 | express or implied. See the License for the specific language governing 13 | permissions and limitations under the License. 
"""Build orchestrator: runs each sub-project's build.py found in this directory."""

import os
import shutil
import sys
import subprocess

# Prepares all the lambdas for deployment
#
# Walks each direct subdirectory looking for a build script and executes it
# if found.


def run_builds(root_dir, build_file_name="build.py"):
    """Execute every ``<subdir>/build.py`` directly under ``root_dir``.

    Returns the sum of the child processes' return codes, so the result is
    0 only when every build succeeded.
    """
    exit_code = 0
    for entry in os.listdir(root_dir):
        # Join once; the original joined the already-absolute path a second
        # time, which only worked because os.path.join ignores the first
        # argument when the second is absolute.
        folder_path = os.path.join(root_dir, entry)
        if not os.path.isdir(folder_path):
            continue
        build_file_path = os.path.join(folder_path, build_file_name)
        if os.path.exists(build_file_path):
            cmd = [sys.executable, build_file_path]
            proc = subprocess.run(cmd, stderr=subprocess.STDOUT, shell=False)
            exit_code += proc.returncode
    return exit_code


if __name__ == "__main__":
    dir_path = os.path.dirname(os.path.realpath(__file__))
    # Keep the original behavior: build scripts are named like this file.
    exit(run_builds(dir_path, os.path.basename(__file__)))
"""Build helper: installs the npm dependencies for this Lambda function."""

import os
import subprocess
import sys
import shutil


def exit_on_failure(exit_code, msg):
    """Print ``msg`` and terminate with ``exit_code`` when it is non-zero."""
    if exit_code != 0:
        print(msg)
        exit(exit_code)


def main():
    """Run ``npm install`` against this function's source directory."""
    dir_path = os.path.dirname(os.path.realpath(__file__))

    npm_cmd = shutil.which("npm")
    # shutil.which returns None when npm is not on PATH; fail fast with a
    # clear message instead of passing None to subprocess.run (TypeError).
    if npm_cmd is None:
        exit_on_failure(1, "npm was not found on PATH; install Node.js/npm first")

    cmd = [npm_cmd, "install", "--prefix", dir_path]
    proc = subprocess.run(cmd, stderr=subprocess.STDOUT, shell=False)
    # The old message said "Web app npm install failed", which was a
    # copy-paste from another project; report the actual target directory.
    exit_on_failure(proc.returncode, "npm install failed for " + dir_path)


if __name__ == "__main__":
    main()
17 | */ 18 | 19 | 'use strict'; 20 | 21 | /** 22 | * Lib 23 | */ 24 | const AWS = require('aws-sdk'); 25 | console.log('Loading function'); 26 | 27 | 28 | /** 29 | * AuthPolicy receives a set of allowed and denied methods and generates a valid 30 | * AWS policy for the API Gateway authorizer. The constructor receives the calling 31 | * user principal, the AWS account ID of the API owner, and an apiOptions object. 32 | * The apiOptions can contain an API Gateway RestApi Id, a region for the RestApi, and a 33 | * stage that calls should be allowed/denied for. For example 34 | * { 35 | * restApiId: 'xxxxxxxxxx, 36 | * region: 'us-east-1, 37 | * stage: 'dev', 38 | * } 39 | * 40 | * const testPolicy = new AuthPolicy("[principal user identifier]", "[AWS account id]", apiOptions); 41 | * testPolicy.allowMethod(AuthPolicy.HttpVerb.GET, "/users/username"); 42 | * testPolicy.denyMethod(AuthPolicy.HttpVerb.POST, "/pets"); 43 | * callback(null, testPolicy.build()); 44 | * 45 | * @class AuthPolicy 46 | * @constructor 47 | */ 48 | function AuthPolicy(principal, awsAccountId, apiOptions) { 49 | /** 50 | * The AWS account id the policy will be generated for. This is used to create 51 | * the method ARNs. 52 | * 53 | * @property awsAccountId 54 | * @type {String} 55 | */ 56 | this.awsAccountId = awsAccountId; 57 | 58 | /** 59 | * The principal used for the policy, this should be a unique identifier for 60 | * the end user. 61 | * 62 | * @property principalId 63 | * @type {String} 64 | */ 65 | this.principalId = principal; 66 | 67 | /** 68 | * The policy version used for the evaluation. 
This should always be "2012-10-17" 69 | * 70 | * @property version 71 | * @type {String} 72 | * @default "2012-10-17" 73 | */ 74 | this.version = '2012-10-17'; 75 | 76 | /** 77 | * The regular expression used to validate resource paths for the policy 78 | * 79 | * @property pathRegex 80 | * @type {RegExp} 81 | * @default '^\/[/.a-zA-Z0-9-\*]+$' 82 | */ 83 | this.pathRegex = new RegExp('^[/.a-zA-Z0-9-\*]+$'); 84 | 85 | // These are the internal lists of allowed and denied methods. These are lists 86 | // of objects and each object has two properties: a resource ARN and a nullable 87 | // conditions statement. The build method processes these lists and generates 88 | // the appropriate statements for the final policy. 89 | this.allowMethods = []; 90 | this.denyMethods = []; 91 | 92 | if (!apiOptions || !apiOptions.restApiId) { 93 | this.restApiId = '*'; 94 | } else { 95 | this.restApiId = apiOptions.restApiId; 96 | } 97 | if (!apiOptions || !apiOptions.region) { 98 | this.region = '*'; 99 | } else { 100 | this.region = apiOptions.region; 101 | } 102 | if (!apiOptions || !apiOptions.stage) { 103 | this.stage = '*'; 104 | } else { 105 | this.stage = apiOptions.stage; 106 | } 107 | } 108 | 109 | /** 110 | * A set of existing HTTP verbs supported by API Gateway. This property is here 111 | * only to avoid spelling mistakes in the policy. 112 | * 113 | * @property HttpVerb 114 | * @type {Object} 115 | */ 116 | AuthPolicy.HttpVerb = { 117 | GET: 'GET', 118 | POST: 'POST', 119 | PUT: 'PUT', 120 | PATCH: 'PATCH', 121 | HEAD: 'HEAD', 122 | DELETE: 'DELETE', 123 | OPTIONS: 'OPTIONS', 124 | ALL: '*', 125 | }; 126 | 127 | AuthPolicy.prototype = (function AuthPolicyClass() { 128 | /** 129 | * Adds a method to the internal lists of allowed or denied methods. Each object in 130 | * the internal list contains a resource ARN and a condition statement. The condition 131 | * statement can be null. 132 | * 133 | * @method addMethod 134 | * @param {String} The effect for the policy. 
This can only be "Allow" or "Deny". 135 | * @param {String} The HTTP verb for the method, this should ideally come from the 136 | * AuthPolicy.HttpVerb object to avoid spelling mistakes 137 | * @param {String} The resource path. For example "/pets" 138 | * @param {Object} The conditions object in the format specified by the AWS docs. 139 | * @return {void} 140 | */ 141 | function addMethod(effect, verb, resource, conditions) { 142 | if (verb !== '*' && !Object.prototype.hasOwnProperty.call(AuthPolicy.HttpVerb, verb)) { 143 | throw new Error(`Invalid HTTP verb ${verb}. Allowed verbs in AuthPolicy.HttpVerb`); 144 | } 145 | 146 | if (!this.pathRegex.test(resource)) { 147 | throw new Error(`Invalid resource path: ${resource}. Path should match ${this.pathRegex}`); 148 | } 149 | 150 | let cleanedResource = resource; 151 | if (resource.substring(0, 1) === '/') { 152 | cleanedResource = resource.substring(1, resource.length); 153 | } 154 | const resourceArn = `arn:aws:execute-api:${this.region}:${this.awsAccountId}:${this.restApiId}/${this.stage}/${verb}/${cleanedResource}`; 155 | 156 | if (effect.toLowerCase() === 'allow') { 157 | this.allowMethods.push({ 158 | resourceArn, 159 | conditions, 160 | }); 161 | } else if (effect.toLowerCase() === 'deny') { 162 | this.denyMethods.push({ 163 | resourceArn, 164 | conditions, 165 | }); 166 | } 167 | } 168 | 169 | /** 170 | * Returns an empty statement object prepopulated with the correct action and the 171 | * desired effect. 172 | * 173 | * @method getEmptyStatement 174 | * @param {String} The effect of the statement, this can be "Allow" or "Deny" 175 | * @return {Object} An empty statement object with the Action, Effect, and Resource 176 | * properties prepopulated. 
177 | */ 178 | function getEmptyStatement(effect) { 179 | const statement = {}; 180 | statement.Action = 'execute-api:Invoke'; 181 | statement.Effect = effect.substring(0, 1).toUpperCase() + effect.substring(1, effect.length).toLowerCase(); 182 | statement.Resource = []; 183 | 184 | return statement; 185 | } 186 | 187 | /** 188 | * This function loops over an array of objects containing a resourceArn and 189 | * conditions statement and generates the array of statements for the policy. 190 | * 191 | * @method getStatementsForEffect 192 | * @param {String} The desired effect. This can be "Allow" or "Deny" 193 | * @param {Array} An array of method objects containing the ARN of the resource 194 | * and the conditions for the policy 195 | * @return {Array} an array of formatted statements for the policy. 196 | */ 197 | function getStatementsForEffect(effect, methods) { 198 | const statements = []; 199 | 200 | if (methods.length > 0) { 201 | const statement = getEmptyStatement(effect); 202 | 203 | for (let i = 0; i < methods.length; i++) { 204 | const curMethod = methods[i]; 205 | if (curMethod.conditions === null || curMethod.conditions.length === 0) { 206 | statement.Resource.push(curMethod.resourceArn); 207 | } else { 208 | const conditionalStatement = getEmptyStatement(effect); 209 | conditionalStatement.Resource.push(curMethod.resourceArn); 210 | conditionalStatement.Condition = curMethod.conditions; 211 | statements.push(conditionalStatement); 212 | } 213 | } 214 | 215 | if (statement.Resource !== null && statement.Resource.length > 0) { 216 | statements.push(statement); 217 | } 218 | } 219 | 220 | return statements; 221 | } 222 | 223 | return { 224 | constructor: AuthPolicy, 225 | 226 | /** 227 | * Adds an allow "*" statement to the policy. 228 | * 229 | * @method allowAllMethods 230 | */ 231 | allowAllMethods() { 232 | addMethod.call(this, 'allow', '*', '*', null); 233 | }, 234 | 235 | /** 236 | * Adds a deny "*" statement to the policy. 
237 | * 238 | * @method denyAllMethods 239 | */ 240 | denyAllMethods() { 241 | addMethod.call(this, 'deny', '*', '*', null); 242 | }, 243 | 244 | /** 245 | * Adds an API Gateway method (Http verb + Resource path) to the list of allowed 246 | * methods for the policy 247 | * 248 | * @method allowMethod 249 | * @param {String} The HTTP verb for the method, this should ideally come from the 250 | * AuthPolicy.HttpVerb object to avoid spelling mistakes 251 | * @param {string} The resource path. For example "/pets" 252 | * @return {void} 253 | */ 254 | allowMethod(verb, resource) { 255 | addMethod.call(this, 'allow', verb, resource, null); 256 | }, 257 | 258 | /** 259 | * Adds an API Gateway method (Http verb + Resource path) to the list of denied 260 | * methods for the policy 261 | * 262 | * @method denyMethod 263 | * @param {String} The HTTP verb for the method, this should ideally come from the 264 | * AuthPolicy.HttpVerb object to avoid spelling mistakes 265 | * @param {string} The resource path. For example "/pets" 266 | * @return {void} 267 | */ 268 | denyMethod(verb, resource) { 269 | addMethod.call(this, 'deny', verb, resource, null); 270 | }, 271 | 272 | /** 273 | * Adds an API Gateway method (Http verb + Resource path) to the list of allowed 274 | * methods and includes a condition for the policy statement. More on AWS policy 275 | * conditions here: http://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements.html#Condition 276 | * 277 | * @method allowMethodWithConditions 278 | * @param {String} The HTTP verb for the method, this should ideally come from the 279 | * AuthPolicy.HttpVerb object to avoid spelling mistakes 280 | * @param {string} The resource path. 
For example "/pets" 281 | * @param {Object} The conditions object in the format specified by the AWS docs 282 | * @return {void} 283 | */ 284 | allowMethodWithConditions(verb, resource, conditions) { 285 | addMethod.call(this, 'allow', verb, resource, conditions); 286 | }, 287 | 288 | /** 289 | * Adds an API Gateway method (Http verb + Resource path) to the list of denied 290 | * methods and includes a condition for the policy statement. More on AWS policy 291 | * conditions here: http://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements.html#Condition 292 | * 293 | * @method denyMethodWithConditions 294 | * @param {String} The HTTP verb for the method, this should ideally come from the 295 | * AuthPolicy.HttpVerb object to avoid spelling mistakes 296 | * @param {string} The resource path. For example "/pets" 297 | * @param {Object} The conditions object in the format specified by the AWS docs 298 | * @return {void} 299 | */ 300 | denyMethodWithConditions(verb, resource, conditions) { 301 | addMethod.call(this, 'deny', verb, resource, conditions); 302 | }, 303 | 304 | /** 305 | * Generates the policy document based on the internal lists of allowed and denied 306 | * conditions. This will generate a policy with two main statements for the effect: 307 | * one statement for Allow and one statement for Deny. 308 | * Methods that includes conditions will have their own statement in the policy. 309 | * 310 | * @method build 311 | * @return {Object} The policy object that can be serialized to JSON. 
312 | */ 313 | build() { 314 | if ((!this.allowMethods || this.allowMethods.length === 0) && 315 | (!this.denyMethods || this.denyMethods.length === 0)) { 316 | throw new Error('No statements defined for the policy'); 317 | } 318 | 319 | const policy = {}; 320 | policy.principalId = this.principalId; 321 | const doc = {}; 322 | doc.Version = this.version; 323 | doc.Statement = []; 324 | 325 | doc.Statement = doc.Statement.concat(getStatementsForEffect.call(this, 'Allow', this.allowMethods)); 326 | doc.Statement = doc.Statement.concat(getStatementsForEffect.call(this, 'Deny', this.denyMethods)); 327 | 328 | policy.policyDocument = doc; 329 | 330 | return policy; 331 | }, 332 | }; 333 | }()); 334 | 335 | 336 | exports.handler = async (event, context, callback) => { 337 | console.log(`Received event: ${JSON.stringify(event)}`); 338 | // Retrieve request parameters from the Lambda function input: 339 | const headers = event.headers; 340 | const queryStringParameters = event.queryStringParameters; 341 | const pathParameters = event.pathParameters; 342 | const stageVariables = event.stageVariables; 343 | 344 | // Parse the input for the parameter values 345 | const apiOptions = {}; 346 | const tmp = event.methodArn.split(':'); 347 | const apiGatewayArnTmp = tmp[5].split('/'); 348 | const awsAccountId = tmp[4]; 349 | apiOptions.region = tmp[3]; 350 | apiOptions.restApiId = apiGatewayArnTmp[0]; 351 | apiOptions.stage = apiGatewayArnTmp[1]; 352 | 353 | /** 354 | * Extract authorization header 355 | * Use Api Key to check if authorizations exist in Authorizations Table 356 | * Generate Allow POST /events associated with key 357 | */ 358 | if (!headers.Authorization) { 359 | callback('Unauthorized'); 360 | } 361 | 362 | const apiKeyValue = headers.Authorization; 363 | let policy = new AuthPolicy(apiKeyValue, awsAccountId, apiOptions); 364 | let authResponse = {}; 365 | // Returns array of authorizations or false if no authorizations are found for key 366 | const 
authorizations = await getAuthorizations(apiKeyValue); 367 | 368 | // If api key is valid and authorizations are found, use them to build and return an allow policy 369 | // If api key not found, "false" is returned and a "deny all" policy is set for the unknown key value so it gets cached 370 | if (authorizations.authorizations) { 371 | console.log(JSON.stringify(authorizations)); 372 | authorizations.authorizations.forEach(function(item) { 373 | policy.allowMethod(AuthPolicy.HttpVerb.POST, `/applications/${item.application_id}/events`); 374 | }); 375 | 376 | authResponse = policy.build(); 377 | authResponse.usageIdentifierKey = apiKeyValue; 378 | console.log(JSON.stringify(authResponse)); 379 | callback(null, authResponse); 380 | } else { 381 | console.log(`No authorizations found for api key. Setting policy to deny all so that it can be cached`); 382 | policy.denyAllMethods(); // return deny all if api key invalid/no authZ found 383 | authResponse = policy.build(); 384 | callback(null, authResponse); 385 | } 386 | }; 387 | 388 | /** 389 | * Retrieve registrations for an api key 390 | */ 391 | const getAuthorizations = async (apiKeyValue) => { 392 | console.log(`Attempting to get authorizations`); 393 | try { 394 | const result = await _queryDDBAuthorizations(apiKeyValue); 395 | return result; 396 | } catch (err) { 397 | console.log(JSON.stringify(err)); 398 | if (err.error === 'NoAuthorizationsFound') { 399 | return false; 400 | } 401 | return err; 402 | } 403 | }; 404 | 405 | 406 | // Queries the ApiKeyValues index and only returns enabled keys 407 | // If needed, admins can update the records directly in DynamoDB to disable (but not delete) a key to deactivate it 408 | const _queryDDBAuthorizations = async (apiKeyValue, lastevalkey) => { 409 | let authorizations = []; 410 | let params = { 411 | TableName: process.env.AUTHORIZATIONS_TABLE, 412 | IndexName: 'ApiKeyValues', 413 | KeyConditionExpression: 'api_key_value = :apiKeyValue', 414 | 
ExpressionAttributeValues: { 415 | ':apiKeyValue': apiKeyValue, 416 | ':enabled': true 417 | }, 418 | FilterExpression : 'enabled = :enabled', 419 | Limit: 500 420 | }; 421 | this.creds = new AWS.EnvironmentCredentials('AWS'); // Lambda provided credentials 422 | this.config = { 423 | credentials: this.creds, 424 | region: process.env.AWS_REGION, 425 | }; 426 | 427 | if (lastevalkey) { 428 | params.ExclusiveStartKey = lastevalkey; 429 | } 430 | let docClient = new AWS.DynamoDB.DocumentClient(this.config); 431 | try { 432 | let result = await docClient.query(params).promise(); 433 | if (result.Items.length < 1){ 434 | console.log(`No authorizations found for api key`); 435 | return Promise.reject({ 436 | code: 400, 437 | error: 'NoAuthorizationsFound', 438 | message: 'No authorizations found for api key' 439 | }); 440 | } 441 | result.Items.forEach(function(item) { 442 | authorizations.push({ 443 | 'api_key_id': item.api_key_id, 444 | 'api_key_value': item.api_key_value, 445 | 'application_id': item.application_id, 446 | 'enabled': item.enabled 447 | }); 448 | }); 449 | if (result.LastEvaluatedKey) { 450 | let moreResult = await this._queryDDBAuthorizations(apiKeyValue, lastevalkey); 451 | moreResult.Items.forEach(function(item) { 452 | authorizations.push({ 453 | 'api_key_id': item.api_key_id, 454 | 'api_key_value': item.api_key_value, 455 | 'application_id': item.application_id, 456 | 'enabled': item.enabled 457 | }); 458 | }); 459 | } 460 | return Promise.resolve({ 461 | 'authorizations': authorizations, 462 | 'count': authorizations.length 463 | }); 464 | } catch (err) { 465 | console.log(JSON.stringify(err)); 466 | return Promise.reject(err); 467 | } 468 | }; 469 | -------------------------------------------------------------------------------- /business-logic/api/lambda-authorizer/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "game-analytics-lambda-authorizer", 3 | "version": "0.0.1", 4 | 
"description": "API Gateway Lambda Authorizer used to validate requests to solution /events API endpoint", 5 | "private": true, 6 | "main": "index.js", 7 | "dependencies": { 8 | "aws-sdk": "*" 9 | }, 10 | "scripts": { 11 | "build:init": "rm -rf package-lock.json && rm -rf dist && rm -rf node_modules", 12 | "build:zip": "zip -rq lambda-authorizer.zip .", 13 | "build:dist": "mkdir dist && mv lambda-authorizer.zip dist/", 14 | "build": "npm run build:init && npm install --production && npm run build:zip && npm run build:dist" 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /business-logic/build.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2021 Amazon.com, Inc. and its affiliates. All Rights Reserved. 3 | 4 | Licensed under the Amazon Software License (the "License"). 5 | You may not use this file except in compliance with the License. 6 | A copy of the License is located at 7 | 8 | http://aws.amazon.com/asl/ 9 | 10 | or in the "license" file accompanying this file. This file is distributed 11 | on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 12 | express or implied. See the License for the specific language governing 13 | permissions and limitations under the License. 
14 | """ 15 | 16 | import os 17 | import shutil 18 | import sys 19 | import subprocess 20 | 21 | # Prepares the all the lambdas for deployment 22 | # 23 | # Walks each directory looking for a build script and executes it if found 24 | 25 | build_file_name = "build.py" 26 | 27 | dir_path = os.path.dirname(os.path.realpath(__file__)) 28 | build_file_name = os.path.basename(__file__) 29 | exit_code = 0 30 | 31 | for file in os.listdir(dir_path): 32 | file = os.path.join(dir_path, file) 33 | if os.path.isdir(file): 34 | folder_path = os.path.join(dir_path, file) 35 | build_file_path = os.path.join(folder_path, build_file_name) 36 | if os.path.exists(build_file_path): 37 | cmd = [sys.executable, build_file_path] 38 | proc = subprocess.run(cmd, stderr=subprocess.STDOUT, shell=False) 39 | exit_code = exit_code + proc.returncode 40 | 41 | 42 | exit(exit_code) 43 | -------------------------------------------------------------------------------- /business-logic/data-lake/build.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2021 Amazon.com, Inc. and its affiliates. All Rights Reserved. 3 | 4 | Licensed under the Amazon Software License (the "License"). 5 | You may not use this file except in compliance with the License. 6 | A copy of the License is located at 7 | 8 | http://aws.amazon.com/asl/ 9 | 10 | or in the "license" file accompanying this file. This file is distributed 11 | on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 12 | express or implied. See the License for the specific language governing 13 | permissions and limitations under the License. 
14 | """ 15 | 16 | import os 17 | import shutil 18 | import sys 19 | import subprocess 20 | 21 | # Prepares the all the lambdas for deployment 22 | # 23 | # Walks each directory looking for a build script and executes it if found 24 | 25 | build_file_name = "build.py" 26 | 27 | dir_path = os.path.dirname(os.path.realpath(__file__)) 28 | build_file_name = os.path.basename(__file__) 29 | exit_code = 0 30 | 31 | for file in os.listdir(dir_path): 32 | file = os.path.join(dir_path, file) 33 | if os.path.isdir(file): 34 | folder_path = os.path.join(dir_path, file) 35 | build_file_path = os.path.join(folder_path, build_file_name) 36 | if os.path.exists(build_file_path): 37 | cmd = [sys.executable, build_file_path] 38 | proc = subprocess.run(cmd, stderr=subprocess.STDOUT, shell=False) 39 | exit_code = exit_code + proc.returncode 40 | 41 | 42 | exit(exit_code) 43 | -------------------------------------------------------------------------------- /business-logic/data-lake/glue-partition-creator/build.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2021 Amazon.com, Inc. and its affiliates. All Rights Reserved. 3 | 4 | Licensed under the Amazon Software License (the "License"). 5 | You may not use this file except in compliance with the License. 6 | A copy of the License is located at 7 | 8 | http://aws.amazon.com/asl/ 9 | 10 | or in the "license" file accompanying this file. This file is distributed 11 | on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 12 | express or implied. See the License for the specific language governing 13 | permissions and limitations under the License. 
14 | """ 15 | 16 | import os 17 | import subprocess 18 | import sys 19 | import shutil 20 | 21 | 22 | def exit_on_failure(exit_code, msg): 23 | if exit_code != 0: 24 | print(msg) 25 | exit(exit_code) 26 | 27 | 28 | dir_path = os.path.dirname(os.path.realpath(__file__)) 29 | 30 | npm_cmd = shutil.which("npm") 31 | cmd = [npm_cmd, "install", "--prefix", dir_path] 32 | proc = subprocess.run(cmd, stderr=subprocess.STDOUT, shell=False) 33 | exit_on_failure(proc.returncode, "Web app npm install failed") 34 | -------------------------------------------------------------------------------- /business-logic/data-lake/glue-partition-creator/index.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | * SPDX-License-Identifier: MIT-0 4 | * 5 | * Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | * software and associated documentation files (the "Software"), to deal in the Software 7 | * without restriction, including without limitation the rights to use, copy, modify, 8 | * merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | * permit persons to whom the Software is furnished to do so. 10 | * 11 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | * PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
17 | */ 18 | 19 | 'use strict'; 20 | 21 | const AWS = require('aws-sdk') 22 | const moment = require('moment'); 23 | const glue = new AWS.Glue({ apiVersion: '2017-03-31' }); 24 | global.StorageDescriptor = {}; 25 | 26 | exports.handler = async (event) => { 27 | var storageDescriptor = {}; 28 | console.log(`Event: ${JSON.stringify(event)}`); 29 | const date = moment(); 30 | const year = moment(date).format('YYYY'); 31 | const month = moment(date).format('MM'); 32 | const day = moment(date).format('DD'); 33 | //const hour = moment(date).format('HH'); 34 | console.log(`date: ${date}, year: ${year}, month: ${month}, day: ${day}`); 35 | 36 | try { 37 | let result = await glue.getPartition({ 38 | DatabaseName: process.env.DATABASE_NAME, 39 | TableName: process.env.TABLE_NAME, 40 | PartitionValues: [String(year), String(month), String(day)] 41 | }).promise(); 42 | console.log(`Partition already exists for year=${year}/month=${month}/day=${day}`); 43 | return result; 44 | } catch (err) { 45 | // If partition does not exist, create a new one based on the S3 key 46 | console.log(`Partition doesn't exist, retrieving table configuration from Glue`); 47 | let Table = await glue.getTable({ 48 | DatabaseName: process.env.DATABASE_NAME, 49 | Name: process.env.TABLE_NAME, 50 | }).promise(); 51 | console.log(`Table setting: ${JSON.stringify(Table)}`); 52 | storageDescriptor = Table.Table.StorageDescriptor; 53 | if(err.code === 'EntityNotFoundException'){ 54 | let params = { 55 | DatabaseName: process.env.DATABASE_NAME, 56 | TableName: process.env.TABLE_NAME, 57 | PartitionInput: { 58 | StorageDescriptor: { 59 | ...storageDescriptor, 60 | Location: `${storageDescriptor.Location}/year=${year}/month=${month}/day=${day}` 61 | }, 62 | Values: [String(year), String(month), String(day)], 63 | } 64 | }; 65 | await glue.createPartition(params).promise(); 66 | console.log(`Created new table partition: ${storageDescriptor.Location}/year=${year}/month=${month}/day=${day}`); 67 | } else { 68 | 
console.log(`There was an error: ${JSON.stringify(err)}`); 69 | return err; 70 | } 71 | } 72 | }; -------------------------------------------------------------------------------- /business-logic/data-lake/glue-partition-creator/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "game-analytics-glue-partition-creator", 3 | "description": "Function to create partitions in Glue on a recurring scheduled basis as part of the game analytics pipeline solution", 4 | "main": "index.js", 5 | "version": "0.0.1", 6 | "private": true, 7 | "dependencies": { 8 | "aws-sdk": "*", 9 | "moment": "*" 10 | }, 11 | "devDependencies": { 12 | "aws-sdk": "*" 13 | }, 14 | "scripts": { 15 | "build:init": "rm -rf package-lock.json && rm -rf dist && rm -rf node_modules", 16 | "build:zip": "zip -rq glue-partition-creator.zip .", 17 | "build:dist": "mkdir dist && mv glue-partition-creator.zip dist/", 18 | "build": "npm run build:init && npm install --production && npm run build:zip && npm run build:dist" 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /business-logic/data-lake/glue-scripts/game_events_etl.py: -------------------------------------------------------------------------------- 1 | ###################################################################################################################### 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: MIT-0 4 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this 5 | # software and associated documentation files (the "Software"), to deal in the Software 6 | # without restriction, including without limitation the rights to use, copy, modify, 7 | # merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 8 | # permit persons to whom the Software is furnished to do so. 
9 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 10 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 11 | # PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 12 | # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 13 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 14 | # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 15 | ###################################################################################################################### 16 | 17 | import sys 18 | import json 19 | from datetime import datetime 20 | from awsglue.transforms import * 21 | from pyspark.sql.functions import * 22 | from awsglue.utils import getResolvedOptions 23 | from pyspark.context import SparkContext 24 | from awsglue.context import GlueContext 25 | from awsglue.dynamicframe import DynamicFrame 26 | from awsglue.job import Job 27 | from pyspark.sql import SparkSession 28 | from pyspark.sql.types import StringType 29 | 30 | #sc = SparkContext() 31 | sc = SparkContext.getOrCreate() 32 | sc.setLogLevel("TRACE") 33 | glueContext = GlueContext(sc) 34 | job = Job(glueContext) 35 | 36 | args = getResolvedOptions(sys.argv, 37 | ['JOB_NAME', 38 | 'database_name', 39 | 'raw_events_table_name', 40 | 'analytics_bucket', 41 | 'processed_data_prefix', 42 | 'glue_tmp_prefix']) 43 | 44 | job.init(args['JOB_NAME'], args) 45 | 46 | print("Database: {}".format(args['database_name'])) 47 | print("Raw Events Table: {}".format(args['raw_events_table_name'])) 48 | print("Analytics bucket output path: {}{}".format(args['analytics_bucket'], args['processed_data_prefix'])) 49 | print("Glue Temp S3 location: {}{}".format(args['analytics_bucket'], args['glue_tmp_prefix'])) 50 | 51 | # catalog: database and table names 52 | db_name = args['database_name'] 53 | raw_events_table = args['raw_events_table_name'] 54 | 55 | # Output 
location
analytics_bucket_output = args['analytics_bucket'] + args['processed_data_prefix']
analytics_bucket_temp_storage = args['analytics_bucket'] + args['glue_tmp_prefix']

# Helper function: rewrites the year/month/day partition fields from the event's
# own event_timestamp (seconds since the Unix epoch, set by the event producer)
# rather than the Firehose ingestion time.
def applyTransform(rec):
    # Convert the epoch timestamp once and reuse it for every partition field
    # (the original converted it separately per field).
    event_time = datetime.utcfromtimestamp(rec["event"]["event_timestamp"])
    rec["year"] = event_time.year
    rec["month"] = event_time.month
    rec["day"] = event_time.day
    #rec["hour"] = event_time.hour
    return rec

# Create dynamic frame from the source tables
events = glueContext.create_dynamic_frame.from_catalog(
    database=db_name,
    table_name=raw_events_table,
    transformation_ctx = "events"
)

# Maps a transformation function over each record to re-build date partitions using the event_timestamp
# rather than the Firehose ingestion timestamp
#filtered_events_dyf_transformed = Map.apply(frame = filtered_events_dyf, f = applyTransform)

events.printSchema()
record_count = events.count()
print("Record count: {}".format(record_count))

# Avoid errors if Glue Job Bookmark detects no new data to process and records = 0.
83 | if record_count > 0: 84 | try: 85 | output = glueContext.write_dynamic_frame.from_options( 86 | frame = events, 87 | connection_type = "s3", 88 | connection_options = { 89 | "path": analytics_bucket_output, 90 | "partitionKeys": ["application_id", "year", "month", "day"] 91 | }, 92 | format = "glueparquet", 93 | transformation_ctx = "output" 94 | ) 95 | except: 96 | print("There was an error writing out the results to S3") 97 | else: 98 | print("Partition saved.") 99 | 100 | else: 101 | print("Glue Job Bookmark detected no new files to process") 102 | 103 | job.commit() -------------------------------------------------------------------------------- /business-logic/events-processing/build.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2021 Amazon.com, Inc. and its affiliates. All Rights Reserved. 3 | 4 | Licensed under the Amazon Software License (the "License"). 5 | You may not use this file except in compliance with the License. 6 | A copy of the License is located at 7 | 8 | http://aws.amazon.com/asl/ 9 | 10 | or in the "license" file accompanying this file. This file is distributed 11 | on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 12 | express or implied. See the License for the specific language governing 13 | permissions and limitations under the License. 
14 | """ 15 | 16 | import os 17 | import subprocess 18 | import sys 19 | import shutil 20 | 21 | 22 | def exit_on_failure(exit_code, msg): 23 | if exit_code != 0: 24 | print(msg) 25 | exit(exit_code) 26 | 27 | 28 | dir_path = os.path.dirname(os.path.realpath(__file__)) 29 | 30 | npm_cmd = shutil.which("npm") 31 | cmd = [npm_cmd, "install", "--prefix", dir_path] 32 | proc = subprocess.run(cmd, stderr=subprocess.STDOUT, shell=False) 33 | exit_on_failure(proc.returncode, "Web app npm install failed") 34 | -------------------------------------------------------------------------------- /business-logic/events-processing/config/event_schema.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json-schema.org/draft/2020-12/schema", 3 | "title": "Game Analytics JSON Event Schema", 4 | "description": "Format of events that are ingested to the AWS Game Analytics Pipeline", 5 | "type": "object", 6 | "additionalProperties": false, 7 | "properties": { 8 | "event": { 9 | "$ref": "#/definitions/event" 10 | }, 11 | "application_id": { 12 | "type": "string", 13 | "pattern": "^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$", 14 | "description": "The application identifier (UUID) this event is associated with" 15 | } 16 | }, 17 | "required": ["event", "application_id"], 18 | "definitions": { 19 | "event": { 20 | "type": "object", 21 | "additionalProperties": false, 22 | "properties": { 23 | "event_id": { 24 | "type": "string", 25 | "pattern": "^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$", 26 | "description": "The unique identifier for the event, formatted as UUID v4 string." 
27 | }, 28 | "event_type": { 29 | "type": "string", 30 | "pattern": "^[A-Za-z0-9-_.]+$", 31 | "description": "Identifies the type of event" 32 | }, 33 | "event_name": { 34 | "type": "string", 35 | "pattern": "^[A-Za-z0-9-_.]+$", 36 | "description": "Name of the event that occurred" 37 | }, 38 | "event_timestamp": { 39 | "type": "number", 40 | "description": "The time in seconds since the Unix epoch at which this event occurred (set by producer of event)." 41 | }, 42 | "event_version": { 43 | "type": "string", 44 | "pattern": "^[A-Za-z0-9-_.]+$", 45 | "description": "An API version for this event format." 46 | }, 47 | "app_version": { 48 | "type": "string", 49 | "pattern": "^[A-Za-z0-9-_.]+$", 50 | "description": "Version identifier for the application that generated the event" 51 | }, 52 | "event_data": { 53 | "type": "object" 54 | } 55 | }, 56 | "required": ["event_id", "event_type", "event_timestamp", "event_name"] 57 | } 58 | } 59 | } -------------------------------------------------------------------------------- /business-logic/events-processing/index.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | * SPDX-License-Identifier: MIT-0 4 | * 5 | * Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | * software and associated documentation files (the "Software"), to deal in the Software 7 | * without restriction, including without limitation the rights to use, copy, modify, 8 | * merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | * permit persons to whom the Software is furnished to do so. 10 | * 11 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | * PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | */ 18 | 19 | 'use strict'; 20 | 21 | /** 22 | * Lib 23 | */ 24 | 25 | let lib = require('./lib/index.js'); 26 | 27 | exports.handler = function(event, context, callback) { 28 | console.log(`Events processing service received event`); 29 | 30 | lib 31 | .respond(event, context) 32 | .then(data => { 33 | return callback(null, data); 34 | }) 35 | .catch(err => { 36 | return callback(err, null); 37 | }); 38 | }; -------------------------------------------------------------------------------- /business-logic/events-processing/lib/event.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | * SPDX-License-Identifier: MIT-0 4 | * 5 | * Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | * software and associated documentation files (the "Software"), to deal in the Software 7 | * without restriction, including without limitation the rights to use, copy, modify, 8 | * merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | * permit persons to whom the Software is furnished to do so. 10 | * 11 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | * PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
17 | */ 18 | 19 | 'use strict'; 20 | 21 | const AWS = require('aws-sdk'); 22 | const _ = require('underscore'); 23 | const moment = require('moment'); 24 | const event_schema = require('../config/event_schema.json'); 25 | const Ajv2020 = require('ajv/dist/2020'); 26 | 27 | const ajv = new Ajv2020(); 28 | var validate = ajv.compile(event_schema); 29 | 30 | const creds = new AWS.EnvironmentCredentials('AWS'); // Lambda provided credentials 31 | const dynamoConfig = { 32 | credentials: creds, 33 | region: process.env.AWS_REGION 34 | }; 35 | 36 | console.log(`Loaded event JSON Schema: ${JSON.stringify(event_schema)}`); 37 | 38 | class Event { 39 | 40 | constructor() { 41 | this.dynamoConfig = dynamoConfig; 42 | } 43 | 44 | /** 45 | * Process an event record sent to the events stream 46 | * Format processing output in format required by Kinesis Firehose 47 | * @param {JSON} input - game event input payload 48 | * @param {string} recordId - recordId from Kinesis 49 | * @param {JSON} context - AWS Lambda invocation context (https://docs.aws.amazon.com/lambda/latest/dg/nodejs-context.html) 50 | */ 51 | async processEvent(input, recordId, context) { 52 | const _self = this; 53 | try { 54 | // Extract event object and applicationId string from payload. 
application_id and event are required or record fails processing 55 | if(!input.hasOwnProperty('application_id')){ 56 | return Promise.reject({ 57 | recordId: recordId, 58 | result: 'ProcessingFailed', 59 | data: new Buffer.from(JSON.stringify(input) + '\n').toString('base64') 60 | }); 61 | } 62 | if(!input.hasOwnProperty('event')){ 63 | return Promise.reject({ 64 | recordId: recordId, 65 | result: 'ProcessingFailed', 66 | data: new Buffer.from(JSON.stringify(input) + '\n').toString('base64') 67 | }); 68 | } 69 | const applicationId = input.application_id; 70 | const event = input.event; 71 | 72 | // Add a processing timestamp and the Lambda Request Id to the event metadata 73 | let metadata = { 74 | ingestion_id: context.awsRequestId, 75 | processing_timestamp: moment().unix() 76 | }; 77 | 78 | // If event came from Solution API, it should have extra metadata 79 | if (input.aws_ga_api_validated_flag) { 80 | metadata.api = {}; 81 | if (input.aws_ga_api_requestId) { 82 | metadata.api.request_id = input.aws_ga_api_requestId; 83 | delete input.aws_ga_api_requestId; 84 | } 85 | if (input.aws_ga_api_requestTimeEpoch) { 86 | metadata.api.request_time_epoch = input.aws_ga_api_requestTimeEpoch; 87 | delete input.aws_ga_api_requestTimeEpoch; 88 | } 89 | delete input.aws_ga_api_validated_flag; 90 | } 91 | 92 | // Retrieve application config from Applications table 93 | const application = await _self.getApplication(applicationId); 94 | if (application !== null) { 95 | // Validate the input record against solution event schema 96 | const schemaValid = await _self.validateSchema(input); 97 | let transformed_event = {}; 98 | if (schemaValid.validation_result == 'schema_mismatch') { 99 | metadata.processing_result = { 100 | status: 'schema_mismatch', 101 | validation_errors: schemaValid.validation_errors 102 | }; 103 | transformed_event.metadata = metadata; 104 | //console.log(`Errors processing event: ${JSON.stringify(errors)}`); 105 | } else { 106 | metadata.processing_result 
= { 107 | status: 'ok' 108 | }; 109 | transformed_event.metadata = metadata; 110 | } 111 | 112 | if(event.hasOwnProperty('event_id')){ 113 | transformed_event.event_id = String(event.event_id); 114 | } 115 | if(event.hasOwnProperty('event_type')){ 116 | transformed_event.event_type = String(event.event_type); 117 | } 118 | if(event.hasOwnProperty('event_name')){ 119 | transformed_event.event_name = String(event.event_name); 120 | } 121 | if(event.hasOwnProperty('event_version')){ 122 | transformed_event.event_version = String(event.event_version); 123 | } 124 | if(event.hasOwnProperty('event_timestamp')){ 125 | transformed_event.event_timestamp = Number(event.event_timestamp); 126 | } 127 | if(event.hasOwnProperty('app_version')){ 128 | transformed_event.app_version = String(event.app_version); 129 | } 130 | if(event.hasOwnProperty('event_data')){ 131 | transformed_event.event_data = event.event_data; 132 | } 133 | 134 | transformed_event.application_name = String(application.application_name); 135 | transformed_event.application_id = String(applicationId); 136 | 137 | return Promise.resolve({ 138 | recordId: recordId, 139 | result: 'Ok', 140 | data: new Buffer.from(JSON.stringify(transformed_event) + '\n').toString('base64') 141 | }); 142 | } else { 143 | /** 144 | * Handle events from unregistered ("NOT_FOUND") applications 145 | * Sets processing result as "unregistered" 146 | * We don't attempt to validate schema of unregistered events, we just coerce the necessary fields into expected format 147 | */ 148 | metadata.processing_result = { 149 | status: 'unregistered' 150 | }; 151 | let unregistered_format = {}; 152 | unregistered_format.metadata = metadata; 153 | 154 | if(event.hasOwnProperty('event_id')){ 155 | unregistered_format.event_id = String(event.event_id); 156 | } 157 | if(event.hasOwnProperty('event_type')){ 158 | unregistered_format.event_type = String(event.event_type); 159 | } 160 | if(event.hasOwnProperty('event_name')){ 161 | 
unregistered_format.event_name = String(event.event_name); 162 | } 163 | if(event.hasOwnProperty('event_version')){ 164 | unregistered_format.event_version = String(event.event_version); 165 | } 166 | if(event.hasOwnProperty('event_timestamp')){ 167 | unregistered_format.event_timestamp = Number(event.event_timestamp); 168 | } 169 | if(event.hasOwnProperty('app_version')){ 170 | unregistered_format.app_version = String(event.app_version); 171 | } 172 | if(event.hasOwnProperty('event_data')){ 173 | unregistered_format.event_data = event.event_data; 174 | } 175 | 176 | // Even though the application_id is not registered, let's add it to the event 177 | unregistered_format.application_id = String(applicationId); 178 | 179 | return Promise.resolve({ 180 | recordId: recordId, 181 | result: 'Ok', 182 | data: new Buffer.from(JSON.stringify(unregistered_format) + '\n').toString('base64') 183 | }); 184 | } 185 | } catch (err) { 186 | console.log(`Error processing record: ${JSON.stringify(err)}`); 187 | return Promise.reject({ 188 | recordId: recordId, 189 | result: 'ProcessingFailed', 190 | data: new Buffer.from(JSON.stringify(input) + '\n').toString('base64') 191 | }); 192 | } 193 | } 194 | 195 | /** 196 | * Retrieve application from DynamoDB 197 | * Fetches from and updates the local registered applications cache with results 198 | */ 199 | async getApplication(applicationId) { 200 | const params = { 201 | TableName: process.env.APPLICATIONS_TABLE, 202 | Key: { 203 | application_id: applicationId 204 | } 205 | }; 206 | 207 | // first try to fetch from cache 208 | let applicationsCacheResult = global.applicationsCache.get(applicationId); 209 | if (applicationsCacheResult == 'NOT_FOUND') { 210 | // if already marked not found, skip processing. 
Applications will remain "NOT_FOUND" until the cache refresh 211 | return Promise.resolve(null); 212 | } else if (applicationsCacheResult == undefined) { 213 | // get from DynamoDB and set in Applications cache 214 | const docClient = new AWS.DynamoDB.DocumentClient(this.dynamoConfig); 215 | try { 216 | let data = await docClient.get(params).promise(); 217 | if (!_.isEmpty(data)) { 218 | // if found in ddb, set in cache and return it 219 | global.applicationsCache.set(applicationId, data.Item); 220 | return Promise.resolve(data.Item); 221 | } else { 222 | // if application isn't registered in dynamodb, set not found in cache 223 | console.log(`Application ${applicationId} not found in DynamoDB`); 224 | global.applicationsCache.set(applicationId, 'NOT_FOUND'); 225 | return Promise.resolve(null); 226 | } 227 | } catch (err) { 228 | console.log(JSON.stringify(err)); 229 | return Promise.reject(err); 230 | } 231 | } else { 232 | // if in cache, return it 233 | return Promise.resolve(applicationsCacheResult); 234 | } 235 | } 236 | 237 | /** 238 | * Validate input data against JSON schema 239 | */ 240 | async validateSchema(data) { 241 | try { 242 | let valid = validate(data); 243 | if (!valid) { 244 | let errors = validate.errors; 245 | return Promise.resolve({ 246 | validation_result: 'schema_mismatch', 247 | validation_errors: errors 248 | }); 249 | } else { 250 | return Promise.resolve({ 251 | validation_result: 'ok' 252 | }); 253 | } 254 | } catch (err) { 255 | console.log(`There was an error validating the schema ${JSON.stringify(err)}`); 256 | return Promise.reject(err); 257 | } 258 | } 259 | } 260 | 261 | 262 | module.exports = Event; -------------------------------------------------------------------------------- /business-logic/events-processing/lib/index.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | * SPDX-License-Identifier: MIT-0 4 | * 5 | * Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | * software and associated documentation files (the "Software"), to deal in the Software 7 | * without restriction, including without limitation the rights to use, copy, modify, 8 | * merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | * permit persons to whom the Software is furnished to do so. 10 | * 11 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | * PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | */ 18 | 19 | 'use strict'; 20 | 21 | /** 22 | * Lib 23 | */ 24 | 25 | const AWS = require('aws-sdk'); 26 | const NodeCache = require( 'node-cache'); 27 | const Event = require('./event.js'); 28 | 29 | /** 30 | * Applications table results cache 31 | * Maintains a local cache of registered Applications in DynamoDB. 
32 | */ 33 | global.applicationsCache = new NodeCache({stdTTL: process.env.CACHE_TIMEOUT_SECONDS, checkPeriod: 60, maxKeys: 1000, useClones: false}); 34 | 35 | const respond = async (event, context) => { 36 | let validEvents = 0; 37 | let invalidEvents = 0; 38 | let results = []; 39 | let _event = new Event(); 40 | 41 | for (const record of event.records) { 42 | try { 43 | // Kinesis data is base64 encoded so decode here 44 | const payload = JSON.parse(Buffer.from(record.data, 'base64')); 45 | const processEvent = await _event.processEvent(payload, record.recordId, context); 46 | if (processEvent.result === 'Ok') { 47 | validEvents++; 48 | } else { 49 | invalidEvents++; 50 | } 51 | results.push(processEvent); 52 | } catch (err) { 53 | console.log(JSON.stringify(err)); 54 | invalidEvents++; 55 | results.push({ 56 | recordId: record.recordId, 57 | result: 'ProcessingFailed', 58 | data: record.data 59 | }); 60 | } 61 | } 62 | console.log(JSON.stringify({ 63 | 'InputEvents': event.records.length, 64 | 'EventsProcessedStatusOk': validEvents, 65 | 'EventsProcessedStatusFailed': invalidEvents 66 | })); 67 | return Promise.resolve({ 68 | records: results 69 | }); 70 | }; 71 | 72 | module.exports = { 73 | respond 74 | }; -------------------------------------------------------------------------------- /business-logic/events-processing/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "game-analytics-events-processing", 3 | "description": "The events processor microservice for the game analytics pipeline solution", 4 | "main": "index.js", 5 | "version": "0.0.1", 6 | "private": true, 7 | "dependencies": { 8 | "ajv": "*", 9 | "aws-sdk": "*", 10 | "moment": "*", 11 | "node-cache": "*", 12 | "underscore": "*" 13 | }, 14 | "devDependencies": { 15 | "aws-sdk": "*", 16 | "npm-run-all": "*" 17 | }, 18 | "scripts": { 19 | "build:init": "rm -rf package-lock.json && rm -rf dist && rm -rf node_modules", 20 | "build:zip": "zip 
-rq events-processing.zip .", 21 | "build:dist": "mkdir dist && mv events-processing.zip dist/", 22 | "build": "npm run build:init && npm install --production && npm run build:zip && npm run build:dist" 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /business-logic/publish-data/handler.py: -------------------------------------------------------------------------------- 1 | ###################################################################################################################### 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: MIT-0 4 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this 5 | # software and associated documentation files (the "Software"), to deal in the Software 6 | # without restriction, including without limitation the rights to use, copy, modify, 7 | # merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 8 | # permit persons to whom the Software is furnished to do so. 9 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 10 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 11 | # PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 12 | # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 13 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 14 | # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
15 | ###################################################################################################################### 16 | 17 | import boto3.session 18 | import json 19 | import random 20 | from random import choice 21 | import time 22 | from datetime import datetime 23 | import uuid 24 | import os 25 | import numpy 26 | import argparse 27 | 28 | # Event Payload defaults 29 | DEFAULT_EVENT_VERSION = '1.0.0' 30 | DEFAULT_BATCH_SIZE = 100 31 | 32 | 33 | def parse_cmd_line(): 34 | """Parse the command line and extract the necessary values.""" 35 | 36 | parser = argparse.ArgumentParser(description='Send data to a Kinesis stream for analytics. By default, the script ' 37 | 'will send events infinitely. If an input file is specified, the ' 38 | 'script will instead read and transmit all of the events contained ' 39 | 'in the file and then terminate.') 40 | 41 | # REQUIRED arguments 42 | kinesis_regions = boto3.session.Session().get_available_regions('kinesis') 43 | parser.add_argument('--region', required=True, choices=kinesis_regions, type=str, 44 | dest='region_name', metavar='kinesis_aws_region', 45 | help='The AWS region where the Kinesis stream is located.') 46 | parser.add_argument('--stream-name', required=True, type=str, dest='stream_name', 47 | help='The name of the Kinesis stream to publish to. Must exist in the specified region.') 48 | parser.add_argument('--application-id', required=True, type=str, dest='application_id', 49 | help='The application_id to use when submitting events to ths stream (i.e. You can use the default application for testing).') 50 | # OPTIONAL arguments 51 | parser.add_argument('--batch-size', type=int, dest='batch_size', default=DEFAULT_BATCH_SIZE, 52 | help='The number of events to send at once using the Kinesis PutRecords API.') 53 | parser.add_argument('--input-filename', type=str, dest='input_filename', 54 | help='Send events from a file rather than randomly generate them. 
The format of the file' 55 | ' should be one JSON-formatted event per line.') 56 | 57 | return parser.parse_args() 58 | 59 | # Returns array of UUIDS. Used for generating sets of random event data 60 | 61 | 62 | def getUUIDs(dataType, count): 63 | uuids = [] 64 | for i in range(0, count): 65 | uuids.append(str(uuid.uuid4())) 66 | return uuids 67 | 68 | # Randomly choose an event type from preconfigured options 69 | 70 | 71 | def getEventType(): 72 | event_types = { 73 | 1: 'user_registration', 74 | 2: 'user_knockout', 75 | 3: 'item_viewed', 76 | 4: 'iap_transaction', 77 | 5: 'login', 78 | 6: 'logout', 79 | 7: 'tutorial_progression', 80 | 8: 'user_rank_up', 81 | 9: 'matchmaking_start', 82 | 10: 'matchmaking_complete', 83 | 11: 'matchmaking_failed', 84 | 12: 'match_start', 85 | 13: 'match_end', 86 | 14: 'level_started', 87 | 15: 'level_completed', 88 | 16: 'level_failed', 89 | 17: 'lootbox_opened', 90 | 18: 'user_report', 91 | 19: 'user_sentiment' 92 | } 93 | return event_types[numpy.random.choice([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19], 1, p=[0.04, 0.05, 0.18, 0.02, 0.1, 0.06, 0.04, 0.03, 0.025, 0.025, 0.01, 0.03, 0.03, 0.08, 0.08, 0.08, 0.04, 0.04, 0.04])[0]] 94 | 95 | # Generate a randomized event from preconfigured sample data 96 | 97 | 98 | def getEvent(event_type): 99 | 100 | levels = [ 101 | '1', 102 | '2', 103 | '3', 104 | '4', 105 | '5' 106 | ] 107 | 108 | countries = [ 109 | 110 | 'UNITED STATES', 111 | 'UK', 112 | 'JAPAN', 113 | 'SINGAPORE', 114 | 'AUSTRALIA', 115 | 'BRAZIL', 116 | 'SOUTH KOREA', 117 | 'GERMANY', 118 | 'CANADA', 119 | 'FRANCE' 120 | ] 121 | 122 | items = getUUIDs('items', 10) 123 | 124 | currencies = [ 125 | 'USD', 126 | 'EUR', 127 | 'YEN', 128 | 'RMB' 129 | ] 130 | 131 | platforms = [ 132 | 'nintendo_switch', 133 | 'ps4', 134 | 'xbox_360', 135 | 'iOS', 136 | 'android', 137 | 'pc', 138 | 'fb_messenger' 139 | ] 140 | 141 | tutorial_screens = [ 142 | '1_INTRO', 143 | '2_MOVEMENT', 144 | '3_WEAPONS', 145 | 
'4_FINISH' 146 | ] 147 | 148 | match_types = [ 149 | '1v1', 150 | 'TEAM_DM_5v5', 151 | 'CTF' 152 | ] 153 | 154 | matching_failed_msg = [ 155 | 'timeout', 156 | 'user_quit', 157 | 'too_few_users' 158 | ] 159 | 160 | maps = [ 161 | 'WAREHOUSE', 162 | 'CASTLE', 163 | 'AIRPORT' 164 | ] 165 | 166 | game_results = [ 167 | 'WIN', 168 | 'LOSE', 169 | 'KICKED', 170 | 'DISCONNECTED', 171 | 'QUIT' 172 | ] 173 | 174 | spells = [ 175 | 'WATER', 176 | 'EARTH', 177 | 'FIRE', 178 | 'AIR' 179 | ] 180 | 181 | ranks = [ 182 | '1_BRONZE', 183 | '2_SILVER', 184 | '3_GOLD', 185 | '4_PLATINUM', 186 | '5_DIAMOND', 187 | '6_MASTER' 188 | ] 189 | 190 | item_rarities = [ 191 | 'COMMON', 192 | 'UNCOMMON', 193 | 'RARE', 194 | 'LEGENDARY' 195 | 196 | ] 197 | 198 | report_reasons = [ 199 | 'GRIEFING', 200 | 'CHEATING', 201 | 'AFK', 202 | 'RACISM/HARASSMENT' 203 | 204 | ] 205 | 206 | switcher = { 207 | 'login': { 208 | 'event_data': { 209 | 'platform': str(numpy.random.choice(platforms, 1, p=[0.2, 0.1, 0.3, 0.15, 0.1, 0.05, 0.1])[0]), 210 | 'last_login_time': int(time.time())-random.randint(40000, 4000000) 211 | } 212 | }, 213 | 214 | 'logout': { 215 | 'event_data': { 216 | 'last_screen_seen': 'the last screen' 217 | } 218 | }, 219 | 220 | 'client_latency': { 221 | 'event_data': { 222 | 'latency': numpy.random.choice((random.randint(40, 185), 1)), 223 | 'connected_server_id': str(random.choice(getUUIDs("servers", 3))), 224 | 'region': str(random.choice(countries)) 225 | } 226 | }, 227 | 228 | 'user_registration': { 229 | 'event_data': { 230 | 'country_id': str(numpy.random.choice(countries, 1, p=[0.3, 0.1, 0.2, 0.05, 0.05, 0.02, 0.15, 0.05, 0.03, 0.05])[0]), 231 | 'platform': str(numpy.random.choice(platforms, 1, p=[0.2, 0.1, 0.3, 0.15, 0.1, 0.05, 0.1])[0]) 232 | } 233 | }, 234 | 235 | 'user_knockout': { 236 | 'event_data': { 237 | 'match_id': str(random.choice(MATCHES)), 238 | 'map_id': str(numpy.random.choice(maps, 1, p=[0.6, 0.3, 0.1])[0]), 239 | 'spell_id': str(numpy.random.choice(spells, 1, 
p=[0.1, 0.4, 0.3, 0.2])[0]), 240 | 'exp_gained': random.randint(1, 2) 241 | } 242 | }, 243 | 244 | 'item_viewed': { 245 | 'event_data': { 246 | 'item_id': str(numpy.random.choice(items, 1, p=[0.125, 0.11, 0.35, 0.125, 0.04, 0.01, 0.07, 0.1, 0.05, 0.02])[0]), 247 | 'item_version': random.randint(1, 2) 248 | } 249 | }, 250 | 251 | 'iap_transaction': { 252 | 'event_data': { 253 | 'item_id': str(numpy.random.choice(items, 1, p=[0.125, 0.11, 0.35, 0.125, 0.04, 0.01, 0.07, 0.1, 0.05, 0.02])[0]), 254 | 'item_version': random.randint(1, 2), 255 | 'item_amount': random.randint(1, 4), 256 | 'currency_type': str(numpy.random.choice(currencies, 1, p=[0.7, 0.15, 0.1, 0.05])[0]), 257 | 'country_id': str(numpy.random.choice(countries, 1, p=[0.3, 0.1, 0.2, 0.05, 0.05, 0.02, 0.15, 0.05, 0.03, 0.05])[0]), 258 | 'currency_amount': random.randint(1, 10), 259 | 'transaction_id': str(uuid.uuid4()) 260 | } 261 | }, 262 | 263 | 'tutorial_progression': { 264 | 'event_data': { 265 | 'tutorial_screen_id': str(numpy.random.choice(tutorial_screens, 1, p=[0.3, 0.3, 0.2, 0.2])[0]), 266 | 'tutorial_screen_version': random.randint(1, 2) 267 | } 268 | }, 269 | 270 | 'user_rank_up': { 271 | 'event_data': { 272 | 'user_rank_reached': str(numpy.random.choice(ranks, 1, p=[0.25, 0.35, 0.2, 0.15, 0.0499, 0.0001])[0]) 273 | } 274 | }, 275 | 276 | 'matchmaking_start': { 277 | 'event_data': { 278 | 'match_id': str(random.choice(MATCHES)), 279 | 'match_type': str(numpy.random.choice(match_types, 1, p=[0.4, 0.3, 0.3])[0]) 280 | } 281 | }, 282 | 283 | 'matchmaking_complete': { 284 | 'event_data': { 285 | 'match_id': str(random.choice(MATCHES)), 286 | 'match_type': str(numpy.random.choice(match_types, 1, p=[0.6, 0.2, 0.2])[0]), 287 | 'matched_slots': random.randrange(start=6, stop=10) 288 | } 289 | }, 290 | 291 | 'matchmaking_failed': { 292 | 'event_data': { 293 | 'match_id': str(random.choice(MATCHES)), 294 | 'match_type': str(numpy.random.choice(match_types, 1, p=[0.35, 0.2, 0.45])[0]), 295 | 'matched_slots': 
random.randrange(start=1, stop=10), 296 | 'matching_failed_msg': str(numpy.random.choice(matching_failed_msg, 1, p=[0.35, 0.2, 0.45])[0]) 297 | } 298 | }, 299 | 300 | 'match_start': { 301 | 'event_data': { 302 | 'match_id': str(random.choice(MATCHES)), 303 | 'map_id': str(numpy.random.choice(maps, 1, p=[0.3, 0.3, 0.4])[0]) 304 | } 305 | }, 306 | 307 | 'match_end': { 308 | 'event_data': { 309 | 'match_id': str(random.choice(MATCHES)), 310 | 'map_id': str(numpy.random.choice(maps, 1, p=[0.3, 0.3, 0.4])[0]), 311 | 'match_result_type': str(numpy.random.choice(game_results, 1, p=[0.4, 0.4, 0.05, 0.05, 0.1])[0]), 312 | 'exp_gained': random.randrange(start=100, stop=200), 313 | 'most_used_spell': str(numpy.random.choice(spells, 1, p=[0.1, 0.4, 0.2, 0.3])[0]) 314 | } 315 | }, 316 | 317 | 'level_started': { 318 | 'event_data': { 319 | 'level_id': str(numpy.random.choice(levels, 1, p=[0.2, 0.2, 0.2, 0.2, 0.2])[0]), 320 | 'level_version': random.randint(1, 2) 321 | } 322 | }, 323 | 'level_completed': { 324 | 'event_data': { 325 | 'level_id': str(numpy.random.choice(levels, 1, p=[0.6, 0.2, 0.12, 0.05, 0.03])[0]), 326 | 'level_version': random.randint(1, 2) 327 | } 328 | }, 329 | 'level_failed': { 330 | 'event_data': { 331 | 'level_id': str(numpy.random.choice(levels, 1, p=[0.001, 0.049, 0.05, 0.3, 0.6])[0]), 332 | 'level_version': random.randint(1, 2) 333 | } 334 | }, 335 | 336 | 'lootbox_opened': { 337 | 'event_data': { 338 | 'lootbox_id': str(uuid.uuid4()), 339 | 'lootbox_cost': random.randint(2, 5), 340 | 'item_rarity': str(numpy.random.choice(item_rarities, 1, p=[0.5, 0.3, 0.17, .03])[0]), 341 | 'item_id': str(numpy.random.choice(items, 1, p=[0.125, 0.11, 0.35, 0.125, 0.04, 0.01, 0.07, 0.1, 0.05, 0.02])[0]), 342 | 'item_version': random.randint(1, 2), 343 | 'item_cost': random.randint(1, 5) 344 | } 345 | }, 346 | 347 | 'user_report': { 348 | 'event_data': { 349 | 'report_id': str(uuid.uuid4()), 350 | 'report_reason': str(numpy.random.choice(report_reasons, 1, p=[0.2, 0.5, 
0.1, 0.2])[0]) 351 | } 352 | }, 353 | 354 | 'user_sentiment': { 355 | 'event_data': { 356 | 'user_rating': random.randint(1, 5) 357 | } 358 | } 359 | } 360 | 361 | return switcher[event_type] 362 | 363 | 364 | # Take an event type, get event data for it and then merge that event-specific data with the default event fields to create a complete event 365 | def generate_event(): 366 | event_type = getEventType() 367 | # Within the demo script the event_name is set same as event_type for simplicity. 368 | # In many use cases multiple events could exist under a common event type which can enable you to build a richer data taxonomy. 369 | event_name = event_type 370 | event_data = getEvent(event_type) 371 | event = { 372 | 'event_version': DEFAULT_EVENT_VERSION, 373 | 'event_id': str(uuid.uuid4()), 374 | 'event_type': event_type, 375 | 'event_name': event_name, 376 | 'event_timestamp': int(time.time()), 377 | 'app_version': str(numpy.random.choice(['1.0.0', '1.1.0', '1.2.0'], 1, p=[0.05, 0.80, 0.15])[0]) 378 | } 379 | 380 | event.update(event_data) 381 | return event 382 | 383 | 384 | def send_record_batch(kinesis_client, stream_name, raw_records): 385 | """Send a batch of records to Amazon Kinesis.""" 386 | 387 | # Translate input records into the format needed by the boto3 SDK 388 | formatted_records = [] 389 | for rec in raw_records: 390 | formatted_records.append( 391 | {'PartitionKey': rec['event']['event_id'], 'Data': json.dumps(rec)}) 392 | kinesis_client.put_records( 393 | StreamName=stream_name, Records=formatted_records) 394 | print('Sent %d records to stream %s.' 
% 395 | (len(formatted_records), stream_name)) 396 | 397 | 398 | def send_events_infinite(kinesis_client, stream_name, batch_size, application_id): 399 | """Send a batches of randomly generated events to Amazon Kinesis.""" 400 | 401 | while True: 402 | records = [] 403 | # Create a batch of random events to send 404 | for i in range(0, batch_size): 405 | event_dict = generate_event() 406 | record = { 407 | 'event': event_dict, 408 | 'application_id': application_id 409 | } 410 | records.append(record) 411 | send_record_batch(kinesis_client, stream_name, records) 412 | time.sleep(random.randint(1, 7)) 413 | 414 | 415 | def send_events_bulk(kinesis_client, stream_name, batch_size, application_id): 416 | """Send a batches of randomly generated events to Amazon Kinesis.""" 417 | 418 | while range(0, 100): 419 | records = [] 420 | # Create a batch of random events to send 421 | for i in range(0, batch_size): 422 | event_dict = generate_event() 423 | record = { 424 | 'event': event_dict, 425 | 'application_id': application_id 426 | } 427 | records.append(record) 428 | send_record_batch(kinesis_client, stream_name, records) 429 | time.sleep(random.randint(1, 7)) 430 | 431 | 432 | def send_data(params): 433 | aws_region = params['region_name'] 434 | kinesis_stream = params['stream_name'] 435 | batch_size = params['batch_size'] or DEFAULT_BATCH_SIZE 436 | application_id = params['application_id'] 437 | 438 | print('===========================================') 439 | print('CONFIGURATION PARAMETERS:') 440 | print('- KINESIS_STREAM: ' + kinesis_stream) 441 | print('- AWS_REGION: ' + aws_region) 442 | print('- APPLICATION_ID: ' + application_id) 443 | print('===========================================\n') 444 | 445 | session = boto3.Session() 446 | client = session.client('kinesis', region_name=aws_region) 447 | 448 | send_events_bulk(client, kinesis_stream, batch_size, application_id) 449 | 450 | 451 | # Set Global value for Server and Match id 452 | SERVERS = 
getUUIDs('servers', 3) 453 | MATCHES = getUUIDs('matches', 50) 454 | 455 | 456 | def handler(event, context): 457 | defaultInput = { 458 | 'region_name': os.environ['REGION_NAME'], 459 | 'stream_name': os.environ['STREAM_NAME'], 460 | 'application_id': os.environ['APPLICATION_ID'], 461 | 'batch_size': DEFAULT_BATCH_SIZE 462 | } 463 | 464 | send_data(defaultInput) 465 | 466 | 467 | if __name__ == '__main__': 468 | args = parse_cmd_line() 469 | send_data(vars(args)) 470 | -------------------------------------------------------------------------------- /business-logic/publish-data/requirements.txt: -------------------------------------------------------------------------------- 1 | boto3 2 | numpy 3 | argparse -------------------------------------------------------------------------------- /business-logic/solution-helper/build.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2021 Amazon.com, Inc. and its affiliates. All Rights Reserved. 3 | 4 | Licensed under the Amazon Software License (the "License"). 5 | You may not use this file except in compliance with the License. 6 | A copy of the License is located at 7 | 8 | http://aws.amazon.com/asl/ 9 | 10 | or in the "license" file accompanying this file. This file is distributed 11 | on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 12 | express or implied. See the License for the specific language governing 13 | permissions and limitations under the License. 
14 | """ 15 | 16 | import os 17 | import subprocess 18 | import sys 19 | import shutil 20 | 21 | 22 | def exit_on_failure(exit_code, msg): 23 | if exit_code != 0: 24 | print(msg) 25 | exit(exit_code) 26 | 27 | 28 | dir_path = os.path.dirname(os.path.realpath(__file__)) 29 | 30 | npm_cmd = shutil.which("npm") 31 | cmd = [npm_cmd, "install", "--prefix", dir_path] 32 | proc = subprocess.run(cmd, stderr=subprocess.STDOUT, shell=False) 33 | exit_on_failure(proc.returncode, "Web app npm install failed") 34 | -------------------------------------------------------------------------------- /business-logic/solution-helper/lib/athena-helper.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | * SPDX-License-Identifier: MIT-0 4 | * 5 | * Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | * software and associated documentation files (the "Software"), to deal in the Software 7 | * without restriction, including without limitation the rights to use, copy, modify, 8 | * merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | * permit persons to whom the Software is furnished to do so. 10 | * 11 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | * PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | */ 18 | 19 | 'use strict'; 20 | 21 | let AWS = require('aws-sdk'); 22 | 23 | /** 24 | * Helper function to interact with Athena for cfn custom resource. 
25 | * 26 | * @class athenaHelper 27 | */ 28 | class athenaHelper { 29 | /** 30 | * @class athenaHelper 31 | * @constructor 32 | */ 33 | constructor() { 34 | this.creds = new AWS.EnvironmentCredentials('AWS'); // Lambda provided credentials 35 | this.config = { 36 | credentials: this.creds, 37 | region: process.env.AWS_REGION, 38 | }; 39 | } 40 | 41 | createDefaultNamedQuery(database, name, workgroupName, description, queryString) { 42 | return new Promise((resolve, reject) => { 43 | let athena = new AWS.Athena(this.config); 44 | const params = { 45 | Database: database, 46 | Name: name, 47 | WorkGroup: workgroupName, 48 | Description: description, 49 | QueryString: queryString 50 | }; 51 | 52 | athena.createNamedQuery(params, function(err, data) { 53 | if (err) { 54 | console.log(JSON.stringify(err)); 55 | reject(err); 56 | } else { 57 | console.log(data); 58 | resolve(data); 59 | } 60 | }); 61 | }); 62 | } 63 | } 64 | 65 | module.exports = athenaHelper; 66 | -------------------------------------------------------------------------------- /business-logic/solution-helper/lib/cloudwatch-helper.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | * SPDX-License-Identifier: MIT-0 4 | * 5 | * Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | * software and associated documentation files (the "Software"), to deal in the Software 7 | * without restriction, including without limitation the rights to use, copy, modify, 8 | * merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | * permit persons to whom the Software is furnished to do so. 10 | * 11 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | * PARTICULAR PURPOSE AND NONINFRINGEMENT. 
'use strict';

let AWS = require('aws-sdk');

/**
 * Helper function to interact with CloudWatch for cfn custom resource.
 *
 * @class cloudwatchHelper
 */
class cloudwatchHelper {
    /**
     * @class cloudwatchHelper
     * @constructor
     */
    constructor() {
        this.creds = new AWS.EnvironmentCredentials('AWS'); // Lambda provided credentials
        this.config = {
            credentials: this.creds,
            region: process.env.AWS_REGION,
        };
    }

    /**
     * Delete the named CloudWatch dashboard.
     *
     * @param {string} dashboardName - name of the dashboard to delete
     * @returns {Promise<Object>} resolves with the DeleteDashboards response
     */
    deleteDashboard(dashboardName) {
        return new Promise((resolve, reject) => {
            let cloudwatch = new AWS.CloudWatch(this.config);
            const params = {
                DashboardNames: [
                    dashboardName
                ]
            };

            cloudwatch.deleteDashboards(params, function(err, data) {
                if (err) {
                    console.log(JSON.stringify(err));
                    reject(err);
                } else {
                    console.log(data);
                    resolve(data);
                }
            })
        })
    }

    /**
     * Build the solution's operational-health dashboard definition and publish
     * it via PutDashboard.
     *
     * @param {Object} event - custom resource properties. Reads
     *     event.DashboardName, event.Kinesis.*, event.GameAnalyticsApi.*,
     *     event.Functions.* and event.StreamingAnalyticsEnabled.
     *     NOTE(review): StreamingAnalyticsEnabled is compared against the
     *     string 'true', not a boolean — confirm the caller passes a string.
     * @returns {Promise<Object>} resolves with the PutDashboard response
     */
    createDashboard(event) {
        let cloudwatch = new AWS.CloudWatch(this.config);
        console.log(`Generate dashboard with input params: ${JSON.stringify(event)}`);
        let widgets = [];
        // Dashboard Header
        widgets.push({
            type: 'text',
            x: 0,
            y: 0,
            width: 24,
            height: 2,
            properties: {
                markdown: '\n# **Game Analytics Pipeline - Operational Health**\nThis dashboard contains operational metrics for the Game Analytics Pipeline. Use these metrics to help you monitor the operational status of the AWS services used in the solution and track important application metrics.\n'
            }
        });

        // Stream Ingestion and Processing Header
        widgets.push({
            type: 'text',
            x: 0,
            y: 2,
            width: 12,
            height: 3,
            properties: {
                markdown: `\n## Stream Ingestion & Processing\nThis section covers metrics related to ingestion of data into the solution's Events Stream and processing by Kinesis Data Firehose and AWS Lambda Events Processing Function. Use the metrics here to track data freshness/latency and any issues with processor throttling/errors. \n`
            }
        });

        // Events Ingestion and Delivery
        widgets.push({
            type: 'metric',
            x: 0,
            y: 8,
            width: 6,
            height: 6,
            properties: {
                metrics: [
                    ['AWS/Kinesis', 'IncomingRecords', 'StreamName', event.Kinesis.GameEventsStream, {id: 'records', color: '#2ca02c', label: 'Events Stream Incoming Records (Kinesis)'}],
                    ['AWS/Firehose', 'DeliveryToS3.Records', 'DeliveryStreamName', event.Kinesis.GameEventsFirehose, {id: 'delivered', label: 'Firehose Records Delivered to S3', color: '#17becf'}],
                    ['AWS/ApiGateway', 'Count', 'ApiName', event.GameAnalyticsApi.Name, 'Resource', '/applications/{applicationId}/events', 'Stage', event.GameAnalyticsApi.Stage, 'Method', 'POST', {label: 'Events REST API Request Count', color: '#1f77b4'}]
                ],
                view: 'timeSeries',
                stacked: false,
                region: this.config.region,
                title: 'Events Ingestion and Delivery',
                stat: 'Sum',
                yAxis: {
                    left: {
                        label: 'Count',
                        showUnits: false
                    }
                }
            }
        });

        // Events Processing Function Error and Success Rate.
        // Success rate is derived from the errors/invocations series via the
        // metric-math expression below and plotted on the right-hand axis.
        widgets.push({
            type: 'metric',
            x: 6,
            y: 8,
            width: 6,
            height: 6,
            properties: {
                metrics: [
                    ['AWS/Lambda','Errors', 'FunctionName', event.Functions.EventsProcessingFunction, 'Resource', event.Functions.EventsProcessingFunctionArn, {id: 'errors', stat: 'Sum', color: '#d13212', region: this.config.region}],
                    ['AWS/Lambda', 'Invocations', 'FunctionName', event.Functions.EventsProcessingFunction, 'Resource', event.Functions.EventsProcessingFunctionArn, {id: 'invocations', stat: 'Sum', visible: false, region: this.config.region}],
                    [{ expression: '100 - 100 * errors / MAX([errors, invocations])', label: 'Success rate (%)', id: 'availability', yAxis: 'right', region: this.config.region}]
                ],
                region: this.config.region,
                title: 'Lambda Error count and success rate (%)',
                period: 60,
                stat: 'Sum',
                yAxis: {
                    right: {
                        max: 100,
                        label: 'Percent',
                        showUnits: false
                    },
                    left: {
                        showUnits: false,
                        label: ''
                    }
                },
                view: 'timeSeries',
                stacked: false
            }
        });

        // Events Processing Health (single-value summary row)
        widgets.push({
            type: 'metric',
            x: 0,
            y: 5,
            width: 12,
            height: 3,
            properties: {
                metrics: [
                    ['AWS/Firehose', 'DeliveryToS3.DataFreshness', 'DeliveryStreamName', event.Kinesis.GameEventsFirehose, {visible: true, label: 'Data Freshness', period: 300, id: 'datafreshness', stat: 'Maximum'}],
                    ['AWS/Lambda', 'Duration', 'FunctionName', event.Functions.EventsProcessingFunction, 'Resource', event.Functions.EventsProcessingFunctionArn, {id: 'duration', label: 'Lambda Duration', stat: 'Average', period: 300}],
                    ['AWS/Lambda', 'ConcurrentExecutions', 'FunctionName', event.Functions.EventsProcessingFunction, {visible: true, label: 'Lambda Concurrency', id: 'concurrency', stat: 'Maximum', period: 300}],
                    ['AWS/Lambda', 'Throttles', 'FunctionName', event.Functions.EventsProcessingFunction, {label: 'Lambda Throttles', id: 'throttles', visible: true, stat: 'Sum', period: 300}]
                ],
                view: 'singleValue',
                stacked: true,
                region: this.config.region,
                title: 'Events Processing Health',
                stat: 'Average'
            }
        });

        // Optionally create widgets for streaming analytics solution components if enabled
        if (event.StreamingAnalyticsEnabled === 'true') {
            // Streaming Analytics Header
            widgets.push({
                type: 'text',
                x: 12,
                y: 2,
                width: 12,
                height: 3,
                properties: {
                    markdown: '\n## Real-time Streaming Analytics\nThe below metrics can be used to monitor the real-time streaming SQL analytics of events. Use the Kinesis Data Analytics MillisBehindLatest metric to help you track the lag on the Kinesis SQL Application from the latest events. The Analytics Processing function that processes KDA application outputs can be tracked to measure function concurrency, success percentage, processing duration and throttles.\n'
                }
            });

            // Streaming Analytics Function ('.' entries repeat the previous
            // row's namespace/dimension values, per CloudWatch widget syntax)
            widgets.push({
                type: 'metric',
                properties: {
                    metrics: [
                        ['AWS/Lambda', 'ConcurrentExecutions', 'FunctionName', event.Functions.AnalyticsProcessingFunction, {label: 'Analytics Processing Concurrent Executions', stat: 'Maximum', id: 'concurrency'}],
                        ['.', 'Duration', '.', '.', {label: 'Lambda Duration', id: 'duration', stat: 'Average'}],
                        ['.', 'Throttles', '.', '.', {label: 'Lambda Throttles', id: 'throttles'}]
                    ],
                    view: 'singleValue',
                    region: this.config.region,
                    title: 'Real-time Analytics Health',
                    stat: 'Sum',
                    setPeriodToTimeRange: false
                },
                x: 12,
                y: 5,
                width: 12,
                height: 3
            });

            // Kinesis Analytics SQL Application
            widgets.push({
                type: 'metric',
                x: 12,
                y: 8,
                width: 6,
                height: 6,
                properties: {
                    metrics: [
                        ['AWS/KinesisAnalytics', 'MillisBehindLatest', 'Id', '1.1', 'Application', event.Kinesis.KinesisAnalyticsApp, 'Flow', 'Input']
                    ],
                    view: 'timeSeries',
                    stacked: true,
                    period: 60,
                    region: this.config.region,
                    stat: 'Average',
                    title: 'Kinesis Analytics Latency'
                }
            });

            // Analytics Processing Function Error and Success Rate
            widgets.push({
                type: 'metric',
                x: 18,
                y: 8,
                width: 6,
                height: 6,
                properties: {
                    metrics: [
                        ['AWS/Lambda','Errors', 'FunctionName', event.Functions.AnalyticsProcessingFunction, 'Resource', event.Functions.AnalyticsProcessingFunctionArn, {id: 'errors', stat: 'Sum', color: '#d13212', region: this.config.region}],
                        ['AWS/Lambda', 'Invocations', 'FunctionName', event.Functions.AnalyticsProcessingFunction, 'Resource', event.Functions.AnalyticsProcessingFunctionArn, {id: 'invocations', stat: 'Sum', visible: false, region: this.config.region}],
                        [{ expression: '100 - 100 * errors / MAX([errors, invocations])', label: 'Success rate (%)', id: 'availability', yAxis: 'right', region: this.config.region}]
                    ],
                    region: this.config.region,
                    title: 'Lambda Error count and success rate (%)',
                    period: 60,
                    stat: 'Sum',
                    yAxis: {
                        right: {
                            max: 100,
                            label: 'Percent',
                            showUnits: false
                        },
                        left: {
                            showUnits: false,
                            label: ''
                        }
                    },
                    view: 'timeSeries',
                    stacked: false
                }
            });
        }
        console.log(`widgets: ${JSON.stringify(widgets)}`);
        let dashboard = {
            widgets: widgets
        };

        return new Promise((resolve, reject) => {
            const params = {
                DashboardName: event.DashboardName,
                DashboardBody: JSON.stringify(dashboard)
            };

            cloudwatch.putDashboard(params, function(err, data) {
                if (err) {
                    console.log(JSON.stringify(err));
                    reject(err);
                } else {
                    console.log(data);
                    resolve(data);
                }
            })
        });
    }
}

module.exports = cloudwatchHelper;
/business-logic/solution-helper/lib/dynamodb-helper.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | * SPDX-License-Identifier: MIT-0 4 | * 5 | * Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | * software and associated documentation files (the "Software"), to deal in the Software 7 | * without restriction, including without limitation the rights to use, copy, modify, 8 | * merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | * permit persons to whom the Software is furnished to do so. 10 | * 11 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | * PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | */ 18 | 19 | 'use strict'; 20 | 21 | let AWS = require('aws-sdk'); 22 | const _ = require('underscore'); 23 | 24 | /** 25 | * Helper function to interact with dynamodb for cfn custom resource. 
/**
 * Helper class used by the CloudFormation custom resource to interact with
 * DynamoDB.
 *
 * @class dynamoDBHelper
 */
class dynamoDBHelper {
    /**
     * @constructor
     */
    constructor() {
        this.creds = new AWS.EnvironmentCredentials('AWS'); // Lambda provided credentials
        this.config = {
            credentials: this.creds,
            region: process.env.AWS_REGION,
        };
    }

    /**
     * Save an item to the given DynamoDB table.
     *
     * Attribute values supplied in low-level wire format ({N: '..'}, {B: '..'})
     * are unwrapped to native JS values first, since the DocumentClient expects
     * native types.
     *
     * @param {Object} item - item to persist (may contain wire-format values)
     * @param {string} ddbTable - target table name
     * @returns {Promise<Object>} resolves with the converted item on success
     */
    saveItem(item, ddbTable) {
        console.log(`Saving item to DynamoDB: ${JSON.stringify(item)}`);
        // BUG FIX: the original registered a new 'unhandledRejection' listener on
        // every call, leaking listeners across invocations. Install it once.
        if (!dynamoDBHelper._rejectionHookInstalled) {
            dynamoDBHelper._rejectionHookInstalled = true;
            process.on('unhandledRejection', error => {
                throw error;
            });
        }

        return new Promise((resolve, reject) => {
            for (const key of Object.keys(item)) {
                item[key] = this._checkAssignedDataType(item[key]);
            }

            const params = {
                TableName: ddbTable,
                Item: item
            };

            const docClient = new AWS.DynamoDB.DocumentClient(this.config);
            docClient.put(params, function (err, resp) {
                if (err) {
                    console.log(JSON.stringify(err));
                    reject(err);
                } else {
                    console.log(`Item saved.`);
                    resolve(item);
                }
            });
        });
    }

    /**
     * Recursively unwrap DynamoDB wire-format values:
     * {N: '42'} -> 42, {B: 'true'} -> true; other objects are walked in place.
     *
     * NOTE(review): 'N' values are parsed with parseInt, so fractional numbers
     * ("1.5") are truncated — confirm callers only store integer attributes.
     *
     * @param {*} attr - attribute value to inspect
     * @returns {*} the unwrapped native value
     */
    _checkAssignedDataType(attr) {
        if (attr !== null && typeof attr === 'object') {
            if (Object.prototype.hasOwnProperty.call(attr, 'N')) {
                // BUG FIX: explicit radix (the original relied on the
                // implementation-defined default).
                return parseInt(attr['N'], 10);
            } else if (Object.prototype.hasOwnProperty.call(attr, 'B')) {
                return attr['B'] === 'true';
            }
            for (const key of Object.keys(attr)) {
                attr[key] = this._checkAssignedDataType(attr[key]);
            }
            return attr;
        }
        return attr;
    }
}

module.exports = dynamoDBHelper;
3 | * SPDX-License-Identifier: MIT-0 4 | * 5 | * Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | * software and associated documentation files (the "Software"), to deal in the Software 7 | * without restriction, including without limitation the rights to use, copy, modify, 8 | * merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | * permit persons to whom the Software is furnished to do so. 10 | * 11 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | * PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | */ 18 | 19 | 'use strict'; 20 | 21 | let AWS = require('aws-sdk'); 22 | 23 | /** 24 | * Helper function to interact with Glue for cfn custom resource. 
/**
 * Helper class used by the CloudFormation custom resource to interact with
 * AWS Glue.
 *
 * @class glueHelper
 */
class glueHelper {
    /**
     * @constructor
     */
    constructor() {
        this.creds = new AWS.EnvironmentCredentials('AWS'); // Lambda provided credentials
        this.config = {
            credentials: this.creds,
            region: process.env.AWS_REGION,
        };
    }

    /**
     * Configure encryption settings for the Glue Data Catalog.
     *
     * @param {string} catalogId - account-scoped Data Catalog id
     * @param {string} catalogEncryptionMode - value for EncryptionAtRest.CatalogEncryptionMode
     * @returns {Promise<Object|undefined>} resolves on success (undefined when
     *     the settings already existed)
     */
    putDataCatalogEncryptionSettings(catalogId, catalogEncryptionMode) {
        return new Promise((resolve, reject) => {
            let glue = new AWS.Glue(this.config);
            const params = {
                DataCatalogEncryptionSettings: {
                    ConnectionPasswordEncryption: {
                        ReturnConnectionPasswordEncrypted: true
                    },
                    EncryptionAtRest: {
                        CatalogEncryptionMode: catalogEncryptionMode
                    }
                },
                CatalogId: catalogId
            };

            glue.putDataCatalogEncryptionSettings(params, function(err, data) {
                if (err) {
                    if (err.code === 'AlreadyExistsException') {
                        console.log(`Encryption setting already exists for ${catalogId}, skipping`);
                        // BUG FIX: the original fell through after resolve(),
                        // logging the error and calling reject() on an
                        // already-settled promise.
                        return resolve();
                    }
                    console.log(JSON.stringify(err));
                    return reject(err);
                }
                console.log(`Saved Glue encryption setting for ${catalogId}`);
                resolve(data);
            });
        });
    }
}

module.exports = glueHelper;
3 | * SPDX-License-Identifier: MIT-0 4 | * 5 | * Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | * software and associated documentation files (the "Software"), to deal in the Software 7 | * without restriction, including without limitation the rights to use, copy, modify, 8 | * merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | * permit persons to whom the Software is furnished to do so. 10 | * 11 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | * PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | */ 18 | 19 | "use strict"; 20 | 21 | let AWS = require("aws-sdk"); 22 | 23 | /** 24 | * Helper function to interact with Kinesis for cfn custom resource. 
/**
 * Helper class used by the CloudFormation custom resource to interact with
 * Kinesis Data Analytics.
 *
 * @class kinesisHelper
 */
class kinesisHelper {
  /**
   * @constructor
   */
  constructor() {
    this.creds = new AWS.EnvironmentCredentials("AWS"); // Lambda provided credentials
    this.config = {
      credentials: this.creds,
      region: process.env.AWS_REGION,
    };
  }

  /**
   * Start the named Kinesis Data Analytics (SQL) application if it is READY.
   *
   * @param {string} applicationName - name of the KDA application
   * @returns {Promise<Object>} resolves with the StartApplication response, or
   *     with the DescribeApplication response when the app is already in a
   *     non-READY state (e.g. RUNNING), so the operation stays idempotent.
   */
  startKinesisAnalyticsApp(applicationName) {
    return new Promise((resolve, reject) => {
      let params = {
        ApplicationName: applicationName,
      };

      console.log(`Attempting to start Kinesis Analytics App: ${JSON.stringify(params)}`);
      let kda = new AWS.KinesisAnalyticsV2(this.config);

      kda.describeApplication(params, function (err, response) {
        if (err) {
          console.log(JSON.stringify(err));
          return reject(err);
        }
        // BUG FIX: the original rejected with a null `err` here and then fell
        // through into a TypeError on response.ApplicationDetail.
        if (!response || !response.ApplicationDetail) {
          console.log("The Kinesis Analytics application could not be found");
          return reject(new Error("The Kinesis Analytics application could not be found"));
        }
        const status = response.ApplicationDetail.ApplicationStatus;
        if (status !== "READY") {
          // BUG FIX: the original promise never settled unless the app was
          // READY, hanging the caller. Treat other statuses as success.
          console.log(`Kinesis Analytics application status is ${status}; not starting`);
          return resolve(response);
        }
        console.log("Starting Kinesis Analytics Application");
        kda.startApplication(
          {
            ApplicationName: applicationName,
            RunConfiguration: {
              SqlRunConfigurations: [
                {
                  InputId: "1.1",
                  InputStartingPositionConfiguration: {
                    InputStartingPosition: "NOW",
                  },
                },
              ],
            },
          },
          function (err, startResponse) {
            if (err) {
              console.log(JSON.stringify(err));
              reject(err);
            } else {
              console.log("Started Kinesis Analytics Application");
              resolve(startResponse);
            }
          }
        );
      });
    });
  }
}

module.exports = kinesisHelper;
3 | * SPDX-License-Identifier: MIT-0 4 | * 5 | * Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | * software and associated documentation files (the "Software"), to deal in the Software 7 | * without restriction, including without limitation the rights to use, copy, modify, 8 | * merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | * permit persons to whom the Software is furnished to do so. 10 | * 11 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | * PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | */ 18 | 19 | 'use strict'; 20 | 21 | let AWS = require('aws-sdk'); 22 | 23 | /** 24 | * Helper function to interact with AWS Lambda API for cfn custom resource. 
/**
 * Helper class used by the CloudFormation custom resource to call the
 * AWS Lambda API.
 *
 * @class lambdaHelper
 */
class lambdaHelper {
    /**
     * Builds a client configuration from the Lambda-provided environment
     * credentials and the function's region.
     * @constructor
     */
    constructor() {
        this.creds = new AWS.EnvironmentCredentials('AWS');
        this.config = {
            credentials: this.creds,
            region: process.env.AWS_REGION,
        };
    }

    /**
     * Invoke a Lambda function synchronously (RequestResponse) and resolve
     * with the invocation result.
     *
     * @param {string} functionArn - ARN (or name) of the function to invoke
     * @returns {Promise<Object>} resolves with the Invoke response payload envelope
     */
    invokeFunctionSync(functionArn) {
        console.log(`Invoking Lambda Function: ${JSON.stringify(functionArn)}`);
        return new Promise((resolve, reject) => {
            try {
                const client = new AWS.Lambda(this.config);
                const request = {
                    FunctionName: functionArn,
                    InvocationType: 'RequestResponse'
                };
                client.invoke(request, (err, data) => {
                    if (err) {
                        console.log(JSON.stringify(err));
                        reject(err);
                        return;
                    }
                    resolve(data);
                });
            } catch (err) {
                console.log(JSON.stringify(err));
                reject(err);
            }
        });
    }
}

module.exports = lambdaHelper;
'use strict';

const https = require('https');

/**
 * Sends anonymous usage metrics to the AWS Solutions metrics endpoint.
 */
class Metrics {
    constructor() {
        // Full URL of the generic metrics collection endpoint.
        this.endpoint = 'https://metrics.awssolutionsbuilder.com/generic';
    }

    /**
     * POST a metric payload to the solutions metrics endpoint.
     *
     * BUG FIX: the original referenced the undefined identifiers
     * `responseBody`, `url`, `event` and `callback` (immediate ReferenceError),
     * used the full URL as the request `hostname`, and computed Content-Length
     * on the metric object instead of its serialized body.
     *
     * @param {Object} metric - JSON-serializable metric payload
     * @returns {Promise<number>} resolves with the HTTP status code
     */
    async sendAnonymousMetric(metric) {
        const body = JSON.stringify(metric);
        const endpointUrl = new URL(this.endpoint);

        return new Promise((resolve, reject) => {
            const options = {
                hostname: endpointUrl.hostname,
                path: endpointUrl.pathname,
                port: 443,
                method: 'POST',
                headers: {
                    'Content-Type': 'application/json',
                    // Byte length, not character count, per the HTTP spec.
                    'Content-Length': Buffer.byteLength(body),
                }
            };

            const req = https.request(options, (res) => {
                console.log('STATUS:', res.statusCode);
                console.log('HEADERS:', JSON.stringify(res.headers));
                res.resume(); // drain the response so the socket is released
                resolve(res.statusCode);
            });

            req.on('error', (err) => {
                console.log('sendAnonymousMetric Error:\n', err);
                reject(err);
            });

            req.write(body);
            req.end();
        });
    }
}

module.exports = Metrics;
3 | * SPDX-License-Identifier: MIT-0 4 | * 5 | * Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | * software and associated documentation files (the "Software"), to deal in the Software 7 | * without restriction, including without limitation the rights to use, copy, modify, 8 | * merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | * permit persons to whom the Software is furnished to do so. 10 | * 11 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | * PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | */ 18 | 19 | 'use strict'; 20 | 21 | let AWS = require('aws-sdk'); 22 | 23 | /** 24 | * Helper function to interact with S3 for cfn custom resource. 
/**
 * Helper class used by the CloudFormation custom resource to interact with
 * Amazon S3.
 *
 * @class s3Helper
 */
class s3Helper {
    /**
     * @constructor
     */
    constructor() {
        this.creds = new AWS.EnvironmentCredentials('AWS'); // Lambda provided credentials
    }

    /**
     * Fetch an object from S3 and resolve with its body as a string.
     *
     * @param {string} s3Bucket - bucket name
     * @param {string} s3Key - object key
     * @returns {Promise<string>} resolves with the object body text
     */
    getObject(s3Bucket, s3Key) {
        return new Promise((resolve, reject) => {
            try {
                const client = new AWS.S3({sslEnabled: true, signatureVersion: 'v4'});
                client.getObject({ Bucket: s3Bucket, Key: s3Key }, (err, data) => {
                    if (err) {
                        console.log(JSON.stringify(err));
                        reject(err);
                        return;
                    }
                    const object = data.Body.toString();
                    console.log(`Retrieved data from S3: ${JSON.stringify(object)}`);
                    resolve(object);
                });
            } catch (err) {
                console.log(JSON.stringify(err));
                reject(err);
            }
        });
    }

    /**
     * Upload an object to S3 with AES256 server-side encryption.
     *
     * @param {string} s3Bucket - destination bucket
     * @param {string} s3Key - destination key
     * @param {(string|Buffer)} objectBody - content to store
     * @returns {Promise<Object>} resolves with the PutObject response
     */
    uploadObject(s3Bucket, s3Key, objectBody) {
        console.log(`Uploading object to s3://${s3Bucket}/${s3Key}`);
        const request = {
            Body: objectBody,
            Bucket: s3Bucket,
            Key: s3Key,
            ServerSideEncryption: 'AES256'
        };

        return new Promise((resolve, reject) => {
            const client = new AWS.S3({sslEnabled: true, signatureVersion: 'v4'});
            client.putObject(request, (err, data) => {
                if (err) {
                    console.log(err);
                    reject(err);
                    return;
                }
                console.log(JSON.stringify(data));
                resolve(data);
            });
        });
    }
}

module.exports = s3Helper;
11 | "uuid": "*" 12 | }, 13 | "devDependencies": { 14 | "aws-sdk": "*", 15 | "npm-run-all": "*" 16 | }, 17 | "scripts": { 18 | "build:init": "rm -rf package-lock.json && rm -rf dist && rm -rf node_modules", 19 | "build:zip": "zip -rq solution-helper.zip .", 20 | "build:dist": "mkdir dist && mv solution-helper.zip dist/", 21 | "build": "npm run build:init && npm install --production && npm run build:zip && npm run build:dist" 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /docs/architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-solutions-library-samples/guidance-for-game-analytics-pipeline-on-aws/fc97ddfac13e3bb686e5484b91dd9f4147839fea/docs/architecture.png -------------------------------------------------------------------------------- /infrastructure/.gitignore: -------------------------------------------------------------------------------- 1 | *.js 2 | !jest.config.js 3 | *.d.ts 4 | node_modules 5 | 6 | # CDK asset staging directory 7 | .cdk.staging 8 | cdk.out 9 | -------------------------------------------------------------------------------- /infrastructure/.npmignore: -------------------------------------------------------------------------------- 1 | *.ts 2 | !*.d.ts 3 | 4 | # CDK asset staging directory 5 | .cdk.staging 6 | cdk.out 7 | -------------------------------------------------------------------------------- /infrastructure/README.md: -------------------------------------------------------------------------------- 1 | # Infrastructure Folder 2 | 3 | Ideally avoid deploying from here, deploy from root, check Readme.md at top level -------------------------------------------------------------------------------- /infrastructure/build.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2021 Amazon.com, Inc. and its affiliates. All Rights Reserved. 
3 | 4 | Licensed under the Amazon Software License (the "License"). 5 | You may not use this file except in compliance with the License. 6 | A copy of the License is located at 7 | 8 | http://aws.amazon.com/asl/ 9 | 10 | or in the "license" file accompanying this file. This file is distributed 11 | on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 12 | express or implied. See the License for the specific language governing 13 | permissions and limitations under the License. 14 | """ 15 | 16 | import os 17 | import subprocess 18 | import sys 19 | import shutil 20 | 21 | 22 | def exit_on_failure(exit_code, msg): 23 | if exit_code != 0: 24 | print(msg) 25 | exit(exit_code) 26 | 27 | 28 | npm_cmd = shutil.which("npm") 29 | npx_cmd = shutil.which("npx") 30 | 31 | cmd = [npm_cmd, "install"] 32 | proc = subprocess.run(cmd, stderr=subprocess.STDOUT, shell=False) 33 | exit_on_failure(proc.returncode, "Cdk npm install failed") 34 | 35 | cmd = [npx_cmd, "cdk", "synth"] 36 | proc = subprocess.run(cmd, stderr=subprocess.STDOUT, shell=False) 37 | exit_on_failure(proc.returncode, "Cdk synth failed") -------------------------------------------------------------------------------- /infrastructure/cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "npx ts-node --prefer-ts-exts src/app.ts", 3 | "watch": { 4 | "include": [ 5 | "**" 6 | ], 7 | "exclude": [ 8 | "README.md", 9 | "cdk*.json", 10 | "**/*.d.ts", 11 | "**/*.js", 12 | "tsconfig.json", 13 | "package*.json", 14 | "yarn.lock", 15 | "node_modules", 16 | "test" 17 | ] 18 | }, 19 | "context": { 20 | "@aws-cdk/aws-lambda:recognizeLayerVersion": true, 21 | "@aws-cdk/core:checkSecretUsage": true, 22 | "@aws-cdk/core:target-partitions": [ 23 | "aws", 24 | "aws-cn" 25 | ], 26 | "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true, 27 | "@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": true, 28 | 
"@aws-cdk/aws-ecs:arnFormatIncludesClusterName": true, 29 | "@aws-cdk/aws-iam:minimizePolicies": true, 30 | "@aws-cdk/core:validateSnapshotRemovalPolicy": true, 31 | "@aws-cdk/aws-codepipeline:crossAccountKeyAliasStackSafeResourceName": true, 32 | "@aws-cdk/aws-s3:createDefaultLoggingPolicy": true, 33 | "@aws-cdk/aws-sns-subscriptions:restrictSqsDescryption": true, 34 | "@aws-cdk/aws-apigateway:disableCloudWatchRole": true, 35 | "@aws-cdk/core:enablePartitionLiterals": true, 36 | "@aws-cdk/aws-events:eventsTargetQueueSameAccount": true, 37 | "@aws-cdk/aws-iam:standardizedServicePrincipals": true, 38 | "@aws-cdk/aws-ecs:disableExplicitDeploymentControllerForCircuitBreaker": true, 39 | "@aws-cdk/aws-iam:importedRoleStackSafeDefaultPolicyName": true, 40 | "@aws-cdk/aws-s3:serverAccessLogsUseBucketPolicy": true, 41 | "@aws-cdk/aws-route53-patters:useCertificate": true, 42 | "@aws-cdk/customresources:installLatestAwsSdkDefault": false 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /infrastructure/config.yaml.TEMPLATE: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # 3 | # Permission is hereby granted, free of charge, to any person obtaining a copy of 4 | # this software and associated documentation files (the "Software"), to deal in 5 | # the Software without restriction, including without limitation the rights to 6 | # use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 7 | # the Software, and to permit persons to whom the Software is furnished to do so. 8 | # 9 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 10 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 11 | # FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR 12 | # COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 13 | # IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 14 | # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 15 | 16 | WORKLOAD_NAME: "" 17 | CDK_VERSION: "" 18 | NODE_VERSION: "latest" 19 | PYTHON_VERSION: "3.8" 20 | DEV_MODE: true 21 | ENABLE_STREAMING_ANALYTICS: true 22 | STREAM_SHARD_COUNT: 1 23 | RAW_EVENTS_PREFIX: "" 24 | PROCESSED_EVENTS_PREFIX: "" 25 | RAW_EVENTS_TABLE: "" 26 | GLUE_TMP_PREFIX: "" 27 | S3_BACKUP_MODE: false 28 | CLOUDWATCH_RETENTION_DAYS: 30 29 | API_STAGE_NAME: "live" 30 | EMAIL_ADDRESS: "" 31 | GITHUB_USERNAME: "" 32 | GITHUB_REPO_NAME: "" 33 | CONNECTION_ARN: "" 34 | accounts: 35 | - NAME: "QA" 36 | ACCOUNT: "" 37 | REGION: "" 38 | - NAME: "PROD" 39 | ACCOUNT: "" 40 | REGION: "" -------------------------------------------------------------------------------- /infrastructure/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "game-analytics-pipeline-cdk", 3 | "version": "0.1.0", 4 | "bin": { 5 | "infrastructure": "bin/infrastructure.js" 6 | }, 7 | "scripts": { 8 | "build": "tsc", 9 | "watch": "tsc -w", 10 | "test": "jest", 11 | "cdk": "./node_modules/aws-cdk/bin/cdk", 12 | "bootstrap": "./node_modules/aws-cdk/bin/cdk bootstrap --cloudformation-execution-policies 'arn:aws:iam::aws:policy/PowerUserAccess,arn:aws:iam::aws:policy/IAMFullAccess'", 13 | "lint": "eslint src/" 14 | }, 15 | "devDependencies": { 16 | "@aws-cdk/aws-lambda-python-alpha": "2.92.0-alpha.0", 17 | "@aws-cdk/aws-apigatewayv2-alpha": "2.92.0-alpha.0", 18 | "@aws-cdk/aws-apigatewayv2-authorizers-alpha": "2.92.0-alpha.0", 19 | "@aws-cdk/aws-apigatewayv2-integrations-alpha": "2.92.0-alpha.0", 20 | "@types/jest": "^29.2.5", 21 | "@types/node": "18.11.18", 22 | "@types/uuid": "^9.0.1", 23 | "aws-cdk": "2.92.0", 24 | "aws-cdk-lib": "2.92.0", 25 | 
"fs": "^0.0.1-security", 26 | "jest": "^29.3.1", 27 | "js-yaml": "^4.1.0", 28 | "ts-jest": "^29.0.3", 29 | "ts-node": "^10.9.1", 30 | "typescript": "~4.9.4", 31 | "uuid": "^9.0.0" 32 | }, 33 | "dependencies": { 34 | "@types/js-yaml": "^4.0.5", 35 | "constructs": "^10.0.0", 36 | "source-map-support": "^0.5.21" 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /infrastructure/src/app.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import "source-map-support/register"; 3 | import * as cdk from "aws-cdk-lib"; 4 | import { InfrastructureStack } from "./app-stack"; 5 | import { getConfig } from "./helpers/stack-config-loader"; 6 | import { PipelineStack } from "./pipeline-stack"; 7 | 8 | const app = new cdk.App(); 9 | const config = getConfig(); 10 | 11 | // Checks for prod and QA account information 12 | const prod = config.accounts.find(({ NAME }) => NAME === "PROD"); 13 | const qa = config.accounts.find(({ NAME }) => NAME === "QA"); 14 | 15 | // Defaults to local deployment information 16 | const account = 17 | app.node.tryGetContext("account") || 18 | process.env.CDK_DEPLOY_ACCOUNT || 19 | process.env.CDK_DEFAULT_ACCOUNT || 20 | qa?.ACCOUNT; 21 | 22 | const region = 23 | app.node.tryGetContext("region") || 24 | process.env.CDK_DEPLOY_REGION || 25 | process.env.CDK_DEFAULT_REGION || 26 | qa?.REGION; 27 | 28 | const env = { region, account }; 29 | 30 | // Core infrastructure 31 | new InfrastructureStack(app, "CentralizedGameAnalytics", { 32 | stackName: `${prod?.NAME}-${config.WORKLOAD_NAME}`, 33 | description : "Guidance for the Game Analytics Pipeline on AWS (SO0096)", 34 | config, 35 | env, 36 | }); 37 | 38 | // Deployment through pipeline 39 | new PipelineStack(app, "PipelineStack", { 40 | stackName: `${config.WORKLOAD_NAME}-Toolchain`, 41 | description : "Guidance for the Game Analytics Pipeline on AWS (SO0096)", 42 | config, 43 | env, 44 | }); 45 | 
-------------------------------------------------------------------------------- /infrastructure/src/constructs/data-lake-construct.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2023 Amazon.com, Inc. and its affiliates. All Rights Reserved. 3 | * 4 | * Licensed under the Amazon Software License (the 'License'). 5 | * You may not use this file except in compliance with the License. 6 | * A copy of the License is located at 7 | * 8 | * http://aws.amazon.com/asl/ 9 | * 10 | * or in the 'license' file accompanying this file. This file is distributed 11 | * on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 12 | * express or implied. See the License for the specific language governing 13 | * permissions and limitations under the License. 14 | */ 15 | import { GameAnalyticsPipelineConfig } from "../helpers/config-types"; 16 | import * as cdk from "aws-cdk-lib"; 17 | import { Construct } from "constructs"; 18 | import * as s3 from "aws-cdk-lib/aws-s3"; 19 | 20 | import * as glueCfn from "aws-cdk-lib/aws-glue"; 21 | import * as sns from "aws-cdk-lib/aws-sns"; 22 | import * as events from "aws-cdk-lib/aws-events"; 23 | import * as eventstargets from "aws-cdk-lib/aws-events-targets"; 24 | import * as iam from "aws-cdk-lib/aws-iam"; 25 | 26 | /* eslint-disable @typescript-eslint/no-empty-interface */ 27 | export interface DataLakeConstructProps extends cdk.StackProps { 28 | analyticsBucket: s3.Bucket; 29 | config: GameAnalyticsPipelineConfig; 30 | notificationsTopic: sns.Topic; 31 | } 32 | 33 | const defaultProps: Partial = {}; 34 | 35 | /** 36 | * Deploys the DataLake construct 37 | * 38 | * Creates Glue to turn analytics s3 bucket into Datalake. Creates Jobs that can be used to process s3 data for Athena. 
39 | */ 40 | export class DataLakeConstruct extends Construct { 41 | public readonly gameEventsDatabase: glueCfn.CfnDatabase; 42 | public readonly rawEventsTable: glueCfn.CfnTable; 43 | 44 | constructor(parent: Construct, name: string, props: DataLakeConstructProps) { 45 | super(parent, name); 46 | 47 | /* eslint-disable @typescript-eslint/no-unused-vars */ 48 | props = { ...defaultProps, ...props }; 49 | 50 | // Glue Database 51 | const gameEventsDatabase = new glueCfn.CfnDatabase( 52 | this, 53 | "GameEventsDatabase", 54 | { 55 | catalogId: cdk.Aws.ACCOUNT_ID, 56 | databaseInput: { 57 | description: `Database for game analytics events for stack: ${cdk.Aws.STACK_NAME}`, 58 | locationUri: `s3://${props.analyticsBucket.bucketName}`, 59 | }, 60 | } 61 | ); 62 | 63 | // Glue table for raw events that come in from stream 64 | const rawEventsTable = new glueCfn.CfnTable(this, "GameRawEventsTable", { 65 | catalogId: cdk.Aws.ACCOUNT_ID, 66 | databaseName: gameEventsDatabase.ref, 67 | tableInput: { 68 | description: `Stores raw event data from the game analytics pipeline for stack ${cdk.Aws.STACK_NAME}`, 69 | name: props.config.RAW_EVENTS_TABLE, 70 | tableType: "EXTERNAL_TABLE", 71 | partitionKeys: [ 72 | { name: "year", type: "string" }, 73 | { name: "month", type: "string" }, 74 | { name: "day", type: "string" }, 75 | ], 76 | parameters: { 77 | classification: "parquet", 78 | compressionType: "none", 79 | typeOfData: "file", 80 | }, 81 | storageDescriptor: { 82 | outputFormat: 83 | "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", 84 | inputFormat: 85 | "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", 86 | compressed: false, 87 | numberOfBuckets: -1, 88 | serdeInfo: { 89 | serializationLibrary: 90 | "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", 91 | parameters: { 92 | "serialization.format": "1", 93 | }, 94 | }, 95 | bucketColumns: [], 96 | sortColumns: [], 97 | storedAsSubDirectories: false, 98 | location: 
`s3://${props.analyticsBucket.bucketName}/${props.config.RAW_EVENTS_PREFIX}`, 99 | columns: [ 100 | { name: "event_id", type: "string" }, 101 | { name: "event_type", type: "string" }, 102 | { name: "event_name", type: "string" }, 103 | { name: "event_version", type: "string" }, 104 | { name: "event_timestamp", type: "bigint" }, 105 | { name: "app_version", type: "string" }, 106 | { name: "application_id", type: "string" }, 107 | { name: "application_name", type: "string" }, 108 | { name: "event_data", type: "string" }, 109 | { name: "metadata", type: "string" }, 110 | ], 111 | }, 112 | }, 113 | }); 114 | rawEventsTable.addDependency(gameEventsDatabase); 115 | 116 | // IAM Role allowing Glue ETL Job to access Analytics Bucket 117 | const gameEventsEtlRole = new iam.Role(this, "GameEventsEtlRole", { 118 | assumedBy: new iam.ServicePrincipal("glue.amazonaws.com"), 119 | path: "/", 120 | managedPolicies: [ 121 | iam.ManagedPolicy.fromAwsManagedPolicyName( 122 | "service-role/AWSGlueServiceRole" 123 | ), 124 | ], 125 | }); 126 | gameEventsEtlRole.addToPolicy( 127 | new iam.PolicyStatement({ 128 | sid: "S3Access", 129 | effect: iam.Effect.ALLOW, 130 | actions: [ 131 | "s3:ListBucket", 132 | "s3:GetObject", 133 | "s3:PutObject", 134 | "s3:DeleteObject", 135 | ], 136 | resources: [ 137 | props.analyticsBucket.bucketArn, 138 | `${props.analyticsBucket.bucketArn}/*`, 139 | ], 140 | }) 141 | ); 142 | gameEventsEtlRole.addToPolicy( 143 | new iam.PolicyStatement({ 144 | sid: "GlueTableAccess", 145 | effect: iam.Effect.ALLOW, 146 | actions: [ 147 | "glue:BatchGetPartition", 148 | "glue:GetPartition", 149 | "glue:GetPartitions", 150 | "glue:BatchCreatePartition", 151 | "glue:CreatePartition", 152 | "glue:CreateTable", 153 | "glue:GetTable", 154 | "glue:GetTables", 155 | "glue:GetTableVersion", 156 | "glue:GetTableVersions", 157 | "glue:UpdatePartition", 158 | "glue:UpdateTable", 159 | ], 160 | resources: [ 161 | 
`arn:${cdk.Aws.PARTITION}:glue:${cdk.Aws.REGION}:${cdk.Aws.ACCOUNT_ID}:catalog`, 162 | `arn:${cdk.Aws.PARTITION}:glue:${cdk.Aws.REGION}:${cdk.Aws.ACCOUNT_ID}:table/${gameEventsDatabase.ref}/*`, 163 | `arn:${cdk.Aws.PARTITION}:glue:${cdk.Aws.REGION}:${cdk.Aws.ACCOUNT_ID}:database/${gameEventsDatabase.ref}`, 164 | ], 165 | }) 166 | ); 167 | gameEventsEtlRole.addToPolicy( 168 | new iam.PolicyStatement({ 169 | sid: "GlueDBAccess", 170 | effect: iam.Effect.ALLOW, 171 | actions: [ 172 | "glue:GetDatabase", 173 | "glue:GetDatabases", 174 | "glue:UpdateDatabase", 175 | ], 176 | resources: [ 177 | `arn:${cdk.Aws.PARTITION}:glue:${cdk.Aws.REGION}:${cdk.Aws.ACCOUNT_ID}:catalog`, 178 | `arn:${cdk.Aws.PARTITION}:glue:${cdk.Aws.REGION}:${cdk.Aws.ACCOUNT_ID}:database/${gameEventsDatabase.ref}`, 179 | ], 180 | }) 181 | ); 182 | gameEventsEtlRole.addToPolicy( 183 | new iam.PolicyStatement({ 184 | sid: "KMSAccess", 185 | effect: iam.Effect.ALLOW, 186 | actions: ["kms:Decrypt", "kms:Encrypt", "kms:GenerateDataKey"], 187 | resources: [ 188 | `arn:${cdk.Aws.PARTITION}:kms:${cdk.Aws.REGION}:${cdk.Aws.ACCOUNT_ID}:alias/aws/glue`, 189 | ], 190 | }) 191 | ); 192 | const glueCrawlerRole = new iam.Role(this, "GlueCrawlerRole", { 193 | assumedBy: new iam.ServicePrincipal("glue.amazonaws.com"), 194 | path: "/", 195 | }); 196 | glueCrawlerRole.addToPolicy( 197 | new iam.PolicyStatement({ 198 | effect: iam.Effect.ALLOW, 199 | actions: [ 200 | "s3:ListBucket", 201 | "s3:GetObject", 202 | "s3:PutObject", 203 | "s3:DeleteObject", 204 | ], 205 | resources: [ 206 | props.analyticsBucket.arnForObjects("*"), 207 | props.analyticsBucket.bucketArn, 208 | ], 209 | }) 210 | ); 211 | glueCrawlerRole.addToPolicy( 212 | new iam.PolicyStatement({ 213 | effect: iam.Effect.ALLOW, 214 | actions: [ 215 | "glue:BatchGetPartition", 216 | "glue:GetPartition", 217 | "glue:GetPartitions", 218 | "glue:BatchCreatePartition", 219 | "glue:CreatePartition", 220 | "glue:CreateTable", 221 | "glue:GetTable", 222 | 
"glue:GetTables", 223 | "glue:GetTableVersion", 224 | "glue:GetTableVersions", 225 | "glue:UpdatePartition", 226 | "glue:UpdateTable", 227 | ], 228 | resources: [ 229 | `arn:${cdk.Aws.PARTITION}:glue:${cdk.Aws.REGION}:${cdk.Aws.ACCOUNT_ID}:catalog`, 230 | `arn:${cdk.Aws.PARTITION}:glue:${cdk.Aws.REGION}:${cdk.Aws.ACCOUNT_ID}:table/${gameEventsDatabase.ref}/*`, 231 | `arn:${cdk.Aws.PARTITION}:glue:${cdk.Aws.REGION}:${cdk.Aws.ACCOUNT_ID}:database/${gameEventsDatabase.ref}`, 232 | ], 233 | }) 234 | ); 235 | glueCrawlerRole.addToPolicy( 236 | new iam.PolicyStatement({ 237 | effect: iam.Effect.ALLOW, 238 | actions: [ 239 | "glue:GetDatabase", 240 | "glue:GetDatabases", 241 | "glue:UpdateDatabase", 242 | ], 243 | resources: [ 244 | `arn:${cdk.Aws.PARTITION}:glue:${cdk.Aws.REGION}:${cdk.Aws.ACCOUNT_ID}:catalog`, 245 | `arn:${cdk.Aws.PARTITION}:glue:${cdk.Aws.REGION}:${cdk.Aws.ACCOUNT_ID}:database/${gameEventsDatabase.ref}`, 246 | ], 247 | }) 248 | ); 249 | glueCrawlerRole.addToPolicy( 250 | new iam.PolicyStatement({ 251 | effect: iam.Effect.ALLOW, 252 | actions: ["kms:Decrypt", "kms:Encrypt", "kms:GenerateDataKey"], 253 | resources: [ 254 | `arn:${cdk.Aws.PARTITION}:kms:${cdk.Aws.REGION}:${cdk.Aws.ACCOUNT_ID}:alias/aws/glue`, 255 | ], 256 | }) 257 | ); 258 | glueCrawlerRole.addToPolicy( 259 | new iam.PolicyStatement({ 260 | effect: iam.Effect.ALLOW, 261 | actions: [ 262 | "logs:CreateLogGroup", 263 | "logs:CreateLogStream", 264 | "logs:PutLogEvents", 265 | ], 266 | resources: ["arn:*:logs:*:*:/aws-glue/*"], 267 | }) 268 | ); 269 | 270 | // Glue ETL Job to process events from staging and repartition by event_type and date 271 | const gameEventsEtlJob = new glueCfn.CfnJob(this, "GameEventsEtlJob", { 272 | description: `Etl job for processing raw game event data, for stack ${cdk.Aws.STACK_NAME}.`, 273 | glueVersion: "4.0", 274 | maxRetries: 0, 275 | maxCapacity: 10, 276 | timeout: 30, 277 | executionProperty: { 278 | maxConcurrentRuns: 1, 279 | }, 280 | command: { 281 | 
name: "glueetl", 282 | pythonVersion: "3", 283 | scriptLocation: `s3://${props.analyticsBucket.bucketName}/glue-scripts/game_events_etl.py`, 284 | }, 285 | role: gameEventsEtlRole.roleArn, 286 | defaultArguments: { 287 | "--enable-metrics": "true", 288 | "--enable-continuous-cloudwatch-log": "true", 289 | "--enable-glue-datacatalog": "true", 290 | "--database_name": gameEventsDatabase.ref, 291 | "--raw_events_table_name": props.config.RAW_EVENTS_TABLE, 292 | "--analytics_bucket": `s3://${props.analyticsBucket.bucketName}/`, 293 | "--processed_data_prefix": props.config.PROCESSED_EVENTS_PREFIX, 294 | "--glue_tmp_prefix": props.config.GLUE_TMP_PREFIX, 295 | "--job-bookmark-option": "job-bookmark-enable", 296 | "--TempDir": `s3://${props.analyticsBucket.bucketName}/${props.config.GLUE_TMP_PREFIX}`, 297 | }, 298 | }); 299 | 300 | // Crawler crawls s3 partitioned data 301 | const eventsCrawler = new glueCfn.CfnCrawler(this, "EventsCrawler", { 302 | role: glueCrawlerRole.roleArn, 303 | description: `AWS Glue Crawler for partitioned data, for stack ${cdk.Aws.STACK_NAME}`, 304 | databaseName: gameEventsDatabase.ref, 305 | targets: { 306 | s3Targets: [ 307 | { 308 | path: `s3://${props.analyticsBucket.bucketName}/${props.config.PROCESSED_EVENTS_PREFIX}`, 309 | }, 310 | ], 311 | }, 312 | schemaChangePolicy: { 313 | updateBehavior: "UPDATE_IN_DATABASE", 314 | deleteBehavior: "LOG", 315 | }, 316 | configuration: `{ 317 | "Version":1.0, 318 | "CrawlerOutput":{ 319 | "Partitions":{ 320 | "AddOrUpdateBehavior":"InheritFromTable" 321 | }, 322 | "Tables":{ 323 | "AddOrUpdateBehavior":"MergeNewColumns" 324 | } 325 | } 326 | }`, 327 | }); 328 | 329 | // Workflow that triggers glue ETL job, processes s3 data, and updates the data catalog 330 | const gameEventsWorkflow = new glueCfn.CfnWorkflow( 331 | this, 332 | "GameEventsWorkflow", 333 | { 334 | description: `Orchestrates a Glue ETL Job and Crawler to process data in S3 and update data catalog, for stack ${cdk.Aws.STACK_NAME}`, 335 
| defaultRunProperties: { 336 | "--enable-metrics": "true", 337 | "--enable-continuous-cloudwatch-log": "true", 338 | "--enable-glue-datacatalog": "true", 339 | "--database_name": gameEventsDatabase.ref, 340 | "--raw_events_table_name": rawEventsTable.ref, 341 | "--analytics_bucket": `s3://${props.analyticsBucket.bucketName}/`, 342 | "--processed_data_prefix": props.config.PROCESSED_EVENTS_PREFIX, 343 | "--glue_tmp_prefix": props.config.GLUE_TMP_PREFIX, 344 | "--job-bookmark-option": "job-bookmark-enable", 345 | "--TempDir": `s3://${props.analyticsBucket.bucketName}/${props.config.GLUE_TMP_PREFIX}`, 346 | }, 347 | } 348 | ); 349 | gameEventsWorkflow.addDependency(gameEventsDatabase); 350 | gameEventsWorkflow.addDependency(rawEventsTable); 351 | 352 | // Trigger for Glue crawler 353 | const gameEventsCrawlerTrigger = new glueCfn.CfnTrigger( 354 | this, 355 | "GameEventsCrawlerTrigger", 356 | { 357 | type: "CONDITIONAL", 358 | description: `Starts a crawler to update the Glue Data Catalog with any changes detected in the processed_events S3 prefix after the ETL job runs, for stack ${cdk.Aws.STACK_NAME}`, 359 | startOnCreation: true, 360 | workflowName: gameEventsWorkflow.ref, 361 | actions: [ 362 | { 363 | crawlerName: eventsCrawler.ref, 364 | }, 365 | ], 366 | predicate: { 367 | conditions: [ 368 | { 369 | logicalOperator: "EQUALS", 370 | jobName: gameEventsEtlJob.ref, 371 | state: "SUCCEEDED", 372 | }, 373 | ], 374 | }, 375 | } 376 | ); 377 | gameEventsCrawlerTrigger.addDependency(gameEventsEtlJob); 378 | gameEventsCrawlerTrigger.addDependency(gameEventsWorkflow); 379 | gameEventsCrawlerTrigger.addDependency(eventsCrawler); 380 | 381 | // Trigger to start glue job 382 | const gameEventsETLJobTrigger = new glueCfn.CfnTrigger( 383 | this, 384 | "GameEventsTriggerETLJob", 385 | { 386 | workflowName: gameEventsWorkflow.ref, 387 | type: "ON_DEMAND", 388 | description: `Triggers the start of ETL job to process raw_events, for stack ${cdk.Aws.STACK_NAME}.`, 389 | actions: 
[ 390 | { 391 | jobName: gameEventsEtlJob.ref, 392 | }, 393 | ], 394 | } 395 | ); 396 | gameEventsETLJobTrigger.addDependency(gameEventsEtlJob); 397 | gameEventsETLJobTrigger.addDependency(gameEventsWorkflow); 398 | 399 | // Even that starts ETL job 400 | const etlJobStatusEventsRule = new events.Rule(this, "EtlJobStatusEvents", { 401 | description: `CloudWatch Events Rule for generating status events for the Glue ETL Job for ${cdk.Aws.STACK_NAME}.`, 402 | eventPattern: { 403 | detailType: ["Glue Job State Change"], 404 | source: ["aws.glue"], 405 | detail: { 406 | jobName: [gameEventsEtlJob.ref], 407 | }, 408 | }, 409 | enabled: true, 410 | targets: [new eventstargets.SnsTopic(props.notificationsTopic)], 411 | }); 412 | etlJobStatusEventsRule.node.addDependency(gameEventsEtlJob); 413 | 414 | const glueCrawlerStatusEventsRule = new events.Rule( 415 | this, 416 | "GlueCrawlerStatusEvents", 417 | { 418 | description: `CloudWatch Events Rule for generating status events for Glue ETL Job for stack ${cdk.Aws.STACK_NAME}`, 419 | eventPattern: { 420 | source: ["aws.glue"], 421 | detailType: ["Glue Crawler State Change"], 422 | detail: { 423 | crawlerName: [eventsCrawler.ref], 424 | }, 425 | }, 426 | enabled: true, 427 | targets: [new eventstargets.SnsTopic(props.notificationsTopic)], 428 | } 429 | ); 430 | glueCrawlerStatusEventsRule.node.addDependency(eventsCrawler); 431 | 432 | this.gameEventsDatabase = gameEventsDatabase; 433 | this.rawEventsTable = rawEventsTable; 434 | 435 | new cdk.CfnOutput(this, "GameEventsEtlJobOutput", { 436 | description: 437 | "ETL Job for processing game events into optimized format for analytics", 438 | value: gameEventsEtlJob.ref, 439 | }); 440 | 441 | new cdk.CfnOutput(this, "GameEventsDatabaseOutput", { 442 | description: "Glue Catalog Database for storing game analytics events", 443 | value: gameEventsDatabase.ref, 444 | }); 445 | } 446 | } 447 | -------------------------------------------------------------------------------- 
/infrastructure/src/constructs/lambda-construct.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2023 Amazon.com, Inc. and its affiliates. All Rights Reserved. 3 | * 4 | * Licensed under the Amazon Software License (the 'License'). 5 | * You may not use this file except in compliance with the License. 6 | * A copy of the License is located at 7 | * 8 | * http://aws.amazon.com/asl/ 9 | * 10 | * or in the 'license' file accompanying this file. This file is distributed 11 | * on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 12 | * express or implied. See the License for the specific language governing 13 | * permissions and limitations under the License. 14 | */ 15 | import { DataLakeConstruct } from "./data-lake-construct"; 16 | import * as cdk from "aws-cdk-lib"; 17 | import { Construct } from "constructs"; 18 | import { NodejsFunction } from "aws-cdk-lib/aws-lambda-nodejs"; 19 | import * as path from "path"; 20 | import * as lambda from "aws-cdk-lib/aws-lambda"; 21 | import * as events from "aws-cdk-lib/aws-events"; 22 | import * as eventstargets from "aws-cdk-lib/aws-events-targets"; 23 | 24 | /* eslint-disable @typescript-eslint/no-empty-interface */ 25 | export interface LambdaConstructProps extends cdk.StackProps { 26 | dataLakeConstruct: DataLakeConstruct; 27 | applicationsTable: cdk.aws_dynamodb.Table; 28 | authorizationsTable: cdk.aws_dynamodb.Table; 29 | } 30 | 31 | const defaultProps: Partial = {}; 32 | 33 | /** 34 | * Deploys the Lambda construct 35 | */ 36 | export class LambdaConstruct extends Construct { 37 | public readonly gluePartitionCreator: lambda.Function; 38 | public readonly eventsProcessingFunction: lambda.Function; 39 | public readonly solutionHelper: lambda.Function; 40 | public readonly lambdaAuthorizer: lambda.Function; 41 | public readonly applicationAdminServiceFunction: lambda.Function; 42 | 43 | constructor(parent: Construct, name: string, props: 
LambdaConstructProps) { 44 | super(parent, name); 45 | 46 | /* eslint-disable @typescript-eslint/no-unused-vars */ 47 | props = { ...defaultProps, ...props }; 48 | 49 | const codePath = "../../../business-logic"; 50 | 51 | // ---- Functions ---- // 52 | /* The following variables define the necessary resources for the `GluePartitionCreator` serverless 53 | function. This function creates a new date-based partition in Glue Database based on UTC Year/Month/Day. */ 54 | this.gluePartitionCreator = new NodejsFunction( 55 | this, 56 | "GluePartitionCreator", 57 | { 58 | description: 59 | "Function creates a new date-based partition in Glue Database based on UTC Year/Month/Day", 60 | entry: path.join( 61 | __dirname, 62 | `${codePath}/data-lake/glue-partition-creator/index.js` 63 | ), 64 | depsLockFilePath: path.join( 65 | __dirname, 66 | `${codePath}/data-lake/glue-partition-creator/package-lock.json` 67 | ), 68 | runtime: lambda.Runtime.NODEJS_18_X, 69 | memorySize: 128, 70 | timeout: cdk.Duration.minutes(5), 71 | environment: { 72 | TABLE_NAME: props.dataLakeConstruct.rawEventsTable.ref, 73 | DATABASE_NAME: props.dataLakeConstruct.gameEventsDatabase.ref, 74 | }, 75 | } 76 | ); 77 | const createPartition = new events.Rule(this, "CreatePartition", { 78 | schedule: events.Schedule.cron({ 79 | minute: "0", 80 | hour: "*/1", 81 | day: "*", 82 | month: "*", 83 | year: "*", 84 | }), 85 | }); 86 | createPartition.addTarget( 87 | new eventstargets.LambdaFunction(this.gluePartitionCreator) 88 | ); 89 | 90 | /* The following variables define the necessary resources for the `EventsProcessingFunction` serverless 91 | function. This function to process and transform raw events before they get written to S3. 
*/ 92 | this.eventsProcessingFunction = new NodejsFunction( 93 | this, 94 | "EventsProcessingFunction", 95 | { 96 | description: 97 | "Function to process and transform raw events before they get written to S3", 98 | entry: path.join(__dirname, `${codePath}/events-processing/index.js`), 99 | depsLockFilePath: path.join( 100 | __dirname, 101 | `${codePath}/events-processing/package-lock.json` 102 | ), 103 | memorySize: 256, 104 | timeout: cdk.Duration.minutes(5), 105 | runtime: lambda.Runtime.NODEJS_18_X, 106 | environment: { 107 | APPLICATIONS_TABLE: props.applicationsTable.tableName, 108 | CACHE_TIMEOUT_SECONDS: "60", 109 | }, 110 | } 111 | ); 112 | 113 | /* The following variables define the `SolutionHelper` function. This function provides the various utilities 114 | required to initialize solution defaults. */ 115 | this.solutionHelper = new NodejsFunction(this, "SolutionHelper", { 116 | description: "Solution Helper utility function", 117 | entry: path.join(__dirname, `${codePath}/solution-helper/index.js`), 118 | depsLockFilePath: path.join( 119 | __dirname, 120 | `${codePath}/solution-helper/package-lock.json` 121 | ), 122 | 123 | memorySize: 128, 124 | timeout: cdk.Duration.minutes(5), 125 | runtime: lambda.Runtime.NODEJS_18_X, 126 | environment: { 127 | VERSION: "2", 128 | }, 129 | }); 130 | 131 | this.lambdaAuthorizer = new NodejsFunction(this, "LambdaAuthorizer", { 132 | description: 133 | "API Gateway Lambda Authorizer used to validate requests to solution /events API endpoint.", 134 | entry: path.join(__dirname, `${codePath}/api/lambda-authorizer/index.js`), 135 | depsLockFilePath: path.join( 136 | __dirname, 137 | `${codePath}/api/lambda-authorizer/package-lock.json` 138 | ), 139 | memorySize: 128, 140 | timeout: cdk.Duration.seconds(60), 141 | runtime: lambda.Runtime.NODEJS_18_X, 142 | environment: { 143 | AUTHORIZATIONS_TABLE: props.authorizationsTable.tableName, 144 | APPLICATION_AUTHORIZATIONS_INDEX: "ApplicationAuthorizations", 145 | 
APPLICATIONS_TABLE: props.applicationsTable.tableName, 146 | }, 147 | }); 148 | 149 | /* The following variables define the necessary resources for the `ApplicationAdminServiceFunction`. 150 | This function provides the application admin microservice. */ 151 | this.applicationAdminServiceFunction = new NodejsFunction( 152 | this, 153 | "ApplicationAdminServiceFunction", 154 | { 155 | description: 156 | "This function provides the application admin microservice.", 157 | entry: path.join(__dirname, `${codePath}/api/admin/index.js`), 158 | depsLockFilePath: path.join( 159 | __dirname, 160 | `${codePath}/api/admin/package-lock.json` 161 | ), 162 | 163 | memorySize: 128, 164 | timeout: cdk.Duration.seconds(60), 165 | runtime: lambda.Runtime.NODEJS_18_X, 166 | environment: { 167 | AUTHORIZATIONS_TABLE: props.authorizationsTable.tableName, 168 | APPLICATION_AUTHORIZATIONS_INDEX: "ApplicationAuthorizations", 169 | APPLICATIONS_TABLE: props.applicationsTable.tableName, 170 | }, 171 | } 172 | ); 173 | } 174 | } 175 | -------------------------------------------------------------------------------- /infrastructure/src/constructs/streaming-analytics.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2023 Amazon.com, Inc. and its affiliates. All Rights Reserved. 3 | * 4 | * Licensed under the Amazon Software License (the 'License'). 5 | * You may not use this file except in compliance with the License. 6 | * A copy of the License is located at 7 | * 8 | * http://aws.amazon.com/asl/ 9 | * 10 | * or in the 'license' file accompanying this file. This file is distributed 11 | * on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 12 | * express or implied. See the License for the specific language governing 13 | * permissions and limitations under the License. 
14 | */ 15 | 16 | import * as cdk from "aws-cdk-lib"; 17 | import { Construct } from "constructs"; 18 | import * as kinesisanalytics from "aws-cdk-lib/aws-kinesisanalytics"; 19 | import * as customresources from "aws-cdk-lib/custom-resources"; 20 | import { NodejsFunction } from "aws-cdk-lib/aws-lambda-nodejs"; 21 | import * as lambda from "aws-cdk-lib/aws-lambda"; 22 | import * as kinesis from "aws-cdk-lib/aws-kinesis"; 23 | import * as iam from "aws-cdk-lib/aws-iam"; 24 | import * as path from "path"; 25 | import fs from "fs"; 26 | 27 | const inputSchema: kinesisanalytics.CfnApplication.InputSchemaProperty = { 28 | recordColumns: [ 29 | { 30 | name: "event_version", 31 | sqlType: "VARCHAR(8)", 32 | mapping: "$.event.event_version", 33 | }, 34 | { 35 | name: "event_id", 36 | sqlType: "VARCHAR(64)", 37 | mapping: "$.event.event_id", 38 | }, 39 | { 40 | name: "event_timestamp", 41 | sqlType: "BIGINT", 42 | mapping: "$.event.event_timestamp", 43 | }, 44 | { 45 | name: "event_type", 46 | sqlType: "VARCHAR(64)", 47 | mapping: "$.event.event_type", 48 | }, 49 | { 50 | name: "app_version", 51 | sqlType: "VARCHAR(8)", 52 | mapping: "$.event.app_version", 53 | }, 54 | { 55 | name: "level_id", 56 | sqlType: "VARCHAR(64)", 57 | mapping: "$.event.event_data.level_id", 58 | }, 59 | { 60 | name: "country_id", 61 | sqlType: "VARCHAR(64)", 62 | mapping: "$.event.event_data.country_id", 63 | }, 64 | { 65 | name: "spell_id", 66 | sqlType: "VARCHAR(64)", 67 | mapping: "$.event.event_data_spell_id", 68 | }, 69 | { 70 | name: "application_id", 71 | sqlType: "VARCHAR(64)", 72 | mapping: "$.application_id", 73 | }, 74 | { 75 | name: "last_login_time", 76 | sqlType: "BIGINT", 77 | mapping: "$.event.event_data.last_login_time", 78 | }, 79 | { 80 | name: "currency_type", 81 | sqlType: "VARCHAR(64)", 82 | mapping: "$.event.event_data.currency_type", 83 | }, 84 | { 85 | name: "currency_amount", 86 | sqlType: "DOUBLE", 87 | mapping: "$.event.event_data.currency_amount", 88 | }, 89 | ], 90 | 
recordFormat: { 91 | recordFormatType: "JSON", 92 | mappingParameters: { 93 | jsonMappingParameters: { 94 | recordRowPath: "$", 95 | }, 96 | }, 97 | }, 98 | }; 99 | 100 | /* eslint-disable @typescript-eslint/no-empty-interface */ 101 | export interface StreamingAnalyticsConstructProps extends cdk.StackProps { 102 | /** 103 | * Base Codepath for business logic folder 104 | */ 105 | baseCodePath: string; 106 | gameEventsStream: kinesis.IStream; 107 | solutionHelper: lambda.IFunction; 108 | solutionHelperProvider: customresources.Provider; 109 | } 110 | 111 | const defaultProps: Partial = {}; 112 | 113 | /** 114 | * Deploys the StreamingAnalytics construct 115 | * 116 | * Creates KDA application as well as Lambda Function for processing KDA output. Logs are stored in correct places 117 | * and KDA app is started automatically using a custom resource 118 | */ 119 | export class StreamingAnalyticsConstruct extends Construct { 120 | public readonly analyticsProcessingFunction: NodejsFunction; 121 | public readonly kinesisAnalyticsApp: kinesisanalytics.CfnApplication; 122 | 123 | constructor( 124 | parent: Construct, 125 | name: string, 126 | props: StreamingAnalyticsConstructProps 127 | ) { 128 | super(parent, name); 129 | 130 | /* eslint-disable @typescript-eslint/no-unused-vars */ 131 | props = { ...defaultProps, ...props }; 132 | const codePath = `../${props.baseCodePath}`; 133 | 134 | /* The following variables define the necessary resources for the `AnalyticsProcessingFunction` serverless 135 | function. This function consumes outputs from Kinesis Data Analytics application for processing. 
*/ 136 | const analyticsProcessingFunction = new NodejsFunction( 137 | this, 138 | "AnalyticsProcessingFunction", 139 | { 140 | description: 141 | "Consumes outputs from Kinesis Data Analytics application for processing", 142 | entry: path.join( 143 | __dirname, 144 | `${codePath}/analytics-processing/index.js` 145 | ), 146 | depsLockFilePath: path.join( 147 | __dirname, 148 | `${codePath}/analytics-processing/package-lock.json` 149 | ), 150 | 151 | memorySize: 128, 152 | timeout: cdk.Duration.seconds(60), 153 | runtime: lambda.Runtime.NODEJS_18_X, 154 | environment: { 155 | stackName: cdk.Aws.STACK_NAME, 156 | CW_NAMESPACE: `${cdk.Aws.STACK_NAME}/AWSGameAnalytics`, 157 | }, 158 | } 159 | ); 160 | analyticsProcessingFunction.addToRolePolicy( 161 | new iam.PolicyStatement({ 162 | sid: "CloudWatch", 163 | effect: iam.Effect.ALLOW, 164 | actions: ["cloudwatch:PutMetricData"], 165 | resources: ["*"], 166 | }) 167 | ); 168 | analyticsProcessingFunction.addToRolePolicy( 169 | new iam.PolicyStatement({ 170 | sid: "XRay", 171 | effect: iam.Effect.ALLOW, 172 | actions: [ 173 | "xray:PutTraceSegments", 174 | "xray:PutTelemetryRecords", 175 | "xray:GetSamplingRules", 176 | "xray:GetSamplingTargets", 177 | ], 178 | resources: ["*"], 179 | }) 180 | ); 181 | 182 | /* The following variables define the Kinesis Analytics Application's IAM Role. 
*/ 183 | const kinesisAnalyticsRole = new iam.Role(this, "KinesisAnalyticsRole", { 184 | assumedBy: new iam.ServicePrincipal("kinesisanalytics.amazonaws.com"), 185 | inlinePolicies: { 186 | KinesisAnalyticsAccess: new iam.PolicyDocument({ 187 | statements: [ 188 | new iam.PolicyStatement({ 189 | sid: "ReadKinesisStream", 190 | effect: iam.Effect.ALLOW, 191 | actions: [ 192 | "kinesis:DescribeStream", 193 | "kinesis:GetShardIterator", 194 | "kinesis:GetRecords", 195 | "kinesis:ListShards", 196 | ], 197 | resources: [props.gameEventsStream.streamArn], 198 | }), 199 | new iam.PolicyStatement({ 200 | sid: "LambdaAccess", 201 | effect: iam.Effect.ALLOW, 202 | actions: [ 203 | "lambda:InvokeFunction", 204 | "lambda:GetFunctionConfiguration", 205 | ], 206 | resources: analyticsProcessingFunction.resourceArnsForGrantInvoke, 207 | }), 208 | ], 209 | }), 210 | }, 211 | }); 212 | 213 | // KDA App that process input data in real time 214 | const appCodePath = path.join( 215 | __dirname, 216 | `${codePath}/analytics-processing/app.sql` 217 | ); 218 | const kinesisAnalyticsApp = new kinesisanalytics.CfnApplication( 219 | this, 220 | "KinesisAnalyticsApp", 221 | { 222 | applicationName: `AnalyticsApplication-${cdk.Aws.STACK_NAME}`, 223 | applicationDescription: `Real-time game analytics application, for ${cdk.Aws.STACK_NAME}`, 224 | // Load code from file 225 | applicationCode: fs.readFileSync(appCodePath).toString(), 226 | inputs: [ 227 | { 228 | namePrefix: "AnalyticsApp", 229 | inputSchema: inputSchema, 230 | kinesisStreamsInput: { 231 | resourceArn: props.gameEventsStream.streamArn, 232 | roleArn: kinesisAnalyticsRole.roleArn, 233 | }, 234 | }, 235 | ], 236 | } 237 | ); 238 | 239 | // Set Lambda as KDA output 240 | const kinesisAnalyticsLambdaOutput = 241 | new kinesisanalytics.CfnApplicationOutput( 242 | this, 243 | "KinesisAnalyticsLambdaOutput", 244 | { 245 | applicationName: kinesisAnalyticsApp.applicationName ?? 
"UNDEFINED", 246 | output: { 247 | name: "DESTINATION_STREAM", 248 | destinationSchema: { 249 | recordFormatType: "JSON", 250 | }, 251 | lambdaOutput: { 252 | resourceArn: analyticsProcessingFunction.functionArn, 253 | roleArn: kinesisAnalyticsRole.roleArn, 254 | }, 255 | }, 256 | } 257 | ); 258 | 259 | // Send errors to lambda as well 260 | const kinesisAnalyticsErrorOutput = 261 | new kinesisanalytics.CfnApplicationOutput( 262 | this, 263 | "KinesisAnalyticsErrorOutput", 264 | { 265 | applicationName: kinesisAnalyticsApp.applicationName ?? "UNDEFINED", 266 | output: { 267 | name: "error_stream", 268 | destinationSchema: { 269 | recordFormatType: "JSON", 270 | }, 271 | lambdaOutput: { 272 | resourceArn: analyticsProcessingFunction.functionArn, 273 | roleArn: kinesisAnalyticsRole.roleArn, 274 | }, 275 | }, 276 | } 277 | ); 278 | 279 | // * Needs a depnedency because output may be created before application (issue using CFN Constructs) 280 | kinesisAnalyticsLambdaOutput.addDependency(kinesisAnalyticsApp); 281 | kinesisAnalyticsErrorOutput.addDependency(kinesisAnalyticsApp); 282 | 283 | // Update `solution_helper` permissions to access Kinesis Analytics, if `ENABLE_STREAMING_ANALYTICS` is enabled 284 | props.solutionHelper.addToRolePolicy( 285 | new iam.PolicyStatement({ 286 | sid: "KinesisAnalytics", 287 | effect: iam.Effect.ALLOW, 288 | actions: [ 289 | "kinesisanalytics:StartApplication", 290 | "kinesisanalytics:DescribeApplication", 291 | ], 292 | resources: [ 293 | `arn:${cdk.Aws.PARTITION}:kinesisanalytics:${cdk.Aws.REGION}:${cdk.Aws.ACCOUNT_ID}:application/${kinesisAnalyticsApp.applicationName}`, 294 | ], 295 | }) 296 | ); 297 | 298 | const startKinesisAnalyticsAppCustomResource = new cdk.CustomResource( 299 | this, 300 | "StartKinesisAnalyticsApp", 301 | { 302 | serviceToken: props.solutionHelperProvider.serviceToken, 303 | properties: { 304 | customAction: "startKinesisAnalyticsApp", 305 | Region: cdk.Aws.REGION, 306 | kinesisAnalyticsAppName: 
kinesisAnalyticsApp.applicationName, 307 | }, 308 | } 309 | ); 310 | startKinesisAnalyticsAppCustomResource.node.addDependency( 311 | props.gameEventsStream 312 | ); 313 | startKinesisAnalyticsAppCustomResource.node.addDependency( 314 | kinesisAnalyticsLambdaOutput 315 | ); 316 | 317 | this.analyticsProcessingFunction = analyticsProcessingFunction; 318 | 319 | new cdk.CfnOutput(this, "KinesisAnalyticsAppOutput", { 320 | description: 321 | "Name of the Kinesis Analytics Application for game analytics", 322 | value: kinesisAnalyticsApp.ref, 323 | }); 324 | 325 | new cdk.CfnOutput(this, "RealTimeAnalyticsCloudWatch", { 326 | description: 327 | "Link to the Amazon CloudWatch namespace where custom metrics are published by the solution AnalyticsProcessingFunction.", 328 | value: `https://console.aws.amazon.com/cloudwatch/home?region=${cdk.Aws.REGION}#metricsV2:graph=~();query=${cdk.Aws.STACK_NAME}/AWSGameAnalytics`, 329 | }); 330 | } 331 | } 332 | -------------------------------------------------------------------------------- /infrastructure/src/constructs/streaming-ingestion-construct.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2023 Amazon.com, Inc. and its affiliates. All Rights Reserved. 3 | * 4 | * Licensed under the Amazon Software License (the "License"). 5 | * You may not use this file except in compliance with the License. 6 | * A copy of the License is located at 7 | * 8 | * http://aws.amazon.com/asl/ 9 | * 10 | * or in the "license" file accompanying this file. This file is distributed 11 | * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 12 | * express or implied. See the License for the specific language governing 13 | * permissions and limitations under the License. 
14 | */ 15 | 16 | import * as cdk from "aws-cdk-lib"; 17 | import * as iam from "aws-cdk-lib/aws-iam"; 18 | import * as logs from "aws-cdk-lib/aws-logs"; 19 | import * as kinesisFirehose from "aws-cdk-lib/aws-kinesisfirehose"; 20 | import { Construct } from "constructs"; 21 | import { GameAnalyticsPipelineConfig } from "../helpers/config-types"; 22 | 23 | /* eslint-disable @typescript-eslint/no-empty-interface */ 24 | export interface StreamingIngestionConstructProps extends cdk.StackProps { 25 | applicationsTable: cdk.aws_dynamodb.Table; 26 | gamesEventsStream: cdk.aws_kinesis.Stream; 27 | analyticsBucket: cdk.aws_s3.Bucket; 28 | rawEventsTable: cdk.aws_glue.CfnTable; 29 | gameEventsDatabase: cdk.aws_glue.CfnDatabase; 30 | eventsProcessingFunction: cdk.aws_lambda.Function; 31 | config: GameAnalyticsPipelineConfig; 32 | } 33 | 34 | const defaultProps: Partial = {}; 35 | 36 | /** 37 | * Deploys the StreamingIngestion construct 38 | */ 39 | export class StreamingIngestionConstruct extends Construct { 40 | public readonly gameEventsFirehose: kinesisFirehose.CfnDeliveryStream; 41 | 42 | constructor( 43 | parent: Construct, 44 | name: string, 45 | props: StreamingIngestionConstructProps 46 | ) { 47 | super(parent, name); 48 | 49 | /* eslint-disable @typescript-eslint/no-unused-vars */ 50 | props = { ...defaultProps, ...props }; 51 | 52 | // Create firehouse log groups and streams 53 | const firehoseLogGroup = new logs.LogGroup(this, "firehose-log-group", { 54 | retention: props.config.CLOUDWATCH_RETENTION_DAYS, 55 | }); 56 | 57 | const firehouseS3DeliveryLogStream = new logs.LogStream( 58 | this, 59 | "firehose-s3-delivery-log-stream", 60 | { 61 | logGroup: firehoseLogGroup, 62 | } 63 | ); 64 | 65 | const firehouseBackupDeliveryLogStream = new logs.LogStream( 66 | this, 67 | "firehose-backup-delivery-log-stream", 68 | { 69 | logGroup: firehoseLogGroup, 70 | } 71 | ); 72 | 73 | // Role for firehose 74 | const gamesEventsFirehoseRole = new iam.Role( 75 | this, 76 | 
"games-events-firehose-role", 77 | { 78 | assumedBy: new iam.ServicePrincipal("firehose.amazonaws.com"), 79 | inlinePolicies: { 80 | firehose_delivery_policy: new iam.PolicyDocument({ 81 | statements: [ 82 | new iam.PolicyStatement({ 83 | actions: [ 84 | "s3:AbortMultipartUpload", 85 | "s3:GetBucketLocation", 86 | "s3:GetObject", 87 | "s3:ListBucket", 88 | "s3:ListBucketMultipartUploads", 89 | "s3:PutObject", 90 | ], 91 | effect: iam.Effect.ALLOW, 92 | resources: [ 93 | props.analyticsBucket.arnForObjects("*"), 94 | props.analyticsBucket.bucketArn, 95 | ], 96 | }), 97 | new iam.PolicyStatement({ 98 | actions: [ 99 | "lambda:InvokeFunction", 100 | "lambda:GetFunctionConfiguration", 101 | ], 102 | effect: iam.Effect.ALLOW, 103 | resources: [props.eventsProcessingFunction.functionArn], 104 | }), 105 | new iam.PolicyStatement({ 106 | actions: [ 107 | "kinesis:DescribeStream", 108 | "kinesis:GetShardIterator", 109 | "kinesis:GetRecords", 110 | "kinesis:ListShards", 111 | ], 112 | effect: iam.Effect.ALLOW, 113 | resources: [props.gamesEventsStream.streamArn], 114 | }), 115 | new iam.PolicyStatement({ 116 | actions: [ 117 | "glue:GetTable", 118 | "glue:GetTableVersion", 119 | "glue:GetTableVersions", 120 | ], 121 | effect: iam.Effect.ALLOW, 122 | resources: [ 123 | `arn:${cdk.Aws.PARTITION}:glue:${cdk.Aws.REGION}:${cdk.Aws.ACCOUNT_ID}:table/${props.gameEventsDatabase.ref}/*`, 124 | `arn:${cdk.Aws.PARTITION}:glue:${cdk.Aws.REGION}:${cdk.Aws.ACCOUNT_ID}:database/${props.gameEventsDatabase.ref}`, 125 | `arn:${cdk.Aws.PARTITION}:glue:${cdk.Aws.REGION}:${cdk.Aws.ACCOUNT_ID}:catalog`, 126 | ], 127 | }), 128 | new iam.PolicyStatement({ 129 | actions: ["logs:PutLogEvents"], 130 | effect: iam.Effect.ALLOW, 131 | resources: [firehoseLogGroup.logGroupArn], 132 | }), 133 | ], 134 | }), 135 | }, 136 | } 137 | ); 138 | 139 | // Prefix to send files to in s3 140 | const s3TimestampPrefix = 141 | "year=!{timestamp:YYYY}/month=!{timestamp:MM}/day=!{timestamp:dd}"; 142 | 143 | // Firehose 
to manage stream input, process data with Lambda, and send it to s3 144 | const gameEventsFirehose = new kinesisFirehose.CfnDeliveryStream( 145 | this, 146 | "game-events-firehose", 147 | { 148 | deliveryStreamType: "KinesisStreamAsSource", 149 | kinesisStreamSourceConfiguration: { 150 | kinesisStreamArn: props.gamesEventsStream.streamArn, 151 | roleArn: gamesEventsFirehoseRole.roleArn, 152 | }, 153 | extendedS3DestinationConfiguration: { 154 | bucketArn: props.analyticsBucket.bucketArn, 155 | bufferingHints: { 156 | intervalInSeconds: props.config.DEV_MODE ? 60 : 900, 157 | sizeInMBs: 128, 158 | }, 159 | prefix: `${props.config.RAW_EVENTS_PREFIX}/${s3TimestampPrefix}/`, 160 | errorOutputPrefix: `firehose-errors/${s3TimestampPrefix}/!{firehose:error-output-type}/`, 161 | compressionFormat: "UNCOMPRESSED", 162 | roleArn: gamesEventsFirehoseRole.roleArn, 163 | processingConfiguration: { 164 | enabled: true, 165 | processors: [ 166 | { 167 | type: "Lambda", 168 | parameters: [ 169 | { 170 | parameterName: "LambdaArn", 171 | parameterValue: props.eventsProcessingFunction.functionArn, 172 | }, 173 | { 174 | parameterName: "BufferIntervalInSeconds", 175 | parameterValue: "60", 176 | }, 177 | { 178 | parameterName: "BufferSizeInMBs", 179 | parameterValue: "3", 180 | }, 181 | { 182 | parameterName: "NumberOfRetries", 183 | parameterValue: "3", 184 | }, 185 | ], 186 | }, 187 | ], 188 | }, 189 | cloudWatchLoggingOptions: { 190 | enabled: true, 191 | logGroupName: firehoseLogGroup.logGroupName, 192 | logStreamName: firehouseS3DeliveryLogStream.logStreamName, 193 | }, 194 | s3BackupMode: props.config.S3_BACKUP_MODE ? 
"Enabled" : "Disabled", 195 | s3BackupConfiguration: { 196 | bucketArn: props.analyticsBucket.bucketArn, 197 | cloudWatchLoggingOptions: { 198 | enabled: true, 199 | logGroupName: firehoseLogGroup.logGroupName, 200 | logStreamName: firehouseBackupDeliveryLogStream.logStreamName, 201 | }, 202 | compressionFormat: "GZIP", 203 | bufferingHints: { 204 | intervalInSeconds: 900, 205 | sizeInMBs: 128, 206 | }, 207 | prefix: `FirehoseS3SourceRecordBackup/${s3TimestampPrefix}/`, 208 | errorOutputPrefix: `FirehoseS3SourceRecordBackup/firehose-errors/${s3TimestampPrefix}/!{firehose:error-output-type}/`, 209 | roleArn: gamesEventsFirehoseRole.roleArn, 210 | }, 211 | dataFormatConversionConfiguration: { 212 | enabled: true, 213 | inputFormatConfiguration: { 214 | deserializer: { 215 | openXJsonSerDe: { 216 | caseInsensitive: true, 217 | convertDotsInJsonKeysToUnderscores: false, 218 | }, 219 | }, 220 | }, 221 | outputFormatConfiguration: { 222 | serializer: { 223 | parquetSerDe: { 224 | compression: "SNAPPY", 225 | }, 226 | }, 227 | }, 228 | schemaConfiguration: { 229 | catalogId: cdk.Aws.ACCOUNT_ID, 230 | roleArn: gamesEventsFirehoseRole.roleArn, 231 | databaseName: props.gameEventsDatabase.ref, 232 | tableName: props.rawEventsTable.ref, 233 | region: cdk.Aws.REGION, 234 | versionId: "LATEST", 235 | }, 236 | }, 237 | }, 238 | } 239 | ); 240 | 241 | this.gameEventsFirehose = gameEventsFirehose; 242 | } 243 | } 244 | -------------------------------------------------------------------------------- /infrastructure/src/helpers/config-types.ts: -------------------------------------------------------------------------------- 1 | interface AccountConfiguration { 2 | NAME: string; 3 | ACCOUNT: string; 4 | REGION: string; 5 | } 6 | 7 | export interface GameAnalyticsPipelineConfig { 8 | accounts: AccountConfiguration[]; 9 | KinesisStreamShards: number; 10 | DEV_MODE: boolean; 11 | EnableStreamingAnalytics: boolean; 12 | SolutionAdminEmailAddress: string; 13 | // Default Configuration 
Settings 14 | DEMO: string; 15 | WORKLOAD_NAME: string; 16 | CDK_VERSION: string; 17 | NODE_VERSION: string; 18 | PYTHON_VERSION: string; 19 | EMAIL_ADDRESS: string; 20 | API_STAGE_NAME: string; 21 | RAW_EVENTS_PREFIX: string; 22 | PROCESSED_EVENTS_PREFIX: string; 23 | RAW_EVENTS_TABLE: string; 24 | GLUE_TMP_PREFIX: string; 25 | STREAM_SHARD_COUNT: number; 26 | ENABLE_STREAMING_ANALYTICS: boolean; 27 | S3_BACKUP_MODE: boolean; 28 | CLOUDWATCH_RETENTION_DAYS: number; 29 | GITHUB_USERNAME: string; 30 | GITHUB_REPO_NAME: string; 31 | CONNECTION_ARN: string; 32 | } 33 | -------------------------------------------------------------------------------- /infrastructure/src/helpers/stack-config-loader.ts: -------------------------------------------------------------------------------- 1 | import { GameAnalyticsPipelineConfig } from "./config-types"; 2 | import * as yaml from "js-yaml"; 3 | import * as fs from "fs"; 4 | import * as path from "path"; 5 | 6 | // Loads environment configuration from config.yaml 7 | export function getConfig(): GameAnalyticsPipelineConfig { 8 | let unparsedConfig: GameAnalyticsPipelineConfig = yaml.load( 9 | fs.readFileSync(path.resolve("./config.yaml"), "utf8") 10 | ) as GameAnalyticsPipelineConfig; 11 | console.log(unparsedConfig); 12 | return unparsedConfig; 13 | } 14 | -------------------------------------------------------------------------------- /infrastructure/src/pipeline-stack.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2023 Amazon.com, Inc. and its affiliates. All Rights Reserved. 3 | * 4 | * Licensed under the Amazon Software License (the 'License'). 5 | * You may not use this file except in compliance with the License. 6 | * A copy of the License is located at 7 | * 8 | * http://aws.amazon.com/asl/ 9 | * 10 | * or in the 'license' file accompanying this file. 
This file is distributed 11 | * on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 12 | * express or implied. See the License for the specific language governing 13 | * permissions and limitations under the License. 14 | */ 15 | 16 | import * as cdk from "aws-cdk-lib"; 17 | import * as codebuild from "aws-cdk-lib/aws-codebuild"; 18 | import * as cdkPipelines from "aws-cdk-lib/pipelines"; 19 | import { Construct } from "constructs"; 20 | import { GameAnalyticsPipelineConfig } from "./helpers/config-types"; 21 | import { InfrastructureStack } from "./app-stack"; 22 | 23 | /* eslint-disable @typescript-eslint/no-empty-interface */ 24 | export interface PipelineStackProps extends cdk.StackProps { 25 | config: GameAnalyticsPipelineConfig; 26 | } 27 | 28 | const defaultProps: Partial = {}; 29 | 30 | /** 31 | * Deploys the Pipeline Stack 32 | */ 33 | export class PipelineStack extends cdk.Stack { 34 | constructor(scope: Construct, id: string, props: PipelineStackProps) { 35 | super(scope, id, props); 36 | 37 | /* eslint-disable @typescript-eslint/no-unused-vars */ 38 | props = { ...defaultProps, ...props }; 39 | 40 | const pipelineSource = cdkPipelines.CodePipelineSource.connection( 41 | props.config.GITHUB_USERNAME + "/" + props.config.GITHUB_REPO_NAME, 42 | "main", 43 | { 44 | connectionArn: props.config.CONNECTION_ARN 45 | } 46 | ); 47 | 48 | // Buildpsec used for synth 49 | const buildSpec = { 50 | phases: { 51 | install: { 52 | "runtime-version": { 53 | nodejs: props.config.NODE_VERSION, 54 | python: props.config.PYTHON_VERSION, 55 | }, 56 | }, 57 | }, 58 | }; 59 | 60 | // Synth step runs npm steps to get proper output needed by cdk pipelines 61 | const synth = new cdkPipelines.CodeBuildStep("Synth", { 62 | input: pipelineSource, 63 | partialBuildSpec: codebuild.BuildSpec.fromObject(buildSpec), 64 | commands: ["npm run build"], 65 | primaryOutputDirectory: "infrastructure/cdk.out", 66 | }); 67 | 68 | // Deployment pipeline 69 | const pipeline = 
new cdkPipelines.CodePipeline(this, "Pipeline", { 70 | synth, 71 | // cliVersion: props.config.CDK_VERSION, 72 | crossAccountKeys: true, 73 | dockerEnabledForSynth: true, 74 | }); 75 | 76 | // Creates a deployment stage for each stage in config 77 | props.config.accounts.forEach(({ NAME, ACCOUNT, REGION }) => { 78 | this.addStage(pipeline, NAME, ACCOUNT, REGION, props.config); 79 | }); 80 | } 81 | 82 | // Add a deployment stage 83 | addStage( 84 | pipeline: cdkPipelines.CodePipeline, 85 | stageName: string, 86 | stageAccount: string, 87 | stageRegion: string, 88 | config: GameAnalyticsPipelineConfig 89 | ) { 90 | const stageConstruct = new cdk.Stage(this, stageName, { 91 | env: { 92 | account: stageAccount, 93 | region: stageRegion, 94 | }, 95 | }); 96 | 97 | const deploymentStep = new InfrastructureStack( 98 | stageConstruct, 99 | config.WORKLOAD_NAME, 100 | { 101 | config, 102 | } 103 | ); 104 | 105 | // If QA we will add an approval stage after 106 | if (stageName === "QA") { 107 | const manualApprovalStep = new cdkPipelines.ManualApprovalStep( 108 | "ProductionApproval", 109 | { 110 | comment: 111 | "Reviewed Test Results and Approve/Reject for Production Deployment?", 112 | } 113 | ); 114 | 115 | pipeline.addStage(stageConstruct, { 116 | post: [manualApprovalStep], 117 | }); 118 | } else { 119 | pipeline.addStage(stageConstruct); 120 | } 121 | } 122 | } -------------------------------------------------------------------------------- /infrastructure/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../tsconfig.json", 3 | "compilerOptions": { 4 | "target": "ES2018", 5 | "module": "commonjs", 6 | "lib": ["es2018"], 7 | "declaration": true, 8 | "strict": true, 9 | "noImplicitAny": true, 10 | "strictNullChecks": true, 11 | "noImplicitThis": true, 12 | "alwaysStrict": true, 13 | "noUnusedLocals": false, 14 | "noUnusedParameters": false, 15 | "noImplicitReturns": true, 16 | 
"noFallthroughCasesInSwitch": false, 17 | "inlineSourceMap": true, 18 | "inlineSources": true, 19 | "experimentalDecorators": true, 20 | "strictPropertyInitialization": false, 21 | "typeRoots": ["./node_modules/@types"] 22 | }, 23 | "type": "module", 24 | "include": ["./src"] 25 | } 26 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "project-root", 3 | "version": "0.0.1", 4 | "scripts": { 5 | "start": "cd web-app && npm run start", 6 | "build": "python3 build.py", 7 | "build.logic": "python3 build.py --business_logic", 8 | "build.infrastructure": "python3 build.py --infrastructure", 9 | "deploy.bootstrap": "cd infrastructure && npm install && npm run bootstrap", 10 | "deploy": "cd infrastructure && npm run cdk deploy -- -c stack_name=\"${STACK_NAME:-}\" --all --require-approval never", 11 | "destroy": "cd infrastructure && npm run cdk destroy -- -c stack_name=\"${STACK_NAME:-}\" --all" 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "experimentalDecorators": true, 4 | "allowJs": true, 5 | "skipLibCheck": true, 6 | "esModuleInterop": true, 7 | "allowSyntheticDefaultImports": true, 8 | "strict": true, 9 | "forceConsistentCasingInFileNames": true, 10 | "noFallthroughCasesInSwitch": true, 11 | "moduleResolution": "node", 12 | "resolveJsonModule": true, 13 | "isolatedModules": true, 14 | "noEmit": true 15 | } 16 | } 17 | --------------------------------------------------------------------------------