├── assets ├── index.html ├── src │ ├── types │ │ ├── aws-amplify-ui.d.ts │ │ └── aws-amplify-react.d.ts │ ├── react-app-env.d.ts │ ├── modules │ │ ├── notFound │ │ │ ├── notFound.css │ │ │ └── NotFound.tsx │ │ ├── report │ │ │ ├── report.css │ │ │ ├── Report.tsx │ │ │ └── Embed.tsx │ │ └── signup │ │ │ ├── signup.css │ │ │ ├── rides.css │ │ │ ├── reviews.css │ │ │ ├── login.css │ │ │ ├── Rides.tsx │ │ │ ├── Reviews.tsx │ │ │ ├── Login.tsx │ │ │ ├── home.css │ │ │ └── Signup.tsx │ ├── images │ │ └── bikenow-demo.png │ ├── index.css │ ├── common │ │ └── PropsRoute.tsx │ ├── App.test.tsx │ ├── config.js │ ├── config.ts │ ├── index.tsx │ ├── Routes.tsx │ ├── App.css │ ├── App.tsx │ ├── service-worker.ts │ └── registerServiceWorker.ts ├── public │ ├── favicon.ico │ ├── manifest.json │ └── index.html ├── .eslintignore ├── .eslintrc.js ├── tsconfig.json └── package.json ├── lambdas ├── api_get_rides │ ├── requirements.txt │ └── index.py ├── api_post_rides │ ├── requirements.txt │ └── index.py ├── setup_empty_bucket │ ├── requirements.txt │ ├── index.py │ └── cfnresponse │ │ └── __init__.py ├── setup_rds_ddl │ ├── requirements.txt │ ├── cfnresponse │ │ └── __init__.py │ └── index.py ├── setup_upload_artifacts │ ├── requirements.txt │ ├── artifacts │ │ ├── bikenow-xgboost-regression-model.tar.gz │ │ ├── glue_unload_station_history_s3.py │ │ ├── glue_load_station_detail_redshift.py │ │ ├── glue_load_station_history_redshift.py │ │ └── glue_load_station_review_redshift.py │ ├── genpath.py │ ├── index.py │ └── cfnresponse │ │ └── __init__.py ├── setup_datawarehouse_ddl │ ├── requirements.txt │ ├── sql │ │ ├── create_spectrum_schema.sql │ │ └── create_status_history_table.sql │ ├── cfnresponse │ │ └── __init__.py │ └── index.py ├── api_search_stations │ ├── requirements.txt │ └── index.py ├── stream_station_status_to_es │ ├── requirements.txt │ └── index.py ├── api_get_quicksight_url │ ├── index.js │ ├── package.json │ ├── app.js │ └── package-lock.json ├── stream_station_status_to_s3 │ └── index.py ├── api_post_reviews │ └── index.py ├── load_station_detail_to_ddb │ └── index.py ├── load_station_status_to_ddb │ └── index.py ├── api_get_reviews │ └── index.py ├── stream_station_review_to_s3 │ └── index.py ├── api_predict_station_status │ └── index.py └── setup_update_config │ └── index.js ├── images ├── 03_AIMLDiagram.png ├── 02_DLAnalyticsDiagram.png └── 01_PurposeBuiltDbDiagram.png ├── quicksight ├── images │ ├── 004_ManageData.png │ ├── 005_NewDataSet.png │ ├── 001_ManageQuicksight.png │ ├── 006_ChooseYourTable.png │ ├── 014_CodeCommitAppTsx.png │ ├── 010_PublishADashboard.png │ ├── 007_NewAuroraDataSource.png │ ├── 008_AuroraChooseYourTable.png │ ├── 009_SharePublishDashboard.png │ ├── 013_EnvironmentVariables.png │ ├── 015_TestYourApplication.png │ ├── 003_AddAuroraVpcConnection.png │ ├── 011_ShareDashboardWithUsers.png │ ├── 002_AddRedshiftVpcConnection.png │ └── 012_DatabaseApplicationLambda.png └── README.md ├── .gitignore ├── CODE_OF_CONDUCT.md ├── sample └── seed_reviews.py ├── LICENSE ├── templates ├── setup-template.yaml ├── network-template.yaml ├── codebuild-template.yaml └── aiml-template.yaml ├── CONTRIBUTING.md ├── template.yaml └── README.md /assets/index.html: -------------------------------------------------------------------------------- 1 |
Hello world!
2 | -------------------------------------------------------------------------------- /lambdas/api_get_rides/requirements.txt: -------------------------------------------------------------------------------- 1 | pymysql -------------------------------------------------------------------------------- /lambdas/api_post_rides/requirements.txt: -------------------------------------------------------------------------------- 1 | pymysql -------------------------------------------------------------------------------- /lambdas/setup_empty_bucket/requirements.txt: -------------------------------------------------------------------------------- 1 | requests -------------------------------------------------------------------------------- /lambdas/setup_rds_ddl/requirements.txt: -------------------------------------------------------------------------------- 1 | pymysql 2 | requests -------------------------------------------------------------------------------- /lambdas/setup_upload_artifacts/requirements.txt: -------------------------------------------------------------------------------- 1 | requests -------------------------------------------------------------------------------- /assets/src/types/aws-amplify-ui.d.ts: -------------------------------------------------------------------------------- 1 | declare module '@aws-amplify/ui' -------------------------------------------------------------------------------- /assets/src/react-app-env.d.ts: -------------------------------------------------------------------------------- 1 | /// 2 | -------------------------------------------------------------------------------- /lambdas/setup_datawarehouse_ddl/requirements.txt: -------------------------------------------------------------------------------- 1 | requests 2 | aws-psycopg2 -------------------------------------------------------------------------------- /lambdas/api_search_stations/requirements.txt: -------------------------------------------------------------------------------- 1 | requests 2 | requests_aws4auth -------------------------------------------------------------------------------- /assets/src/types/aws-amplify-react.d.ts: -------------------------------------------------------------------------------- 1 | declare module 'aws-amplify-react'; 2 | -------------------------------------------------------------------------------- /lambdas/stream_station_status_to_es/requirements.txt: -------------------------------------------------------------------------------- 1 | requests 2 | requests_aws4auth -------------------------------------------------------------------------------- /assets/src/modules/notFound/notFound.css: -------------------------------------------------------------------------------- 1 | .NotFound { 2 | padding-top: 100px; 3 | text-align: center; 4 | } -------------------------------------------------------------------------------- /assets/public/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-bikenow-demo/HEAD/assets/public/favicon.ico -------------------------------------------------------------------------------- /images/03_AIMLDiagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-bikenow-demo/HEAD/images/03_AIMLDiagram.png -------------------------------------------------------------------------------- /images/02_DLAnalyticsDiagram.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-bikenow-demo/HEAD/images/02_DLAnalyticsDiagram.png -------------------------------------------------------------------------------- /assets/src/images/bikenow-demo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-bikenow-demo/HEAD/assets/src/images/bikenow-demo.png -------------------------------------------------------------------------------- /images/01_PurposeBuiltDbDiagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-bikenow-demo/HEAD/images/01_PurposeBuiltDbDiagram.png -------------------------------------------------------------------------------- /quicksight/images/004_ManageData.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-bikenow-demo/HEAD/quicksight/images/004_ManageData.png -------------------------------------------------------------------------------- /quicksight/images/005_NewDataSet.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-bikenow-demo/HEAD/quicksight/images/005_NewDataSet.png -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | **.DS_Store 2 | **__pycache__ 3 | **node_modules 4 | **build 5 | output.yaml 6 | .vscode/settings.json 7 | assets/.eslintcache 8 | -------------------------------------------------------------------------------- /assets/src/index.css: -------------------------------------------------------------------------------- 1 | body { 2 | margin: 0; 3 | padding: 0; 4 | font-family: sans-serif; 5 | background: #252f3d !important; 6 | } 7 | -------------------------------------------------------------------------------- /quicksight/images/001_ManageQuicksight.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-bikenow-demo/HEAD/quicksight/images/001_ManageQuicksight.png -------------------------------------------------------------------------------- /quicksight/images/006_ChooseYourTable.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-bikenow-demo/HEAD/quicksight/images/006_ChooseYourTable.png -------------------------------------------------------------------------------- /quicksight/images/014_CodeCommitAppTsx.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-bikenow-demo/HEAD/quicksight/images/014_CodeCommitAppTsx.png -------------------------------------------------------------------------------- /quicksight/images/010_PublishADashboard.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-bikenow-demo/HEAD/quicksight/images/010_PublishADashboard.png -------------------------------------------------------------------------------- /quicksight/images/007_NewAuroraDataSource.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/aws-samples/aws-bikenow-demo/HEAD/quicksight/images/007_NewAuroraDataSource.png -------------------------------------------------------------------------------- /quicksight/images/008_AuroraChooseYourTable.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-bikenow-demo/HEAD/quicksight/images/008_AuroraChooseYourTable.png -------------------------------------------------------------------------------- /quicksight/images/009_SharePublishDashboard.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-bikenow-demo/HEAD/quicksight/images/009_SharePublishDashboard.png -------------------------------------------------------------------------------- /quicksight/images/013_EnvironmentVariables.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-bikenow-demo/HEAD/quicksight/images/013_EnvironmentVariables.png -------------------------------------------------------------------------------- /quicksight/images/015_TestYourApplication.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-bikenow-demo/HEAD/quicksight/images/015_TestYourApplication.png -------------------------------------------------------------------------------- /quicksight/images/003_AddAuroraVpcConnection.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-bikenow-demo/HEAD/quicksight/images/003_AddAuroraVpcConnection.png -------------------------------------------------------------------------------- /quicksight/images/011_ShareDashboardWithUsers.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-bikenow-demo/HEAD/quicksight/images/011_ShareDashboardWithUsers.png -------------------------------------------------------------------------------- /quicksight/images/002_AddRedshiftVpcConnection.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-bikenow-demo/HEAD/quicksight/images/002_AddRedshiftVpcConnection.png -------------------------------------------------------------------------------- /quicksight/images/012_DatabaseApplicationLambda.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-bikenow-demo/HEAD/quicksight/images/012_DatabaseApplicationLambda.png -------------------------------------------------------------------------------- /assets/src/modules/notFound/NotFound.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import "./notFound.css"; 3 | 4 | export default () => 5 |
<div className="NotFound"> 6 | <h3>Sorry, page not found!</h3> 7 | </div>
; -------------------------------------------------------------------------------- /assets/.eslintignore: -------------------------------------------------------------------------------- 1 | # don't ever lint node_modules 2 | node_modules 3 | # don't lint build output (make sure it's set to your correct build folder name) 4 | dist 5 | # don't lint nyc coverage output 6 | coverage 7 | * -------------------------------------------------------------------------------- /assets/src/modules/report/report.css: -------------------------------------------------------------------------------- 1 | @media all and (min-width: 480px) { 2 | .Reports { 3 | height: 100%; 4 | padding: 30px; 5 | display: block; 6 | margin-left: auto; 7 | margin-right: auto; 8 | } 9 | } -------------------------------------------------------------------------------- /lambdas/setup_upload_artifacts/artifacts/bikenow-xgboost-regression-model.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-bikenow-demo/HEAD/lambdas/setup_upload_artifacts/artifacts/bikenow-xgboost-regression-model.tar.gz -------------------------------------------------------------------------------- /lambdas/setup_datawarehouse_ddl/sql/create_spectrum_schema.sql: -------------------------------------------------------------------------------- 1 | DROP SCHEMA IF EXISTS bikedb_spectrum; 2 | 3 | CREATE EXTERNAL SCHEMA IF NOT EXISTS bikedb_spectrum 4 | FROM DATA CATALOG 5 | DATABASE '${GLUE_DB}' 6 | IAM_ROLE '${IAM_ROLE_ARN}' -------------------------------------------------------------------------------- /assets/src/common/PropsRoute.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import { Route } from "react-router"; 3 | 4 | //@ts-ignore 5 | export default ({ component: C, props: cProps, ...rest }) => 6 | } />; -------------------------------------------------------------------------------- /assets/src/App.test.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import ReactDOM from 'react-dom'; 3 | import App from './App'; 4 | 5 | it('renders without crashing', () => { 6 | const div = document.createElement('div'); 7 | ReactDOM.render(, div); 8 | ReactDOM.unmountComponentAtNode(div); 9 | }); 10 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | ## Code of Conduct 2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 4 | opensource-codeofconduct@amazon.com with any additional questions or comments. 
5 | -------------------------------------------------------------------------------- /assets/src/modules/signup/signup.css: -------------------------------------------------------------------------------- 1 | @media all and (min-width: 480px) { 2 | .Signup { 3 | padding: 60px 0; 4 | } 5 | 6 | .Signup form { 7 | margin: 0 auto; 8 | max-width: 320px; 9 | } 10 | } 11 | 12 | .Signup form span.help-block { 13 | font-size: 14px; 14 | padding-bottom: 10px; 15 | color: #999; 16 | } -------------------------------------------------------------------------------- /lambdas/api_get_quicksight_url/index.js: -------------------------------------------------------------------------------- 1 | const awsServerlessExpress = require('aws-serverless-express'); 2 | const app = require('./app'); 3 | 4 | const server = awsServerlessExpress.createServer(app); 5 | 6 | exports.handler = (event, context) => { 7 | console.log(`EVENT: ${JSON.stringify(event)}`); 8 | awsServerlessExpress.proxy(server, event, context); 9 | }; -------------------------------------------------------------------------------- /assets/public/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "short_name": "BikeNow", 3 | "name": "AWS BikeNow Demo", 4 | "icons": [ 5 | { 6 | "src": "favicon.ico", 7 | "sizes": "64x64 32x32 24x24 16x16", 8 | "type": "image/x-icon" 9 | } 10 | ], 11 | "start_url": "./index.html", 12 | "display": "standalone", 13 | "theme_color": "#252f3d", 14 | "background_color": "#252f3d" 15 | } -------------------------------------------------------------------------------- /assets/src/modules/signup/rides.css: -------------------------------------------------------------------------------- 1 | @media all and (min-width: 480px) { 2 | .Rides { 3 | height: 100%; 4 | padding: 30px; 5 | display: block; 6 | margin-left: auto; 7 | margin-right: auto; 8 | } 9 | } 10 | 11 | a.empty-text { 12 | color: #ff9900; 13 | text-decoration: none; 14 | } 15 | 16 | a:hover.empty-text { 17 | color: #ff9900; 18 | text-decoration: underline; 19 | } -------------------------------------------------------------------------------- /assets/src/modules/signup/reviews.css: -------------------------------------------------------------------------------- 1 | @media all and (min-width: 480px) { 2 | .Reviews { 3 | height: 100%; 4 | padding: 30px; 5 | display: block; 6 | margin-left: auto; 7 | margin-right: auto; 8 | } 9 | } 10 | 11 | a.empty-text { 12 | color: #ff9900; 13 | text-decoration: none; 14 | } 15 | 16 | a:hover.empty-text { 17 | color: #ff9900; 18 | text-decoration: underline; 19 | } -------------------------------------------------------------------------------- /assets/src/modules/signup/login.css: -------------------------------------------------------------------------------- 1 | @media all and (min-width: 480px) { 2 | .Login { 3 | max-width: 350px; 4 | height: 100%; 5 | padding: 30px; 6 | display: block; 7 | margin-left: auto; 8 | margin-right: auto; 9 | } 10 | 11 | .Login form { 12 | margin: 0 auto; 13 | max-width: 320px; 14 | } 15 | } 16 | 17 | .control-label{ 18 | color: #fff !important; 19 | text-shadow: none !important; 20 | font-size: 20px !important; 21 | font-weight: 200; 22 | } -------------------------------------------------------------------------------- /lambdas/api_get_quicksight_url/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "api_get_quicksight_url", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": 
"index.js", 6 | "dependencies": { 7 | "amazon-cognito-identity-js": "^3.0.15", 8 | "aws-serverless-express": "^3.3.5", 9 | "body-parser": "^1.17.1", 10 | "express": "^4.15.2" 11 | }, 12 | "devDependencies": {}, 13 | "scripts": { 14 | "test": "echo \"Error: no test specified\" && exit 1" 15 | }, 16 | "author": "", 17 | "license": "ISC" 18 | } 19 | -------------------------------------------------------------------------------- /assets/src/config.js: -------------------------------------------------------------------------------- 1 | export default { 2 | MAX_ATTACHMENT_SIZE: 5000000, 3 | bikenowApi: { 4 | REGION: "us-east-1", 5 | API_URL: "https://1234567890.execute-api.us-east-1.amazonaws.com/demo" 6 | }, 7 | aimlApi: { 8 | REGION: "us-east-1", 9 | API_URL: "https://0987654321.execute-api.us-east-1.amazonaws.com/demo" 10 | }, 11 | cognito: { 12 | REGION: "us-east-1", 13 | USER_POOL_ID: "us-east-1_12345678", 14 | APP_CLIENT_ID: "12345678", 15 | IDENTITY_POOL_ID: "us-east-1:12345678" 16 | } 17 | }; -------------------------------------------------------------------------------- /assets/src/config.ts: -------------------------------------------------------------------------------- 1 | export default { 2 | MAX_ATTACHMENT_SIZE: 5000000, 3 | bikenowApi: { 4 | REGION: "us-east-1", 5 | API_URL: "https://1234567890.execute-api.us-east-1.amazonaws.com/demo" 6 | }, 7 | aimlApi: { 8 | REGION: "us-east-1", 9 | API_URL: "https://0987654321.execute-api.us-east-1.amazonaws.com/demo" 10 | }, 11 | cognito: { 12 | REGION: "us-east-1", 13 | USER_POOL_ID: "us-east-1_12345678", 14 | APP_CLIENT_ID: "12345678", 15 | IDENTITY_POOL_ID: "us-east-1:12345678" 16 | } 17 | }; -------------------------------------------------------------------------------- /lambdas/setup_upload_artifacts/genpath.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | from os import listdir 4 | from os.path import isfile, join 5 | 6 | SCRIPT_BUCKET = 'mybucket' 7 | SCRIPT_FOLDER = 'artifacts' 8 | 9 | # Copy Glue script files to S3 bucket 10 | script_path = 'artifacts' 11 | #my_bucket = s3.Bucket(SCRIPT_BUCKET) 12 | 13 | for path, subdirs, files in os.walk(script_path): 14 | path = path.replace("\\","/") 15 | directory_name = path.replace(script_path, "") 16 | 17 | for file in files: 18 | print(SCRIPT_FOLDER + directory_name + '/' + file) -------------------------------------------------------------------------------- /assets/.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | root: true, 3 | parser: '@typescript-eslint/parser', 4 | rules: { 5 | '@typescript-eslint/ban-ts-comment': 'off', 6 | '@typescript-eslint/no-var-requires': 'off', 7 | 'no-useless-escape': 0, 8 | 'prefer-const': 0, 9 | 'react/prop-types': 0, 10 | 'react/display-name': 0 11 | }, 12 | plugins: [ 13 | '@typescript-eslint', 14 | 'react' 15 | ], 16 | extends: [ 17 | 'eslint:recommended', 18 | 'plugin:@typescript-eslint/recommended', 19 | 'plugin:react/recommended' 20 | ], 21 | }; -------------------------------------------------------------------------------- /sample/seed_reviews.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | import json 3 | import os 4 | from boto3.dynamodb.conditions import Key 5 | from datetime import datetime 6 | 7 | # Read environment variables 8 | STATION_REVIEW_TABLE = 'station_review' 9 | # Initialize boto3 clients 10 | dynamodb = boto3.resource('dynamodb') 11 
| table = dynamodb.Table(STATION_REVIEW_TABLE) 12 | 13 | with open('seed_reviews.json') as f: 14 | reviews = json.load(f) 15 | 16 | row_count = 0 17 | for r in reviews: 18 | r['create_date'] = datetime.fromtimestamp(r['create_date'] / 1e3).strftime('%Y-%m-%d %H:%M:%S.%f') 19 | table.put_item(Item=r) 20 | row_count = row_count + 1 21 | if row_count % 1000 == 0: 22 | print('Processed {} records.'.format(row_count)) 23 | 24 | print('Finished seeding reviews.') -------------------------------------------------------------------------------- /assets/src/modules/report/Report.tsx: -------------------------------------------------------------------------------- 1 | import React, { Component } from 'react'; 2 | import { Redirect } from 'react-router'; 3 | 4 | import Embed from './Embed'; 5 | 6 | import "./report.css"; 7 | import API from "@aws-amplify/api"; 8 | 9 | interface ReportProps { 10 | isAuthenticated: boolean; 11 | } 12 | 13 | interface ReportState { 14 | redirect: boolean; 15 | } 16 | 17 | export default class Report extends Component { 18 | constructor(props: ReportProps) { 19 | super(props); 20 | 21 | this.state = { 22 | redirect: false, 23 | }; 24 | } 25 | 26 | render() { 27 | if (this.state.redirect) return 28 | return ( 29 |
<div className="Reports"> 30 | <Embed /> 31 | </div>
32 | ); 33 | } 34 | } -------------------------------------------------------------------------------- /assets/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es5", 4 | "allowJs": false, 5 | "skipLibCheck": false, 6 | "esModuleInterop": true, 7 | "allowSyntheticDefaultImports": true, 8 | "strict": true, 9 | "forceConsistentCasingInFileNames": true, 10 | "module": "esnext", 11 | "moduleResolution": "node", 12 | "resolveJsonModule": true, 13 | "isolatedModules": true, 14 | "noEmit": true, 15 | "jsx": "react-jsx", 16 | "lib": [ 17 | "es2015", 18 | "es2017", 19 | "dom", 20 | "esnext.asynciterable" 21 | ], 22 | "typeRoots": [ 23 | "./src/types", 24 | "./node_modules/@types" 25 | ], 26 | "noFallthroughCasesInSwitch": true 27 | }, 28 | "include": [ 29 | "src" 30 | ], 31 | "exclude": [ 32 | "./node_modules/*" 33 | ] 34 | } 35 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of 4 | this software and associated documentation files (the "Software"), to deal in 5 | the Software without restriction, including without limitation the rights to 6 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 7 | the Software, and to permit persons to whom the Software is furnished to do so. 8 | 9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 10 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 11 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 12 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 13 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 14 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
15 | 16 | -------------------------------------------------------------------------------- /assets/src/index.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import ReactDOM from 'react-dom'; 3 | import { BrowserRouter as Router } from "react-router-dom"; 4 | import './index.css'; 5 | import App from './App'; 6 | import registerServiceWorker from './registerServiceWorker'; 7 | import Amplify from "@aws-amplify/core"; 8 | import config from "./config"; 9 | 10 | import 'bootstrap/dist/css/bootstrap.css'; 11 | 12 | Amplify.configure({ 13 | Auth: { 14 | mandatorySignIn: true, 15 | region: config.cognito.REGION, 16 | userPoolId: config.cognito.USER_POOL_ID, 17 | identityPoolId: config.cognito.IDENTITY_POOL_ID, 18 | userPoolWebClientId: config.cognito.APP_CLIENT_ID 19 | }, 20 | API: { 21 | endpoints: [ 22 | { 23 | name: "bikenow", 24 | endpoint: config.bikenowApi.API_URL, 25 | region: config.bikenowApi.REGION 26 | }, 27 | { 28 | name: "aimlApi", 29 | endpoint: config.aimlApi.API_URL, 30 | region: config.aimlApi.REGION 31 | } 32 | ] 33 | } 34 | }); 35 | 36 | ReactDOM.render( 37 | 38 | 39 | , 40 | document.getElementById('root') 41 | ); 42 | registerServiceWorker(); 43 | -------------------------------------------------------------------------------- /assets/src/Routes.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import { Route, Switch } from "react-router-dom"; 3 | import PropsRoute from "./common/PropsRoute"; 4 | import Home from "./modules/signup/Home"; 5 | import Login from "./modules/signup/Login"; 6 | import Signup from "./modules/signup/Signup"; 7 | import Rides from "./modules/signup/Rides"; 8 | import Reviews from "./modules/signup/Reviews"; 9 | import Report from "./modules/report/Report"; 10 | import NotFound from "./modules/notFound/NotFound"; 11 | 12 | interface RouteProps { 13 | isAuthenticated: boolean; 14 | userHasAuthenticated: (authenticated: boolean) => void; 15 | } 16 | 17 | export const Routes: React.SFC = (childProps) => 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | ; -------------------------------------------------------------------------------- /lambdas/setup_datawarehouse_ddl/sql/create_status_history_table.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS public.station_status_history; 2 | 3 | CREATE TABLE IF NOT EXISTS public.station_status_history 4 | ( 5 | station_id INT 6 | ,num_bikes_available INT 7 | ,is_installed BOOLEAN 8 | ,is_returning BOOLEAN 9 | ,is_renting BOOLEAN 10 | ,last_reported TIMESTAMP 11 | ,load_partition VARCHAR(10) 12 | ,PRIMARY KEY(station_id, last_reported) 13 | ) 14 | DISTKEY(station_id) 15 | SORTKEY(last_reported); 16 | 17 | DROP TABLE IF EXISTS public.station_review_sentiment; 18 | 19 | CREATE TABLE IF NOT EXISTS public.station_review_sentiment 20 | ( 21 | station_id INT 22 | ,user_id VARCHAR(64) 23 | ,review VARCHAR(255) 24 | ,sentiment VARCHAR(16) 25 | ,sentiment_mixed FLOAT 26 | ,sentiment_neutral FLOAT 27 | ,sentiment_positive FLOAT 28 | ,sentiment_negative FLOAT 29 | ,create_date TIMESTAMP 30 | ,load_partition VARCHAR(10) 31 | ,PRIMARY KEY(station_id, user_id, create_date) 32 | ) 33 | DISTKEY(station_id) 34 | SORTKEY(create_date); 35 | 36 | DROP TABLE IF EXISTS public.station_detail; 37 | 38 | CREATE TABLE IF NOT EXISTS public.station_detail 39 | ( 40 | station_id INT 41 | ,station_name VARCHAR(128) 42 | ,capacity INT 43 | ,lon FLOAT 
44 | ,lat FLOAT 45 | ,last_updated TIMESTAMP 46 | ,PRIMARY KEY(station_id) 47 | ) -------------------------------------------------------------------------------- /assets/src/App.css: -------------------------------------------------------------------------------- 1 | .App { 2 | margin-top: 15px; 3 | color: white; 4 | } 5 | 6 | .navbar-default{ 7 | background-color: transparent !important; 8 | box-shadow: none !important; 9 | background-image: none !important; 10 | border: 0 !important; 11 | } 12 | 13 | .navbar-default .navbar-brand{ 14 | text-shadow: none !important; 15 | padding: 20px 0; 16 | font-size: 28px; 17 | line-height:30px; 18 | } 19 | 20 | .navbar-brand, .navbar-nav > li > a{ 21 | color: white !important; 22 | text-shadow: none !important; 23 | font-size: 20px; 24 | } 25 | 26 | .navbar-brand, .navbar-nav > li{ 27 | padding: 10px 0; 28 | line-height:30px; 29 | display: block; 30 | } 31 | 32 | .navbar-default .navbar-nav > .open > a, .navbar-default .navbar-nav > .active > a{ 33 | background-image: none !important; 34 | background-color: transparent !important; 35 | box-shadow: none !important; 36 | } 37 | .navbar-default .navbar-nav > .open > a, .navbar-default .navbar-nav > .active > a:active{ 38 | background-image: none !important; 39 | background-color: transparent !important; 40 | box-shadow: none !important; 41 | } 42 | 43 | .navbar-default .navbar-nav > .open > a, .navbar-default .navbar-nav > .active > a:focus{ 44 | background-image: none !important; 45 | background-color: transparent !important; 46 | box-shadow: none !important; 47 | } 48 | 49 | .navbar-nav.ml-auto { 50 | position: absolute; 51 | right: 0px; 52 | } -------------------------------------------------------------------------------- /lambdas/stream_station_status_to_s3/index.py: -------------------------------------------------------------------------------- 1 | import json 2 | import decimal 3 | import boto3 4 | import os 5 | from boto3.dynamodb.types import TypeDeserializer 6 | 7 | client = boto3.client('firehose') 8 | FIREHOSE_STREAM_NAME = os.environ['FIREHOSE_STREAM_NAME'] 9 | 10 | def lambda_handler(event, context): 11 | record_count = 0 12 | for record in event['Records']: 13 | # Get the primary key of the station 14 | station_id = record['dynamodb']['Keys']['station_id']['N'] 15 | if record['eventName'] != 'REMOVE': 16 | new_image = record['dynamodb']['NewImage'] 17 | deserializer = TypeDeserializer() 18 | payload = json.dumps({k: deserializer.deserialize(v) for k, v in new_image.items()}, cls=DecimalEncoder) 19 | 20 | response = client.put_record( 21 | DeliveryStreamName = FIREHOSE_STREAM_NAME, 22 | Record = { 'Data': payload + '\r\n' } 23 | ) 24 | 25 | print('[DEBUG] Processed station_id: {}. 
Request response: {}'.format(station_id, payload)) 26 | record_count += 1 27 | 28 | print("[INFO] Processed {} records.".format(str(record_count))) 29 | 30 | ''' 31 | Decode Decimal types to JSON numeric value 32 | ''' 33 | class DecimalEncoder(json.JSONEncoder): 34 | def default(self, o): 35 | if isinstance(o, decimal.Decimal): 36 | if o == int(o): 37 | return int(o) 38 | else: 39 | return float(o) 40 | return super(DecimalEncoder, self).default(o) -------------------------------------------------------------------------------- /lambdas/api_post_reviews/index.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | import json 3 | import os 4 | from boto3.dynamodb.conditions import Key 5 | from datetime import datetime 6 | 7 | # Read environment variables 8 | STATION_REVIEW_TABLE = os.environ['STATION_REVIEW_TABLE'] 9 | # Initialize boto3 clients 10 | dynamodb = boto3.resource('dynamodb') 11 | table = dynamodb.Table(STATION_REVIEW_TABLE) 12 | 13 | def lambda_handler(event, context): 14 | status_code = 400 15 | try: 16 | # print("[DEBUG] Received event: " + json.dumps(event, indent=2)) 17 | user_id = event['requestContext']['identity']['cognitoIdentityId'] 18 | input = json.loads(event['body']) 19 | input['user_id'] = user_id 20 | input['create_date'] = str(datetime.utcnow()) 21 | 22 | print('[DEBUG] Received input: ' + json.dumps(input)) 23 | table.put_item(Item=input) 24 | 25 | output = '[SUCCESS] Review posted successfully.' 26 | status_code = 200 27 | 28 | except Exception as e: 29 | print('ERROR: ', e) 30 | output = '{}'.format(e) 31 | 32 | # Construct API response 33 | response = { 34 | 'statusCode': status_code, 35 | 'headers': { 36 | 'Access-Control-Allow-Origin': '*', 37 | 'Access-Control-Allow-Credentials': True, 38 | 'Access-Control-Allow-Headers': 'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token,X-Amz-User-Agent', 39 | 'Access-Control-Allow-Methods': 'GET,POST,PUT,DELETE,OPTIONS,HEAD,PATCH', 40 | 'Content-Type': 'application/json' 41 | }, 42 | 'body': json.dumps(output) 43 | } 44 | 45 | print('[INFO] Query response: {}'.format(json.dumps(response))) 46 | 47 | return response -------------------------------------------------------------------------------- /lambdas/load_station_detail_to_ddb/index.py: -------------------------------------------------------------------------------- 1 | import json 2 | import time 3 | import datetime 4 | import urllib.request 5 | import boto3 6 | import decimal 7 | import os 8 | 9 | # Read environment variables 10 | STATION_DETAIL_URL = os.environ['STATION_DETAIL_URL'] 11 | STATION_DETAIL_TABLE = os.environ['STATION_DETAIL_TABLE'] 12 | 13 | # Initialize boto3 clients 14 | dynamodb = boto3.resource('dynamodb') 15 | table = dynamodb.Table(STATION_DETAIL_TABLE) 16 | 17 | def lambda_handler(event, context): 18 | message = '' 19 | try: 20 | # Request latest bike station details data 21 | web_request = urllib.request.urlopen(STATION_DETAIL_URL) 22 | station_detail_data = json.loads(web_request.read().decode()) 23 | 24 | # Get last_update from root 25 | last_updated = int(station_detail_data['last_updated']) 26 | 27 | # Parse through each bike station and update table 28 | station_count = 0 29 | with table.batch_writer(overwrite_by_pkeys=['station_id']) as batch: 30 | for station in station_detail_data['data']['stations']: 31 | station_count += 1 32 | new_station = {} 33 | new_station['station_id'] = int(station['station_id']) 34 | new_station['name'] = str(station['name']) 35 | 
new_station['lat'] = str(station['lat']) 36 | new_station['lon'] = str(station['lon']) 37 | new_station['last_updated'] = last_updated 38 | 39 | if 'capacity' in station: 40 | new_station['capacity'] = int(station['capacity']) 41 | 42 | batch.put_item(Item = new_station) 43 | message = '[INFO] Updated {} stations.'.format(station_count) 44 | except Exception as e: 45 | message = '[ERROR] {}'.format(e) 46 | 47 | print(message) 48 | 49 | -------------------------------------------------------------------------------- /assets/public/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 12 | 13 | 14 | 23 | AWS BikeNow Demo 24 | 25 | 26 | 27 | 30 |
31 | 41 | 42 | 43 | -------------------------------------------------------------------------------- /lambdas/setup_upload_artifacts/index.py: -------------------------------------------------------------------------------- 1 | import json 2 | import boto3 3 | import cfnresponse 4 | import os 5 | from os import listdir 6 | from os.path import isfile, join 7 | 8 | SCRIPT_BUCKET = os.environ['SCRIPT_BUCKET'] 9 | SCRIPT_FOLDER = os.environ['SCRIPT_FOLDER'] 10 | 11 | def lambda_handler(event, context): 12 | s3 = boto3.resource('s3') 13 | response = cfnresponse.FAILED 14 | 15 | # Get CloudFormation parameters 16 | cfn_stack_id = event.get('StackId') 17 | cfn_request_type = event.get('RequestType') 18 | cfn_physicalResourceId = context.log_stream_name if event.get('ResourceProperties.PhysicalResourceId') is None else event.get('ResourceProperties.PhysicalResourceId') 19 | 20 | message = '' 21 | 22 | if cfn_stack_id and cfn_request_type != 'Delete': 23 | try: 24 | # Copy Glue script files to S3 bucket 25 | script_path = 'artifacts' 26 | my_bucket = s3.Bucket(SCRIPT_BUCKET) 27 | for path, subdirs, files in os.walk(script_path): 28 | path = path.replace("\\","/") 29 | directory_name = path.replace(script_path,"") 30 | 31 | for file in files: 32 | my_bucket.upload_file(os.path.join(path, file), SCRIPT_FOLDER + directory_name + '/' + file) 33 | 34 | message = 'INFO: Copied script files to: ' + SCRIPT_BUCKET 35 | response = cfnresponse.SUCCESS 36 | except Exception as e: 37 | print('ERROR: ', e) 38 | message = '{}'.format(e) 39 | else: 40 | message = 'INFO: Deleting function.' 41 | response = cfnresponse.SUCCESS 42 | 43 | cfnresponse.send(event, context, response, 44 | { 45 | 'Message': message 46 | }, 47 | cfn_physicalResourceId) 48 | 49 | return { 50 | 'statusCode': 200, 51 | 'body': message 52 | } -------------------------------------------------------------------------------- /lambdas/api_search_stations/index.py: -------------------------------------------------------------------------------- 1 | import json 2 | import decimal 3 | import boto3 4 | import os 5 | import requests 6 | from requests_aws4auth import AWS4Auth 7 | from boto3.dynamodb.types import TypeDeserializer 8 | 9 | # Get Lambda run-time credentials 10 | region = os.environ['REGION'] 11 | service = 'es' 12 | credentials = boto3.Session().get_credentials() 13 | awsauth = AWS4Auth(credentials.access_key, credentials.secret_key, region, service, session_token = credentials.token) 14 | 15 | # Build Elasticsearch endpoint 16 | es_host = 'https://{}'.format(os.environ['ES_ENDPOINT']) 17 | es_index = 'stations' 18 | es_type = 'station' 19 | es_url = '{}/{}/{}/_search'.format(es_host, es_index, es_type) 20 | headers = { 'Content-Type': 'application/json' } 21 | 22 | def lambda_handler(event, context): 23 | # Construct the search query 24 | query = { 25 | 'size': 1000, 26 | 'query': { 27 | 'query_string': { 28 | 'query': event['queryStringParameters']['q'], 29 | 'fields': ['name'] 30 | } 31 | } 32 | } 33 | print('[INFO] Query body: {}'.format(json.dumps(query))) 34 | 35 | # Get search result from ES 36 | r = requests.get(es_url, auth = awsauth, headers = headers, data = json.dumps(query)) 37 | 38 | # Construct API response 39 | response = { 40 | 'statusCode': r.status_code, 41 | 'headers': { 42 | 'Access-Control-Allow-Origin': '*', 43 | 'Access-Control-Allow-Credentials': True, 44 | 'Access-Control-Allow-Headers': 'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token,X-Amz-User-Agent', 45 | 'Access-Control-Allow-Methods': 
'GET,POST,PUT,DELETE,OPTIONS,HEAD,PATCH', 46 | 'Content-Type': 'application/json' 47 | }, 48 | 'body': r.text 49 | } 50 | print('[INFO] Query response: {}'.format(json.dumps(response))) 51 | 52 | return response -------------------------------------------------------------------------------- /lambdas/setup_empty_bucket/index.py: -------------------------------------------------------------------------------- 1 | import json 2 | import boto3 3 | import cfnresponse 4 | import os 5 | from os import listdir 6 | from os.path import isfile, join 7 | 8 | SCRIPT_BUCKET = os.environ['SCRIPT_BUCKET'] 9 | 10 | def lambda_handler(event, context): 11 | s3 = boto3.resource('s3') 12 | response = cfnresponse.FAILED 13 | 14 | # Get CloudFormation parameters 15 | cfn_stack_id = event.get('StackId') 16 | cfn_request_type = event.get('RequestType') 17 | cfn_physicalResourceId = context.log_stream_name if event.get('ResourceProperties.PhysicalResourceId') is None else event.get('ResourceProperties.PhysicalResourceId') 18 | 19 | message = '' 20 | 21 | # If CloudFormation is being deleted, empty S3 bucket 22 | if cfn_stack_id and cfn_request_type == 'Delete': 23 | try: 24 | bucket = s3.Bucket(SCRIPT_BUCKET) 25 | bucket.objects.delete() 26 | message = 'INFO: Deleted data from S3 bucket: ' + SCRIPT_BUCKET 27 | response = cfnresponse.SUCCESS 28 | except botocore.exceptions.ClientError as e: 29 | # If a client error is thrown, then check that it was a 404 error. 30 | # If it was a 404 error, then the bucket does not exist. 31 | error_code = int(e.response['Error']['Code']) 32 | if error_code == 404: 33 | message = 'WARNING: Bucket does not exist: ' + SCRIPT_BUCKET 34 | response = cfnresponse.SUCCESS 35 | else: 36 | print('ERROR: ', e) 37 | message = '{}'.format(e) 38 | except Exception as e: 39 | print('ERROR: ', e) 40 | message = '{}'.format(e) 41 | else: 42 | message = 'INFO: Creating or updating function.' 
43 | response = cfnresponse.SUCCESS 44 | 45 | cfnresponse.send(event, context, response, 46 | { 47 | 'Message': message 48 | }, 49 | cfn_physicalResourceId) 50 | 51 | return { 52 | 'statusCode': 200, 53 | 'body': message 54 | } -------------------------------------------------------------------------------- /assets/src/modules/report/Embed.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Spinner } from "react-bootstrap"; 3 | import API from '@aws-amplify/api'; 4 | import Auth from '@aws-amplify/auth'; 5 | 6 | //import { embedDashboard } from 'amazon-quicksight-embedding-sdk'; 7 | const QuickSightEmbedding = require("amazon-quicksight-embedding-sdk"); 8 | 9 | const Embed = () => { 10 | 11 | let jwtToken : string; 12 | let payloadSub : any; 13 | let email : any; 14 | 15 | Auth.currentSession() 16 | .then(data => { 17 | jwtToken = data.getIdToken().getJwtToken(); 18 | payloadSub = data.getIdToken().payload.sub; 19 | email = data.getIdToken().payload.email; 20 | } ) 21 | .catch(err => console.log(err)); 22 | 23 | async function loadDashboard(e : any) { 24 | const myInit = { 25 | headers: {}, 26 | response: true, 27 | queryStringParameters: { 28 | jwtToken: jwtToken, 29 | payloadSub: payloadSub, 30 | email: email 31 | } 32 | } 33 | const data = await API.get('bikenow', '/report', myInit); 34 | 35 | const containerDiv = document.getElementById("dashboardContainer"); 36 | //let dashboard; 37 | const options = { 38 | url: data.data.data.EmbedUrl, 39 | container: containerDiv, 40 | parameters: { 41 | country: "United States" 42 | }, 43 | scrolling: "no", 44 | height: "AutoFit", 45 | loadingHeight: "480px", 46 | width: "100%" 47 | }; 48 | if (containerDiv) { 49 | containerDiv.innerHTML = ""; 50 | } 51 | const dashboard = QuickSightEmbedding.embedDashboard(options); 52 | } 53 | 54 | window.addEventListener('load', loadDashboard); 55 | 56 | return ( 57 | <> 58 |
59 | 60 | ); 61 | } 62 | 63 | export default Embed; -------------------------------------------------------------------------------- /lambdas/setup_rds_ddl/cfnresponse/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016 Amazon Web Services, Inc. or its affiliates. All Rights Reserved. 2 | # This file is licensed to you under the AWS Customer Agreement (the "License"). 3 | # You may not use this file except in compliance with the License. 4 | # A copy of the License is located at http://aws.amazon.com/agreement/ . 5 | # This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or implied. 6 | # See the License for the specific language governing permissions and limitations under the License. 7 | 8 | import requests 9 | import json 10 | 11 | SUCCESS = "SUCCESS" 12 | FAILED = "FAILED" 13 | 14 | def send(event, context, responseStatus, responseData, physicalResourceId=None, noEcho=False): 15 | responseUrl = event['ResponseURL'] 16 | 17 | print(responseUrl) 18 | 19 | responseBody = {} 20 | responseBody['Status'] = responseStatus 21 | responseBody['Reason'] = 'See the details in CloudWatch Log Stream: ' + context.log_stream_name 22 | responseBody['PhysicalResourceId'] = physicalResourceId or context.log_stream_name 23 | responseBody['StackId'] = event['StackId'] 24 | responseBody['RequestId'] = event['RequestId'] 25 | responseBody['LogicalResourceId'] = event['LogicalResourceId'] 26 | responseBody['NoEcho'] = noEcho 27 | responseBody['Data'] = responseData 28 | 29 | json_responseBody = json.dumps(responseBody) 30 | 31 | print("Response body:\n" + json_responseBody) 32 | 33 | headers = { 34 | 'content-type' : '', 35 | 'content-length' : str(len(json_responseBody)) 36 | } 37 | 38 | try: 39 | response = requests.put(responseUrl, 40 | data=json_responseBody, 41 | headers=headers) 42 | print("Status code: " + response.reason) 43 | except Exception as e: 44 | print("send(..) failed executing requests.put(..): " + str(e)) -------------------------------------------------------------------------------- /lambdas/setup_empty_bucket/cfnresponse/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016 Amazon Web Services, Inc. or its affiliates. All Rights Reserved. 2 | # This file is licensed to you under the AWS Customer Agreement (the "License"). 3 | # You may not use this file except in compliance with the License. 4 | # A copy of the License is located at http://aws.amazon.com/agreement/ . 5 | # This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or implied. 6 | # See the License for the specific language governing permissions and limitations under the License. 
7 | 8 | import requests 9 | import json 10 | 11 | SUCCESS = "SUCCESS" 12 | FAILED = "FAILED" 13 | 14 | def send(event, context, responseStatus, responseData, physicalResourceId=None, noEcho=False): 15 | responseUrl = event['ResponseURL'] 16 | 17 | print(responseUrl) 18 | 19 | responseBody = {} 20 | responseBody['Status'] = responseStatus 21 | responseBody['Reason'] = 'See the details in CloudWatch Log Stream: ' + context.log_stream_name 22 | responseBody['PhysicalResourceId'] = physicalResourceId or context.log_stream_name 23 | responseBody['StackId'] = event['StackId'] 24 | responseBody['RequestId'] = event['RequestId'] 25 | responseBody['LogicalResourceId'] = event['LogicalResourceId'] 26 | responseBody['NoEcho'] = noEcho 27 | responseBody['Data'] = responseData 28 | 29 | json_responseBody = json.dumps(responseBody) 30 | 31 | print("Response body:\n" + json_responseBody) 32 | 33 | headers = { 34 | 'content-type' : '', 35 | 'content-length' : str(len(json_responseBody)) 36 | } 37 | 38 | try: 39 | response = requests.put(responseUrl, 40 | data=json_responseBody, 41 | headers=headers) 42 | print("Status code: " + response.reason) 43 | except Exception as e: 44 | print("send(..) failed executing requests.put(..): " + str(e)) -------------------------------------------------------------------------------- /lambdas/setup_datawarehouse_ddl/cfnresponse/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016 Amazon Web Services, Inc. or its affiliates. All Rights Reserved. 2 | # This file is licensed to you under the AWS Customer Agreement (the "License"). 3 | # You may not use this file except in compliance with the License. 4 | # A copy of the License is located at http://aws.amazon.com/agreement/ . 5 | # This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or implied. 6 | # See the License for the specific language governing permissions and limitations under the License. 7 | 8 | import requests 9 | import json 10 | 11 | SUCCESS = "SUCCESS" 12 | FAILED = "FAILED" 13 | 14 | def send(event, context, responseStatus, responseData, physicalResourceId=None, noEcho=False): 15 | responseUrl = event['ResponseURL'] 16 | 17 | print(responseUrl) 18 | 19 | responseBody = {} 20 | responseBody['Status'] = responseStatus 21 | responseBody['Reason'] = 'See the details in CloudWatch Log Stream: ' + context.log_stream_name 22 | responseBody['PhysicalResourceId'] = physicalResourceId or context.log_stream_name 23 | responseBody['StackId'] = event['StackId'] 24 | responseBody['RequestId'] = event['RequestId'] 25 | responseBody['LogicalResourceId'] = event['LogicalResourceId'] 26 | responseBody['NoEcho'] = noEcho 27 | responseBody['Data'] = responseData 28 | 29 | json_responseBody = json.dumps(responseBody) 30 | 31 | print("Response body:\n" + json_responseBody) 32 | 33 | headers = { 34 | 'content-type' : '', 35 | 'content-length' : str(len(json_responseBody)) 36 | } 37 | 38 | try: 39 | response = requests.put(responseUrl, 40 | data=json_responseBody, 41 | headers=headers) 42 | print("Status code: " + response.reason) 43 | except Exception as e: 44 | print("send(..) failed executing requests.put(..): " + str(e)) -------------------------------------------------------------------------------- /lambdas/setup_upload_artifacts/cfnresponse/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016 Amazon Web Services, Inc. or its affiliates. All Rights Reserved. 
2 | # This file is licensed to you under the AWS Customer Agreement (the "License"). 3 | # You may not use this file except in compliance with the License. 4 | # A copy of the License is located at http://aws.amazon.com/agreement/ . 5 | # This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or implied. 6 | # See the License for the specific language governing permissions and limitations under the License. 7 | 8 | import requests 9 | import json 10 | 11 | SUCCESS = "SUCCESS" 12 | FAILED = "FAILED" 13 | 14 | def send(event, context, responseStatus, responseData, physicalResourceId=None, noEcho=False): 15 | responseUrl = event['ResponseURL'] 16 | 17 | print(responseUrl) 18 | 19 | responseBody = {} 20 | responseBody['Status'] = responseStatus 21 | responseBody['Reason'] = 'See the details in CloudWatch Log Stream: ' + context.log_stream_name 22 | responseBody['PhysicalResourceId'] = physicalResourceId or context.log_stream_name 23 | responseBody['StackId'] = event['StackId'] 24 | responseBody['RequestId'] = event['RequestId'] 25 | responseBody['LogicalResourceId'] = event['LogicalResourceId'] 26 | responseBody['NoEcho'] = noEcho 27 | responseBody['Data'] = responseData 28 | 29 | json_responseBody = json.dumps(responseBody) 30 | 31 | print("Response body:\n" + json_responseBody) 32 | 33 | headers = { 34 | 'content-type' : '', 35 | 'content-length' : str(len(json_responseBody)) 36 | } 37 | 38 | try: 39 | response = requests.put(responseUrl, 40 | data=json_responseBody, 41 | headers=headers) 42 | print("Status code: " + response.reason) 43 | except Exception as e: 44 | print("send(..) failed executing requests.put(..): " + str(e)) -------------------------------------------------------------------------------- /lambdas/load_station_status_to_ddb/index.py: -------------------------------------------------------------------------------- 1 | import json 2 | import time 3 | import datetime 4 | import urllib.request 5 | import boto3 6 | import decimal 7 | import os 8 | 9 | # Read environment variables 10 | STATION_STATUS_URL = os.environ['STATION_STATUS_URL'] 11 | STATION_STATUS_TABLE = os.environ['STATION_STATUS_TABLE'] 12 | 13 | # Initialize boto3 clients 14 | dynamodb = boto3.resource('dynamodb') 15 | table = dynamodb.Table(STATION_STATUS_TABLE) 16 | 17 | def lambda_handler(event, context): 18 | message = '' 19 | try: 20 | # Request latest bike station status data 21 | web_request = urllib.request.urlopen(STATION_STATUS_URL) 22 | station_status_data = json.loads(web_request.read().decode()) 23 | 24 | # Parse through each bike station and update table 25 | station_count = 0 26 | with table.batch_writer(overwrite_by_pkeys=['station_id']) as batch: 27 | for station in station_status_data['data']['stations']: 28 | station_count += 1 29 | new_station = {} 30 | new_station['station_id'] = int(station['station_id']) 31 | new_station['last_reported'] = int(station['last_reported']) 32 | new_station['num_bikes_available'] = int(station['num_bikes_available']) 33 | new_station['is_installed'] = bool(station['is_installed']) 34 | new_station['is_returning'] = bool(station['is_returning']) 35 | new_station['is_renting'] = bool(station['is_renting']) 36 | 37 | # new_station['num_bikes_disabled'] = int(station['num_bikes_disabled']) 38 | # new_station['num_docks_available'] = int(station['num_docks_available']) 39 | # new_station['num_ebikes_available'] = int(station['num_ebikes_available']) 40 | # new_station['num_docks_disabled'] = 
int(station['num_docks_disabled']) 41 | batch.put_item(Item = new_station) 42 | message = '[INFO] Updated {} stations.'.format(station_count) 43 | except Exception as e: 44 | message = '[ERROR] {}'.format(e) 45 | 46 | print(message) 47 | 48 | -------------------------------------------------------------------------------- /lambdas/api_get_reviews/index.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | import json 3 | import os 4 | import decimal 5 | from boto3.dynamodb.conditions import Key 6 | 7 | # Read environment variables 8 | STATION_REVIEW_TABLE = os.environ['STATION_REVIEW_TABLE'] 9 | STATION_REVIEW_GSI = os.environ['STATION_REVIEW_GSI'] 10 | # Initialize boto3 clients 11 | dynamodb = boto3.resource('dynamodb') 12 | table = dynamodb.Table(STATION_REVIEW_TABLE) 13 | 14 | def lambda_handler(event, context): 15 | status_code = 400 16 | try: 17 | # print("[DEBUG] Received event: " + json.dumps(event, indent=2)) 18 | output = None 19 | if event['queryStringParameters']: 20 | station_id = int(event['queryStringParameters']['stationId']) 21 | query_result = table.query(KeyConditionExpression=Key('station_id').eq(station_id)) 22 | else: 23 | user_id = event['requestContext']['identity']['cognitoIdentityId'] 24 | query_result = table.query(IndexName=STATION_REVIEW_GSI, KeyConditionExpression=Key('user_id').eq(user_id)) 25 | 26 | output = query_result['Items'] 27 | status_code = 200 28 | except Exception as e: 29 | print('ERROR: ', e) 30 | output = '{}'.format(e) 31 | 32 | # Construct API response 33 | response = { 34 | 'statusCode': status_code, 35 | 'headers': { 36 | 'Access-Control-Allow-Origin': '*', 37 | 'Access-Control-Allow-Credentials': True, 38 | 'Access-Control-Allow-Headers': 'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token,X-Amz-User-Agent', 39 | 'Access-Control-Allow-Methods': 'GET,POST,PUT,DELETE,OPTIONS,HEAD,PATCH', 40 | 'Content-Type': 'application/json' 41 | }, 42 | 'body': json.dumps(output, cls=DecimalEncoder) 43 | } 44 | 45 | print('[INFO] Query response: {}'.format(json.dumps(response))) 46 | 47 | return response 48 | 49 | ''' 50 | Decode Decimal types to JSON numeric value 51 | ''' 52 | class DecimalEncoder(json.JSONEncoder): 53 | def default(self, o): 54 | if isinstance(o, decimal.Decimal): 55 | if o == int(o): 56 | return int(o) 57 | else: 58 | return float(o) 59 | return super(DecimalEncoder, self).default(o) -------------------------------------------------------------------------------- /lambdas/stream_station_status_to_es/index.py: -------------------------------------------------------------------------------- 1 | import json 2 | import decimal 3 | import boto3 4 | import os 5 | import requests 6 | from requests_aws4auth import AWS4Auth 7 | from boto3.dynamodb.types import TypeDeserializer 8 | 9 | # Get Lambda run-time credentials 10 | region = os.environ['REGION'] 11 | service = 'es' 12 | credentials = boto3.Session().get_credentials() 13 | awsauth = AWS4Auth(credentials.access_key, credentials.secret_key, region, service, session_token = credentials.token) 14 | 15 | # Build Elasticsearch endpoint 16 | es_host = 'https://{}'.format(os.environ['ES_ENDPOINT']) 17 | es_index = 'stations' 18 | es_type = 'station' 19 | es_url = '{}/{}/{}/'.format(es_host, es_index, es_type) 20 | headers = { 'Content-Type': 'application/json' } 21 | 22 | def lambda_handler(event, context): 23 | delete_count = 0 24 | update_count = 0 25 | for record in event['Records']: 26 | # Get the primary key of the station 
27 | station_id = record['dynamodb']['Keys']['station_id']['N'] 28 | # If deleted from DynamoDB, remove from ES 29 | if record['eventName'] == 'REMOVE': 30 | r = requests.delete('{}{}'.format(es_url, station_id), auth = awsauth) 31 | delete_count += 1 32 | # Update record so we don't overwrite station details 33 | else: 34 | # Deserialize the image 35 | new_image = record['dynamodb']['NewImage'] 36 | deserializer = TypeDeserializer() 37 | deserialized_image = json.dumps({k: deserializer.deserialize(v) for k, v in new_image.items()}, cls=DecimalEncoder) 38 | payload = { 39 | 'doc': json.loads(deserialized_image), 40 | 'doc_as_upsert': True 41 | } 42 | 43 | # Insert or update to ES 44 | r = requests.post('{}{}/_update'.format(es_url, station_id), auth = awsauth, headers = headers, json = payload) 45 | update_count += 1 46 | print('[DEBUG] Processed station_id: {}. Request response: {}'.format(station_id, r.text)) 47 | 48 | message = '[INFO] Deleted {} stations. Inserted or updated {} stations.'.format(delete_count, update_count) 49 | print(message) 50 | 51 | ''' 52 | Decode Decimal types to JSON numeric value 53 | ''' 54 | class DecimalEncoder(json.JSONEncoder): 55 | def default(self, o): 56 | if isinstance(o, decimal.Decimal): 57 | if o == int(o): 58 | return int(o) 59 | else: 60 | return float(o) 61 | return super(DecimalEncoder, self).default(o) -------------------------------------------------------------------------------- /lambdas/stream_station_review_to_s3/index.py: -------------------------------------------------------------------------------- 1 | import json 2 | import decimal 3 | import boto3 4 | import os 5 | from boto3.dynamodb.types import TypeDeserializer 6 | from datetime import datetime 7 | 8 | client = boto3.client('firehose') 9 | comprehend_client = boto3.client('comprehend') 10 | FIREHOSE_STREAM_NAME = os.environ['FIREHOSE_STREAM_NAME'] 11 | 12 | def lambda_handler(event, context): 13 | record_count = 0 14 | for record in event['Records']: 15 | try: 16 | # Get the primary key of the station 17 | station_id = record['dynamodb']['Keys']['station_id']['N'] 18 | if record['eventName'] != 'REMOVE': 19 | new_image = record['dynamodb']['NewImage'] 20 | 21 | new_dt = datetime.strptime(new_image['create_date']['S'], '%Y-%m-%d %H:%M:%S.%f') 22 | new_image['create_date'] = {'N': int(new_dt.timestamp())} 23 | 24 | sentiment = comprehend_client.detect_sentiment( 25 | Text=new_image['review']['S'] 26 | ,LanguageCode='en' 27 | ) 28 | 29 | new_image['sentiment'] = {'S': sentiment['Sentiment']} 30 | new_image['sentiment_mixed'] = {'N': sentiment['SentimentScore']['Mixed']} 31 | new_image['sentiment_neutral'] = {'N': sentiment['SentimentScore']['Neutral']} 32 | new_image['sentiment_positive'] = {'N': sentiment['SentimentScore']['Positive']} 33 | new_image['sentiment_negative'] = {'N': sentiment['SentimentScore']['Negative']} 34 | 35 | deserializer = TypeDeserializer() 36 | payload = json.dumps({k: deserializer.deserialize(v) for k, v in new_image.items()}, cls=DecimalEncoder) 37 | 38 | response = client.put_record( 39 | DeliveryStreamName = FIREHOSE_STREAM_NAME, 40 | Record = { 'Data': payload + '\r\n' } 41 | ) 42 | 43 | print('[DEBUG] Processed station_id: {}. Request response: {}'.format(station_id, payload)) 44 | record_count += 1 45 | except Exception: 46 | pass # JSON encoder to Decimal sometimes throws exception with inexact rounding. Skip these. Fix later.
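# Illustrative sketch only (not in the original handler): one low-risk way to act on the
# "Fix later" note above is to log the skipped record instead of passing silently, so any
# dropped reviews at least show up in the function's CloudWatch logs, e.g.:
#   except Exception as e:
#       print('[WARN] Skipping review record: {}'.format(e))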
47 | 48 | print("[INFO] Processed {} records.".format(str(record_count))) 49 | 50 | ''' 51 | Decode Decimal types to JSON numeric value 52 | ''' 53 | class DecimalEncoder(json.JSONEncoder): 54 | def default(self, o): 55 | if isinstance(o, decimal.Decimal): 56 | if o == int(o): 57 | return int(o) 58 | else: 59 | return float(o) 60 | return super(DecimalEncoder, self).default(o) -------------------------------------------------------------------------------- /assets/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "aws-bikenow-demo", 3 | "version": "0.1.0", 4 | "private": true, 5 | "dependencies": { 6 | "@aws-amplify/api": "^3.3.0", 7 | "@aws-amplify/auth": "^3.4.31", 8 | "@aws-amplify/core": "^3.8.23", 9 | "@types/graphql": "^14.5.0", 10 | "@types/jest": "^26.0.23", 11 | "@types/node": "^15.0.2", 12 | "@types/react": "^17.0.5", 13 | "@types/react-datepicker": "^3.1.8", 14 | "@types/react-dom": "^17.0.4", 15 | "@types/react-leaflet": "^2.8.1", 16 | "@types/react-router-bootstrap": "^0.24.5", 17 | "@types/react-router-dom": "^5.1.7", 18 | "@types/zen-observable": "0.8.2", 19 | "amazon-quicksight-embedding-sdk": "^1.0.15", 20 | "bootstrap": "^5.0.0", 21 | "graphql": "15.5.0", 22 | "jquery": "^3.6.0", 23 | "leaflet": "^1.7.1", 24 | "react": "^17.0.2", 25 | "react-bootstrap": "^1.5.2", 26 | "react-datepicker": "^3.8.0", 27 | "react-dom": "^17.0.2", 28 | "react-leaflet": "^3.1.0", 29 | "react-router-bootstrap": "^0.25.0", 30 | "react-router-dom": "^5.2.0", 31 | "react-scripts": "^4.0.3", 32 | "typescript": "4.2.4", 33 | "workbox-background-sync": "^6.1.5", 34 | "workbox-broadcast-update": "^6.1.5", 35 | "workbox-cacheable-response": "^6.1.5", 36 | "workbox-core": "^6.1.5", 37 | "workbox-expiration": "^6.1.5", 38 | "workbox-google-analytics": "^6.1.5", 39 | "workbox-navigation-preload": "^6.1.5", 40 | "workbox-precaching": "^6.1.5", 41 | "workbox-range-requests": "^6.1.5", 42 | "workbox-routing": "^6.1.5", 43 | "workbox-strategies": "^6.1.5", 44 | "workbox-streams": "^6.1.5", 45 | "zen-observable": "0.8.15" 46 | }, 47 | "resolutions": { 48 | "immer": ">=9.0.6", 49 | "is-svg": ">=4.2.2", 50 | "ssri": ">=8.0.1", 51 | "xmldom": ">0.5.0", 52 | "postcss": ">=8.2.10", 53 | "ws": ">=7.4.6", 54 | "dns-packet": ">=1.3.2", 55 | "normalize-url": ">=4.5.1", 56 | "tar": ">=6.1.2", 57 | "axios": ">=0.21.2", 58 | "set-value": ">=4.0.1", 59 | "tmpl": ">=1.0.5", 60 | "nth-check": ">=2.0.1" 61 | }, 62 | "scripts": { 63 | "preinstall": "npx npm-force-resolutions", 64 | "start": "react-scripts start", 65 | "build": "react-scripts build", 66 | "test": "react-scripts test --env=jsdom", 67 | "eject": "react-scripts eject" 68 | }, 69 | "devDependencies": { 70 | "eslint": "^7.26.0", 71 | "eslint-plugin-react": "^7.23.2", 72 | "npm-force-resolutions": "0.0.10" 73 | }, 74 | "browserslist": [ 75 | ">0.2%", 76 | "not dead", 77 | "not ie <= 11", 78 | "not op_mini all" 79 | ] 80 | } 81 | -------------------------------------------------------------------------------- /lambdas/setup_upload_artifacts/artifacts/glue_unload_station_history_s3.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | import base64 3 | import json 4 | import pg 5 | import sys 6 | from awsglue.utils import getResolvedOptions 7 | from botocore.exceptions import ClientError 8 | from datetime import datetime, timedelta 9 | 10 | # Helper class for interfacing with Redshift 11 | class rs_common: 12 | # Returns connection and credential 
information from secrets manager 13 | @staticmethod 14 | def connection_info(db_creds): 15 | session = boto3.session.Session() 16 | client = session.client( 17 | service_name='secretsmanager' 18 | ) 19 | 20 | get_secret_value_response = client.get_secret_value(SecretId=db_creds) 21 | 22 | if 'SecretString' in get_secret_value_response: 23 | secret = json.loads(get_secret_value_response['SecretString']) 24 | else: 25 | secret = json.loads(base64.b64decode(get_secret_value_response['SecretBinary'])) 26 | 27 | return secret 28 | 29 | # Returns a connection to the cluster 30 | @staticmethod 31 | def get_connection(db_creds): 32 | 33 | con_params = rs_common.connection_info(db_creds) 34 | 35 | rs_conn_string = "host=%s port=%s dbname=%s user=%s password=%s" % (con_params['host'], con_params['port'], con_params['dbname'], con_params['username'], con_params['password']) 36 | rs_conn = pg.connect(dbname=rs_conn_string) 37 | rs_conn.query("set statement_timeout = 1200000") 38 | 39 | return rs_conn 40 | 41 | # Submits a query to the cluster 42 | @staticmethod 43 | def query(con,statement): 44 | res = con.query(statement) 45 | return res 46 | 47 | # Get job args 48 | args = getResolvedOptions(sys.argv,['db_creds','glue_db','rs_iam_role','dest_bucket']) 49 | db_creds = args['db_creds'] 50 | glue_db = args['glue_db'] 51 | rs_iam_role = args['rs_iam_role'] 52 | dest_bucket = args['dest_bucket'] 53 | utc_query = datetime.utcnow() - timedelta(hours=1) 54 | 55 | sql = ''' 56 | unload ( 57 | 'select avg_num_bikes_available 58 | ,station_id 59 | ,date_part(y, update_timestamp)::int as year 60 | ,date_part(mon, update_timestamp)::int as month 61 | ,date_part(d, update_timestamp)::int as day 62 | ,date_part(h, update_timestamp)::int as hour 63 | ,date_part(dw, update_timestamp)::int as day_of_week 64 | from public.station_status_history' 65 | ) 66 | to 's3://{}/unload/station_status_history_' 67 | iam_role '{}' 68 | delimiter as ',' 69 | allowoverwrite 70 | parallel off; 71 | '''.format(dest_bucket, rs_iam_role) 72 | 73 | # Connect to database 74 | print('Connecting...') 75 | con = rs_common.get_connection(db_creds) 76 | 77 | # Run SQL statement 78 | print("Connected. 
Running query...") 79 | result = rs_common.query(con, sql) 80 | 81 | print(result) -------------------------------------------------------------------------------- /lambdas/api_post_rides/index.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | import base64 3 | import json 4 | import os 5 | import pymysql 6 | 7 | DB_CREDS = os.environ['DB_CREDS'] 8 | DB_NAME = os.environ['DB_NAME'] 9 | 10 | def connection_info(db_creds): 11 | session = boto3.session.Session() 12 | client = session.client( 13 | service_name='secretsmanager' 14 | ) 15 | 16 | get_secret_value_response = client.get_secret_value(SecretId=db_creds) 17 | 18 | if 'SecretString' in get_secret_value_response: 19 | secret = json.loads(get_secret_value_response['SecretString']) 20 | else: 21 | secret = json.loads(base64.b64decode(get_secret_value_response['SecretBinary'])) 22 | 23 | return secret 24 | 25 | # Expecting POST payload: 26 | # { 27 | # 'station_id': int, 28 | # 'station_name': string, 29 | # 'duration': int, 30 | # 'price': float 31 | # } 32 | def lambda_handler(event, context): 33 | status_code = 400 34 | try: 35 | # print("[DEBUG] Received event: " + json.dumps(event, indent=2)) 36 | user_id = event['requestContext']['identity']['cognitoIdentityId'] 37 | input = json.loads(event['body']) 38 | sql = ''' 39 | INSERT INTO rideTransactions 40 | ( 41 | userId 42 | ,stationId 43 | ,stationName 44 | ,duration 45 | ,price 46 | ) 47 | VALUES 48 | ( 49 | "{}" 50 | ,{} 51 | ,"{}" 52 | ,{} 53 | ,{} 54 | ); 55 | '''.format(user_id, input['station_id'], input['station_name'], input['duration'], input['price']) 56 | 57 | print('[INFO] Connecting...') 58 | conn_info = connection_info(DB_CREDS) 59 | conn = pymysql.connect(host=conn_info['host'], user=conn_info['username'], password=conn_info['password'], database=conn_info['dbname'], connect_timeout=30) 60 | with conn.cursor() as cur: 61 | print('[INFO] Executing SQL: {}'.format(sql)) 62 | cur.execute(sql) 63 | conn.commit() 64 | conn.close() 65 | status_code = 200 66 | output = '[SUCCESS] Executed insert query successfully.' 
67 | 68 | except Exception as e: 69 | print('[ERROR] ', e) 70 | output = '{}'.format(e) 71 | 72 | # Construct API response 73 | response = { 74 | 'statusCode': status_code, 75 | 'headers': { 76 | 'Access-Control-Allow-Origin': '*', 77 | 'Access-Control-Allow-Credentials': True, 78 | 'Access-Control-Allow-Headers': 'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token,X-Amz-User-Agent', 79 | 'Access-Control-Allow-Methods': 'GET,POST,PUT,DELETE,OPTIONS,HEAD,PATCH', 80 | 'Content-Type': 'application/json' 81 | }, 82 | 'body': output 83 | } 84 | 85 | print('[INFO] Query response: {}'.format(json.dumps(response))) 86 | 87 | return response -------------------------------------------------------------------------------- /lambdas/api_get_rides/index.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | import base64 3 | import json 4 | import os 5 | import pymysql 6 | import pymysql.cursors 7 | 8 | DB_CREDS = os.environ['DB_CREDS'] 9 | DB_NAME = os.environ['DB_NAME'] 10 | 11 | def connection_info(db_creds): 12 | session = boto3.session.Session() 13 | client = session.client( 14 | service_name='secretsmanager' 15 | ) 16 | 17 | get_secret_value_response = client.get_secret_value(SecretId=db_creds) 18 | 19 | if 'SecretString' in get_secret_value_response: 20 | secret = json.loads(get_secret_value_response['SecretString']) 21 | else: 22 | secret = json.loads(base64.b64decode(get_secret_value_response['SecretBinary'])) 23 | 24 | return secret 25 | 26 | def lambda_handler(event, context): 27 | status_code = 400 28 | try: 29 | # print("[DEBUG] Received event: " + json.dumps(event, indent=2)) 30 | user_id = event['requestContext']['identity']['cognitoIdentityId'] 31 | sql = ''' 32 | SELECT 33 | id 34 | ,userId 35 | ,stationId 36 | ,stationName 37 | ,duration 38 | ,price 39 | ,createdDate 40 | FROM 41 | rideTransactions 42 | WHERE 43 | userId = "{}" 44 | ORDER BY 45 | createdDate DESC; 46 | '''.format(user_id) 47 | 48 | print('[INFO] Connecting...') 49 | conn_info = connection_info(DB_CREDS) 50 | conn = pymysql.connect(host=conn_info['host'], user=conn_info['username'], password=conn_info['password'], database=conn_info['dbname'], connect_timeout=30, cursorclass=pymysql.cursors.DictCursor) 51 | with conn.cursor() as cur: 52 | print('[INFO] Executing SQL: {}'.format(sql)) 53 | cur.execute(sql) 54 | rows = cur.fetchall() 55 | conn.close() 56 | output = [{'id': c['id'], 'userId': c['userId'], 'stationId': c['stationId'], 'stationName': c['stationName'], 'duration': c['duration'], 'price': float(c['price']), 'createdDate': c['createdDate'].isoformat()} for c in rows] 57 | # print('[DEBUG] Output: {}'.format(output)) 58 | status_code = 200 59 | 60 | except Exception as e: 61 | print('ERROR: ', e) 62 | output = '{}'.format(e) 63 | 64 | # Construct API response 65 | response = { 66 | 'statusCode': status_code, 67 | 'headers': { 68 | 'Access-Control-Allow-Origin': '*', 69 | 'Access-Control-Allow-Credentials': True, 70 | 'Access-Control-Allow-Headers': 'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token,X-Amz-User-Agent', 71 | 'Access-Control-Allow-Methods': 'GET,POST,PUT,DELETE,OPTIONS,HEAD,PATCH', 72 | 'Content-Type': 'application/json' 73 | }, 74 | 'body': json.dumps(output) 75 | } 76 | 77 | print('[INFO] Query response: {}'.format(json.dumps(response))) 78 | 79 | return response -------------------------------------------------------------------------------- /lambdas/api_predict_station_status/index.py: 
-------------------------------------------------------------------------------- 1 | import boto3 2 | import json 3 | import decimal 4 | import os 5 | import sys 6 | import math 7 | from datetime import datetime 8 | 9 | runtime_client = boto3.client('runtime.sagemaker') 10 | MODEL_ENDPOINT_NAME = os.environ['MODEL_ENDPOINT_NAME'] 11 | 12 | def do_predict(data, endpoint_name, content_type): 13 | payload = '\n'.join(data) 14 | response = runtime_client.invoke_endpoint(EndpointName=endpoint_name, ContentType=content_type, Body=payload) 15 | result = response['Body'].read() 16 | result = result.decode('utf-8') 17 | result = result.split(',') 18 | preds = [round(float(num)) for num in result] 19 | return preds 20 | 21 | def batch_predict(data, batch_size, endpoint_name, content_type): 22 | items = len(data) 23 | arrs = [] 24 | 25 | for offset in range(0, items, batch_size): 26 | if offset+batch_size < items: 27 | results = do_predict(data[offset:(offset+batch_size)], endpoint_name, content_type) 28 | arrs.extend(results) 29 | else: 30 | arrs.extend(do_predict(data[offset:items], endpoint_name, content_type)) 31 | 32 | return arrs 33 | 34 | # Expecting POST payload: 35 | # { 36 | # 'year': int, 37 | # 'month': int, 38 | # 'day': int, 39 | # 'hour': int, 40 | # 'station_ids': [int] 41 | # } 42 | def lambda_handler(event, context): 43 | status_code = 400 44 | try: 45 | #print("Received event: " + json.dumps(event, indent=2)) 46 | operation = event['httpMethod'] 47 | if operation == 'POST': 48 | input = json.loads(event['body']) 49 | day_of_week = datetime.strptime('{}{}{}'.format(input['year'], input['month'], input['day']), '%Y%m%d').date().isoweekday() + 1 50 | test_data = [','.join(map(str, [station_id, input['year'], input['month'], input['day'], input['hour'], day_of_week])) for station_id in input['station_ids']] 51 | #print('Test data: {}'.format(json.dumps(test_data))) 52 | 53 | result = batch_predict(test_data, 100, MODEL_ENDPOINT_NAME, 'text/csv') 54 | output = json.dumps(dict(zip(input['station_ids'], result))) 55 | status_code = 200 56 | else: 57 | output = 'Unsupported method: {}'.format(operation) 58 | except Exception as e: 59 | print('ERROR: ', e) 60 | output = '{}'.format(e) 61 | 62 | # Construct API response 63 | response = { 64 | 'statusCode': status_code, 65 | 'headers': { 66 | 'Access-Control-Allow-Origin': '*', 67 | 'Access-Control-Allow-Credentials': True, 68 | 'Access-Control-Allow-Headers': 'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token,X-Amz-User-Agent', 69 | 'Access-Control-Allow-Methods': 'GET,POST,PUT,DELETE,OPTIONS,HEAD,PATCH', 70 | 'Content-Type': 'application/json' 71 | }, 72 | 'body': output 73 | } 74 | 75 | print('[INFO] Query response: {}'.format(json.dumps(response))) 76 | 77 | return response 78 | -------------------------------------------------------------------------------- /lambdas/setup_upload_artifacts/artifacts/glue_load_station_detail_redshift.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | import base64 3 | import json 4 | import pg 5 | import sys 6 | from awsglue.utils import getResolvedOptions 7 | from botocore.exceptions import ClientError 8 | from datetime import datetime, timedelta 9 | 10 | # Helper class for interfacing with Redshift 11 | class rs_common: 12 | # Returns connection and credential information from secrets manager 13 | @staticmethod 14 | def connection_info(db_creds): 15 | session = boto3.session.Session() 16 | client = session.client( 17 | 
service_name='secretsmanager' 18 | ) 19 | 20 | get_secret_value_response = client.get_secret_value(SecretId=db_creds) 21 | 22 | if 'SecretString' in get_secret_value_response: 23 | secret = json.loads(get_secret_value_response['SecretString']) 24 | else: 25 | secret = json.loads(base64.b64decode(get_secret_value_response['SecretBinary'])) 26 | 27 | return secret 28 | 29 | # Returns a connection to the cluster 30 | @staticmethod 31 | def get_connection(db_creds): 32 | 33 | con_params = rs_common.connection_info(db_creds) 34 | 35 | rs_conn_string = "host=%s port=%s dbname=%s user=%s password=%s" % (con_params['host'], con_params['port'], con_params['dbname'], con_params['username'], con_params['password']) 36 | rs_conn = pg.connect(dbname=rs_conn_string) 37 | rs_conn.query("set statement_timeout = 1200000") 38 | 39 | return rs_conn 40 | 41 | # Submits a query to the cluster 42 | @staticmethod 43 | def query(con,statement): 44 | res = con.query(statement) 45 | return res 46 | 47 | # Get job args 48 | args = getResolvedOptions(sys.argv,['db_creds','glue_db']) 49 | db_creds = args['db_creds'] 50 | glue_db = args['glue_db'] 51 | 52 | sql = ''' 53 | BEGIN; 54 | CREATE TEMP TABLE staging_station_detail(LIKE public.station_detail); 55 | 56 | INSERT INTO staging_station_detail 57 | WITH cte AS 58 | ( 59 | SELECT 60 | ROW_NUMBER() OVER (PARTITION BY station_id ORDER BY last_updated DESC) AS rn 61 | ,station_id 62 | ,"name" AS station_name 63 | ,capacity 64 | ,CAST(lon as FLOAT) AS lon 65 | ,CAST(lat AS FLOAT) AS lat 66 | ,TIMESTAMP 'epoch' + last_updated *INTERVAL '1 second' AS last_updated 67 | FROM {}.station_detail_history 68 | ) 69 | SELECT 70 | station_id 71 | ,station_name 72 | ,capacity 73 | ,lon 74 | ,lat 75 | ,last_updated 76 | FROM cte 77 | WHERE rn = 1; 78 | 79 | DELETE FROM public.station_detail 80 | USING staging_station_detail s 81 | WHERE station_detail.station_id = s.station_id; 82 | 83 | INSERT INTO public.station_detail 84 | SELECT * FROM staging_station_detail; 85 | 86 | DROP TABLE staging_station_detail; 87 | 88 | COMMIT; 89 | '''.format(glue_db) 90 | 91 | # Connect to database 92 | print('Connecting...') 93 | con = rs_common.get_connection(db_creds) 94 | 95 | # Run SQL statement 96 | print("Connected. 
Running query...") 97 | result = rs_common.query(con, sql) 98 | 99 | print(result) -------------------------------------------------------------------------------- /lambdas/setup_upload_artifacts/artifacts/glue_load_station_history_redshift.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | import base64 3 | import json 4 | import pg 5 | import sys 6 | from awsglue.utils import getResolvedOptions 7 | from botocore.exceptions import ClientError 8 | from datetime import datetime, timedelta 9 | 10 | # Helper class for interfacing with Redshift 11 | class rs_common: 12 | # Returns connection and credential information from secrets manager 13 | @staticmethod 14 | def connection_info(db_creds): 15 | session = boto3.session.Session() 16 | client = session.client( 17 | service_name='secretsmanager' 18 | ) 19 | 20 | get_secret_value_response = client.get_secret_value(SecretId=db_creds) 21 | 22 | if 'SecretString' in get_secret_value_response: 23 | secret = json.loads(get_secret_value_response['SecretString']) 24 | else: 25 | secret = json.loads(base64.b64decode(get_secret_value_response['SecretBinary'])) 26 | 27 | return secret 28 | 29 | # Returns a connection to the cluster 30 | @staticmethod 31 | def get_connection(db_creds): 32 | 33 | con_params = rs_common.connection_info(db_creds) 34 | 35 | rs_conn_string = "host=%s port=%s dbname=%s user=%s password=%s" % (con_params['host'], con_params['port'], con_params['dbname'], con_params['username'], con_params['password']) 36 | rs_conn = pg.connect(dbname=rs_conn_string) 37 | rs_conn.query("set statement_timeout = 1200000") 38 | 39 | return rs_conn 40 | 41 | # Submits a query to the cluster 42 | @staticmethod 43 | def query(con,statement): 44 | res = con.query(statement) 45 | return res 46 | 47 | # Get job args 48 | args = getResolvedOptions(sys.argv,['db_creds','glue_db']) 49 | db_creds = args['db_creds'] 50 | glue_db = args['glue_db'] 51 | 52 | sql = ''' 53 | BEGIN; 54 | CREATE TEMP TABLE staging_station_status_history(LIKE public.station_status_history); 55 | 56 | INSERT INTO staging_station_status_history 57 | SELECT 58 | station_id 59 | ,num_bikes_available 60 | ,is_installed 61 | ,is_returning 62 | ,is_renting 63 | ,TIMESTAMP 'epoch' + last_reported *INTERVAL '1 second' AS last_reported 64 | ,year || month || day || hour AS load_partition 65 | FROM {}.station_status_history 66 | WHERE 67 | year || month || day || hour > (SELECT NVL(MAX(load_partition), '0000000000') FROM public.station_status_history); 68 | 69 | DELETE FROM public.station_status_history 70 | USING staging_station_status_history s 71 | WHERE 72 | station_status_history.station_id = s.station_id 73 | AND station_status_history.last_reported = s.last_reported; 74 | 75 | INSERT INTO public.station_status_history 76 | SELECT * FROM staging_station_status_history; 77 | 78 | DROP TABLE staging_station_status_history; 79 | 80 | COMMIT; 81 | '''.format(glue_db) 82 | 83 | # Connect to database 84 | print('Connecting...') 85 | con = rs_common.get_connection(db_creds) 86 | 87 | # Run SQL statement 88 | print("Connected. 
Running query...") 89 | result = rs_common.query(con, sql) 90 | 91 | print(result) -------------------------------------------------------------------------------- /assets/src/App.tsx: -------------------------------------------------------------------------------- 1 | import Auth from "@aws-amplify/auth"; 2 | import React, { Component } from "react"; 3 | import { withRouter } from "react-router-dom"; 4 | import { Form, Nav, Navbar, Button } from "react-bootstrap"; 5 | import "./App.css"; 6 | import { Routes } from "./Routes"; 7 | 8 | interface AppProps { 9 | history: any; 10 | } 11 | 12 | interface AppState { 13 | isAuthenticated: boolean; 14 | isAuthenticating: boolean; 15 | } 16 | 17 | class App extends Component { 18 | constructor(props: AppProps) { 19 | super(props); 20 | 21 | this.state = { 22 | isAuthenticated: false, 23 | isAuthenticating: true 24 | }; 25 | 26 | document.title = "AWS BikeNow Demo" 27 | } 28 | 29 | async componentDidMount() { 30 | try { 31 | if (await Auth.currentSession()) { 32 | this.userHasAuthenticated(true); 33 | } 34 | } 35 | catch (e) { 36 | if (e !== 'No current user') { 37 | alert(e); 38 | } 39 | } 40 | 41 | this.setState({ isAuthenticating: false }); 42 | } 43 | 44 | userHasAuthenticated = (authenticated: boolean) => { 45 | this.setState({ isAuthenticated: authenticated }); 46 | } 47 | 48 | handleLogout = async () => { 49 | await Auth.signOut(); 50 | 51 | this.userHasAuthenticated(false); 52 | this.props.history.push("/login"); 53 | } 54 | 55 | showLoggedInBar = () => ( 56 |
57 | 58 | 59 | 60 | 61 |
62 | ); 63 | 64 | showLoggedOutBar = () => ( 65 |
66 | 67 | 68 |
69 | ); 70 | 71 | render() { 72 | const childProps = { 73 | isAuthenticated: this.state.isAuthenticated, 74 | userHasAuthenticated: this.userHasAuthenticated 75 | }; 76 | 77 | return ( 78 | !this.state.isAuthenticating && 79 |
80 | 81 | AWS BikeNow Demo 82 | 83 | 84 | 87 | 88 | 89 | 90 |
91 | ); 92 | } 93 | } 94 | 95 | export default withRouter(App as any); -------------------------------------------------------------------------------- /assets/src/modules/signup/Rides.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import { Redirect } from 'react-router'; 3 | import { Table, Spinner } from "react-bootstrap"; 4 | 5 | import "./rides.css"; 6 | import API from "@aws-amplify/api"; 7 | 8 | interface RidesProps { 9 | isAuthenticated: boolean; 10 | } 11 | 12 | interface RidesState { 13 | isLoading: boolean, 14 | redirect: boolean, 15 | rides: Ride[], 16 | } 17 | 18 | interface Ride { 19 | id: number; 20 | userId: string; 21 | stationId: number; 22 | stationName: string; 23 | duration: number; 24 | price: number; 25 | createdDate: Date; 26 | } 27 | 28 | export default class Rides extends React.Component { 29 | constructor(props: RidesProps) { 30 | super(props); 31 | 32 | this.state = { 33 | isLoading: true, 34 | redirect: false, 35 | rides: [], 36 | }; 37 | } 38 | 39 | async componentDidMount() { 40 | if (!this.props.isAuthenticated) { 41 | return; 42 | } 43 | 44 | try { 45 | const rides = await this.rides(); 46 | this.setState({ rides }); 47 | } catch(e) { 48 | alert(e); 49 | } 50 | 51 | this.setState({ isLoading: false }); 52 | } 53 | 54 | rides() { 55 | return API.get("bikenow", "/rides", null); 56 | } 57 | 58 | renderRidesList(rides: Ride[]) { 59 | const ridesList: Ride[] = []; 60 | 61 | return ridesList.concat(rides).map( 62 | (ride, i) => 63 | 64 | {ride.stationName} 65 | {ride.duration} hours 66 | ${ride.price.toFixed(2)} 67 | {new Date(ride.createdDate).toLocaleDateString()} 68 | 69 | ); 70 | } 71 | 72 | render() { 73 | if (this.state.redirect) return 74 | 75 | return ( 76 |
77 |

Ride History

78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | { 89 | this.state.isLoading ? 90 | ( 91 | 92 | 95 | 96 | ) : 97 | this.renderRidesList(this.state.rides) 98 | } 99 | 100 |
Station NameDurationCostRide Date
93 | 94 |
101 | { 102 | !this.state.isLoading && (!this.state.rides || this.state.rides.length === 0) ? 103 |
You do not have any ride history. Find a bike station to start your first ride!
104 | : "" 105 | } 106 |
107 | ); 108 | } 109 | } -------------------------------------------------------------------------------- /lambdas/setup_upload_artifacts/artifacts/glue_load_station_review_redshift.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | import base64 3 | import json 4 | import pg 5 | import sys 6 | from awsglue.utils import getResolvedOptions 7 | from botocore.exceptions import ClientError 8 | from datetime import datetime, timedelta 9 | 10 | # Helper class for interfacing with Redshift 11 | class rs_common: 12 | # Returns connection and credential information from secrets manager 13 | @staticmethod 14 | def connection_info(db_creds): 15 | session = boto3.session.Session() 16 | client = session.client( 17 | service_name='secretsmanager' 18 | ) 19 | 20 | get_secret_value_response = client.get_secret_value(SecretId=db_creds) 21 | 22 | if 'SecretString' in get_secret_value_response: 23 | secret = json.loads(get_secret_value_response['SecretString']) 24 | else: 25 | secret = json.loads(base64.b64decode(get_secret_value_response['SecretBinary'])) 26 | 27 | return secret 28 | 29 | # Returns a connection to the cluster 30 | @staticmethod 31 | def get_connection(db_creds): 32 | 33 | con_params = rs_common.connection_info(db_creds) 34 | 35 | rs_conn_string = "host=%s port=%s dbname=%s user=%s password=%s" % (con_params['host'], con_params['port'], con_params['dbname'], con_params['username'], con_params['password']) 36 | rs_conn = pg.connect(dbname=rs_conn_string) 37 | rs_conn.query("set statement_timeout = 1200000") 38 | 39 | return rs_conn 40 | 41 | # Submits a query to the cluster 42 | @staticmethod 43 | def query(con,statement): 44 | res = con.query(statement) 45 | return res 46 | 47 | # Get job args 48 | args = getResolvedOptions(sys.argv,['db_creds','glue_db']) 49 | db_creds = args['db_creds'] 50 | glue_db = args['glue_db'] 51 | 52 | sql = ''' 53 | BEGIN; 54 | CREATE TEMP TABLE staging_station_review_sentiment(LIKE public.station_review_sentiment); 55 | 56 | INSERT INTO staging_station_review_sentiment 57 | SELECT 58 | station_id 59 | ,user_id 60 | ,review 61 | ,sentiment 62 | ,CAST(sentiment_mixed AS FLOAT) 63 | ,CAST(sentiment_neutral AS FLOAT) 64 | ,CAST(sentiment_positive AS FLOAT) 65 | ,CAST(sentiment_negative AS FLOAT) 66 | ,TIMESTAMP 'epoch' + create_date *INTERVAL '1 second' AS create_date 67 | ,year || month || day || hour AS load_partition 68 | FROM {}.station_review_sentiment 69 | WHERE 70 | year || month || day || hour > (SELECT NVL(MAX(load_partition), '0000000000') FROM public.station_review_sentiment); 71 | 72 | DELETE FROM public.station_review_sentiment 73 | USING staging_station_review_sentiment s 74 | WHERE 75 | station_review_sentiment.station_id = s.station_id 76 | AND station_review_sentiment.user_id = s.user_id 77 | AND station_review_sentiment.create_date = s.create_date; 78 | 79 | INSERT INTO public.station_review_sentiment 80 | SELECT * FROM staging_station_review_sentiment; 81 | 82 | DROP TABLE staging_station_review_sentiment; 83 | 84 | COMMIT; 85 | '''.format(glue_db) 86 | 87 | # Connect to database 88 | print('Connecting...') 89 | con = rs_common.get_connection(db_creds) 90 | 91 | # Run SQL statement 92 | print("Connected. 
Running query...") 93 | result = rs_common.query(con, sql) 94 | 95 | print(result) -------------------------------------------------------------------------------- /assets/src/service-worker.ts: -------------------------------------------------------------------------------- 1 | /// 2 | /* eslint-disable no-restricted-globals */ 3 | 4 | // This service worker can be customized! 5 | // See https://developers.google.com/web/tools/workbox/modules 6 | // for the list of available Workbox modules, or add any other 7 | // code you'd like. 8 | // You can also remove this file if you'd prefer not to use a 9 | // service worker, and the Workbox build step will be skipped. 10 | 11 | import { clientsClaim } from 'workbox-core'; 12 | import { ExpirationPlugin } from 'workbox-expiration'; 13 | import { precacheAndRoute, createHandlerBoundToURL } from 'workbox-precaching'; 14 | import { registerRoute } from 'workbox-routing'; 15 | import { StaleWhileRevalidate } from 'workbox-strategies'; 16 | 17 | declare const self: ServiceWorkerGlobalScope; 18 | 19 | clientsClaim(); 20 | 21 | // Precache all of the assets generated by your build process. 22 | // Their URLs are injected into the manifest variable below. 23 | // This variable must be present somewhere in your service worker file, 24 | // even if you decide not to use precaching. See https://cra.link/PWA 25 | precacheAndRoute(self.__WB_MANIFEST); 26 | 27 | // Set up App Shell-style routing, so that all navigation requests 28 | // are fulfilled with your index.html shell. Learn more at 29 | // https://developers.google.com/web/fundamentals/architecture/app-shell 30 | const fileExtensionRegexp = new RegExp('/[^/?]+\\.[^/]+$'); 31 | registerRoute( 32 | // Return false to exempt requests from being fulfilled by index.html. 33 | ({ request, url }: { request: Request; url: URL }) => { 34 | // If this isn't a navigation, skip. 35 | if (request.mode !== 'navigate') { 36 | return false; 37 | } 38 | 39 | // If this is a URL that starts with /_, skip. 40 | if (url.pathname.startsWith('/_')) { 41 | return false; 42 | } 43 | 44 | // If this looks like a URL for a resource, because it contains 45 | // a file extension, skip. 46 | if (url.pathname.match(fileExtensionRegexp)) { 47 | return false; 48 | } 49 | 50 | // Return true to signal that we want to use the handler. 51 | return true; 52 | }, 53 | createHandlerBoundToURL(process.env.PUBLIC_URL + '/index.html') 54 | ); 55 | 56 | // An example runtime caching route for requests that aren't handled by the 57 | // precache, in this case same-origin .png requests like those from in public/ 58 | registerRoute( 59 | // Add in any other file extensions or routing criteria as needed. 60 | ({ url }) => url.origin === self.location.origin && url.pathname.endsWith('.png'), 61 | // Customize this strategy as needed, e.g., by changing to CacheFirst. 62 | new StaleWhileRevalidate({ 63 | cacheName: 'images', 64 | plugins: [ 65 | // Ensure that once this runtime cache reaches a maximum size the 66 | // least-recently used images are removed. 67 | new ExpirationPlugin({ maxEntries: 50 }), 68 | ], 69 | }) 70 | ); 71 | 72 | // This allows the web app to trigger skipWaiting via 73 | // registration.waiting.postMessage({type: 'SKIP_WAITING'}) 74 | self.addEventListener('message', (event) => { 75 | if (event.data && event.data.type === 'SKIP_WAITING') { 76 | self.skipWaiting(); 77 | } 78 | }); 79 | 80 | // Any other custom service worker logic can go here. 
81 | -------------------------------------------------------------------------------- /templates/setup-template.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Transform: 'AWS::Serverless-2016-10-31' 3 | Description: Template to initialize and upload artifacts for Bike Demo 4 | 5 | Resources: 6 | # -------------------------------- ARTIFACTS BUCKET 7 | S3BucketArtifacts: 8 | Type: AWS::S3::Bucket 9 | Properties: 10 | AccessControl: Private 11 | DeletionPolicy: Delete 12 | 13 | # -------------------------------- LAMBDA ROLES 14 | RoleSetupArtifactsLambda: 15 | Type: AWS::IAM::Role 16 | Properties: 17 | Path: / 18 | AssumeRolePolicyDocument: 19 | Version: '2012-10-17' 20 | Statement: 21 | - Effect: Allow 22 | Principal: 23 | Service: 24 | - lambda.amazonaws.com 25 | Action: sts:AssumeRole 26 | Policies: 27 | - PolicyName: SetupArtifactsPolicy 28 | PolicyDocument: 29 | Version: '2012-10-17' 30 | Statement: 31 | - Effect: Allow 32 | Action: 33 | - logs:CreateLogStream 34 | - logs:CreateLogGroup 35 | - logs:PutLogEvents 36 | Resource: 37 | - arn:aws:logs:*:*:* 38 | - Effect: Allow 39 | Action: 40 | - s3:* 41 | Resource: 42 | - !GetAtt S3BucketArtifacts.Arn 43 | - Fn::Join: 44 | - "/" 45 | - 46 | - !GetAtt S3BucketArtifacts.Arn 47 | - "*" 48 | 49 | # -------------------------------- LAMBDA FUNCTIONS 50 | LambdaSetupArtifacts: 51 | Type: AWS::Serverless::Function 52 | Properties: 53 | Handler: index.lambda_handler 54 | Runtime: python3.8 55 | CodeUri: ../lambdas/setup_upload_artifacts 56 | Role: !GetAtt RoleSetupArtifactsLambda.Arn 57 | Description: Copy artifacts to S3 58 | MemorySize: 128 59 | Timeout: 300 60 | Environment: 61 | Variables: 62 | SCRIPT_BUCKET: !Ref S3BucketArtifacts 63 | SCRIPT_FOLDER: artifacts 64 | LambdaSetupEmptyBucket: 65 | Type: AWS::Serverless::Function 66 | Properties: 67 | Handler: index.lambda_handler 68 | Runtime: python3.8 69 | CodeUri: ../lambdas/setup_empty_bucket 70 | Role: !GetAtt RoleSetupArtifactsLambda.Arn 71 | Description: Empty artifacts bucket upon deletion 72 | MemorySize: 128 73 | Timeout: 300 74 | Environment: 75 | Variables: 76 | SCRIPT_BUCKET: !Ref S3BucketArtifacts 77 | 78 | # -------------------------------- CUSTOM SETUP RESOURCES 79 | SetupUploadArtifacts: 80 | Type: Custom::SetupFunction 81 | DependsOn: 82 | - S3BucketArtifacts 83 | - RoleSetupArtifactsLambda 84 | - LambdaSetupArtifacts 85 | Properties: 86 | ServiceToken: !GetAtt LambdaSetupArtifacts.Arn 87 | SetupEmptyBucket: 88 | Type: Custom::SetupFunction 89 | DependsOn: 90 | - S3BucketArtifacts 91 | - RoleSetupArtifactsLambda 92 | - LambdaSetupEmptyBucket 93 | Properties: 94 | ServiceToken: !GetAtt LambdaSetupEmptyBucket.Arn 95 | 96 | Outputs: 97 | ArtifactsBucket: 98 | Value: !Ref S3BucketArtifacts 99 | Description: Name of S3 bucket containing artifacts -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing Guidelines 2 | 3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional 4 | documentation, we greatly value feedback and contributions from our community. 5 | 6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary 7 | information to effectively respond to your bug report or contribution. 
8 | 9 | 10 | ## Reporting Bugs/Feature Requests 11 | 12 | We welcome you to use the GitHub issue tracker to report bugs or suggest features. 13 | 14 | When filing an issue, please check existing open, or recently closed, issues to make sure somebody else hasn't already 15 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful: 16 | 17 | * A reproducible test case or series of steps 18 | * The version of our code being used 19 | * Any modifications you've made relevant to the bug 20 | * Anything unusual about your environment or deployment 21 | 22 | 23 | ## Contributing via Pull Requests 24 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that: 25 | 26 | 1. You are working against the latest source on the *master* branch. 27 | 2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already. 28 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted. 29 | 30 | To send us a pull request, please: 31 | 32 | 1. Fork the repository. 33 | 2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change. 34 | 3. Ensure local tests pass. 35 | 4. Commit to your fork using clear commit messages. 36 | 5. Send us a pull request, answering any default questions in the pull request interface. 37 | 6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation. 38 | 39 | GitHub provides additional documentation on [forking a repository](https://help.github.com/articles/fork-a-repo/) and 40 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/). 41 | 42 | 43 | ## Finding contributions to work on 44 | Looking at the existing issues is a great way to find something to contribute to. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any 'help wanted' issues is a great place to start. 45 | 46 | 47 | ## Code of Conduct 48 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 49 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 50 | opensource-codeofconduct@amazon.com with any additional questions or comments. 51 | 52 | 53 | ## Security issue notifications 54 | If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public GitHub issue. 55 | 56 | 57 | ## Licensing 58 | 59 | See the [LICENSE](LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution. 60 | 61 | We may ask you to sign a [Contributor License Agreement (CLA)](http://en.wikipedia.org/wiki/Contributor_License_Agreement) for larger changes.
62 | -------------------------------------------------------------------------------- /assets/src/modules/signup/Reviews.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import { Redirect } from 'react-router'; 3 | import { Table, Spinner } from "react-bootstrap"; 4 | 5 | import "./reviews.css"; 6 | import API from "@aws-amplify/api"; 7 | 8 | interface ReviewsProps { 9 | isAuthenticated: boolean; 10 | } 11 | 12 | interface ReviewsState { 13 | isLoading: boolean, 14 | redirect: boolean, 15 | reviews: Review[], 16 | } 17 | 18 | interface Review { 19 | userId: string; 20 | stationId: number; 21 | stationName: string; 22 | review: string; 23 | createdDate: Date; 24 | } 25 | 26 | export default class Reviews extends React.Component { 27 | constructor(props: ReviewsProps) { 28 | super(props); 29 | 30 | this.state = { 31 | isLoading: true, 32 | redirect: false, 33 | reviews: [], 34 | }; 35 | } 36 | 37 | async componentDidMount() { 38 | if (!this.props.isAuthenticated) { 39 | return; 40 | } 41 | 42 | try { 43 | const result = await this.reviews(); 44 | const reviews: Review[] = [] 45 | for (let i = 0; i < result.length; i++) { 46 | const element = result[i]; 47 | 48 | const review : Review = { 49 | userId: element.user_id, 50 | stationId: element.station_id, 51 | stationName: element.station_name, 52 | review: element.review, 53 | createdDate: new Date(element.create_date) 54 | }; 55 | reviews.push(review); 56 | } 57 | this.setState({ reviews: reviews.reverse() }); 58 | } catch(e) { 59 | alert(e); 60 | } 61 | 62 | this.setState({ isLoading: false }); 63 | } 64 | 65 | reviews() { 66 | return API.get("bikenow", "/reviews", null); 67 | } 68 | 69 | renderReviewsList(reviews: Review[]) { 70 | const reviewsList: Review[] = []; 71 | 72 | return reviewsList.concat(reviews).map( 73 | (review, i) => 74 | 75 | {review.stationName} 76 | {review.review} 77 | {new Date(review.createdDate).toLocaleDateString()} 78 | 79 | ); 80 | } 81 | 82 | render() { 83 | if (this.state.redirect) return 84 | 85 | return ( 86 |
87 |

Review History

88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | { 98 | this.state.isLoading ? 99 | ( 100 | 101 | 104 | 105 | ) : 106 | this.renderReviewsList(this.state.reviews) 107 | } 108 | 109 |
Station NameReviewDate
102 | 103 |
110 | { 111 | !this.state.isLoading && (!this.state.reviews || this.state.reviews.length === 0) ? 112 |
You have not made any reviews. Find a bike station to write your first review!
113 | : "" 114 | } 115 |
116 | ); 117 | } 118 | } -------------------------------------------------------------------------------- /assets/src/modules/signup/Login.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import { Redirect } from 'react-router'; 3 | import { Form, FormGroup, FormControl, FormLabel, Button, Spinner, FormControlProps } from "react-bootstrap"; 4 | import Auth from "@aws-amplify/auth"; 5 | import "./login.css"; 6 | 7 | const emailRegex = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/; 8 | 9 | interface LoginProps { 10 | isAuthenticated: boolean; 11 | userHasAuthenticated: (authenticated: boolean) => void; 12 | } 13 | 14 | interface LoginState { 15 | loading: boolean; 16 | redirect: boolean; 17 | email: string; 18 | password: string; 19 | isValid: boolean; 20 | } 21 | 22 | export default class Login extends React.Component { 23 | constructor(props: LoginProps) { 24 | super(props); 25 | 26 | this.state = { 27 | loading: false, 28 | redirect: false, 29 | email: "", 30 | password: "", 31 | isValid: false, 32 | }; 33 | } 34 | 35 | onChange = (event: React.ChangeEvent) => { 36 | const target = event.target as HTMLInputElement; 37 | this.setState({ ...this.state, [target.name]: target.value }); 38 | } 39 | 40 | validateForm = () => { 41 | return emailRegex.test(this.state.email.toLowerCase()) && this.state.password; 42 | } 43 | 44 | onLogin = async (event: React.FormEvent) => { 45 | if (event.currentTarget.checkValidity()) { 46 | event.preventDefault(); 47 | event.stopPropagation(); 48 | this.setState({ isValid: true, loading: true }); 49 | 50 | try { 51 | await Auth.signIn(this.state.email, this.state.password); 52 | this.props.userHasAuthenticated(true); 53 | this.setState({ redirect: true }) 54 | } catch (e) { 55 | alert(e.message); 56 | this.setState({ loading: false }); 57 | } 58 | } 59 | } 60 | 61 | render() { 62 | if (this.state.redirect) return 63 | const { email, password, isValid } = this.state; 64 | 65 | return ( 66 |
67 |
68 | 69 | Email 70 | this.onChange(event as any)} 75 | isValid={emailRegex.test(email.toLowerCase())} 76 | required /> 77 | Must be a valid email address 78 | 79 | 80 | Password 81 | = 8} 88 | required /> 89 | Required field 90 | 91 | 99 |
100 |
101 | ); 102 | } 103 | } -------------------------------------------------------------------------------- /lambdas/setup_update_config/index.js: -------------------------------------------------------------------------------- 1 | 'use strict'; const https = require('https'); const url = require('url'); 2 | var AWS = require('aws-sdk'), 3 | codecommit = new AWS.CodeCommit(); 4 | 5 | exports.handler = function (event, context, callback) { 6 | console.log('Received event:', JSON.stringify(event, null, 2)); 7 | if (event.RequestType === 'Create') { 8 | getBranchInfo().then(function (data) { 9 | var parentCommitId = data.branch.commitId; 10 | updateConfigFile(parentCommitId).then(function (data) { 11 | sendResponse(event, callback, context.logStreamName, 'SUCCESS'); 12 | }).catch(function (err) { 13 | var responseData = { Error: 'Updating config file failed ' + err }; 14 | sendResponse(event, callback, context.logStreamName, 'FAILED', responseData); 15 | }); 16 | }).catch(function (err) { 17 | var responseData = { Error: 'Updating config file failed ' + err }; 18 | sendResponse(event, callback, context.logStreamName, 'FAILED', responseData); 19 | }); 20 | } else { 21 | sendResponse(event, callback, context.logStreamName, 'SUCCESS'); 22 | } 23 | }; 24 | 25 | function getConfigFile() { 26 | return `export default { 27 | MAX_ATTACHMENT_SIZE: 5000000, 28 | bikenowApi: { 29 | REGION: "${process.env.REGION}", 30 | API_URL: "${process.env.DB_API_URL}", 31 | }, 32 | aimlApi: { 33 | REGION: "${process.env.REGION}", 34 | API_URL: "${process.env.ML_API_URL}" 35 | }, 36 | cognito: { 37 | REGION: "${process.env.REGION}", 38 | USER_POOL_ID: "${process.env.USER_POOL_ID}", 39 | APP_CLIENT_ID: "${process.env.APP_CLIENT_ID}", 40 | IDENTITY_POOL_ID: "${process.env.IDENTITY_POOL_ID}" 41 | } 42 | };` 43 | } 44 | 45 | function updateConfigFile(parentCommitId) { 46 | var params = { 47 | branchName: process.env.BRANCH_NAME, 48 | fileContent: new Buffer(getConfigFile()), 49 | filePath: 'src/config.js', 50 | repositoryName: process.env.REPOSITORY_NAME, 51 | commitMessage: 'Updating config.js with backend variables', 52 | fileMode: "NORMAL", 53 | name: 'UploadConfigLambda', 54 | parentCommitId: parentCommitId 55 | }; 56 | return codecommit.putFile(params).promise(); 57 | } 58 | 59 | function getBranchInfo() { 60 | var params = { 61 | branchName: process.env.BRANCH_NAME, 62 | repositoryName: process.env.REPOSITORY_NAME 63 | }; 64 | return codecommit.getBranch(params).promise(); 65 | } 66 | 67 | function sendResponse(event, callback, logStreamName, responseStatus, responseData) { 68 | const responseBody = JSON.stringify({ 69 | Status: responseStatus, 70 | Reason: `See the details in CloudWatch Log Stream: ${logStreamName}`, 71 | PhysicalResourceId: logStreamName, 72 | StackId: event.StackId, 73 | RequestId: event.RequestId, 74 | LogicalResourceId: event.LogicalResourceId, 75 | Data: responseData, 76 | }); 77 | 78 | console.log('RESPONSE BODY:\n', responseBody); 79 | 80 | const parsedUrl = url.parse(event.ResponseURL); 81 | const options = { 82 | hostname: parsedUrl.hostname, 83 | port: 443, 84 | path: parsedUrl.path, 85 | method: 'PUT', 86 | headers: { 87 | 'Content-Type': '', 88 | 'Content-Length': responseBody.length, 89 | }, 90 | }; 91 | 92 | const req = https.request(options, (res) => { 93 | console.log('STATUS:', res.statusCode); 94 | console.log('HEADERS:', JSON.stringify(res.headers)); 95 | callback(null, 'Successfully sent stack response!'); 96 | }); 97 | 98 | req.on('error', (err) => { 99 | 
console.log('sendResponse Error:\n', err); 100 | callback(err); 101 | }); 102 | 103 | req.write(responseBody); 104 | req.end(); 105 | } 106 | -------------------------------------------------------------------------------- /lambdas/setup_datawarehouse_ddl/index.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | import base64 3 | import json 4 | import sys 5 | import os 6 | import time 7 | import psycopg2 8 | import cfnresponse 9 | from botocore.exceptions import ClientError 10 | from datetime import datetime, timedelta 11 | 12 | DB_CREDS = os.environ['DB_CREDS'] 13 | GLUE_DB = os.environ['GLUE_DB'] 14 | IAM_ROLE_ARN = os.environ['IAM_ROLE_ARN'] 15 | REDSHIFT_NAME = os.environ['REDSHIFT_NAME'] 16 | 17 | def connection_info(db_creds): 18 | session = boto3.session.Session() 19 | client = session.client( 20 | service_name='secretsmanager' 21 | ) 22 | 23 | get_secret_value_response = client.get_secret_value(SecretId=db_creds) 24 | 25 | if 'SecretString' in get_secret_value_response: 26 | secret = json.loads(get_secret_value_response['SecretString']) 27 | else: 28 | secret = json.loads(base64.b64decode(get_secret_value_response['SecretBinary'])) 29 | 30 | return secret 31 | 32 | def lambda_handler(event, context): 33 | message = '' 34 | response = cfnresponse.FAILED 35 | cluster_status = '' 36 | 37 | # Get CloudFormation parameters 38 | cfn_stack_id = event.get('StackId') 39 | cfn_request_type = event.get('RequestType') 40 | cfn_physicalResourceId = context.log_stream_name if event.get('ResourceProperties.PhysicalResourceId') is None else event.get('ResourceProperties.PhysicalResourceId') 41 | 42 | if cfn_stack_id and cfn_request_type != 'Delete': 43 | try: 44 | # Wait for cluster to become available before trying to connect 45 | while cluster_status != 'Available': 46 | # Exit if Lambda will timeout before next sleep ends 47 | if context.get_remaining_time_in_millis() < (30 * 1000): 48 | message = 'Function will timeout. Exiting with failure.' 49 | print('ERROR: ', message) 50 | cfnresponse.send(event, context, response, 51 | { 52 | 'Message': message 53 | }, 54 | cfn_physicalResourceId) 55 | 56 | return { 57 | 'statusCode': 200, 58 | 'body': json.dumps(message) 59 | } 60 | 61 | # Get cluster availability status every 30 seconds 62 | time.sleep(30) 63 | rsclient = boto3.client('redshift') 64 | clusters = rsclient.describe_clusters(ClusterIdentifier=REDSHIFT_NAME) 65 | cluster_status = clusters['Clusters'][0]['ClusterAvailabilityStatus'] 66 | print('INFO: Cluster {} status: {}. 
Time remaining: {} ms.'.format(REDSHIFT_NAME, cluster_status, context.get_remaining_time_in_millis())) 67 | 68 | create_spectrum_schema_sql = '' 69 | create_status_history_table_sql = '' 70 | 71 | with open('sql/create_spectrum_schema.sql', 'r') as spectrum_sql_file: 72 | create_spectrum_schema_sql = spectrum_sql_file.read() 73 | create_spectrum_schema_sql = create_spectrum_schema_sql.replace('${GLUE_DB}', GLUE_DB).replace('${IAM_ROLE_ARN}', IAM_ROLE_ARN) 74 | 75 | with open('sql/create_status_history_table.sql', 'r') as table_sql_file: 76 | create_status_history_table_sql = table_sql_file.read() 77 | 78 | print('INFO: Connecting...') 79 | conn_info = connection_info(DB_CREDS) 80 | with psycopg2.connect(dbname=conn_info['dbname'], host=conn_info['host'], port=conn_info['port'], user=conn_info['username'], password=conn_info['password']) as conn: 81 | with conn.cursor() as cur: 82 | print('INFO: Executing SQL: {}'.format(create_spectrum_schema_sql)) 83 | cur.execute(create_spectrum_schema_sql) 84 | print('INFO: Executing SQL: {}'.format(create_status_history_table_sql)) 85 | cur.execute(create_status_history_table_sql) 86 | 87 | message = 'SUCCESS: Executed setup queries successfully.' 88 | response = cfnresponse.SUCCESS 89 | except Exception as e: 90 | print('ERROR: ', e) 91 | message = '{}'.format(e) 92 | else: 93 | message = 'INFO: Deleting function.' 94 | response = cfnresponse.SUCCESS 95 | 96 | cfnresponse.send(event, context, response, 97 | { 98 | 'Message': message 99 | }, 100 | cfn_physicalResourceId) 101 | 102 | return { 103 | 'statusCode': 200, 104 | 'body': json.dumps(message) 105 | } -------------------------------------------------------------------------------- /lambdas/setup_rds_ddl/index.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | import base64 3 | import json 4 | import sys 5 | import os 6 | import time 7 | import pymysql 8 | import cfnresponse 9 | from botocore.exceptions import ClientError 10 | from datetime import datetime, timedelta 11 | 12 | DB_CREDS = os.environ['DB_CREDS'] 13 | DB_NAME = os.environ['DB_NAME'] 14 | DB_INSTANCE = os.environ['DB_INSTANCE'] 15 | 16 | def connection_info(db_creds): 17 | session = boto3.session.Session() 18 | client = session.client( 19 | service_name='secretsmanager' 20 | ) 21 | 22 | get_secret_value_response = client.get_secret_value(SecretId=db_creds) 23 | 24 | if 'SecretString' in get_secret_value_response: 25 | secret = json.loads(get_secret_value_response['SecretString']) 26 | else: 27 | secret = json.loads(base64.b64decode(get_secret_value_response['SecretBinary'])) 28 | 29 | print("[DEBUG] Secret: " + json.dumps(secret, indent=2)) 30 | return secret 31 | 32 | def lambda_handler(event, context): 33 | message = '' 34 | response = cfnresponse.FAILED 35 | instance_status = '' 36 | 37 | # Get CloudFormation parameters 38 | cfn_stack_id = event.get('StackId') 39 | cfn_request_type = event.get('RequestType') 40 | cfn_physicalResourceId = context.log_stream_name if event.get('ResourceProperties.PhysicalResourceId') is None else event.get('ResourceProperties.PhysicalResourceId') 41 | 42 | if cfn_stack_id and cfn_request_type != 'Delete': 43 | try: 44 | # Wait for instance to become available before trying to connect 45 | while instance_status.lower() != 'available': 46 | # Exit if Lambda will timeout before next sleep ends 47 | if context.get_remaining_time_in_millis() < (30 * 1000): 48 | message = 'Function will timeout. Exiting with failure.' 
49 | print('[ERROR] ', message) 50 | cfnresponse.send(event, context, response, 51 | { 52 | 'Message': message 53 | }, 54 | cfn_physicalResourceId) 55 | 56 | return { 57 | 'statusCode': 200, 58 | 'body': json.dumps(message) 59 | } 60 | 61 | # Get instance availability status every 30 seconds 62 | time.sleep(30) 63 | rdsclient = boto3.client('rds') 64 | dbinstance = rdsclient.describe_db_instances(DBInstanceIdentifier=DB_INSTANCE) 65 | instance_status = dbinstance['DBInstances'][0]['DBInstanceStatus'] 66 | print('[INFO] DBInstance {} status: {}. Time remaining: {} ms.'.format(DB_NAME, instance_status, context.get_remaining_time_in_millis())) 67 | 68 | sql_queries = [ 69 | 'DROP TABLE IF EXISTS rideTransactions;' 70 | ,''' 71 | CREATE TABLE rideTransactions 72 | ( 73 | id INT NOT NULL AUTO_INCREMENT 74 | ,userId VARCHAR(64) 75 | ,stationId INT 76 | ,stationName VARCHAR(128) 77 | ,duration INT 78 | ,price DECIMAL(5,2) 79 | ,createdDate TIMESTAMP DEFAULT CURRENT_TIMESTAMP 80 | ,PRIMARY KEY (id) 81 | ); 82 | ''' 83 | ,'CREATE INDEX idxRideUserId ON rideTransactions(userId);' 84 | ,'CREATE INDEX idxRideStationId ON rideTransactions(stationId);' 85 | ] 86 | 87 | print('[INFO] Connecting...') 88 | conn_info = connection_info(DB_CREDS) 89 | print("[DEBUG] DB_CREDS: " + json.dumps(DB_CREDS, indent=2)) 90 | conn = pymysql.connect(host=conn_info['host'], user=conn_info['username'], password=conn_info['password'], database=conn_info['dbname'], connect_timeout=30) 91 | with conn.cursor() as cur: 92 | for sql in sql_queries: 93 | print('[INFO] Executing SQL: {}'.format(sql)) 94 | cur.execute(sql) 95 | conn.commit() 96 | conn.close() 97 | 98 | message = '[SUCCESS] Executed setup queries successfully.' 99 | response = cfnresponse.SUCCESS 100 | except Exception as e: 101 | print('[ERROR] ', e) 102 | message = '{}'.format(e) 103 | else: 104 | message = '[INFO] Deleting function.' 
105 | response = cfnresponse.SUCCESS 106 | 107 | cfnresponse.send(event, context, response, 108 | { 109 | 'Message': message 110 | }, 111 | cfn_physicalResourceId) 112 | 113 | return { 114 | 'statusCode': 200, 115 | 'body': json.dumps(message) 116 | } -------------------------------------------------------------------------------- /assets/src/modules/signup/home.css: -------------------------------------------------------------------------------- 1 | @import url('https://unpkg.com/leaflet@1.6.0/dist/leaflet.css'); 2 | 3 | .Home .lander { 4 | padding: 80px 0; 5 | text-align: center; 6 | } 7 | 8 | .Home .lander h1 { 9 | font-family: "Open Sans", sans-serif; 10 | font-weight: 600; 11 | } 12 | 13 | .Home .lander p { 14 | color: white; 15 | } 16 | 17 | .Home .goals h4 { 18 | font-family: "Open Sans", sans-serif; 19 | font-weight: 600; 20 | overflow: hidden; 21 | line-height: 1.5; 22 | white-space: nowrap; 23 | text-overflow: ellipsis; 24 | } 25 | 26 | .Home .goals table { 27 | margin-top: 30px; 28 | color: white; 29 | table-layout: fixed; 30 | } 31 | 32 | .Home .goals table a { 33 | color: #4eacf9; 34 | } 35 | 36 | .list-group { 37 | background: transparent !important; 38 | box-shadow: none !important; 39 | } 40 | 41 | .list-group-item h4 { 42 | font-size: 24px; 43 | font-weight: 100 !important; 44 | } 45 | 46 | .list-group-item-text { 47 | font-weight: 100 !important; 48 | font-size: 18px !important; 49 | padding-left: 30px; 50 | } 51 | 52 | .list-group-item-heading { 53 | font-weight: 200 !important; 54 | font-size: 24px !important; 55 | padding-left:30px; 56 | } 57 | 58 | .container-fluid { 59 | padding-right: 0 !important; 60 | } 61 | 62 | .description { 63 | overflow: hidden; 64 | text-overflow: ellipsis; 65 | white-space: nowrap; 66 | } 67 | 68 | .orange-link { 69 | color: #ff9900 !important; 70 | } 71 | 72 | .center-spinner { 73 | position: absolute; 74 | margin-top: 20px; 75 | left: 50%; 76 | } 77 | 78 | .leaflet-container { 79 | width: 100%; 80 | height: 75vh; 81 | } 82 | 83 | .lander-screen { 84 | box-shadow: 0 4px 8px 0 rgba(255, 255, 255, 0.5), 0 6px 20px 0 rgba(255, 255, 255, 0.5); 85 | margin-top: 12px; 86 | } 87 | 88 | th.popup-title { 89 | font-size: 18px; 90 | font-weight: bolder; 91 | color: #000; 92 | padding-bottom: 1em; 93 | } 94 | 95 | td.popup-label { 96 | font-weight: bold; 97 | color: #000; 98 | } 99 | 100 | td.popup-caption { 101 | color: #000 102 | } 103 | 104 | .request-popup .leaflet-popup-content-wrapper { 105 | width: 300px; 106 | } 107 | 108 | .btn.uniform-width { 109 | width: 85px; 110 | } 111 | 112 | .btn.btn-outline-light.uniform-width { 113 | width: 85px; 114 | } 115 | 116 | .card.adv-search-header { 117 | border:none; 118 | } 119 | 120 | .card-header:first-child.adv-search-toggle { 121 | border-top-left-radius: 0px; 122 | border-top-right-radius: 0px; 123 | border-bottom-right-radius: 0px; 124 | border-bottom-left-radius: 0px; 125 | background-color: #252f3d; 126 | } 127 | 128 | .card-header.adv-search-header { 129 | padding: 0px; 130 | background-color: #252f3d; 131 | border: none; 132 | cursor: pointer; 133 | } 134 | 135 | .card-header.adv-search-toggle { 136 | padding-left: 3px; 137 | padding-top: 0px; 138 | padding-right: 0px; 139 | padding-bottom: 0px; 140 | } 141 | 142 | .card-body.adv-search-body { 143 | padding-top: 0px; 144 | padding-left: 0px; 145 | padding-right: 0px; 146 | padding-bottom: 12px; 147 | background-color: #252f3d; 148 | } 149 | 150 | i { 151 | border: solid white; 152 | border-width: 0 3px 3px 0; 153 | display: inline-block; 154 | 
padding: 3px; 155 | } 156 | 157 | .arrow-down { 158 | transform: rotate(45deg); 159 | -webkit-transform: rotate(45deg); 160 | } 161 | 162 | .arrow-up { 163 | transform: rotate(-135deg); 164 | -webkit-transform: rotate(-135deg); 165 | } 166 | 167 | .react-datepicker-popper { 168 | position: static !important; 169 | transform: none !important; 170 | } 171 | 172 | .react-datepicker-wrapper { 173 | display: block !important; 174 | } 175 | 176 | h5.modal-prompt { 177 | padding-bottom: 18px; 178 | } 179 | 180 | .modal-header, .modal-body { 181 | color: #212529; 182 | } 183 | 184 | .form-row.form-header { 185 | background-color: #ced4da; 186 | margin-left: 3px; 187 | margin-bottom: 0px; 188 | } 189 | 190 | .form-row > .col { 191 | padding-top: 5px; 192 | } 193 | 194 | .form-group.modal-price { 195 | margin-top: .375rem; 196 | } 197 | 198 | .form-group-inline { 199 | display: inline-block; 200 | padding: 2px; 201 | } 202 | 203 | .form-group-inline.col-md-1 { 204 | width: auto !important; 205 | } 206 | 207 | input { 208 | display: block; 209 | width: 100%; 210 | height: calc(1.5em + 0.75rem + 2px); 211 | padding: 0.375rem 0.75rem; 212 | font-size: 1rem; 213 | font-weight: 400; 214 | line-height: 1.5; 215 | color: #495057; 216 | background-color: #fff; 217 | background-clip: padding-box; 218 | border: 1px solid #ced4da; 219 | border-radius: 0.25rem; 220 | -webkit-transition: border-color 0.15s ease-in-out, box-shadow 0.15s ease-in-out; 221 | transition: border-color 0.15s ease-in-out, box-shadow 0.15s ease-in-out; 222 | } -------------------------------------------------------------------------------- /assets/src/registerServiceWorker.ts: -------------------------------------------------------------------------------- 1 | // In production, we register a service worker to serve assets from local cache. 2 | 3 | // This lets the app load faster on subsequent visits in production, and gives 4 | // it offline capabilities. However, it also means that developers (and users) 5 | // will only see deployed updates on the "N+1" visit to a page, since previously 6 | // cached resources are updated in the background. 7 | 8 | // To learn more about the benefits of this model, read https://goo.gl/KwvDNy. 9 | // This link also includes instructions on opting out of this behavior. 10 | 11 | const isLocalhost = Boolean( 12 | window.location.hostname === 'localhost' || 13 | // [::1] is the IPv6 localhost address. 14 | window.location.hostname === '[::1]' || 15 | // 127.0.0.1/8 is considered localhost for IPv4. 16 | window.location.hostname.match( 17 | /^127(?:\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/ 18 | ) 19 | ); 20 | 21 | export default function register() { 22 | if (process.env.NODE_ENV === 'production' && 'serviceWorker' in navigator) { 23 | // The URL constructor is available in all browsers that support SW. 24 | const publicUrl = new URL(process.env.PUBLIC_URL, window.location.toString()); 25 | if (publicUrl.origin !== window.location.origin) { 26 | // Our service worker won't work if PUBLIC_URL is on a different origin 27 | // from what our page is served on. This might happen if a CDN is used to 28 | // serve assets; see https://github.com/facebookincubator/create-react-app/issues/2374 29 | return; 30 | } 31 | 32 | window.addEventListener('load', () => { 33 | const swUrl = `${process.env.PUBLIC_URL}/service-worker.js`; 34 | 35 | if (isLocalhost) { 36 | // This is running on localhost. Lets check if a service worker still exists or not. 
37 | checkValidServiceWorker(swUrl); 38 | 39 | // Add some additional logging to localhost, pointing developers to the 40 | // service worker/PWA documentation. 41 | navigator.serviceWorker.ready.then(() => { 42 | console.log( 43 | 'This web app is being served cache-first by a service ' + 44 | 'worker. To learn more, visit https://goo.gl/SC7cgQ' 45 | ); 46 | }); 47 | } else { 48 | // Is not local host. Just register service worker 49 | registerValidSW(swUrl); 50 | } 51 | }); 52 | } 53 | } 54 | 55 | function registerValidSW(swUrl: string) { 56 | navigator.serviceWorker 57 | .register(swUrl) 58 | .then(registration => { 59 | registration.onupdatefound = () => { 60 | const installingWorker = registration.installing; 61 | if (installingWorker) { 62 | installingWorker.onstatechange = () => { 63 | if (installingWorker.state === 'installed') { 64 | if (navigator.serviceWorker.controller) { 65 | // At this point, the old content will have been purged and 66 | // the fresh content will have been added to the cache. 67 | // It's the perfect time to display a "New content is 68 | // available; please refresh." message in your web app. 69 | console.log('New content is available; please refresh.'); 70 | } else { 71 | // At this point, everything has been precached. 72 | // It's the perfect time to display a 73 | // "Content is cached for offline use." message. 74 | console.log('Content is cached for offline use.'); 75 | } 76 | } 77 | }; 78 | } 79 | }; 80 | }) 81 | .catch(error => { 82 | console.error('Error during service worker registration:', error); 83 | }); 84 | } 85 | 86 | function checkValidServiceWorker(swUrl: string) { 87 | // Check if the service worker can be found. If it can't reload the page. 88 | fetch(swUrl) 89 | .then(response => { 90 | // Ensure service worker exists, and that we really are getting a JS file. 91 | if ( 92 | response.status === 404 || 93 | response.headers.get('content-type')!.indexOf('javascript') === -1 94 | ) { 95 | // No service worker found. Probably a different app. Reload the page. 96 | navigator.serviceWorker.ready.then(registration => { 97 | registration.unregister().then(() => { 98 | window.location.reload(); 99 | }); 100 | }); 101 | } else { 102 | // Service worker found. Proceed as normal. 103 | registerValidSW(swUrl); 104 | } 105 | }) 106 | .catch(() => { 107 | console.log( 108 | 'No internet connection found. App is running in offline mode.' 
109 | ); 110 | }); 111 | } 112 | 113 | export function unregister() { 114 | if ('serviceWorker' in navigator) { 115 | navigator.serviceWorker.ready.then(registration => { 116 | registration.unregister(); 117 | }); 118 | } 119 | } 120 | -------------------------------------------------------------------------------- /lambdas/api_get_quicksight_url/app.js: -------------------------------------------------------------------------------- 1 | var express = require('express') 2 | var bodyParser = require('body-parser') 3 | var awsServerlessExpressMiddleware = require('aws-serverless-express/middleware') 4 | 5 | var AWS = require('aws-sdk'); 6 | var AmazonCognitoIdentity = require('amazon-cognito-identity-js'); 7 | const https = require('https'); 8 | 9 | // declare a new express app 10 | var app = express() 11 | app.use(bodyParser.json()) 12 | app.use(awsServerlessExpressMiddleware.eventContext()) 13 | 14 | // Enable CORS for all methods 15 | app.use(function (req, res, next) { 16 | res.header("Access-Control-Allow-Origin", "*") 17 | res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept") 18 | next() 19 | }); 20 | 21 | var ROLE_ARN = process.env.ROLE_ARN; 22 | var IDENTITY_POOL_ID = process.env.IDENTITY_POOL_ID; 23 | var USER_POOL_ID = process.env.USER_POOL_ID; 24 | var ACCOUNT_ID = process.env.ACCOUNT_ID; 25 | var DASHBOARD_ID = process.env.DASHBOARD_ID; 26 | var REGION = process.env.REGION; 27 | 28 | app.get('/report', function (req, res) { 29 | 30 | var roleArn = ROLE_ARN; 31 | var cognitoUrl = 'cognito-idp.' + REGION + '.amazonaws.com/' + USER_POOL_ID; 32 | 33 | AWS.config.region = REGION; 34 | 35 | var sessionName = req.query.payloadSub; 36 | var cognitoIdentity = new AWS.CognitoIdentity(); 37 | var stsClient = new AWS.STS(); 38 | var params = { 39 | IdentityPoolId: IDENTITY_POOL_ID, 40 | Logins: { 41 | [cognitoUrl]: req.query.jwtToken 42 | } 43 | }; 44 | 45 | cognitoIdentity.getId(params, function (err, data) { 46 | if (err) { 47 | console.log(err, err.stack); 48 | } 49 | else { 50 | data.Logins = { 51 | [cognitoUrl]: req.query.jwtToken 52 | }; 53 | 54 | cognitoIdentity.getOpenIdToken(data, function (err, openIdToken) { 55 | if (err) { 56 | console.log(err, err.stack); 57 | res.json({ 58 | err 59 | }) 60 | } 61 | else { 62 | let stsParams = { 63 | RoleSessionName: sessionName, 64 | WebIdentityToken: openIdToken.Token, 65 | RoleArn: roleArn 66 | } 67 | stsClient.assumeRoleWithWebIdentity(stsParams, function (err, data) { 68 | if (err) { 69 | console.log(err, err.stack); 70 | res.json({ 71 | err 72 | }) 73 | } 74 | else { 75 | AWS.config.update({ 76 | region: REGION, 77 | credentials: { 78 | accessKeyId: data.Credentials.AccessKeyId, 79 | secretAccessKey: data.Credentials.SecretAccessKey, 80 | sessionToken: data.Credentials.SessionToken, 81 | expiration: data.Credentials.Expiration 82 | } 83 | }); 84 | var registerUserParams = { 85 | AwsAccountId: ACCOUNT_ID, 86 | Email: req.query.email, 87 | IdentityType: 'IAM', 88 | Namespace: 'default', 89 | UserRole: 'READER', 90 | IamArn: roleArn, 91 | SessionName: sessionName 92 | }; 93 | var qsClient = new AWS.QuickSight(); 94 | qsClient.registerUser(registerUserParams, function (err, data) { 95 | if (err) { 96 | console.log(err, err.stack); 97 | if (err.code && err.code === 'ResourceExistsException') { 98 | var getDashboardParams = { 99 | AwsAccountId: ACCOUNT_ID, 100 | DashboardId: DASHBOARD_ID, 101 | IdentityType: 'IAM', 102 | ResetDisabled: false, 103 | SessionLifetimeInMinutes: 100, 104 | UndoRedoDisabled: 
false 105 | }; 106 | 107 | qsClient.getDashboardEmbedUrl(getDashboardParams, function (err, data) { 108 | if (err) { 109 | console.log(err, err.stack); 110 | res.json({ 111 | err 112 | }) 113 | } 114 | else { 115 | console.log(data); 116 | res.json({ 117 | data 118 | }) 119 | } 120 | }); 121 | } 122 | else { 123 | res.json({ 124 | err 125 | }) 126 | } 127 | } 128 | else { 129 | setTimeout(function () { 130 | var getDashboardParams = { 131 | AwsAccountId: ACCOUNT_ID, 132 | DashboardId: DASHBOARD_ID, 133 | IdentityType: 'IAM', 134 | ResetDisabled: false, 135 | SessionLifetimeInMinutes: 100, 136 | UndoRedoDisabled: false 137 | }; 138 | 139 | qsClient.getDashboardEmbedUrl(getDashboardParams, function (err, data) { 140 | if (err) { 141 | console.log(err, err.stack); 142 | res.json({ 143 | err 144 | }) 145 | } else { 146 | console.log(data); 147 | res.json({ 148 | data 149 | }) 150 | } 151 | }); 152 | 153 | }, 2000); 154 | 155 | } 156 | }); 157 | 158 | } 159 | }); 160 | } 161 | }); 162 | } 163 | }); 164 | 165 | }); 166 | 167 | app.listen(3000, function() { 168 | console.log("App started") 169 | }); 170 | 171 | // Export the app object. When executing the application local this does nothing. However, 172 | // to port it to AWS Lambda we will create a wrapper around that will load the app from 173 | // this file 174 | module.exports = app 175 | -------------------------------------------------------------------------------- /template.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Transform: 'AWS::Serverless-2016-10-31' 3 | Description: Template to create AWS BikeNow demo 4 | Parameters: 5 | EnvironmentName: 6 | Description: Environment stage name 7 | Type: String 8 | Default: demo 9 | BikeStationStatusUrl: 10 | Description: URL for retrieving Citi Bike station data 11 | Type: String 12 | Default: https://gbfs.citibikenyc.com/gbfs/en/station_status.json 13 | BikeStationDetailUrl: 14 | Description: URL for retrieving Citi Bike station meta data 15 | Type: String 16 | Default: https://gbfs.citibikenyc.com/gbfs/en/station_information.json 17 | StationStatusTable: 18 | Description: DynamoDB table name where we store bike station status 19 | Type: String 20 | Default: station_status 21 | StationDetailTable: 22 | Description: DynamoDB table name where we store bike station details 23 | Type: String 24 | Default: station_detail 25 | ElasticsearchDomainName: 26 | Description: Amazon Elasticsearch domain name 27 | Type: String 28 | Default: bikenow 29 | GlueDatabaseName: 30 | Description: Name of AWS Glue database 31 | Type: String 32 | Default: bikenow_db 33 | RedshiftDbName: 34 | Description: Name of Redshift database 35 | Type: String 36 | Default: bikenowdw 37 | AuroraDbName: 38 | Description: Name of Amazon Aurora database 39 | Type: String 40 | Default: bikenowrds 41 | 42 | Resources: 43 | # -------------------------------- INITIALIZE AND COPY ARTIFACTS 44 | ApplicationInitialize: 45 | Type: AWS::Serverless::Application 46 | Properties: 47 | Location: templates/setup-template.yaml 48 | 49 | # -------------------------------- NETWORK STACK 50 | ApplicationNetwork: 51 | Type: AWS::Serverless::Application 52 | Properties: 53 | Location: templates/network-template.yaml 54 | 55 | # -------------------------------- DATABASE STACK 56 | ApplicationDatabase: 57 | Type: AWS::Serverless::Application 58 | Properties: 59 | Location: templates/database-template.yaml 60 | Parameters: 61 | EnvironmentName: !Ref EnvironmentName 62 | 
BikeStationStatusUrl: !Ref BikeStationStatusUrl 63 | BikeStationDetailUrl: !Ref BikeStationDetailUrl 64 | StationStatusTable: !Ref StationStatusTable 65 | StationDetailTable: !Ref StationDetailTable 66 | ElasticsearchDomainName: !Ref ElasticsearchDomainName 67 | VpcId: !GetAtt ApplicationNetwork.Outputs.VPC 68 | SubnetsPrivate: !GetAtt ApplicationNetwork.Outputs.SubnetsPrivate 69 | AuroraDbName: !Ref AuroraDbName 70 | 71 | # -------------------------------- ANALYTICS STACK 72 | ApplicationAnalytics: 73 | Type: AWS::Serverless::Application 74 | Properties: 75 | Location: templates/analytics-template.yaml 76 | Parameters: 77 | GlueDatabaseName: !Ref GlueDatabaseName 78 | StationStatusStreamArn: !GetAtt ApplicationDatabase.Outputs.StreamTableStationStatusArn 79 | StationDetailStreamArn: !GetAtt ApplicationDatabase.Outputs.StreamTableStationDetailArn 80 | StationReviewStreamArn: !GetAtt ApplicationDatabase.Outputs.StreamTableReviewArn 81 | RedshiftVPC: !GetAtt ApplicationNetwork.Outputs.VPC 82 | RedshiftSubnets: !GetAtt ApplicationNetwork.Outputs.SubnetsPrivate 83 | RedshiftAZ: !GetAtt ApplicationNetwork.Outputs.AZA 84 | RedshiftDbName: !Ref RedshiftDbName 85 | ArtifactsBucket: !GetAtt ApplicationInitialize.Outputs.ArtifactsBucket 86 | 87 | # -------------------------------- AI/ML STACK 88 | ApplicationAIML: 89 | Type: AWS::Serverless::Application 90 | Properties: 91 | Location: templates/aiml-template.yaml 92 | Parameters: 93 | ArtifactsBucket: !GetAtt ApplicationInitialize.Outputs.ArtifactsBucket 94 | DataLakeS3Bucket: !GetAtt ApplicationAnalytics.Outputs.DataLakeS3Bucket 95 | GlueEndpointName: !GetAtt ApplicationAnalytics.Outputs.GlueEndpointName 96 | EnvironmentName: !Ref EnvironmentName 97 | 98 | # -------------------------------- CODE PIPELINE STACK 99 | ApplicationCodeBuild: 100 | Type: AWS::Serverless::Application 101 | Properties: 102 | Location: templates/codebuild-template.yaml 103 | Parameters: 104 | EnvironmentName: !Ref EnvironmentName 105 | S3BucketWeb: !GetAtt ApplicationDatabase.Outputs.S3BucketWeb 106 | ApiDatabase: !GetAtt ApplicationDatabase.Outputs.ApiGatewayWebId 107 | ApiAiml: !GetAtt ApplicationAIML.Outputs.ApiGatewayModelId 108 | CognitoUserPool: !GetAtt ApplicationDatabase.Outputs.CognitoUserPoolId 109 | CognitoUserPoolClient: !GetAtt ApplicationDatabase.Outputs.CognitoAppClientId 110 | CognityIdentityPool: !GetAtt ApplicationDatabase.Outputs.CognityIdentityPoolId 111 | WebsiteCDN: !GetAtt ApplicationDatabase.Outputs.WebsiteCDN 112 | 113 | Outputs: 114 | BikenowApiUrl: 115 | Value: !Sub 'https://${ApplicationDatabase.Outputs.ApiGatewayWebId}.execute-api.${AWS::Region}.amazonaws.com/${EnvironmentName}' 116 | Description: URL of BikeNow purpose-built database demo API Gateway 117 | AimlApiUrl: 118 | Value: !Sub 'https://${ApplicationAIML.Outputs.ApiGatewayModelId}.execute-api.${AWS::Region}.amazonaws.com/${EnvironmentName}' 119 | Description: URL of BikeNow AI/ML demo API Gateway 120 | CognitoUserPoolId: 121 | Value: !GetAtt ApplicationDatabase.Outputs.CognitoUserPoolId 122 | Description: Cognito user pool ID 123 | CognitoAppClientId: 124 | Value: !GetAtt ApplicationDatabase.Outputs.CognitoAppClientId 125 | Description: Cognito application client ID 126 | CognityIdentityPoolId: 127 | Value: !GetAtt ApplicationDatabase.Outputs.CognityIdentityPoolId 128 | Description: Cognity identity pool ID 129 | WebsiteURL: 130 | Value: !GetAtt ApplicationDatabase.Outputs.WebsiteURL 131 | Description: The URL for the web application 132 | PrivateSubnet: 133 | Value: !GetAtt 
ApplicationNetwork.Outputs.SubnetAPrivate 134 | Description: Private subnet 135 | RedshiftSecurityGroup: 136 | Value: !GetAtt ApplicationAnalytics.Outputs.RedshiftSecurityGroup 137 | Description: Security Group for Redshift cluster 138 | DbSecurityGroup: 139 | Value: !GetAtt ApplicationDatabase.Outputs.DbSecurityGroup 140 | Description: Security Group for Aurora database -------------------------------------------------------------------------------- /assets/src/modules/signup/Signup.tsx: -------------------------------------------------------------------------------- 1 | import Auth from "@aws-amplify/auth"; 2 | import React from "react"; 3 | import { Redirect } from 'react-router'; 4 | import { Form, FormGroup, FormControl, FormLabel, Button, Spinner, FormControlProps } from "react-bootstrap"; 5 | import "./signup.css"; 6 | import "./home.css"; 7 | import { ISignUpResult } from "amazon-cognito-identity-js"; 8 | 9 | const emailRegex = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/; 10 | 11 | interface SignupProps { 12 | isAuthenticated: boolean; 13 | userHasAuthenticated: (authenticated: boolean) => void; 14 | } 15 | 16 | interface SignupState { 17 | loading: boolean; 18 | email: string; 19 | password: string; 20 | confirmPassword: string; 21 | confirmationCode: string; 22 | user: any; 23 | redirect: boolean; 24 | validated: boolean; 25 | } 26 | 27 | export default class Signup extends React.Component { 28 | constructor(props: SignupProps) { 29 | super(props); 30 | 31 | this.state = { 32 | loading: false, 33 | email: "", 34 | password: "", 35 | confirmPassword: "", 36 | confirmationCode: "", 37 | user: undefined, 38 | redirect: false, 39 | validated: false, 40 | }; 41 | } 42 | 43 | onChange = (event: React.ChangeEvent) => { 44 | const target = event.target as HTMLInputElement; 45 | this.setState({ ...this.state, [target.name]: target.value }); 46 | } 47 | 48 | onSignup = (event: React.FormEvent) => { 49 | event.preventDefault() 50 | 51 | if (event.currentTarget.checkValidity() === false) { 52 | event.stopPropagation(); 53 | } else { 54 | this.setState({ loading: true, validated: true }); 55 | 56 | Auth.signUp({ 57 | username: this.state.email, 58 | password: this.state.password 59 | }).then((value: ISignUpResult) => { 60 | this.setState({ user: value.user, loading: false }); 61 | }).catch((e: any) => { 62 | alert(e.message); 63 | this.setState({ loading: false }); 64 | }); 65 | } 66 | this.setState({ validated: true }); 67 | 68 | } 69 | 70 | onConfirm = async (event: React.FormEvent) => { 71 | event.preventDefault(); 72 | this.setState({ loading: true }); 73 | 74 | try { 75 | await Auth.confirmSignUp(this.state.email, this.state.confirmationCode); 76 | await Auth.signIn(this.state.email, this.state.password); 77 | this.props.userHasAuthenticated(true); 78 | this.setState({ redirect: true }) 79 | } catch (e) { 80 | alert(e.message); 81 | this.setState({ loading: false }); 82 | } 83 | } 84 | 85 | validateSignupForm = () => { 86 | return emailRegex.test(this.state.email.toLowerCase()) && 87 | this.state.confirmPassword.length >= 8 && 88 | this.state.password === this.state.confirmPassword 89 | } 90 | 91 | validateConfirmForm = () => { 92 | return this.state.confirmationCode; 93 | } 94 | 95 | showConfirmationForm = () => { 96 | if (this.state.redirect) return 97 | const { confirmationCode } = this.state; 98 | 99 | return ( 100 |
101 | 102 | Confirmation code 103 | this.onChange(event as any)} 108 | minLength={1} 109 | required /> 110 | 111 | 112 | A confirmation code will be sent to the email address provided 113 | 114 | 115 | 120 |
121 | ); 122 | } 123 | 124 | showSignupForm = () => { 125 | const { email, password, confirmPassword, validated } = this.state; 126 | return ( 127 |
) => this.onSignup(e)}> 128 | 129 | Email 130 | 138 | Must be a valid email address 139 | 140 | 141 | Password 142 | = 8} 149 | required /> 150 | Must be at least 8 characters 151 | 152 | Must be at least 8 characters 153 | 154 | 155 | 156 | Confirm Password 157 | = 8 && password === confirmPassword} 164 | required /> 165 | Passwords must be identical 166 | 167 | 172 |
173 | ); 174 | } 175 | 176 | render() { 177 | return ( 178 |
179 | {this.state.user === undefined ? this.showSignupForm() : this.showConfirmationForm()} 180 |
181 | ); 182 | } 183 | } -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # AWS BikeNow Demo 2 | AWS BikeNow Demo is a sample web application that demonstrates the breadth and depth of database, analytics, and AI/ML services on AWS. The application was built on top of the [AWS Full-Stack Template](https://github.com/awslabs/aws-full-stack-template). 3 | 4 | ## Overview 5 | The goal of the AWS BikeNow Demo is to help users understand the breadth and depth of the AWS database and analytics portfolio. AWS offers the broadest and deepest portfolio of purpose-built, fully managed database services as well as the most comprehensive, secure, scalable, and cost-effective portfolio of analytics services. With AWS BikeNow Demo, developers can use AWS database and analytics services to manage data through the entirety of its lifecycle, from ingestion, storage, feature engineering, visualization, and analysis in support of data-driven innovation. 6 | 7 | ## Instructions 8 | The AWS BikeNow Demo is only available in the following AWS Regions: us-east-1 (N. Virginia) and us-west-2 (Oregon). 9 | 10 | ### Pre-requisites 11 | * [Create and Activate AWS Account](https://aws.amazon.com/premiumsupport/knowledge-center/create-and-activate-aws-account/) 12 | * [See the Troubleshooting section for common issues](#troubleshooting) 13 | 14 | **IMPORTANT NOTE:** Creating this demo application in your AWS account will create and consume AWS resources, which will cost money. Be sure to shut down and remove all resources once you are finished to avoid on-going charges to your AWS account (see [instructions on cleaning up](#cleaning-up)). 15 | 16 | ### Getting Started 17 | To get AWS BikeNow Demo running in your AWS account, follow these steps: 18 | 1. Log into the [AWS console](https://console.aws.amazon.com/). 19 | 2. Choose one of the **Launch Stack** buttons below for your desired AWS Region. 20 | 21 | Region name | Region code | Launch 22 | --- | --- | --- 23 | US East (N. Virginia) | us-east-1 | [![Launch Stack](https://cdn.rawgit.com/buildkite/cloudformation-launch-stack-button-svg/master/launch-stack.svg)](https://console.aws.amazon.com/cloudformation/home?region=us-east-1#/stacks/new?stackName=BikenowDemo&templateURL=https://s3.amazonaws.com/aws-bikenow-demo-us-east-1/master.yaml) 24 | US West (Oregon) | us-west-2 | [![Launch Stack](https://cdn.rawgit.com/buildkite/cloudformation-launch-stack-button-svg/master/launch-stack.svg)](https://console.aws.amazon.com/cloudformation/home?region=us-west-2#/stacks/new?stackName=BikenowDemo&templateURL=https://s3.amazonaws.com/aws-bikenow-demo-us-west-2/master.yaml) 25 | 26 | 3. From the Cloudformation Create stack page, choose **Next**. 27 | 4. You can use the default stack name and parameter values. Choose **Next**. 28 | 5. You can use the default stack options and advanced options. Choose **Next**. 29 | 6. From the Review page, select: 30 | * *I acknowledge that AWS CloudFormation might create IAM resources.* 31 | * *I acknowledge that AWS CloudFormation might create IAM resources with custom names.* 32 | * *I acknowledge that AWS CloudFormation might require the following capability: CAPABILITY_AUTO_EXPAND* 33 | 7. Choose **Create stack**. This step will take ~20-30 minutes to complete. 34 | 8. Once the CloudFormation template is finished deploying, sign into your application. 35 | 1. 
The output of the CloudFormation stack creation will provide a CloudFront URL (in the *Outputs* section of your stack details page). Copy and paste the CloudFront URL into your browser. 36 | 2. You can sign into your application by registering an email address and a password. Choose **Sign up to explore the demo** to register. The registration/login experience is run in your AWS account, and the supplied credentials are stored in Amazon Cognito. 37 | *Note: given that this is a demo application, we highly suggest that you do not use an email and password combination that you use for other purposes (such as an AWS account, email, or e-commerce site).* 38 | 3. Once you provide your credentials, you will receive a verification code at the email address you provided. Upon entering this verification code, you will be signed into the application. 39 | 40 | ### Embed a QuickSight Dashboard (Optional) 41 | 42 | Follow the [QuickSight tutorial](quicksight/) to embed a QuickSight dashboard in the application. 43 | 44 | ### Cleaning up 45 | 46 | To tear down your application and remove all resources associated with the AWS BikeNow Demo, follow these steps: 47 | 48 | 1. Log into the AWS CloudFormation Console and find the stack you created for the demo app. 49 | 2. Delete the stack. 50 | 1. Double-check that the S3 buckets created for the stack were successfully removed. 51 | 52 | *Remember to shut down/remove all related resources once you are finished to avoid ongoing charges to your AWS account.* 53 | 54 | ## Troubleshooting 55 | 56 | #### 1. Before you can proceed, you must enable a service-linked role to give Amazon ES permissions to access your VPC. 57 | 58 | Amazon Elasticsearch Service requires a service-linked role to access your VPC and create network interfaces. Create a service-linked role by running the following AWS CLI command: 59 | 60 | ``` 61 | aws iam create-service-linked-role --aws-service-name es.amazonaws.com 62 | ``` 63 | 64 | ## Architecture 65 | 66 | ### Purpose-Built Databases 67 | 68 | Web application assets are stored in an S3 bucket and presented to the user via a CloudFront distribution. Amazon Cognito is used for authentication, and API Gateway and AWS Lambda are used to read and write to backend databases. The core backend databases are Amazon Aurora, Amazon DynamoDB, and Amazon Elasticsearch Service. 69 | 70 | ![Purpose-Built Databases Diagram](images/01_PurposeBuiltDbDiagram.png) 71 | 72 | DynamoDB provides scalable, low-latency data access for maintaining the status of bike stations. DynamoDB is also used to store review data when a user reads or writes feedback about a bike station. The flexible schema allows us to add features and improve our application with greater agility. As the status of bike stations is updated in DynamoDB, updates are replicated to Amazon Elasticsearch Service. Elasticsearch provides search functionality for users to query bike stations using boolean search operators. When users rent bikes from a station, relational transaction data is stored in Amazon Aurora. 73 | 74 | ### Data Lake & Analytics 75 | 76 | As data is updated in the web application, updates are streamed to an S3 bucket via Amazon Kinesis Data Firehose. AWS Glue Data Catalog stores metadata about the data in S3 and can be used to query the data using other AWS services, including Amazon Athena, Amazon Redshift Spectrum, and Amazon EMR.
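For example, once the data is in S3 and cataloged by AWS Glue, you can query it from Athena with a few lines of Python. The sketch below is illustrative only: it assumes the stack's default Glue database name (`bikenow_db`), a hypothetical `station_status` catalog table created by a crawler, and an S3 output location that you own, so adjust all three to match your deployment.

```
import time

import boto3

# Assumptions (adjust for your deployment): 'bikenow_db' is the template's
# default Glue database name, 'station_status' is a hypothetical catalog
# table created by a crawler, and the output location is a bucket you own.
DATABASE = 'bikenow_db'
OUTPUT_LOCATION = 's3://<your-athena-results-bucket>/queries/'

athena = boto3.client('athena')

# Athena resolves the table's schema and S3 location from the Glue Data
# Catalog, so no separate data loading step is required.
query_id = athena.start_query_execution(
    QueryString='SELECT station_id, num_bikes_available, last_reported '
                'FROM station_status LIMIT 10',
    QueryExecutionContext={'Database': DATABASE},
    ResultConfiguration={'OutputLocation': OUTPUT_LOCATION}
)['QueryExecutionId']

# Poll until the query finishes, then print the rows (header row included).
state = 'RUNNING'
while state in ('QUEUED', 'RUNNING'):
    time.sleep(1)
    state = athena.get_query_execution(QueryExecutionId=query_id)['QueryExecution']['Status']['State']

if state == 'SUCCEEDED':
    rows = athena.get_query_results(QueryExecutionId=query_id)['ResultSet']['Rows']
    for row in rows:
        print([col.get('VarCharValue') for col in row['Data']])
else:
    print('Query finished with state: {}'.format(state))
```

Because Redshift Spectrum and Amazon EMR can read the same Glue Data Catalog entries, the table definition only needs to exist once for all three query engines.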
77 | 78 | ![Data Lake & Analytics Diagram](images/02_DLAnalyticsDiagram.png) 79 | 80 | Amazon Kinesis Data Firehose provides an easy way to stream data into our data lake on S3. Kinesis Firehose also batches and partitions the data before loading it to S3 so that it can be used effectively by AWS Glue, Athena, and Redshift Spectrum. An AWS Glue workflow is used to orchestrate the execution of crawlers and ETL jobs. Crawlers are used to update the Data Catalog, and jobs perform the business logic to extract, transform, and load data to Amazon Redshift for fast, complex data analysis queries. Amazon QuickSight is used to create and publish interactive dashboards using data about bike stations from Redshift. Amazon QuickSight ML Insights leverages machine learning to provide forecasting, anomaly detection, and natural language narratives. 81 | 82 | ### Machine Learning 83 | 84 | As data is accumulated in the data lake, Amazon SageMaker is used to build, train, and deploy machine learning models using historical bike station data. 85 | 86 | ![AI/ML Diagram](images/03_AIMLDiagram.png) 87 | 88 | Amazon SageMaker Notebooks are used to perform analysis on the data in S3 and then build and train ML models. SageMaker also launches the ML instance and deploys the model with a secure endpoint. When a user plans their ride using the web application, API Gateway and AWS Lambda call the model endpoint to generate predictions for the number of bikes available at each station. 89 | 90 | ### Developer Tools 91 | 92 | The code is hosted in AWS CodeCommit. AWS CodePipeline builds the web application using AWS CodeBuild. After successfully building, CodeBuild copies the build artifacts into an S3 bucket where the web application assets are maintained. Along with uploading to Amazon S3, CodeBuild invalidates the cache so users always see the latest experience when accessing the web application through the Amazon CloudFront distribution. AWS CodeCommit, AWS CodePipeline, and AWS CodeBuild are used in the deployment and update processes only, not while the application is in a steady-state of use. 93 | 94 | ## Conclusion 95 | 96 | AWS offers the broadest and deepest portfolio of purpose-built, fully managed database and analytics services. AWS BikeNow Demo showcases how AWS enables us to build data-driven applications and turn data into insights.
97 | 98 | For more information on AWS database and analytics services, see: 99 | * [Databases on AWS](https://aws.amazon.com/products/databases/) 100 | * [Data Lakes and Analytics on AWS](https://aws.amazon.com/big-data/datalakes-and-analytics/) 101 | -------------------------------------------------------------------------------- /templates/network-template.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Transform: 'AWS::Serverless-2016-10-31' 3 | Description: Template to create BikeNow VPC and subnets 4 | Resources: 5 | VPC: 6 | Type: 'AWS::EC2::VPC' 7 | Properties: 8 | CidrBlock: '10.71.0.0/16' 9 | EnableDnsSupport: true 10 | EnableDnsHostnames: true 11 | InstanceTenancy: default 12 | Tags: 13 | - Key: Name 14 | Value: 'BikeNow VPC' 15 | InternetGateway: 16 | Type: 'AWS::EC2::InternetGateway' 17 | Properties: 18 | Tags: 19 | - Key: Name 20 | Value: 'BikeNow IGW' 21 | VPCGatewayAttachment: 22 | Type: 'AWS::EC2::VPCGatewayAttachment' 23 | Properties: 24 | VpcId: !Ref VPC 25 | InternetGatewayId: !Ref InternetGateway 26 | SubnetAPublic: 27 | Type: 'AWS::EC2::Subnet' 28 | Properties: 29 | AvailabilityZone: !Select [0, !GetAZs ''] 30 | CidrBlock: '10.71.0.0/20' 31 | MapPublicIpOnLaunch: true 32 | VpcId: !Ref VPC 33 | Tags: 34 | - Key: Name 35 | Value: 'BikeNow - Public A' 36 | - Key: Reach 37 | Value: public 38 | SubnetAPrivate: 39 | Type: 'AWS::EC2::Subnet' 40 | Properties: 41 | AvailabilityZone: !Select [0, !GetAZs ''] 42 | CidrBlock: '10.71.16.0/20' 43 | VpcId: !Ref VPC 44 | Tags: 45 | - Key: Name 46 | Value: 'BikeNow - Private A' 47 | - Key: Reach 48 | Value: private 49 | SubnetBPublic: 50 | Type: 'AWS::EC2::Subnet' 51 | Properties: 52 | AvailabilityZone: !Select [1, !GetAZs ''] 53 | CidrBlock: '10.71.32.0/20' 54 | MapPublicIpOnLaunch: true 55 | VpcId: !Ref VPC 56 | Tags: 57 | - Key: Name 58 | Value: 'BikeNow - Public B' 59 | - Key: Reach 60 | Value: public 61 | SubnetBPrivate: 62 | Type: 'AWS::EC2::Subnet' 63 | Properties: 64 | AvailabilityZone: !Select [1, !GetAZs ''] 65 | CidrBlock: '10.71.48.0/20' 66 | VpcId: !Ref VPC 67 | Tags: 68 | - Key: Name 69 | Value: 'BikeNow - Private B' 70 | - Key: Reach 71 | Value: private 72 | RouteTableAPublic: 73 | Type: 'AWS::EC2::RouteTable' 74 | Properties: 75 | VpcId: !Ref VPC 76 | Tags: 77 | - Key: Name 78 | Value: 'BikeNow - Public route A' 79 | RouteTableAPrivate: 80 | Type: 'AWS::EC2::RouteTable' 81 | Properties: 82 | VpcId: !Ref VPC 83 | Tags: 84 | - Key: Name 85 | Value: 'BikeNow - Private route A' 86 | RouteTableBPublic: 87 | Type: 'AWS::EC2::RouteTable' 88 | Properties: 89 | VpcId: !Ref VPC 90 | Tags: 91 | - Key: Name 92 | Value: 'BikeNow - Public route B' 93 | RouteTableBPrivate: 94 | Type: 'AWS::EC2::RouteTable' 95 | Properties: 96 | VpcId: !Ref VPC 97 | Tags: 98 | - Key: Name 99 | Value: 'BikeNow - Private route B' 100 | RouteTableAssociationAPublic: 101 | Type: 'AWS::EC2::SubnetRouteTableAssociation' 102 | Properties: 103 | SubnetId: !Ref SubnetAPublic 104 | RouteTableId: !Ref RouteTableAPublic 105 | RouteTableAssociationAPrivate: 106 | Type: 'AWS::EC2::SubnetRouteTableAssociation' 107 | Properties: 108 | SubnetId: !Ref SubnetAPrivate 109 | RouteTableId: !Ref RouteTableAPrivate 110 | RouteTableAssociationBPublic: 111 | Type: 'AWS::EC2::SubnetRouteTableAssociation' 112 | Properties: 113 | SubnetId: !Ref SubnetBPublic 114 | RouteTableId: !Ref RouteTableBPublic 115 | RouteTableAssociationBPrivate: 116 | Type: 'AWS::EC2::SubnetRouteTableAssociation' 117 | 
Properties: 118 | SubnetId: !Ref SubnetBPrivate 119 | RouteTableId: !Ref RouteTableBPrivate 120 | RouteTableAPublicInternetRoute: 121 | Type: 'AWS::EC2::Route' 122 | DependsOn: VPCGatewayAttachment 123 | Properties: 124 | RouteTableId: !Ref RouteTableAPublic 125 | DestinationCidrBlock: '0.0.0.0/0' 126 | GatewayId: !Ref InternetGateway 127 | RouteTableBPublicInternetRoute: 128 | Type: 'AWS::EC2::Route' 129 | DependsOn: VPCGatewayAttachment 130 | Properties: 131 | RouteTableId: !Ref RouteTableBPublic 132 | DestinationCidrBlock: '0.0.0.0/0' 133 | GatewayId: !Ref InternetGateway 134 | NetworkAclPublic: 135 | Type: 'AWS::EC2::NetworkAcl' 136 | Properties: 137 | VpcId: !Ref VPC 138 | Tags: 139 | - Key: Name 140 | Value: BikeNow - Public NACL 141 | NetworkAclPrivate: 142 | Type: 'AWS::EC2::NetworkAcl' 143 | Properties: 144 | VpcId: !Ref VPC 145 | Tags: 146 | - Key: Name 147 | Value: BikeNow - Private NACL 148 | SubnetNetworkAclAssociationAPublic: 149 | Type: 'AWS::EC2::SubnetNetworkAclAssociation' 150 | Properties: 151 | SubnetId: !Ref SubnetAPublic 152 | NetworkAclId: !Ref NetworkAclPublic 153 | SubnetNetworkAclAssociationAPrivate: 154 | Type: 'AWS::EC2::SubnetNetworkAclAssociation' 155 | Properties: 156 | SubnetId: !Ref SubnetAPrivate 157 | NetworkAclId: !Ref NetworkAclPrivate 158 | SubnetNetworkAclAssociationBPublic: 159 | Type: 'AWS::EC2::SubnetNetworkAclAssociation' 160 | Properties: 161 | SubnetId: !Ref SubnetBPublic 162 | NetworkAclId: !Ref NetworkAclPublic 163 | SubnetNetworkAclAssociationBPrivate: 164 | Type: 'AWS::EC2::SubnetNetworkAclAssociation' 165 | Properties: 166 | SubnetId: !Ref SubnetBPrivate 167 | NetworkAclId: !Ref NetworkAclPrivate 168 | NetworkAclEntryInPublicAllowAll: 169 | Type: 'AWS::EC2::NetworkAclEntry' 170 | Properties: 171 | NetworkAclId: !Ref NetworkAclPublic 172 | RuleNumber: 99 173 | Protocol: -1 174 | RuleAction: allow 175 | Egress: false 176 | CidrBlock: '0.0.0.0/0' 177 | NetworkAclEntryOutPublicAllowAll: 178 | Type: 'AWS::EC2::NetworkAclEntry' 179 | Properties: 180 | NetworkAclId: !Ref NetworkAclPublic 181 | RuleNumber: 99 182 | Protocol: -1 183 | RuleAction: allow 184 | Egress: true 185 | CidrBlock: '0.0.0.0/0' 186 | NetworkAclEntryInPrivateAllowVPC: 187 | Type: 'AWS::EC2::NetworkAclEntry' 188 | Properties: 189 | NetworkAclId: !Ref NetworkAclPrivate 190 | RuleNumber: 99 191 | Protocol: -1 192 | RuleAction: allow 193 | Egress: false 194 | CidrBlock: '0.0.0.0/0' 195 | NetworkAclEntryOutPrivateAllowVPC: 196 | Type: 'AWS::EC2::NetworkAclEntry' 197 | Properties: 198 | NetworkAclId: !Ref NetworkAclPrivate 199 | RuleNumber: 99 200 | Protocol: -1 201 | RuleAction: allow 202 | Egress: true 203 | CidrBlock: '0.0.0.0/0' 204 | NAT: 205 | DependsOn: VPCGatewayAttachment 206 | Type: AWS::EC2::NatGateway 207 | Properties: 208 | AllocationId: 209 | Fn::GetAtt: 210 | - EIP 211 | - AllocationId 212 | SubnetId: 213 | Ref: SubnetAPublic 214 | Tags: 215 | - Key: foo 216 | Value: bar 217 | EIP: 218 | Type: AWS::EC2::EIP 219 | Properties: 220 | Domain: !Ref VPC 221 | RouteANAT: 222 | Type: AWS::EC2::Route 223 | Properties: 224 | RouteTableId: 225 | Ref: RouteTableAPrivate 226 | DestinationCidrBlock: 0.0.0.0/0 227 | NatGatewayId: 228 | Ref: NAT 229 | RouteBNAT: 230 | Type: AWS::EC2::Route 231 | Properties: 232 | RouteTableId: 233 | Ref: RouteTableBPrivate 234 | DestinationCidrBlock: 0.0.0.0/0 235 | NatGatewayId: 236 | Ref: NAT 237 | EndpointS3: 238 | Type: AWS::EC2::VPCEndpoint 239 | Properties: 240 | RouteTableIds: 241 | - !Ref RouteTableAPublic 242 | - !Ref RouteTableBPublic 243 | - 
!Ref RouteTableAPrivate 244 | - !Ref RouteTableBPrivate 245 | ServiceName: !Sub 'com.amazonaws.${AWS::Region}.s3' 246 | VpcId: !Ref VPC 247 | Outputs: 248 | StackName: 249 | Description: 'Stack name.' 250 | Value: !Sub '${AWS::StackName}' 251 | AZs: 252 | Description: 'AZs' 253 | Value: 2 254 | AZA: 255 | Description: 'AZ of A' 256 | Value: !Select [0, !GetAZs ''] 257 | AZB: 258 | Description: 'AZ of B' 259 | Value: !Select [1, !GetAZs ''] 260 | CidrBlock: 261 | Description: 'The set of IP addresses for the VPC.' 262 | Value: !GetAtt 'VPC.CidrBlock' 263 | VPC: 264 | Description: 'VPC.' 265 | Value: !Ref VPC 266 | SubnetsPublic: 267 | Description: 'Subnets public.' 268 | Value: !Join [',', [!Ref SubnetAPublic, !Ref SubnetBPublic]] 269 | SubnetsPrivate: 270 | Description: 'Subnets private.' 271 | Value: !Join [',', [!Ref SubnetAPrivate, !Ref SubnetBPrivate]] 272 | RouteTablesPrivate: 273 | Description: 'Route tables private.' 274 | Value: !Join [',', [!Ref RouteTableAPrivate, !Ref RouteTableBPrivate]] 275 | RouteTablesPublic: 276 | Description: 'Route tables public.' 277 | Value: !Join [',', [!Ref RouteTableAPublic, !Ref RouteTableBPublic]] 278 | SubnetAPublic: 279 | Description: 'Subnet A public.' 280 | Value: !Ref SubnetAPublic 281 | RouteTableAPublic: 282 | Description: 'Route table A public.' 283 | Value: !Ref RouteTableAPublic 284 | SubnetAPrivate: 285 | Description: 'Subnet A private.' 286 | Value: !Ref SubnetAPrivate 287 | RouteTableAPrivate: 288 | Description: 'Route table A private.' 289 | Value: !Ref RouteTableAPrivate 290 | SubnetBPublic: 291 | Description: 'Subnet B public.' 292 | Value: !Ref SubnetBPublic 293 | RouteTableBPublic: 294 | Description: 'Route table B public.' 295 | Value: !Ref RouteTableBPublic 296 | SubnetBPrivate: 297 | Description: 'Subnet B private.' 298 | Value: !Ref SubnetBPrivate 299 | RouteTableBPrivate: 300 | Description: 'Route table B private.' 301 | Value: !Ref RouteTableBPrivate 302 | -------------------------------------------------------------------------------- /quicksight/README.md: -------------------------------------------------------------------------------- 1 | # Embed a QuickSight Dashboard 2 | 3 | Follow these steps to create analyses with QuickSight and embed a dashboard in the application. 4 | 5 | ## Create VPC Endpoints 6 | 7 | ### Create a Redshift VPC Endpoint 8 | 9 | 1. Open the [Amazon QuickSight console](https://us-east-1.quicksight.aws.amazon.com/sn/start). 10 | 2. From the upper right-hand corner, choose on the user icon and choose **Manage QuickSight**. 11 | 12 | ![Manage Quicksight](images/001_ManageQuicksight.png) 13 | 14 | 3. From the left-hand navigation panel, choose **Manage VPC connections**. 15 | 4. Choose **Add VPC connection**. 16 | 5. For **VPC connection name**, enter a name for your VPC connection (e.g., `BikeDemoRedshiftVpcConn`) 17 | 6. For **VPC ID**, select the VPC named `Bike demo Redshift VPC`. 18 | 7. For **Subnet ID**, select the `PrivateSubnet` value from the CloudFormation Outputs. 19 | 8. For **Security group ID**, enter the `RedshiftSecurityGroup` value from the CloudFormation Outputs. 20 | 9. Choose **Create**. 21 | 22 | ![Add VPC Connection](images/002_AddRedshiftVpcConnection.png) 23 | 24 | ### Create an Aurora VPC Endpoint 25 | 26 | 1. Return to the [Amazon QuickSight console](https://us-east-1.quicksight.aws.amazon.com/sn/start). 27 | 2. From the upper right-hand corner, choose on the user icon and choose **Manage QuickSight**. 28 | 3. 
From the left-hand navigation panel, choose **Manage VPC connections**. 29 | 4. Choose **Add VPC connection**. 30 | 5. For **VPC connection name**, enter a name for your VPC connection (e.g., `BikeDemoAuroraVpcConn`) 31 | 6. For **VPC ID**, select the VPC named `Bike demo Aurora VPC`. 32 | 7. For **Subnet ID**, select the `PrivateSubnet` value from the CloudFormation Outputs. 33 | 8. For **Security group ID**, enter the `DbSecurityGroup` value from the CloudFormation Outputs. 34 | 9. Choose **Create**. 35 | 36 | ![Add VPC Connection](images/003_AddAuroraVpcConnection.png) 37 | 38 | ## Create data sources 39 | 40 | ### Create a Redshift data source 41 | 42 | 1. Return to the [Amazon QuickSight console](https://us-east-1.quicksight.aws.amazon.com/sn/start). 43 | 2. From the upper right-hand corner, choose **Manage data**. 44 | 45 | ![Manage Data](images/004_ManageData.png) 46 | 47 | 3. Choose **New data set**. 48 | 4. Select **Redshift - Manual connect**. 49 | 5. For **Data source name**, enter a name for your data source (e.g., `Bike Demo Redshift`) 50 | 6. For **Connection type**, select your Redshift VPC connection (i.e., `BikeDemoRedshiftVpcConn`). 51 | 7. For the following parameters, retrieve values from [Secrets Manager](https://console.aws.amazon.com/secretsmanager/home). 52 | 1. **Database server:** *host* 53 | 2. **Port:** *port* 54 | 3. **Database name:** *dbname* 55 | 4. **Username:** *username* 56 | 5. **Password:** *password* 57 | 8. Choose **Create data source**. 58 | 59 | ![New data set](images/005_NewDataSet.png) 60 | 61 | 9. Select the *public* schema and a table and choose **Edit/Preview data** or choose **Use custom SQL** to write your own query. 62 | 63 | ![Choose your table](images/006_ChooseYourTable.png) 64 | 65 | 10. Select *Import to SPICE for quicker analytics* or *Directly query your data* and choose **Edit/Preview data** to preview data. 66 | 11. After you are finished previewing the data, give your data set a name and choose **Save & Visualize**. 67 | 68 | ### Create an Aurora MySQL data source 69 | 70 | 1. Return to the [Amazon QuickSight console](https://us-east-1.quicksight.aws.amazon.com/sn/start). 71 | 2. From the upper right-hand corner, choose **Manage data**. 72 | 3. Choose **New data set**. 73 | 4. Select **Aurora**. 74 | 5. For **Data source name**, enter a name for your data source (e.g., `Bike Demo Aurora`) 75 | 6. For **Connection type**, select your Aurora VPC connection (i.e., `BikeDemoAuroraVpcConn`). 76 | 7. For **Database connector**, select **MySQL**. 77 | 8. For the following parameters, retrieve values from [Secrets Manager](https://console.aws.amazon.com/secretsmanager/home). 78 | 1. **Database server:** *host* 79 | 2. **Port:** *port* 80 | 3. **Database name:** *dbname* 81 | 4. **Username:** *username* 82 | 5. **Password:** *password* 83 | 8. Choose **Create data source**. 84 | 85 | ![New data set](images/007_NewAuroraDataSource.png) 86 | 87 | 9. Select a table and choose **Edit/Preview data** or choose **Use custom SQL** to write your own query. 88 | 89 | ![Choose your table](images/008_AuroraChooseYourTable.png) 90 | 91 | 10. Select *Import to SPICE for quicker analytics* or *Directly query your data* and choose **Edit/Preview data** to preview data. 92 | 11. After you are finished previewing the data, give your data set a name and choose **Save & Visualize**. 93 | 94 | 95 | ## Create a Dashboard 96 | 97 | ### Create an Analysis 98 | 99 | 1. Return to the [Amazon QuickSight console](https://us-east-1.quicksight.aws.amazon.com/sn/start). 
100 | 2. From the upper left-hand corner, choose **New analysis**. 101 | 3. Choose a data source that you created in previous steps. 102 | 4. Choose **Create analysis**. 103 | 104 | You can now create an analysis using your data sets. For more information on creating your analysis, see: [Working with Analyses](https://docs.aws.amazon.com/quicksight/latest/user/working-with-analyses.html). 105 | 106 | You can add multiple data sets to your analysis. For more information, see: [Adding a Data Set to an Analysis](https://docs.aws.amazon.com/quicksight/latest/user/adding-a-data-set-to-an-analysis.html). 107 | 108 | <details>
109 | <summary>Expand to see sample query.</summary> 110 | 111 | The following query will get the last reported number of bikes available and the capacity of each station: 112 | ``` 113 | WITH cte AS 114 | ( 115 | SELECT 116 | ROW_NUMBER() OVER (PARTITION BY station_id ORDER BY last_reported DESC) AS rn 117 | ,station_id 118 | ,num_bikes_available 119 | ,is_installed 120 | ,is_returning 121 | ,is_renting 122 | ,last_reported 123 | FROM public.station_status_history 124 | ) 125 | SELECT 126 | cte.station_id 127 | ,s.station_name 128 | ,cte.num_bikes_available 129 | ,cte.is_installed 130 | ,cte.is_returning 131 | ,cte.is_renting 132 | ,s.capacity 133 | ,s.lat 134 | ,s.lon 135 | ,cte.last_reported 136 | ,s.last_updated 137 | FROM 138 | cte 139 | INNER JOIN public.station_detail s ON cte.station_id = s.station_id 140 | WHERE rn = 1; 141 | ``` 142 | </details>
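Optionally, you can sanity-check a query like the one above outside of QuickSight before building the analysis. The following sketch is illustrative only: it assumes you run it from a host with network access to the Redshift cluster (for example, inside the same VPC), that `psycopg2` is installed, and that `<your-redshift-secret-id>` is replaced with the ID of the Secrets Manager secret referenced in the data source steps.

```
import json

import boto3
import psycopg2

# Placeholder: the name or ARN of the Secrets Manager secret that stores the
# Redshift credentials created by the stack.
SECRET_ID = '<your-redshift-secret-id>'

# Look up host, port, dbname, username, and password, which are the same
# values the QuickSight data source steps above retrieve manually.
secret = json.loads(
    boto3.client('secretsmanager').get_secret_value(SecretId=SECRET_ID)['SecretString']
)

conn = psycopg2.connect(
    host=secret['host'],
    port=secret['port'],
    dbname=secret['dbname'],
    user=secret['username'],
    password=secret['password']
)

with conn.cursor() as cur:
    # Same idea as the sample query: the most recently reported status rows.
    cur.execute(
        'SELECT station_id, num_bikes_available, last_reported '
        'FROM public.station_status_history '
        'ORDER BY last_reported DESC LIMIT 10'
    )
    for row in cur.fetchall():
        print(row)

conn.close()
```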
143 | 144 | ### Publish Dashboard 145 | 146 | 1. Open the analysis that you created in previous steps. 147 | 2. Choose **Share** on the application bar, and then choose **Publish dashboard**. 148 | 149 | ![Publish Dashboard](images/009_SharePublishDashboard.png) 150 | 151 | 3. Select **Publish new dashboard as**, and then provide a dashboard name. 152 | 4. Choose **Publish dashboard**. 153 | 154 | ![Publish A Dashboard](images/010_PublishADashboard.png) 155 | 156 | 5. Select **Share with all users in this account** and choose **Confirm**. 157 | 158 | ![Share Dashboard With Users](images/011_ShareDashboardWithUsers.png) 159 | 160 | ***NOTE:** All users in the QuickSight account will have reader access to this dashboard. For more information on managing access to your dashboard, see: [Share Dashboards](https://docs.aws.amazon.com/quicksight/latest/user/sharing-a-dashboard.html)* 161 | 162 | 6. From the **Manage dashboard sharing** prompt, choose the **X** in the upper right-hand corner to close the prompt. 163 | 7. While viewing the dashboard, the URL in your browser location bar will be in the following format: `https://{REGION}.quicksight.aws.amazon.com/sn/dashboards/XXXXXXXX-XXXX-XXXX-XXXXXXXXXXXX`. The characters after the final `/` in the URL is your *Dashboard ID*. Copy the *Dashboard ID* into a text editor of your choice for the next steps. 164 | 165 | ## Embed a QuickSight Dashboard 166 | 167 | ### Update Embed URL Lambda Function 168 | 169 | 1. Open the [AWS Lambda Console](https://console.aws.amazon.com/lambda/home), and choose **Applications** from the left-hand navigation menu. 170 | 2. Choose the application with the description *Template to create BikeNow purpose-built database demo*. 171 | 3. Type `GetEmbeddedQSUrl` in the filter textbox and submit. 172 | 173 | ![Modify Embed URL Lambda Function](images/012_DatabaseApplicationLambda.png) 174 | 175 | 4. Choose the Lambda function with the **Logical ID** of *LambdaGetEmbeddedQSUrlApi* to open the AWS Lambda console editor. 176 | 5. Locate the **Environment variables** section and choose **Edit**. 177 | 178 | ![Environment Variables](images/013_EnvironmentVariables.png) 179 | 180 | 6. In the **Value** column next to the `DASHBOARD_ID` key, replace `` with the *Dashboard ID* that you copied from previous steps. Your *Dashboard ID* should have the following format: `XXXXXXXX-XXXX-XXXX-XXXXXXXXXXXX`. 181 | 10. Choose **Save**. 182 | 183 | ### Update Web Application Code 184 | 185 | 1. Open the [CodeCommit Console](https://console.aws.amazon.com/codesuite/codecommit/home) 186 | 2. Choose the **Bikenow-WebAssets** repository. 187 | 3. Navigate to the file `/src/App.tsx`, and choose **Edit**. 188 | 189 | ![Update Web Application Code](images/014_CodeCommitAppTsx.png) 190 | 191 | 4. Uncomment the *Report* button at *Line 67*. Your code should like the following: 192 | 193 | ``` 194 | showLoggedInBar = () => ( 195 |
196 | 197 | 198 | 199 | 200 |
201 | ); 202 | ``` 203 | 204 | 5. Below the code editor, provide an author name and email address and choose **Commit changes**. 205 | 6. Open the [CodePipeline Console](https://console.aws.amazon.com/codesuite/codepipeline/pipelines). 206 | 7. Once your Pipeline status is **Succeeded**, you can view your dashboard in the web application. 207 | 208 | ### Test your application 209 | 210 | 1. Using your browser, navigate to the `WebsiteURL` value from the CloudFormation Outputs. 211 | 2. If you are not signed in, register and sign into your application. 212 | 3. Choose **Report** from the upper right-hand navigation bar. 213 | 214 | ***NOTE:** You may need to perform a hard refresh to clear your browser's cache and see updates.** 215 | 216 | ![Test Your Application](images/015_TestYourApplication.png) 217 | 218 | ## Conclusion 219 | 220 | Congratulations! You have embedded a QuickSight dashboard in your application. 221 | 222 | For more information on embedding dashboards, see: [Embedding Amazon QuickSight Dashboards](https://docs.aws.amazon.com/quicksight/latest/user/embedding-dashboards.html). 223 | -------------------------------------------------------------------------------- /templates/codebuild-template.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Transform: 'AWS::Serverless-2016-10-31' 3 | Description: Template to create BikeNow Code-star pipeline 4 | Parameters: 5 | EnvironmentName: 6 | Description: Environment stage name 7 | Type: String 8 | S3BucketWeb: 9 | Description: Name of S3 bucket hosting web application 10 | Type: String 11 | ApiDatabase: 12 | Description: ID of API Gateway used for database application 13 | Type: String 14 | ApiAiml: 15 | Description: ID of API Gateway used for AIML application 16 | Type: String 17 | CognitoUserPool: 18 | Description: Cognito user pool 19 | Type: String 20 | CognitoUserPoolClient: 21 | Description: Cognito user pool client 22 | Type: String 23 | CognityIdentityPool: 24 | Description: Cognito identity pool 25 | Type: String 26 | WebsiteCDN: 27 | Description: CloudFront CDN for distributing web application 28 | Type: String 29 | 30 | Conditions: 31 | DefaultRegion: !Equals [!Ref "AWS::Region", "us-east-1"] 32 | 33 | Mappings: 34 | S3Buckets: 35 | us-east-1: 36 | Bucket: aws-bikenow-demo-us-east-1 37 | SeederFunctionBucket: fsd-aws-wildrydes-us-east-1 38 | us-west-2: 39 | Bucket: aws-bikenow-demo-us-west-2 40 | SeederFunctionBucket: fsd-aws-wildrydes-us-west-2 41 | eu-central-1: 42 | Bucket: aws-bikenow-demo-eu-central-1 43 | SeederFunctionBucket: fsd-aws-wildrydes-eu-central-1 44 | eu-west-1: 45 | Bucket: aws-bikenow-demo-eu-west-1 46 | SeederFunctionBucket: fsd-aws-wildrydes-eu-west-1 47 | Constants: 48 | AppKeys: 49 | SeedRepository: https://s3.amazonaws.com/aws-bikenow-demo-us-east-1/aws-bikenow-demo.zip 50 | S3Keys: 51 | SeederFunctionCode: aws-serverless-codecommit-seeder.zip 52 | 53 | Resources: 54 | # -------------------------------- CODE PIPELINE STUFF 55 | AssetsCodeRepository: 56 | Type: 'AWS::CodeCommit::Repository' 57 | Properties: 58 | RepositoryDescription: Code repository for web application 59 | RepositoryName: Bikenow-WebAssets 60 | CodeBuildRole: 61 | Description: Creating service role in IAM for AWS CodeBuild 62 | Type: 'AWS::IAM::Role' 63 | Properties: 64 | AssumeRolePolicyDocument: 65 | Statement: 66 | - Effect: Allow 67 | Principal: 68 | Service: 69 | - codebuild.amazonaws.com 70 | Action: 'sts:AssumeRole' 71 | Policies: 72 | - 
PolicyName: codebuild-policy 73 | PolicyDocument: 74 | Statement: 75 | - Action: 76 | - 's3:PutObject' 77 | - 's3:GetObject' 78 | - 's3:GetObjectVersion' 79 | - 's3:GetBucketVersioning' 80 | Resource: 81 | - !Sub 'arn:aws:s3:::${S3BucketWeb}/*' 82 | - !Sub 'arn:aws:s3:::${PipelineArtifactsBucket}/*' 83 | Effect: Allow 84 | - PolicyName: codebuild-logs 85 | PolicyDocument: 86 | Statement: 87 | - Action: 88 | - 'logs:CreateLogStream' 89 | - 'logs:PutLogEvents' 90 | - 'logs:CreateLogGroup' 91 | - 'cloudfront:CreateInvalidation' 92 | Resource: '*' 93 | Effect: Allow 94 | Path: / 95 | CodePipelineRole: 96 | Description: Creating service role in IAM for AWS CodePipeline 97 | Type: 'AWS::IAM::Role' 98 | Properties: 99 | AssumeRolePolicyDocument: 100 | Statement: 101 | - Effect: Allow 102 | Principal: 103 | Service: 104 | - codepipeline.amazonaws.com 105 | Action: 'sts:AssumeRole' 106 | Policies: 107 | - PolicyName: codecommit-for-codepipeline 108 | PolicyDocument: 109 | Statement: 110 | - Action: 111 | - 'codecommit:GetBranch' 112 | - 'codecommit:GetCommit' 113 | - 'codecommit:UploadArchive' 114 | - 'codecommit:GetUploadArchiveStatus' 115 | - 'codecommit:CancelUploadArchive' 116 | Resource: !GetAtt AssetsCodeRepository.Arn 117 | Effect: Allow 118 | - PolicyName: artifacts-for-pipeline 119 | PolicyDocument: 120 | Statement: 121 | - Action: 122 | - 's3:PutObject' 123 | - 's3:GetObject' 124 | Resource: 125 | 'Fn::Join': 126 | - '' 127 | - - 'Fn::GetAtt': 128 | - PipelineArtifactsBucket 129 | - Arn 130 | - /* 131 | Effect: Allow 132 | - PolicyName: codebuild-for-pipeline 133 | PolicyDocument: 134 | Statement: 135 | - Action: 136 | - 'codebuild:BatchGetBuilds' 137 | - 'codebuild:StartBuild' 138 | Resource: !GetAtt 139 | - CodeBuildProject 140 | - Arn 141 | Effect: Allow 142 | Path: / 143 | PipelineArtifactsBucket: 144 | Type: 'AWS::S3::Bucket' 145 | Properties: 146 | AccessControl: Private 147 | Metadata: 148 | 'AWS::CloudFormation::Designer': 149 | id: bbbc886f-d307-45dc-a6e3-63353f40a4f4 150 | CodeBuildProject: 151 | DependsOn: 152 | - PipelineArtifactsBucket 153 | Description: Creating AWS CodeBuild project 154 | Type: 'AWS::CodeBuild::Project' 155 | Properties: 156 | Artifacts: 157 | Type: CODEPIPELINE 158 | Description: Building stage for Bikenow. 159 | Environment: 160 | ComputeType: BUILD_GENERAL1_SMALL 161 | EnvironmentVariables: 162 | - Name: S3_BUCKET 163 | Value: !Ref PipelineArtifactsBucket 164 | Image: 'aws/codebuild/standard:2.0' 165 | Type: LINUX_CONTAINER 166 | Name: Bikenow-build 167 | ServiceRole: !Ref CodeBuildRole 168 | Source: 169 | Type: CODEPIPELINE 170 | BuildSpec: !Sub | 171 | version: 0.2 172 | phases: 173 | install: 174 | runtime-versions: 175 | nodejs: 10 176 | pre_build: 177 | commands: 178 | - echo Installing NPM dependencies... 
179 | - npm install 180 | build: 181 | commands: 182 | - npm run build 183 | post_build: 184 | commands: 185 | - echo Uploading to S3BucketWeb 186 | - aws s3 cp --recursive ./build s3://${S3BucketWeb}/ 187 | - aws s3 cp --cache-control="max-age=0, no-cache, no-store, must-revalidate" ./build/service-worker.js s3://${S3BucketWeb}/ 188 | - aws s3 cp --cache-control="max-age=0, no-cache, no-store, must-revalidate" ./build/index.html s3://${S3BucketWeb}/ 189 | - aws cloudfront create-invalidation --distribution-id ${WebsiteCDN} --paths /index.html 190 | - aws cloudfront create-invalidation --distribution-id ${WebsiteCDN} --paths /index.html /service-worker.js 191 | artifacts: 192 | files: 193 | - '**/*' 194 | base-directory: build 195 | TimeoutInMinutes: 15 196 | AssetsCodePipeline: 197 | Type: 'AWS::CodePipeline::Pipeline' 198 | Properties: 199 | RoleArn: !GetAtt 200 | - CodePipelineRole 201 | - Arn 202 | ArtifactStore: 203 | Location: !Ref PipelineArtifactsBucket 204 | Type: S3 205 | Stages: 206 | - Name: Source 207 | Actions: 208 | - Name: Source 209 | InputArtifacts: [] 210 | ActionTypeId: 211 | Version: '1' 212 | Category: Source 213 | Owner: AWS 214 | Provider: CodeCommit 215 | Configuration: 216 | BranchName: master 217 | RepositoryName: Bikenow-WebAssets 218 | OutputArtifacts: 219 | - Name: Bikenow-SourceArtifact 220 | - Name: Build 221 | Actions: 222 | - Name: build-and-deploy 223 | InputArtifacts: 224 | - Name: Bikenow-SourceArtifact 225 | ActionTypeId: 226 | Category: Build 227 | Owner: AWS 228 | Version: '1' 229 | Provider: CodeBuild 230 | OutputArtifacts: 231 | - Name: Bikenow-BuildArtifact 232 | Configuration: 233 | ProjectName: Bikenow-build 234 | RunOrder: 1 235 | DependsOn: 236 | - PipelineArtifactsBucket 237 | SeederFunction: 238 | Properties: 239 | Code: 240 | S3Bucket: !FindInMap 241 | - S3Buckets 242 | - !Ref 'AWS::Region' 243 | - SeederFunctionBucket 244 | S3Key: !FindInMap 245 | - Constants 246 | - S3Keys 247 | - SeederFunctionCode 248 | Description: CodeCommit repository seeder 249 | Handler: seeder.SeedRepositoryHandler 250 | MemorySize: 3008 251 | Role: 252 | 'Fn::GetAtt': 253 | - SeederRole 254 | - Arn 255 | Runtime: java8 256 | Timeout: 900 257 | Type: 'AWS::Lambda::Function' 258 | DependsOn: 259 | - AssetsCodeRepository 260 | UpdateConfigFunction: 261 | Properties: 262 | CodeUri: ../lambdas/setup_update_config 263 | Description: Update config for CodeCommit repository 264 | Handler: index.handler 265 | Role: 266 | 'Fn::GetAtt': 267 | - SeederRole 268 | - Arn 269 | Runtime: nodejs12.x 270 | Timeout: 300 271 | Environment: 272 | Variables: 273 | DB_API_URL: !Sub 'https://${ApiDatabase}.execute-api.${AWS::Region}.amazonaws.com/${EnvironmentName}' 274 | ML_API_URL: !Sub 'https://${ApiAiml}.execute-api.${AWS::Region}.amazonaws.com/${EnvironmentName}' 275 | BRANCH_NAME: master 276 | REGION: !Ref 'AWS::Region' 277 | REPOSITORY_NAME: Bikenow-WebAssets 278 | USER_POOL_ID: !Ref CognitoUserPool 279 | APP_CLIENT_ID: !Ref CognitoUserPoolClient 280 | IDENTITY_POOL_ID: !Ref CognityIdentityPool 281 | Type: AWS::Serverless::Function 282 | DependsOn: 283 | - AssetsCodeRepository 284 | - SeederFunction 285 | - RepositorySeeder 286 | SeederRole: 287 | Properties: 288 | AssumeRolePolicyDocument: 289 | Statement: 290 | - Action: 291 | - 'sts:AssumeRole' 292 | Effect: Allow 293 | Principal: 294 | Service: 295 | - lambda.amazonaws.com 296 | Version: 2012-10-17 297 | ManagedPolicyArns: 298 | - 'arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole' 299 | Policies: 300 | - 
PolicyDocument: 301 | Statement: 302 | - Action: 303 | - 'codecommit:GetRepository' 304 | - 'codecommit:GitPush' 305 | - 'codecommit:GetBranch' 306 | - 'codecommit:PutFile' 307 | Effect: Allow 308 | Resource: !GetAtt AssetsCodeRepository.Arn 309 | Version: 2012-10-17 310 | PolicyName: SeederRolePolicy 311 | - PolicyDocument: 312 | Statement: 313 | - Action: 314 | - 'logs:*' 315 | Effect: Allow 316 | Resource: 'arn:aws:logs:*:*:*' 317 | Version: 2012-10-17 318 | PolicyName: LogsPolicy 319 | Type: 'AWS::IAM::Role' 320 | RepositorySeeder: 321 | Properties: 322 | ServiceToken: 323 | 'Fn::GetAtt': 324 | - SeederFunction 325 | - Arn 326 | sourceUrl: !FindInMap [Constants, AppKeys, SeedRepository] 327 | targetRepositoryName: Bikenow-WebAssets 328 | targetRepositoryRegion: '${AWS::Region}' 329 | Type: 'Custom::RepositorySeeder' 330 | RepositoryUpdater: 331 | Type: 'Custom::CustomResource' 332 | Properties: 333 | ServiceToken: !GetAtt UpdateConfigFunction.Arn 334 | ParameterOne: Parameter to pass into Custom Lambda Function 335 | DependsOn: UpdateConfigFunction 336 | 337 | # -------------------------------- CLEANUP STEPS 338 | RoleSetupEmptyBucket: 339 | Type: AWS::IAM::Role 340 | Properties: 341 | Path: / 342 | AssumeRolePolicyDocument: 343 | Version: '2012-10-17' 344 | Statement: 345 | - Effect: Allow 346 | Principal: 347 | Service: 348 | - lambda.amazonaws.com 349 | Action: sts:AssumeRole 350 | Policies: 351 | - PolicyName: RoleSetupRdsDdlLambdaPolicy 352 | PolicyDocument: 353 | Version: '2012-10-17' 354 | Statement: 355 | - Effect: Allow 356 | Action: 357 | - logs:CreateLogStream 358 | - logs:CreateLogGroup 359 | - logs:PutLogEvents 360 | Resource: 361 | - arn:aws:logs:*:*:* 362 | - Effect: Allow 363 | Action: 364 | - s3:* 365 | Resource: 366 | - !GetAtt PipelineArtifactsBucket.Arn 367 | - Fn::Join: 368 | - "/" 369 | - 370 | - !GetAtt PipelineArtifactsBucket.Arn 371 | - "*" 372 | LambdaSetupEmptyArtifactsBucket: 373 | Type: AWS::Serverless::Function 374 | Properties: 375 | Handler: index.lambda_handler 376 | Runtime: python3.8 377 | CodeUri: ../lambdas/setup_empty_bucket 378 | Role: !GetAtt RoleSetupEmptyBucket.Arn 379 | Description: Empty artifacts bucket upon deletion 380 | MemorySize: 128 381 | Timeout: 300 382 | Environment: 383 | Variables: 384 | SCRIPT_BUCKET: !Ref PipelineArtifactsBucket 385 | SetupEmptyArtifactsBucket: 386 | Type: Custom::SetupFunction 387 | DependsOn: 388 | - PipelineArtifactsBucket 389 | - RoleSetupEmptyBucket 390 | - LambdaSetupEmptyArtifactsBucket 391 | Properties: 392 | ServiceToken: !GetAtt LambdaSetupEmptyArtifactsBucket.Arn -------------------------------------------------------------------------------- /templates/aiml-template.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Transform: 'AWS::Serverless-2016-10-31' 3 | Description: Template to create BikeNow AI/ML demo 4 | 5 | Parameters: 6 | ArtifactsBucket: 7 | Description: Name of S3 bucket containing artifacts 8 | Type: String 9 | DataLakeS3Bucket: 10 | Description: Name of Data Lake S3 bucket 11 | Type: String 12 | GlueEndpointName: 13 | Description: Name of Glue development endpoint 14 | Type: String 15 | EnvironmentName: 16 | Description: Environment stage name 17 | Type: String 18 | 19 | Mappings: 20 | XGBoostMap: 21 | us-east-1: 22 | Image: 683313688378.dkr.ecr.us-east-1.amazonaws.com/sagemaker-xgboost:0.90-1-cpu-py3 23 | us-east-2: 24 | Image: 
257758044811.dkr.ecr.us-east-2.amazonaws.com/sagemaker-xgboost:0.90-1-cpu-py3 25 | us-west-2: 26 | Image: 246618743249.dkr.ecr.us-west-2.amazonaws.com/sagemaker-xgboost:0.90-1-cpu-py3 27 | eu-central-1: 28 | Image: 492215442770.dkr.ecr.eu-central-1.amazonaws.com/sagemaker-xgboost:0.90-1-cpu-py3 29 | eu-west-1: 30 | Image: 141502667606.dkr.ecr.eu-west-1.amazonaws.com/sagemaker-xgboost:0.90-1-cpu-py3 31 | eu-west-2: 32 | Image: 764974769150.dkr.ecr.eu-west-2.amazonaws.com/sagemaker-xgboost:0.90-1-cpu-py3 33 | 34 | Resources: 35 | # -------------------------------- SAGEMAKER ROLE 36 | RoleSagemakerNotebook: 37 | Type: AWS::IAM::Role 38 | Properties: 39 | Path: / 40 | AssumeRolePolicyDocument: 41 | Version: '2012-10-17' 42 | Statement: 43 | - Effect: Allow 44 | Principal: 45 | Service: 46 | - sagemaker.amazonaws.com 47 | Action: sts:AssumeRole 48 | ManagedPolicyArns: 49 | - arn:aws:iam::aws:policy/AWSGlueConsoleFullAccess 50 | - arn:aws:iam::aws:policy/AmazonSageMakerFullAccess 51 | Policies: 52 | - PolicyName: SagemakerNotebookPolicy 53 | PolicyDocument: 54 | Version: '2012-10-17' 55 | Statement: 56 | - Effect: Allow 57 | Action: 58 | - sagemaker:ListTags 59 | Resource: 60 | - !Sub 'arn:aws:sagemaker:${AWS::Region}:${AWS::AccountId}:*' 61 | - Effect: Allow 62 | Action: 63 | - logs:CreateLogStream 64 | - logs:CreateLogGroup 65 | - logs:PutLogEvents 66 | Resource: 67 | - arn:aws:logs:*:*:* 68 | - Effect: Allow 69 | Action: 70 | - s3:GetAccountPublicAccessBlock 71 | - s3:ListAllMyBuckets 72 | - s3:HeadBucket 73 | Resource: 74 | - '*' 75 | - Effect: Allow 76 | Action: 77 | - glue:GetDevEndpoints 78 | - glue:UpdateDevEndpoint 79 | - glue:GetDevEndpoint 80 | Resource: 81 | - !Sub 'arn:aws:glue:${AWS::Region}:${AWS::AccountId}:devEndpoint/${GlueEndpointName}*' 82 | - Effect: Allow 83 | Action: 84 | - s3:* 85 | Resource: 86 | - !Sub 'arn:aws:s3:::${DataLakeS3Bucket}' 87 | - !Sub 'arn:aws:s3:::${DataLakeS3Bucket}/*' 88 | - !Sub 'arn:aws:s3:::${ArtifactsBucket}' 89 | - !Sub 'arn:aws:s3:::${ArtifactsBucket}/*' 90 | - Effect: Allow 91 | Action: 92 | - s3:GetObject 93 | Resource: 94 | - !Sub 'arn:aws:s3:::aws-glue-jes-prod-${AWS::Region}-assets*' 95 | - Effect: Allow 96 | Action: 97 | - iam:PassRole 98 | Resource: 99 | - arn:aws:iam::*:role/AWSGlueServiceRole 100 | Condition: 101 | StringLike: 102 | iam:PassedToService: 103 | - glue.amazonaws.com 104 | # -------------------------------- SAGEMAKER NOTEBOOK 105 | SagemakerNotebookLifecycleConfig: 106 | Type: AWS::SageMaker::NotebookInstanceLifecycleConfig 107 | Properties: 108 | OnCreate: 109 | - Content: 110 | Fn::Base64: !Sub | 111 | #!/bin/bash 112 | set -ex 113 | [ -e /home/ec2-user/glue_ready ] && exit 0 114 | 115 | mkdir -p /home/ec2-user/glue 116 | cd /home/ec2-user/glue 117 | 118 | #GLUE_ENDPOINT and ASSETS must be set by the consumer of this script 119 | REGION=$(aws configure get region) 120 | 121 | # Write dev endpoint in a file which will be used by daemon scripts 122 | glue_endpoint_file="/home/ec2-user/glue/glue_endpoint.txt" 123 | 124 | if [ -f $glue_endpoint_file ] ; then 125 | rm $glue_endpoint_file 126 | fi 127 | echo "https://glue.$REGION.amazonaws.com" >> $glue_endpoint_file 128 | 129 | ASSETS=s3://aws-glue-jes-prod-$REGION-assets/sagemaker/assets/ 130 | 131 | aws s3 cp $ASSETS . 
--recursive 132 | 133 | bash "/home/ec2-user/glue/Miniconda2-4.5.12-Linux-x86_64.sh" -b -u -p "/home/ec2-user/glue/miniconda" 134 | 135 | source "/home/ec2-user/glue/miniconda/bin/activate" 136 | 137 | tar -xf autossh-1.4e.tgz 138 | cd autossh-1.4e 139 | ./configure 140 | make 141 | sudo make install 142 | sudo cp /home/ec2-user/glue/autossh.conf /etc/init/ 143 | 144 | mkdir -p /home/ec2-user/.sparkmagic 145 | cp /home/ec2-user/glue/config.json /home/ec2-user/.sparkmagic/config.json 146 | 147 | mkdir -p /home/ec2-user/SageMaker/Glue\ Examples 148 | mv /home/ec2-user/glue/notebook-samples/* /home/ec2-user/SageMaker/Glue\ Examples/ 149 | 150 | # Copy BikeNow demo notebooks 151 | ARTIFACTS=s3://${ArtifactsBucket}/artifacts/ 152 | aws s3 cp $ARTIFACTS . --recursive --exclude "*" --include "*.ipynb" 153 | sudo chmod 666 *.ipynb 154 | mv *.ipynb /home/ec2-user/SageMaker/ 155 | 156 | # Run daemons as cron jobs and use flock make sure that daemons are started only iff stopped 157 | (crontab -l; echo "* * * * * /usr/bin/flock -n /tmp/lifecycle-config-v2-dev-endpoint-daemon.lock /usr/bin/sudo /bin/sh /home/ec2-user/glue/lifecycle-config-v2-dev-endpoint-daemon.sh") | crontab - 158 | 159 | (crontab -l; echo "* * * * * /usr/bin/flock -n /tmp/lifecycle-config-reconnect-dev-endpoint-daemon.lock /usr/bin/sudo /bin/sh /home/ec2-user/glue/lifecycle-config-reconnect-dev-endpoint-daemon.sh") | crontab - 160 | 161 | source "/home/ec2-user/glue/miniconda/bin/deactivate" 162 | 163 | rm -rf "/home/ec2-user/glue/Miniconda2-4.5.12-Linux-x86_64.sh" 164 | 165 | sudo touch /home/ec2-user/glue_ready 166 | OnStart: 167 | - Content: 168 | Fn::Base64: !Sub | 169 | #!/bin/bash 170 | set -ex 171 | [ -e /home/ec2-user/glue_ready ] && exit 0 172 | 173 | mkdir -p /home/ec2-user/glue 174 | cd /home/ec2-user/glue 175 | 176 | #GLUE_ENDPOINT and ASSETS must be set by the consumer of this script 177 | REGION=$(aws configure get region) 178 | 179 | # Write dev endpoint in a file which will be used by daemon scripts 180 | glue_endpoint_file="/home/ec2-user/glue/glue_endpoint.txt" 181 | 182 | if [ -f $glue_endpoint_file ] ; then 183 | rm $glue_endpoint_file 184 | fi 185 | echo "https://glue.$REGION.amazonaws.com" >> $glue_endpoint_file 186 | 187 | ASSETS=s3://aws-glue-jes-prod-$REGION-assets/sagemaker/assets/ 188 | 189 | aws s3 cp $ASSETS . --recursive 190 | 191 | bash "/home/ec2-user/glue/Miniconda2-4.5.12-Linux-x86_64.sh" -b -u -p "/home/ec2-user/glue/miniconda" 192 | 193 | source "/home/ec2-user/glue/miniconda/bin/activate" 194 | 195 | tar -xf autossh-1.4e.tgz 196 | cd autossh-1.4e 197 | ./configure 198 | make 199 | sudo make install 200 | sudo cp /home/ec2-user/glue/autossh.conf /etc/init/ 201 | 202 | mkdir -p /home/ec2-user/.sparkmagic 203 | cp /home/ec2-user/glue/config.json /home/ec2-user/.sparkmagic/config.json 204 | 205 | mkdir -p /home/ec2-user/SageMaker/Glue\ Examples 206 | mv /home/ec2-user/glue/notebook-samples/* /home/ec2-user/SageMaker/Glue\ Examples/ 207 | 208 | # Copy BikeNow demo notebooks 209 | ARTIFACTS=s3://${ArtifactsBucket}/artifacts/ 210 | aws s3 cp $ARTIFACTS . 
--recursive --exclude "*" --include "*.ipynb" 211 | sudo chmod 666 *.ipynb 212 | mv *.ipynb /home/ec2-user/SageMaker/ 213 | 214 | # Run daemons as cron jobs and use flock make sure that daemons are started only iff stopped 215 | (crontab -l; echo "* * * * * /usr/bin/flock -n /tmp/lifecycle-config-v2-dev-endpoint-daemon.lock /usr/bin/sudo /bin/sh /home/ec2-user/glue/lifecycle-config-v2-dev-endpoint-daemon.sh") | crontab - 216 | 217 | (crontab -l; echo "* * * * * /usr/bin/flock -n /tmp/lifecycle-config-reconnect-dev-endpoint-daemon.lock /usr/bin/sudo /bin/sh /home/ec2-user/glue/lifecycle-config-reconnect-dev-endpoint-daemon.sh") | crontab - 218 | 219 | source "/home/ec2-user/glue/miniconda/bin/deactivate" 220 | 221 | rm -rf "/home/ec2-user/glue/Miniconda2-4.5.12-Linux-x86_64.sh" 222 | 223 | sudo touch /home/ec2-user/glue_ready 224 | SagemakerNotebook: 225 | Type: AWS::SageMaker::NotebookInstance 226 | Properties: 227 | InstanceType: ml.m5.2xlarge 228 | VolumeSizeInGB: 20 229 | RoleArn: !GetAtt RoleSagemakerNotebook.Arn 230 | LifecycleConfigName: !GetAtt SagemakerNotebookLifecycleConfig.NotebookInstanceLifecycleConfigName 231 | Tags: 232 | - Key: aws-glue-dev-endpoint 233 | Value: !Ref GlueEndpointName 234 | 235 | # -------------------------------- SAGEMAKER ENDPOINT 236 | BikenowXgboostModel: 237 | Type: AWS::SageMaker::Model 238 | Properties: 239 | ExecutionRoleArn: !GetAtt RoleSagemakerNotebook.Arn 240 | Containers: 241 | - ContainerHostname: XGBoostContainer 242 | Image: !FindInMap [XGBoostMap, !Ref AWS::Region, Image] 243 | ModelDataUrl: !Sub 's3://${ArtifactsBucket}/artifacts/bikenow-xgboost-regression-model.tar.gz' 244 | BikenowXgboostEndpointConfig: 245 | Type: AWS::SageMaker::EndpointConfig 246 | Properties: 247 | ProductionVariants: 248 | - VariantName: BikenowXgboostVariant 249 | InitialInstanceCount: 1 250 | InitialVariantWeight: 1.0 251 | InstanceType: ml.m5.xlarge 252 | ModelName: !GetAtt BikenowXgboostModel.ModelName 253 | BikenowXgboostEndpoint: 254 | Type: AWS::SageMaker::Endpoint 255 | Properties: 256 | EndpointConfigName: !GetAtt BikenowXgboostEndpointConfig.EndpointConfigName 257 | 258 | # -------------------------------- LAMBDA ROLES 259 | RoleInvokeModel: 260 | Type: AWS::IAM::Role 261 | Properties: 262 | Path: / 263 | AssumeRolePolicyDocument: 264 | Version: '2012-10-17' 265 | Statement: 266 | - Effect: Allow 267 | Principal: 268 | Service: 269 | - lambda.amazonaws.com 270 | Action: sts:AssumeRole 271 | Policies: 272 | - PolicyName: InvokeModelLambdaPolicy 273 | PolicyDocument: 274 | Version: '2012-10-17' 275 | Statement: 276 | - Effect: Allow 277 | Action: 278 | - logs:CreateLogStream 279 | - logs:CreateLogGroup 280 | - logs:PutLogEvents 281 | Resource: 282 | - arn:aws:logs:*:*:* 283 | - Effect: Allow 284 | Action: 285 | - sagemaker:DescribeEndpoint* 286 | - sagemaker:InvokeEndpoint 287 | Resource: 288 | - !Ref BikenowXgboostEndpoint 289 | 290 | # -------------------------------- LAMBDA FUNCTIONS 291 | LambdaInvokeModelApi: 292 | Type: AWS::Serverless::Function 293 | Properties: 294 | Handler: index.lambda_handler 295 | Runtime: python3.8 296 | CodeUri: ../lambdas/api_predict_station_status 297 | Role: !GetAtt RoleInvokeModel.Arn 298 | Description: Invoke Sagemaker model to predict bike availability 299 | MemorySize: 256 300 | Timeout: 60 301 | Environment: 302 | Variables: 303 | MODEL_ENDPOINT_NAME: !GetAtt BikenowXgboostEndpoint.EndpointName 304 | LambdaInvokeModelApiPermission: 305 | Type: AWS::Lambda::Permission 306 | Properties: 307 | Action: lambda:InvokeFunction 
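The `api_predict_station_status` function above receives the endpoint name through the `MODEL_ENDPOINT_NAME` environment variable. As a reference for how such a caller might invoke the XGBoost endpoint, here is a minimal boto3 sketch; the fallback endpoint name and the CSV feature row are placeholders, and the real feature order is defined by the training notebook, not by this sketch.

```python
import os
import boto3

# Assumption: MODEL_ENDPOINT_NAME is set as in the Lambda environment above;
# the fallback value and the feature row below are placeholders only.
ENDPOINT_NAME = os.environ.get("MODEL_ENDPOINT_NAME", "your-endpoint-name")

def predict(csv_row: str) -> float:
    """Send one CSV-encoded feature row to the XGBoost endpoint and return its prediction."""
    runtime = boto3.client("sagemaker-runtime")
    response = runtime.invoke_endpoint(
        EndpointName=ENDPOINT_NAME,
        ContentType="text/csv",  # the built-in XGBoost container accepts CSV input
        Body=csv_row.encode("utf-8"),
    )
    return float(response["Body"].read().decode("utf-8"))

if __name__ == "__main__":
    print(predict("123,14,3,0.75"))  # placeholder feature values
```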
308 | FunctionName: !Ref LambdaInvokeModelApi 309 | Principal: apigateway.amazonaws.com 310 | SourceArn: !Sub 'arn:aws:execute-api:${AWS::Region}:${AWS::AccountId}:${ApiAiml}/*' 311 | 312 | # -------------------------------- API GATEWAY 313 | ApiAiml: 314 | Type: AWS::ApiGateway::RestApi 315 | Properties: 316 | Name: !Sub 'BikeNow-Aiml-${EnvironmentName}' 317 | Description: API Gateway for BikeNow AI/ML demo 318 | FailOnWarnings: true 319 | StationsApiResource: 320 | Type: AWS::ApiGateway::Resource 321 | Properties: 322 | RestApiId: !Ref ApiAiml 323 | ParentId: !GetAtt ApiAiml.RootResourceId 324 | PathPart: plan 325 | InvokeModelApiRequestPOST: 326 | Type: AWS::ApiGateway::Method 327 | DependsOn: 328 | - LambdaInvokeModelApi 329 | Properties: 330 | AuthorizationType: AWS_IAM 331 | HttpMethod: POST 332 | Integration: 333 | Type: AWS_PROXY 334 | IntegrationHttpMethod: POST 335 | Uri: !Sub 'arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${LambdaInvokeModelApi.Arn}/invocations' 336 | IntegrationResponses: 337 | - StatusCode: 200 338 | ResourceId: !Ref StationsApiResource 339 | RestApiId: !Ref ApiAiml 340 | MethodResponses: 341 | - StatusCode: 200 342 | ResponseModels: 343 | application/json: Empty 344 | InvokeModelApiRequestOPTIONS: 345 | Type: AWS::ApiGateway::Method 346 | Properties: 347 | ResourceId: !Ref StationsApiResource 348 | RestApiId: !Ref ApiAiml 349 | AuthorizationType: None 350 | HttpMethod: OPTIONS 351 | Integration: 352 | Type: MOCK 353 | IntegrationResponses: 354 | - ResponseParameters: 355 | method.response.header.Access-Control-Allow-Headers: >- 356 | 'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token' 357 | method.response.header.Access-Control-Allow-Methods: '''GET,POST,PUT,DELETE,OPTIONS,HEAD,PATCH''' 358 | method.response.header.Access-Control-Allow-Origin: '''*''' 359 | ResponseTemplates: 360 | application/json: '' 361 | StatusCode: '200' 362 | PassthroughBehavior: WHEN_NO_MATCH 363 | RequestTemplates: 364 | application/json: '{"statusCode": 200}' 365 | MethodResponses: 366 | - ResponseModels: 367 | application/json: Empty 368 | ResponseParameters: 369 | method.response.header.Access-Control-Allow-Headers: true 370 | method.response.header.Access-Control-Allow-Methods: true 371 | method.response.header.Access-Control-Allow-Origin: true 372 | StatusCode: '200' 373 | APIDeployment: 374 | DependsOn: 375 | - InvokeModelApiRequestPOST 376 | - InvokeModelApiRequestOPTIONS 377 | Type: AWS::ApiGateway::Deployment 378 | Properties: 379 | Description: !Sub 'API deployment to ${EnvironmentName}' 380 | RestApiId: !Ref ApiAiml 381 | StageName: !Ref EnvironmentName 382 | 383 | Outputs: 384 | ApiGatewayModelId: 385 | Value: !Ref ApiAiml 386 | Description: API Gateway ID for Sagemaker model endpoint -------------------------------------------------------------------------------- /lambdas/api_get_quicksight_url/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "api_get_quicksight_url", 3 | "version": "1.0.0", 4 | "lockfileVersion": 1, 5 | "requires": true, 6 | "dependencies": { 7 | "accepts": { 8 | "version": "1.3.7", 9 | "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.7.tgz", 10 | "integrity": "sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA==", 11 | "requires": { 12 | "mime-types": "~2.1.24", 13 | "negotiator": "0.6.2" 14 | } 15 | }, 16 | "amazon-cognito-identity-js": { 17 | "version": "3.0.15", 18 | "resolved": 
"https://registry.npmjs.org/amazon-cognito-identity-js/-/amazon-cognito-identity-js-3.0.15.tgz", 19 | "integrity": "sha512-FVVd6hO0ipEODE95i2Z/6Ue/6YV8JI3sdK8zzmC8WLvI/FihbiHiNtjXojD6b48Ng2D6MZwN4C/Hqkiw5jVeFw==", 20 | "requires": { 21 | "buffer": "4.9.1", 22 | "crypto-js": "^3.1.9-1", 23 | "js-cookie": "^2.1.4" 24 | } 25 | }, 26 | "array-flatten": { 27 | "version": "1.1.1", 28 | "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", 29 | "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=" 30 | }, 31 | "aws-serverless-express": { 32 | "version": "3.3.6", 33 | "resolved": "https://registry.npmjs.org/aws-serverless-express/-/aws-serverless-express-3.3.6.tgz", 34 | "integrity": "sha512-VTn8YQpPpMAEdMeGjyaSygy7Rc0057C9MUjeZION0NBqmwTyphpu9Tc5DCHRNF4qNFQ9x1xcOte6OXKzJvvDhw==", 35 | "requires": { 36 | "binary-case": "^1.0.0", 37 | "type-is": "^1.6.16" 38 | } 39 | }, 40 | "base64-js": { 41 | "version": "1.3.1", 42 | "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.1.tgz", 43 | "integrity": "sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g==" 44 | }, 45 | "binary-case": { 46 | "version": "1.1.4", 47 | "resolved": "https://registry.npmjs.org/binary-case/-/binary-case-1.1.4.tgz", 48 | "integrity": "sha512-9Kq8m6NZTAgy05Ryuh7U3Qc4/ujLQU1AZ5vMw4cr3igTdi5itZC6kCNrRr2X8NzPiDn2oUIFTfa71DKMnue/Zg==" 49 | }, 50 | "body-parser": { 51 | "version": "1.19.0", 52 | "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.19.0.tgz", 53 | "integrity": "sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw==", 54 | "requires": { 55 | "bytes": "3.1.0", 56 | "content-type": "~1.0.4", 57 | "debug": "2.6.9", 58 | "depd": "~1.1.2", 59 | "http-errors": "1.7.2", 60 | "iconv-lite": "0.4.24", 61 | "on-finished": "~2.3.0", 62 | "qs": "6.7.0", 63 | "raw-body": "2.4.0", 64 | "type-is": "~1.6.17" 65 | } 66 | }, 67 | "buffer": { 68 | "version": "4.9.1", 69 | "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz", 70 | "integrity": "sha1-bRu2AbB6TvztlwlBMgkwJ8lbwpg=", 71 | "requires": { 72 | "base64-js": "^1.0.2", 73 | "ieee754": "^1.1.4", 74 | "isarray": "^1.0.0" 75 | } 76 | }, 77 | "bytes": { 78 | "version": "3.1.0", 79 | "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz", 80 | "integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==" 81 | }, 82 | "content-disposition": { 83 | "version": "0.5.3", 84 | "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.3.tgz", 85 | "integrity": "sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g==", 86 | "requires": { 87 | "safe-buffer": "5.1.2" 88 | } 89 | }, 90 | "content-type": { 91 | "version": "1.0.4", 92 | "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", 93 | "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==" 94 | }, 95 | "cookie": { 96 | "version": "0.4.0", 97 | "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.0.tgz", 98 | "integrity": "sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg==" 99 | }, 100 | "cookie-signature": { 101 | "version": "1.0.6", 102 | "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", 103 | "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" 104 | }, 105 | "crypto-js": { 106 | "version": "3.1.9-1", 107 
| "resolved": "https://registry.npmjs.org/crypto-js/-/crypto-js-3.1.9-1.tgz", 108 | "integrity": "sha1-/aGedh/Ad+Af+/3G6f38WeiAbNg=" 109 | }, 110 | "debug": { 111 | "version": "2.6.9", 112 | "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", 113 | "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", 114 | "requires": { 115 | "ms": "2.0.0" 116 | } 117 | }, 118 | "depd": { 119 | "version": "1.1.2", 120 | "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", 121 | "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=" 122 | }, 123 | "destroy": { 124 | "version": "1.0.4", 125 | "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz", 126 | "integrity": "sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=" 127 | }, 128 | "ee-first": { 129 | "version": "1.1.1", 130 | "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", 131 | "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" 132 | }, 133 | "encodeurl": { 134 | "version": "1.0.2", 135 | "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", 136 | "integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=" 137 | }, 138 | "escape-html": { 139 | "version": "1.0.3", 140 | "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", 141 | "integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=" 142 | }, 143 | "etag": { 144 | "version": "1.8.1", 145 | "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", 146 | "integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=" 147 | }, 148 | "express": { 149 | "version": "4.17.1", 150 | "resolved": "https://registry.npmjs.org/express/-/express-4.17.1.tgz", 151 | "integrity": "sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g==", 152 | "requires": { 153 | "accepts": "~1.3.7", 154 | "array-flatten": "1.1.1", 155 | "body-parser": "1.19.0", 156 | "content-disposition": "0.5.3", 157 | "content-type": "~1.0.4", 158 | "cookie": "0.4.0", 159 | "cookie-signature": "1.0.6", 160 | "debug": "2.6.9", 161 | "depd": "~1.1.2", 162 | "encodeurl": "~1.0.2", 163 | "escape-html": "~1.0.3", 164 | "etag": "~1.8.1", 165 | "finalhandler": "~1.1.2", 166 | "fresh": "0.5.2", 167 | "merge-descriptors": "1.0.1", 168 | "methods": "~1.1.2", 169 | "on-finished": "~2.3.0", 170 | "parseurl": "~1.3.3", 171 | "path-to-regexp": "0.1.7", 172 | "proxy-addr": "~2.0.5", 173 | "qs": "6.7.0", 174 | "range-parser": "~1.2.1", 175 | "safe-buffer": "5.1.2", 176 | "send": "0.17.1", 177 | "serve-static": "1.14.1", 178 | "setprototypeof": "1.1.1", 179 | "statuses": "~1.5.0", 180 | "type-is": "~1.6.18", 181 | "utils-merge": "1.0.1", 182 | "vary": "~1.1.2" 183 | } 184 | }, 185 | "finalhandler": { 186 | "version": "1.1.2", 187 | "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz", 188 | "integrity": "sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA==", 189 | "requires": { 190 | "debug": "2.6.9", 191 | "encodeurl": "~1.0.2", 192 | "escape-html": "~1.0.3", 193 | "on-finished": "~2.3.0", 194 | "parseurl": "~1.3.3", 195 | "statuses": "~1.5.0", 196 | "unpipe": "~1.0.0" 197 | } 198 | }, 199 | "forwarded": { 200 | "version": "0.1.2", 201 | "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", 202 | "integrity": "sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ=" 203 | }, 204 | "fresh": { 205 | "version": "0.5.2", 206 | "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", 207 | "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=" 208 | }, 
209 | "http-errors": { 210 | "version": "1.7.2", 211 | "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.7.2.tgz", 212 | "integrity": "sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg==", 213 | "requires": { 214 | "depd": "~1.1.2", 215 | "inherits": "2.0.3", 216 | "setprototypeof": "1.1.1", 217 | "statuses": ">= 1.5.0 < 2", 218 | "toidentifier": "1.0.0" 219 | } 220 | }, 221 | "iconv-lite": { 222 | "version": "0.4.24", 223 | "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", 224 | "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", 225 | "requires": { 226 | "safer-buffer": ">= 2.1.2 < 3" 227 | } 228 | }, 229 | "ieee754": { 230 | "version": "1.1.13", 231 | "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz", 232 | "integrity": "sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==" 233 | }, 234 | "inherits": { 235 | "version": "2.0.3", 236 | "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", 237 | "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" 238 | }, 239 | "ipaddr.js": { 240 | "version": "1.9.0", 241 | "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.0.tgz", 242 | "integrity": "sha512-M4Sjn6N/+O6/IXSJseKqHoFc+5FdGJ22sXqnjTpdZweHK64MzEPAyQZyEU3R/KRv2GLoa7nNtg/C2Ev6m7z+eA==" 243 | }, 244 | "isarray": { 245 | "version": "1.0.0", 246 | "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", 247 | "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" 248 | }, 249 | "js-cookie": { 250 | "version": "2.2.1", 251 | "resolved": "https://registry.npmjs.org/js-cookie/-/js-cookie-2.2.1.tgz", 252 | "integrity": "sha512-HvdH2LzI/EAZcUwA8+0nKNtWHqS+ZmijLA30RwZA0bo7ToCckjK5MkGhjED9KoRcXO6BaGI3I9UIzSA1FKFPOQ==" 253 | }, 254 | "media-typer": { 255 | "version": "0.3.0", 256 | "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", 257 | "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=" 258 | }, 259 | "merge-descriptors": { 260 | "version": "1.0.1", 261 | "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", 262 | "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=" 263 | }, 264 | "methods": { 265 | "version": "1.1.2", 266 | "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", 267 | "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=" 268 | }, 269 | "mime": { 270 | "version": "1.6.0", 271 | "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", 272 | "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==" 273 | }, 274 | "mime-db": { 275 | "version": "1.40.0", 276 | "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.40.0.tgz", 277 | "integrity": "sha512-jYdeOMPy9vnxEqFRRo6ZvTZ8d9oPb+k18PKoYNYUe2stVEBPPwsln/qWzdbmaIvnhZ9v2P+CuecK+fpUfsV2mA==" 278 | }, 279 | "mime-types": { 280 | "version": "2.1.24", 281 | "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.24.tgz", 282 | "integrity": "sha512-WaFHS3MCl5fapm3oLxU4eYDw77IQM2ACcxQ9RIxfaC3ooc6PFuBMGZZsYpvoXS5D5QTWPieo1jjLdAm3TBP3cQ==", 283 | "requires": { 284 | "mime-db": "1.40.0" 285 | } 286 | }, 287 | "ms": { 288 | "version": "2.0.0", 289 | "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", 290 | "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" 291 | }, 292 | "negotiator": { 293 | "version": "0.6.2", 294 | "resolved": 
"https://registry.npmjs.org/negotiator/-/negotiator-0.6.2.tgz", 295 | "integrity": "sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw==" 296 | }, 297 | "on-finished": { 298 | "version": "2.3.0", 299 | "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", 300 | "integrity": "sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=", 301 | "requires": { 302 | "ee-first": "1.1.1" 303 | } 304 | }, 305 | "parseurl": { 306 | "version": "1.3.3", 307 | "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", 308 | "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==" 309 | }, 310 | "path-to-regexp": { 311 | "version": "0.1.7", 312 | "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", 313 | "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" 314 | }, 315 | "proxy-addr": { 316 | "version": "2.0.5", 317 | "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.5.tgz", 318 | "integrity": "sha512-t/7RxHXPH6cJtP0pRG6smSr9QJidhB+3kXu0KgXnbGYMgzEnUxRQ4/LDdfOwZEMyIh3/xHb8PX3t+lfL9z+YVQ==", 319 | "requires": { 320 | "forwarded": "~0.1.2", 321 | "ipaddr.js": "1.9.0" 322 | } 323 | }, 324 | "qs": { 325 | "version": "6.7.0", 326 | "resolved": "https://registry.npmjs.org/qs/-/qs-6.7.0.tgz", 327 | "integrity": "sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ==" 328 | }, 329 | "range-parser": { 330 | "version": "1.2.1", 331 | "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", 332 | "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==" 333 | }, 334 | "raw-body": { 335 | "version": "2.4.0", 336 | "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.4.0.tgz", 337 | "integrity": "sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q==", 338 | "requires": { 339 | "bytes": "3.1.0", 340 | "http-errors": "1.7.2", 341 | "iconv-lite": "0.4.24", 342 | "unpipe": "1.0.0" 343 | } 344 | }, 345 | "safe-buffer": { 346 | "version": "5.1.2", 347 | "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", 348 | "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" 349 | }, 350 | "safer-buffer": { 351 | "version": "2.1.2", 352 | "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", 353 | "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" 354 | }, 355 | "send": { 356 | "version": "0.17.1", 357 | "resolved": "https://registry.npmjs.org/send/-/send-0.17.1.tgz", 358 | "integrity": "sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg==", 359 | "requires": { 360 | "debug": "2.6.9", 361 | "depd": "~1.1.2", 362 | "destroy": "~1.0.4", 363 | "encodeurl": "~1.0.2", 364 | "escape-html": "~1.0.3", 365 | "etag": "~1.8.1", 366 | "fresh": "0.5.2", 367 | "http-errors": "~1.7.2", 368 | "mime": "1.6.0", 369 | "ms": "2.1.1", 370 | "on-finished": "~2.3.0", 371 | "range-parser": "~1.2.1", 372 | "statuses": "~1.5.0" 373 | }, 374 | "dependencies": { 375 | "ms": { 376 | "version": "2.1.1", 377 | "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", 378 | "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==" 379 | } 380 | } 381 | }, 382 | "serve-static": { 383 | 
"version": "1.14.1", 384 | "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.14.1.tgz", 385 | "integrity": "sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg==", 386 | "requires": { 387 | "encodeurl": "~1.0.2", 388 | "escape-html": "~1.0.3", 389 | "parseurl": "~1.3.3", 390 | "send": "0.17.1" 391 | } 392 | }, 393 | "setprototypeof": { 394 | "version": "1.1.1", 395 | "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.1.tgz", 396 | "integrity": "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==" 397 | }, 398 | "statuses": { 399 | "version": "1.5.0", 400 | "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", 401 | "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=" 402 | }, 403 | "toidentifier": { 404 | "version": "1.0.0", 405 | "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz", 406 | "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==" 407 | }, 408 | "type-is": { 409 | "version": "1.6.18", 410 | "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", 411 | "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", 412 | "requires": { 413 | "media-typer": "0.3.0", 414 | "mime-types": "~2.1.24" 415 | } 416 | }, 417 | "unpipe": { 418 | "version": "1.0.0", 419 | "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", 420 | "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=" 421 | }, 422 | "utils-merge": { 423 | "version": "1.0.1", 424 | "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", 425 | "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=" 426 | }, 427 | "vary": { 428 | "version": "1.1.2", 429 | "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", 430 | "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=" 431 | } 432 | } 433 | } 434 | --------------------------------------------------------------------------------