├── .husky ├── .gitignore └── pre-commit ├── .env.develop ├── resolvers ├── queries │ ├── response.vtl │ └── query.req.vtl └── mutations │ ├── deleteList.req.vtl │ ├── deleteNote.req.vtl │ ├── deleteUser.req.vtl │ ├── updateList.req.vtl │ ├── updateNote.req.vtl │ ├── updateUser.req.vtl │ ├── createList.req.vtl │ ├── createNote.req.vtl │ ├── createUser.req.vtl │ └── response.vtl ├── .eslintignore ├── utils ├── constants.js ├── dbUtils.js └── index.js ├── resources ├── config │ ├── elastic-ip.yml │ ├── internet-gateway.yml │ ├── nat-gateway.yml │ ├── secrets.yml │ ├── security-groups.yml │ ├── vpc.yml │ ├── db-cluster.yml │ ├── route-private.yml │ ├── route-public.yml │ ├── subnet.yml │ └── roles.yml ├── lambdas │ ├── datasources.yml │ └── functions.yml ├── rds │ └── datasources.yml └── mapping-templates │ ├── queries.yml │ └── mutations.yml ├── .prettierrc ├── .env ├── migrations ├── resources │ ├── v1 │ │ ├── 01_updated_at_trigger.sql │ │ └── 02_create_users.sql │ ├── v2 │ │ └── 03_create_lists.sql │ └── v3 │ │ └── 04_create_notes.sql ├── 20210628063726-create-users.js ├── 20210628063731-create-lists.js ├── 20210628063734-create-notes.js └── utils │ └── index.js ├── scripts ├── get-host.js ├── post-deployment.js └── setup-local.sh ├── babel.config.js ├── functions ├── queries │ ├── Lists │ │ ├── package.json │ │ ├── index.js │ │ └── yarn.lock │ ├── Notes │ │ ├── package.json │ │ ├── index.js │ │ └── yarn.lock │ └── Users │ │ ├── package.json │ │ ├── index.js │ │ └── yarn.lock └── database │ └── migrate │ ├── index.js │ └── package.json ├── .editorconfig ├── .github └── workflows │ ├── ci.yml │ └── cd.yml ├── .gitignore ├── models ├── users.js ├── lists.js ├── notes.js └── index.js ├── .eslintrc.js ├── webpack.config.js ├── config └── config.js ├── schema.graphql ├── serverless.yml ├── package.json ├── README.md └── postman └── collections.json /.husky/.gitignore: -------------------------------------------------------------------------------- 1 | _ 2 | 
-------------------------------------------------------------------------------- /.env.develop: -------------------------------------------------------------------------------- 1 | STAGE=dev 2 | -------------------------------------------------------------------------------- /resolvers/queries/response.vtl: -------------------------------------------------------------------------------- 1 | $util.toJson($ctx.result) -------------------------------------------------------------------------------- /.husky/pre-commit: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | . "$(dirname "$0")/_/husky.sh" 3 | yarn lint:staged 4 | 5 | -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | /node-modules/** 2 | /dist/** 3 | /webpack/** 4 | /shellscripts/templates/** 5 | -------------------------------------------------------------------------------- /utils/constants.js: -------------------------------------------------------------------------------- 1 | export const DEFAULT_LIMIT = 10; 2 | export const DEFAULT_OFFSET = 0; 3 | export const MAX_LIMIT = 1000000; 4 | -------------------------------------------------------------------------------- /resources/config/elastic-ip.yml: -------------------------------------------------------------------------------- 1 | Resources: 2 | ElasticIpLambda: 3 | Type: AWS::EC2::EIP 4 | Properties: 5 | Domain: vpc -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "printWidth": 120, 3 | "tabWidth": 2, 4 | "useTabs": false, 5 | "semi": true, 6 | "singleQuote": true, 7 | "trailingComma": "none" 8 | } 9 | -------------------------------------------------------------------------------- /.env: 
-------------------------------------------------------------------------------- 1 | NAME=appsyncrdstodo 2 | REGION=ap-south-1 3 | RDS_PREFIX=appsync_rds_todo 4 | RDS_USERNAME=admin 5 | DB_PORT=5432 6 | DB_PASSWORD=password 7 | DB_DIALECT=postgres 8 | ACCOUNT_ID= 9 | 10 | -------------------------------------------------------------------------------- /resolvers/mutations/deleteList.req.vtl: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2018-05-29", 3 | "statements": ["UPDATE lists set deleted_at=NOW() WHERE id=$ctx.args.id", "SELECT * FROM lists WHERE id=$ctx.args.id"] 4 | } -------------------------------------------------------------------------------- /resolvers/mutations/deleteNote.req.vtl: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2018-05-29", 3 | "statements": ["UPDATE notes set deleted_at=NOW() WHERE id=$ctx.args.id", "SELECT * FROM notes WHERE id=$ctx.args.id"] 4 | } -------------------------------------------------------------------------------- /resolvers/mutations/deleteUser.req.vtl: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2018-05-29", 3 | "statements": ["UPDATE users set deleted_at=NOW() WHERE id=$ctx.args.id", "SELECT * FROM users WHERE id=$ctx.args.id"] 4 | } -------------------------------------------------------------------------------- /migrations/resources/v1/01_updated_at_trigger.sql: -------------------------------------------------------------------------------- 1 | CREATE OR REPLACE FUNCTION trigger_set_timestamp() 2 | RETURNS TRIGGER AS $$ 3 | BEGIN 4 | NEW.updated_at = NOW(); 5 | RETURN NEW; 6 | END; 7 | $$ LANGUAGE plpgsql; -------------------------------------------------------------------------------- /scripts/get-host.js: -------------------------------------------------------------------------------- 1 | module.exports = function (serverless) { 2 | if 
(serverless.variables.options.offline) { 3 | return 'localhost'; 4 | } 5 | return { 'Fn::GetAtt': ['RDSCluster', 'Endpoint.Address'] }; 6 | }; 7 | -------------------------------------------------------------------------------- /resources/config/internet-gateway.yml: -------------------------------------------------------------------------------- 1 | Resources: 2 | ServerlessInternetGateway: 3 | Type: AWS::EC2::InternetGateway 4 | Properties: 5 | Tags: 6 | - Key: 'Name' 7 | Value: 'ServerlessInternetGateway' 8 | -------------------------------------------------------------------------------- /migrations/20210628063726-create-users.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | up: (queryInterface) => { 3 | const { migrate } = require('./migrateUtils'); 4 | return migrate(__filename, queryInterface); 5 | }, 6 | down: () => Promise.reject(new Error('error')) 7 | }; 8 | -------------------------------------------------------------------------------- /migrations/20210628063731-create-lists.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | up: (queryInterface) => { 3 | const { migrate } = require('./migrateUtils'); 4 | return migrate(__filename, queryInterface); 5 | }, 6 | down: () => Promise.reject(new Error('error')) 7 | }; 8 | -------------------------------------------------------------------------------- /migrations/20210628063734-create-notes.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | up: (queryInterface) => { 3 | const { migrate } = require('./migrateUtils'); 4 | return migrate(__filename, queryInterface); 5 | }, 6 | down: () => Promise.reject(new Error('error')) 7 | }; 8 | -------------------------------------------------------------------------------- /resources/config/nat-gateway.yml: -------------------------------------------------------------------------------- 1 | 
Resources: 2 | ServerlessNatGateway: 3 | Type: AWS::EC2::NatGateway 4 | Properties: 5 | AllocationId: 6 | Fn::GetAtt: 7 | - ElasticIpLambda 8 | - AllocationId 9 | SubnetId: 10 | Ref: ServerlessSubnetA 11 | -------------------------------------------------------------------------------- /scripts/post-deployment.js: -------------------------------------------------------------------------------- 1 | async function migrate(serverless) { 2 | return `npx sls invoke --function databaseMigrate --stage=${serverless.variables.options.stage}`; 3 | } 4 | async function postDeployment(serverless) { 5 | return await migrate(serverless); 6 | } 7 | module.exports = postDeployment; 8 | -------------------------------------------------------------------------------- /babel.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | presets: [ 3 | [ 4 | '@babel/preset-env', 5 | { 6 | useBuiltIns: 'entry', 7 | targets: { 8 | node: 'current' 9 | } 10 | } 11 | ] 12 | ], 13 | plugins: ['@babel/plugin-proposal-class-properties'] 14 | }; 15 | -------------------------------------------------------------------------------- /functions/queries/Lists/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Lists", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "pg": "^8.6.0" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /functions/queries/Notes/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Notes", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 
11 | "dependencies": { 12 | "pg": "^8.6.0" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /functions/queries/Users/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Users", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "pg": "^8.6.0" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /functions/database/migrate/index.js: -------------------------------------------------------------------------------- 1 | import 'source-map-support/register'; 2 | /** 3 | * 4 | * DatabaseMigrate 5 | * 6 | */ 7 | import shell from 'shelljs'; 8 | 9 | exports.handler = async (event, context, callback) => { 10 | console.log(JSON.stringify(event)); 11 | shell.exec(`node_modules/sequelize-cli/lib/sequelize db:migrate --config config/config.js`); 12 | }; 13 | -------------------------------------------------------------------------------- /functions/database/migrate/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "database-migrations", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "pg": "^8.6.0", 13 | "sequelize": "^6.6.4" 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /resources/lambdas/datasources.yml: -------------------------------------------------------------------------------- 1 | - type: AWS_LAMBDA 2 | name: Lambda_notes 3 | description: "Get notes" 4 | config: 5 | functionName: notes 6 | - type: AWS_LAMBDA 7 | name: Lambda_lists 8 | description: "Get lists" 
9 | config: 10 | functionName: lists 11 | - type: AWS_LAMBDA 12 | name: Lambda_users 13 | description: "Get users" 14 | config: 15 | functionName: users -------------------------------------------------------------------------------- /resources/lambdas/functions.yml: -------------------------------------------------------------------------------- 1 | notes: 2 | handler: functions/queries/Notes/index.handler 3 | role: LambdaServiceRole 4 | users: 5 | handler: functions/queries/Users/index.handler 6 | role: LambdaServiceRole 7 | lists: 8 | handler: functions/queries/Lists/index.handler 9 | role: LambdaServiceRole 10 | databaseMigrate: 11 | handler: functions/database/migrate/index.handler 12 | role: LambdaServiceRole -------------------------------------------------------------------------------- /resources/rds/datasources.yml: -------------------------------------------------------------------------------- 1 | - type: RELATIONAL_DATABASE 2 | name: POSTGRES_RDS 3 | description: "Database" 4 | config: 5 | dbClusterIdentifier: { Ref: RDSCluster } # The identifier for RDSCluster. 
Where RDSCluster is the cluster defined in Resources 6 | databaseName: appsync_rds_todo_${env:STAGE} 7 | awsSecretStoreArn: !Ref RDSInstanceSecret 8 | serviceRoleArn: { Fn::GetAtt: [AppSyncRDSServiceRole, Arn] } 9 | region: ${env:REGION} 10 | -------------------------------------------------------------------------------- /resources/mapping-templates/queries.yml: -------------------------------------------------------------------------------- 1 | - type: Query 2 | field: notes 3 | request: "queries/query.req.vtl" 4 | response: "queries/response.vtl" 5 | dataSource: Lambda_notes 6 | 7 | - type: Query 8 | field: lists 9 | request: "queries/query.req.vtl" 10 | response: "queries/response.vtl" 11 | dataSource: Lambda_lists 12 | 13 | - type: Query 14 | field: users 15 | request: "queries/query.req.vtl" 16 | response: "queries/response.vtl" 17 | dataSource: Lambda_users -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | charset = utf-8 5 | trim_trailing_whitespace = true 6 | end_of_line = lf 7 | insert_final_newline = true 8 | indent_style = space 9 | indent_size = 2 10 | 11 | 12 | [*.{js,txt,md,css,html,php,py,json,yml,sass,scss,pug}] 13 | indent_style = space 14 | indent_size = 2 15 | 16 | [*.jsx] 17 | indent_style = space 18 | indent_size = 4 19 | 20 | [*.graphql] 21 | indent_style = space 22 | indent_size = 4 23 | 24 | [*.md] 25 | trim_trailing_whitespace = false 26 | -------------------------------------------------------------------------------- /resources/config/secrets.yml: -------------------------------------------------------------------------------- 1 | Resources: 2 | RDSInstanceSecret: 3 | Type: AWS::SecretsManager::Secret 4 | Properties: 5 | Description: 'Secret for the RDS instance' 6 | SecretString: '{"username":"${env:RDS_USERNAME}_${env:STAGE}","password":"${env:DB_PASSWORD}"}' 7 | 
SecretRDSInstanceAttachment: 8 | Type: AWS::SecretsManager::SecretTargetAttachment 9 | Properties: 10 | SecretId: !Ref RDSInstanceSecret 11 | TargetId: !Ref RDSCluster 12 | TargetType: AWS::RDS::DBCluster -------------------------------------------------------------------------------- /migrations/resources/v1/02_create_users.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE IF NOT EXISTS users ( 2 | id SERIAL PRIMARY KEY, 3 | name TEXT NOT NULL, 4 | user_ref TEXT NOT NULL, 5 | created_at timestamp NULL DEFAULT CURRENT_TIMESTAMP, 6 | updated_at timestamp NULL DEFAULT CURRENT_TIMESTAMP, 7 | deleted_at timestamp NULL 8 | ); 9 | 10 | CREATE INDEX IF NOT EXISTS users__idx__user_ref ON users (user_ref); 11 | 12 | CREATE TRIGGER set_timestamp 13 | BEFORE UPDATE ON users 14 | FOR EACH ROW 15 | EXECUTE PROCEDURE trigger_set_timestamp(); -------------------------------------------------------------------------------- /resources/config/security-groups.yml: -------------------------------------------------------------------------------- 1 | Resources: 2 | ServerlessSecurityGroup: 3 | DependsOn: 4 | - ServerlessVPC 5 | Type: AWS::EC2::SecurityGroup 6 | Properties: 7 | GroupDescription: SecurityGroup for Serverless Functions 8 | VpcId: 9 | Ref: ServerlessVPC 10 | SecurityGroupIngress: 11 | - IpProtocol: tcp 12 | FromPort: '0' 13 | ToPort: '65535' 14 | CidrIp: '0.0.0.0/0' 15 | Tags: 16 | - Key: 'Name' 17 | Value: 'ServerlessSecurityGroup' 18 | -------------------------------------------------------------------------------- /resources/config/vpc.yml: -------------------------------------------------------------------------------- 1 | Resources: 2 | ServerlessVPC: 3 | Type: AWS::EC2::VPC 4 | Properties: 5 | CidrBlock: ${self:custom.AURORA.VPC_CIDR}.0.0.0/16 6 | EnableDnsSupport: true 7 | EnableDnsHostnames: true 8 | InstanceTenancy: default 9 | Tags: 10 | - Key: 'Name' 11 | Value: 'ServerlessVPC' 12 | ServerlessVPCGA: 13 | Type: 
AWS::EC2::VPCGatewayAttachment 14 | Properties: 15 | VpcId: 16 | Ref: ServerlessVPC 17 | InternetGatewayId: 18 | Ref: ServerlessInternetGateway -------------------------------------------------------------------------------- /functions/queries/Notes/index.js: -------------------------------------------------------------------------------- 1 | import 'source-map-support/register'; 2 | /** 3 | * 4 | * Notes 5 | * 6 | */ 7 | import { logHandler, success, failure } from '@utils'; 8 | import db from '@models'; 9 | import { findAll } from '@utils/dbUtils'; 10 | 11 | exports.handler = async (event, context, callback) => 12 | logHandler(event, callback, async () => { 13 | try { 14 | return success(context.done || callback, await findAll(db.notes, event)); 15 | } catch (err) { 16 | return failure(context.fail || callback, err); 17 | } 18 | }); 19 | -------------------------------------------------------------------------------- /functions/queries/Users/index.js: -------------------------------------------------------------------------------- 1 | import 'source-map-support/register'; 2 | /** 3 | * 4 | * Users 5 | * 6 | */ 7 | import { logHandler, success, failure } from '@utils'; 8 | import db from '@models'; 9 | import { findAll } from '@utils/dbUtils'; 10 | 11 | exports.handler = async (event, context, callback) => 12 | logHandler(event, callback, async () => { 13 | try { 14 | return success(context.done || callback, await findAll(db.users, event)); 15 | } catch (err) { 16 | return failure(context.fail || callback, err); 17 | } 18 | }); 19 | -------------------------------------------------------------------------------- /migrations/resources/v2/03_create_lists.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE IF NOT EXISTS lists ( 2 | id SERIAL PRIMARY KEY, 3 | name TEXT NOT NULL, 4 | user_id INT NOT NULL, 5 | created_at timestamp NULL DEFAULT CURRENT_TIMESTAMP, 6 | updated_at timestamp NULL DEFAULT CURRENT_TIMESTAMP, 7 
| deleted_at timestamp NULL, 8 | CONSTRAINT lists__fk_user_id FOREIGN KEY (user_id) REFERENCES users (id) 9 | ); 10 | 11 | CREATE INDEX IF NOT EXISTS list__idx__name ON lists (name); 12 | 13 | 14 | CREATE TRIGGER set_timestamp 15 | BEFORE UPDATE ON lists 16 | FOR EACH ROW 17 | EXECUTE PROCEDURE trigger_set_timestamp(); -------------------------------------------------------------------------------- /resolvers/queries/query.req.vtl: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2017-02-28", 3 | "operation": "Invoke", 4 | "payload": { 5 | "ctx": $util.toJson($ctx), 6 | "selectionSetList": $util.toJson($context.info.selectionSetList), 7 | "selectionSetGraphQL": $util.toJson($context.info.selectionSetGraphQL), 8 | "typeName": "Query", 9 | "field": "notes", 10 | "arguments": $util.toJson($ctx.arguments), 11 | "identity": $util.toJson($ctx.identity), 12 | "source": $util.toJson($ctx.source), 13 | "request": $util.toJson($ctx.request), 14 | "prev": $util.toJson($ctx.prev) 15 | } 16 | } -------------------------------------------------------------------------------- /functions/queries/Lists/index.js: -------------------------------------------------------------------------------- 1 | import 'source-map-support/register'; 2 | /** 3 | * 4 | * Lists 5 | * 6 | */ 7 | import { logHandler, success, failure } from '@utils'; 8 | import db from '@models'; 9 | import { findAll } from '@utils/dbUtils'; 10 | 11 | exports.handler = async (event, context, callback) => 12 | logHandler(event, callback, async () => { 13 | try { 14 | const lists = await findAll(db.lists, event); 15 | console.log(JSON.stringify(lists)); 16 | return success(context.done || callback, lists); 17 | } catch (err) { 18 | return failure(context.fail || callback, err); 19 | } 20 | }); 21 | -------------------------------------------------------------------------------- /scripts/setup-local.sh: 
-------------------------------------------------------------------------------- 1 | 2 | #!/bin/bash +x 3 | 4 | export ENVIRONMENT_NAME=local 5 | export DB_DIALECT=postgres 6 | export DB_NAME=appsync_rds_todo_dev 7 | export DB_HOST=localhost 8 | export DB_USERNAME=admin_dev 9 | export DB_PASSWORD=password 10 | export DB_PORT=5432 11 | export PGPASSWORD=password 12 | 13 | psql -c "CREATE ROLE admin_dev LOGIN CREATEDB PASSWORD 'password';" 14 | 15 | npx sequelize db:create 16 | 17 | psql -U admin_dev -c "GRANT ALL PRIVILEGES on DATABASE appsync_rds_todo_dev to admin_dev;" 18 | psql -c "CREATE DATABASE appsync_rds_todo_dev;" 19 | 20 | npx sequelize db:drop 21 | npx sequelize db:create 22 | npx sequelize db:migrate 23 | yarn start-offline 24 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: appsync-rds-todo CI develop 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - develop 7 | push: 8 | branches: 9 | - develop 10 | 11 | jobs: 12 | build: 13 | runs-on: ubuntu-latest 14 | strategy: 15 | matrix: 16 | node-version: [13.x] 17 | steps: 18 | - uses: actions/checkout@v2 19 | - name: Setup environment 20 | uses: actions/setup-node@v2-beta 21 | with: 22 | node-version: "12" 23 | - name: Install dependencies 24 | run: npm i 25 | - name: Lint 26 | run: npm run lint:eslint . 
27 | - name: Build 28 | run: npm run build 29 | -------------------------------------------------------------------------------- /migrations/resources/v3/04_create_notes.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE IF NOT EXISTS notes ( 2 | id SERIAL, 3 | note TEXT NOT NULL, 4 | deadline timestamp WITH time zone NOT NULL, 5 | list_id INT NOT NULL, 6 | done SMALLINT NOT NULL DEFAULT 0, 7 | created_at timestamp NULL DEFAULT CURRENT_TIMESTAMP, 8 | updated_at timestamp NULL DEFAULT CURRENT_TIMESTAMP, 9 | deleted_at timestamp NULL, 10 | 11 | CONSTRAINT notes__fk_list_id FOREIGN KEY ( 12 | list_id 13 | ) REFERENCES lists ( 14 | id 15 | ) ON DELETE CASCADE ON UPDATE CASCADE 16 | ); 17 | 18 | CREATE INDEX IF NOT EXISTS notes__idx__list_id ON notes (list_id); 19 | CREATE INDEX IF NOT EXISTS notes__idx__note ON notes ("note"); 20 | 21 | CREATE TRIGGER set_timestamp 22 | BEFORE UPDATE ON notes 23 | FOR EACH ROW 24 | EXECUTE PROCEDURE trigger_set_timestamp(); -------------------------------------------------------------------------------- /resources/config/db-cluster.yml: -------------------------------------------------------------------------------- 1 | Resources: 2 | RDSCluster: 3 | Type: AWS::RDS::DBCluster 4 | DependsOn: 5 | - ServerlessSecurityGroup 6 | Properties: 7 | MasterUsername: ${env:RDS_USERNAME}_${env:STAGE} 8 | MasterUserPassword: ${env:DB_PASSWORD} 9 | Port: ${env:DB_PORT} 10 | DBSubnetGroupName: 11 | Ref: ServerlessSubnetGroup 12 | Engine: aurora-postgresql 13 | EnableHttpEndpoint: true 14 | EngineVersion: '10.7' 15 | EngineMode: serverless 16 | ScalingConfiguration: 17 | AutoPause: true 18 | MaxCapacity: 4 19 | MinCapacity: 2 20 | SecondsUntilAutoPause: 300 21 | DatabaseName: ${self:custom.AURORA.DB_NAME} 22 | BackupRetentionPeriod: 3 23 | DBClusterParameterGroupName: 'default.aurora-postgresql10' 24 | VpcSecurityGroupIds: 25 | - !Ref 'ServerlessSecurityGroup' 26 | 
-------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | tokens.json 2 | .serverless 3 | amplify 4 | .dynamodb 5 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 6 | .idea/ 7 | # dependencies 8 | /node_modules 9 | /.pnp 10 | .pnp.js 11 | .history 12 | 13 | # testing 14 | /coverage 15 | 16 | # production 17 | /build 18 | .serverless 19 | # misc 20 | .DS_Store 21 | .env.local 22 | .env.development.local 23 | .env.test.local 24 | .env.production.local 25 | .webpack 26 | amplify/ 27 | 28 | npm-debug.log* 29 | yarn-debug.log* 30 | yarn-error.log* 31 | 32 | # #amplify 33 | amplify/\#current-cloud-backend 34 | amplify/.config/local-* 35 | amplify/mock-data 36 | amplify/backend/amplify-meta.json 37 | amplify/backend/awscloudformation 38 | # build/ 39 | # dist/ 40 | node_modules/ 41 | awsconfiguration.json 42 | amplifyconfiguration.json 43 | amplify-gradle-config.json 44 | amplifyxc.config 45 | 46 | #vscode 47 | .vscode/ 48 | -------------------------------------------------------------------------------- /resolvers/mutations/updateList.req.vtl: -------------------------------------------------------------------------------- 1 | #set( $update = "" ) 2 | #set( $equals = "=" ) 3 | ## 1 4 | #foreach( $entry in $ctx.args.input.keySet() ) 5 | ## 2 6 | #set( $cur = $ctx.args.input[$entry] ) 7 | #set( $regex = "([a-z])([A-Z]+)") 8 | #set( $replacement = "$1_$2") 9 | #set( $toSnake = $entry.replaceAll($regex, $replacement).toLowerCase() ) 10 | ## 3 11 | #if( $util.isBoolean($cur) ) 12 | #if( $cur ) 13 | #set ( $cur = "1" ) 14 | #else 15 | #set ( $cur = "0" ) 16 | #end 17 | #end 18 | ## 4 19 | #if ( $util.isNullOrEmpty($update) ) 20 | #set($update = "$toSnake$equals'$cur'" ) 21 | #else 22 | #set($update = "$update,$toSnake$equals'$cur'" ) 23 | #end 24 | #end 25 | { 26 | "version": "2018-05-29", 27 | "statements": ["UPDATE 
lists SET $update WHERE id=$ctx.args.input.id", "SELECT * FROM lists WHERE id=$ctx.args.input.id"] 28 | } -------------------------------------------------------------------------------- /resolvers/mutations/updateNote.req.vtl: -------------------------------------------------------------------------------- 1 | #set( $update = "" ) 2 | #set( $equals = "=" ) 3 | ## 1 4 | #foreach( $entry in $ctx.args.input.keySet() ) 5 | #set( $cur = $ctx.args.input[$entry] ) 6 | #set( $regex = "([a-z])([A-Z]+)") 7 | #set( $replacement = "$1_$2") 8 | #set( $toSnake = $entry.replaceAll($regex, $replacement).toLowerCase() ) 9 | ## 2 10 | #if( $util.isBoolean($cur) ) 11 | #if( $cur ) 12 | #set ( $cur = "1" ) 13 | #else 14 | #set ( $cur = "0" ) 15 | #end 16 | #end 17 | ## 3 18 | #if ( $util.isNullOrEmpty($update) ) 19 | #set($update = "$toSnake$equals'$cur'" ) 20 | #else 21 | ## 4 22 | #set($update = "$update,$toSnake$equals'$cur'" ) 23 | #end 24 | #end 25 | { 26 | "version": "2018-05-29", 27 | "statements": ["UPDATE notes SET $update WHERE id=$ctx.args.input.id", "SELECT * FROM notes WHERE id=$ctx.args.input.id"] 28 | } -------------------------------------------------------------------------------- /resolvers/mutations/updateUser.req.vtl: -------------------------------------------------------------------------------- 1 | #set( $update = "" ) 2 | #set( $equals = "=" ) 3 | ## 1 4 | #foreach( $entry in $ctx.args.input.keySet() ) 5 | ## 2 6 | #set( $cur = $ctx.args.input[$entry] ) 7 | #set( $regex = "([a-z])([A-Z]+)") 8 | #set( $replacement = "$1_$2") 9 | #set( $toSnake = $entry.replaceAll($regex, $replacement).toLowerCase() ) 10 | ## 3 11 | #if( $util.isBoolean($cur) ) 12 | #if( $cur ) 13 | #set ( $cur = "1" ) 14 | #else 15 | #set ( $cur = "0" ) 16 | #end 17 | #end 18 | ## 4 19 | #if ( $util.isNullOrEmpty($update) ) 20 | #set($update = "$toSnake$equals'$cur'" ) 21 | #else 22 | #set($update = "$update,$toSnake$equals'$cur'" ) 23 | #end 24 | #end 25 | { 26 | "version": "2018-05-29", 27 | 
"statements": ["UPDATE users SET $update WHERE id=$ctx.args.input.id", "SELECT * FROM users WHERE id=$ctx.args.input.id"] 28 | } -------------------------------------------------------------------------------- /models/users.js: -------------------------------------------------------------------------------- 1 | module.exports = function (sequelize, DataTypes) { 2 | return sequelize.define( 3 | 'users', 4 | { 5 | id: { 6 | autoIncrement: true, 7 | type: DataTypes.INTEGER, 8 | allowNull: false, 9 | primaryKey: true 10 | }, 11 | name: { 12 | type: DataTypes.TEXT, 13 | allowNull: false 14 | }, 15 | userRef: { 16 | field: 'user_ref', 17 | type: DataTypes.TEXT, 18 | allowNull: false 19 | } 20 | }, 21 | { 22 | sequelize, 23 | tableName: 'users', 24 | schema: 'public', 25 | timestamps: true, 26 | underscored: true, 27 | paranoid: true, 28 | indexes: [ 29 | { 30 | name: 'users__idx__user_ref', 31 | fields: [{ name: 'user_ref' }] 32 | }, 33 | { 34 | name: 'users_pkey', 35 | unique: true, 36 | fields: [{ name: 'id' }] 37 | } 38 | ] 39 | } 40 | ); 41 | }; 42 | -------------------------------------------------------------------------------- /models/lists.js: -------------------------------------------------------------------------------- 1 | module.exports = function (sequelize, DataTypes) { 2 | return sequelize.define( 3 | 'lists', 4 | { 5 | id: { 6 | autoIncrement: true, 7 | type: DataTypes.INTEGER, 8 | allowNull: false, 9 | primaryKey: true 10 | }, 11 | name: { 12 | type: DataTypes.TEXT, 13 | allowNull: false 14 | }, 15 | userId: { 16 | field: 'user_id', 17 | type: DataTypes.INTEGER, 18 | allowNull: false, 19 | references: { 20 | model: 'users', 21 | key: 'id' 22 | } 23 | } 24 | }, 25 | { 26 | sequelize, 27 | tableName: 'lists', 28 | schema: 'public', 29 | timestamps: true, 30 | paranoid: true, 31 | underscored: true, 32 | indexes: [ 33 | { 34 | name: 'list__idx__name', 35 | fields: [{ name: 'name' }] 36 | }, 37 | { 38 | name: 'lists_pkey', 39 | unique: true, 40 | fields: [{ 
name: 'id' }] 41 | } 42 | ] 43 | } 44 | ); 45 | }; 46 | -------------------------------------------------------------------------------- /models/notes.js: -------------------------------------------------------------------------------- 1 | module.exports = function (sequelize, DataTypes) { 2 | return sequelize.define( 3 | 'notes', 4 | { 5 | id: { 6 | autoIncrement: true, 7 | type: DataTypes.INTEGER, 8 | allowNull: false, 9 | primaryKey: true 10 | }, 11 | note: { 12 | type: DataTypes.TEXT, 13 | allowNull: false 14 | }, 15 | deadline: { 16 | type: DataTypes.DATE, 17 | allowNull: false 18 | }, 19 | listId: { 20 | field: 'list_id', 21 | type: DataTypes.INTEGER, 22 | allowNull: false, 23 | references: { 24 | model: 'lists', 25 | key: 'id' 26 | } 27 | }, 28 | done: { 29 | type: DataTypes.SMALLINT, 30 | allowNull: false, 31 | references: { 32 | model: 'lists', 33 | key: 'id' 34 | } 35 | } 36 | }, 37 | { 38 | sequelize, 39 | tableName: 'notes', 40 | schema: 'public', 41 | paranoid: true, 42 | timestamps: true, 43 | underscored: true 44 | } 45 | ); 46 | }; 47 | -------------------------------------------------------------------------------- /resolvers/mutations/createList.req.vtl: -------------------------------------------------------------------------------- 1 | #set( $cols = [] ) 2 | #set( $vals = [] ) 3 | #foreach( $entry in $ctx.args.input.keySet() ) 4 | #set( $regex = "([a-z])([A-Z]+)") 5 | #set( $replacement = "$1_$2") 6 | #set( $toSnake = $entry.replaceAll($regex, $replacement).toLowerCase() ) 7 | #set( $discard = $cols.add("$toSnake") ) 8 | #if( $util.isBoolean($ctx.args.input[$entry]) ) 9 | #if( $ctx.args.input[$entry] ) 10 | #set( $discard = $vals.add("1") ) 11 | #else 12 | #set( $discard = $vals.add("0") ) 13 | #end 14 | #else 15 | #set( $discard = $vals.add("'$ctx.args.input[$entry]'") ) 16 | #end 17 | #end 18 | 19 | #set( $valStr = $vals.toString().replace("[","(").replace("]",")") ) 20 | #set( $colStr = $cols.toString().replace("[","(").replace("]",")") ) 21 
| #if ( $valStr.substring(0, 1) != '(' ) 22 | #set( $valStr = "($valStr)" ) 23 | #end 24 | #if ( $colStr.substring(0, 1) != '(' ) 25 | #set( $colStr = "($colStr)" ) 26 | #end 27 | { 28 | "version": "2018-05-29", 29 | "statements": ["INSERT INTO lists $colStr VALUES $valStr", "SELECT * FROM lists ORDER BY id DESC LIMIT 1"] 30 | } -------------------------------------------------------------------------------- /resolvers/mutations/createNote.req.vtl: -------------------------------------------------------------------------------- 1 | #set( $cols = [] ) 2 | #set( $vals = [] ) 3 | #foreach( $entry in $ctx.args.input.keySet() ) 4 | #set( $regex = "([a-z])([A-Z]+)") 5 | #set( $replacement = "$1_$2") 6 | #set( $toSnake = $entry.replaceAll($regex, $replacement).toLowerCase() ) 7 | #set( $discard = $cols.add("$toSnake") ) 8 | #if( $util.isBoolean($ctx.args.input[$entry]) ) 9 | #if( $ctx.args.input[$entry] ) 10 | #set( $discard = $vals.add("1") ) 11 | #else 12 | #set( $discard = $vals.add("0") ) 13 | #end 14 | #else 15 | #set( $discard = $vals.add("'$ctx.args.input[$entry]'") ) 16 | #end 17 | #end 18 | 19 | #set( $valStr = $vals.toString().replace("[","(").replace("]",")") ) 20 | #set( $colStr = $cols.toString().replace("[","(").replace("]",")") ) 21 | #if ( $valStr.substring(0, 1) != '(' ) 22 | #set( $valStr = "($valStr)" ) 23 | #end 24 | #if ( $colStr.substring(0, 1) != '(' ) 25 | #set( $colStr = "($colStr)" ) 26 | #end 27 | { 28 | "version": "2018-05-29", 29 | "statements": ["INSERT INTO notes $colStr VALUES $valStr", "SELECT * FROM notes ORDER BY id DESC LIMIT 1"] 30 | } -------------------------------------------------------------------------------- /resolvers/mutations/createUser.req.vtl: -------------------------------------------------------------------------------- 1 | #set( $cols = [] ) 2 | #set( $vals = [] ) 3 | #foreach( $entry in $ctx.args.input.keySet() ) 4 | #set( $regex = "([a-z])([A-Z]+)") 5 | #set( $replacement = "$1_$2") 6 | #set( $toSnake = 
$entry.replaceAll($regex, $replacement).toLowerCase() ) 7 | #set( $discard = $cols.add("$toSnake") ) 8 | #if( $util.isBoolean($ctx.args.input[$entry]) ) 9 | #if( $ctx.args.input[$entry] ) 10 | #set( $discard = $vals.add("1") ) 11 | #else 12 | #set( $discard = $vals.add("0") ) 13 | #end 14 | #else 15 | #set( $discard = $vals.add("'$ctx.args.input[$entry]'") ) 16 | #end 17 | #end 18 | 19 | #set( $valStr = $vals.toString().replace("[","(").replace("]",")") ) 20 | #set( $colStr = $cols.toString().replace("[","(").replace("]",")") ) 21 | #if ( $valStr.substring(0, 1) != '(' ) 22 | #set( $valStr = "($valStr)" ) 23 | #end 24 | #if ( $colStr.substring(0, 1) != '(' ) 25 | #set( $colStr = "($colStr)" ) 26 | #end 27 | { 28 | "version": "2018-05-29", 29 | "statements": ["INSERT INTO users $colStr VALUES $valStr", "SELECT * FROM users ORDER BY id DESC LIMIT 1"] 30 | } -------------------------------------------------------------------------------- /.github/workflows/cd.yml: -------------------------------------------------------------------------------- 1 | name: appsync-rds-todo CD develop 2 | on: 3 | push: 4 | branches: 5 | - develop 6 | 7 | jobs: 8 | primary: 9 | runs-on: ubuntu-latest 10 | 11 | steps: 12 | - uses: actions/checkout@v2 13 | - name: Use Node.js ${{ matrix.node-version }} 14 | uses: actions/setup-node@v2 15 | with: 16 | node-version: 12.x 17 | - name: install dependencies 18 | run: npm i 19 | - name: Get branch name 20 | id: vars 21 | run: echo ::set-output name=stage::${GITHUB_REF#refs/*/} 22 | - name: Create envfile 23 | uses: SpicyPizza/create-envfile@v1 24 | with: 25 | envkey_DB_PASSWORD: ${{ secrets.DB_PASSWORD_DEV }} 26 | envkey_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }} 27 | file_name: .env.${{steps.vars.outputs.stage}}.local 28 | - name: serverless deploy --stage=${{steps.vars.outputs.stage}} 29 | run: npx serverless deploy --stage=${{steps.vars.outputs.stage}} --force 30 | env: 31 | AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} 32 | 
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 33 | -------------------------------------------------------------------------------- /resources/config/route-private.yml: -------------------------------------------------------------------------------- 1 | Resources: 2 | DefaultPrivateRouteTable: 3 | Type: AWS::EC2::RouteTable 4 | Properties: 5 | VpcId: 6 | Ref: ServerlessVPC 7 | DefaultPrivateRoute: 8 | Type: AWS::EC2::Route 9 | Properties: 10 | RouteTableId: 11 | Ref: DefaultPrivateRouteTable 12 | DestinationCidrBlock: 0.0.0.0/0 13 | NatGatewayId: 14 | Ref: ServerlessNatGateway 15 | SubnetRouteTableAssociationLambdaPrivateA: 16 | Type: AWS::EC2::SubnetRouteTableAssociation 17 | Properties: 18 | SubnetId: 19 | Ref: ServerlessPrivateSubnetA 20 | RouteTableId: 21 | Ref: DefaultPrivateRouteTable 22 | SubnetRouteTableAssociationLambdaPrivateB: 23 | Type: AWS::EC2::SubnetRouteTableAssociation 24 | Properties: 25 | SubnetId: 26 | Ref: ServerlessPrivateSubnetB 27 | RouteTableId: 28 | Ref: DefaultPrivateRouteTable 29 | SubnetRouteTableAssociationLambdaPrivateC: 30 | Type: AWS::EC2::SubnetRouteTableAssociation 31 | Properties: 32 | SubnetId: 33 | Ref: ServerlessPrivateSubnetC 34 | RouteTableId: 35 | Ref: DefaultPrivateRouteTable 36 | -------------------------------------------------------------------------------- /.eslintrc.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const path = require('path'); 3 | 4 | const prettierOptions = JSON.parse( 5 | fs.readFileSync(path.resolve(__dirname, '.prettierrc'), 'utf8') 6 | ); 7 | 8 | module.exports = { 9 | parser: 'babel-eslint', 10 | extends: ['prettier-standard'], 11 | plugins: ['prettier'], 12 | env: { 13 | jest: true, 14 | browser: true, 15 | node: true, 16 | es6: true 17 | }, 18 | parserOptions: { 19 | ecmaVersion: 6, 20 | sourceType: 'module' 21 | }, 22 | rules: { 23 | 'prettier/prettier': ['error', prettierOptions], 24 | 'arrow-body-style': [2, 
'as-needed'], 25 | 'class-methods-use-this': 0, 26 | 'import/imports-first': 0, 27 | 'import/newline-after-import': 0, 28 | 'import/no-dynamic-require': 0, 29 | 'import/no-extraneous-dependencies': 0, 30 | 'import/no-named-as-default': 0, 31 | 'import/no-unresolved': 0, 32 | 'import/prefer-default-export': 0, 33 | 'no-param-reassign': 0, 34 | 'max-len': 0, 35 | 'newline-per-chained-call': 0, 36 | 'no-confusing-arrow': 0, 37 | 'no-unused-vars': 2, 38 | 'no-use-before-define': 0, 39 | 'prefer-template': 2, 40 | 'require-yield': 0 41 | } 42 | }; 43 | -------------------------------------------------------------------------------- /migrations/utils/index.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const shell = require('shelljs'); 3 | 4 | function getVersion(currentFileName) { 5 | let version = 1; 6 | shell.ls(`./migrations`).forEach((item, index) => { 7 | if (item === currentFileName) { 8 | version = index + 1; 9 | } 10 | }); 11 | return version; 12 | } 13 | 14 | async function migrate(currentFileName, queryInterface) { 15 | const migrationResourceDir = './migrations/resources/v'; 16 | const version = getVersion(currentFileName.split('/')[currentFileName.split('/').length - 1]); 17 | const directories = shell.ls(`${migrationResourceDir}${version}`); 18 | for (let index = 0; index < directories.length; index++) { 19 | const fileName = directories[index]; 20 | console.log('migrating: ', fileName); 21 | await queryInterface.sequelize 22 | .query(fs.readFileSync(`${migrationResourceDir}${version}/${fileName}`, 'utf-8')) 23 | .catch((e) => { 24 | console.log(e); 25 | const error = e.original.sqlMessage; 26 | if (error.startsWith('Table') && error.endsWith('already exists')) { 27 | // If the database is already built add this migration to sequelizeMeta table. 
28 | return; 29 | } 30 | throw e; 31 | }); 32 | } 33 | } 34 | 35 | module.exports = { 36 | migrate, 37 | getVersion 38 | }; 39 | -------------------------------------------------------------------------------- /resources/mapping-templates/mutations.yml: -------------------------------------------------------------------------------- 1 | - type: Mutation 2 | field: createNote 3 | request: "mutations/createNote.req.vtl" 4 | response: "mutations/response.vtl" 5 | dataSource: POSTGRES_RDS 6 | - type: Mutation 7 | field: createList 8 | request: "mutations/createList.req.vtl" 9 | response: "mutations/response.vtl" 10 | dataSource: POSTGRES_RDS 11 | - type: Mutation 12 | field: createUser 13 | request: "mutations/createUser.req.vtl" 14 | response: "mutations/response.vtl" 15 | dataSource: POSTGRES_RDS 16 | - type: Mutation 17 | field: updateList 18 | request: "mutations/updateList.req.vtl" 19 | response: "mutations/response.vtl" 20 | dataSource: POSTGRES_RDS 21 | - type: Mutation 22 | field: updateNote 23 | request: "mutations/updateNote.req.vtl" 24 | response: "mutations/response.vtl" 25 | dataSource: POSTGRES_RDS 26 | - type: Mutation 27 | field: updateUser 28 | request: "mutations/updateUser.req.vtl" 29 | response: "mutations/response.vtl" 30 | dataSource: POSTGRES_RDS 31 | - type: Mutation 32 | field: deleteList 33 | request: "mutations/deleteList.req.vtl" 34 | response: "mutations/response.vtl" 35 | dataSource: POSTGRES_RDS 36 | - type: Mutation 37 | field: deleteNote 38 | request: "mutations/deleteNote.req.vtl" 39 | response: "mutations/response.vtl" 40 | dataSource: POSTGRES_RDS 41 | - type: Mutation 42 | field: deleteUser 43 | request: "mutations/deleteUser.req.vtl" 44 | response: "mutations/response.vtl" 45 | dataSource: POSTGRES_RDS 46 | -------------------------------------------------------------------------------- /resolvers/mutations/response.vtl: -------------------------------------------------------------------------------- 1 | #set ( $index = -1) 2 | 
#set ( $result = $util.parseJson($ctx.result) ) 3 | #set ( $meta = $result.sqlStatementResults[1].columnMetadata) 4 | #foreach ($column in $meta) 5 | #set ($index = $index + 1) 6 | #if ( $column["typeName"] == "timestamptz" ) 7 | #set ($time = $result["sqlStatementResults"][1]["records"][0][$index]["stringValue"] ) 8 | #set ( $nowEpochMillis = $util.time.parseFormattedToEpochMilliSeconds("$time.substring(0,19)+0000", "yyyy-MM-dd HH:mm:ssZ") ) 9 | #set ( $isoDateTime = $util.time.epochMilliSecondsToISO8601($nowEpochMillis) ) 10 | $util.qr( $result["sqlStatementResults"][1]["records"][0][$index].put("stringValue", "$isoDateTime") ) 11 | #end 12 | #end 13 | #set ( $res = $util.parseJson($util.rds.toJsonString($util.toJson($result)))[1][0] ) 14 | #set ( $response = {} ) 15 | #foreach($mapKey in $res.keySet()) 16 | #set ( $s = $mapKey.split("_") ) 17 | #set ( $camelCase="" ) 18 | #set ( $isFirst=true ) 19 | #foreach($entry in $s) 20 | #if ( $isFirst ) 21 | #set ( $first = $entry.substring(0,1) ) 22 | #else 23 | #set ( $first = $entry.substring(0,1).toUpperCase() ) 24 | #end 25 | #set ( $isFirst=false ) 26 | #set ( $stringLength = $entry.length() ) 27 | #set ( $remaining = $entry.substring(1, $stringLength) ) 28 | #set ( $camelCase = "$camelCase$first$remaining" ) 29 | #end 30 | $util.qr( $response.put("$camelCase", $res[$mapKey]) ) 31 | #end 32 | $utils.toJson($response) -------------------------------------------------------------------------------- /webpack.config.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | const CopyPlugin = require('copy-webpack-plugin'); 3 | // eslint-disable-next-line import/no-extraneous-dependencies 4 | const nodeExternals = require('webpack-node-externals'); 5 | // eslint-disable-next-line import/no-extraneous-dependencies 6 | const slsw = require('serverless-webpack'); 7 | 8 | module.exports = { 9 | entry: slsw.lib.entries, 10 | target: 'node', 11 | mode: 
'production', 12 | externals: [nodeExternals()], 13 | devtool: 'source-map', 14 | module: { 15 | rules: [ 16 | { 17 | test: /\.js$/, 18 | use: [ 19 | { 20 | loader: 'babel-loader', 21 | options: { 22 | presets: [['env', { targets: { node: '6.10' } }]] 23 | } 24 | } 25 | ] 26 | } 27 | ] 28 | }, 29 | plugins: [ 30 | new CopyPlugin([ 31 | { from: './migrations/**/*.*', to: '' }, 32 | { from: './config/config.js', to: 'config/config.js' } 33 | ]) 34 | ], 35 | resolve: { 36 | modules: ['node_modules', './'], 37 | alias: { 38 | '@models': path.resolve(__dirname, 'models/'), 39 | '@utils': path.resolve(__dirname, 'utils/'), 40 | '@daos': path.resolve(__dirname, 'daos/'), 41 | '@services': path.resolve(__dirname, 'services/') 42 | }, 43 | extensions: ['.js', '.jsx', '.react.js'], 44 | mainFields: ['browser', 'jsnext:main', 'main'] 45 | }, 46 | output: { 47 | libraryTarget: 'commonjs', 48 | path: path.join(__dirname, '.webpack'), 49 | filename: '[name].js' 50 | } 51 | }; 52 | -------------------------------------------------------------------------------- /resources/config/route-public.yml: -------------------------------------------------------------------------------- 1 | Resources: 2 | RouteTablePublic: 3 | Type: AWS::EC2::RouteTable 4 | Properties: 5 | VpcId: 6 | Ref: ServerlessVPC 7 | Tags: 8 | - Key: Name 9 | Value: public-route 10 | RoutePublic: 11 | Type: AWS::EC2::Route 12 | Properties: 13 | DestinationCidrBlock: 0.0.0.0/0 14 | GatewayId: 15 | Ref: ServerlessInternetGateway 16 | RouteTableId: 17 | Ref: RouteTablePublic 18 | 19 | RouteTableAssociationSubnetA: 20 | Type: AWS::EC2::SubnetRouteTableAssociation 21 | Properties: 22 | RouteTableId: 23 | Ref: RouteTablePublic 24 | SubnetId: 25 | Ref: ServerlessSubnetA 26 | RouteTableAssociationSubnetB: 27 | Type: AWS::EC2::SubnetRouteTableAssociation 28 | Properties: 29 | RouteTableId: 30 | Ref: RouteTablePublic 31 | SubnetId: 32 | Ref: ServerlessSubnetB 33 | RouteTableAssociationSubnetC: 34 | Type: 
AWS::EC2::SubnetRouteTableAssociation 35 | Properties: 36 | RouteTableId: 37 | Ref: RouteTablePublic 38 | SubnetId: 39 | Ref: ServerlessSubnetC 40 | SubnetRouteTableAssociationLambdaPublicA: 41 | Type: AWS::EC2::SubnetRouteTableAssociation 42 | Properties: 43 | SubnetId: 44 | Ref: ServerlessSubnetA 45 | RouteTableId: 46 | Ref: RouteTablePublic 47 | SubnetRouteTableAssociationLambdaPublicB: 48 | Type: AWS::EC2::SubnetRouteTableAssociation 49 | Properties: 50 | SubnetId: 51 | Ref: ServerlessSubnetB 52 | RouteTableId: 53 | Ref: RouteTablePublic 54 | SubnetRouteTableAssociationLambdaPublicC: 55 | Type: AWS::EC2::SubnetRouteTableAssociation 56 | Properties: 57 | SubnetId: 58 | Ref: ServerlessSubnetC 59 | RouteTableId: 60 | Ref: RouteTablePublic -------------------------------------------------------------------------------- /models/index.js: -------------------------------------------------------------------------------- 1 | // eslint-disable-next-line 2 | import pg from 'pg'; 3 | import Sequelize, { DataTypes } from 'sequelize'; 4 | import _lists from './lists'; 5 | import _notes from './notes'; 6 | import _users from './users'; 7 | 8 | let db = null; 9 | export function getDB() { 10 | if (db) { 11 | return db; 12 | } 13 | let sequelize; 14 | if (process.env.NODE_ENV === 'test') { 15 | const SequelizeMock = require('sequelize-mock'); 16 | sequelize = new SequelizeMock(); 17 | } else { 18 | const config = { 19 | uri: `${process.env.DB_DIALECT}://${process.env.DB_USERNAME}:${process.env.DB_PASSWORD}@${process.env.DB_HOST}:${process.env.DB_PORT}/${process.env.DB_NAME}`, 20 | options: { 21 | host: process.env.DB_HOST, 22 | logging: console.log, 23 | dialect: process.env.DB_DIALECT, 24 | dialectOptions: { 25 | decimalNumbers: true, 26 | multipleStatements: true 27 | }, 28 | pool: { 29 | min: 0, 30 | max: 30, 31 | idle: 60000, 32 | acquire: 60000, 33 | handleDisconnects: true, 34 | evict: 3000 35 | }, 36 | define: { 37 | paranoid: true, 38 | underScored: true, 39 | 
underscoredAll: true, 40 | timestamps: true 41 | } 42 | } 43 | }; 44 | sequelize = new Sequelize(config.uri, config.options); 45 | } 46 | console.log('connected to database', sequelize); 47 | const lists = _lists(sequelize, DataTypes); 48 | const notes = _notes(sequelize, DataTypes); 49 | const users = _users(sequelize, DataTypes); 50 | 51 | notes.belongsTo(lists, { foreignKey: 'list_id' }); 52 | lists.hasMany(notes, { foreignKey: 'list_id' }); 53 | lists.belongsTo(users, { foreignKey: 'user_id' }); 54 | users.hasMany(lists, { foreignKey: 'user_id' }); 55 | 56 | db = { 57 | lists, 58 | notes, 59 | users 60 | }; 61 | return db; 62 | } 63 | getDB(); 64 | export default db; 65 | -------------------------------------------------------------------------------- /config/config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | local: { 3 | url: `${process.env.DB_DIALECT}://${process.env.DB_USERNAME}:${process.env.DB_PASSWORD}@${process.env.DB_HOST}:${process.env.DB_PORT}/${process.env.DB_NAME}`, 4 | logging: true, 5 | dialect: 'postgres', 6 | options: { 7 | dialect: 'postgres', 8 | pool: { 9 | min: 0, 10 | max: 10, 11 | idle: 10000 12 | }, 13 | define: { 14 | userscored: true, 15 | timestamps: false 16 | } 17 | } 18 | }, 19 | development: { 20 | url: `${process.env.DB_DIALECT}://${process.env.DB_USERNAME}:${process.env.DB_PASSWORD}@${process.env.DB_HOST}:${process.env.DB_PORT}/${process.env.DB_NAME}`, 21 | logging: true, 22 | dialect: 'postgres', 23 | options: { 24 | dialect: 'postgres', 25 | pool: { 26 | min: 0, 27 | max: 10, 28 | idle: 10000 29 | }, 30 | define: { 31 | userscored: true, 32 | timestamps: false 33 | } 34 | } 35 | }, 36 | production: { 37 | url: `${process.env.DB_DIALECT}://${process.env.DB_USERNAME}:${process.env.DB_PASSWORD}@${process.env.DB_HOST}:${process.env.DB_PORT}/${process.env.DB_NAME}`, 38 | logging: true, 39 | dialect: 'postgres', 40 | options: { 41 | dialect: 'postgres', 42 | pool: { 
43 | min: 0, 44 | max: 10, 45 | idle: 10000 46 | }, 47 | define: { 48 | userscored: true, 49 | timestamps: false 50 | } 51 | } 52 | }, 53 | qa: { 54 | url: `${process.env.DB_DIALECT}://${process.env.DB_USERNAME}:${process.env.DB_PASSWORD}@${process.env.DB_HOST}:${process.env.DB_PORT}/${process.env.DB_NAME}`, 55 | logging: true, 56 | dialect: 'postgres', 57 | options: { 58 | dialect: 'postgres', 59 | pool: { 60 | min: 0, 61 | max: 10, 62 | idle: 10000 63 | }, 64 | define: { 65 | userscored: true, 66 | timestamps: false 67 | } 68 | } 69 | } 70 | }; 71 | -------------------------------------------------------------------------------- /resources/config/subnet.yml: -------------------------------------------------------------------------------- 1 | Resources: 2 | ServerlessSubnetGroup: 3 | DependsOn: 4 | - ServerlessSubnetA 5 | - ServerlessSubnetB 6 | - ServerlessSubnetC 7 | 8 | Type: AWS::RDS::DBSubnetGroup 9 | Properties: 10 | DBSubnetGroupDescription: 'RDS Subnet Group' 11 | SubnetIds: 12 | - Ref: ServerlessSubnetA 13 | - Ref: ServerlessSubnetB 14 | - Ref: ServerlessSubnetC 15 | Tags: 16 | - Key: 'Name' 17 | Value: 'ServerlessSubnetGroup' 18 | ServerlessSubnetA: 19 | DependsOn: 20 | - ServerlessVPC 21 | Type: AWS::EC2::Subnet 22 | Properties: 23 | VpcId: 24 | Ref: ServerlessVPC 25 | AvailabilityZone: ${self:provider.region}a 26 | CidrBlock: ${self:custom.AURORA.VPC_CIDR}.0.0.0/24 27 | Tags: 28 | - Key: 'Name' 29 | Value: 'ServerlessSubnetA' 30 | ServerlessSubnetB: 31 | DependsOn: 32 | - ServerlessVPC 33 | Type: AWS::EC2::Subnet 34 | Properties: 35 | VpcId: 36 | Ref: ServerlessVPC 37 | AvailabilityZone: ${self:provider.region}b 38 | CidrBlock: ${self:custom.AURORA.VPC_CIDR}.0.1.0/24 39 | Tags: 40 | - Key: 'Name' 41 | Value: 'ServerlessSubnetB' 42 | ServerlessSubnetC: 43 | DependsOn: 44 | - ServerlessVPC 45 | Type: AWS::EC2::Subnet 46 | Properties: 47 | VpcId: 48 | Ref: ServerlessVPC 49 | AvailabilityZone: ${self:provider.region}c 50 | CidrBlock: 
${self:custom.AURORA.VPC_CIDR}.0.2.0/24 51 | Tags: 52 | - Key: 'Name' 53 | Value: 'ServerlessSubnetC' 54 | ServerlessPrivateSubnetA: 55 | DependsOn: ServerlessVPC 56 | Type: AWS::EC2::Subnet 57 | Properties: 58 | VpcId: 59 | Ref: ServerlessVPC 60 | AvailabilityZone: ${self:provider.region}a 61 | CidrBlock: ${self:custom.AURORA.VPC_CIDR}.0.3.0/24 62 | ServerlessPrivateSubnetB: 63 | DependsOn: ServerlessVPC 64 | Type: AWS::EC2::Subnet 65 | Properties: 66 | VpcId: 67 | Ref: ServerlessVPC 68 | AvailabilityZone: ${self:provider.region}b 69 | CidrBlock: ${self:custom.AURORA.VPC_CIDR}.0.4.0/24 70 | ServerlessPrivateSubnetC: 71 | DependsOn: ServerlessVPC 72 | Type: AWS::EC2::Subnet 73 | Properties: 74 | VpcId: 75 | Ref: ServerlessVPC 76 | AvailabilityZone: ${self:provider.region}c 77 | CidrBlock: ${self:custom.AURORA.VPC_CIDR}.0.5.0/24 78 | -------------------------------------------------------------------------------- /resources/config/roles.yml: -------------------------------------------------------------------------------- 1 | Resources: 2 | AppSyncRDSServiceRole: 3 | Type: "AWS::IAM::Role" 4 | Properties: 5 | RoleName: "lambda_appsync_rds_${self:service.name}_${self:provider.stage}" 6 | AssumeRolePolicyDocument: 7 | Version: "2012-10-17" 8 | Statement: 9 | - Effect: "Allow" 10 | Principal: 11 | Service: 12 | - "appsync.amazonaws.com" 13 | Action: 14 | - "sts:AssumeRole" 15 | Policies: 16 | - PolicyName: "lambda_appsync_rds_${self:service.name}_${self:provider.stage}-Policy" 17 | PolicyDocument: 18 | Version: "2012-10-17" 19 | Statement: 20 | - Effect: 'Allow' 21 | Action: 22 | - 'rds-data:DeleteItems' 23 | - 'rds-data:ExecuteSql' 24 | - 'rds-data:ExecuteStatement' 25 | - 'rds-data:GetItems' 26 | - 'rds-data:InsertItems' 27 | - 'rds-data:UpdateItems' 28 | Resource: 29 | - 'arn:aws:rds:${env:REGION}:${env:ACCOUNT_ID}:cluster:*' 30 | - 'arn:aws:rds:${env:REGION}:${env:ACCOUNT_ID}:cluster:*:*' 31 | - Effect: 'Allow' 32 | Action: 33 | - 'secretsmanager:GetSecretValue' 34 
| Resource: 35 | - 'arn:aws:secretsmanager:*:*:secret:RDSInstanceSecret*' 36 | LambdaServiceRole: 37 | Type: "AWS::IAM::Role" 38 | Properties: 39 | RoleName: "lambda_${self:service.name}_${self:provider.stage}" 40 | AssumeRolePolicyDocument: 41 | Version: "2012-10-17" 42 | Statement: 43 | - Effect: "Allow" 44 | Principal: 45 | Service: 46 | - "appsync.amazonaws.com" 47 | - "lambda.amazonaws.com" 48 | Action: 49 | - "sts:AssumeRole" 50 | Policies: 51 | - PolicyName: "lambda_${self:service.name}_${self:provider.stage}-Policy" 52 | PolicyDocument: 53 | Version: "2012-10-17" 54 | Statement: 55 | - Effect: "Allow" 56 | Action: 57 | - "lambda:*" 58 | - "logs:*" 59 | - "dbqms:*" 60 | - "rds-data:*" 61 | - "secretsmanager:*" 62 | - "ec2:*" 63 | Resource: 64 | - "*" -------------------------------------------------------------------------------- /schema.graphql: -------------------------------------------------------------------------------- 1 | type PageInfo { 2 | total: Int! 3 | } 4 | input PaginationInput { 5 | order: String 6 | limit: Int! 7 | offset: Int! 8 | } 9 | input CreateNoteRequest { 10 | note: String! 11 | listId: ID! 12 | deadline: AWSDateTime! 13 | done: Boolean 14 | } 15 | 16 | input CreateListRequest { 17 | name: String! 18 | userId: Int! 19 | } 20 | input UpdateNoteRequest { 21 | id: ID! 22 | note: String 23 | listId: ID 24 | done: Boolean 25 | deadline: AWSDateTime 26 | } 27 | input UpdateListRequest { 28 | id: ID! 29 | name: String 30 | userId: Int 31 | } 32 | input UpdateUserRequest { 33 | id: ID! 34 | name: String 35 | userRef: String 36 | } 37 | 38 | input CreateUserRequest { 39 | name: String! 40 | userRef: String! 41 | } 42 | 43 | type MutatedList { 44 | id: ID! 45 | name: String! 46 | userId: Int! 47 | } 48 | type MutatedUser { 49 | id: ID! 50 | name: String! 51 | userRef: String! 52 | } 53 | type MutatedNote { 54 | id: ID! 55 | note: String! 56 | listId: ID! 57 | deadline: AWSDateTime! 58 | done: Boolean! 59 | } 60 | type Note { 61 | id: ID! 
62 | note: String! 63 | listId: ID! 64 | done: Boolean! 65 | deadline: AWSDateTime! 66 | list: List! 67 | } 68 | 69 | type List { 70 | id: ID! 71 | name: String! 72 | user: User! 73 | notes(pagination: PaginationInput!): [Note!]! 74 | } 75 | 76 | type User { 77 | id: ID! 78 | name: String! 79 | userRef:String! 80 | lists(pagination: PaginationInput!): [List!]! 81 | } 82 | type PaginatedLists { 83 | items: [List!]! 84 | pageInfo: PageInfo! 85 | } 86 | type PaginatedUsers { 87 | items: [User!]! 88 | pageInfo: PageInfo! 89 | } 90 | type PaginatedNotes { 91 | items: [Note!]! 92 | pageInfo: PageInfo! 93 | } 94 | type Query { 95 | notes(pagination: PaginationInput!, where: AWSJSON): PaginatedNotes! 96 | lists(pagination: PaginationInput!, where: AWSJSON): PaginatedLists! 97 | users(pagination: PaginationInput!, where: AWSJSON): PaginatedUsers! 98 | } 99 | type Mutation { 100 | # create mutations 101 | createNote(input: CreateNoteRequest!): MutatedNote! 102 | createList(input: CreateListRequest!): MutatedList! 103 | createUser(input: CreateUserRequest!): MutatedUser! 104 | 105 | # update mutations 106 | updateList(input: UpdateListRequest!): MutatedList! 107 | updateNote(input: UpdateNoteRequest!): MutatedNote! 108 | updateUser(input: UpdateUserRequest!): MutatedUser! 109 | 110 | # delete mutations 111 | deleteList(id: ID!): MutatedList! 112 | deleteNote(id: ID!): MutatedNote! 113 | deleteUser(id: ID!): MutatedUser! 
114 | } 115 | -------------------------------------------------------------------------------- /utils/dbUtils.js: -------------------------------------------------------------------------------- 1 | import { DEFAULT_LIMIT, DEFAULT_OFFSET } from '@utils/constants'; 2 | import db from '@models'; 3 | import _ from 'lodash'; 4 | import pluralize from 'pluralize'; 5 | import deepMapKeys from 'deep-map-keys'; 6 | import { Op } from 'sequelize'; 7 | import { getArgs } from '@utils'; 8 | 9 | export const sequelizedWhere = (currentWhere = {}, where = {}) => { 10 | where = deepMapKeys(where, (k) => { 11 | if (Op[k]) { 12 | return Op[k]; 13 | } 14 | return k; 15 | }); 16 | return { ...currentWhere, ...where }; 17 | }; 18 | 19 | const getPaginationArgs = (args) => { 20 | let order = [['id', 'ASC']]; 21 | if (args?.order) { 22 | order = [args.order.split(':')]; 23 | } 24 | return { order, limit: args?.limit || DEFAULT_LIMIT, offset: args?.offset || DEFAULT_OFFSET }; 25 | }; 26 | const recursivelyInclude = (model, event, parent = '', depth = 4, include = []) => { 27 | if (!model?.associations) { 28 | return include; 29 | } 30 | if (depth === 0) { 31 | return include; 32 | } 33 | 34 | Object.keys(model.associations).forEach((association) => { 35 | const currentNode = `${parent}${_.isEmpty(parent) ? 
'' : '.'}${association}`; 36 | const args = getArgs(event, currentNode); 37 | if (args.queryList[currentNode]) { 38 | const m = db[association] || db[pluralize(association)]; 39 | const isList = association === pluralize(association); 40 | let includedModel = { 41 | model: m 42 | }; 43 | 44 | if (isList) { 45 | includedModel = { ...getPaginationArgs(args.pagination), ...includedModel }; 46 | includedModel.separate = true; 47 | includedModel.subQuery = false; 48 | } 49 | 50 | includedModel.include = recursivelyInclude(m, event, currentNode, depth - 1); 51 | 52 | include.push(includedModel); 53 | } 54 | }); 55 | return include; 56 | }; 57 | 58 | export const findAll = async (model, event) => { 59 | const { where: _, pagination, ...args } = event.arguments; 60 | const where = sequelizedWhere(args, event.arguments.where); 61 | const paginationArgs = getPaginationArgs(pagination); 62 | const include = recursivelyInclude(model, event); 63 | const itemsWithCount = await model.findAndCountAll({ 64 | where, 65 | include: include || [], 66 | underscored: true, 67 | underscoredAll: true, 68 | ...paginationArgs, 69 | distinct: true 70 | }); 71 | return { items: itemsWithCount.rows, pageInfo: { total: itemsWithCount.count } }; 72 | }; 73 | 74 | export const findAllRaw = async (model, where, limit = DEFAULT_LIMIT, offset = DEFAULT_OFFSET, include = [], raw) => 75 | await findAll(model, where, limit, offset, include, true); 76 | -------------------------------------------------------------------------------- /serverless.yml: -------------------------------------------------------------------------------- 1 | service: 2 | name: ${env:NAME} 3 | plugins: 4 | - serverless-webpack 5 | - serverless-appsync-plugin 6 | - serverless-dotenv-plugin 7 | - serverless-appsync-simulator 8 | - serverless-offline 9 | - serverless-plugin-scripts 10 | 11 | provider: 12 | name: aws 13 | timeout: 90 14 | runtime: nodejs12.x 15 | stage: ${env:STAGE} 16 | region: ${env:REGION} 17 | versionFunctions: 
false 18 | environment: 19 | DB_HOST: ${file(./scripts/get-host.js)} 20 | DB_NAME: ${env:RDS_PREFIX}_${env:STAGE} 21 | DB_USERNAME: ${env:RDS_USERNAME}_${env:STAGE} 22 | vpc: 23 | securityGroupIds: 24 | - { Fn::GetAtt: [ServerlessSecurityGroup, GroupId] } 25 | subnetIds: 26 | - Ref: ServerlessSubnetA 27 | - Ref: ServerlessSubnetB 28 | - Ref: ServerlessSubnetC 29 | resources: 30 | - ${file(./resources/config/roles.yml)} 31 | - ${file(./resources/config/vpc.yml)} 32 | - ${file(./resources/config/subnet.yml)} 33 | - ${file(./resources/config/internet-gateway.yml)} 34 | - ${file(./resources/config/nat-gateway.yml)} 35 | - ${file(./resources/config/elastic-ip.yml)} 36 | - ${file(./resources/config/route-private.yml)} 37 | - ${file(./resources/config/route-public.yml)} 38 | - ${file(./resources/config/security-groups.yml)} 39 | - ${file(./resources/config/secrets.yml)} 40 | - ${file(./resources/config/db-cluster.yml)} 41 | 42 | functions: ${file(./resources/lambdas/functions.yml)} 43 | custom: 44 | scripts: 45 | hooks: 46 | 'aws:deploy:finalize:cleanup': ${file(./scripts/post-deployment.js)} 47 | appsync-simulator: 48 | location: '.webpack/service' 49 | rds: 50 | dbName: ${env:RDS_PREFIX}_${env:STAGE} 51 | dbHost: ${file(./scripts/get-host.js)} 52 | dbUsername: ${env:RDS_USERNAME}_${env:STAGE} 53 | dbDialect: ${env:DB_DIALECT} 54 | dbPassword: ${env:DB_PASSWORD} 55 | dbPort: ${env:DB_PORT} 56 | webpack: 57 | includeModules: 58 | forceInclude: 59 | - sequelize-cli 60 | excludeFiles: ./**/*.test.js 61 | AURORA: 62 | VPC_CIDR: 10 63 | DB_NAME: ${env:RDS_PREFIX}_${env:STAGE} 64 | appSync: 65 | name: ${env:NAME}-${env:STAGE} 66 | schema: schema.graphql 67 | logConfig: 68 | level: ALL 69 | excludeVerboseContent: false 70 | serviceRole: AuthenticatedAppSyncServiceRole 71 | authenticationType: API_KEY 72 | mappingTemplatesLocation: resolvers/ 73 | mappingTemplates: 74 | - ${file(./resources/mapping-templates/queries.yml)} 75 | - 
${file(./resources/mapping-templates/mutations.yml)} 76 | dataSources: 77 | - ${file(./resources/lambdas/datasources.yml)} 78 | - ${file(./resources/rds/datasources.yml)} 79 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "appsync-rds-todo", 3 | "version": "0.1.0", 4 | "keywords": [ 5 | "AppSync", 6 | "AWS", 7 | "serverless-framework", 8 | "AWS Lambdas", 9 | "serverless", 10 | "RDS", 11 | "postgres", 12 | "GraphQL" 13 | ], 14 | "scripts": { 15 | "debug": "export SLS_DEBUG=*", 16 | "build": "npx serverless package --stage develop", 17 | "build-dev": "sls package --stage=develop", 18 | "link-serverless-appsync-simulator": "yarn link serverless-appsync-simulator", 19 | "start-offline": "sls offline start --stage=develop --offline", 20 | "postinstall": "rm -rf node_modules/@conduitvc/appsync-emulator-serverless/node_modules/velocityjs", 21 | "format": "prettier-standard './**/**/*.json'", 22 | "lint": "npm run lint:js", 23 | "lint:eslint": "eslint --ignore-path .gitignore --ignore-pattern internals/scripts", 24 | "lint:eslint:fix": "eslint --ignore-path .gitignore --ignore-pattern internals/scripts --fix", 25 | "lint:js": "npm run lint:eslint -- . 
", 26 | "lint:staged": "lint-staged" 27 | }, 28 | "pre-commit": "lint:staged", 29 | "lint-staged": { 30 | "*.js": [ 31 | "npm run lint:eslint:fix .", 32 | "git add --force" 33 | ], 34 | "*.json": [ 35 | "prettier --write", 36 | "git add --force" 37 | ] 38 | }, 39 | "dependencies": { 40 | "@aws-amplify/api": "^2.1.6", 41 | "@aws-amplify/pubsub": "^2.1.7", 42 | "@babel/core": "^7.11.4", 43 | "@babel/plugin-proposal-class-properties": "^7.10.4", 44 | "@babel/plugin-syntax-class-properties": "7.8.3", 45 | "@sendgrid/mail": "^7.4.2", 46 | "amplify-velocity-template": "^1.4.5", 47 | "aws-sdk": "^2.859.0", 48 | "axios": "^0.21.1", 49 | "babel-loader": "^8.1.0", 50 | "babel-preset-env": "^1.7.0", 51 | "babel-runtime": "^6.26.0", 52 | "deep-map-keys": "^2.0.1", 53 | "espree": "^7.1.0", 54 | "gql": "^1.1.2", 55 | "graphql": "^15.5.0", 56 | "graphql-playground-middleware-koa": "^1.6.17", 57 | "graphql-sequelize": "^9.4.3", 58 | "graphql-sequelize-schema-generator": "^0.2.3", 59 | "graphql-tag": "^2.11.0", 60 | "koa": "^2.13.0", 61 | "merge-graphql-schemas": "^1.7.8", 62 | "moment": "^2.29.1", 63 | "mysql2": "^2.1.0", 64 | "pg": "^8.6.0", 65 | "pg-hstore": "^2.3.4", 66 | "pluralize": "^8.0.0", 67 | "ramda": "^0.27.0", 68 | "sequelize": "^6.6.4", 69 | "sequelize-cli": "^6.2.0", 70 | "sequelize-mock": "^0.10.2", 71 | "serverless": "^2.51.2", 72 | "serverless-appsync-plugin": "^1.8.0", 73 | "serverless-appsync-simulator": "^0.18.0", 74 | "serverless-dependson-plugin": "^1.1.2", 75 | "serverless-dotenv-plugin": "^3.3.0", 76 | "serverless-offline": "^6.8.0", 77 | "serverless-plugin-additional-stacks": "^1.5.0", 78 | "serverless-plugin-scripts": "^1.0.2", 79 | "serverless-plugin-split-stacks": "^1.11.0", 80 | "serverless-plugin-warmup": "^5.2.1", 81 | "serverless-sequelize-migrations": "^1.1.1", 82 | "serverless-webpack": "^5.3.4", 83 | "shelljs": "^0.8.4", 84 | "source-map-support": "^0.5.19", 85 | "webpack": "^4.44.1", 86 | "webpack-node-externals": "^2.5.2" 87 | }, 88 | 
"devDependencies": { 89 | "@babel/polyfill": "^7.11.5", 90 | "@babel/preset-env": "^7.11.5", 91 | "aws-cognito-cli": "^1.0.9", 92 | "babel-eslint": "^10.1.0", 93 | "copy-webpack-plugin": "5.1.1", 94 | "dotenv": "^8.2.0", 95 | "eslint": "5.16.0", 96 | "eslint-config-prettier": "^4.1.0", 97 | "eslint-config-prettier-standard": "^3.0.1", 98 | "eslint-config-standard": "^14.1.0", 99 | "eslint-plugin-import": "^2.17.2", 100 | "eslint-plugin-node": "^10.0.0", 101 | "eslint-plugin-prettier": "^3.0.1", 102 | "eslint-plugin-promise": "^4.2.1", 103 | "eslint-plugin-react": "^7.16.0", 104 | "eslint-plugin-standard": "^4.0.1", 105 | "faker": "^5.1.0", 106 | "lint-staged": "^11.0.0", 107 | "prettier": "^2.2.1", 108 | "prettier-config-standard": "^1.0.1", 109 | "prettier-standard": "^16.4.1", 110 | "pretty-quick": "^3.1.0", 111 | "serverless-nested-stack": "^0.0.7" 112 | } 113 | } 114 | -------------------------------------------------------------------------------- /utils/index.js: -------------------------------------------------------------------------------- 1 | // import get from "lodash/get"; 2 | import gql from 'graphql-tag'; 3 | import { Kind, print } from 'graphql/language'; 4 | 5 | const get = (a) => a; 6 | export const failure = (callback, error) => { 7 | console.log('failure', error); 8 | return callback(get(error, 'message', 'Something went wrong. 
Please contact support@appsync_rds_todo.com')); 9 | }; 10 | 11 | export const success = (callback, data) => { 12 | console.log('success', JSON.stringify(data)); 13 | return callback(null, data); 14 | }; 15 | 16 | export const getArgs = (event, key, base) => { 17 | if (!key) { 18 | key = event.field || event?.info?.fieldName; 19 | } 20 | if (!key) { 21 | return {}; 22 | } 23 | const args = { where: {} }; 24 | let queries = {}; 25 | if (key) { 26 | const { pagination, queryList, ...rest } = getCurrentArguments(event, key, base); 27 | args.pagination = pagination; 28 | queries = queryList; 29 | args.where = { ...args.where, ...rest }; 30 | } 31 | args.where = { ...args.where, ...args.where?.filter }; 32 | delete args.where?.filter; 33 | // eslint-disable-next-line 34 | return { ...args, queryList: queries }; 35 | }; 36 | export const logHandler = (event, lambdaCallback, cb) => { 37 | console.log({ event: JSON.stringify(event) }); 38 | if (event?.source === 'serverless-plugin-warmup') { 39 | console.log('WarmUp - Lambda is warm!'); 40 | return lambdaCallback(null, 'Lambda is warm!'); 41 | } 42 | return cb(getArgs(event, '', true)); 43 | }; 44 | 45 | function parseObject(typeName, ast, variables) { 46 | const value = {}; 47 | ast.fields.forEach((field) => { 48 | value[field.name.value] = parseLiteral(field.value, field.value, variables); 49 | }); 50 | 51 | return value; 52 | } 53 | 54 | function parseLiteral(typeName, ast, variables) { 55 | switch (ast.kind) { 56 | case Kind.STRING: 57 | case Kind.BOOLEAN: 58 | return ast.value; 59 | case Kind.INT: 60 | case Kind.FLOAT: 61 | return parseFloat(ast.value); 62 | case Kind.OBJECT: 63 | return parseObject(typeName, ast, variables); 64 | case Kind.LIST: 65 | return ast.values.map((n) => parseLiteral(typeName, n, variables)); 66 | case Kind.NULL: 67 | return null; 68 | case Kind.VARIABLE: 69 | return variables ? 
variables[ast.name.value] : undefined; 70 | default: 71 | throw new TypeError(`${typeName} cannot represent value: ${print(ast)}`); 72 | } 73 | } 74 | 75 | export const convertToMap = (argArr, variables = {}) => { 76 | const args = {}; 77 | argArr.forEach((arg) => { 78 | if (arg.value.kind === 'Variable') { 79 | args[arg.name.value] = variables[arg.value.name.value]; 80 | } else if (arg.value.kind === 'IntValue') { 81 | args[arg.name.value] = parseInt(arg.value.value, 10); 82 | } else if (arg.value.kind === 'ObjectValue') { 83 | args[arg.name.value] = parseObject(Kind.OBJECT, arg.value, variables); 84 | } else { 85 | args[arg.name.value] = arg.value.value; 86 | } 87 | }); 88 | return args; 89 | }; 90 | export const getCurrentArguments = (event, fieldName = '', base) => { 91 | // handle for queryList[mealsTypes.meals] 92 | fieldName = fieldName.includes('.') ? fieldName.split('.')[fieldName.split('.').length - 1] : fieldName; 93 | 94 | let args = { ...(base ? event.arguments : null), queryList: {} }; 95 | const addSelectionSet = (selection, parent) => { 96 | if (selection.selectionSet?.selections?.length) { 97 | const fieldName = parent ? 
`${parent}.${selection.name.value}` : selection.name.value; 98 | args.queryList[fieldName] = []; 99 | selection.selectionSet.selections.forEach((s) => { 100 | args.queryList[fieldName].push(s.name.value); 101 | addSelectionSet(s, fieldName); 102 | }); 103 | } 104 | }; 105 | const context = event.ctx; 106 | const typeName = (event.typeName || 'Query').toLowerCase(); 107 | let selectionSetGraphQL = event.selectionSetGraphQL || ''; 108 | const pos = selectionSetGraphQL.indexOf('})'); 109 | if (pos >= 0) { 110 | selectionSetGraphQL = selectionSetGraphQL.substr(pos + 3); 111 | } 112 | let operation; 113 | try { 114 | operation = gql` 115 | ${typeName} 116 | ${event.field} 117 | ${selectionSetGraphQL} 118 | `; 119 | } catch (err) { 120 | operation = gql` 121 | ${typeName} 122 | ${event.field} 123 | ${event.selectionSetGraphQL} 124 | `; 125 | } 126 | const iterateSelectionsRecursively = (selections) => { 127 | selections.forEach((selection) => { 128 | args.queryList[selection.name.value] = []; 129 | if (selection.kind === 'Field' && selection.name?.value === fieldName && selection.arguments?.length) { 130 | args = { ...args, ...convertToMap(selection.arguments, context?.info?.variables) }; 131 | } else if (selection.selectionSet?.selections) { 132 | iterateSelectionsRecursively(selection.selectionSet.selections); 133 | } 134 | addSelectionSet(selection, null); 135 | }); 136 | }; 137 | iterateSelectionsRecursively(operation.definitions[0].selectionSet.selections); 138 | if (args.pagination?.offset && typeof args.pagination?.offset === 'string') { 139 | args.pagination.offset = parseInt(args.pagination.offset, 10); 140 | } 141 | 142 | if (args.pagination?.limit && typeof args.pagination?.limit === 'string') { 143 | args.pagination.limit = parseInt(args.pagination.limit, 10); 144 | } 145 | 146 | args.queryList = mapKeysDeep(args.queryList, (i) => { 147 | if (i.match(/\.items\./)) { 148 | i = i.replace(/\.items\./, '.'); 149 | } 150 | if (i.match(/\bitems\./)) { 151 | i = 
i.replace(/\bitems\./, ''); 152 | } 153 | return i; 154 | }); 155 | return args; 156 | }; 157 | 158 | export const mapKeysDeep = (obj, fn) => 159 | Array.isArray(obj) 160 | ? obj.map((val) => mapKeysDeep(val, fn)) 161 | : typeof obj === 'object' 162 | ? Object.keys(obj).reduce((acc, current) => { 163 | const key = fn(current); 164 | const val = obj[current]; 165 | acc[key] = val !== null && typeof val === 'object' ? mapKeysDeep(val, fn) : val; 166 | return acc; 167 | }, obj) 168 | : obj; 169 | -------------------------------------------------------------------------------- /functions/queries/Lists/yarn.lock: -------------------------------------------------------------------------------- 1 | # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 2 | # yarn lockfile v1 3 | 4 | 5 | buffer-writer@2.0.0: 6 | version "2.0.0" 7 | resolved "https://registry.yarnpkg.com/buffer-writer/-/buffer-writer-2.0.0.tgz#ce7eb81a38f7829db09c873f2fbb792c0c98ec04" 8 | integrity sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw== 9 | 10 | inherits@^2.0.3: 11 | version "2.0.4" 12 | resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" 13 | integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== 14 | 15 | packet-reader@1.0.0: 16 | version "1.0.0" 17 | resolved "https://registry.yarnpkg.com/packet-reader/-/packet-reader-1.0.0.tgz#9238e5480dedabacfe1fe3f2771063f164157d74" 18 | integrity sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ== 19 | 20 | pg-connection-string@^2.5.0: 21 | version "2.5.0" 22 | resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-2.5.0.tgz#538cadd0f7e603fc09a12590f3b8a452c2c0cf34" 23 | integrity sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ== 24 | 25 | pg-int8@1.0.1: 26 | version "1.0.1" 27 
| resolved "https://registry.yarnpkg.com/pg-int8/-/pg-int8-1.0.1.tgz#943bd463bf5b71b4170115f80f8efc9a0c0eb78c" 28 | integrity sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw== 29 | 30 | pg-pool@^3.3.0: 31 | version "3.3.0" 32 | resolved "https://registry.yarnpkg.com/pg-pool/-/pg-pool-3.3.0.tgz#12d5c7f65ea18a6e99ca9811bd18129071e562fc" 33 | integrity sha512-0O5huCql8/D6PIRFAlmccjphLYWC+JIzvUhSzXSpGaf+tjTZc4nn+Lr7mLXBbFJfvwbP0ywDv73EiaBsxn7zdg== 34 | 35 | pg-protocol@^1.5.0: 36 | version "1.5.0" 37 | resolved "https://registry.yarnpkg.com/pg-protocol/-/pg-protocol-1.5.0.tgz#b5dd452257314565e2d54ab3c132adc46565a6a0" 38 | integrity sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ== 39 | 40 | pg-types@^2.1.0: 41 | version "2.2.0" 42 | resolved "https://registry.yarnpkg.com/pg-types/-/pg-types-2.2.0.tgz#2d0250d636454f7cfa3b6ae0382fdfa8063254a3" 43 | integrity sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA== 44 | dependencies: 45 | pg-int8 "1.0.1" 46 | postgres-array "~2.0.0" 47 | postgres-bytea "~1.0.0" 48 | postgres-date "~1.0.4" 49 | postgres-interval "^1.1.0" 50 | 51 | pg@^8.6.0: 52 | version "8.6.0" 53 | resolved "https://registry.yarnpkg.com/pg/-/pg-8.6.0.tgz#e222296b0b079b280cce106ea991703335487db2" 54 | integrity sha512-qNS9u61lqljTDFvmk/N66EeGq3n6Ujzj0FFyNMGQr6XuEv4tgNTXvJQTfJdcvGit5p5/DWPu+wj920hAJFI+QQ== 55 | dependencies: 56 | buffer-writer "2.0.0" 57 | packet-reader "1.0.0" 58 | pg-connection-string "^2.5.0" 59 | pg-pool "^3.3.0" 60 | pg-protocol "^1.5.0" 61 | pg-types "^2.1.0" 62 | pgpass "1.x" 63 | 64 | pgpass@1.x: 65 | version "1.0.4" 66 | resolved "https://registry.yarnpkg.com/pgpass/-/pgpass-1.0.4.tgz#85eb93a83800b20f8057a2b029bf05abaf94ea9c" 67 | integrity sha512-YmuA56alyBq7M59vxVBfPJrGSozru8QAdoNlWuW3cz8l+UX3cWge0vTvjKhsSHSJpo3Bom8/Mm6hf0TR5GY0+w== 68 | dependencies: 69 | split2 "^3.1.1" 70 | 71 | 
postgres-array@~2.0.0: 72 | version "2.0.0" 73 | resolved "https://registry.yarnpkg.com/postgres-array/-/postgres-array-2.0.0.tgz#48f8fce054fbc69671999329b8834b772652d82e" 74 | integrity sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA== 75 | 76 | postgres-bytea@~1.0.0: 77 | version "1.0.0" 78 | resolved "https://registry.yarnpkg.com/postgres-bytea/-/postgres-bytea-1.0.0.tgz#027b533c0aa890e26d172d47cf9ccecc521acd35" 79 | integrity sha1-AntTPAqokOJtFy1Hz5zOzFIazTU= 80 | 81 | postgres-date@~1.0.4: 82 | version "1.0.7" 83 | resolved "https://registry.yarnpkg.com/postgres-date/-/postgres-date-1.0.7.tgz#51bc086006005e5061c591cee727f2531bf641a8" 84 | integrity sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q== 85 | 86 | postgres-interval@^1.1.0: 87 | version "1.2.0" 88 | resolved "https://registry.yarnpkg.com/postgres-interval/-/postgres-interval-1.2.0.tgz#b460c82cb1587507788819a06aa0fffdb3544695" 89 | integrity sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ== 90 | dependencies: 91 | xtend "^4.0.0" 92 | 93 | readable-stream@^3.0.0: 94 | version "3.6.0" 95 | resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" 96 | integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== 97 | dependencies: 98 | inherits "^2.0.3" 99 | string_decoder "^1.1.1" 100 | util-deprecate "^1.0.1" 101 | 102 | safe-buffer@~5.2.0: 103 | version "5.2.1" 104 | resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" 105 | integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== 106 | 107 | split2@^3.1.1: 108 | version "3.2.2" 109 | resolved "https://registry.yarnpkg.com/split2/-/split2-3.2.2.tgz#bf2cf2a37d838312c249c89206fd7a17dd12365f" 110 | 
integrity sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg== 111 | dependencies: 112 | readable-stream "^3.0.0" 113 | 114 | string_decoder@^1.1.1: 115 | version "1.3.0" 116 | resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" 117 | integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== 118 | dependencies: 119 | safe-buffer "~5.2.0" 120 | 121 | util-deprecate@^1.0.1: 122 | version "1.0.2" 123 | resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" 124 | integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= 125 | 126 | xtend@^4.0.0: 127 | version "4.0.2" 128 | resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" 129 | integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== 130 | -------------------------------------------------------------------------------- /functions/queries/Notes/yarn.lock: -------------------------------------------------------------------------------- 1 | # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
2 | # yarn lockfile v1 3 | 4 | 5 | buffer-writer@2.0.0: 6 | version "2.0.0" 7 | resolved "https://registry.yarnpkg.com/buffer-writer/-/buffer-writer-2.0.0.tgz#ce7eb81a38f7829db09c873f2fbb792c0c98ec04" 8 | integrity sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw== 9 | 10 | inherits@^2.0.3: 11 | version "2.0.4" 12 | resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" 13 | integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== 14 | 15 | packet-reader@1.0.0: 16 | version "1.0.0" 17 | resolved "https://registry.yarnpkg.com/packet-reader/-/packet-reader-1.0.0.tgz#9238e5480dedabacfe1fe3f2771063f164157d74" 18 | integrity sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ== 19 | 20 | pg-connection-string@^2.5.0: 21 | version "2.5.0" 22 | resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-2.5.0.tgz#538cadd0f7e603fc09a12590f3b8a452c2c0cf34" 23 | integrity sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ== 24 | 25 | pg-int8@1.0.1: 26 | version "1.0.1" 27 | resolved "https://registry.yarnpkg.com/pg-int8/-/pg-int8-1.0.1.tgz#943bd463bf5b71b4170115f80f8efc9a0c0eb78c" 28 | integrity sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw== 29 | 30 | pg-pool@^3.3.0: 31 | version "3.3.0" 32 | resolved "https://registry.yarnpkg.com/pg-pool/-/pg-pool-3.3.0.tgz#12d5c7f65ea18a6e99ca9811bd18129071e562fc" 33 | integrity sha512-0O5huCql8/D6PIRFAlmccjphLYWC+JIzvUhSzXSpGaf+tjTZc4nn+Lr7mLXBbFJfvwbP0ywDv73EiaBsxn7zdg== 34 | 35 | pg-protocol@^1.5.0: 36 | version "1.5.0" 37 | resolved "https://registry.yarnpkg.com/pg-protocol/-/pg-protocol-1.5.0.tgz#b5dd452257314565e2d54ab3c132adc46565a6a0" 38 | integrity 
sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ== 39 | 40 | pg-types@^2.1.0: 41 | version "2.2.0" 42 | resolved "https://registry.yarnpkg.com/pg-types/-/pg-types-2.2.0.tgz#2d0250d636454f7cfa3b6ae0382fdfa8063254a3" 43 | integrity sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA== 44 | dependencies: 45 | pg-int8 "1.0.1" 46 | postgres-array "~2.0.0" 47 | postgres-bytea "~1.0.0" 48 | postgres-date "~1.0.4" 49 | postgres-interval "^1.1.0" 50 | 51 | pg@^8.6.0: 52 | version "8.6.0" 53 | resolved "https://registry.yarnpkg.com/pg/-/pg-8.6.0.tgz#e222296b0b079b280cce106ea991703335487db2" 54 | integrity sha512-qNS9u61lqljTDFvmk/N66EeGq3n6Ujzj0FFyNMGQr6XuEv4tgNTXvJQTfJdcvGit5p5/DWPu+wj920hAJFI+QQ== 55 | dependencies: 56 | buffer-writer "2.0.0" 57 | packet-reader "1.0.0" 58 | pg-connection-string "^2.5.0" 59 | pg-pool "^3.3.0" 60 | pg-protocol "^1.5.0" 61 | pg-types "^2.1.0" 62 | pgpass "1.x" 63 | 64 | pgpass@1.x: 65 | version "1.0.4" 66 | resolved "https://registry.yarnpkg.com/pgpass/-/pgpass-1.0.4.tgz#85eb93a83800b20f8057a2b029bf05abaf94ea9c" 67 | integrity sha512-YmuA56alyBq7M59vxVBfPJrGSozru8QAdoNlWuW3cz8l+UX3cWge0vTvjKhsSHSJpo3Bom8/Mm6hf0TR5GY0+w== 68 | dependencies: 69 | split2 "^3.1.1" 70 | 71 | postgres-array@~2.0.0: 72 | version "2.0.0" 73 | resolved "https://registry.yarnpkg.com/postgres-array/-/postgres-array-2.0.0.tgz#48f8fce054fbc69671999329b8834b772652d82e" 74 | integrity sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA== 75 | 76 | postgres-bytea@~1.0.0: 77 | version "1.0.0" 78 | resolved "https://registry.yarnpkg.com/postgres-bytea/-/postgres-bytea-1.0.0.tgz#027b533c0aa890e26d172d47cf9ccecc521acd35" 79 | integrity sha1-AntTPAqokOJtFy1Hz5zOzFIazTU= 80 | 81 | postgres-date@~1.0.4: 82 | version "1.0.7" 83 | resolved "https://registry.yarnpkg.com/postgres-date/-/postgres-date-1.0.7.tgz#51bc086006005e5061c591cee727f2531bf641a8" 84 | 
integrity sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q== 85 | 86 | postgres-interval@^1.1.0: 87 | version "1.2.0" 88 | resolved "https://registry.yarnpkg.com/postgres-interval/-/postgres-interval-1.2.0.tgz#b460c82cb1587507788819a06aa0fffdb3544695" 89 | integrity sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ== 90 | dependencies: 91 | xtend "^4.0.0" 92 | 93 | readable-stream@^3.0.0: 94 | version "3.6.0" 95 | resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" 96 | integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== 97 | dependencies: 98 | inherits "^2.0.3" 99 | string_decoder "^1.1.1" 100 | util-deprecate "^1.0.1" 101 | 102 | safe-buffer@~5.2.0: 103 | version "5.2.1" 104 | resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" 105 | integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== 106 | 107 | split2@^3.1.1: 108 | version "3.2.2" 109 | resolved "https://registry.yarnpkg.com/split2/-/split2-3.2.2.tgz#bf2cf2a37d838312c249c89206fd7a17dd12365f" 110 | integrity sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg== 111 | dependencies: 112 | readable-stream "^3.0.0" 113 | 114 | string_decoder@^1.1.1: 115 | version "1.3.0" 116 | resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" 117 | integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== 118 | dependencies: 119 | safe-buffer "~5.2.0" 120 | 121 | util-deprecate@^1.0.1: 122 | version "1.0.2" 123 | resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" 124 | 
integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= 125 | 126 | xtend@^4.0.0: 127 | version "4.0.2" 128 | resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" 129 | integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== 130 | -------------------------------------------------------------------------------- /functions/queries/Users/yarn.lock: -------------------------------------------------------------------------------- 1 | # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 2 | # yarn lockfile v1 3 | 4 | 5 | buffer-writer@2.0.0: 6 | version "2.0.0" 7 | resolved "https://registry.yarnpkg.com/buffer-writer/-/buffer-writer-2.0.0.tgz#ce7eb81a38f7829db09c873f2fbb792c0c98ec04" 8 | integrity sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw== 9 | 10 | inherits@^2.0.3: 11 | version "2.0.4" 12 | resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" 13 | integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== 14 | 15 | packet-reader@1.0.0: 16 | version "1.0.0" 17 | resolved "https://registry.yarnpkg.com/packet-reader/-/packet-reader-1.0.0.tgz#9238e5480dedabacfe1fe3f2771063f164157d74" 18 | integrity sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ== 19 | 20 | pg-connection-string@^2.5.0: 21 | version "2.5.0" 22 | resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-2.5.0.tgz#538cadd0f7e603fc09a12590f3b8a452c2c0cf34" 23 | integrity sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ== 24 | 25 | pg-int8@1.0.1: 26 | version "1.0.1" 27 | resolved "https://registry.yarnpkg.com/pg-int8/-/pg-int8-1.0.1.tgz#943bd463bf5b71b4170115f80f8efc9a0c0eb78c" 28 | integrity 
sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw== 29 | 30 | pg-pool@^3.3.0: 31 | version "3.3.0" 32 | resolved "https://registry.yarnpkg.com/pg-pool/-/pg-pool-3.3.0.tgz#12d5c7f65ea18a6e99ca9811bd18129071e562fc" 33 | integrity sha512-0O5huCql8/D6PIRFAlmccjphLYWC+JIzvUhSzXSpGaf+tjTZc4nn+Lr7mLXBbFJfvwbP0ywDv73EiaBsxn7zdg== 34 | 35 | pg-protocol@^1.5.0: 36 | version "1.5.0" 37 | resolved "https://registry.yarnpkg.com/pg-protocol/-/pg-protocol-1.5.0.tgz#b5dd452257314565e2d54ab3c132adc46565a6a0" 38 | integrity sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ== 39 | 40 | pg-types@^2.1.0: 41 | version "2.2.0" 42 | resolved "https://registry.yarnpkg.com/pg-types/-/pg-types-2.2.0.tgz#2d0250d636454f7cfa3b6ae0382fdfa8063254a3" 43 | integrity sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA== 44 | dependencies: 45 | pg-int8 "1.0.1" 46 | postgres-array "~2.0.0" 47 | postgres-bytea "~1.0.0" 48 | postgres-date "~1.0.4" 49 | postgres-interval "^1.1.0" 50 | 51 | pg@^8.6.0: 52 | version "8.6.0" 53 | resolved "https://registry.yarnpkg.com/pg/-/pg-8.6.0.tgz#e222296b0b079b280cce106ea991703335487db2" 54 | integrity sha512-qNS9u61lqljTDFvmk/N66EeGq3n6Ujzj0FFyNMGQr6XuEv4tgNTXvJQTfJdcvGit5p5/DWPu+wj920hAJFI+QQ== 55 | dependencies: 56 | buffer-writer "2.0.0" 57 | packet-reader "1.0.0" 58 | pg-connection-string "^2.5.0" 59 | pg-pool "^3.3.0" 60 | pg-protocol "^1.5.0" 61 | pg-types "^2.1.0" 62 | pgpass "1.x" 63 | 64 | pgpass@1.x: 65 | version "1.0.4" 66 | resolved "https://registry.yarnpkg.com/pgpass/-/pgpass-1.0.4.tgz#85eb93a83800b20f8057a2b029bf05abaf94ea9c" 67 | integrity sha512-YmuA56alyBq7M59vxVBfPJrGSozru8QAdoNlWuW3cz8l+UX3cWge0vTvjKhsSHSJpo3Bom8/Mm6hf0TR5GY0+w== 68 | dependencies: 69 | split2 "^3.1.1" 70 | 71 | postgres-array@~2.0.0: 72 | version "2.0.0" 73 | resolved 
"https://registry.yarnpkg.com/postgres-array/-/postgres-array-2.0.0.tgz#48f8fce054fbc69671999329b8834b772652d82e" 74 | integrity sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA== 75 | 76 | postgres-bytea@~1.0.0: 77 | version "1.0.0" 78 | resolved "https://registry.yarnpkg.com/postgres-bytea/-/postgres-bytea-1.0.0.tgz#027b533c0aa890e26d172d47cf9ccecc521acd35" 79 | integrity sha1-AntTPAqokOJtFy1Hz5zOzFIazTU= 80 | 81 | postgres-date@~1.0.4: 82 | version "1.0.7" 83 | resolved "https://registry.yarnpkg.com/postgres-date/-/postgres-date-1.0.7.tgz#51bc086006005e5061c591cee727f2531bf641a8" 84 | integrity sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q== 85 | 86 | postgres-interval@^1.1.0: 87 | version "1.2.0" 88 | resolved "https://registry.yarnpkg.com/postgres-interval/-/postgres-interval-1.2.0.tgz#b460c82cb1587507788819a06aa0fffdb3544695" 89 | integrity sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ== 90 | dependencies: 91 | xtend "^4.0.0" 92 | 93 | readable-stream@^3.0.0: 94 | version "3.6.0" 95 | resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" 96 | integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== 97 | dependencies: 98 | inherits "^2.0.3" 99 | string_decoder "^1.1.1" 100 | util-deprecate "^1.0.1" 101 | 102 | safe-buffer@~5.2.0: 103 | version "5.2.1" 104 | resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" 105 | integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== 106 | 107 | split2@^3.1.1: 108 | version "3.2.2" 109 | resolved "https://registry.yarnpkg.com/split2/-/split2-3.2.2.tgz#bf2cf2a37d838312c249c89206fd7a17dd12365f" 110 | integrity 
sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg== 111 | dependencies: 112 | readable-stream "^3.0.0" 113 | 114 | string_decoder@^1.1.1: 115 | version "1.3.0" 116 | resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" 117 | integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== 118 | dependencies: 119 | safe-buffer "~5.2.0" 120 | 121 | util-deprecate@^1.0.1: 122 | version "1.0.2" 123 | resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" 124 | integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= 125 | 126 | xtend@^4.0.0: 127 | version "4.0.2" 128 | resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" 129 | integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== 130 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # AppSync RDS ToDo 2 | 3 | [![appsync-rds-todo CI Dev](https://github.com/wednesday-solutions/appsync-rds-todo/actions/workflows/ci.yml/badge.svg)](https://github.com/wednesday-solutions/appsync-rds-todo/actions/workflows/ci.yml) 4 | 5 | [![appsync-rds-todo CD develop](https://github.com/wednesday-solutions/appsync-rds-todo/actions/workflows/cd.yml/badge.svg)](https://github.com/wednesday-solutions/appsync-rds-todo/actions/workflows/cd.yml) 6 | 7 | This is a boilerplate AppSync-aurora project, that is deployed using the serverless framework with out of the box support for automated creation of a Serverless database cluster, lambdas, vpcs, security groups, nat gateways, etc. 
8 | 9 | ## Local setup 10 | 11 | Run the following command to set up the local database and run migrations 12 | ``` 13 | ./scripts/setup-local.sh 14 | ``` 15 | 16 | This project exposes the following queries and mutations 17 | ## Queries 18 | 19 | ``` 20 | type Query { 21 | notes(pagination: PaginationInput!, where: AWSJSON): PaginatedNotes! 22 | lists(pagination: PaginationInput!, where: AWSJSON): PaginatedLists! 23 | users(pagination: PaginationInput!, where: AWSJSON): PaginatedUsers! 24 | } 25 | ``` 26 | 27 | ## Mutations 28 | 29 | ``` 30 | type Mutation { 31 | # create mutations 32 | createNote(input: CreateNoteRequest!): MutatedNote! 33 | createList(input: CreateListRequest!): MutatedList! 34 | createUser(input: CreateUserRequest!): MutatedUser! 35 | 36 | # update mutations 37 | updateList(input: UpdateListRequest!): MutatedList! 38 | updateNote(input: UpdateNoteRequest!): MutatedNote! 39 | updateUser(input: UpdateUserRequest!): MutatedUser! 40 | 41 | # delete mutations 42 | deleteList(id: ID!): MutatedList! 43 | deleteNote(id: ID!): MutatedNote! 44 | deleteUser(id: ID!): MutatedUser! 45 | } 46 | ``` 47 | 48 | You can find the complete postman collection here: [Collection](postman/collections.json) 49 | 50 | ## Highlights 51 | - Automated creation of resources 52 | 53 | This project handles creation of all resources.
54 | It uses an ApiKey for authentication for this project but to know more about how to integrate AppSync with cognito take a look at the following implementations 55 | - [Federated and passwordless login](https://github.com/wednesday-solutions/serverless/tree/master/aws/cognito/federated-plus-passwordless-login) 56 | - [Federated Sign in](https://github.com/wednesday-solutions/serverless/tree/master/aws/cognito/federated-signin) 57 | - [Passwordless login](https://github.com/wednesday-solutions/serverless/tree/master/aws/cognito/passwordless-login) 58 | - [SRP](https://github.com/wednesday-solutions/serverless/tree/master/aws/cognito/srp) 59 | 60 | Creation of the following resources is automated 61 | - [Serverless Aurora cluster](resources/rds/resources.yml) 62 | - [Subnets](resources/config/subnet.yml) 63 | - [Public Route Table](resources/config/route-public.yml) 64 | - [Private Route Table](resources/config/route-private.yml) 65 | - [Security Groups](resources/config/security-groups.yml) 66 | - [VPC](resources/config/vpc.yml) 67 | - [Elastic IP](resources/config/elastic-ip.yml) 68 | - [Secret](resources/config/secrets.yml) 69 | - [NAT Gateway](resources/config/nat-gateway.yml) 70 | - [Internet Gateway](resources/config/internet-gateway.yml) 71 | - [IAM Roles](resources/config/roles.yml) 72 | - [Lambdas](resources/lambdas/functions.yml) 73 | - [Lambdas as datasources](resources/lambdas/datasources.yml) 74 | - [RDS as a datasource](resources/rds/datasources.yml) 75 | - Support for running database migrations in the CD pipeline. 76 | If you've used a serverless cluster before you know how problematic this is.
Take a look at the following files to get a better idea about how we did it 77 | - [webpack.config.js](webpack.config.js) 78 | - [functions/database/migrate/index.js](functions/database/migrate/index.js) 79 | - [scripts/post-deployment.js](scripts/post-deployment.js) 80 | - All queries support a sequelizedWhere, which allows highly-configurable queries 81 | - All mutations are resolved directly off of the database 82 | This project has out-of-the-box support for a camelCased GraphQL interface, whereas the database layer has snake_case table and column names. So how do we resolve mutations directly off of the database? 83 | - [resolvers/mutations/createNote.req.vtl](resolvers/mutations/createNote.req.vtl) 84 | - [resolvers/mutations/response.vtl](resolvers/mutations/response.vtl) 85 | - Max depth for GraphQL queries is restricted to 4 86 | - Support for paginated queries 87 | Nested pagination and fulfilling of deeply-nested queries is exhausting and overwhelming, take a look at how we've simplified this 88 | - [utils/dbUtils.js](utils/dbUtils.js) 89 | - [functions/queries/Notes/index.js](functions/queries/Notes/index.js) 90 | - Support for serverless-webpack, which allows you to use the latest JavaScript functionality in your lambdas 91 | - Support for serverless-dotenv, which allows you to deploy easily to multiple environments 92 | - Out of the box support for sequelize 93 | - Out of the box CI/CD pipelines with support to inject environment variables using GitHub Secrets 94 | - [.github/workflows/ci.yml](.github/workflows/ci.yml) 95 | - [.github/workflows/cd.yml](.github/workflows/cd.yml) 96 | 97 | ## Adding features on top of the template 98 | 99 | What we've done here is built a base on top of which we can incrementally add new features easily.
100 | ### Adding a new table 101 | - Create a new migration file 102 | - Increment the version in [migrations/resources/](migrations/resources/) and add the necessary .sql 103 | - Create the sequelize model in the [models/](models/) folder 104 | 105 | 106 | ### Adding a new query 107 | - Create a Lambda 108 | Take a look at the following lambda, you just need to change the model that you're passing to findAll 109 | [functions/queries/Notes/index.js](functions/queries/Notes/index.js) 110 | ``` 111 | exports.handler = async (event, context, callback) => 112 | logHandler(event, callback, async () => { 113 | try { 114 | return success(context.done || callback, await findAll(db[], event)); 115 | } catch (err) { 116 | return failure(context.fail || callback, err); 117 | } 118 | }); 119 | 120 | ``` 121 | - Add the Lambda in the [resources/lambdas/functions.yml](resources/lambdas/functions.yml) 122 | ``` 123 | : 124 | handler: .handler 125 | role: 126 | vpc: 127 | securityGroupIds: 128 | - !Ref ServerlessSecurityGroup 129 | subnetIds: 130 | - Ref: ServerlessPrivateSubnetA 131 | - Ref: ServerlessPrivateSubnetB 132 | - Ref: ServerlessPrivateSubnetC 133 | ``` 134 | - Add the Lambda as a datasource in the [resources/lambdas/datasources.yml](resources/lambdas/datasources.yml) 135 | ``` 136 | - type: AWS_LAMBDA 137 | name: Lambda_ 138 | description: "" 139 | config: 140 | functionName: 141 | ``` 142 | - Add the query in [resources/mapping-templates/queries.yml](resources/mapping-templates/queries.yml) 143 | 144 | ``` 145 | - type: Query 146 | field: 147 | request: "queries/query.req.vtl" 148 | response: "response.vtl" 149 | dataSource: 150 | ``` 151 | 152 | ### Adding a mutation 153 | - Create a new request resolver, based on the type of the mutation you can use one of the following templates 154 | 155 | - Create 156 | 157 | Similar to the one here [createNote.req.vtl](resolvers/mutations/createNote.req.vtl) 158 | 159 | ``` 160 | #set( $cols = [] ) 161 | #set( $vals = [] ) 162 | 
#foreach( $entry in $ctx.args.input.keySet() ) 163 | #set( $regex = "([a-z])([A-Z]+)") 164 | #set( $replacement = "$1_$2") 165 | #set( $toSnake = $entry.replaceAll($regex, $replacement).toLowerCase() ) 166 | #set( $discard = $cols.add("$toSnake") ) 167 | #if( $util.isBoolean($ctx.args.input[$entry]) ) 168 | #if( $ctx.args.input[$entry] ) 169 | #set( $discard = $vals.add("1") ) 170 | #else 171 | #set( $discard = $vals.add("0") ) 172 | #end 173 | #else 174 | #set( $discard = $vals.add("'$ctx.args.input[$entry]'") ) 175 | #end 176 | #end 177 | 178 | #set( $valStr = $vals.toString().replace("[","(").replace("]",")") ) 179 | #set( $colStr = $cols.toString().replace("[","(").replace("]",")") ) 180 | #if ( $valStr.substring(0, 1) != '(' ) 181 | #set( $valStr = "($valStr)" ) 182 | #end 183 | #if ( $colStr.substring(0, 1) != '(' ) 184 | #set( $colStr = "($colStr)" ) 185 | #end 186 | { 187 | "version": "2018-05-29", 188 | "statements": ["INSERT INTO <table> $colStr VALUES $valStr", "SELECT * FROM <table> ORDER BY id DESC LIMIT 1"] 189 | } 190 | ``` 191 | 192 | - Update 193 | 194 | Similar to the one here [updateNote.req.vtl](resolvers/mutations/updateNote.req.vtl) 195 | 196 | ``` 197 | #set( $update = "" ) 198 | #set( $equals = "=" ) 199 | ## 1 200 | #foreach( $entry in $ctx.args.input.keySet() ) 201 | ## 2 202 | #set( $cur = $ctx.args.input[$entry] ) 203 | #set( $regex = "([a-z])([A-Z]+)") 204 | #set( $replacement = "$1_$2") 205 | #set( $toSnake = $entry.replaceAll($regex, $replacement).toLowerCase() ) 206 | ## 3 207 | #if( $util.isBoolean($cur) ) 208 | #if( $cur ) 209 | #set ( $cur = "1" ) 210 | #else 211 | #set ( $cur = "0" ) 212 | #end 213 | #end 214 | ## 4 215 | #if ( $util.isNullOrEmpty($update) ) 216 | #set($update = "$toSnake$equals'$cur'" ) 217 | #else 218 | #set($update = "$update,$toSnake$equals'$cur'" ) 219 | #end 220 | #end 221 | { 222 | "version": "2018-05-29", 223 | "statements": ["UPDATE <table> SET $update WHERE id=$ctx.args.input.id", "SELECT * FROM <table> WHERE id=$ctx.args.input.id"]
224 | } 225 | ``` 226 | 227 | - Delete 228 | 229 | Similar to the one here [deleteNote.req.vtl](resolvers/mutations/deleteNote.req.vtl) 230 | 231 | ``` 232 | { 233 | "version": "2018-05-29", 234 | "statements": ["UPDATE set deleted_at=NOW() WHERE id=$ctx.args.id", "SELECT * FROM WHERE id=$ctx.args.id"] 235 | } 236 | ``` 237 | 238 | - Add a mutation in [resources/mapping-templates/mutations.yml](resources/mapping-templates/mutations.yml) 239 | 240 | ``` 241 | - type: Mutation 242 | field: createNote 243 | request: "mutations/createNote.req.vtl" 244 | response: "mutations/response.vtl" 245 | dataSource: POSTGRES_RDS 246 | ``` 247 | -------------------------------------------------------------------------------- /postman/collections.json: -------------------------------------------------------------------------------- 1 | { 2 | "info": { 3 | "_postman_id": "dfe6c49e-278b-480c-99ff-933c64b132d7", 4 | "name": "localhost:20002/graphql", 5 | "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json" 6 | }, 7 | "item": [ 8 | { 9 | "name": "query", 10 | "item": [ 11 | { 12 | "name": "query notes", 13 | "event": [ 14 | { 15 | "listen": "test", 16 | "script": { 17 | "exec": [ 18 | "pm.test(\"errors is undefined\", function() {", 19 | " const response = JSON.parse(responseBody);", 20 | " pm.expect(response.errors).to.be.undefined", 21 | "});" 22 | ], 23 | "type": "text/javascript" 24 | } 25 | } 26 | ], 27 | "request": { 28 | "method": "POST", 29 | "header": [{ "key": "x-api-key", "value": "0123456789", "type": "text", "disabled": false }], 30 | "body": { 31 | "mode": "graphql", 32 | "graphql": { 33 | "query": "query Notes($notesPagination: PaginationInput!, $notesWhere: AWSJSON) {\n notes(pagination: $notesPagination, where: $notesWhere) {\n items {\n id\n note\n listId\n done\n deadline\n list {\n id\n name\n notes(pagination: $notesPagination) {\n id\n note\n listId\n done\n deadline\n }\n }\n }\n }\n}\n", 34 | "variables": "{\n \"notesPagination\": 
null,\n \"notesWhere\": null\n}\n" 35 | } 36 | }, 37 | "url": { "raw": "http://localhost:20002/graphql", "host": ["http://localhost:20002/graphql"] } 38 | }, 39 | "response": [] 40 | }, 41 | { 42 | "name": "query users", 43 | "event": [ 44 | { 45 | "listen": "test", 46 | "script": { 47 | "exec": [ 48 | "pm.test(\"errors is undefined\", function() {", 49 | " const response = JSON.parse(responseBody);", 50 | " pm.expect(response.errors).to.be.undefined", 51 | "});" 52 | ], 53 | "type": "text/javascript" 54 | } 55 | } 56 | ], 57 | "request": { 58 | "method": "POST", 59 | "header": [{ "key": "x-api-key", "value": "0123456789", "type": "text", "disabled": false }], 60 | "body": { 61 | "mode": "graphql", 62 | "graphql": { 63 | "query": "query Users(\n $usersPagination: PaginationInput!\n $usersWhere: AWSJSON\n $listsPagination: PaginationInput!\n $notesPagination: PaginationInput!\n) {\n users(pagination: $usersPagination, where: $usersWhere) {\n items {\n id\n name\n userRef\n lists(pagination: $listsPagination) {\n id\n name\n notes(pagination: $notesPagination) {\n id\n note\n listId\n done\n deadline\n }\n }\n }\n }\n}\n", 64 | "variables": "{\n \"usersPagination\": null,\n \"usersWhere\": null,\n \"listsPagination\": null,\n \"notesPagination\": null\n}\n" 65 | } 66 | }, 67 | "url": { "raw": "http://localhost:20002/graphql", "host": ["http://localhost:20002/graphql"] } 68 | }, 69 | "response": [] 70 | }, 71 | { 72 | "name": "query lists", 73 | "event": [ 74 | { 75 | "listen": "test", 76 | "script": { 77 | "exec": [ 78 | "pm.test(\"errors is undefined\", function() {", 79 | " const response = JSON.parse(responseBody);", 80 | " pm.expect(response.errors).to.be.undefined", 81 | "});" 82 | ], 83 | "type": "text/javascript" 84 | } 85 | } 86 | ], 87 | "request": { 88 | "method": "POST", 89 | "header": [{ "key": "x-api-key", "value": "0123456789", "type": "text", "disabled": false }], 90 | "body": { 91 | "mode": "graphql", 92 | "graphql": { 93 | "query": "query Lists(\n 
$listsPagination: PaginationInput!\n $listsWhere: AWSJSON\n $notesPagination: PaginationInput!\n) {\n lists(pagination: $listsPagination, where: $listsWhere) {\n items {\n id\n name\n notes(pagination: $notesPagination) {\n id\n note\n listId\n done\n deadline\n list {\n id\n name\n }\n }\n }\n }\n}\n", 94 | "variables": "{\n \"listsPagination\": null,\n \"listsWhere\": null,\n \"notesPagination\": null\n}\n" 95 | } 96 | }, 97 | "url": { "raw": "http://localhost:20002/graphql", "host": ["http://localhost:20002/graphql"] } 98 | }, 99 | "response": [] 100 | } 101 | ] 102 | }, 103 | { 104 | "name": "mutation", 105 | "item": [ 106 | { 107 | "name": "mutation createNote", 108 | "event": [ 109 | { 110 | "listen": "test", 111 | "script": { 112 | "exec": [ 113 | "pm.test(\"errors is undefined\", function() {", 114 | " const response = JSON.parse(responseBody);", 115 | " pm.expect(response.errors).to.be.undefined", 116 | "});" 117 | ], 118 | "type": "text/javascript" 119 | } 120 | } 121 | ], 122 | "request": { 123 | "method": "POST", 124 | "header": [{ "key": "x-api-key", "value": "0123456789", "type": "text", "disabled": false }], 125 | "body": { 126 | "mode": "graphql", 127 | "graphql": { 128 | "query": "mutation CreateNote($createNoteInput: CreateNoteRequest!) 
{\n createNote(input: $createNoteInput) {\n id\n note\n listId\n deadline\n done\n }\n}\n", 129 | "variables": "{\n \"createNoteInput\": null\n}\n" 130 | } 131 | }, 132 | "url": { "raw": "http://localhost:20002/graphql", "host": ["http://localhost:20002/graphql"] } 133 | }, 134 | "response": [] 135 | }, 136 | { 137 | "name": "mutation createUser", 138 | "event": [ 139 | { 140 | "listen": "test", 141 | "script": { 142 | "exec": [ 143 | "pm.test(\"errors is undefined\", function() {", 144 | " const response = JSON.parse(responseBody);", 145 | " pm.expect(response.errors).to.be.undefined", 146 | "});" 147 | ], 148 | "type": "text/javascript" 149 | } 150 | } 151 | ], 152 | "request": { 153 | "method": "POST", 154 | "header": [{ "key": "x-api-key", "value": "0123456789", "type": "text", "disabled": false }], 155 | "body": { 156 | "mode": "graphql", 157 | "graphql": { 158 | "query": "mutation CreateUser($createUserInput: CreateUserRequest!) {\n createUser(input: $createUserInput) {\n id\n name\n userRef\n }\n}\n", 159 | "variables": "{\n \"createUserInput\": null\n}\n" 160 | } 161 | }, 162 | "url": { "raw": "http://localhost:20002/graphql", "host": ["http://localhost:20002/graphql"] } 163 | }, 164 | "response": [] 165 | }, 166 | { 167 | "name": "mutation deleteList", 168 | "event": [ 169 | { 170 | "listen": "test", 171 | "script": { 172 | "exec": [ 173 | "pm.test(\"errors is undefined\", function() {", 174 | " const response = JSON.parse(responseBody);", 175 | " pm.expect(response.errors).to.be.undefined", 176 | "});" 177 | ], 178 | "type": "text/javascript" 179 | } 180 | } 181 | ], 182 | "request": { 183 | "method": "POST", 184 | "header": [{ "key": "x-api-key", "value": "0123456789", "type": "text", "disabled": false }], 185 | "body": { 186 | "mode": "graphql", 187 | "graphql": { 188 | "query": "mutation DeleteList($deleteListId: ID!) 
{\n deleteList(id: $deleteListId) {\n id\n name\n userId\n }\n}\n", 189 | "variables": "{\n \"deleteListId\": null\n}\n" 190 | } 191 | }, 192 | "url": { "raw": "http://localhost:20002/graphql", "host": ["http://localhost:20002/graphql"] } 193 | }, 194 | "response": [] 195 | }, 196 | { 197 | "name": "mutation deleteUser", 198 | "event": [ 199 | { 200 | "listen": "test", 201 | "script": { 202 | "exec": [ 203 | "pm.test(\"errors is undefined\", function() {", 204 | " const response = JSON.parse(responseBody);", 205 | " pm.expect(response.errors).to.be.undefined", 206 | "});" 207 | ], 208 | "type": "text/javascript" 209 | } 210 | } 211 | ], 212 | "request": { 213 | "method": "POST", 214 | "header": [{ "key": "x-api-key", "value": "0123456789", "type": "text", "disabled": false }], 215 | "body": { 216 | "mode": "graphql", 217 | "graphql": { 218 | "query": "mutation DeleteUser($deleteUserId: ID!) {\n deleteUser(id: $deleteUserId) {\n id\n name\n userRef\n }\n}\n", 219 | "variables": "{\n \"deleteUserId\": null\n}\n" 220 | } 221 | }, 222 | "url": { "raw": "http://localhost:20002/graphql", "host": ["http://localhost:20002/graphql"] } 223 | }, 224 | "response": [] 225 | }, 226 | { 227 | "name": "mutation updateList", 228 | "event": [ 229 | { 230 | "listen": "test", 231 | "script": { 232 | "exec": [ 233 | "pm.test(\"errors is undefined\", function() {", 234 | " const response = JSON.parse(responseBody);", 235 | " pm.expect(response.errors).to.be.undefined", 236 | "});" 237 | ], 238 | "type": "text/javascript" 239 | } 240 | } 241 | ], 242 | "request": { 243 | "method": "POST", 244 | "header": [{ "key": "x-api-key", "value": "0123456789", "type": "text", "disabled": false }], 245 | "body": { 246 | "mode": "graphql", 247 | "graphql": { 248 | "query": "mutation UpdateList($updateListInput: UpdateListRequest!) 
{\n updateList(input: $updateListInput) {\n id\n name\n userId\n }\n}\n", 249 | "variables": "{\n \"updateListInput\": null\n}\n" 250 | } 251 | }, 252 | "url": { "raw": "http://localhost:20002/graphql", "host": ["http://localhost:20002/graphql"] } 253 | }, 254 | "response": [] 255 | }, 256 | { 257 | "name": "mutation updateNote", 258 | "event": [ 259 | { 260 | "listen": "test", 261 | "script": { 262 | "exec": [ 263 | "pm.test(\"errors is undefined\", function() {", 264 | " const response = JSON.parse(responseBody);", 265 | " pm.expect(response.errors).to.be.undefined", 266 | "});" 267 | ], 268 | "type": "text/javascript" 269 | } 270 | } 271 | ], 272 | "request": { 273 | "method": "POST", 274 | "header": [{ "key": "x-api-key", "value": "0123456789", "type": "text", "disabled": false }], 275 | "body": { 276 | "mode": "graphql", 277 | "graphql": { 278 | "query": "mutation UpdateNote($updateNoteInput: UpdateNoteRequest!) {\n updateNote(input: $updateNoteInput) {\n id\n note\n listId\n deadline\n done\n }\n}\n", 279 | "variables": "{\n \"updateNoteInput\": null\n}\n" 280 | } 281 | }, 282 | "url": { "raw": "http://localhost:20002/graphql", "host": ["http://localhost:20002/graphql"] } 283 | }, 284 | "response": [] 285 | }, 286 | { 287 | "name": "mutation deleteNote", 288 | "event": [ 289 | { 290 | "listen": "test", 291 | "script": { 292 | "exec": [ 293 | "pm.test(\"errors is undefined\", function() {", 294 | " const response = JSON.parse(responseBody);", 295 | " pm.expect(response.errors).to.be.undefined", 296 | "});" 297 | ], 298 | "type": "text/javascript" 299 | } 300 | } 301 | ], 302 | "request": { 303 | "method": "POST", 304 | "header": [{ "key": "x-api-key", "value": "0123456789", "type": "text", "disabled": false }], 305 | "body": { 306 | "mode": "graphql", 307 | "graphql": { 308 | "query": "mutation DeleteNote($deleteNoteId: ID!) 
{\n deleteNote(id: $deleteNoteId) {\n id\n note\n listId\n deadline\n done\n }\n}\n", 309 | "variables": "{\n \"deleteNoteId\": null\n}\n" 310 | } 311 | }, 312 | "url": { "raw": "http://localhost:20002/graphql", "host": ["http://localhost:20002/graphql"] } 313 | }, 314 | "response": [] 315 | }, 316 | { 317 | "name": "mutation updateUser", 318 | "event": [ 319 | { 320 | "listen": "test", 321 | "script": { 322 | "exec": [ 323 | "pm.test(\"errors is undefined\", function() {", 324 | " const response = JSON.parse(responseBody);", 325 | " pm.expect(response.errors).to.be.undefined", 326 | "});" 327 | ], 328 | "type": "text/javascript" 329 | } 330 | } 331 | ], 332 | "request": { 333 | "method": "POST", 334 | "header": [{ "key": "x-api-key", "value": "0123456789", "type": "text", "disabled": false }], 335 | "body": { 336 | "mode": "graphql", 337 | "graphql": { 338 | "query": "mutation UpdateUser($updateUserInput: UpdateUserRequest!) {\n updateUser(input: $updateUserInput) {\n id\n name\n userRef\n }\n}\n", 339 | "variables": "{\n \"updateUserInput\": null\n}\n" 340 | } 341 | }, 342 | "url": { "raw": "http://localhost:20002/graphql", "host": ["http://localhost:20002/graphql"] } 343 | }, 344 | "response": [] 345 | }, 346 | { 347 | "name": "mutation createList", 348 | "event": [ 349 | { 350 | "listen": "test", 351 | "script": { 352 | "exec": [ 353 | "pm.test(\"errors is undefined\", function() {", 354 | " const response = JSON.parse(responseBody);", 355 | " pm.expect(response.errors).to.be.undefined", 356 | "});" 357 | ], 358 | "type": "text/javascript" 359 | } 360 | } 361 | ], 362 | "request": { 363 | "method": "POST", 364 | "header": [{ "key": "x-api-key", "value": "0123456789", "type": "text", "disabled": false }], 365 | "body": { 366 | "mode": "graphql", 367 | "graphql": { 368 | "query": "mutation CreateList($createListInput: CreateListRequest!) 
{\n createList(input: $createListInput) {\n id\n name\n userId\n }\n}\n", 369 | "variables": "{\n \"createListInput\": null\n}\n" 370 | } 371 | }, 372 | "url": { "raw": "http://localhost:20002/graphql", "host": ["http://localhost:20002/graphql"] } 373 | }, 374 | "response": [] 375 | } 376 | ] 377 | } 378 | ] 379 | } 380 | --------------------------------------------------------------------------------