├── .babelrc ├── .dockerignore ├── .env.dev ├── .env.example ├── .env.prod ├── .eslintrc ├── .flowconfig ├── .gitignore ├── Dockerfile ├── README.md ├── circle.yml ├── docker-compose.yml ├── package.json ├── pgSql ├── index.js ├── migrationRunner.js ├── migrations │ ├── 1509831413668_migration_down.sql │ └── 1509831413668_migration_up.sql └── seed.sql ├── scripts └── dev_pg_helper.sh ├── src ├── TypeDefinition.js ├── api │ ├── app.js │ ├── helper.js │ ├── index.js │ ├── loader │ │ ├── index.js │ │ └── pg │ │ │ ├── ConnectionFromPgCursor.js │ │ │ ├── db │ │ │ └── PersonLoader.js │ │ │ └── pgLoader.js │ ├── routes │ │ ├── index.js │ │ └── person │ │ │ └── PersonGet.js │ └── type │ │ └── PersonType.js ├── common │ ├── config.js │ ├── consts.js │ ├── events.js │ ├── queue.js │ └── slack.js ├── dbs │ └── postgres.js └── middlewares │ ├── dataloader.js │ ├── errorHandler.js │ └── pgClientFromPool.js ├── test ├── createRows.js └── helper.js └── yarn.lock /.babelrc: -------------------------------------------------------------------------------- 1 | { 2 | "plugins": [ 3 | ["transform-flow-strip-types"] 4 | ], 5 | "presets": ["es2015", "stage-0"] 6 | } 7 | 8 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | node_modules/* 2 | .idea/* 3 | *.iml 4 | npm-debug.log 5 | dist 6 | coverage 7 | .env 8 | -------------------------------------------------------------------------------- /.env.dev: -------------------------------------------------------------------------------- 1 | # Restria ENVS 2 | API_PORT=3009 3 | 4 | SLACK_WEBHOOK=rest-api-boilerplate 5 | 6 | REDIS_HOST=localhost 7 | 8 | #Security 9 | JWT_KEY=rest-api-boilerplate 10 | 11 | NODE_ENV=development 12 | 13 | # Database 14 | PG_RESTRIA_HOST=localhost 15 | PG_RESTRIA_DATABASE=restria_dev 16 | PG_RESTRIA_USER=restria_user 17 | PG_RESTRIA_PASSWORD=12345 18 | 19 | #AWS Keys 20 | 
AWS_ACCESS_KEY_ID=rest-api-boilerplate 21 | AWS_SECRET_ACCESS_KEY=rest-api-boilerplate 22 | AWS_REGION=rest-api-boilerplate 23 | -------------------------------------------------------------------------------- /.env.example: -------------------------------------------------------------------------------- 1 | # Restria ENVS 2 | API_PORT= 3 | 4 | SLACK_WEBHOOK= 5 | 6 | REDIS_HOST= 7 | 8 | #Security 9 | JWT_KEY= 10 | 11 | NODE_ENV= 12 | 13 | # Database 14 | PG_RESTRIA_HOST= 15 | PG_RESTRIA_DATABASE= 16 | PG_RESTRIA_USER= 17 | PG_RESTRIA_PASSWORD= 18 | 19 | #AWS Keys 20 | AWS_ACCESS_KEY_ID= 21 | AWS_SECRET_ACCESS_KEY= 22 | AWS_REGION= 23 | -------------------------------------------------------------------------------- /.env.prod: -------------------------------------------------------------------------------- 1 | # Restria ENVS 2 | API_PORT=3009 3 | 4 | SLACK_WEBHOOK=rest-api-boilerplate 5 | 6 | REDIS_HOST=localhost 7 | 8 | #Security 9 | JWT_KEY=rest-api-boilerplate 10 | 11 | NODE_ENV=production 12 | 13 | #AWS Keys 14 | AWS_ACCESS_KEY_ID=rest-api-boilerplate 15 | AWS_SECRET_ACCESS_KEY=rest-api-boilerplate 16 | AWS_REGION=rest-api-boilerplate 17 | -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "airbnb/base", 3 | "parser": "babel-eslint", 4 | "rules": { 5 | "no-console": 0, 6 | "max-len": [1, 120, 2], 7 | "no-param-reassign": [2, { "props": false }], 8 | "no-continue": 0, 9 | "no-underscore-dangle": 0, 10 | "generator-star-spacing": 0, 11 | "no-duplicate-imports": 0, 12 | "import/no-duplicates": 2, 13 | "no-use-before-define": 0, 14 | "consistent-return": 0, 15 | "spaced-comment": 0, 16 | "prefer-const": [ 2, { "destructuring": "all" }] 17 | }, 18 | "plugins": [ 19 | "import" 20 | ], 21 | "env": { 22 | "jest": true, 23 | "browser": true 24 | } 25 | } 26 | 
-------------------------------------------------------------------------------- /.flowconfig: -------------------------------------------------------------------------------- 1 | [ignore] 2 | 3 | [include] 4 | 5 | [libs] 6 | flow-typed 7 | 8 | [options] 9 | sharedmemory.hash_table_pow=21 10 | esproposal.export_star_as=enable 11 | 12 | ;include_warnings=true 13 | 14 | [lints] 15 | all=warn 16 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | *.sublime-project 3 | *.sublime-workspace 4 | .idea/ 5 | 6 | lib-cov 7 | *.seed 8 | *.log 9 | *.csv 10 | *.dat 11 | *.out 12 | *.pid 13 | *.gz 14 | *.map 15 | 16 | pids 17 | logs 18 | results 19 | 20 | node_modules 21 | npm-debug.log 22 | 23 | dump.rdb 24 | bundle.js 25 | 26 | dist 27 | coverage 28 | .nyc_output 29 | .env 30 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:8 2 | 3 | # Create app directory 4 | RUN mkdir -p /usr/src/app 5 | WORKDIR /usr/src/app 6 | 7 | COPY yarn.lock /usr/src/app/ 8 | COPY package.json /usr/src/app/ 9 | RUN yarn install 10 | 11 | # Bundle app source 12 | COPY . 
/usr/src/app 13 | 14 | EXPOSE 3009 15 | 16 | # Setup ENV 17 | ENV NODE_ENV=production 18 | ENV ENTRIA_ENV=development 19 | 20 | ENV API_PORT=3009 21 | ENV SLACK_WEBHOOK= 22 | ENV MONGO_URI=mongodb://localhost/restria-dev 23 | ENV REDIS_HOST=localhost 24 | 25 | ENV JWT_KEY=rest-api-boilerplate 26 | 27 | ENV AWS_ACCESS_KEY_ID= 28 | ENV AWS_SECRET_ACCESS_KEY= 29 | ENV AWS_REGION= 30 | 31 | RUN npm run build 32 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # [WIP] Restria - Entria's REST API boilerplate 2 | 3 | ## Basic methods 4 | | Method | Endpoint | Description | 5 | | --- | --- | --- | 6 | | GET | /v1/users/:id | get user by id | 7 | | GET | /v1/users | get all users | 8 | | POST | /v1/users/:id | insert user | 9 | | POST | /v1/users | bulk insert users | 10 | | DELETE | /v1/user/:id | soft delete user by id | 11 | | DELETE | /v1/users | soft delete bulk of users | 12 | 13 | ## Postgres 14 | Check [Wiki](https://github.com/jgcmarins/restria/wiki) 15 | -------------------------------------------------------------------------------- /circle.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | executorType: docker 3 | jobs: 4 | build: 5 | environment: 6 | - IMAGE_NAME: "restria" 7 | - ENTRIA_ENVIROMENT: "development" 8 | working_directory: ~/app 9 | docker: 10 | - image: buildpack-deps:trusty 11 | steps: 12 | - checkout 13 | - run: 14 | name: Install Docker Compose 15 | command: | 16 | set -x 17 | curl -L https://github.com/docker/compose/releases/download/1.11.2/docker-compose-`uname -s`-`uname -m` > /usr/local/bin/docker-compose 18 | chmod +x /usr/local/bin/docker-compose 19 | - run: 20 | name: Install Docker client 21 | command: | 22 | set -x 23 | VER="17.05.0-ce" 24 | curl -L -o /tmp/docker-$VER.tgz https://get.docker.com/builds/Linux/x86_64/docker-$VER.tgz 25 | tar -xz -C /tmp -f 
/tmp/docker-$VER.tgz 26 | mv /tmp/docker/* /usr/bin 27 | - setup_remote_docker 28 | - run: 29 | name: Start testing environment 30 | command: docker-compose up -d 31 | - run: 32 | name: Run tests 33 | shell: /bin/bash 34 | command: docker-compose run restria yarn test 35 | - run: 36 | name: Run flow 37 | shell: /bin/bash 38 | command: docker-compose run restria yarn flow 39 | - run: 40 | name: Stop testing environment 41 | command: docker-compose down 42 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "2" 2 | 3 | services: 4 | restria: 5 | links: 6 | - redis 7 | - mongodb 8 | build: . 9 | container_name: restria 10 | environment: 11 | MONGO_URI: mongodb://localhost/restria-dev 12 | MONGO_URI_TEST: mongodb://localhost/restria-dev-tests 13 | REDIS_HOST: redis://redis 14 | ports: 15 | - "5001:80" 16 | redis: 17 | image: redis:3.2-alpine 18 | container_name: redis 19 | ports: ["6379"] 20 | mongodb: 21 | image: mongo:3.5.7 22 | container_name: mongodb 23 | ports: ["27017"] 24 | 25 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "restria", 3 | "description": "Entria's REST API boilerplate", 4 | "version": "0.0.1", 5 | "dependencies": { 6 | "@slack/client": "3.14.1", 7 | "babel-polyfill": "^6.23.0", 8 | "bcrypt-as-promised": "^1.1.0", 9 | "dataloader": "^1.3.0", 10 | "dotenv-safe": "^4.0.4", 11 | "isomorphic-fetch": "^2.2.1", 12 | "jsonwebtoken": "^8.1.0", 13 | "kcors": "^2.2.2", 14 | "koa": "^2.3.0", 15 | "koa-convert": "^1.2.0", 16 | "koa-logger": "^2.0.1", 17 | "koa-multer": "^1.0.1", 18 | "koa-router": "^7.2.1", 19 | "kue": "^0.11.6", 20 | "lint-staged": "^4.0.0", 21 | "pg": "^7.4.0", 22 | "pre-commit": "^1.2.2", 23 | "prettier": "^1.4.4", 24 | "pretty-format": "^21.2.1", 25 
| "uuid": "^3.1.0" 26 | }, 27 | "devDependencies": { 28 | "babel-cli": "^6.24.1", 29 | "babel-core": "^6.26.3", 30 | "babel-eslint": "^8.0.1", 31 | "babel-jest": "^21.2.0", 32 | "babel-plugin-transform-flow-strip-types": "^6.22.0", 33 | "babel-preset-es2015": "^6.24.1", 34 | "babel-preset-stage-0": "^6.24.1", 35 | "babel-register": "^6.24.1", 36 | "eslint": "^4.0.0", 37 | "eslint-config-airbnb": "^16.1.0", 38 | "eslint-plugin-import": "^2.3.0", 39 | "eslint-plugin-jsx-a11y": "^6.0.2", 40 | "eslint-plugin-react": "^7.1.0", 41 | "flow-bin": "^0.57.3", 42 | "jest": "^21.2.1", 43 | "jest-cli": "^21.2.1", 44 | "nodemon": "^2.0.6" 45 | }, 46 | "jest": { 47 | "testEnvironment": "node", 48 | "testPathIgnorePatterns": [ 49 | "/node_modules/", 50 | "./dist" 51 | ], 52 | "coverageReporters": [ 53 | "lcov", 54 | "html" 55 | ] 56 | }, 57 | "license": "MIT", 58 | "lint-staged": { 59 | "*.js": [ 60 | "prettier --write --single-quote true --trailing-comma all --print-width 100", 61 | "git add" 62 | ] 63 | }, 64 | "main": "index.js", 65 | "pre-commit": "lint:staged", 66 | "private": true, 67 | "repository": { 68 | "type": "git", 69 | "url": "https://github.com/entria/restria" 70 | }, 71 | "scripts": { 72 | "build": "rm -rf dist/* && babel src --ignore *.spec.js --out-dir dist --copy-files --source-maps", 73 | "flow": "flow; test $? -eq 0 -o $? 
-eq 2", 74 | "api": "nodemon src/api/index.js --exec babel-node", 75 | "lint": "eslint src/**", 76 | "lint:staged": "lint-staged", 77 | "pg:create-db": "./scripts/dev_pg_helper.sh restria_dev restria_user 12345", 78 | "pg:start": "./scripts/dev_pg_helper.sh restria_dev start", 79 | "pg:stop": "./scripts/dev_pg_helper.sh restria_dev stop", 80 | "pg:run": "babel-node pgSql/index.js", 81 | "pg:up": "yarn pg:run -- up", 82 | "pg:down": "yarn pg:run -- down", 83 | "pg:migrate": "yarn pg:run -- migrate", 84 | "pg:migration": "yarn pg:run -- migration", 85 | "pg:rollback": "yarn pg:run -- rollback", 86 | "pg:nuke": "yarn pg:down && yarn pg:up && yarn pg:run seed", 87 | "pg:seed": "yarn pg:run -- seed", 88 | "server": "node dist/api/index.js", 89 | "start": "nodemon src/api/index.js --exec babel-node", 90 | "test": "jest --coverage --runInBand --forceExit", 91 | "test:watch": "jest --watch --coverage" 92 | } 93 | } 94 | -------------------------------------------------------------------------------- /pgSql/index.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | import 'dotenv-safe'; 3 | import pg from 'pg'; 4 | import runner from './migrationRunner'; 5 | 6 | import { dbs } from '../src/common/config'; 7 | 8 | const db = new pg.Client(dbs.restria); 9 | 10 | const commands = ['migration', 'migrate', 'rollback', 'up', 'down', 'seed']; 11 | const command = process.argv[2]; 12 | 13 | if (!commands.includes(command)) { 14 | throw new Error('Command not valid'); 15 | } 16 | 17 | runner(command, db); 18 | -------------------------------------------------------------------------------- /pgSql/migrationRunner.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | import path from 'path'; 3 | import fs from 'fs'; 4 | 5 | import type { PgClient } from '../src/TypeDefinition'; 6 | 7 | const migrationsDir = path.join(__dirname, 'migrations/'); 8 | 9 | export default async function runner( 
10 | command: string, 11 | db: PgClient, 12 | handleConnection: boolean = true, 13 | ) { 14 | if (handleConnection) await db.connect(); 15 | 16 | try { 17 | switch (command) { 18 | case 'migration': { 19 | const now = Date.now(); 20 | const migrationFileName = suffix => `${now}_migration_${suffix}.sql`; 21 | console.log('Creating migrations...'); 22 | fs.writeFileSync( 23 | path.join(migrationsDir, migrationFileName('up')).toString(), 24 | '-- Up schema here', 25 | ); 26 | fs.writeFileSync( 27 | path.join(migrationsDir, migrationFileName('down')).toString(), 28 | '-- Down schema here', 29 | ); 30 | break; 31 | } 32 | case 'migrate': 33 | await migrate('_up', db); 34 | break; 35 | case 'rollback': 36 | await migrate('_down', db); 37 | break; 38 | case 'up': 39 | await all('_up', db); 40 | break; 41 | case 'down': 42 | await all('_down', db); 43 | break; 44 | case 'seed': { 45 | const seed = fs.readFileSync(path.join(__dirname, 'seed.sql')).toString(); 46 | await db.query(seed); 47 | break; 48 | } 49 | default: 50 | migrate('_up', db); 51 | } 52 | } finally { 53 | if (handleConnection) await db.end(); 54 | } 55 | } 56 | 57 | function migrate(suffix, db) { 58 | const latestMigration = fs 59 | .readdirSync(migrationsDir) 60 | .filter(fileName => fileName.includes(suffix)) 61 | .reduce( 62 | (lastOne, fileName) => { 63 | const createdAt = datefy(fileName); 64 | if (lastOne != null && (!lastOne.createdAt || createdAt > lastOne.createdAt)) { 65 | return { createdAt, fileName }; 66 | } 67 | return null; 68 | }, 69 | { createdAt: null, fileName: null }, 70 | ); 71 | 72 | const migrationSql = fs 73 | .readFileSync(path.join(migrationsDir, latestMigration.fileName)) 74 | .toString(); 75 | 76 | return db.query(migrationSql).catch(err => console.log(err)); 77 | } 78 | 79 | function all(suffix, db) { 80 | const sortedMigrations = fs 81 | .readdirSync(migrationsDir) 82 | .filter(fileName => fileName.includes(suffix)) 83 | .sort((a, b) => datefy(a) > datefy(b)) 84 | .map(fileName 
=> fs.readFileSync(path.join(migrationsDir, fileName)).toString()); 85 | 86 | return Promise.all(sortedMigrations.map(migrationSql => db.query(migrationSql))); 87 | } 88 | 89 | function datefy(fileName) { 90 | return Number(fileName.match(/[^_]+/)[0]); 91 | } 92 | -------------------------------------------------------------------------------- /pgSql/migrations/1509831413668_migration_down.sql: -------------------------------------------------------------------------------- 1 | -- Down schema here 2 | drop table if exists person; 3 | -------------------------------------------------------------------------------- /pgSql/migrations/1509831413668_migration_up.sql: -------------------------------------------------------------------------------- 1 | -- Up schema here 2 | CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; 3 | 4 | -- Person Type 5 | create table person 6 | ( 7 | id uuid default uuid_generate_v4() UNIQUE not null PRIMARY key, 8 | name text not null, 9 | nick text, 10 | email text not null, 11 | password text not null, 12 | active boolean not null, 13 | email_verified boolean not null, 14 | created_at timestamptz default current_timestamp, 15 | updated_at timestamptz default current_timestamp 16 | ); 17 | -------------------------------------------------------------------------------- /pgSql/seed.sql: -------------------------------------------------------------------------------- 1 | INSERT INTO public.person 2 | (id, name, email, password, active, email_verified, created_at, updated_at) 3 | VALUES(uuid_generate_v4(), 'Person 1', 'person1@person1', '12345', true, false, now(), now()); 4 | 5 | INSERT INTO public.person 6 | (id, name, email, password, active, email_verified, created_at, updated_at) 7 | VALUES(uuid_generate_v4(), 'Person 2', 'person2@person2', '12345', true, false, now(), now()); 8 | 9 | INSERT INTO public.person 10 | (id, name, email, password, active, email_verified, created_at, updated_at) 11 | VALUES(uuid_generate_v4(), 'Person 3', 
'person3@person3', '12345', true, false, now(), now()); 12 | 13 | INSERT INTO public.person 14 | (id, name, email, password, active, email_verified, created_at, updated_at) 15 | VALUES(uuid_generate_v4(), 'Person 4', 'person4@person4', '12345', true, false, now(), now()); 16 | 17 | INSERT INTO public.person 18 | (id, name, email, password, active, email_verified, created_at, updated_at) 19 | VALUES(uuid_generate_v4(), 'Person 5', 'person5@person5', '12345', true, false, now(), now()); 20 | 21 | INSERT INTO public.person 22 | (id, name, email, password, active, email_verified, created_at, updated_at) 23 | VALUES(uuid_generate_v4(), 'Person 6', 'person6@person6', '12345', true, false, now(), now()); 24 | 25 | INSERT INTO public.person 26 | (id, name, email, password, active, email_verified, created_at, updated_at) 27 | VALUES(uuid_generate_v4(), 'Person 7', 'person7@person7', '12345', true, false, now(), now()); 28 | 29 | INSERT INTO public.person 30 | (id, name, email, password, active, email_verified, created_at, updated_at) 31 | VALUES(uuid_generate_v4(), 'Person 8', 'person8@person8', '12345', true, false, now(), now()); 32 | 33 | INSERT INTO public.person 34 | (id, name, email, password, active, email_verified, created_at, updated_at) 35 | VALUES(uuid_generate_v4(), 'Person 9', 'person9@person9', '12345', true, false, now(), now()); 36 | 37 | INSERT INTO public.person 38 | (id, name, email, password, active, email_verified, created_at, updated_at) 39 | VALUES(uuid_generate_v4(), 'Person 10', 'person10@person10', '12345', true, false, now(), now()); 40 | -------------------------------------------------------------------------------- /scripts/dev_pg_helper.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | clear; 3 | 4 | BACKUP_OLD_DB=$4; 5 | 6 | DB_NAME=$1; 7 | DB_USER=$2; 8 | PASSWORD=$3 9 | 10 | DBS_PATH="$HOME/pg_databases"; 11 | NEW_DB="$DBS_PATH/$DB_NAME"; 12 | 13 | export LC_CTYPE=pt_BR.UTF-8; 14 | 15 | 
start_db () { 16 | cd $DBS_PATH; 17 | pg_ctl -D "$DB_NAME" -l logfile start 18 | cd -; 19 | }; 20 | 21 | stop_db () { 22 | cd $DBS_PATH; 23 | pg_ctl -D "$DB_NAME" -l logfile stop 24 | cd -; 25 | sleep 3; 26 | killall -9 postgres; 27 | }; 28 | 29 | if [ $2 == "start" ]; then 30 | start_db 31 | exit; 32 | fi; 33 | 34 | if [ $2 == "stop" ]; then 35 | stop_db 36 | exit; 37 | fi; 38 | 39 | 40 | echo DB Name: ${DB_NAME:?You need to provide a name for the database as the first param} 41 | echo DB_USER: ${DB_USER:?You need to provide a name for the db user as the second param} 42 | echo DB PASSWORD: ${PASSWORD?You need to provide a password for db as the third param} 43 | 44 | init_folder() { 45 | if [ -d $NEW_DB ]; then 46 | echo "DBs folder: $DBS_PATH"; 47 | else 48 | mkdir -p $DBS_PATH; 49 | echo "Created DBs folder in $DBS_PATH" 50 | fi; 51 | } 52 | 53 | 54 | init_db () { 55 | cd $DBS_PATH; 56 | 57 | if [ -d $NEW_DB ]; then 58 | if [ $BACKUP_OLD_DB == 1 ]; then 59 | mv $NEW_DB "$NEW_DB"".bkp.$(date +"%s")"; 60 | else 61 | rm -rf $NEW_DB; 62 | fi 63 | initdb $DB_NAME; 64 | else 65 | initdb $DB_NAME; 66 | fi 67 | 68 | pg_ctl -D "$DB_NAME" -l logfile start; 69 | 70 | cd -; 71 | }; 72 | 73 | recreate_db() { 74 | # stop db process 75 | psql -h localhost postgres ${USER} <<- _EOF_ 76 | SELECT pg_terminate_backend(pg_stat_activity.pid) 77 | FROM pg_stat_activity 78 | WHERE pg_stat_activity.datname = '${DB_NAME}' 79 | AND pid <> pg_backend_pid(); 80 | \q 81 | _EOF_ 82 | 83 | psql -h localhost postgres ${USER} <<- _EOF_ 84 | DROP DATABASE ${DB_NAME}; 85 | CREATE DATABASE ${DB_NAME}; 86 | DROP ROLE ${DB_USER}; 87 | CREATE USER ${DB_USER} WITH PASSWORD '${PASSWORD}'; 88 | GRANT ALL PRIVILEGES ON DATABASE "${DB_NAME}" to ${DB_USER}; 89 | ALTER USER ${DB_USER} WITH SUPERUSER; 90 | 91 | DROP DATABASE ${DB_NAME}_test; 92 | CREATE DATABASE ${DB_NAME}_test; 93 | CREATE USER ${DB_USER} WITH PASSWORD '${PASSWORD}'; 94 | GRANT ALL PRIVILEGES ON DATABASE "${DB_NAME}_test" to ${DB_USER}; 95 
| ALTER USER ${DB_USER} WITH SUPERUSER; 96 | _EOF_ 97 | } 98 | 99 | stop_db; 100 | init_folder; 101 | init_db; 102 | recreate_db; 103 | stop_db; 104 | start_db; 105 | -------------------------------------------------------------------------------- /src/TypeDefinition.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | import type DataLoader from 'dataloader'; 4 | import type { Context } from 'koa'; 5 | import type { PersonType } from './api/type/PersonType'; 6 | 7 | type Key = string | Array; 8 | 9 | // Table types 10 | export type TableSinglePrimaryKey = { 11 | tableName: string, 12 | primaryKey: string, 13 | fields: { 14 | [key: string]: string, 15 | }, 16 | }; 17 | 18 | export type TableMultiPrimaryKey = { 19 | tableName: string, 20 | primaryKeys: Array, 21 | fields: { 22 | [key: string]: string, 23 | }, 24 | }; 25 | 26 | export type Table = TableSinglePrimaryKey | TableMultiPrimaryKey; 27 | 28 | // Location 29 | export type LocationHeaders = { 30 | longitude: string, 31 | latitude: string, 32 | locationtimestamp: string, 33 | }; 34 | 35 | // ApiDataloader 36 | export type ApiDataloaders = { 37 | UserLoader: DataLoader, 38 | // only valid on console 39 | AdminUserLoader: DataLoader, 40 | }; 41 | 42 | // TODO - type node-pg 43 | export type PgClient = { 44 | query: ((query: string) => Promise) & 45 | ((text: string, values: Array) => Promise), 46 | connect: () => Promise, 47 | end: () => Promise, 48 | }; 49 | 50 | // Args 51 | export type ConnectionCursor = string; 52 | 53 | export type ConnectionArguments = { 54 | before?: ?ConnectionCursor, 55 | after?: ?ConnectionCursor, 56 | first?: ?number, 57 | last?: ?number, 58 | search?: ?string, 59 | }; 60 | 61 | // ApiContext 62 | export type ApiContext = { 63 | ...Context, 64 | conns: { 65 | pg: PgClient, 66 | }, 67 | person: PersonType, 68 | dataloaders: ApiDataloaders, 69 | args: ConnectionArguments, 70 | }; 71 | 
-------------------------------------------------------------------------------- /src/api/app.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | import 'isomorphic-fetch'; 4 | 5 | import Koa from 'koa'; 6 | import logger from 'koa-logger'; 7 | import cors from 'kcors'; 8 | import Router from 'koa-router'; 9 | // import convert from 'koa-convert'; 10 | // import multer from 'koa-multer'; 11 | // import prettyFormat from 'pretty-format'; 12 | 13 | import errorHandler from '../middlewares/errorHandler'; 14 | import dataloader from '../middlewares/dataloader'; 15 | import pgClientFromPool from '../middlewares/pgClientFromPool'; 16 | import { jwtSecret } from '../common/config'; 17 | // import { logApiErrorToSlack } from '../common/slack'; 18 | // import { getUser, getLocation } from './helper'; 19 | 20 | import index from './routes/index'; 21 | import * as PersonGet from './routes/person/PersonGet'; 22 | 23 | const app = new Koa(); 24 | 25 | app.keys = jwtSecret; 26 | 27 | const router = new Router(); 28 | // const storage = multer.memoryStorage(); 29 | // https://github.com/expressjs/multer#limits 30 | // const limits = { 31 | // // Increasing max upload size to 30 mb, since busboy default is only 1 mb 32 | // fieldSize: 30 * 1024 * 1024, 33 | // }; 34 | 35 | app.use(errorHandler()); 36 | app.use(logger()); 37 | app.use(cors()); 38 | app.use(dataloader()); 39 | app.use(pgClientFromPool()); 40 | 41 | router 42 | .get('/', index) 43 | .get('/api', index) 44 | .get('/api/v1', index) 45 | .get('/api/v1/users/:id', PersonGet.personGet) 46 | .get('/api/v1/users', PersonGet.personsGet); 47 | // .post('/api/v1/users/:id', PersonPost.personPost) 48 | // .post('/api/v1/users', PersonPost.personsPost) 49 | // .delete('/api/v1/users/:id', PersonDelete.personDelete) 50 | // .delete('/api/v1/users', PersonDelete.personsDelete) 51 | 52 | // router.all('/api', multer({ storage, limits }).any()); 53 | 
/**
 * Parses raw location request headers into a typed location object.
 * Returns null unless all three headers are present and non-empty;
 * longitude/latitude are parsed as floats and the timestamp as a Date.
 */
export function getLocation({ longitude, latitude, locationtimestamp } /*: LocationHeaders */) {
  // Guard clause: bail out early when any header is missing or empty.
  if (!longitude || !latitude || !locationtimestamp) {
    return null;
  }

  const parsedLongitude = parseFloat(longitude);
  const parsedLatitude = parseFloat(latitude);
  const parsedTimestamp = new Date(locationtimestamp);

  return {
    longitude: parsedLongitude,
    latitude: parsedLatitude,
    timestamp: parsedTimestamp,
  };
}
// @flow
// Relay-style cursor pagination helpers backed by OFFSET/LIMIT queries on Postgres.
// Flow annotations are written in comment syntax so the file is also plain JS.
/*::
import type { ApiContext, ConnectionArguments } from '../../../TypeDefinition';
*/

export const PREFIX = 'pg:';

// Pre-12c syntax [could also customize the original query and use row_number()]
export const sqlPaginated = (sql /*: string */) /*: string */ => `${sql} OFFSET $1 LIMIT $2`;

export const base64 = (str /*: string */) /*: string */ =>
  Buffer.from(str, 'ascii').toString('base64');
export const unbase64 = (b64 /*: string */) /*: string */ =>
  Buffer.from(b64, 'base64').toString('ascii');

/**
 * Rederives the offset from the cursor string.
 */
export const cursorToOffset = (cursor /*: string */) /*: number */ =>
  parseInt(unbase64(cursor).substring(PREFIX.length), 10);

/**
 * Given an optional cursor and a default offset, returns the offset to use;
 * if the cursor contains a valid offset, that will be used, otherwise it will
 * be the default.
 */
export const getOffsetWithDefault = (
  cursor /*: string */,
  defaultOffset /*: number */,
) /*: number */ => {
  // `== null` intentionally matches both null and undefined.
  if (cursor == null) {
    return defaultOffset;
  }
  const offset = cursorToOffset(cursor);
  // Number.isNaN avoids the coercing global isNaN.
  return Number.isNaN(offset) ? defaultOffset : offset;
};

/**
 * Creates the cursor string from an offset.
 */
export const offsetToCursor = (offset /*: number */) /*: string */ => base64(PREFIX + offset);

/*::
type TotalCountOptions = {
  // Connection Object
  client: Object,
  // From sql statement
  from: string,
  // Where sql statement
  where?: ?string,
  // distinct query statement
  distinctQuery?: ?string,
};
*/

/**
 * Counts the rows the connection spans (optionally DISTINCT on a column).
 *
 * NOTE(review): `from`, `where` and `distinctQuery` are interpolated directly
 * into the SQL string — callers must only pass trusted server-side values,
 * never user input.
 */
export const getTotalCount = async (
  { client, from, where, distinctQuery } /*: TotalCountOptions */,
) => {
  const whereSt = where != null ? `where ${where}` : '';

  const sqlCount = !distinctQuery
    ? `select count(*) from ${from} ${whereSt}`
    : `select count(DISTINCT ${distinctQuery}) from ${from} ${whereSt}`;

  // maybe this can optimize count distinct
  //SELECT COUNT(*) FROM (SELECT DISTINCT column_name FROM table_name) AS temp;

  const resultCount = await client.query(sqlCount);

  // BUGFIX(idiom): always pass an explicit radix to parseInt.
  return parseInt(resultCount.rows[0].count, 10);
};

/*::
type OffsetOptions = {
  // Connection Args
  args: ConnectionArguments,
  // total Count
  totalCount: number,
};
*/

/**
 * Translates Relay connection args (first/last/before/after) into the
 * OFFSET (`skip`) and LIMIT (`limit`) values for the paginated query,
 * clamped to the total row count.
 */
export const calculateOffsets = ({ args, totalCount } /*: OffsetOptions */) => {
  const { after, before } = args;
  let { first, last } = args;

  // Limit the maximum number of elements in a query.
  if (!first && !last) first = 10;
  if (first > 1000) first = 1000;
  if (last > 1000) last = 1000;

  const beforeOffset = getOffsetWithDefault(before, totalCount);
  const afterOffset = getOffsetWithDefault(after, -1);

  let startOffset = Math.max(-1, afterOffset) + 1;
  let endOffset = Math.min(totalCount, beforeOffset);

  if (first !== undefined) {
    endOffset = Math.min(endOffset, startOffset + first);
  }
  if (last !== undefined) {
    startOffset = Math.max(startOffset, endOffset - last);
  }

  const skip = Math.max(startOffset, 0);
  const limit = endOffset - startOffset;

  return {
    first,
    last,
    before,
    after,
    skip,
    limit,
    beforeOffset,
    afterOffset,
    startOffset,
    endOffset,
  };
};

/*::
type PageInfoOptions = {
  edges: Array,
  before: number,
  after: number,
  first: number,
  last: number,
  afterOffset: number,
  beforeOffset: number,
  startOffset: number,
  endOffset: number,
  totalCount: number,
};
*/

/**
 * Builds the Relay pageInfo object from the computed offsets and edges.
 *
 * NOTE(review): `last !== null` / `first !== null` also match undefined, so
 * hasPreviousPage is computed even when `last` was never supplied; relay's
 * reference implementation checks `typeof last === 'number'`. Preserved as-is
 * to avoid changing observable behavior — confirm intent before tightening.
 */
export const getPageInfo = ({
  edges,
  before,
  after,
  first,
  last,
  afterOffset,
  beforeOffset,
  startOffset,
  endOffset,
  totalCount,
} /*: PageInfoOptions */) => {
  const firstEdge = edges[0];
  const lastEdge = edges[edges.length - 1];
  const lowerBound = after ? afterOffset + 1 : 0;
  const upperBound = before ? Math.min(beforeOffset, totalCount) : totalCount;

  return {
    startCursor: firstEdge ? firstEdge.cursor : null,
    endCursor: lastEdge ? lastEdge.cursor : null,
    hasPreviousPage: last !== null ? startOffset > lowerBound : false,
    hasNextPage: first !== null ? endOffset < upperBound : false,
  };
};

/*::
type ConnectionOptions = {
  // Connection Object
  client: Object,
  // SQL statement
  sql: string,
  // From sql statement
  from: string,
  // Where sql statement
  where?: ?string,
  // GraphQL context
  context: ApiContext,
  // Connection Args
  args: ConnectionArguments,
  // Loader to load individually objects
  loader: (context: ApiContext, id: string) => Object,
  // distinct query statement
  distinctQuery?: ?string,
};
*/

/**
 * Runs the count + paginated queries and assembles a Relay connection
 * ({ edges, count, pageInfo }), loading each node through `loader`.
 */
const connectionFromPgCursor = async (
  {
    client,
    sql,
    from,
    where,
    context,
    args = {},
    loader,
    distinctQuery,
  } /*: ConnectionOptions */,
) => {
  const totalCount = await getTotalCount({
    client,
    from,
    where,
    distinctQuery,
  });

  const {
    first,
    last,
    before,
    after,
    skip,
    limit,
    beforeOffset,
    afterOffset,
    startOffset,
    endOffset,
  } = calculateOffsets({ args, totalCount });

  // Add LIMIT and OFFSET to query
  const sqlPaged = sqlPaginated(sql);

  // console.time(sql);
  const result = await client.query(sqlPaged, [skip, limit]);
  // console.timeEnd(sql);

  const { rows } = result;

  const edges = rows.map((value, index) => {
    const { id } = value;

    return {
      cursor: offsetToCursor(startOffset + index),
      node: loader(context, id),
    };
  });

  return {
    edges,
    count: totalCount,
    pageInfo: getPageInfo({
      edges,
      before,
      after,
      first,
      last,
      afterOffset,
      beforeOffset,
      startOffset,
      endOffset,
      totalCount,
    }),
  };
};

export default connectionFromPgCursor;
/src/api/loader/pg/db/PersonLoader.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | import DataLoader from 'dataloader'; 3 | 4 | import pgLoader from '../pgLoader'; 5 | import connectionFromPgCursor from '../ConnectionFromPgCursor'; 6 | import tPerson from '../../../type/PersonType'; 7 | 8 | import type { ApiContext, ConnectionArguments } from '../../../../TypeDefinition'; 9 | import type { PersonType } from '../../../type/PersonType'; 10 | 11 | export default class Person { 12 | id: string; 13 | name: string; 14 | nick: ?string; 15 | email: string; 16 | password: string; 17 | active: boolean; 18 | emailVerified: boolean; 19 | createdAt: ?Date; 20 | updatedAt: ?Date; 21 | 22 | constructor(data: PersonType) { 23 | this.id = data.id; 24 | this.name = data.name; 25 | this.nick = data.nick; 26 | this.email = data.email; 27 | this.password = data.password; 28 | this.active = data.active; 29 | this.emailVerified = data.email_verified; 30 | this.createdAt = data.created_at ? new Date(data.created_at) : null; 31 | this.updatedAt = data.updated_at ? new Date(data.updated_at) : null; 32 | } 33 | } 34 | 35 | const viewerCanSee = (): boolean => true; 36 | 37 | export const getLoader = (context: ApiContext) => { 38 | return new DataLoader(async ids => pgLoader(context.conns.restria, tPerson, ids), { 39 | maxBatchSize: 500, 40 | }); 41 | }; 42 | 43 | export const load = async (context: ApiContext, id: string): Promise => { 44 | if (!id) return null; 45 | 46 | const data = await context.dataloaders.PersonLoader.load(id); 47 | 48 | if (!data) return null; 49 | 50 | return viewerCanSee() ? 
new Person(data) : null; 51 | }; 52 | 53 | export const loadPersons = async (context: ApiContext, args: ConnectionArguments) => { 54 | const client = context.conns.restria; 55 | 56 | const from = `${tPerson.tableName}`; 57 | 58 | const sql = ` 59 | select ${tPerson.primaryKey} ID 60 | from ${from} 61 | order by created_at desc 62 | `; 63 | 64 | return connectionFromPgCursor({ 65 | client, 66 | sql, 67 | from, 68 | context, 69 | args, 70 | loader: load, 71 | }); 72 | }; 73 | 74 | export const clearCache = (context: ApiContext, id: string) => 75 | context.dataloaders.PersonLoader.clear(id.toString()); 76 | -------------------------------------------------------------------------------- /src/api/loader/pg/pgLoader.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Postgresql Loader use dataloader to batch sql queries 3 | * @flow 4 | */ 5 | import type { TableSinglePrimaryKey, PgClient } from '../../../TypeDefinition'; 6 | 7 | function indexResults(results, indexField, cacheKeyFn = key => key) { 8 | const indexedResults = new Map(); 9 | results.forEach(res => { 10 | indexedResults.set(cacheKeyFn(res[indexField]), res); 11 | }); 12 | return indexedResults; 13 | } 14 | 15 | function normalizeResults(keys, indexField, cacheKeyFn = key => key) { 16 | return results => { 17 | const indexedResults = indexResults(results, indexField, cacheKeyFn); 18 | return keys.map(val => indexedResults.get(cacheKeyFn(val)) || null); 19 | //new Error(`Key not found : ${val}`)); 20 | }; 21 | } 22 | 23 | const pgLoader = async ( 24 | client: PgClient, 25 | table: TableSinglePrimaryKey, 26 | ids: Array, 27 | key?: string, 28 | ) => { 29 | const _key = key || table.primaryKey; 30 | 31 | const where = ids.map(id => `'${id}'`).join(' , '); 32 | 33 | const sql = `select ${Object.keys(table.fields).join( 34 | ' , ', 35 | )} from ${table.tableName} where ${_key} in (${where})`; 36 | 37 | const result = await client.query(sql, []); 38 | 39 | const { 
rows } = result; 40 | 41 | // order rows by ids 42 | return normalizeResults(ids, _key, id => id.toString())(rows); 43 | }; 44 | 45 | export default pgLoader; 46 | -------------------------------------------------------------------------------- /src/api/routes/index.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | import { description, version } from '../../../package.json'; 4 | 5 | import type { ApiContext } from '../../TypeDefinition'; 6 | 7 | const index = async (ctx: ApiContext) => { 8 | ctx.status = 200; 9 | ctx.body = ` 10 | ${description} 11 | Package version: ${version} 12 | Node Environment: ${process.env.NODE_ENV || ''} 13 | `; 14 | }; 15 | 16 | export default index; 17 | -------------------------------------------------------------------------------- /src/api/routes/person/PersonGet.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | import * as PersonLoader from '../../loader/pg/db/PersonLoader'; 4 | 5 | import type { ApiContext } from '../../../TypeDefinition'; 6 | 7 | const personGet = async (ctx: ApiContext) => { 8 | const { id } = ctx.params; 9 | if (!id) { 10 | ctx.throw(400, 'Missing id'); 11 | } 12 | try { 13 | ctx.status = 200; 14 | ctx.body = await PersonLoader.load(ctx, id); 15 | } catch (err) { 16 | ctx.throw(404, err); 17 | } 18 | }; 19 | 20 | const personsGet = async (ctx: ApiContext) => { 21 | try { 22 | ctx.status = 200; 23 | ctx.body = await PersonLoader.loadPersons(ctx, ctx.args); 24 | } catch (err) { 25 | ctx.throw(404, err); 26 | } 27 | }; 28 | 29 | export { personGet, personsGet }; 30 | -------------------------------------------------------------------------------- /src/api/type/PersonType.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | import type { TableSinglePrimaryKey } from '../../TypeDefinition'; 4 | 5 | const tPerson: TableSinglePrimaryKey = { 6 | tableName: 
'person', 7 | primaryKey: 'id', 8 | fields: { 9 | id: 'id', 10 | name: 'name', 11 | nick: 'nick', 12 | email: 'email', 13 | password: 'password', 14 | active: 'active', 15 | email_verified: 'emailVerified', 16 | created_at: 'createdAt', 17 | updated_at: 'updatedAt', 18 | }, 19 | }; 20 | 21 | export type PersonType = { 22 | id: string, 23 | name: string, 24 | nick?: string, 25 | email: string, 26 | password: string, 27 | active: boolean, 28 | email_verified: boolean, 29 | created_at?: string, 30 | updated_at?: string, 31 | }; 32 | 33 | export default tPerson; 34 | -------------------------------------------------------------------------------- /src/common/config.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | import path from 'path'; 4 | import dotenvSafe from 'dotenv-safe'; 5 | 6 | const root = path.join.bind(this, __dirname, '../../'); 7 | 8 | dotenvSafe.load({ 9 | path: root('.env'), 10 | sample: root('.env.example'), 11 | }); 12 | 13 | export const ENV = ((process.env: any): { 14 | API_PORT: string, 15 | NODE_ENV: string, 16 | PG_RESTRIA_HOST: string, 17 | PG_RESTRIA_DATABASE: string, 18 | PG_RESTRIA_USER: string, 19 | PG_RESTRIA_PASSWORD: string, 20 | REDIS_HOST: string, 21 | [string]: ?string, 22 | }); 23 | 24 | // Display a friendly message on console to indicate if we're are runnning in a prodution or development enviroment 25 | const status = process.env.NODE_ENV === 'production' ? 
'production' : 'development'; 26 | 27 | if (process.env.NODE_ENV) { 28 | console.log(`CONFIG: NODE_ENV: '${process.env.NODE_ENV}' running in: '${status}'`); 29 | } 30 | 31 | // Export worker settings 32 | export const redisConfig = { 33 | redis: { 34 | host: process.env.REDIS_HOST, 35 | }, 36 | }; 37 | 38 | // Ports 39 | export const apiPort = process.env.API_PORT || 3009; 40 | 41 | // Slack 42 | export const slackWebhook = process.env.SLACK_WEBHOOK; 43 | 44 | // Jwt 45 | export const jwtSecret = process.env.JWT_KEY; 46 | 47 | // Databases 48 | export const dbs = { 49 | restria: { 50 | name: 'restria', 51 | host: ENV.PG_RESTRIA_HOST, 52 | database: ENV.PG_RESTRIA_DATABASE, 53 | user: ENV.PG_RESTRIA_USER, 54 | password: ENV.PG_RESTRIA_PASSWORD, 55 | port: 5432, 56 | max: 10, 57 | idleTimeoutMillis: 30000, 58 | }, 59 | }; 60 | -------------------------------------------------------------------------------- /src/common/consts.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | export const ERROR = 'ERROR'; 4 | export const OK = 'OK'; 5 | 6 | // TODO - handle user language on server 7 | // ERROR_CODES 8 | export const errCodes = { 9 | notAuthenticated: 'NOT_AUTHENTICATED', 10 | missingEmail: 'MISSING_EMAIL', 11 | invalidEmail: 'INVALID_EMAIL', 12 | notRegistered: 'NOT_REGISTERED', 13 | alreadyFBAccount: 'ALREADY_FB_ACCOUNT', 14 | invalidPassword: 'INVALID_PASSWORD', 15 | invalidEmailOrPassword: 'INVALID_EMAIL_PASSWORD', 16 | emailAlreadyInUse: 'EMAIL_ALREADY_IN_USE', 17 | }; 18 | 19 | export const validLoginTokenScopes = { 20 | userLogin: 'USER_LOGIN', 21 | userLoginAfterSignUp: 'USER_LOGIN_AFTER_SIGN_UP', 22 | userManagerInvitationEmail: 'USER_MANAGER_INVITATION_EMAIL', 23 | userLoginComplaintEmail: 'USER_LOGIN_COMPLAINT_EMAIL', 24 | }; 25 | -------------------------------------------------------------------------------- /src/common/events.js: 
-------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | /** 4 | List of static events supported by the worker 5 | Each event is described by a unique string (listed below) 6 | This is a common file between the Server and the Workers in order to avoid typos. 7 | */ 8 | 9 | const EVENTS = { 10 | // User-initiated tasks 11 | USER: { 12 | EMAIL: { 13 | SIGN_UP: 'USER:EMAIL:SIGN_UP', 14 | }, 15 | }, 16 | }; 17 | 18 | export default EVENTS; 19 | -------------------------------------------------------------------------------- /src/common/queue.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | import kue from 'kue'; 4 | import { redisConfig } from './config'; 5 | 6 | const queue = kue.createQueue(redisConfig); 7 | export default queue; 8 | -------------------------------------------------------------------------------- /src/common/slack.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | import { IncomingWebhook } from '@slack/client'; 4 | import prettyFormat from 'pretty-format'; 5 | import { slackWebhook } from './config'; 6 | 7 | export const wh = new IncomingWebhook(slackWebhook); 8 | 9 | type SlackWebhookAttachment = { 10 | text: string, 11 | color?: string, 12 | }; 13 | 14 | type SlackWebhookOptions = { 15 | channel: string, 16 | attachments: Array, 17 | [optionName: string]: any, 18 | }; 19 | 20 | /** 21 | * Thin wrapper to handle DEV and PROD environments 22 | */ 23 | export function sendtoSlack({ channel, ...args }: SlackWebhookOptions) { 24 | wh.send({ 25 | ...args, 26 | channel: process.env.NODE_ENV === 'production' ? 
channel : `${channel}-dev`, 27 | }); 28 | } 29 | 30 | export function logApiErrorToSlack(application: string, error: Object, header: string, channel: string) { 31 | if (error.message.indexOf('Must provide query string') > -1) { 32 | return; 33 | } 34 | 35 | sendtoSlack({ 36 | channel, 37 | attachments: [ 38 | { text: application, color: '#0000ff' }, 39 | { text: prettyFormat(header) }, 40 | { text: error.message }, 41 | { text: prettyFormat(error.locations) }, 42 | { text: prettyFormat(error.stack), color: '#ff0000' }, 43 | { 44 | text: prettyFormat(error.source), 45 | color: '#00ff00', 46 | }, 47 | { text: prettyFormat(error.originalError) }, 48 | ], 49 | }); 50 | } 51 | -------------------------------------------------------------------------------- /src/dbs/postgres.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | import pg from 'pg'; 4 | import { dbs } from '../common/config'; 5 | 6 | const pools = Object.keys(dbs).reduce((obj, key) => { 7 | const pool = new pg.Pool(dbs[key]); 8 | 9 | pool.on('error', error => console.error(`Error on ${key}`, error.message, error.stack)); 10 | 11 | if (process.env.NODE_ENV === 'development') { 12 | const oldQuery = pool.query; 13 | pool.query = (...args) => { 14 | console.log('--> PostgreSQL query'); 15 | console.log(...args); 16 | return oldQuery.call(pool, ...args); 17 | }; 18 | } 19 | 20 | return { 21 | ...pools, 22 | [key]: pool, 23 | }; 24 | }, {}); 25 | 26 | export default pools; 27 | -------------------------------------------------------------------------------- /src/middlewares/dataloader.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | import type { Middleware } from 'koa'; 4 | 5 | import * as loaders from '../api/loader'; 6 | import type { ApiContext } from '../TypeDefinition'; 7 | 8 | const dataloader = (): Middleware => async (ctx: ApiContext, next: () => void) => { 9 | const dataloaders = 
Object.keys(loaders).reduce( 10 | (prev, loader) => ({ 11 | ...prev, 12 | [loader]: loaders[loader].getLoader(ctx), 13 | }), 14 | {}, 15 | ); 16 | 17 | ctx.dataloaders = { 18 | ...dataloaders, 19 | }; 20 | 21 | await next(); 22 | }; 23 | 24 | export default dataloader; 25 | -------------------------------------------------------------------------------- /src/middlewares/errorHandler.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | // import prettyFormat from 'pretty-format'; 4 | 5 | import type { Middleware } from 'koa'; 6 | import type { ApiContext } from '../TypeDefinition'; 7 | 8 | // import { sendtoSlack } from '../common/slack'; 9 | 10 | const errorHandler = (): Middleware => async (ctx: ApiContext, next: () => void) => { 11 | try { 12 | await next(); 13 | } catch (error) { 14 | console.log('request:', ctx.req.body); 15 | console.log('error: ', new Date().toISOString(), error); 16 | 17 | // sendtoSlack({ 18 | // channel: 'restria', 19 | // attachments: [{ text: error.message }, { text: prettyFormat(error.stack), color: '#ff0000' }], 20 | // }); 21 | ctx.throw(404, `Not Found: ${error}`); 22 | } 23 | }; 24 | 25 | export default errorHandler; 26 | -------------------------------------------------------------------------------- /src/middlewares/pgClientFromPool.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | import type { Middleware } from 'koa'; 4 | 5 | import pools from '../dbs/postgres'; 6 | 7 | import type { ApiContext } from '../TypeDefinition'; 8 | 9 | const pgClientFromPool = (): Middleware => async (ctx: ApiContext, next: () => void) => { 10 | // there is no need to get an exclusive client from the pool 11 | ctx.conns = { 12 | ...pools, 13 | }; 14 | 15 | await next(); 16 | }; 17 | 18 | export default pgClientFromPool; 19 | -------------------------------------------------------------------------------- /test/createRows.js: 
-------------------------------------------------------------------------------- 1 | // @flow 2 | import { User, AdminUser } from '../src/models'; 3 | 4 | // Counter handles creating multiple rows on a single test 5 | let counter = { 6 | user: 0, 7 | adminUser: 0, 8 | }; 9 | 10 | // Function to restart the counter before every test 11 | export const restartCounter = () => { 12 | counter = Object.keys(counter).reduce((prev, curr) => ({ ...prev, [curr]: 0 }), {}); 13 | }; 14 | 15 | export const createUser = async ({ payload } = {}) => { 16 | counter.user++; 17 | 18 | return await new User({ 19 | name: `Normal User ${counter.user}`, 20 | email: `user-${counter.user}@example.com`, 21 | ...payload, 22 | }).save(); 23 | }; 24 | 25 | export const createAdminUser = async ({ payload } = {}) => { 26 | counter.adminUser++; 27 | 28 | return await new AdminUser({ 29 | name: `Admin User ${counter.adminUser}`, 30 | email: `user-${counter.adminUser}@example.com`, 31 | ...payload, 32 | }).save(); 33 | }; 34 | -------------------------------------------------------------------------------- /test/helper.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | import mongoose from 'mongoose'; 3 | import * as graphqlLoaders from '../src/graphql/loader'; 4 | import * as consoleLoaders from '../src/console/loader'; 5 | import { restartCounter } from './createRows'; 6 | 7 | const { ObjectId } = mongoose.Types; 8 | const loaders = { ...graphqlLoaders, ...consoleLoaders }; 9 | 10 | process.env.NODE_ENV = 'test'; 11 | 12 | const config = { 13 | db: { 14 | test: process.env.MONGO_URI_TEST || 'mongodb://localhost/graphql-boilerplate-test', 15 | }, 16 | connection: null, 17 | }; 18 | 19 | mongoose.Promise = Promise; 20 | 21 | export * from './createRows'; 22 | 23 | function connect() { 24 | return new Promise((resolve, reject) => { 25 | if (config.connection) { 26 | return resolve(); 27 | } 28 | 29 | const mongoUri = config.db.test; 30 | 31 | const 
options = { 32 | server: { 33 | auto_reconnect: true, 34 | reconnectTries: Number.MAX_VALUE, 35 | reconnectInterval: 1000, 36 | }, 37 | }; 38 | 39 | mongoose.connect(mongoUri, options); 40 | 41 | config.connection = mongoose.connection; 42 | 43 | config.connection.once('open', resolve).on('error', e => { 44 | if (e.message.code === 'ETIMEDOUT') { 45 | console.log(e); 46 | 47 | mongoose.connect(mongoUri, options); 48 | } 49 | 50 | console.log(e); 51 | reject(e); 52 | }); 53 | }); 54 | } 55 | 56 | async function clearDatabase() { 57 | await mongoose.connection.db.dropDatabase(); 58 | } 59 | 60 | export async function setupTest() { 61 | // TODO Temporary, check the problem on the promises involving the tests. 62 | jasmine.DEFAULT_TIMEOUT_INTERVAL = 20000; 63 | 64 | await connect(); 65 | await clearDatabase(); 66 | 67 | restartCounter(); 68 | 69 | // kue.clearJobs(); 70 | } 71 | 72 | export function getContext(context) { 73 | const dataloaders = Object.keys(loaders).reduce( 74 | (prev, loaderKey) => ({ 75 | ...prev, 76 | [loaderKey]: loaders[loaderKey].getLoader(), 77 | }), 78 | {}, 79 | ); 80 | 81 | return { 82 | ...context, 83 | req: {}, 84 | dataloaders, 85 | }; 86 | } 87 | 88 | function dropCollection(collection) { 89 | return new Promise((resolve, reject) => { 90 | mongoose.connection.collections[collection].drop(err => { 91 | if (err) reject(err); 92 | 93 | resolve(); 94 | }); 95 | }); 96 | } 97 | 98 | export function disconnect() { 99 | return new Promise(async (resolve, reject) => { 100 | mongoose.disconnect(err => { 101 | resolve(); 102 | }); 103 | }); 104 | } 105 | 106 | function isValidDate(val) { 107 | if (val === false) { 108 | return false; 109 | } 110 | 111 | let temp; 112 | if (val instanceof Date) { 113 | temp = val; 114 | } else { 115 | temp = new Date(val); 116 | } 117 | 118 | return isNaN(temp.getTime()) === false; 119 | } 120 | 121 | /** 122 | * Prepare an object to be snapshoted by jest 123 | * replace objectID 124 | * replace datetime 125 | * 
frozen same specific keys 126 | * @param obj 127 | * @param frozenKeys 128 | * @returns {{}} 129 | */ 130 | export function prepareObject(obj, frozenKeys = []) { 131 | const placeholder = {}; 132 | let counter = 0; 133 | 134 | const objectIdToNumber = {}; 135 | 136 | Object.keys(obj).map(key => { 137 | const value = obj[key]; 138 | 139 | if (frozenKeys.indexOf(key) > -1) { 140 | placeholder[key] = `${key}-FROZED`; 141 | return; 142 | } 143 | 144 | // Handle objectID 145 | if (value !== 0 && ObjectId.isValid('' + value)) { 146 | if (value in objectIdToNumber) { 147 | } else { 148 | objectIdToNumber[value] = counter; 149 | counter++; 150 | } 151 | 152 | const internalId = objectIdToNumber[value]; 153 | 154 | placeholder[key] = `OBJECTID_${internalId}`; 155 | return; 156 | } 157 | 158 | // Handle Date 159 | if (typeof value !== 'number' && isValidDate(value)) { 160 | placeholder[key] = 'FAKE-DA-TET00:00:00.000Z'; 161 | return; 162 | } 163 | 164 | // Handle array 165 | if (Array.isArray(value)) { 166 | placeholder[key] = value.map(item => prepareObject(item)); 167 | return; 168 | } 169 | 170 | if (typeof value === 'object') { 171 | placeholder[key] = prepareObject(value); 172 | return; 173 | } 174 | 175 | // if (value && typeof value !== 'undefined') { 176 | // console.log('valid: ', value); 177 | // 178 | // if (value._id) { 179 | // placeholder[key] = prepareMongooseObject(value); 180 | // return; 181 | // } 182 | // } 183 | 184 | placeholder[key] = value; 185 | }); 186 | 187 | return placeholder; 188 | } 189 | 190 | /** 191 | * Prepare a koa response 192 | * @param res 193 | * @param frozenKeys 194 | * @returns {{status, body: {}}} 195 | */ 196 | export function prepareResponse(res, frozenKeys = []) { 197 | return { 198 | status: res.status, 199 | body: prepareObject(res.body, frozenKeys), 200 | }; 201 | } 202 | 203 | /** 204 | * Extract object ids from mongoose objects to make it possible to use with snapshot feature 205 | * @param obj 206 | * @returns {{}} 207 | 
*/ 208 | export function prepareMongooseObject(obj, frozenKeys = []) { 209 | return prepareObject(obj.toJSON(), frozenKeys); 210 | } 211 | 212 | export const worker = { 213 | job: { 214 | data: {}, 215 | log: log => console.log('Job log', log), 216 | }, 217 | done: message => console.log('Finished', message), 218 | }; 219 | 220 | /** 221 | * Sanitize a test text removing the mentions of a `ObjectId` 222 | * @param text {string} The text to be sanitized 223 | * @returns {string} The sanitized text 224 | */ 225 | export const sanitizeTestText = text => { 226 | const values = text.split(' '); 227 | return values 228 | .map(value => { 229 | // Remove any non-alphanumeric character from value 230 | const cleanValue = value.replace(/[^a-z0-9]/gi, ''); 231 | 232 | // Check if it's a valid `ObjectId`, if so, replace it with a static value 233 | if (ObjectId.isValid(cleanValue)) { 234 | return value.replace(cleanValue, 'ObjectId'); 235 | } 236 | 237 | return value; 238 | }) 239 | .join(' '); 240 | }; 241 | 242 | const sanitizeValue = (value, field, keys) => { 243 | // If this current field is specified on the `keys` array, we simply redefine it 244 | // so it stays the same on the snapshot 245 | if (keys.indexOf(field) !== -1) { 246 | return `FROZEN-${field.toUpperCase()}`; 247 | } 248 | 249 | // Check if value is boolean 250 | if (typeof value === 'boolean') { 251 | return value; 252 | } 253 | 254 | // If value is empty, return `EMPTY` value so it's easier to debug 255 | if (!value && value !== 0) { 256 | return 'EMPTY'; 257 | } 258 | 259 | // Check if it's not an array and can be transformed into a string 260 | if (!Array.isArray(value) && typeof value.toString === 'function') { 261 | // Remove any non-alphanumeric character from value 262 | const cleanValue = value.toString().replace(/[^a-z0-9]/gi, ''); 263 | 264 | // Check if it's a valid `ObjectId`, if so, replace it with a static value 265 | if (ObjectId.isValid(cleanValue) && value.toString().indexOf(cleanValue) !== 
-1) { 266 | return value.toString().replace(cleanValue, 'ObjectId'); 267 | } 268 | } 269 | 270 | // if it's an array, sanitize the field 271 | if (Array.isArray(value)) { 272 | return value.map(item => sanitizeValue(item, null, keys)); 273 | } 274 | 275 | // If it's an object, we call sanitizeTestObject function again to handle nested fields 276 | if (typeof value === 'object') { 277 | return sanitizeTestObject(value, keys); 278 | } 279 | 280 | return value; 281 | }; 282 | 283 | /** 284 | * Sanitize a test object removing the mentions of a `ObjectId` 285 | * @param payload {object} The object to be sanitized 286 | * @param keys {[string]} Array of keys to redefine the value on the payload 287 | * @param ignore {[string]} Array of keys to ignore 288 | * @returns {object} The sanitized object 289 | */ 290 | export const sanitizeTestObject = (payload, keys = ['id'], ignore = []) => { 291 | return Object.keys(payload).reduce((sanitizedObj, field) => { 292 | if (ignore.indexOf(field) !== -1) { 293 | return sanitizedObj; 294 | } 295 | 296 | const value = payload[field]; 297 | const sanitizedValue = sanitizeValue(value, field, keys); 298 | 299 | return { 300 | ...sanitizedObj, 301 | [field]: sanitizedValue, 302 | }; 303 | }, {}); 304 | }; 305 | --------------------------------------------------------------------------------