├── .env.example ├── .eslintrc.js ├── .gitignore ├── .vscode └── extensions.json ├── Dockerfile ├── LICENCE ├── README.md ├── config └── default.js ├── data ├── .gitignore └── pgadmin4 │ └── servers.json ├── docker-compose.dev.yml ├── docker-compose.yml ├── ecosystem.config.js ├── knex ├── migrations │ ├── 20190315214938_users.js │ ├── 20190316152324_clients.js │ └── 20190316152336_products.js └── seeds │ ├── 01.users.js │ ├── 02.clients.js │ └── 03.products.js ├── knexfile.js ├── package.json ├── src ├── directives │ ├── cache.directive.js │ └── needAuth.directive.js ├── index.js ├── resolvers │ ├── fields │ │ ├── Client.resolvers.js │ │ ├── IRecord.resolvers.js │ │ ├── IResource.resolvers.js │ │ ├── Product.resolvers.js │ │ ├── RootQuery.resolvers.js │ │ └── User.resolvers.js │ ├── mutations │ │ ├── addProduct.mutation.js │ │ └── login.mutation.js │ └── resources.js ├── server │ ├── context.js │ ├── directives.js │ ├── index.js │ ├── playground.js │ ├── resolvers │ │ ├── fields.js │ │ ├── index.js │ │ └── mutations.js │ ├── type-defs.js │ └── utils │ │ ├── config.js │ │ ├── debug.js │ │ └── server-info.js ├── services │ ├── container │ │ ├── factory.js │ │ └── index.js │ ├── factories │ │ ├── auth │ │ │ ├── index.js │ │ │ ├── login.js │ │ │ └── token.js │ │ ├── cache.js │ │ ├── db.js │ │ └── index.js │ └── index.js ├── type-defs │ ├── Client.graphql │ ├── Directives.graphql │ ├── Product.graphql │ ├── User.graphql │ ├── resources.graphql │ ├── root-mutations.graphql │ ├── root-query.graphql │ └── schema.graphql └── utils │ ├── knex │ ├── dataloaders │ │ ├── factory.js │ │ ├── index.js │ │ └── load.js │ ├── resolvers │ │ ├── index.js │ │ └── resolvers.js │ ├── resources.js │ └── utils.js │ └── load-files │ ├── common.js │ ├── directives.js │ ├── graphql.js │ ├── mutations.js │ └── resolvers.js └── yarn.lock /.env.example: -------------------------------------------------------------------------------- 1 | APP_NAME=graphql 2 | 
APP_KEY=ILqlg6...PSvRkni4...q0F8t07oC...q7hWQ6hlzNo 3 | DB_HOST=postgres 4 | DB_NAME=demo 5 | DB_USER=demo 6 | DB_PASSWORD=demo 7 | PGADMIN_USER=local@local.dev 8 | PORT=7373 -------------------------------------------------------------------------------- /.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | 'env': { 3 | 'commonjs': true, 4 | 'es6': true 5 | }, 6 | 'extends': 'standard', 7 | 'globals': { 8 | 'Atomics': 'readonly', 9 | 'SharedArrayBuffer': 'readonly' 10 | }, 11 | 'parserOptions': { 12 | 'ecmaVersion': 2018 13 | }, 14 | 'rules': { 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | 9 | 10 | # Diagnostic reports (https://nodejs.org/api/report.html) 11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 12 | 13 | # Runtime data 14 | pids 15 | *.pid 16 | *.seed 17 | *.pid.lock 18 | 19 | # Directory for instrumented libs generated by jscoverage/JSCover 20 | lib-cov 21 | 22 | # Coverage directory used by tools like istanbul 23 | coverage 24 | 25 | # nyc test coverage 26 | .nyc_output 27 | 28 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 29 | .grunt 30 | 31 | # Bower dependency directory (https://bower.io/) 32 | bower_components 33 | 34 | # node-waf configuration 35 | .lock-wscript 36 | 37 | # Compiled binary addons (https://nodejs.org/api/addons.html) 38 | build/Release 39 | 40 | # Dependency directories 41 | node_modules/ 42 | jspm_packages/ 43 | 44 | # TypeScript v1 declaration files 45 | typings/ 46 | 47 | # Optional npm cache directory 48 | .npm 49 | 50 | # Optional eslint cache 51 | .eslintcache 52 | 53 | # Optional REPL history 54 | .node_repl_history 55 | 56 | # Output of 'npm pack' 57 | *.tgz 58 
| 59 | # Yarn Integrity file 60 | .yarn-integrity 61 | 62 | # dotenv environment variables file 63 | .env 64 | .env.test 65 | 66 | # parcel-bundler cache (https://parceljs.org/) 67 | .cache 68 | 69 | # next.js build output 70 | .next 71 | 72 | # nuxt.js build output 73 | .nuxt 74 | 75 | # vuepress build output 76 | .vuepress/dist 77 | 78 | # Serverless directories 79 | .serverless/ 80 | 81 | # FuseBox cache 82 | .fusebox/ 83 | 84 | # DynamoDB Local files 85 | .dynamodb/ -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | // List of extensions which should be recommended for users of this workspace. 3 | "recommendations": [ 4 | "dbaeumer.vscode-eslint", 5 | "stpn.vscode-graphql", 6 | "editorconfig.editorconfig", 7 | "p1c2u.docker-compose" 8 | ], 9 | // List of extensions recommended by VS Code that should not be recommended for users of this workspace. 
10 | "unwantedRecommendations": [ 11 | 12 | ] 13 | } -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:10.15-alpine 2 | 3 | RUN apk --no-cache --virtual build-dependencies add \ 4 | python \ 5 | make \ 6 | g++ \ 7 | && npm config set unsafe-perm true \ 8 | && npm install --quiet node-gyp -g 9 | -------------------------------------------------------------------------------- /LICENCE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Vinicius Reis 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # GraphQL Start Project 2 | A project with opinionated architecture that is used as the basis for new projects. 3 | 4 | ## Requirements 5 | Resources that must be installed for this project to work. 6 | 7 | - [node v10+](https://nodejs.org/en/download/) 8 | - [yarn v1.15+](https://yarnpkg.com/lang/en/docs/install/#debian-stable) 9 | - [docker](https://docs.docker.com/install/) 10 | - [docker-compose](https://docs.docker.com/compose/install/) 11 | 12 | ## Stack 13 | Primary libs and resources used in this project 14 | 15 | - [Apollo Server](https://github.com/apollographql/apollo-server) 16 | - [knex](https://knexjs.org/) 17 | - [awilix](https://github.com/jeffijoe/awilix) 18 | - [node-config](https://github.com/lorenwest/node-config) 19 | - [dotenv](https://github.com/motdotla/dotenv) 20 | - [pm2](https://pm2.io/doc/en/runtime/overview/) 21 | - [PostgreSQL](https://www.postgresql.org/) 22 | - [Redis](https://redis.io/) 23 | 24 | ### Commit tool 25 | This project uses [gitmoji-cli](https://github.com/carloscuesta/gitmoji-cli) for commit messages 26 | 27 | ## Project structure 28 | 29 | ``` 30 | src 31 | ├── directives 32 | │ └── *.directive.js 33 | ├── index.js 34 | ├── resolvers 35 | │ ├── fields // fields resolvers 36 | │ ├── mutations // mutation resolvers 37 | │ └── resources.js // for auto generation 38 | ├── server // server builder 39 | ├── services 40 | │ ├── factories // services factories 41 | │ └── index.js 42 | ├── type-defs 43 | │ └── *.graphql 44 | └── utils 45 | 46 | ``` 47 | 48 | ## Running project 49 | This project uses docker-compose to bring up all the services it depends on. 50 | 51 | ## Environments 52 | Copy `.env.example` to `.env` 53 | 54 | ```shell 55 | cp .env.example .env 56 | ``` 57 | 58 | > **APP_KEY** is very important. All tokens use this env.
Do not change in production. 59 | 60 | See `config/default.js` for more info. 61 | 62 | ### dev mode 63 | Brings up the service with auto reload when source files change 64 | 65 | ```shell 66 | yarn run docker:dev 67 | ``` 68 | 69 | The graphql server will be available at `http://localhost:7373` by default; see the PORT env for confirmation. 70 | 71 | When docker comes up in dev mode, the commands below will run. 72 | 73 | ```shell 74 | yarn run knex:migrate 75 | yarn run knex:seed 76 | ``` 77 | 78 | These commands configure and populate the database. 79 | 80 | See `package.json > scripts` for more info. 81 | 82 | > If you need to run commands inside the container, use `yarn run docker:dev:exec sh` 83 | 84 | #### pgadmin4 85 | When running the project in development mode, pgadmin4 will be available at `http://localhost:16543` 86 | Use `local@local.dev` and the value of `DB_PASSWORD` to access pgadmin4 87 | 88 | ### prod mode 89 | Brings up 2 pm2 services in cluster mode. 90 | 91 | ```shell 92 | yarn run docker:prod 93 | ``` -------------------------------------------------------------------------------- /config/default.js: -------------------------------------------------------------------------------- 1 | const path = require('path') 2 | 3 | const envConfig = require('dotenv').config() 4 | 5 | for (var k in envConfig.parsed) { 6 | process.env[k] = envConfig.parsed[k] 7 | } 8 | 9 | module.exports = { 10 | SERVER_NAME: 'GraphQL Demo', 11 | SERVER_BASE_DIR: path.join(__dirname, '../src'), 12 | PORT: process.env.PORT, 13 | NODE_ENV: process.env.NODE_ENV, 14 | APP_KEY: process.env.APP_KEY || 'no key', 15 | services: { 16 | database: { 17 | client: 'pg', 18 | connection: { 19 | host: process.env.DB_HOST || 'postgres', 20 | database: process.env.DB_NAME || 'demo', 21 | user: process.env.DB_USER, 22 | password: process.env.DB_PASSWORD 23 | } 24 | }, 25 | redis: { 26 | host: process.env.REDIS_HOST || 'redis', 27 | port: process.env.REDIS_PORT || 6379, 28 | // auth_pass: process.env.REDIS_AUTH_PASS || undefined, 29 | db:
process.env.REDIS_DB || 0, 30 | ttl: 600 31 | } 32 | }, 33 | env: { 34 | DEBUG_COLORS: true 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /data/.gitignore: -------------------------------------------------------------------------------- 1 | db 2 | db/* -------------------------------------------------------------------------------- /data/pgadmin4/servers.json: -------------------------------------------------------------------------------- 1 | { 2 | "Servers": { 3 | "1": { 4 | "Name": "postgres", 5 | "Group": "Servers", 6 | "Host": "postgres", 7 | "Port": 5432, 8 | "MaintenanceDB": "postgres", 9 | "Username": "demo", 10 | "SSLMode": "prefer", 11 | "SSLCert": "/.postgresql/postgresql.crt", 12 | "SSLKey": "/.postgresql/postgresql.key", 13 | "SSLCompression": 0, 14 | "Timeout": 0, 15 | "UseSSHTunnel": 0, 16 | "TunnelPort": "22", 17 | "TunnelAuthentication": 0 18 | } 19 | } 20 | } -------------------------------------------------------------------------------- /docker-compose.dev.yml: -------------------------------------------------------------------------------- 1 | version: '3.5' 2 | 3 | x-networks: 4 | &default-networks 5 | networks: 6 | - app-network 7 | 8 | networks: 9 | app-network: 10 | driver: bridge 11 | name: "${APP_NAME}-network" 12 | 13 | services: 14 | # Database 15 | postgres: 16 | << : *default-networks 17 | 18 | # Redis 19 | redis: 20 | << : *default-networks 21 | 22 | # GraphQL 23 | server: 24 | << : *default-networks 25 | environment: 26 | - NODE_ENV=development 27 | command: "yarn run startup:dev" 28 | 29 | # PG Admin 30 | pgadmin: 31 | << : *default-networks 32 | image: dpage/pgadmin4 33 | container_name: "${APP_NAME}-pgadmin4" 34 | environment: 35 | PGADMIN_DEFAULT_EMAIL: ${PGADMIN_USER} 36 | PGADMIN_DEFAULT_PASSWORD: ${DB_PASSWORD} 37 | volumes: 38 | - $PWD/data/pgadmin4/servers.json:/pgadmin4/servers.json 39 | ports: 40 | - "16543:80" 41 | links: 42 | - postgres 43 | depends_on: 44 | - 
postgres 45 | logging: 46 | driver: none -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.5" 2 | 3 | services: 4 | # Database 5 | postgres: 6 | image: postgres:11.2 7 | restart: unless-stopped 8 | container_name: "${APP_NAME}-postgres" 9 | environment: 10 | POSTGRES_DB: ${DB_NAME} 11 | POSTGRES_USER: ${DB_USER} 12 | POSTGRES_PASSWORD: ${DB_PASSWORD} 13 | volumes: 14 | - ./data/db:/var/lib/postgresql/data 15 | ports: 16 | - "15432:5432" 17 | 18 | # Redis 19 | redis: 20 | image: redis:5-alpine 21 | restart: unless-stopped 22 | container_name: "${APP_NAME}-redis" 23 | ports: 24 | - "16379:6379" 25 | 26 | # GraphQL 27 | server: 28 | build: 29 | context: . 30 | dockerfile: Dockerfile 31 | user: "node" 32 | restart: unless-stopped 33 | working_dir: /home/node/app 34 | container_name: "${APP_NAME}-server" 35 | links: 36 | - postgres 37 | - redis 38 | depends_on: 39 | - postgres 40 | - redis 41 | environment: 42 | - NODE_ENV=production 43 | volumes: 44 | - ./:/home/node/app 45 | ports: 46 | - ${PORT}:${PORT} 47 | command: "yarn run startup:prod" 48 | -------------------------------------------------------------------------------- /ecosystem.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | apps: [{ 3 | name: 'graphql-server', 4 | script: './src/index.js', 5 | ignore_watch: ['node_modules', 'data', '.git', '*.log'], 6 | env: { 7 | NODE_ENV: 'development' 8 | }, 9 | env_production: { 10 | NODE_ENV: 'production' 11 | } 12 | }] 13 | } 14 | -------------------------------------------------------------------------------- /knex/migrations/20190315214938_users.js: -------------------------------------------------------------------------------- 1 | 2 | exports.up = function (knex) { 3 | return knex.schema.createTable('users', function (table) { 4 | 
table.uuid('id').primary() 5 | table.string('email').unique() 6 | table.string('name').notNullable() 7 | table.string('password') 8 | table.boolean('is_active').defaultTo(true) 9 | table.boolean('is_admin').defaultTo(false) 10 | table.timestamps() 11 | }) 12 | } 13 | 14 | exports.down = function (knex) { 15 | return knex.schema.dropTable('users') 16 | } 17 | -------------------------------------------------------------------------------- /knex/migrations/20190316152324_clients.js: -------------------------------------------------------------------------------- 1 | 2 | exports.up = function (knex) { 3 | return knex.schema.createTable('clients', function (table) { 4 | table.uuid('id').primary() 5 | table.string('name').notNullable() 6 | table.uuid('user_id').notNullable() 7 | table.boolean('deleted').defaultTo(false) 8 | table.timestamp('deleted_at').defaultTo(null) 9 | table.timestamps() 10 | 11 | table.foreign('user_id') 12 | .references('id') 13 | .on('users') 14 | }) 15 | } 16 | 17 | exports.down = function (knex) { 18 | return knex.schema.dropTable('clients') 19 | } 20 | -------------------------------------------------------------------------------- /knex/migrations/20190316152336_products.js: -------------------------------------------------------------------------------- 1 | 2 | exports.up = function (knex) { 3 | return knex.schema.createTable('products', function (table) { 4 | table.uuid('id').primary() 5 | table.string('name').notNullable() 6 | table.text('description') 7 | table.uuid('client_id').notNullable() 8 | table.boolean('deleted').defaultTo(false) 9 | table.timestamp('deleted_at').defaultTo(null) 10 | table.timestamps(true, true) 11 | 12 | table.foreign('client_id') 13 | .references('id') 14 | .inTable('clients') 15 | .onDelete('CASCADE') 16 | }) 17 | } 18 | 19 | exports.down = function (knex) { 20 | return knex.schema.dropTable('products') 21 | } 22 | -------------------------------------------------------------------------------- 
/knex/seeds/01.users.js: -------------------------------------------------------------------------------- 1 | const { defaults, range, toLower } = require('lodash') 2 | const faker = require('faker') 3 | const uuid = require('uuid') 4 | const bcrypt = require('bcrypt') 5 | 6 | const basePassword = '1234567890x1' 7 | 8 | const newHash = () => bcrypt.hash(basePassword, 5) 9 | 10 | const newUser = async (def = {}) => { 11 | const password = await newHash() 12 | const name = faker.name.findName() 13 | 14 | return defaults({}, def, { 15 | id: uuid(), 16 | email: toLower(faker.internet.email(...name.split(' '))), 17 | created_at: faker.date.recent(30), 18 | updated_at: faker.random.boolean() ? faker.date.recent(5) : null, 19 | is_active: faker.random.boolean(), 20 | is_admin: faker.random.boolean(), 21 | name, 22 | password 23 | }) 24 | } 25 | 26 | exports.seed = async knex => { 27 | // generate fake users 28 | const entries = await Promise.all( 29 | range(0, 60).map(() => newUser()) 30 | ) 31 | 32 | entries.unshift(await newUser({ 33 | email: 'demo@demo.dev', 34 | name: 'demo', 35 | is_admin: true, 36 | is_active: true 37 | })) 38 | 39 | console.warn('Admin Demo Data', { 40 | email: 'demo@demo.dev', 41 | password: basePassword 42 | }) 43 | 44 | await knex.raw('ALTER TABLE users DISABLE TRIGGER ALL') 45 | await knex('users').del() 46 | await knex.raw('ALTER TABLE users ENABLE TRIGGER ALL') 47 | 48 | // Inserts seed entries 49 | return knex('users').insert(entries) 50 | } 51 | -------------------------------------------------------------------------------- /knex/seeds/02.clients.js: -------------------------------------------------------------------------------- 1 | const { range, shuffle, sample } = require('lodash') 2 | const faker = require('faker') 3 | const uuid = require('uuid') 4 | 5 | const newProduct = async (userId) => { 6 | const deleted = faker.random.boolean() 7 | 8 | return { 9 | id: uuid(), 10 | name: faker.company.companyName(), 11 | created_at: 
faker.date.recent(30), 12 | updated_at: faker.random.boolean() ? faker.date.recent(5) : null, 13 | deleted_at: deleted ? faker.date.recent(5) : null, 14 | user_id: userId, 15 | deleted 16 | } 17 | } 18 | 19 | exports.seed = async knex => { 20 | const users = shuffle(await knex('users').select('id')) 21 | const entries = await Promise.all( 22 | range(0, 300).map(() => newProduct(sample(users).id)) 23 | ) 24 | 25 | await knex.raw('ALTER TABLE clients DISABLE TRIGGER ALL') 26 | await knex('clients').del() 27 | await knex.raw('ALTER TABLE clients ENABLE TRIGGER ALL') 28 | 29 | // Inserts seed entries 30 | return knex('clients').insert(entries) 31 | } 32 | -------------------------------------------------------------------------------- /knex/seeds/03.products.js: -------------------------------------------------------------------------------- 1 | const { range, shuffle, sample } = require('lodash') 2 | const faker = require('faker') 3 | const uuid = require('uuid') 4 | 5 | const newProduct = async (clientId) => { 6 | const deleted = faker.random.boolean() 7 | const createdAt = faker.date.recent(30) 8 | 9 | return { 10 | id: uuid(), 11 | name: faker.commerce.productName(), 12 | description: faker.random.boolean() ? faker.lorem.paragraph() : null, 13 | created_at: createdAt, 14 | updated_at: faker.random.boolean() ? faker.date.recent(5) : createdAt, 15 | deleted_at: deleted ? 
faker.date.recent(5) : null, 16 | client_id: clientId, 17 | deleted 18 | } 19 | } 20 | 21 | exports.seed = async knex => { 22 | const users = shuffle(await knex('clients').select('id')) 23 | const entries = await Promise.all( 24 | range(0, 2500).map(() => newProduct(sample(users).id)) 25 | ) 26 | 27 | // Deletes ALL existing entries 28 | await knex('products').del() 29 | 30 | // Inserts seed entries 31 | return knex('products').insert(entries) 32 | } 33 | -------------------------------------------------------------------------------- /knexfile.js: -------------------------------------------------------------------------------- 1 | const { getConfig } = require('./src/server/utils/config') 2 | 3 | const config = getConfig() 4 | 5 | module.exports = { 6 | ...config.services.database, 7 | pool: { 8 | min: 2, 9 | max: 10 10 | }, 11 | migrations: { 12 | directory: './knex/migrations', 13 | tableName: 'knex_migrations' 14 | }, 15 | seeds: { 16 | directory: './knex/seeds' 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "graphql-auto-resolvers-demo", 3 | "version": "0.1.0", 4 | "description": "A proof of concept of automated resolvers with GraphQL", 5 | "main": "src/index.js", 6 | "repository": "https://github.com/vinicius73/graphql-auto-resolvers-demo", 7 | "author": "Vinicius Reis ", 8 | "license": "MIT", 9 | "engines": { 10 | "node": ">=10" 11 | }, 12 | "scripts": { 13 | "start:dev": "yarn pm2:watch && yarn pm2:logs", 14 | "start:prod": "yarn pm2:production && yarn pm2:logs", 15 | "setup": "yarn install && yarn run knex:migrate", 16 | "setup:dev": "yarn run setup && yarn run knex:seed", 17 | "knex:migrate": "knex migrate:latest", 18 | "knex:rollback": "knex migrate:rollback", 19 | "knex:seed": "knex seed:run", 20 | "startup:prod": "NODE_ENV=production yarn run setup && yarn run start:prod", 21 | 
"startup:dev": "NODE_ENV=development yarn run setup:dev && yarn run start:dev", 22 | "dev:monit": "pm2 monit graphql-server", 23 | "docker:dev": "docker-compose -f docker-compose.yml -f docker-compose.dev.yml up", 24 | "docker:dev:exec": "docker-compose -f docker-compose.yml -f docker-compose.dev.yml exec server", 25 | "docker:prod": "docker-compose up -d", 26 | "pm2:watch": "pm2 start --watch", 27 | "pm2:production": "pm2 start --env production -i 2", 28 | "pm2:prune": "pm2 delete graphql-server", 29 | "pm2:stop": "pm2 stop graphql-server", 30 | "pm2:logs": "pm2 logs -n graphql-server --raw", 31 | "pm2:flush": "pm2 flush" 32 | }, 33 | "dependencies": { 34 | "apollo-server": "^2.9.3", 35 | "awilix": "^4.2.2", 36 | "bcrypt": "^3.0.6", 37 | "cache-manager": "^2.10.0", 38 | "cache-manager-redis-store": "^1.5.0", 39 | "config": "^3.2.2", 40 | "dataloader": "^1.4.0", 41 | "debug": "^4.1.1", 42 | "dotenv": "^8.1.0", 43 | "graphql": "^14.5.4", 44 | "graphql-query-compress": "^1.2.2", 45 | "jsonwebtoken": "^8.5.1", 46 | "knex": "^0.19.5", 47 | "lodash": "^4.17.15", 48 | "object-hash": "^1.3.1", 49 | "pg": "^7.12.1", 50 | "pluralize": "^8.0.0", 51 | "pm2": "^3.5.1", 52 | "ramda": "^0.26.1", 53 | "readdir-enhanced": "^4.0.2", 54 | "table": "^5.4.6", 55 | "uuid": "^3.3.3" 56 | }, 57 | "devDependencies": { 58 | "eslint": "^6.3.0", 59 | "eslint-config-standard": "^14.1.0", 60 | "eslint-plugin-import": "^2.18.2", 61 | "eslint-plugin-node": "^10.0.0", 62 | "eslint-plugin-promise": "^4.2.1", 63 | "eslint-plugin-standard": "^4.0.1", 64 | "faker": "^4.1.0" 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /src/directives/cache.directive.js: -------------------------------------------------------------------------------- 1 | const { SchemaDirectiveVisitor } = require('apollo-server') 2 | const { defaultFieldResolver } = require('graphql') 3 | const objectHash = require('object-hash') 4 | 5 | const makePrefix = ({ prefix = 'graphql' }, 
field, details) => { 6 | const { objectType } = details 7 | const { name } = field 8 | 9 | return `${prefix}:${objectType}:${name}` 10 | } 11 | 12 | const makeHash = (root, args) => objectHash({ root, args }) 13 | const makeKey = (prefix, root, args) => { 14 | const hash = makeHash(root, args) 15 | 16 | return `${prefix}:${hash}` 17 | } 18 | 19 | class CacheDiretive extends SchemaDirectiveVisitor { 20 | visitFieldDefinition (field, details) { 21 | const { ttl } = this.args 22 | const { resolve = defaultFieldResolver } = field 23 | const prefix = makePrefix(this.args, field, details) 24 | 25 | field.resolve = function (...resolveArgs) { 26 | const [root, args, context] = resolveArgs 27 | const key = makeKey(prefix, root, args) 28 | 29 | // call original resolver 30 | const loader = () => resolve.apply(field, resolveArgs) 31 | 32 | return context 33 | .services 34 | .cache 35 | .wrap(key, loader, { ttl }) 36 | } 37 | } 38 | } 39 | 40 | module.exports = CacheDiretive 41 | -------------------------------------------------------------------------------- /src/directives/needAuth.directive.js: -------------------------------------------------------------------------------- 1 | const { isEmpty } = require('lodash') 2 | const { SchemaDirectiveVisitor } = require('apollo-server') 3 | const { defaultFieldResolver } = require('graphql') 4 | 5 | class NeedAuthDiretive extends SchemaDirectiveVisitor { 6 | visitObject (type) { 7 | this.ensureFieldsWrapped(type) 8 | type._onlyAdmin = this.args.onlyAdmin 9 | } 10 | 11 | visitFieldDefinition (field, details) { 12 | this.ensureFieldsWrapped(details.objectType) 13 | field._onlyAdmin = this.args.onlyAdmin 14 | } 15 | 16 | ensureFieldsWrapped (objectType) { 17 | if (objectType._NeedAuthFieldsWrapped) { 18 | return 19 | } 20 | objectType._NeedAuthFieldsWrapped = true 21 | 22 | const fields = objectType.getFields() 23 | 24 | Object.values(fields) 25 | .forEach(field => { 26 | const { resolve = defaultFieldResolver } = field 27 | 28 | 
field.resolve = async function (...args) { 29 | const requiredAdmin = field._onlyAdmin || objectType._onlyAdmin 30 | 31 | if (requiredAdmin === undefined) { 32 | return resolve.apply(this, args) 33 | } 34 | 35 | const { user } = args[2] 36 | 37 | if (isEmpty(user)) { 38 | throw new Error('not authorized') 39 | } 40 | 41 | if (requiredAdmin === true && user.is_admin === false) { 42 | throw new Error('not authorized [only admin]') 43 | } 44 | 45 | return resolve.apply(this, args) 46 | } 47 | }) 48 | } 49 | } 50 | 51 | module.exports = NeedAuthDiretive 52 | -------------------------------------------------------------------------------- /src/index.js: -------------------------------------------------------------------------------- 1 | const { serverFactory } = require('./server') 2 | 3 | serverFactory() 4 | .then(() => { 5 | console.log('✔️ All done') 6 | }) 7 | .catch(e => { 8 | console.error('⛔', e.message) 9 | }) 10 | -------------------------------------------------------------------------------- /src/resolvers/fields/Client.resolvers.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | products: ({ id }, args, { services }) => { 3 | return services.db('products').where({ client_id: id }) 4 | }, 5 | // eslint-disable-next-line camelcase 6 | user: ({ user_id }, args, { dataLoaders }) => { 7 | return dataLoaders.users.load(user_id) 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /src/resolvers/fields/IRecord.resolvers.js: -------------------------------------------------------------------------------- 1 | const { cond, T, always, has } = require('ramda') 2 | 3 | const getType = cond([ 4 | [has('email'), always('User')], 5 | [T, always(null)] 6 | ]) 7 | 8 | module.exports = { 9 | __resolveType: (obj, context, info) => { 10 | return getType(obj) 11 | } 12 | } 13 | -------------------------------------------------------------------------------- 
/src/resolvers/fields/IResource.resolvers.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | __resolveType: (obj) => { 3 | return obj.__type || null 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /src/resolvers/fields/Product.resolvers.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | // eslint-disable-next-line camelcase 3 | client: ({ client_id }, args, { dataLoaders }) => { 4 | return dataLoaders.clients.load(client_id) 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /src/resolvers/fields/RootQuery.resolvers.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | 3 | } 4 | -------------------------------------------------------------------------------- /src/resolvers/fields/User.resolvers.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | clients: ({ id }, args, { services }) => { 3 | return services.db('clients').where({ user_id: id }) 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /src/resolvers/mutations/addProduct.mutation.js: -------------------------------------------------------------------------------- 1 | const uuid = require('uuid') 2 | const { head } = require('lodash') 3 | 4 | const AddProductMutation = async (root, { input }, { services }, info) => { 5 | const { db } = services 6 | const { name, client, description } = input 7 | 8 | return db('products') 9 | .returning('*') 10 | .insert({ 11 | id: uuid(), 12 | name, 13 | description, 14 | client_id: client 15 | }) 16 | .then(head) 17 | } 18 | 19 | module.exports = AddProductMutation 20 | -------------------------------------------------------------------------------- /src/resolvers/mutations/login.mutation.js: 
-------------------------------------------------------------------------------- 1 | const LoginMutation = async (root, { email, password }, { services }) => { 2 | const { auth } = services 3 | const user = await auth.attempt(email, password) 4 | 5 | return auth.makeToken(user) 6 | } 7 | 8 | module.exports = LoginMutation 9 | -------------------------------------------------------------------------------- /src/resolvers/resources.js: -------------------------------------------------------------------------------- 1 | module.exports = [{ 2 | table: 'users' 3 | }, { 4 | table: 'clients' 5 | }, { 6 | table: 'products' 7 | }] 8 | -------------------------------------------------------------------------------- /src/server/context.js: -------------------------------------------------------------------------------- 1 | const { last, isEmpty } = require('lodash') 2 | const { makeServicesFactoy } = require('../services') 3 | const { generateDataLoaders } = require('../utils/knex/dataloaders') 4 | const graphqlQueryCompress = require('graphql-query-compress') 5 | const resources = require('../resolvers/resources') 6 | 7 | const debugRequest = req => { 8 | let query 9 | 10 | try { 11 | query = graphqlQueryCompress(req.body.query) 12 | } catch (e) { 13 | console.warn(e) 14 | } 15 | 16 | console.log('request on: %s \n -> %s \n', new Date(), query) 17 | } 18 | 19 | const loadUser = ({ auth, cache }, token) => { 20 | const loader = () => auth.loadUser(token) 21 | const key = `token:${token}` 22 | const options = { ttl: 900 } // 15min 23 | 24 | return cache.wrap(key, loader, options) 25 | } 26 | 27 | /** 28 | * extract token from request 29 | * 30 | * @param {request} req 31 | * 32 | * @returns {String|Boolean} 33 | */ 34 | const getToken = req => { 35 | try { 36 | const header = (req.header('authorization') || req.header('Authorization')) 37 | const values = (header || '').split(' ') 38 | 39 | const token = last(values) 40 | 41 | return isEmpty(token) 42 | ? 
false 43 | : token 44 | } catch (e) { 45 | console.warn(e) 46 | return false 47 | } 48 | } 49 | 50 | const contextFactory = ({ debug, config }) => { 51 | // service container factory 52 | const serviceFatory = makeServicesFactoy(config, debug) 53 | 54 | return ({ req }) => { 55 | // just log received query 56 | debugRequest(req) 57 | 58 | // get request token 59 | const token = getToken(req) 60 | 61 | // build services 62 | const services = serviceFatory(token) 63 | 64 | // generate dataLoaders 65 | const dataLoaders = generateDataLoaders(resources, services) 66 | 67 | const context = { 68 | token, 69 | debug, 70 | services, 71 | dataLoaders 72 | } 73 | 74 | // have no token, no need try load user 75 | if (!token) { 76 | return Promise.resolve(context) 77 | } 78 | 79 | // load and inject user 80 | return loadUser(services, token) 81 | .then(user => { 82 | return Promise.resolve({ 83 | user, 84 | ...context 85 | }) 86 | }) 87 | } 88 | } 89 | 90 | module.exports = { contextFactory } 91 | -------------------------------------------------------------------------------- /src/server/directives.js: -------------------------------------------------------------------------------- 1 | const loadDirectives = require('../utils/load-files/directives') 2 | const { resolve } = require('path') 3 | 4 | // https://www.apollographql.com/docs/graphql-tools/schema-directives 5 | 6 | module.exports = { 7 | loadDirectives: () => loadDirectives(resolve(__dirname, '../directives')) 8 | } 9 | -------------------------------------------------------------------------------- /src/server/index.js: -------------------------------------------------------------------------------- 1 | const { ApolloServer } = require('apollo-server') 2 | const { loadTypeDefs } = require('./type-defs') 3 | const { getConfig } = require('./utils/config') 4 | const { showServerInfo } = require('./utils/server-info') 5 | const { contextFactory } = require('./context') 6 | const { factoryPlaygroundOptions } = 
// ── src/server/index.js (continued) ──
const { loadResolvers } = require('./resolvers')
const { loadDirectives } = require('./directives')

// guards against a second serverFactory() call in the same process
let started = false

/**
 * get server port
 * prefers config.PORT, then the PORT env var, then 8081
 * @param {config} config
 * @returns {number}
 */
const getPort = config => config.PORT || process.env.PORT || 8081

/**
 * Boot the ApolloServer once: load schema pieces in parallel, wire the
 * request context, start listening and print the startup report.
 */
const serverFactory = async () => {
  // prevents the server from starting more than once.
  if (started) {
    throw new Error('server was created')
  }

  started = true

  const config = getConfig()

  // must be required only after getConfig() has populated process.env
  const debug = require('./utils/debug')

  const [typeDefs, resolvers, schemaDirectives] = await Promise.all([
    loadTypeDefs(),
    loadResolvers(),
    loadDirectives()
  ])

  const isProduction = config.NODE_ENV === 'production'

  const server = new ApolloServer({
    cors: true,
    resolvers,
    typeDefs,
    schemaDirectives,
    tracing: !isProduction,
    introspection: !isProduction,
    playground: factoryPlaygroundOptions(config),
    context: contextFactory({ config, debug })
  })

  const info = await server.listen({ port: getPort(config) })

  await showServerInfo(config, info)

  console.log(`🚀 Server ready (${config.NODE_ENV || 'development'})`)

  // allows extra manipulation of these variables by the caller
  return { server, info, config, debug }
}

module.exports = { serverFactory }

// ── src/server/playground.js ──
/**
 * Playground options: disabled in production, light theme otherwise.
 * @param {config} config
 * @returns {Object|Boolean}
 */
const factoryPlaygroundOptions = config => {
  if (config.NODE_ENV === 'production') {
    return false
  }

  return {
    settings: {
      'editor.theme': 'light'
    }
  }
}
module.exports = { factoryPlaygroundOptions } 14 | -------------------------------------------------------------------------------- /src/server/resolvers/fields.js: -------------------------------------------------------------------------------- 1 | const loadResolvers = require('../../utils/load-files/resolvers') 2 | const { resolve } = require('path') 3 | 4 | module.exports = { 5 | loadFields: () => loadResolvers(resolve(__dirname, '../../resolvers/fields')) 6 | } 7 | -------------------------------------------------------------------------------- /src/server/resolvers/index.js: -------------------------------------------------------------------------------- 1 | const { defaultsDeep } = require('lodash') 2 | const { generateResolvers } = require('../../utils/knex/resolvers') 3 | const { loadMutations } = require('./mutations') 4 | const { loadFields } = require('./fields') 5 | 6 | const makeResourceResolvers = () => { 7 | return generateResolvers( 8 | require('../../resolvers/resources') 9 | ) 10 | } 11 | 12 | module.exports = { 13 | loadResolvers: async () => { 14 | const autoResolvers = makeResourceResolvers() 15 | const [fieldsResolvers, mutationsResolvers] = await Promise.all([ 16 | loadFields(), 17 | loadMutations() 18 | ]) 19 | 20 | return defaultsDeep( 21 | {}, 22 | mutationsResolvers, 23 | autoResolvers, 24 | fieldsResolvers 25 | ) 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /src/server/resolvers/mutations.js: -------------------------------------------------------------------------------- 1 | const loadMutations = require('../../utils/load-files/mutations') 2 | const { resolve } = require('path') 3 | 4 | module.exports = { 5 | loadMutations: async () => { 6 | const RootMutations = await loadMutations(resolve(__dirname, '../../resolvers/mutations')) 7 | return { RootMutations } 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /src/server/type-defs.js: 
// ── src/server/type-defs.js ──
const { gql } = require('apollo-server')
const { resolve } = require('path')
const loadGraphQLFiles = require('../utils/load-files/graphql')

/**
 * load and generate GraphQL type defs
 *
 * Reads every .graphql file under src/type-defs, joins the contents and
 * parses the result with gql.
 *
 * @returns {Promise}
 */
const loadTypeDefs = () => {
  const dirName = resolve(__dirname, '../type-defs')

  return loadGraphQLFiles(dirName) // list of contents
    .then(types => types.join('\n')) // single SDL string
    .then(typeDefs => gql`${typeDefs}`) // parse with "gql"
}

module.exports = { loadTypeDefs }

// ── src/server/utils/config.js ──
/**
 * require config and copy its `env` section into process.env
 * NOTE(review): this overwrites variables already present in the
 * environment — confirm that is intentional.
 * @returns {config}
 */
const getConfig = () => {
  const config = require('config')

  // inject env config into process.env
  Object.entries(config.env)
    .forEach(([key, value]) => {
      process.env[key] = value
    })

  return config
}

module.exports = { getConfig }

// ── src/server/utils/debug.js ──
const factory = require('debug')

// pm2 support: re-enable namespaces from the (possibly injected) DEBUG var
factory.enable(process.env.DEBUG)

const debug = factory('server')

module.exports = debug

// ── src/server/utils/server-info.js (head) ──
const { compose } = require('ramda')
const { pick, deburr, trim, omit } = require('lodash')
const { table } = require('table')
const { exec } = require('child_process')
require('child_process') 5 | const SERVER_VERSION = require('../../../package.json').version 6 | const APOLLO_SERVER_VERSION = require('apollo-server/package.json').version 7 | 8 | const sanitize = compose(deburr, trim, st => (st || '').replace('\n', '')) 9 | 10 | const execPromise = (cmd) => new Promise((resolve, reject) => { 11 | exec(cmd, (err, output) => { 12 | if (err) { 13 | reject(err) 14 | return 15 | } 16 | resolve(output) 17 | }) 18 | }) 19 | 20 | const getGitInfo = baseDir => { 21 | const commands = [ 22 | `cd ${baseDir} && git rev-parse HEAD`, 23 | `cd ${baseDir} && git branch 2> /dev/null | sed -e '/^[^*]/d' -e 's/* \\(.*\\)/ (\\1)/'` 24 | ] 25 | return Promise.all(commands.map(execPromise)) 26 | .then(result => { 27 | const [GIT_COMMIT_HASH, GIT_BRANCH] = result.map(sanitize) 28 | return { GIT_COMMIT_HASH, GIT_BRANCH } 29 | }) 30 | .catch(err => { 31 | console.warn( 32 | 'fail to get git info', 33 | err.message 34 | ) 35 | 36 | return { 37 | GIT_COMMIT_HASH: 'Unknown', 38 | GIT_BRANCH: 'Unknown' 39 | } 40 | }) 41 | } 42 | 43 | const getNodeInfo = () => { 44 | const data = pick(process, ['platform', 'pid', 'title']) 45 | data.node_version = process.version 46 | 47 | return data 48 | } 49 | 50 | const getServerInfo = (config, httpServer) => { 51 | return getGitInfo(config.SERVER_BASE_DIR) 52 | .then(git => { 53 | return Object.assign( 54 | { now: new Date() }, 55 | pick(config, ['SERVER_NAME', 'NODE_ENV']), 56 | pick(process.env, ['PM2_USAGE', 'USER']), 57 | { 'config.env': JSON.stringify(config.env) }, 58 | { SERVER_VERSION, APOLLO_SERVER_VERSION }, 59 | omit(httpServer, ['server']), 60 | getNodeInfo(), 61 | git 62 | ) 63 | }) 64 | } 65 | 66 | const showServerInfo = (config, server) => { 67 | return getServerInfo(config, server) 68 | .then(info => { 69 | const rows = Object.keys(info) 70 | .reduce((acc, key) => { 71 | acc.push([key, info[key]]) 72 | return acc 73 | }, []) 74 | 75 | console.info(table([ 76 | ['key', 'value'], 77 | ...rows 78 | ])) 79 | 
}) 80 | .catch(err => { 81 | console.error({ 82 | message: 'fail to get server info', 83 | err 84 | }) 85 | }) 86 | } 87 | 88 | module.exports = { showServerInfo, getServerInfo } 89 | -------------------------------------------------------------------------------- /src/services/container/factory.js: -------------------------------------------------------------------------------- 1 | const { createContainer, InjectionMode, asValue, asFunction } = require('awilix') 2 | 3 | const factory = (config, debug, factories) => { 4 | const container = createContainer({ 5 | injectionMode: InjectionMode.PROXY 6 | }) 7 | 8 | container.register('config', asValue(config)) 9 | container.register('debug', asValue(debug)) 10 | 11 | Object.entries(factories) 12 | .forEach(([key, fn]) => { 13 | const service = key === 'db' 14 | ? asFunction(fn).singleton() 15 | : asFunction(fn).scoped() 16 | 17 | container.register( 18 | key, 19 | service 20 | ) 21 | }) 22 | 23 | return container 24 | } 25 | 26 | module.exports = factory 27 | -------------------------------------------------------------------------------- /src/services/container/index.js: -------------------------------------------------------------------------------- 1 | const { asValue } = require('awilix') 2 | const factoryContainer = require('./factory') 3 | 4 | const factoryTokenInject = (config, debug, factories) => { 5 | const container = factoryContainer(config, debug, factories) 6 | return token => { 7 | const scoped = container.createScope() 8 | 9 | scoped.register({ 10 | token: asValue(token), 11 | has: asValue(container.has.bind(container)) 12 | }) 13 | 14 | return scoped.cradle 15 | } 16 | } 17 | 18 | module.exports = { factoryTokenInject } 19 | -------------------------------------------------------------------------------- /src/services/factories/auth/index.js: -------------------------------------------------------------------------------- 1 | const { attempt } = require('./login') 2 | const { makeToken, loadUser } = 
require('./token') 3 | 4 | const factory = ({ db, config }) => { 5 | const secret = config.APP_KEY 6 | 7 | return { 8 | loadUser: token => loadUser(db, secret, token), 9 | attempt: (email, password) => attempt(db, email, password), 10 | makeToken: (payload, expiresIn = '3h') => makeToken(secret, payload, expiresIn) 11 | } 12 | } 13 | 14 | module.exports = factory 15 | -------------------------------------------------------------------------------- /src/services/factories/auth/login.js: -------------------------------------------------------------------------------- 1 | const bcrypt = require('bcrypt') 2 | const { isEmpty } = require('lodash') 3 | const makeError = () => Promise.reject(new Error('Invalid User/Password combination')) 4 | 5 | const loadUser = (db, email) => db('users') 6 | .select(['id', 'email', 'password']) 7 | .where({ email, is_active: true }) 8 | .first() 9 | .then(row => { 10 | if (isEmpty(row)) { 11 | return makeError() 12 | } 13 | 14 | return row 15 | }) 16 | 17 | const assetPassword = (data, encripted) => { 18 | return bcrypt.compare(data, encripted) 19 | .then(isMatch => { 20 | if (isMatch) { 21 | return 22 | } 23 | 24 | return makeError() 25 | }) 26 | } 27 | 28 | const attempt = async (db, email, password) => { 29 | const user = await loadUser(db, email) 30 | 31 | // compare password 32 | await assetPassword(password, user.password) 33 | 34 | return user 35 | } 36 | 37 | module.exports = { attempt } 38 | -------------------------------------------------------------------------------- /src/services/factories/auth/token.js: -------------------------------------------------------------------------------- 1 | const jwt = require('jsonwebtoken') 2 | const { isEmpty } = require('lodash') 3 | 4 | /** 5 | * @param {string} secret 6 | * @param {Object} payload 7 | * @param {string|number} expiresIn 8 | * @returns {Promise} 9 | */ 10 | const makeToken = (secret, payload, expiresIn) => { 11 | return new Promise((resolve, reject) => { 12 | 
jwt.sign(payload, secret, { expiresIn }, (err, token) => { 13 | if (err) { 14 | reject(err) 15 | return 16 | } 17 | 18 | resolve(token) 19 | }) 20 | }) 21 | } 22 | 23 | /** 24 | * @param {string} secret 25 | * @param {string} token 26 | * @returns {Promise} 27 | */ 28 | const decode = (secret, token) => { 29 | return new Promise((resolve, reject) => { 30 | jwt.verify(token, secret, (err, decoded) => { 31 | if (err) { 32 | reject(err) 33 | return 34 | } 35 | 36 | resolve(decoded) 37 | }) 38 | }) 39 | } 40 | 41 | /** 42 | * @param {knex} db 43 | * @param {string} secret 44 | * @param {string} token 45 | * @returns {Promise} 46 | */ 47 | const loadUser = async (db, secret, token) => { 48 | return decode(secret, token) 49 | .then(decoded => { 50 | console.log({ decoded }) 51 | const { id } = decoded 52 | return db('users') 53 | .where({ id, is_active: true }) 54 | .first() 55 | }) 56 | .then(user => { 57 | if (isEmpty(user)) { 58 | return Promise.reject(new Error('token error: user not available')) 59 | } 60 | 61 | return user 62 | }) 63 | .catch(err => { 64 | console.error(err) 65 | return Promise.reject(err) 66 | }) 67 | } 68 | 69 | module.exports = { makeToken, loadUser } 70 | -------------------------------------------------------------------------------- /src/services/factories/cache.js: -------------------------------------------------------------------------------- 1 | const cacheManager = require('cache-manager') 2 | const redisStore = require('cache-manager-redis-store') 3 | const { get } = require('lodash') 4 | 5 | const factory = ({ config }) => { 6 | const options = get(config, ['services', 'redis']) 7 | 8 | return cacheManager.caching({ 9 | store: redisStore, 10 | ...options 11 | }) 12 | } 13 | 14 | module.exports = factory 15 | -------------------------------------------------------------------------------- /src/services/factories/db.js: -------------------------------------------------------------------------------- 1 | const { get } = require('lodash') 
2 | const knex = require('knex') 3 | 4 | const factory = ({ config }) => { 5 | const options = get(config, ['services', 'database']) 6 | 7 | return knex({ 8 | ...options 9 | }) 10 | } 11 | 12 | module.exports = factory 13 | -------------------------------------------------------------------------------- /src/services/factories/index.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | db: require('./db'), 3 | cache: require('./cache'), 4 | auth: require('./auth') 5 | } 6 | -------------------------------------------------------------------------------- /src/services/index.js: -------------------------------------------------------------------------------- 1 | const factories = require('./factories') 2 | const { factoryTokenInject } = require('./container') 3 | 4 | /** 5 | * factory context container 6 | * 7 | * @param {c.config} config 8 | * @param {debug} debug 9 | * 10 | * @returns {Function} 11 | */ 12 | const makeServicesFactoy = (config, debug) => { 13 | return factoryTokenInject(config, debug, factories) 14 | } 15 | 16 | module.exports = { makeServicesFactoy } 17 | -------------------------------------------------------------------------------- /src/type-defs/Client.graphql: -------------------------------------------------------------------------------- 1 | type Client implements IRecord { 2 | id: ID! 3 | name: String! 4 | user: User! 5 | created_at: String! 6 | update_at: String! 7 | products: [Product]! 8 | } 9 | 10 | type ClientsResource implements IResource { 11 | pagination: Pagination! 12 | records: [Client]! 
# ── src/type-defs/Directives.graphql ──
# @needAuth: requires an authenticated user (optionally an admin).
directive @needAuth(
  onlyAdmin: Boolean = false,
) on FIELD_DEFINITION

# @cache: caches the resolved field value for ttl seconds.
directive @cache(
  ttl: Int!,
  prefix: String
) on FIELD_DEFINITION

# ── src/type-defs/Product.graphql ──
type Product implements IRecord {
  id: ID!
  name: String!
  description: String
  created_at: String!
  update_at: String!
  client: Client!
}

type ProductsResource implements IResource {
  pagination: Pagination! @cache(ttl: 120)
  records: [Product]! @cache(ttl: 120)
}

input CreateProductInput {
  name: String!
  description: String
  client: ID!
}

# ── src/type-defs/User.graphql ──
type User implements IRecord {
  id: ID!
  email: String!
  name: String!
  is_admin: Boolean! @needAuth(onlyAdmin: true)
  clients: [Client]! @needAuth(onlyAdmin: true)
}

type UsersResource implements IResource {
  pagination: Pagination!
  records: [User]!
}

# ── src/type-defs/resources.graphql (head) ──
interface IResource {
  pagination: Pagination!
  records: [IRecord]!
}

interface IRecord {
  id: ID!
}

type Pagination {
  per_page: Int!
  current_page: Int!
  total_pages: Int!
  total_records: Int!
}
# ── src/type-defs/resources.graphql (tail) ──
input PaginationInput {
  page: Int
  per_page: Int
}

# ── src/type-defs/root-mutations.graphql ──
type RootMutations {
  login (email: String!, password: String!): String!
  addProduct(input: CreateProductInput!): Product!
}

# ── src/type-defs/root-query.graphql ──
type RootQuery {
  Users(pagination: PaginationInput): UsersResource!
  User (id: ID!): User!

  Clients(pagination: PaginationInput): ClientsResource!
  Client (id: ID!): Client!

  Products(pagination: PaginationInput): ProductsResource!
  Product (id: ID!): Product!
}

# ── src/type-defs/schema.graphql ──
schema {
  query: RootQuery
  mutation: RootMutations
}

// ── src/utils/knex/dataloaders/factory.js ──
const DataLoader = require('dataloader')
const { find } = require('lodash')
const { factoryBatchingFunc } = require('./load')

/**
 * Build a DataLoader for a named resource.
 *
 * @param {Array} resources normalized resource configs
 * @param {Object} services
 * @returns {Function} key => DataLoader
 */
const factoryDataLoaderBuilder = (resources, services) => key => {
  const resourceConfig = find(resources, row => row.resource === key)

  if (!resourceConfig) {
    throw new Error(`${key} not a valid dataLoader`)
  }

  return new DataLoader(factoryBatchingFunc(resourceConfig, services))
}
23 | module.exports = { factoryDataLoaderBuilder } 24 | -------------------------------------------------------------------------------- /src/utils/knex/dataloaders/index.js: -------------------------------------------------------------------------------- 1 | const { map } = require('lodash') 2 | const { normalizeResources } = require('../resources') 3 | const { factoryDataLoaderBuilder } = require('./factory') 4 | 5 | const generateDataLoaders = (resourcesRaw, services) => { 6 | const cache = {} 7 | 8 | const resources = normalizeResources(resourcesRaw) 9 | const factory = factoryDataLoaderBuilder(resources, services) 10 | 11 | return new Proxy(cache, { 12 | set () { 13 | throw new Error('it is not possible to modify the dataLoader.') 14 | }, 15 | ownKeys () { 16 | return map(resources, 'resource') 17 | }, 18 | get (target, prop) { 19 | if (!target[prop]) { 20 | cache[prop] = factory(prop) 21 | } 22 | 23 | return target[prop] 24 | } 25 | }) 26 | } 27 | 28 | module.exports = { generateDataLoaders } 29 | -------------------------------------------------------------------------------- /src/utils/knex/dataloaders/load.js: -------------------------------------------------------------------------------- 1 | const { map } = require('lodash') 2 | const { queryFactory } = require('../resources') 3 | 4 | const loadData = (resourceConfig, db, id) => { 5 | const query = queryFactory(db, resourceConfig, { id }) 6 | return query 7 | .first() 8 | } 9 | 10 | const factoryBatchingFunc = (resourceConfig, services) => { 11 | return ids => { 12 | const { db } = services 13 | return Promise.all( 14 | map(ids, id => loadData(resourceConfig, db, id)) 15 | ) 16 | } 17 | } 18 | 19 | module.exports = { loadData, factoryBatchingFunc } 20 | -------------------------------------------------------------------------------- /src/utils/knex/resolvers/index.js: -------------------------------------------------------------------------------- 1 | const { reduce } = require('lodash') 2 | const { 
makeListResolver, makeResourceResolver, makeSingleResolver } = require('./resolvers') 3 | const { makeListName, makeSingleName, makeResourceName } = require('../utils') 4 | const { normalizeResources } = require('../resources') 5 | 6 | const generateResolvers = resources => { 7 | return reduce(normalizeResources(resources), (acc, resourceConfig) => { 8 | const { resource } = resourceConfig 9 | 10 | acc.RootQuery[makeSingleName(resource)] = makeSingleResolver(resourceConfig) 11 | acc.RootQuery[makeListName(resource)] = makeListResolver(resourceConfig) 12 | acc[makeResourceName(resource)] = makeResourceResolver(resourceConfig) 13 | 14 | return acc 15 | }, { RootQuery: {} }) 16 | } 17 | 18 | module.exports = { generateResolvers } 19 | -------------------------------------------------------------------------------- /src/utils/knex/resolvers/resolvers.js: -------------------------------------------------------------------------------- 1 | const { isEmpty, first, divide } = require('lodash') 2 | const { queryFactory } = require('../resources') 3 | const { makeSingleName, parsePagination } = require('../utils') 4 | 5 | const makeSingleResolver = resourceConfig => { 6 | return (root, { id }, { services }) => { 7 | const query = queryFactory(services.db, resourceConfig) 8 | 9 | return query 10 | .where({ id }) 11 | .first() 12 | .then(result => { 13 | return isEmpty(result) 14 | ? 
Promise.reject(new Error('registry not found')) 15 | : result 16 | }) 17 | } 18 | } 19 | 20 | const makeListResolver = resourceConfig => { 21 | const __type = makeSingleName(resourceConfig.resource) 22 | return (root, args, context) => { 23 | return { 24 | base_args: args, 25 | __type 26 | } 27 | } 28 | } 29 | 30 | const makeRecordsResolver = resourceConfig => { 31 | return (root, args, { services }) => { 32 | const { pagination, filters } = root.base_args 33 | const query = queryFactory(services.db, resourceConfig, filters, pagination) 34 | 35 | return query 36 | } 37 | } 38 | 39 | const makePaginationResolver = resourceConfig => { 40 | return (root, args, { services }) => { 41 | const { pagination, filters } = root.base_args 42 | const { limit, page } = parsePagination(pagination) 43 | 44 | const base = { 45 | per_page: limit, 46 | current_page: page 47 | } 48 | 49 | const query = queryFactory(services.db, resourceConfig, filters, {}) 50 | 51 | return query 52 | .count('* as count') 53 | .then(result => first(result)) 54 | .then(({ count }) => { 55 | const pages = divide(count, limit) 56 | 57 | return { 58 | ...base, 59 | total_pages: Math.ceil(pages), 60 | total_records: count 61 | } 62 | }) 63 | } 64 | } 65 | 66 | const makeResourceResolver = resourceConfig => { 67 | return { 68 | pagination: makePaginationResolver(resourceConfig), 69 | records: makeRecordsResolver(resourceConfig) 70 | } 71 | } 72 | 73 | module.exports = { 74 | makeSingleResolver, makeListResolver, makeResourceResolver 75 | } 76 | -------------------------------------------------------------------------------- /src/utils/knex/resources.js: -------------------------------------------------------------------------------- 1 | const { parsePagination } = require('./utils') 2 | const { map, isEmpty } = require('lodash') 3 | 4 | const queryFactory = (db, resourceConfig, filters = {}, pagination = {}) => { 5 | const { table, applyWhere } = resourceConfig 6 | const query = db(table) 7 | 8 | if 
(!isEmpty(applyWhere)) { 9 | query.where(applyWhere) 10 | } 11 | 12 | if (!isEmpty(filters)) { 13 | query.where(filters) 14 | } 15 | 16 | if (!isEmpty(pagination)) { 17 | const { limit, page } = parsePagination(pagination) 18 | 19 | query.limit(limit) 20 | .offset(limit * (page - 1)) 21 | } 22 | 23 | return query 24 | } 25 | 26 | const normalizeResources = tables => { 27 | return map(tables, row => { 28 | return { 29 | ...row, 30 | resource: row.resource || row.table, 31 | applyWhere: row.applyWhere || {} 32 | } 33 | }) 34 | } 35 | 36 | module.exports = { normalizeResources, queryFactory } 37 | -------------------------------------------------------------------------------- /src/utils/knex/utils.js: -------------------------------------------------------------------------------- 1 | const { get, upperFirst } = require('lodash') 2 | const { compose } = require('ramda') 3 | const pluralize = require('pluralize') 4 | 5 | const parsePagination = raw => { 6 | const limit = get(raw, ['per_page'], 15) 7 | const page = get(raw, ['page'], 1) 8 | 9 | return { limit, page } 10 | } 11 | 12 | const makeSingleName = compose(upperFirst, pluralize.singular) 13 | const makeListName = compose(upperFirst, pluralize.plural) 14 | const makeResourceName = compose(name => `${name}Resource`, upperFirst, pluralize.plural) 15 | 16 | module.exports = { parsePagination, makeSingleName, makeListName, makeResourceName } 17 | -------------------------------------------------------------------------------- /src/utils/load-files/common.js: -------------------------------------------------------------------------------- 1 | const { 2 | compose, curry, 3 | contains, map, 4 | cond, split, 5 | head, T, 6 | replace, reduce 7 | } = require('ramda') 8 | const { readFile } = require('fs') 9 | const { join } = require('path') 10 | const readdir = require('readdir-enhanced') 11 | 12 | /** 13 | * load a list of files 14 | * 15 | * @param {string} filter 16 | * @param {string} dirname 17 | * @returns 
// ── src/utils/load-files/common.js (continued) ──
/**
 * list the files under dirname matching a glob filter (recursive)
 *
 * @param {string} filter glob pattern
 * @param {string} dirname
 * @returns {Promise<Array>} relative file names
 */
const loadFilesNames = curry((filter, dirname) => {
  return readdir(dirname, { deep: true, filter })
})

/**
 * join a dirname and a file name
 *
 * @param {String} dirname
 * @param {String} fileName
 * @returns {String}
 */
const makeFullname = curry((dirname, fileName) => join(dirname, fileName))

/**
 * read a single file as utf8
 *
 * @param {String} fileName
 * @returns {Promise<String>}
 */
const loadFileContent = fileName => {
  return new Promise((resolve, reject) => {
    readFile(fileName, 'utf8', (err, data) => {
      if (err) {
        reject(err)
        return
      }

      resolve(data)
    })
  })
}

/**
 * read the content of every file in `files` (relative to dirname)
 *
 * @param {String} dirname
 * @param {Array} files
 * @returns {Promise<Array>}
 */
const loadFilesContent = curry((dirname, files) => {
  const promises = map(
    compose(loadFileContent, makeFullname(dirname)),
    files
  )

  return Promise.all(promises)
})

/**
 * require every file, keyed by getNameFn(fileName)
 *
 * @param {Function} getNameFn maps a file name to its export key
 * @param {String} dirname
 * @param {Array} files
 * @returns {Object}
 */
const loadFiles = curry((getNameFn, dirname, files) => {
  return reduce((acc, fileName) => {
    const fullName = join(dirname, fileName)

    // import the module under its derived key
    acc[getNameFn(fileName)] = require(fullName)

    return acc
  }, {}, files)
})

/**
 * build a fileName -> key function: files inside a sub-folder are keyed
 * by the folder name, flat files by stripping the suffix
 *
 * @param {String} sufix
 * @returns {Function}
 */
const makeGetName = sufix => cond([
  [contains('/'), compose(head, split('/'))], // if in a subfolder
  [T, replace(sufix, '')]
])

module.exports = { loadFilesNames, loadFilesContent, loadFiles, makeGetName }

// ── src/utils/load-files/directives.js (head) ──
const { composeP } = require('ramda')
{ composeP } = require('ramda') 2 | const { loadFilesNames, loadFiles, makeGetName } = require('./common') 3 | 4 | /** 5 | * load a list of directives files 6 | * 7 | * @param {string} dirname 8 | * @returns {Promise>} 9 | */ 10 | const loadDirectivesFilesNames = loadFilesNames('**/**.directive.js') 11 | 12 | /** 13 | * @param {String} fileName 14 | * @returns {String} 15 | */ 16 | const getName = makeGetName('.directive.js') 17 | 18 | const loadDirectives = dirName => { 19 | return composeP(loadFiles(getName, dirName), loadDirectivesFilesNames)(dirName) 20 | } 21 | 22 | module.exports = loadDirectives 23 | -------------------------------------------------------------------------------- /src/utils/load-files/graphql.js: -------------------------------------------------------------------------------- 1 | const { composeP } = require('ramda') 2 | const { loadFilesNames, loadFilesContent } = require('./common') 3 | 4 | /** 5 | * load a list of graphql files 6 | * 7 | * @param {string} dirname 8 | * @returns {Promise>} 9 | */ 10 | const loadGraphQLFilesNames = loadFilesNames('*.graphql') 11 | 12 | /** 13 | * @method loadGraphQLFiles 14 | * @param {string} dirname 15 | * @return {Promise>} 16 | */ 17 | const loadGraphQLFiles = dirname => composeP(loadFilesContent(dirname), loadGraphQLFilesNames)(dirname) 18 | 19 | module.exports = loadGraphQLFiles 20 | -------------------------------------------------------------------------------- /src/utils/load-files/mutations.js: -------------------------------------------------------------------------------- 1 | const { composeP } = require('ramda') 2 | const { loadFilesNames, loadFiles, makeGetName } = require('./common') 3 | 4 | /** 5 | * load a list of mutations files 6 | * 7 | * @param {string} dirname 8 | * @returns {Promise>} 9 | */ 10 | const loadMutationsFilesNames = loadFilesNames('**/**.mutation.js') 11 | 12 | /** 13 | * @param {String} fileName 14 | * @returns {String} 15 | */ 16 | const getName = 
// ── src/utils/load-files/mutations.js (tail) ──
// require all mutation modules under dirName, keyed by name
const loadMutations = dirName => {
  return composeP(loadFiles(getName, dirName), loadMutationsFilesNames)(dirName)
}

module.exports = loadMutations

// ── src/utils/load-files/resolvers.js ──
const { composeP } = require('ramda')
const { loadFilesNames, loadFiles, makeGetName } = require('./common')

// match every *.resolvers.js file, at any depth
const loadResolversFilesNames = loadFilesNames('**/**.resolvers.js')

// 'User.resolvers.js' -> 'User'
const getName = makeGetName('.resolvers.js')

// require all field-resolver modules under dirName, keyed by name
const loadResolvers = dirName => {
  return composeP(loadFiles(getName, dirName), loadResolversFilesNames)(dirName)
}

module.exports = loadResolvers