├── .all-contributorsrc ├── .babelrc ├── .dockerignore ├── .eslintignore ├── .eslintrc ├── .flowconfig ├── .gitignore ├── .gqlconfig ├── .nvmrc ├── .vscode ├── launch.json └── settings.json ├── .watchmanconfig ├── CREDITS.md ├── Dockerfile ├── LICENSE ├── README.md ├── __mocks__ └── knex.js ├── config ├── README.md ├── default.yml ├── development.yml ├── staging.yml └── test.yml ├── docker-compose.staging.yaml ├── docker-compose.yaml ├── docker ├── Dockerfile └── templates │ └── nginx.tmpl ├── docs └── images │ └── layers.png ├── flow └── graphqlApolloError.js ├── jsconfig.json ├── knexfile.js ├── package.json ├── src ├── business │ ├── README.md │ ├── bamer.js │ ├── book.js │ ├── index.js │ └── utils │ │ ├── __tests__ │ │ ├── __snapshots__ │ │ │ └── auth.js.snap │ │ └── auth.js │ │ └── auth.js ├── db │ ├── README.md │ ├── flow │ │ ├── README.md │ │ ├── bamer.js │ │ └── book.js │ ├── index.js │ ├── migrations │ │ ├── 20170430000642_add_table_bamer.js │ │ ├── 20170430001704_add_table_book.js │ │ ├── 20170507134620_add_email_in_bamer.js │ │ ├── 20170524191943_add-default-id-uuid.js │ │ └── README.md │ ├── queryBuilders │ │ ├── README.md │ │ ├── bamer.js │ │ ├── book.js │ │ └── index.js │ └── seeds │ │ ├── README.md │ │ └── development │ │ └── base_data.js ├── index.js ├── koa │ ├── README.md │ ├── flow │ │ └── context.js │ ├── middlewares │ │ ├── knex.js │ │ └── logger.js │ ├── server.js │ └── views │ │ ├── graphiql │ │ └── index.html │ │ └── login │ │ └── index.html └── presentation │ ├── Bamer │ ├── Bamer.resolvers.js │ └── Bamer.type.gql │ ├── Book │ ├── Book.resolvers.js │ └── Book.type.gql │ ├── Mutation │ ├── Mutation.resolvers.js │ └── Mutation.type.gql │ ├── Query │ ├── Query.resolvers.js │ └── Query.type.gql │ ├── README.md │ ├── mocks.js │ ├── resolvers.js │ └── schema.js └── yarn.lock /.all-contributorsrc: -------------------------------------------------------------------------------- 1 | { 2 | "projectName": "bam-api", 3 | "projectOwner": "bamlab", 4 | "files": [ 5 | "README.md" 6 | ], 7 | "imageSize": 150, 8 | "commit": true, 9 | "contributors": [ 10 | { 11 | "login": "tychota", 12 | "name": "TychoTa", 13 | "avatar_url": "https://avatars1.githubusercontent.com/u/13785185?v=3", 14 | "profile": "https://twitter.com/TychoTa", 15 | "contributions": [ 16 | "question", 17 | "code", 18 | "doc", 19 | "infra", 20 | "tool" 21 | ] 22 | }, 23 | { 24 | "login": "yleflour", 25 | "name": "Yann Leflour", 26 | "avatar_url": "https://avatars3.githubusercontent.com/u/1863461?v=3", 27 | "profile": "http://bamlab.fr/", 28 | "contributions": [ 29 | "bug", 30 | "code", 31 | "example", 32 | "plugin", 33 | "review" 34 | ] 35 | } 36 | ] 37 | } 38 | -------------------------------------------------------------------------------- /.babelrc: -------------------------------------------------------------------------------- 1 | { 2 | "presets": ["backpack-core/babel", "flow"], 3 | "env": { 4 | "test": { 5 | "plugins": ["transform-es2015-modules-commonjs"] 6 | } 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | *.md 3 | */*.md 4 | -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | build 3 | docs 4 | docker 5 | .vscode 6 | koa/views 7 | 
-------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "parser": "babel-eslint", 3 | "parserOptions": { 4 | "ecmaVersion": 8, 5 | "sourceType": "module" 6 | }, 7 | "plugins": ["flowtype", "prettier", "jest"], 8 | "env": { 9 | "node": true, 10 | "es6": true, 11 | "jest/globals": true 12 | }, 13 | "rules": { 14 | "no-duplicate-imports": "error", 15 | "no-undef": "error", 16 | "no-unused-vars": "error", 17 | "no-else-return": "error", 18 | "flowtype/define-flow-type": 1, 19 | "flowtype/use-flow-type": 1, 20 | "jest/no-disabled-tests": "warn", 21 | "jest/no-focused-tests": "error", 22 | "jest/no-identical-title": "error", 23 | "jest/valid-expect": "error", 24 | "prettier/prettier": [ 25 | "error", 26 | { 27 | "trailingComma": "es5", 28 | "singleQuote": true, 29 | "bracketSpacing": true, 30 | "tabWidth": 2, 31 | "printWidth": 100 32 | } 33 | ] 34 | }, 35 | "settings": { 36 | "flowtype": { 37 | "onlyFilesWithFlowAnnotation": false 38 | } 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /.flowconfig: -------------------------------------------------------------------------------- 1 | [ignore] 2 | /node_modules/findup/test/fixture/.* 3 | /node_modules/protobufjs-no-cli/src/bower.json 4 | 5 | [include] 6 | 7 | [libs] 8 | flow 9 | src/db/flow 10 | src/koa/flow 11 | 12 | [options] 13 | unsafe.enable_getters_and_setters=true 14 | 15 | [version] 16 | ^0.50.0 17 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | *.sqlite 3 | 4 | docker/certs 5 | docker/conf.d 6 | docker/proxy 7 | docker/database 8 | docker/vhost.d 9 | 10 | build -------------------------------------------------------------------------------- /.gqlconfig: -------------------------------------------------------------------------------- 1 | /* .gqlconfig */ 2 | { 3 | schema: { 4 | files: '**/*.gql' 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /.nvmrc: -------------------------------------------------------------------------------- 1 | 8.1.4 2 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible Node.js debug attributes. 3 | // Hover to view descriptions of existing attributes. 4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "type": "node", 9 | "request": "launch", 10 | "protocol": "inspector", 11 | "name": "Launch Program", 12 | "program": "${workspaceRoot}/build/main.js", 13 | "env": { 14 | "NODE_ENV": "development" 15 | } 16 | } 17 | ] 18 | } -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | // Place your settings in this file to overwrite default and user settings. 
2 | { 3 | "editor.formatOnSave": true, 4 | "prettier.bracketSpacing": true, 5 | "prettier.printWidth": 100, 6 | "prettier.singleQuote": true, 7 | "prettier.tabWidth": 2, 8 | "prettier.trailingComma": "es5", 9 | "graphqlForVSCode.nodePath": "node_modules", 10 | "javascript.validate.enable": false 11 | } 12 | -------------------------------------------------------------------------------- /.watchmanconfig: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bamlab/bam-api/b325e16e210ada8ecc20c7662ba2e92dcc4c60d5/.watchmanconfig -------------------------------------------------------------------------------- /CREDITS.md: -------------------------------------------------------------------------------- 1 | # Credits 2 | 3 | ## https://github.com/entria/graphql-dataloader-boilerplate 4 | 5 | by @sibelius and the rest of Entria 6 | 7 | This boilerplate was really good and inspired a lot of this project, in particular the viewerCanSee pattern. 8 | However, we use Postgres instead of Mongo, and we target Apollo rather than Relay. 9 | 10 | We are using Apollo Server to write less code, thanks to the `*.gql` schema-language type definitions. 11 | 12 | Thus the structure and the goal of the app are slightly different. 13 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:8.0-alpine 2 | 3 | ENV DOCKERIZE_VERSION v0.4.0 4 | RUN apk add --no-cache --virtual .build-deps curl \ 5 | && curl -fSL -o dockerize-alpine-linux-amd64-$DOCKERIZE_VERSION.tar.gz https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_VERSION/dockerize-alpine-linux-amd64-$DOCKERIZE_VERSION.tar.gz \ 6 | && tar -C /usr/local/bin -xzvf dockerize-alpine-linux-amd64-$DOCKERIZE_VERSION.tar.gz \ 7 | && rm dockerize-alpine-linux-amd64-$DOCKERIZE_VERSION.tar.gz 8 | 9 | WORKDIR /srv 10 | COPY . /srv 11 | RUN yarn install && yarn build 12 | 13 | CMD dockerize -wait tcp://postgres:5432 -timeout 10s && yarn start 14 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2017 Tycho Tatitscheff and all contributors 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # BAM API 2 | 3 | A GraphQL, "production ready"™ API for internal use at BAM, but open sourced as an example! 4 | 5 | ## Foreword 6 | 7 | Finding great resources on client-side GraphQL is pretty easy; for instance, for Apollo, there is https://www.learnapollo.com/. 8 | 9 | Finding great resources on the server side is way more difficult. GraphQL is only a spec, and there are literally at least 20 different language implementations for the backend. 10 | 11 | That being said, using this project as a boilerplate to start a simple GraphQL project may be less pragmatic than using a great SaaS like https://www.graph.cool/! 12 | 13 | ## Goals 14 | 15 | The goals of this repo are: 16 | - to have a GraphQL API that can be used as a reference for existing and new GraphQL projects, 17 | - to have a GraphQL API that shows some "production ready"™ tricks that are not easy to find on the web (for instance OAuth authentication and access control), 18 | - to investigate the development tooling around GraphQL APIs (Flow, ESLint, etc.), 19 | - to investigate good ways to put containers in production (Docker, maybe Kubernetes ^^), 20 | - to have a centralized API to start cool projects at BAM, like internal tools and so on! 21 | 22 | Non-goals of this repo are: 23 | - to be minimalist (if you want that, use a SaaS like Graphcool), 24 | - to be a boilerplate generator for the foreseeable future (that being said, we might extract our `create-graphql-api` app from this later on) 25 | 26 | ## Repository structure 27 | 28 | We organized the server using the following layers: 29 | 30 | ![Layers](docs/images/layers.png) 31 | 32 | ### Database 33 | 34 | All the related files are either: 35 | - the `knexfile.js` at the root of the directory, used by the knex command line 36 | - in the `db` folder 37 | 38 | The database layer consists of the `db/migrations/*.js` and `db/seed/*.js` files. Migrations are common for traditional RDBMSs. 39 | 40 | I'm personally fond of them: I think they offer extra security by providing incremental, reversible and documented changes. 41 | 42 | We use the http://knexjs.org/ query builder to write and execute the migrations. Knex also provides a way to seed the development database with some fixture data. 43 | 44 | Last but not least, not using a full ORM actually makes it easier to write performant and understandable database queries. 45 | 46 | ### Query Builders 47 | 48 | We then have Query Builders in the `db/queryBuilders` directory. 49 | 50 | Query builders provide a domain-driven abstraction over raw SQL calls. 51 | 52 | Consider the following example: 53 | 54 | ```js 55 | class UserQueryBuilder { 56 | static async createOrUpdateNameByID(id: string, name: string): Promise<UserType> { 57 | const result = await knex.table('users').first('id', 'name').where('id', id); 58 | if (result) { 59 | return await knex.table('users').update('name', name).where('id', id).returning(['id', 'name']); 60 | } 61 | return await knex.table('users').insert({ name }).returning(['id', 'name']); 62 | } 63 | } 64 | ``` 65 | 66 | In the rest of the code we will use `UserQueryBuilder.createOrUpdateNameByID('long-uuid', 'Tom')`, which is a nice abstraction. 67 | 68 | That being said, we still have fine-grained control over the real SQL query sent, and we are able to use engine-specific features like `returning` in our case, or indexes.
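Since the business layer (next section) batches its database access with DataLoader, the query builders also expose batch methods such as `getByListofIds`. Below is a minimal sketch of what such a method can look like — it is an illustration, not a file from this repo: the `users` table, the column list and the `UserType` alias are assumptions, and the only hard requirement is that the result array has the same length and order as the input `ids`, which is what DataLoader expects.

```js
class UserQueryBuilder {
  // Batch variant: fetch many users in a single SQL query (sketch, not the actual implementation).
  static async getByListofIds(ids: Array<string>): Promise<Array<?UserType>> {
    // One round trip for all requested ids.
    const rows = await knex.table('users').select('id', 'name').whereIn('id', ids);
    // Re-order the rows to match the input ids, padding misses with null,
    // so a DataLoader receives an array of the same length as its keys.
    const rowsById = new Map(rows.map(row => [row.id, row]));
    return ids.map(id => rowsById.get(id) || null);
  }
}
```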
69 | 70 | ### Business logic 71 | 72 | Above the query building layer, we have the business layer: `business/*.js`. 73 | 74 | Basically, the query building layer answers "how can I access the data" in a naive way, without taking access control into account. 75 | In contrast, the business logic handles the following tasks: 76 | 77 | - **access control**: if the user requesting a piece of data has no right to access it, return null or throw an error, 78 | - **scoping / whitelisting**: some properties are private, and you do not want to expose them. For example, you never want to return the hash of a user's password. You may also want scoping that varies based on user role, for instance allowing only you and your manager to see your performance results, 79 | - **batching and caching**: in order to solve the N+1 problem, we can use batching and caching with Facebook's dataloader. Example: if you query a list of users and, for each of them, the books they have read, you will end up doing at least one query for the list of users and one query per user to get their list of books. Using dataloader you will do two queries: one for the list of users, and one for the aggregated list of books. 80 | - **computed properties**: for example, if you want to keep the deprecated `name` field while the database now stores `firstName` and `lastName`, you can compute the `name` property in the business class. 81 | 82 | 83 | ### Presentation 84 | 85 | GraphQL is an advanced presentation layer. It gives more information to the client than most REST APIs around, in a simpler way. 86 | 87 | Once a query is received, GraphQL parses it and matches it against the type system. It then calls resolvers, which call our business logic. GraphQL finally returns a response with the same structure as the query, but with the data filled in. 88 | 89 | In the `presentation` folder, we can find modules that all match the following structure: 90 | - `XXXX.type.gql` that describes the type and its documentation. It uses the [GraphQL schema language](https://wehavefaces.net/graphql-shorthand-notation-cheatsheet-17cd715861b6) 91 | - `XXXX.schema.gql` that will be concatenated to create the root schema. Every field in the `XXXX.schema.gql` is directly accessible at the top of the tree. 92 | - `XXXX.resolvers.js` that will be concatenated to create the resolvers map, which calls the business layer to resolve the data efficiently and with the security layer applied. 93 | 94 | ### Transport 95 | 96 | So far the transport layer is a simple koa2 server, using HTTP 1.1. 97 | 98 | SSL termination is handled by an nginx proxy; see the docker-compose file. 99 | 100 | Authentication is done with public-key JWTs (RS256), using a Google OAuth provider on Auth0. In the future, we may consider a self-hosted solution. 101 | 102 | ## Contributors 103 | 104 | Thanks goes to these wonderful people ([emoji key](https://github.com/kentcdodds/all-contributors#emoji-key)): 105 | 106 | 107 | | [
TychoTa](https://twitter.com/TychoTa)
[💬](#question-tychota "Answering Questions") [💻](https://github.com/bamlab/bam-api/commits?author=tychota "Code") [📖](https://github.com/bamlab/bam-api/commits?author=tychota "Documentation") [🚇](#infra-tychota "Infrastructure (Hosting, Build-Tools, etc)") [🔧](#tool-tychota "Tools") | [
Yann Leflour](http://bamlab.fr/)
[🐛](https://github.com/bamlab/bam-api/issues?q=author%3Ayleflour "Bug reports") [💻](https://github.com/bamlab/bam-api/commits?author=yleflour "Code") [💡](#example-yleflour "Examples") [🔌](#plugin-yleflour "Plugin/utility libraries") [👀](#review-yleflour "Reviewed Pull Requests") | 108 | | :---: | :---: | 109 | 110 | 111 | This project follows the [all-contributors](https://github.com/kentcdodds/all-contributors) specification. Contributions of any kind welcome! 112 | 113 | ## Licence 114 | 115 | MIT -------------------------------------------------------------------------------- /__mocks__/knex.js: -------------------------------------------------------------------------------- 1 | /* eslint-env jest */ 2 | 3 | // shamelesly copied from https://gist.github.com/cpsubrian/b4820b475e7262251a16fb286606e4f7 4 | 5 | import _ from 'lodash'; 6 | import path from 'path'; 7 | import fs from 'fs'; 8 | import callsites from 'callsites'; 9 | import hash from 'object-hash'; 10 | import Config from 'config'; 11 | 12 | const knex = require.requireActual('knex'); 13 | 14 | const config = Config.get('Database'); 15 | const dirs = { 16 | migrations: path.resolve(__dirname, '../src/db/migrations'), 17 | seeds: path.resolve(__dirname, '../src/db/seeds'), 18 | }; 19 | 20 | // Track jasmine suites and specs. 21 | let suites = []; 22 | let specs = []; 23 | global.jasmine.getEnv().addReporter({ 24 | suiteStarted: suite => suites.push(suite), 25 | suiteDone: () => suites.pop(), 26 | specStarted: spec => specs.push(spec), 27 | specDone: () => specs.pop(), 28 | }); 29 | 30 | // Helper to get the current spec's fullname, or in absense, the nearest 31 | // suite. 32 | function getSpecName() { 33 | let spec = _.last(specs); 34 | let suite = _.last(suites); 35 | if (spec) { 36 | return spec.fullName; 37 | } else if (suite) { 38 | return suite.description; 39 | } 40 | throw new Error('Not currently in a spec or a suite'); 41 | } 42 | // Test dbs will be stored here. 43 | const stack = []; 44 | stack.ensure = function() { 45 | if (!stack.length) { 46 | stack.unshift(knex(config)); 47 | } 48 | }; 49 | 50 | // Create the knex proxy. This will treat whichever db is at the front 51 | // of the stack as the active one. 52 | const db = new Proxy( 53 | function(...args) { 54 | stack.ensure(); 55 | return stack[0].apply(stack[0], args); 56 | }, 57 | { 58 | get(target, name) { 59 | stack.ensure(); 60 | if (!(name in stack[0])) { 61 | console.warn("Getting non-existant property '" + name + "'"); 62 | return undefined; 63 | } 64 | return stack[0][name]; 65 | }, 66 | set(target, name, value) { 67 | stack.ensure(); 68 | stack[0][name] = value; 69 | return true; 70 | }, 71 | } 72 | ); 73 | 74 | // Destroy any open databases. 75 | afterAll(() => { 76 | // We need to do this after other afterAll() handlers run. 77 | setImmediate(() => { 78 | while (stack.length) { 79 | stack.shift().destroy(); 80 | } 81 | }); 82 | }); 83 | 84 | // Mock the db.client and run tests with overridable mocks. 85 | function withMockDatabase(tests) { 86 | let mocks = { 87 | _query: jest.fn(() => { 88 | return Promise.reject(new Error('Not implemented')); 89 | }), 90 | _stream: jest.fn(() => { 91 | return Promise.reject(new Error('Not implemented')); 92 | }), 93 | acquireConnection: jest.fn(() => Promise.resolve({})), 94 | releaseConnection: jest.fn(() => Promise.resolve()), 95 | }; 96 | 97 | beforeAll(() => { 98 | stack.ensure(); 99 | // Override prototype methods with instance properties. 
100 | _.each(mocks, (val, key) => { 101 | db.client[key] = val; 102 | }); 103 | }); 104 | 105 | tests(mocks); 106 | 107 | afterAll(() => { 108 | // Remove instance properties to restore prototype versions. 109 | _.each(mocks, (val, key) => { 110 | delete db.client[key]; 111 | }); 112 | }); 113 | } 114 | 115 | // Inject a real test database for the current test scenario. 116 | function withTestDatabase(tests) { 117 | const name = `ac_test__${Date.now()}_${Math.floor(Math.random() * 100)}`; 118 | 119 | beforeAll(() => { 120 | return db 121 | .raw('CREATE DATABASE ??', [name]) 122 | .then(() => { 123 | let _config = _.cloneDeep(config); 124 | _config.database = name; 125 | stack.unshift(knex(_config)); 126 | }) 127 | .then(() => { 128 | return db.migrate.latest({ directory: dirs.migrations }); 129 | }) 130 | .then(() => { 131 | return db.seed.run({ directory: dirs.seeds }); 132 | }); 133 | }); 134 | 135 | tests(name); 136 | 137 | afterAll(() => { 138 | return stack.shift().destroy().then(() => { 139 | return db.raw('DROP DATABASE ??', [name]); 140 | }); 141 | }); 142 | } 143 | 144 | // Store snapshots created in this test run. 145 | const snapshots = {}; 146 | 147 | // Cache query responses and mock them on subsequent test runs. 148 | function withQuerySnapshots(_filename, tests) { 149 | let dir = path.resolve(path.dirname(_filename), '__fixtures__'); 150 | let filename = path.basename(_filename) + '.queries'; 151 | let filepath = path.join(dir, filename); 152 | let exists = fs.existsSync(filepath); 153 | let update = typeof process.env.REQUERY !== 'undefined'; 154 | 155 | if (exists && !update) { 156 | let cached = require(filepath); 157 | withMockDatabase(mocks => { 158 | mocks._query.mockImplementation((conn, obj) => { 159 | let specName = getSpecName(); 160 | let queryHash = hash(obj.sql); 161 | let querySnaps = _.get(cached, [specName, queryHash]) || []; 162 | let snapshot = querySnaps.shift(); 163 | if (snapshot) { 164 | if (snapshot.error) { 165 | throw _.extend(new Error(snapshot.error.message), snapshot.error.data); 166 | } else { 167 | return Promise.resolve(_.extend({}, obj, snapshot)); 168 | } 169 | } else { 170 | throw _.extend(new Error('Could not find snapshot for query'), { obj }); 171 | } 172 | }); 173 | tests(); 174 | }); 175 | } else { 176 | withTestDatabase(() => { 177 | beforeAll(() => { 178 | db.on('query-response', function captureSnapshot(rows, obj) { 179 | obj.sql = obj.sql.replace(/\$\d+/g, '?'); 180 | let specName = getSpecName(); 181 | let queryHash = hash(obj.sql); 182 | let querySnaps = _.get(snapshots, [filepath, specName, queryHash]) || []; 183 | let snapshot = _.cloneDeep(_.pick(obj, 'sql', 'bindings', 'response')); 184 | _.set(snapshots, [filepath, specName, queryHash, querySnaps.length], snapshot); 185 | }); 186 | db.on('query-error', function captureSnapshot(err, obj) { 187 | obj.sql = obj.sql.replace(/\$\d+/g, '?'); 188 | let specName = getSpecName(); 189 | let queryHash = hash(obj.sql); 190 | let querySnaps = _.get(snapshots, [filepath, specName, queryHash]) || []; 191 | let snapshot = _.cloneDeep(_.pick(obj, 'sql', 'bindings')); 192 | snapshot.error = { message: err.message, data: err }; 193 | _.set(snapshots, [filepath, specName, queryHash, querySnaps.length], snapshot); 194 | }); 195 | }); 196 | 197 | tests(); 198 | 199 | afterAll(() => { 200 | if (_.isEmpty(snapshots[filepath])) { 201 | if (exists) { 202 | fs.unlinkSync(filepath); 203 | if (fs.existsSync(dir) && !fs.readdirSync(dir).length) { 204 | fs.rmdirSync(dir); 205 | } 206 | } 207 | } else { 208 
| let obj = JSON.stringify(snapshots[filepath] || {}, null, 2); 209 | if (!fs.existsSync(dir)) { 210 | fs.mkdirSync(dir); 211 | } 212 | fs.writeFileSync(filepath, `module.exports = ${obj};`); 213 | } 214 | }); 215 | }); 216 | } 217 | } 218 | 219 | // Return a string with an incrementing count appended. 220 | const counts = {}; 221 | function appendCount(str) { 222 | counts[str] = counts[str] ? ++counts[str] : 0; 223 | return str + (counts[str] ? `(${counts[str]})` : ''); 224 | } 225 | 226 | // Extend the global describe object. 227 | global.describe.withMockDatabase = function(description, tests) { 228 | if (typeof description === 'function') { 229 | tests = description; 230 | description = 'with mock database'; 231 | } 232 | describe(appendCount(description), () => { 233 | withMockDatabase(tests); 234 | }); 235 | }; 236 | global.describe.withTestDatabase = function(description, tests) { 237 | if (typeof description === 'function') { 238 | tests = description; 239 | description = 'with test database'; 240 | } 241 | describe(appendCount(description), () => { 242 | withTestDatabase(tests); 243 | }); 244 | }; 245 | global.describe.withQuerySnapshots = function(description, tests) { 246 | const caller = callsites()[1]; 247 | if (typeof description === 'function') { 248 | tests = description; 249 | description = 'with query snapshots'; 250 | } 251 | describe(appendCount(description), () => { 252 | withQuerySnapshots(caller.getFileName(), tests); 253 | }); 254 | }; 255 | 256 | export default db; 257 | -------------------------------------------------------------------------------- /config/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bamlab/bam-api/b325e16e210ada8ecc20c7662ba2e92dcc4c60d5/config/README.md -------------------------------------------------------------------------------- /config/default.yml: -------------------------------------------------------------------------------- 1 | Database: 2 | migrations: 3 | directory: './src/db/migrations' 4 | Server: 5 | port: 3000 6 | Logger: 7 | verboseLevel: 1 -------------------------------------------------------------------------------- /config/development.yml: -------------------------------------------------------------------------------- 1 | Database: 2 | client: 'pg' 3 | connection: postgresql://postgres@127.0.0.1/tql 4 | seeds: 5 | directory: './src/db/seeds/development' 6 | debug: false 7 | Security: 8 | jwks: 9 | cache: true 10 | rateLimit: true 11 | jwksRequestsPerMinute: 5 12 | jwksUri: 'https://tychot.eu.auth0.com/.well-known/jwks.json' 13 | jwt: 14 | audience: X1hVKtRbNWrpj4puBwjIJsf9ghTaMakZ 15 | issuer: 'https://tychot.eu.auth0.com/' 16 | debug: true 17 | algorithms: [ 'RS256' ] 18 | Logger: 19 | verboseLevel: 1 -------------------------------------------------------------------------------- /config/staging.yml: -------------------------------------------------------------------------------- 1 | Database: 2 | client: 'pg' 3 | connection: postgresql://tql:itstqldude@postgres/tql 4 | debug: false 5 | Security: 6 | jwks: 7 | cache: true 8 | rateLimit: true 9 | jwksRequestsPerMinute: 5 10 | jwksUri: 'https://tychot.eu.auth0.com/.well-known/jwks.json' 11 | jwt: 12 | audience: X1hVKtRbNWrpj4puBwjIJsf9ghTaMakZ 13 | issuer: 'https://tychot.eu.auth0.com/' 14 | debug: true 15 | algorithms: [ 'RS256' ] -------------------------------------------------------------------------------- /config/test.yml: 
-------------------------------------------------------------------------------- 1 | Database: 2 | client: 'pg' 3 | connection: postgresql://postgres@127.0.0.1/tql 4 | seeds: 5 | directory: './src/db/seeds/development' 6 | debug: false 7 | Security: 8 | jwks: 9 | cache: true 10 | rateLimit: true 11 | jwksRequestsPerMinute: 5 12 | jwksUri: 'https://tychot.eu.auth0.com/.well-known/jwks.json' 13 | jwt: 14 | audience: X1hVKtRbNWrpj4puBwjIJsf9ghTaMakZ 15 | issuer: 'https://tychot.eu.auth0.com/' 16 | debug: true 17 | algorithms: [ 'RS256' ] 18 | Logger: 19 | verboseLevel: 0 -------------------------------------------------------------------------------- /docker-compose.staging.yaml: -------------------------------------------------------------------------------- 1 | version: "2" 2 | 3 | volumes: 4 | database: 5 | driver: local 6 | 7 | networks: 8 | proxy-tier: 9 | driver: bridge 10 | 11 | services: 12 | nginx: 13 | image: pixelfordinner/nginx 14 | container_name: nginx_proxy-nginx 15 | restart: unless-stopped 16 | ports: 17 | - "80:80" 18 | - "443:443" 19 | volumes: 20 | - "./docker/conf.d:/etc/nginx/conf.d:ro" 21 | - "./docker/vhost.d:/etc/nginx/vhost.d:ro" 22 | - "./docker/certs:/etc/nginx/certs:ro" 23 | - "/usr/share/nginx/html" 24 | networks: 25 | - proxy-tier 26 | 27 | docker-gen: 28 | image: quay.io/tychot/docker-gen-nginx 29 | container_name: nginx_proxy-docker_gen 30 | restart: unless-stopped 31 | depends_on: 32 | - nginx 33 | volumes_from: 34 | - nginx 35 | volumes: 36 | - "/var/run/docker.sock:/tmp/docker.sock:ro" 37 | - "./docker/conf.d:/etc/nginx/conf.d:rw" 38 | networks: 39 | - proxy-tier 40 | entrypoint: /usr/local/bin/docker-gen -notify-sighup nginx_proxy-nginx -watch -wait 5s:30s /etc/docker-gen/templates/nginx.tmpl /etc/nginx/conf.d/default.conf 41 | 42 | lets-encrypt-companion: 43 | image: jrcs/letsencrypt-nginx-proxy-companion 44 | container_name: nginx_proxy-lets_encrypt 45 | restart: unless-stopped 46 | depends_on: 47 | - nginx 48 | - docker-gen 49 | volumes_from: 50 | - nginx 51 | volumes: 52 | - "/var/run/docker.sock:/var/run/docker.sock:ro" 53 | - "./docker/vhost.d:/etc/nginx/vhost.d:rw" 54 | - "./docker/certs:/etc/nginx/certs:rw" 55 | environment: 56 | - "NGINX_DOCKER_GEN_CONTAINER=nginx_proxy-docker_gen" 57 | networks: 58 | - proxy-tier 59 | 60 | graphql: 61 | image: quay.io/tychot/tql 62 | container_name: bam_api-graphql 63 | restart: unless-stopped 64 | environment: 65 | - NODE_ENV=staging 66 | - VIRTUAL_HOST=bam-api.cloud.bam.tech 67 | - VIRTUAL_NETWORK=nginx-proxy 68 | - VIRTUAL_PORT=3000 69 | - LETSENCRYPT_HOST=bam-api.cloud.bam.tech 70 | - LETSENCRYPT_EMAIL=tychot@bam.tech 71 | ports: 72 | - "3000" 73 | depends_on: 74 | - postgres 75 | networks: 76 | - proxy-tier 77 | 78 | postgres: 79 | image: postgres:9.6 80 | container_name: bam_api-postgres 81 | restart: unless-stopped 82 | environment: 83 | - POSTGRES_USER=tql 84 | - POSTGRES_PASSWORD=itstqldude 85 | volumes: 86 | - database:/var/lib/postgresql/data 87 | networks: 88 | - proxy-tier 89 | -------------------------------------------------------------------------------- /docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: "2" 2 | 3 | volumes: 4 | database: 5 | driver: local 6 | 7 | networks: 8 | proxy-tier: 9 | driver: bridge 10 | 11 | services: 12 | graphql: 13 | build: . 
14 | container_name: bam_api-graphql 15 | restart: unless-stopped 16 | environment: 17 | - NODE_ENV=staging 18 | - VIRTUAL_HOST=bam-api.cloud.bam.tech 19 | - VIRTUAL_NETWORK=nginx-proxy 20 | - VIRTUAL_PORT=3000 21 | - LETSENCRYPT_HOST=bam-api.cloud.bam.tech 22 | - LETSENCRYPT_EMAIL=tychot@bam.tech 23 | ports: 24 | - "3000:3000" 25 | depends_on: 26 | - postgres 27 | networks: 28 | - proxy-tier 29 | 30 | postgres: 31 | image: postgres:9.6 32 | container_name: bam_api-postgres 33 | restart: unless-stopped 34 | environment: 35 | - POSTGRES_USER=tql 36 | - POSTGRES_PASSWORD=itstqldude 37 | volumes: 38 | - database:/var/lib/postgresql/data 39 | networks: 40 | - proxy-tier 41 | -------------------------------------------------------------------------------- /docker/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine:latest 2 | 3 | RUN apk -U add openssl 4 | 5 | ENV VERSION 0.7.3 6 | ENV DOWNLOAD_URL https://github.com/jwilder/docker-gen/releases/download/$VERSION/docker-gen-alpine-linux-amd64-$VERSION.tar.gz 7 | ENV DOCKER_HOST unix:///tmp/docker.sock 8 | 9 | RUN wget -qO- $DOWNLOAD_URL | tar xvz -C /usr/local/bin 10 | 11 | ADD https://raw.githubusercontent.com/jwilder/nginx-proxy/master/nginx.tmpl /etc/docker-gen/templates/nginx.tmpl 12 | 13 | ENTRYPOINT ["/usr/local/bin/docker-gen"] -------------------------------------------------------------------------------- /docker/templates/nginx.tmpl: -------------------------------------------------------------------------------- 1 | {{ $CurrentContainer := where $ "ID" .Docker.CurrentContainerID | first }} 2 | 3 | {{ define "upstream" }} 4 | {{ if .Address }} 5 | {{/* If we got the containers from swarm and this container's port is published to host, use host IP:PORT */}} 6 | {{ if and .Container.Node.ID .Address.HostPort }} 7 | # {{ .Container.Node.Name }}/{{ .Container.Name }} 8 | server {{ .Container.Node.Address.IP }}:{{ .Address.HostPort }}; 9 | {{/* If there is no swarm node or the port is not published on host, use container's IP:PORT */}} 10 | {{ else if .Network }} 11 | # {{ .Container.Name }} 12 | server {{ .Network.IP }}:{{ .Address.Port }}; 13 | {{ end }} 14 | {{ else if .Network }} 15 | # {{ .Container.Name }} 16 | server {{ .Network.IP }} down; 17 | {{ end }} 18 | {{ end }} 19 | 20 | # If we receive X-Forwarded-Proto, pass it through; otherwise, pass along the 21 | # scheme used to connect to this server 22 | map $http_x_forwarded_proto $proxy_x_forwarded_proto { 23 | default $http_x_forwarded_proto; 24 | '' $scheme; 25 | } 26 | 27 | # If we receive X-Forwarded-Port, pass it through; otherwise, pass along the 28 | # server port the client connected to 29 | map $http_x_forwarded_port $proxy_x_forwarded_port { 30 | default $http_x_forwarded_port; 31 | '' $server_port; 32 | } 33 | 34 | # If we receive Upgrade, set Connection to "upgrade"; otherwise, delete any 35 | # Connection header that may have been passed to this server 36 | map $http_upgrade $proxy_connection { 37 | default upgrade; 38 | '' close; 39 | } 40 | 41 | # Set appropriate X-Forwarded-Ssl header 42 | map $scheme $proxy_x_forwarded_ssl { 43 | default off; 44 | https on; 45 | } 46 | 47 | gzip_types text/plain text/css application/javascript application/json application/x-javascript text/xml application/xml application/xml+rss text/javascript; 48 | 49 | log_format vhost '$host $remote_addr - $remote_user [$time_local] ' 50 | '"$request" $status $body_bytes_sent ' 51 | '"$http_referer" "$http_user_agent"'; 52 | 53 | 
access_log off; 54 | 55 | {{ if (exists "/etc/nginx/proxy.conf") }} 56 | include /etc/nginx/proxy.conf; 57 | {{ else }} 58 | # HTTP 1.1 support 59 | proxy_http_version 1.1; 60 | proxy_buffering off; 61 | proxy_set_header Host $http_host; 62 | proxy_set_header Upgrade $http_upgrade; 63 | proxy_set_header Connection $proxy_connection; 64 | proxy_set_header X-Real-IP $remote_addr; 65 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; 66 | proxy_set_header X-Forwarded-Proto $proxy_x_forwarded_proto; 67 | proxy_set_header X-Forwarded-Ssl $proxy_x_forwarded_ssl; 68 | proxy_set_header X-Forwarded-Port $proxy_x_forwarded_port; 69 | 70 | # Mitigate httpoxy attack (see README for details) 71 | proxy_set_header Proxy ""; 72 | {{ end }} 73 | 74 | {{ $enable_ipv6 := eq (or ($.Env.ENABLE_IPV6) "") "true" }} 75 | server { 76 | server_name _; # This is just an invalid value which will never trigger on a real hostname. 77 | listen 80; 78 | {{ if $enable_ipv6 }} 79 | listen [::]:80; 80 | {{ end }} 81 | access_log /var/log/nginx/access.log vhost; 82 | return 503; 83 | } 84 | 85 | {{ if (and (exists "/etc/nginx/certs/default.crt") (exists "/etc/nginx/certs/default.key")) }} 86 | server { 87 | server_name _; # This is just an invalid value which will never trigger on a real hostname. 88 | listen 443 ssl http2; 89 | {{ if $enable_ipv6 }} 90 | listen [::]:443 ssl http2; 91 | {{ end }} 92 | access_log /var/log/nginx/access.log vhost; 93 | return 503; 94 | 95 | ssl_session_tickets off; 96 | ssl_certificate /etc/nginx/certs/default.crt; 97 | ssl_certificate_key /etc/nginx/certs/default.key; 98 | } 99 | {{ end }} 100 | 101 | {{ range $host, $containers := groupByMulti $ "Env.VIRTUAL_HOST" "," }} 102 | {{ $is_regexp := hasPrefix "~" $host }} 103 | {{ $upstream_name := when $is_regexp (sha1 $host) $host }} 104 | # {{ $host }} 105 | upstream {{ $upstream_name }} { 106 | {{ range $container := $containers }} 107 | {{ $addrLen := len $container.Addresses }} 108 | 109 | {{ range $knownNetwork := $CurrentContainer.Networks }} 110 | {{ range $containerNetwork := $container.Networks }} 111 | {{ if eq $knownNetwork.Name $containerNetwork.Name }} 112 | ## Can be connect with "{{ $containerNetwork.Name }}" network 113 | 114 | {{/* If only 1 port exposed, use that */}} 115 | {{ if eq $addrLen 1 }} 116 | {{ $address := index $container.Addresses 0 }} 117 | {{ template "upstream" (dict "Container" $container "Address" $address "Network" $containerNetwork) }} 118 | {{/* If more than one port exposed, use the one matching VIRTUAL_PORT env var, falling back to standard web port 80 */}} 119 | {{ else }} 120 | {{ $port := coalesce $container.Env.VIRTUAL_PORT "80" }} 121 | {{ $address := where $container.Addresses "Port" $port | first }} 122 | {{ template "upstream" (dict "Container" $container "Address" $address "Network" $containerNetwork) }} 123 | {{ end }} 124 | {{ end }} 125 | {{ end }} 126 | {{ end }} 127 | {{ end }} 128 | } 129 | 130 | {{ $default_host := or ($.Env.DEFAULT_HOST) "" }} 131 | {{ $default_server := index (dict $host "" $default_host "default_server") $host }} 132 | 133 | {{/* Get the VIRTUAL_PROTO defined by containers w/ the same vhost, falling back to "http" */}} 134 | {{ $proto := or (first (groupByKeys $containers "Env.VIRTUAL_PROTO")) "http" }} 135 | 136 | {{/* Get the HTTPS_METHOD defined by containers w/ the same vhost, falling back to "redirect" */}} 137 | {{ $https_method := or (first (groupByKeys $containers "Env.HTTPS_METHOD")) "redirect" }} 138 | 139 | {{/* Get the first cert name defined by 
containers w/ the same vhost */}} 140 | {{ $certName := (first (groupByKeys $containers "Env.CERT_NAME")) }} 141 | 142 | {{/* Get the best matching cert by name for the vhost. */}} 143 | {{ $vhostCert := (closest (dir "/etc/nginx/certs") (printf "%s.crt" $host))}} 144 | 145 | {{/* vhostCert is actually a filename so remove any suffixes since they are added later */}} 146 | {{ $vhostCert := trimSuffix ".crt" $vhostCert }} 147 | {{ $vhostCert := trimSuffix ".key" $vhostCert }} 148 | 149 | {{/* Use the cert specified on the container or fallback to the best vhost match */}} 150 | {{ $cert := (coalesce $certName $vhostCert) }} 151 | 152 | {{ $is_https := (and (ne $https_method "nohttps") (ne $cert "") (exists (printf "/etc/nginx/certs/%s.crt" $cert)) (exists (printf "/etc/nginx/certs/%s.key" $cert))) }} 153 | 154 | {{ if $is_https }} 155 | 156 | {{ if eq $https_method "redirect" }} 157 | server { 158 | server_name {{ $host }}; 159 | listen 80 {{ $default_server }}; 160 | {{ if $enable_ipv6 }} 161 | listen [::]:80 {{ $default_server }}; 162 | {{ end }} 163 | access_log /var/log/nginx/access.log vhost; 164 | return 301 https://$host$request_uri; 165 | } 166 | {{ end }} 167 | 168 | server { 169 | server_name {{ $host }}; 170 | listen 443 ssl http2 {{ $default_server }}; 171 | {{ if $enable_ipv6 }} 172 | listen [::]:443 ssl http2 {{ $default_server }}; 173 | {{ end }} 174 | access_log /var/log/nginx/access.log vhost; 175 | 176 | ssl_protocols TLSv1 TLSv1.1 TLSv1.2; 177 | ssl_ciphers 'ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA:ECDHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA:ECDHE-ECDSA-DES-CBC3-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:DES-CBC3-SHA:!DSS'; 178 | 179 | ssl_prefer_server_ciphers on; 180 | ssl_session_timeout 5m; 181 | ssl_session_cache shared:SSL:50m; 182 | ssl_session_tickets off; 183 | 184 | ssl_certificate /etc/nginx/certs/{{ (printf "%s.crt" $cert) }}; 185 | ssl_certificate_key /etc/nginx/certs/{{ (printf "%s.key" $cert) }}; 186 | 187 | {{ if (exists (printf "/etc/nginx/certs/%s.dhparam.pem" $cert)) }} 188 | ssl_dhparam {{ printf "/etc/nginx/certs/%s.dhparam.pem" $cert }}; 189 | {{ end }} 190 | 191 | {{ if (ne $https_method "noredirect") }} 192 | add_header Strict-Transport-Security "max-age=31536000"; 193 | {{ end }} 194 | 195 | {{ if (exists (printf "/etc/nginx/vhost.d/%s" $host)) }} 196 | include {{ printf "/etc/nginx/vhost.d/%s" $host }}; 197 | {{ else if (exists "/etc/nginx/vhost.d/default") }} 198 | include /etc/nginx/vhost.d/default; 199 | {{ end }} 200 | 201 | location / { 202 | {{ if eq $proto "uwsgi" }} 203 | include uwsgi_params; 204 | uwsgi_pass {{ trim $proto }}://{{ trim $upstream_name }}; 205 | {{ else }} 206 | proxy_pass {{ trim $proto }}://{{ trim $upstream_name }}; 207 | {{ end }} 208 | {{ if (exists (printf "/etc/nginx/htpasswd/%s" $host)) }} 209 | auth_basic "Restricted {{ $host }}"; 210 | auth_basic_user_file {{ (printf "/etc/nginx/htpasswd/%s" $host) }}; 211 | {{ end }} 212 | {{ if (exists (printf "/etc/nginx/vhost.d/%s_location" $host)) }} 213 | include {{ printf 
"/etc/nginx/vhost.d/%s_location" $host}}; 214 | {{ else if (exists "/etc/nginx/vhost.d/default_location") }} 215 | include /etc/nginx/vhost.d/default_location; 216 | {{ end }} 217 | } 218 | } 219 | 220 | {{ end }} 221 | 222 | {{ if or (not $is_https) (eq $https_method "noredirect") }} 223 | 224 | server { 225 | server_name {{ $host }}; 226 | listen 80 {{ $default_server }}; 227 | {{ if $enable_ipv6 }} 228 | listen [::]:80 {{ $default_server }}; 229 | {{ end }} 230 | access_log /var/log/nginx/access.log vhost; 231 | 232 | {{ if (exists (printf "/etc/nginx/vhost.d/%s" $host)) }} 233 | include {{ printf "/etc/nginx/vhost.d/%s" $host }}; 234 | {{ else if (exists "/etc/nginx/vhost.d/default") }} 235 | include /etc/nginx/vhost.d/default; 236 | {{ end }} 237 | 238 | location / { 239 | {{ if eq $proto "uwsgi" }} 240 | include uwsgi_params; 241 | uwsgi_pass {{ trim $proto }}://{{ trim $upstream_name }}; 242 | {{ else }} 243 | proxy_pass {{ trim $proto }}://{{ trim $upstream_name }}; 244 | {{ end }} 245 | {{ if (exists (printf "/etc/nginx/htpasswd/%s" $host)) }} 246 | auth_basic "Restricted {{ $host }}"; 247 | auth_basic_user_file {{ (printf "/etc/nginx/htpasswd/%s" $host) }}; 248 | {{ end }} 249 | {{ if (exists (printf "/etc/nginx/vhost.d/%s_location" $host)) }} 250 | include {{ printf "/etc/nginx/vhost.d/%s_location" $host}}; 251 | {{ else if (exists "/etc/nginx/vhost.d/default_location") }} 252 | include /etc/nginx/vhost.d/default_location; 253 | {{ end }} 254 | } 255 | } 256 | 257 | {{ if (and (not $is_https) (exists "/etc/nginx/certs/default.crt") (exists "/etc/nginx/certs/default.key")) }} 258 | server { 259 | server_name {{ $host }}; 260 | listen 443 ssl http2 {{ $default_server }}; 261 | {{ if $enable_ipv6 }} 262 | listen [::]:443 ssl http2 {{ $default_server }}; 263 | {{ end }} 264 | access_log /var/log/nginx/access.log vhost; 265 | return 500; 266 | 267 | ssl_certificate /etc/nginx/certs/default.crt; 268 | ssl_certificate_key /etc/nginx/certs/default.key; 269 | } 270 | {{ end }} 271 | 272 | {{ end }} 273 | {{ end }} -------------------------------------------------------------------------------- /docs/images/layers.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bamlab/bam-api/b325e16e210ada8ecc20c7662ba2e92dcc4c60d5/docs/images/layers.png -------------------------------------------------------------------------------- /flow/graphqlApolloError.js: -------------------------------------------------------------------------------- 1 | type ConfigType = {}; 2 | type FormatErrorType = {}; 3 | 4 | declare module 'graphql-apollo-errors' { 5 | declare var SevenBoom: { 6 | badRequest: Function, 7 | unauthorized: Function, 8 | paymentRequired: Function, 9 | forbidden: Function, 10 | notFound: Function, 11 | methodNotAllowed: Function, 12 | notAcceptable: Function, 13 | proxyAuthRequired: Function, 14 | clientTimeout: Function, 15 | conflict: Function, 16 | resourceGone: Function, 17 | lengthRequired: Function, 18 | preconditionFailed: Function, 19 | entityTooLarge: Function, 20 | uriTooLong: Function, 21 | unsupportedMediaType: Function, 22 | rangeNotSatisfiable: Function, 23 | expectationFailed: Function, 24 | teapot: Function, 25 | badData: Function, 26 | locked: Function, 27 | preconditionRequired: Function, 28 | tooManyRequests: Function, 29 | illegal: Function, 30 | badImplementation: Function, 31 | notImplemented: Function, 32 | badGateway: Function, 33 | serverUnavailable: Function, 34 | gatewayTimeout: Function, 35 | }; 
36 | declare var formatErrorGenerator: (config: ConfigType) => FormatErrorType; 37 | } 38 | -------------------------------------------------------------------------------- /jsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "allowSyntheticDefaultImports": true 4 | }, 5 | "exclude": ["node_modules"] 6 | } 7 | -------------------------------------------------------------------------------- /knexfile.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @description: this file is used by knex cli for migration / seed 3 | * 4 | * @flow 5 | */ 6 | 7 | // remove flow type 8 | require('flow-remove-types/register'); 9 | // in our case, it is just the database config 10 | const config = require('config'); 11 | module.exports = config.get('Database'); 12 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "bam-api", 3 | "private": true, 4 | "version": "1.0.0", 5 | "description": "A GraphQL, 'production ready'™ api for internal use at bam, but open sourced as an example !", 6 | "main": "index.js", 7 | "homepage": "https://github.com/bamlab/bam-api", 8 | "keywords": [ 9 | "graphql", 10 | "knex", 11 | "postgres", 12 | "docker" 13 | ], 14 | "bugs": { 15 | "url": "https://github.com/bamlab/bam-api/issues" 16 | }, 17 | "repository": { 18 | "type": "git", 19 | "url": "https://github.com/bamlab/bam-api.git" 20 | }, 21 | "author": { 22 | "name": "Tycho Tatitscheff" 23 | }, 24 | "contributors": [ 25 | { 26 | "name": "Yann Leflour" 27 | } 28 | ], 29 | "license": "MIT", 30 | "scripts": { 31 | "dev": "backpack", 32 | "build": "backpack build", 33 | "start:dev": "yarn install && yarn db:migrate && yarn dev", 34 | "start": "yarn db:migrate && node ./build/main.js", 35 | "db:migrate": "knex migrate:latest", 36 | "db:seed": "knex seed:run", 37 | "commitmsg": "validate-commit-msg", 38 | "test:lint": "yarn eslint ." 
39 | }, 40 | "engines": { 41 | "node": ">=8.0.0" 42 | }, 43 | "dependencies": { 44 | "@tychot/jwks-rsa": "1.1.2", 45 | "apollo-engine": "^0.4.7", 46 | "backpack-core": "^0.4.0-rc1", 47 | "casual": "^1.5.14", 48 | "config": "^1.25.1", 49 | "dataloader": "^1.3.0", 50 | "deep-assign": "^2.0.0", 51 | "ect": "^0.5.9", 52 | "flow-remove-types": "^1.2.0", 53 | "glob": "^7.1.1", 54 | "graphql": "^0.10.1", 55 | "graphql-apollo-errors": "^2.0.2", 56 | "graphql-server-koa": "^1.0.0", 57 | "graphql-tools": "^1.0.0", 58 | "husky": "^0.14.3", 59 | "js-yaml": "^3.9.0", 60 | "knex": "^0.13.0", 61 | "koa": "^2.3.0", 62 | "koa-bodyparser": "^4.2.0", 63 | "koa-helmet": "^3.1.0", 64 | "koa-jwt": "^3.2.1", 65 | "koa-pino-logger": "2", 66 | "koa-requestid": "^2.0.1", 67 | "koa-router": "^7.2.1", 68 | "koa-views": "^6.0.2", 69 | "koa2-connect": "^1.0.2", 70 | "pg": "^6.4.1", 71 | "pino": "^4.7.0", 72 | "pino-noir": "^1.2.0", 73 | "sqlite3": "^3.1.8", 74 | "uuid": "^3.1.0", 75 | "validate-commit-msg": "^2.12.2" 76 | }, 77 | "devDependencies": { 78 | "@tychot/gql": "^2.0.0", 79 | "babel-eslint": "^7.2.3", 80 | "babel-jest": "^20.0.3", 81 | "babel-preset-env": "^1.6.0", 82 | "babel-preset-flow": "^6.23.0", 83 | "cz-conventional-changelog": "^2.0.0", 84 | "eslint": "^3.19.0", 85 | "eslint-plugin-flowtype": "^2.35.0", 86 | "eslint-plugin-jest": "^20.0.3", 87 | "eslint-plugin-prettier": "^2.1.2", 88 | "flow-bin": "^0.50.0", 89 | "jest": "^20.0.4", 90 | "object-hash": "^1.1.8", 91 | "prettier": "^1.5.3" 92 | }, 93 | "config": { 94 | "commitizen": { 95 | "path": "./node_modules/cz-conventional-changelog" 96 | } 97 | } 98 | } 99 | -------------------------------------------------------------------------------- /src/business/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bamlab/bam-api/b325e16e210ada8ecc20c7662ba2e92dcc4c60d5/src/business/README.md -------------------------------------------------------------------------------- /src/business/bamer.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @description: this file contains the Bamer buisness object, in the context of a given http request 3 | * 4 | * This file will handle batching and caching, as well a http authentication, scoping and so on 5 | * 6 | * @flow 7 | */ 8 | import DataLoader from 'dataloader'; 9 | import BamerModel from '../db/queryBuilders/bamer'; 10 | import { assertIsBamer } from './utils/auth'; 11 | 12 | class Bamer { 13 | id: $PropertyType; 14 | firstName: $PropertyType; 15 | lastName: $PropertyType; 16 | role: $PropertyType; 17 | name: $PropertyType; 18 | email: $PropertyType; 19 | 20 | constructor(data: BamerDBType, viewer: { id?: string }) { 21 | this.id = data.id; 22 | this.firstName = data.firstName; 23 | this.lastName = data.lastName; 24 | this.role = data.role; 25 | 26 | // handle the deprecated properties here 27 | 28 | if (viewer.id === data.id) { 29 | this.email = data.email; 30 | } 31 | } 32 | 33 | /** @deprecated */ 34 | get name(): string { 35 | return [this.firstName, this.lastName].join(' '); 36 | } 37 | 38 | // get the loaders for the request, in order to batch and cach the db calls 39 | static getLoaders(): { byId: *, primeLoaders: * } { 40 | const primeLoaders = (bamers: Array) => { 41 | for (let bamer of bamers) { 42 | byId.prime(bamer.id, bamer); 43 | } 44 | }; 45 | const byId = new DataLoader(ids => BamerModel.getByListofIds(ids)); 46 | return { 47 | byId, 48 | primeLoaders, 49 | }; 50 | } 51 | static 
async load({ user: viewer, dataloaders, roles }, id): Promise { 52 | assertIsBamer(viewer, roles); 53 | // return null if no id is given 54 | if (!id) return null; 55 | // return null if no id is given 56 | const data = await dataloaders.bamer.byId.load(id); 57 | if (!data) return null; 58 | 59 | return new Bamer(data, viewer); 60 | } 61 | static async loadAll({ user: viewer, roles, dataloaders }): Promise> { 62 | assertIsBamer(viewer, roles); 63 | 64 | const data = await BamerModel.getAll(); 65 | dataloaders.bamer.primeLoaders(data); 66 | return data.map(row => new Bamer(row, viewer)); 67 | } 68 | static async register( 69 | { user: viewer, roles }, 70 | { firstName, lastName, role, email } 71 | ): Promise { 72 | assertIsBamer(viewer, roles); 73 | let data; 74 | try { 75 | data = await BamerModel.createAndReturn({ firstName, lastName, role, email }); 76 | } catch (e) { 77 | switch (true) { 78 | case e.code === '23505' && e.constraint === 'bamer_email_unique': 79 | throw new Error('Bamer already registred'); 80 | default: 81 | throw new Error('Something wrong happened'); 82 | } 83 | } 84 | if (!data) return null; 85 | 86 | return new Bamer(data, viewer); 87 | } 88 | } 89 | 90 | export default Bamer; 91 | -------------------------------------------------------------------------------- /src/business/book.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @description: this file contains the Book buisness object, in the context of a given http request 3 | * 4 | * This file will handle batching and caching, as well a http authentication, scoping and so on 5 | * 6 | * @flow 7 | */ 8 | import DataLoader from 'dataloader'; 9 | import BookModel from '../db/queryBuilders/book'; 10 | 11 | class Book { 12 | id: $PropertyType; 13 | name: $PropertyType; 14 | author: $PropertyType; 15 | // eslint-disable-next-line no-unused-vars 16 | constructor(data: BookDBType, viewer: { id?: string }) { 17 | this.id = data.id; 18 | this.name = data.name; 19 | this.author = data.author; 20 | } 21 | // get the loader for the request, in order to batch and cach the db calls 22 | // https://github.com/facebook/dataloader#loading-by-alternative-keys 23 | static getLoaders() { 24 | const primeLoaders = (books: Array) => { 25 | for (let book of books) { 26 | //book.bamerBorrowingId && byBorrowingUserId.prime(book.bamerBorrowingId, book); 27 | byId.prime(book.id, book); 28 | } 29 | }; 30 | const byId = new DataLoader(ids => BookModel.getByListofIds(ids)); 31 | // Fix me : this loader does not work 32 | // DataLoader must be constructed with a function which accepts Array and returns Promise>, but the function did not return a Promise of an Array of the same length as the Array of keys. 
33 | // const byBorrowingUserId = new DataLoader(ids => BookModel.getByBorrowerId(ids)); 34 | return { 35 | byId, 36 | //byBorrowingUserId, 37 | primeLoaders, 38 | }; 39 | } 40 | static async load({ user: viewer, dataloaders }, id): Promise { 41 | // return null if no id is given 42 | if (!id) return null; 43 | // return null if no id is given 44 | const data = await dataloaders.book.byId.load(id); 45 | dataloaders.book.primeLoaders(data); 46 | if (!data) return null; 47 | 48 | return new Book(data, viewer); 49 | } 50 | static async loadByBorrowing({ user: viewer, dataloaders }, id): Promise> { 51 | // return null if no id is given 52 | if (!id) return []; 53 | // return null if no id is given 54 | // @todo : refactore this to use loader 55 | const data = await BookModel.getByBorrowerId(id); 56 | dataloaders.book.primeLoaders(data); 57 | if (!data) return []; 58 | 59 | return data.map(row => new Book(row, viewer)); 60 | } 61 | static async loadAll({ user: viewer, dataloaders }): Promise> { 62 | const data = await BookModel.getAll(); 63 | dataloaders.book.primeLoaders(data); 64 | return data.map(row => new Book(row, viewer)); 65 | } 66 | } 67 | 68 | export default Book; 69 | -------------------------------------------------------------------------------- /src/business/index.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | export { default as bamer } from './bamer'; 4 | export { default as book } from './book'; 5 | -------------------------------------------------------------------------------- /src/business/utils/__tests__/__snapshots__/auth.js.snap: -------------------------------------------------------------------------------- 1 | // Jest Snapshot v1, https://goo.gl/fbAQLP 2 | 3 | exports[`auth utils (unit) assertIsBammer should throw an error 1`] = `"User should be a bamer"`; 4 | 5 | exports[`auth utils (unit) assertIsBammer should throw an error 2`] = `"User should be a bamer"`; 6 | 7 | exports[`auth utils (unit) assertIsBammer should throw an error 3`] = `"Please connect to use this functionality"`; 8 | -------------------------------------------------------------------------------- /src/business/utils/__tests__/auth.js: -------------------------------------------------------------------------------- 1 | jest.mock('knex'); 2 | // import knex from 'knex'; 3 | import getViewerAndRoles, { assertIsBamer, getRolesByEmail } from '../auth'; 4 | 5 | describe('auth utils (unit)', () => { 6 | it('assertIsBammer should throw an error', () => { 7 | expect(() => assertIsBamer('george@bam.tech', [])).toThrowErrorMatchingSnapshot(); 8 | expect(() => assertIsBamer('george@bam.tech', ['ANONYMOUS'])).toThrowErrorMatchingSnapshot(); 9 | expect(() => 10 | assertIsBamer('george@bam.tech', ['NOT_REGISTRED']) 11 | ).toThrowErrorMatchingSnapshot(); 12 | }); 13 | it('return bammer roles for bammer email', () => { 14 | expect(getRolesByEmail('george@bam.tech')).toEqual(['BAMER']); 15 | expect(getRolesByEmail('george@theodo.fr')).toEqual([]); 16 | }); 17 | }); 18 | 19 | describe.withTestDatabase('auth utils (integration)', () => { 20 | it('does return the right user / roles', async () => { 21 | const email = 'george2@bam.tech'; 22 | const viewerAndRoles = await getViewerAndRoles({ email }); 23 | expect(viewerAndRoles.roles).toEqual(['BAMER']); 24 | expect(viewerAndRoles.user.email).toEqual('george2@bam.tech'); 25 | }); 26 | }); 27 | -------------------------------------------------------------------------------- /src/business/utils/auth.js: 
--------------------------------------------------------------------------------
1 | // @flow
2 | import * as queryBuilders from '../../db/queryBuilders';
3 | import { SevenBoom } from 'graphql-apollo-errors';
4 |
5 | export const ROLES = {
6 | ANONYMOUS: 'ANONYMOUS',
7 | NOT_REGISTRED: 'NOT_REGISTRED',
8 | BAMER: 'BAMER',
9 | };
10 |
11 | export default async function getViewerAndRoles(
12 | requestUser: any
13 | ): Promise<{| user: ?BamerDBType, roles: Array<string> |}> {
14 | // if there is no user (ie ctx.state.user is falsy), return no user and the anonymous role
15 | if (!requestUser) {
16 | return {
17 | user: null,
18 | roles: [ROLES.ANONYMOUS],
19 | };
20 | }
21 | const user: ?BamerDBType = await queryBuilders.bamer.getByEmail(requestUser.email);
22 | if (!user) {
23 | return {
24 | user: null,
25 | roles: [ROLES.NOT_REGISTRED],
26 | };
27 | }
28 | return {
29 | user,
30 | roles: getRolesByEmail(user.email),
31 | };
32 | }
33 |
34 | export function getRolesByEmail(email: string): Array<string> {
35 | let roles = [];
36 | const isBamerEmail = /^\w+@bam\.tech$/;
37 | if (isBamerEmail.test(email)) {
38 | roles.push(ROLES.BAMER);
39 | }
40 | return roles;
41 | }
42 |
43 | export function assertIsBamer(user: BamerDBType, roles: Array<string>) {
44 | if (roles.includes(ROLES.ANONYMOUS)) {
45 | throw new SevenBoom.unauthorized(
46 | `Please connect to use this functionality`,
47 | {},
48 | 'ANONYMOUS_DISALLOWED'
49 | );
50 | }
51 | if (roles.includes(ROLES.NOT_REGISTRED)) {
52 | throw new SevenBoom.notFound(
53 | `User with email ${user.email} is unregistered : please perform a registration mutation`,
54 | { email: user.email },
55 | 'REGISTERED_USER_NOT_FOUND'
56 | );
57 | }
58 | if (!roles.includes(ROLES.BAMER)) {
59 | throw new SevenBoom.forbidden('User should be a bamer', {}, 'FORBIDDEN');
60 | }
61 | }
62 |
--------------------------------------------------------------------------------
/src/db/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bamlab/bam-api/b325e16e210ada8ecc20c7662ba2e92dcc4c60d5/src/db/README.md
--------------------------------------------------------------------------------
/src/db/flow/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bamlab/bam-api/b325e16e210ada8ecc20c7662ba2e92dcc4c60d5/src/db/flow/README.md
--------------------------------------------------------------------------------
/src/db/flow/bamer.js:
--------------------------------------------------------------------------------
1 | /* eslint-disable no-undef */
2 | declare type BamerDBType = {
3 | id: string,
4 | role: 'DEV' | 'SALE' | 'GROWTH' | 'ADMIN',
5 | firstName: string,
6 | lastName: string,
7 | email: string,
8 | };
9 |
--------------------------------------------------------------------------------
/src/db/flow/book.js:
--------------------------------------------------------------------------------
1 | /* eslint-disable no-undef */
2 | declare type BookDBType = {
3 | id: string,
4 | name: string,
5 | author: string,
6 | bamerBorrowingId: string,
7 | currentlyBorrowedBy: ?string,
8 | };
9 |
--------------------------------------------------------------------------------
/src/db/index.js:
--------------------------------------------------------------------------------
1 | /**
2 | * @description: this file contains the connection to the database
3 | *
4 | * @flow
5 | */
6 |
7 | import config from 'config';
8 | import knex from 'knex';
9 | export default
knex(config.get('Database')); 10 | -------------------------------------------------------------------------------- /src/db/migrations/20170430000642_add_table_bamer.js: -------------------------------------------------------------------------------- 1 | exports.up = function(knex) { 2 | return knex.schema.createTableIfNotExists('Bamer', function(table) { 3 | table.uuid('id').primary(); 4 | table.string('firstName'); 5 | table.string('lastName'); 6 | table.enu('role', ['DEV', 'SALE', 'GROWTH', 'ADMIN']); 7 | table.timestamps(); 8 | }); 9 | }; 10 | 11 | exports.down = function(knex) { 12 | return knex.schema.dropTableIfExists('Bamer'); 13 | }; 14 | -------------------------------------------------------------------------------- /src/db/migrations/20170430001704_add_table_book.js: -------------------------------------------------------------------------------- 1 | exports.up = function(knex) { 2 | return knex.schema.createTableIfNotExists('Book', function(table) { 3 | table.uuid('id').primary(); 4 | table.string('name'); 5 | table.string('author'); 6 | table.uuid('bamerBorrowingId').references('id').inTable('Bamer'); 7 | table.timestamps(); 8 | }); 9 | }; 10 | 11 | exports.down = function(knex) { 12 | return knex.schema.dropTableIfExists('Book'); 13 | }; 14 | -------------------------------------------------------------------------------- /src/db/migrations/20170507134620_add_email_in_bamer.js: -------------------------------------------------------------------------------- 1 | exports.up = function(knex) { 2 | return knex.schema.table('Bamer', table => { 3 | table.string('email'); 4 | table.unique('email'); 5 | }); 6 | }; 7 | 8 | exports.down = function(knex) { 9 | return knex.schema.table('Bamer', table => { 10 | table.dropUnique('email'); 11 | table.dropColumn('email'); 12 | }); 13 | }; 14 | -------------------------------------------------------------------------------- /src/db/migrations/20170524191943_add-default-id-uuid.js: -------------------------------------------------------------------------------- 1 | exports.up = function(knex, Promise) { 2 | return knex.raw('CREATE EXTENSION IF NOT EXISTS "uuid-ossp"').then(() => { 3 | return Promise.all([ 4 | knex.raw('ALTER TABLE ONLY "Bamer" ALTER COLUMN id SET DEFAULT uuid_generate_v4()'), 5 | knex.raw('ALTER TABLE ONLY "Book" ALTER COLUMN id SET DEFAULT uuid_generate_v4()'), 6 | ]); 7 | }); 8 | }; 9 | 10 | exports.down = function(knex, Promise) { 11 | return Promise.all([ 12 | knex.raw('ALTER TABLE ONLY "Bamer" ALTER COLUMN id DROP DEFAULT'), 13 | knex.raw('ALTER TABLE ONLY "Book" ALTER COLUMN id DROP DEFAULT'), 14 | ]); 15 | }; 16 | -------------------------------------------------------------------------------- /src/db/migrations/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bamlab/bam-api/b325e16e210ada8ecc20c7662ba2e92dcc4c60d5/src/db/migrations/README.md -------------------------------------------------------------------------------- /src/db/queryBuilders/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bamlab/bam-api/b325e16e210ada8ecc20c7662ba2e92dcc4c60d5/src/db/queryBuilders/README.md -------------------------------------------------------------------------------- /src/db/queryBuilders/bamer.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @description: this file contains the query builder to access the Bamer table 3 | * 4 | * 
@flow
5 | */
6 |
7 | import db from '..';
8 |
9 | class BamerModel {
10 | /**
11 | * Get a bamer by its uuid
12 | *
13 | * @static
14 | * @memberOf BamerModel
15 | */
16 | static async getById(id: string): Promise<?BamerDBType> {
17 | return await db.first().table('Bamer').where('id', id);
18 | }
19 | /**
20 | * Get a bamer by email
21 | *
22 | * Used for authentication
23 | *
24 | * @static
25 | * @memberOf BamerModel
26 | */
27 | static async getByEmail(email: string): Promise<?BamerDBType> {
28 | return await db.first().table('Bamer').where('email', email);
29 | }
30 | /**
31 | * Get every bamer
32 | *
33 | * @static
34 | * @memberOf BamerModel
35 | */
36 | static async getAll(): Promise<Array<BamerDBType>> {
37 | return await db.select().table('Bamer');
38 | }
39 | /**
40 | * Get the corresponding list of bamers for a given list of uuids
41 | *
42 | * Used for batching in dataloader
43 | *
44 | * @static
45 | * @memberOf BamerModel
46 | */
47 | static async getByListofIds(ids: Array<string>): Promise<Array<BamerDBType>> {
48 | return await db.select().table('Bamer').whereIn('id', ids);
49 | }
50 | /**
51 | * Create a new bamer and return the created row
52 | *
53 | * Used by the `registerMyself` mutation
54 | *
55 | * @static
56 | * @memberOf BamerModel
57 | */
58 | static async createAndReturn({ firstName, lastName, email, role }): Promise<?BamerDBType> {
59 | const [id] = await db
60 | .table('Bamer')
61 | .returning('id')
62 | .insert({ firstName, lastName, email, role });
63 | return await this.getById(id);
64 | }
65 | }
66 |
67 | export default BamerModel;
68 |
--------------------------------------------------------------------------------
/src/db/queryBuilders/book.js:
--------------------------------------------------------------------------------
1 | /**
2 | * @description: this file contains the query builder to access the Book table
3 | *
4 | * @flow
5 | */
6 |
7 | import db from '..';
8 |
9 | class BookModel {
10 | /**
11 | * Get a book by id
12 | *
13 | * @static
14 | * @memberOf BookModel
15 | */
16 | static async getById(id: string): Promise<?BookDBType> {
17 | return await db.first().table('Book').where('id', id);
18 | }
19 | /**
20 | * Get the books that one bamer is currently borrowing
21 | *
22 | * @static
23 | * @memberOf BookModel
24 | */
25 | static async getByBorrowerId(id: string): Promise<Array<BookDBType>> {
26 | return await db.select().table('Book').where('bamerBorrowingId', id);
27 | }
28 | /**
29 | * Get every book
30 | *
31 | * @static
32 | * @memberOf BookModel
33 | */
34 | static async getAll(): Promise<Array<BookDBType>> {
35 | return await db.select().table('Book');
36 | }
37 | /**
38 | * Get the corresponding list of books for a given list of uuids
39 | *
40 | * Used for batching in dataloader
41 | *
42 | * @static
43 | * @memberOf BookModel
44 | */
45 | static async getByListofIds(ids: Array<string>): Promise<Array<BookDBType>> {
46 | return await db.select().table('Book').whereIn('id', ids);
47 | }
48 | }
49 |
50 | export default BookModel;
51 |
--------------------------------------------------------------------------------
/src/db/queryBuilders/index.js:
--------------------------------------------------------------------------------
1 | // @flow
2 |
3 | export { default as bamer } from './bamer';
4 | export { default as book } from './book';
5 |
--------------------------------------------------------------------------------
/src/db/seeds/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bamlab/bam-api/b325e16e210ada8ecc20c7662ba2e92dcc4c60d5/src/db/seeds/README.md
--------------------------------------------------------------------------------
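One detail worth calling out about `getByListofIds` above: `whereIn` returns matching rows in arbitrary order and silently skips missing ids, while DataLoader requires its batch function to resolve to exactly one value per key, in key order (the contract quoted by the FIXME in src/business/book.js). The snippet below is an illustrative sketch of one common way to satisfy that contract; it is not code that ships with this repository.

```js
import DataLoader from 'dataloader';
import BookModel from '../db/queryBuilders/book'; // same import as in src/business/book.js

// Re-order (and pad) the rows returned by whereIn so that results[i] corresponds to ids[i],
// as DataLoader expects: one value per key, in the same order as the keys.
const byId = new DataLoader(async ids => {
  const rows = await BookModel.getByListofIds(ids);
  const rowsById = new Map(rows.map(row => [row.id, row]));
  return ids.map(id => rowsById.get(id) || null);
});
```

The same pattern would apply to `BamerModel.getByListofIds` for the bamer loader.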
/src/db/seeds/development/base_data.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Development fixtures 3 | * 4 | * @flow 5 | */ 6 | 7 | const knex = require('knex'); 8 | 9 | exports.seed = function(knex: knex, Promise: typeof Promise) { 10 | // Deletes ALL existing entries 11 | return Promise.all([ 12 | knex('Bamer').del().then(function() { 13 | // Insert three Bamer 14 | return knex('Bamer').insert([ 15 | { 16 | id: 'd906656b-2b4f-4afe-bcd5-34c7d50ab035', 17 | firstName: 'Marek', 18 | lastName: 'Kalnik', 19 | email: 'marek@example.com', 20 | }, 21 | { 22 | id: '48201f35-5e07-46a0-a7cc-a4b9eabafcf1', 23 | firstName: 'Yann', 24 | lastName: 'Leflour', 25 | email: 'yannl@bam.tech', 26 | }, 27 | { 28 | id: 'ce53da3a-da7d-4893-a2a1-b48ffdbb4644', 29 | firstName: 'Tycho', 30 | lastName: 'Tatitscheff', 31 | email: 'tychot@bam.tech', 32 | }, 33 | { 34 | id: 'ed09776a-6c35-4527-9c1f-d333ac9f8abf', 35 | firstName: 'Florian', 36 | lastName: 'Rival', 37 | email: 'flo@example.com', 38 | }, 39 | ]); 40 | }), 41 | // Deletes ALL existing entries 42 | knex('Book').del().then(function() { 43 | // Insert three Books 44 | return knex('Book').insert([ 45 | { 46 | id: '21be5479-5ed6-423a-ad6e-b78eb59eeb07', 47 | name: 'Refactoring', 48 | author: 'Martin Fowler', 49 | bamerBorrowingId: 'd906656b-2b4f-4afe-bcd5-34c7d50ab035', 50 | }, 51 | { 52 | id: 'b4310c4d-2bc9-4936-9462-e8426ac51988', 53 | name: 'The Five Dysfunctions of a Team', 54 | author: 'Patrick M. Lencioni', 55 | bamerBorrowingId: 'ce53da3a-da7d-4893-a2a1-b48ffdbb4644', 56 | }, 57 | { 58 | id: '66240ca5-9cc5-43ce-9633-fbf159382d34', 59 | name: 'Elixir in Action', 60 | author: 'Saša Jurić', 61 | bamerBorrowingId: 'ce53da3a-da7d-4893-a2a1-b48ffdbb4644', 62 | }, 63 | ]); 64 | }), 65 | ]); 66 | }; 67 | -------------------------------------------------------------------------------- /src/index.js: -------------------------------------------------------------------------------- 1 | import './koa/server'; 2 | -------------------------------------------------------------------------------- /src/koa/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bamlab/bam-api/b325e16e210ada8ecc20c7662ba2e92dcc4c60d5/src/koa/README.md -------------------------------------------------------------------------------- /src/koa/flow/context.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable no-undef */ 2 | import DataLoader from 'dataloader'; 3 | 4 | declare type ContextType = { 5 | dataloaders: { 6 | book: { byId: DataLoader, primeLoaders: Function }, 7 | bamer: { byId: DataLoader, primeLoaders: Function }, 8 | }, 9 | state: { user?: { email: string } }, 10 | }; 11 | -------------------------------------------------------------------------------- /src/koa/middlewares/knex.js: -------------------------------------------------------------------------------- 1 | export default knex => async (ctx, next) => { 2 | const queries = []; 3 | 4 | const captureQueries = builder => { 5 | const startTime = process.hrtime(); 6 | const group = []; 7 | 8 | builder.on('query', query => { 9 | group.push(query); 10 | queries.push(query); 11 | }); 12 | 13 | builder.on('end', () => { 14 | // all queries are completed at this point. 15 | // in the future, it'd be good to separate out each individual query, 16 | // but for now, this isn't something that knex supports. 
see the 17 | // discussion here for details: 18 | // https://github.com/tgriesser/knex/pull/335#issuecomment-46787879 19 | const diff = process.hrtime(startTime); 20 | const ms = diff[0] * 1e3 + diff[1] * 1e-6; 21 | group.forEach(query => { 22 | query.duration = ms.toFixed(3); 23 | }); 24 | }); 25 | }; 26 | 27 | const logQueries = () => { 28 | queries.forEach(query => { 29 | ctx.log 30 | .child({ name: 'sql' }) 31 | .info( 32 | '%s %s %s', 33 | query.sql.replace(/\\/g, ''), 34 | `{${query.bindings.join(', ')}}`, 35 | `${query.duration}ms` 36 | ); 37 | }); 38 | }; 39 | 40 | knex.client.on('start', captureQueries); 41 | await next(); 42 | knex.client.removeListener('start', captureQueries); 43 | logQueries(); 44 | }; 45 | -------------------------------------------------------------------------------- /src/koa/middlewares/logger.js: -------------------------------------------------------------------------------- 1 | import pino from 'pino'; 2 | // Use config to externalize the configuration 3 | import config from 'config'; 4 | 5 | const verboseLevel = config.get('Logger.verboseLevel'); 6 | 7 | const logger = (opts, stream) => { 8 | opts = opts || {}; 9 | opts.serializers = opts.serializers || {}; 10 | opts.serializers.req = opts.serializers.req || asReqValue; 11 | opts.serializers.res = opts.serializers.res || asResValue; 12 | opts.serializers.err = opts.serializers.err || asErrValue; 13 | 14 | opts.useLevel = opts.useLevel || 'info'; 15 | 16 | var theStream = opts.stream || stream; 17 | delete opts.stream; 18 | 19 | const middleware = async (ctx, next) => { 20 | ctx.log = ctx.request.log = ctx.response.log = ctx.req.log = pino(opts, theStream); 21 | let logReq: object | string = {}; 22 | switch (verboseLevel) { 23 | case 1: 24 | logReq = `${ctx.req.method} ${ctx.request.url} (req-id : "${ctx.req.id}")`; 25 | break; 26 | case 2: 27 | logReq.req = ctx.req; 28 | break; 29 | case 0: 30 | default: 31 | break; 32 | } 33 | ctx.log.child({ name: 'req' }).info(logReq); 34 | ctx.onerror = catchErr(ctx, ctx.onerror); 35 | const startTime = Date.now(); 36 | 37 | await next(); 38 | 39 | let logRes: object | string = {}; 40 | 41 | const responseTime = Date.now() - startTime; 42 | switch (verboseLevel) { 43 | case 1: 44 | logRes = `responded in ${responseTime}ms (req-id: "${ctx.req.id}")`; 45 | break; 46 | case 2: 47 | logRes.req = ctx.res; 48 | logRes.responseTime = responseTime; 49 | break; 50 | case 0: 51 | default: 52 | break; 53 | } 54 | ctx.log.child({ name: 'res' }).info(logRes); 55 | ctx.set('X-Response-Time', `${responseTime}ms`); 56 | }; 57 | return middleware; 58 | }; 59 | 60 | function catchErr(ctx, handler) { 61 | return function(e) { 62 | if (!e) { 63 | return handler.call(ctx, e); 64 | } 65 | ctx.log.error( 66 | { 67 | res: ctx.res, 68 | err: { 69 | type: e.constructor.name, 70 | message: e.message, 71 | stack: e.stack, 72 | }, 73 | responseTime: ctx.res.responseTime, 74 | }, 75 | 'request errored' 76 | ); 77 | return handler.call(ctx, e); 78 | }; 79 | } 80 | 81 | function asReqValue(req) { 82 | return { 83 | id: req.id, 84 | method: req.method, 85 | body: req.body, 86 | url: req.url, 87 | headers: req.headers, 88 | remoteAddress: req.connection.remoteAddress, 89 | remotePort: req.connection.remotePort, 90 | }; 91 | } 92 | 93 | function asResValue(res) { 94 | return { 95 | statusCode: res.statusCode, 96 | header: res.getHeaders(), 97 | }; 98 | } 99 | 100 | function asErrValue(err) { 101 | var obj = { 102 | type: err.constructor.name, 103 | message: err.message, 104 | stack: err.stack, 105 | 
}; 106 | for (var key in err) { 107 | if (obj[key] === undefined) { 108 | obj[key] = err[key]; 109 | } 110 | } 111 | return obj; 112 | } 113 | 114 | export default logger; 115 | -------------------------------------------------------------------------------- /src/koa/server.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @description: this file starts the web server 3 | * 4 | * @todo : split the middlewares in small pieces 5 | * 6 | * @flow 7 | */ 8 | 9 | // Import koa 2, the modern express 10 | import koa from 'koa'; 11 | // Import koa 2router, so we can mount graphQl under the /graphql endpoint 12 | import koaRouter from 'koa-router'; 13 | // Import koa 2 bodyparser, to parse the html bodycontaining the query 14 | // and pass the decoded string to graphql 15 | import koaBody from 'koa-bodyparser'; 16 | // Import helmet middleware to add extra security for free 17 | import helmet from 'koa-helmet'; 18 | // Use config to externalize the configuration 19 | import config from 'config'; 20 | // import graphqlKoa and graphiql 21 | import { graphqlKoa } from 'graphql-server-koa'; 22 | // import jwt-verification 23 | import koaJwt from 'koa-jwt'; 24 | // and jwks to delegate the auth to auth0 25 | import jwksRsa from '@tychot/jwks-rsa'; 26 | // import view to render the static login page 27 | import koaViews from 'koa-views'; 28 | // instrument graphql 29 | import { Engine } from 'apollo-engine'; 30 | // log information in a scalable way 31 | import logger from './middlewares/logger'; 32 | // log sql query 33 | import knexMiddleware from './middlewares/knex'; 34 | import knex from '../db'; 35 | // add a request id to the response 36 | import koaRequestId from 'koa-requestid'; 37 | 38 | // create a new app 39 | const app = new koa(); 40 | // create a new router, not really usefull for now 41 | const router = new koaRouter(); 42 | 43 | // use the helmet middleware, to offfer a bit of extra security 44 | app.use(helmet()); 45 | 46 | // add request id to response and request state 47 | app.use(koaRequestId()); 48 | app.use(async (ctx, next) => { 49 | ctx.req.id = ctx.state.id; 50 | await next(); 51 | }); 52 | 53 | // use the body middlleware, to decode the body of the request 54 | app.use(koaBody()); 55 | 56 | // use the loging middleware, to log request and log special event 57 | // override koa's undocumented error handler 58 | app.context.onerror = () => {}; 59 | // specify that this is our api 60 | app.context.api = true; 61 | // use our logger middleware 62 | app.use(logger()); 63 | 64 | app.use(knexMiddleware(knex)); 65 | 66 | // use the ect template string for views 67 | import path from 'path'; 68 | app.use( 69 | koaViews(path.join(__dirname, 'views'), { 70 | map: { 71 | html: 'ect', 72 | }, 73 | }) 74 | ); 75 | 76 | const engine = new Engine({ 77 | engineConfig: { apiKey: 'service:tychota-Bam-Api:1Z3thyxiVF84L4nF97NUmw' }, 78 | graphqlPort: 3000, // GraphQL port 79 | endpoint: '/graphql', // GraphQL endpoint suffix - '/graphql' by default 80 | dumpTraffic: true, 81 | }); 82 | engine.start(); 83 | 84 | // configure jwt middleware to connect to auth0, check the token and 85 | const jwtConfig = { 86 | secret: jwksRsa.koaJwtSecret(config.get('Security.jwks')), 87 | ...config.get('Security.jwt'), 88 | passthrough: true, 89 | }; 90 | app.use(koaJwt(jwtConfig)); 91 | 92 | app.use(engine.koaMiddleware()); 93 | 94 | // import the schema and mount it under /graphql 95 | import schema from '../presentation/schema'; 96 | import getViewerAndRoles from 
'../business/utils/auth'; 97 | 98 | import { formatErrorGenerator } from 'graphql-apollo-errors'; 99 | 100 | // get the dataloader for each request 101 | import * as business from '../business'; 102 | router.post( 103 | '/graphql', 104 | graphqlKoa(async ctx => { 105 | // create error formatter 106 | const formatErrorConfig = { 107 | publicDataPath: 'public', 108 | hooks: { 109 | onProcessedError: processedError => { 110 | const message = processedError.output.payload.message; 111 | const guid = processedError.output.payload.guid; 112 | ctx.log.child({ name: 'gql' }).warn({ 113 | msg: `${message} (error-id: "${guid}")`, 114 | stack: processedError.stack, 115 | }); 116 | ctx.status = processedError.output.statusCode || 500; 117 | }, 118 | }, 119 | }; 120 | const formatError = formatErrorGenerator(formatErrorConfig); 121 | const { user, roles } = await getViewerAndRoles(ctx.state.user); 122 | // build the data loader map, using reduce 123 | const dataloaders = Object.keys(business).reduce((dataloaders, loaderKey) => { 124 | return { ...dataloaders, [loaderKey]: business[loaderKey].getLoaders() }; 125 | }, {}); 126 | // create an optic context 127 | // create a context for each request 128 | const context = { dataloaders, user, roles }; 129 | return { 130 | // instrument the schema 131 | schema, 132 | context, 133 | debug: false, 134 | formatError, 135 | tracing: true, 136 | }; 137 | }) 138 | ); 139 | 140 | // redirect to graphiql 141 | router.get('/', ctx => { 142 | return ctx.redirect('/graphiql'); 143 | }); 144 | 145 | // create the /graphiql endpoint and connect it to the /graphql 146 | router.get('/graphiql', ctx => { 147 | return ctx.render('graphiql', {}); 148 | }); 149 | 150 | router.get('/login', ctx => { 151 | return ctx.render('login', {}); 152 | }); 153 | 154 | // use the router routes and restrict the method 155 | app.use(router.routes()); 156 | app.use(router.allowedMethods()); 157 | 158 | // start the app and listen to incomming request 159 | app.listen(3001); 160 | -------------------------------------------------------------------------------- /src/koa/views/graphiql/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | GraphiQL 7 | 8 | 17 | 18 | 19 | 20 | 21 | 22 | 27 | 28 | 29 | 30 | 145 | 146 | 147 | -------------------------------------------------------------------------------- /src/koa/views/login/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Auth0-VanillaJS 6 | 7 | 8 | 9 | 10 | 12 | 17 | 18 | 90 | 91 | 92 | 93 | 94 | 102 | 103 |

Welcome

104 | 105 | 106 | 107 | 108 | 109 | -------------------------------------------------------------------------------- /src/presentation/Bamer/Bamer.resolvers.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Resolver for the connection between Bamer and other model 3 | * 4 | * @flow 5 | */ 6 | 7 | import BookLoader from '../../business/book'; 8 | 9 | export default { 10 | Bamer: { 11 | /** 12 | * Resolve the connection of the books currently borrowed by one bamer 13 | * 14 | * @param {Bamer} bamer 15 | * @param {Context} ctx 16 | * @returns 17 | */ 18 | booksCurrentlyBorrowed(bamer: BamerDBType, _: {}, ctx: ContextType) { 19 | return BookLoader.loadByBorrowing(ctx, bamer.id); 20 | }, 21 | }, 22 | }; 23 | -------------------------------------------------------------------------------- /src/presentation/Bamer/Bamer.type.gql: -------------------------------------------------------------------------------- 1 | enum BAMER_ROLE { 2 | # A developer 3 | DEV 4 | # A sale 5 | SALE 6 | # A member of growth team 7 | GROWTH 8 | # An admin fi 9 | ADMIN 10 | } 11 | 12 | # represent a bamer 13 | type Bamer { 14 | id: ID! 15 | # the bamer role 16 | role: BAMER_ROLE 17 | # the bamer full name 18 | name: String @deprecated(reason: "Use firstName and lastName instead") 19 | # the bamer firstName 20 | firstName: String 21 | # the bamer lastName 22 | lastName: String 23 | # the bamer email 24 | email: String 25 | # the books currently borrowed by the bamer 26 | booksCurrentlyBorrowed: [Book] 27 | } 28 | -------------------------------------------------------------------------------- /src/presentation/Book/Book.resolvers.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Get the connections between Book and other model 3 | * 4 | * @flow 5 | */ 6 | 7 | import BamerLoader from '../../business/bamer'; 8 | 9 | export default { 10 | Book: { 11 | /** 12 | * Resolve the connection of the bamer currently borrowing one book 13 | * 14 | * @returns 15 | */ 16 | currentlyBorrowedBy(book: BookDBType, _: {}, ctx: ContextType) { 17 | return BamerLoader.load(ctx, book.bamerBorrowingId); 18 | }, 19 | }, 20 | }; 21 | -------------------------------------------------------------------------------- /src/presentation/Book/Book.type.gql: -------------------------------------------------------------------------------- 1 | # represent a book 2 | type Book { 3 | # the book id 4 | id: ID! 5 | # the book title 6 | name: String! 
7 | # the book author
8 | author: String
9 | # the bamer that may have borrowed the book
10 | currentlyBorrowedBy: Bamer
11 | }
--------------------------------------------------------------------------------
/src/presentation/Mutation/Mutation.resolvers.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Get the root mutation
3 | *
4 | * @flow
5 | */
6 |
7 | import BamerLoader from '../../business/bamer';
8 |
9 | export default {
10 | Mutation: {
11 | /**
12 | * Register a new bamer from the given BamerInput
13 | *
14 | * @returns
15 | */
16 | registerMyself(root: {}, args: { bamer: any }, ctx: ContextType) {
17 |
18 | return BamerLoader.register(ctx, args.bamer);
19 | },
20 | },
21 | };
22 |
--------------------------------------------------------------------------------
/src/presentation/Mutation/Mutation.type.gql:
--------------------------------------------------------------------------------
1 | # the structure of the input required by
2 | # - the `registerMyself` mutation
3 | input BamerInput {
4 | # the bamer role
5 | role: BAMER_ROLE!
6 | # the bamer first name
7 | firstName: String!
8 | # the bamer last name
9 | lastName: String!
10 | # the bamer email, validated against the JWT and the bam email pattern
11 | email: String!
12 | }
13 |
14 | type Mutation {
15 | registerMyself(bamer: BamerInput): Bamer
16 | }
--------------------------------------------------------------------------------
/src/presentation/Query/Query.resolvers.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Get the root queries
3 | *
4 | * @flow
5 | */
6 |
7 | import BamerLoader from '../../business/bamer';
8 | import BookLoader from '../../business/book';
9 |
10 | export default {
11 | Query: {
12 | allBamers(root: {}, _: {}, ctx: ContextType) {
13 | return BamerLoader.loadAll(ctx);
14 | },
15 | bamer(root: {}, args: { id: string }, ctx: ContextType) {
16 | return BamerLoader.load(ctx, args.id);
17 | },
18 | allBooks(root: {}, _: {}, ctx: ContextType) {
19 | return BookLoader.loadAll(ctx);
20 | },
21 | book(root: {}, args: { id: string }, ctx: ContextType) {
22 | return BookLoader.load(ctx, args.id);
23 | },
24 | },
25 | };
26 |
--------------------------------------------------------------------------------
/src/presentation/Query/Query.type.gql:
--------------------------------------------------------------------------------
1 | type Query {
2 | # a bamer, by id
3 | bamer(id: ID!): Bamer
4 | # all the bamers
5 | allBamers: [Bamer]
6 | # a book, by id
7 | book(id: ID!): Book
8 | # all the books
9 | allBooks: [Book]
10 | }
11 |
--------------------------------------------------------------------------------
/src/presentation/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bamlab/bam-api/b325e16e210ada8ecc20c7662ba2e92dcc4c60d5/src/presentation/README.md
--------------------------------------------------------------------------------
/src/presentation/mocks.js:
--------------------------------------------------------------------------------
1 | /**
2 | * @description: this file provides better mocks, using a dedicated mock library
3 | *
4 | * @flow
5 | */
6 |
7 | import casual from 'casual';
8 | import { MockList } from 'graphql-tools';
9 |
10 | export default {
11 | String: () => casual.word,
12 | Bamer: () => {
13 | const lastName = casual.last_name;
14 | const firstName = casual.first_name;
15 | return {
16 |
lastName, 17 | firstName, 18 | name: [firstName, lastName].join(' '), 19 | booksCurrentlyBorrowed: () => new MockList([0, 5]), 20 | }; 21 | }, 22 | Book: () => ({ 23 | name: () => casual.title, 24 | author: () => casual.name, 25 | }), 26 | Query: () => ({ 27 | allBamers: () => new MockList([1, 15]), 28 | }), 29 | }; 30 | -------------------------------------------------------------------------------- /src/presentation/resolvers.js: -------------------------------------------------------------------------------- 1 | import BammerResolver from './Bamer/Bamer.resolvers'; 2 | import BookResolver from './Book/Book.resolvers'; 3 | import QueryResolver from './Query/Query.resolvers'; 4 | import MutationResolver from './Mutation/Mutation.resolvers'; 5 | 6 | export default { 7 | ...BammerResolver, 8 | ...BookResolver, 9 | ...QueryResolver, 10 | ...MutationResolver, 11 | }; 12 | -------------------------------------------------------------------------------- /src/presentation/schema.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @description: this file defines the graphql schema 3 | * 4 | * @flow 5 | */ 6 | import glob from 'glob'; 7 | import fs from 'fs'; 8 | import path from 'path'; 9 | import { makeExecutableSchema } from 'graphql-tools'; 10 | 11 | const typeDefs = []; 12 | 13 | // loop over types and import the contents 14 | glob.sync('**/*.type.gql', { cwd: __dirname }).forEach(filename => { 15 | const filePath = path.join(__dirname, filename); 16 | const fileContent = fs.readFileSync(filePath, { encoding: 'utf8' }); 17 | typeDefs.push(fileContent); 18 | }); 19 | 20 | import resolvers from './resolvers'; 21 | 22 | // create a schema 23 | const schema = makeExecutableSchema({ typeDefs, resolvers }); 24 | 25 | // add mocks to the schema, preserving the existing resolvers (none for the time beeing) 26 | // import mocks from './mocks'; 27 | // addMockFunctionsToSchema({ schema, mocks, preserveResolvers: true }); 28 | 29 | export default schema; 30 | --------------------------------------------------------------------------------
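For reference, the mock wiring that src/presentation/schema.js keeps commented out would look roughly like the sketch below once enabled. It reuses the glob-based type loading already shown in schema.js and the `mocks` map from src/presentation/mocks.js; treat it as an illustration of how graphql-tools' `addMockFunctionsToSchema` is typically used, not as code that ships with this repository.

```js
import glob from 'glob';
import fs from 'fs';
import path from 'path';
import { makeExecutableSchema, addMockFunctionsToSchema } from 'graphql-tools';
import mocks from './mocks';
import resolvers from './resolvers';

// same glob-based type loading as in schema.js
const typeDefs = glob
  .sync('**/*.type.gql', { cwd: __dirname })
  .map(filename => fs.readFileSync(path.join(__dirname, filename), { encoding: 'utf8' }));

const schema = makeExecutableSchema({ typeDefs, resolvers });

// fill any field that has no resolver with casual-based mock data,
// while keeping the real resolvers for the fields that do have one
addMockFunctionsToSchema({ schema, mocks, preserveResolvers: true });

export default schema;
```

With `preserveResolvers: true`, the real database-backed resolvers keep working and only the unresolved fields fall back to the casual-based mocks, which is useful while the schema is still ahead of the implementation.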