├── .dockerignore ├── .editorconfig ├── .eslintignore ├── .eslintrc ├── .gitignore ├── .npmignore ├── CONTRIBUTING.md ├── Dockerfile ├── LICENSE ├── README.md ├── apidoc.json ├── circle.yml ├── dockercfg-template ├── docs ├── README.md └── apidoc_intro.ejs.md ├── npmrc-env ├── package-lock.json ├── package.json ├── scripts ├── bench_password_hash.js ├── build_docs.js ├── ci-deploy.sh ├── ci-test.sh ├── drop-tables.sql ├── generate_keys.js ├── json-schema-to-markdown-table2.js └── publish_web.js ├── src ├── app.js ├── controllers │ ├── _apidoc.js │ ├── accounts.js │ ├── authTokens.js │ ├── health.js │ ├── messages.js │ ├── metadata.js │ └── transfers.js ├── errors │ ├── already-rolled-back-error.js │ ├── expired-transfer-error.js │ ├── insufficient-funds-error.js │ ├── invalid-body-error.js │ ├── invalid-modification-error.js │ ├── missing-fulfillment-error.js │ ├── no-subscriptions-error.js │ ├── transfer-not-conditional-error.js │ ├── transfer-not-found-error.js │ └── unmet-condition-error.js ├── lib │ ├── app.js │ ├── config.js │ ├── db.js │ ├── disabledAccounts.js │ ├── holds.js │ ├── knex.js │ ├── koa-websocket.js │ ├── notificationBroadcasterWebsocket.js │ ├── rpcHandler.js │ ├── seed-db.js │ ├── timeQueue.js │ ├── timerWorker.js │ ├── tokenStrategy.js │ ├── transferExpiryMonitor.js │ ├── transferUtils.js │ ├── updateState.js │ └── utils.js ├── models │ ├── accounts.js │ ├── authTokens.js │ ├── converters │ │ ├── accounts.js │ │ ├── fulfillments.js │ │ └── transfers.js │ ├── db │ │ ├── accounts.js │ │ ├── adjustments.js │ │ ├── audit.js │ │ ├── entries.js │ │ ├── fulfillments.js │ │ ├── rejectionReasons.js │ │ ├── transferStatuses.js │ │ ├── transfers.js │ │ └── utils.js │ ├── health.js │ ├── messages.js │ └── transfers.js ├── services │ ├── app.js │ ├── auth.js │ ├── config.js │ ├── log.js │ ├── makeRpcHandler.js │ ├── notificationBroadcaster.js │ ├── timeQueue.js │ ├── timerWorker.js │ ├── transferExpiryMonitor.js │ ├── uriManager.js │ └── validator.js └── 
sql │ ├── pg │ ├── 1-2.sql │ ├── 2-1.sql │ ├── 2-3.sql │ └── 3-2.sql │ └── sqlite3 │ ├── create.sql │ └── drop.sql ├── test ├── .eslintrc ├── accountSpec.js ├── configSpec.js ├── data │ ├── accounts.json │ ├── fulfillments │ │ ├── cancellation.json │ │ ├── execution.json │ │ ├── executionInvalid.json │ │ ├── executionNoData.json │ │ └── executionTypeAnd.json │ ├── messages │ │ ├── fromto.json │ │ └── simple.json │ ├── notificationDatabaseEntry.json │ ├── notificationDeletedSubscription.json │ ├── notificationResponse.json │ ├── private.pem │ ├── public.pem │ ├── signKeyRSAPrv.pem │ ├── signKeyRSAPub.pem │ ├── subscriptions │ │ ├── alice.json │ │ ├── bob.json │ │ └── deleted.json │ └── transfers │ │ ├── executed.json │ │ ├── fromDisabledAccount.json │ │ ├── fromEve.json │ │ ├── fromFiniteMinBalance.json │ │ ├── fromInfiniteMinBalance.json │ │ ├── fromNoBalanceAccount.json │ │ ├── fromZeroMinBalance.json │ │ ├── multiCredit.json │ │ ├── multiDebit.json │ │ ├── multiDebitAndCredit.json │ │ ├── noAuthorization.json │ │ ├── prepared.json │ │ ├── proposed.json │ │ ├── simple.json │ │ ├── simpleWithExpiry.json │ │ ├── toDisabledAccount.json │ │ ├── withAndCondition.json │ │ └── withExpiry.json ├── fulfillmentSpec.js ├── getAuthTokenSpec.js ├── getTransferSpec.js ├── healthSpec.js ├── helpers │ ├── app.js │ ├── db.js │ ├── dbAccountsMock.js │ ├── dbFailureMock.js │ ├── log.js │ ├── timing.js │ └── validator.js ├── metadataSpec.js ├── mocha.opts ├── notificationSpec.js ├── postMessageSpec.js ├── putTransferSpec.js ├── rejectionSpec.js ├── timeQueueSpec.js ├── timerWorkerSpec.js └── transferStateSpec.js ├── wallaby.js └── yarn.lock /.dockerignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | 3 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # http://editorconfig.org 2 | root = true 3 | 
4 | [*] 5 | indent_style = space 6 | indent_size = 2 7 | end_of_line = lf 8 | charset = utf-8 9 | trim_trailing_whitespace = true 10 | insert_final_newline = true 11 | 12 | [*.md] 13 | trim_trailing_whitespace = false 14 | -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | apidoc-out/ 3 | coverage/ 4 | integration-test/ 5 | -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | # We use JavaScript Standard Style (https://github.com/feross/standard) 2 | extends: standard 3 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /node_modules/ 2 | /web/ 3 | /apidoc-out/ 4 | coverage/ 5 | *.cluster 6 | /npm-debug.log 7 | /integration-test/ 8 | *.db 9 | *.db-journal 10 | *v8.log 11 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | apidoc-out/ 3 | coverage/ 4 | *.cluster 5 | /npm-debug.log 6 | 7 | .npmrc 8 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to Interledger.js Repositories 2 | 3 | ## Getting Involved 4 | 5 | Detailed contribution guidelines are coming soon... 6 | 7 | ## Contributor License Agreement 8 | 9 | This project uses the [JS Foundation](https://js.foundation) CLA for licensing contributions. 10 | 11 | Please submit your pull request as you would with any other open source project on GitHub and our CLA Assistant bot will guide you through the signing process. 
12 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:7.10 2 | 3 | RUN mkdir -p /usr/src/app 4 | WORKDIR /usr/src/app 5 | 6 | COPY . /usr/src/app 7 | 8 | RUN npm install 9 | 10 | EXPOSE 3000 11 | 12 | CMD [ "npm", "start" ] 13 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2016 JS Foundation and contributors 2 | Copyright 2015-2016 Ripple, Inc. and contributors 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); you may not use 5 | this file except in compliance with the License. You may obtain a copy of the 6 | License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
11 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Five Bells Ledger [![npm][npm-image]][npm-url] [![circle][circle-image]][circle-url] [![codecov][codecov-image]][codecov-url] [![Known Vulnerabilities][snyk-image]][snyk-url] 2 | 3 | [npm-image]: https://img.shields.io/npm/v/five-bells-ledger.svg?style=flat 4 | [npm-url]: https://npmjs.org/package/five-bells-ledger 5 | [circle-image]: https://circleci.com/gh/interledgerjs/five-bells-ledger.svg?style=shield&circle-token=e31b3ba89c015bf7f1c6de9f5156e7daa32fd793 6 | [circle-url]: https://circleci.com/gh/interledgerjs/five-bells-ledger 7 | [codecov-image]: https://codecov.io/gh/interledgerjs/five-bells-ledger/branch/master/graph/badge.svg 8 | [codecov-url]: https://codecov.io/gh/interledgerjs/five-bells-ledger 9 | [snyk-image]: https://snyk.io/test/github/interledgerjs/five-bells-ledger/badge.svg 10 | [snyk-url]: https://snyk.io/test/github/interledgerjs/five-bells-ledger 11 | 12 | > A reference implementation of the Five Bells Ledger API 13 | 14 | 15 | ## Usage 16 | 17 | You can see the ledger in action as part of the [`five-bells-demo`](https://github.com/interledgerjs/five-bells-demo)! 18 | 19 | To run the ledger as a standalone server: 20 | 21 | ### Step 1: Clone repo 22 | 23 | ``` sh 24 | git clone https://github.com/interledgerjs/five-bells-ledger.git 25 | cd five-bells-ledger 26 | ``` 27 | 28 | ### Step 2: Install dependencies 29 | 30 | ``` sh 31 | npm install 32 | ``` 33 | 34 | ### Step 3: Run it! 35 | 36 | To run it using an in-memory database (the simplest option), run: 37 | 38 | ``` sh 39 | LEDGER_ADMIN_PASS=mypassword LEDGER_DB_URI=sqlite://:memory: npm start 40 | ``` 41 | 42 | Or run: 43 | 44 | ```sh 45 | npm start 46 | ``` 47 | 48 | See "Environment Variables" in the generated documentation for config options. 
49 | 50 | ## Building Docs 51 | 52 | After installation: 53 | 54 | ```sh 55 | npm run docs 56 | ``` 57 | 58 | Open `apidocs-out/index.html` in a web browser to see the generated API documentation. 59 | 60 | ## Running with Docker (Alternative Method) 61 | 62 | This project can be run in a [Docker](https://www.docker.com/) container. 63 | 64 | You need to start a postgres container: 65 | 66 | ``` sh 67 | docker run --name five-bells-ledger-db -e POSTGRES_PASSWORD=password -d postgres 68 | ``` 69 | 70 | After giving postgres a few seconds to start up, you can run a five-bells-ledger Docker container, linking to that database: 71 | 72 | ``` sh 73 | docker run -d -e LEDGER_PORT=1337 -e LEDGER_ADMIN_PASS=admin -e LEDGER_DB_URI=postgres://postgres:password@db --link five-bells-ledger-db:db -p 1337:1337 -h localhost --name fivebells interledger/five-bells-ledger 74 | ``` 75 | 76 | Breaking down that command: 77 | 78 | * `-d` Run in the background 79 | * `-e LEDGER_PORT=1337` Set the ledger's port to 1337. This is just an example for how to set a config option. 80 | * `-e LEDGER_ADMIN_PASS=admin` Create an "admin" user with password "admin" at startup 81 | * `-e LEDGER_DB_URI=postgres://postgres:password@db` Set the database URL. Here, 'db' is a host that is Docker-linked: 82 | * `--link five-bells-ledger-db:db` This allows Five Bells Ledger to see the database that we set up above. 83 | * `-p 1337:1337` Expose port 1337 to localhost 84 | * `-h localhost` makes the ledger use 'localhost' as its hostname in the endpoint URLs it announces 85 | * `--name fivebells` This allows you to refer to this container in for instance `docker inspect fivebells` 86 | * `interledger/five-bells-ledger` Use the [`five-bells-ledger` Docker image](https://hub.docker.com/r/interledger/five-bells-ledger/) 87 | 88 | Now open http://localhost:1337/health in your browser. 
89 | 90 | To create a user, you can run: 91 | 92 | ```sh 93 | curl -i -sS -X PUT --user admin:admin -H "Content-Type: application/json" -d'{ "name" : "alice", "password" : "alice", "balance" : "20000" }' http://localhost:1337/accounts/alice 94 | ``` 95 | 96 | To see the database contents, you can create a postgres container that interactively runs psql: 97 | ```sh 98 | docker run -it --rm --link five-bells-ledger-db:db postgres psql postgres://postgres:password@db 99 | ``` 100 | 101 | You can then use [`ilp-plugin-bells`](https://github.com/interledgerjs/ilp-plugin-bells) to develop a client that connects to this ledger. Make sure you use the matching plugin version to connect to the ledger. 102 | 103 | In particular, ledger version 20 can be accessed using `ilp-plugin-bells` version 12. 104 | 105 | ## Running tests 106 | 107 | To run tests using an in-memory database, run: 108 | 109 | ``` sh 110 | npm test 111 | ``` 112 | 113 | By default, stdout from the app process is buffered up, and only shown after a test fails. That way, you can easily debug a failing test: 114 | 115 | ```sh 116 | DEBUG=ledger:* npm test 117 | ``` 118 | 119 | If you want to see the output for passing tests as well, and not buffered until the test is over, use the `SHOW_STDOUT` environment variable for this: 120 | 121 | ```sh 122 | SHOW_STDOUT=true DEBUG=ledger:transfers npm test 123 | ``` 124 | 125 | If you wish to specify the database against which the tests are run, use the `LEDGER_UNIT_DB_URI` environment variable. 
126 | 127 | ``` sh 128 | LEDGER_UNIT_DB_URI=postgres://root:password@localhost:5432/ledger_test_db npm test 129 | ``` 130 | 131 | For example, to run against a Postgres instance in Docker, first start the database server: 132 | 133 | ``` sh 134 | docker run -it --rm --name fbl-pg-test postgres 135 | ``` 136 | 137 | Then, in another terminal, run the tests: 138 | 139 | ``` sh 140 | LEDGER_UNIT_DB_URI=postgres://postgres@`docker inspect --format '{{ .NetworkSettings.IPAddress }}' fbl-pg-test`/postgres npm test 141 | ``` 142 | 143 | ## A word of warning 144 | 145 | This software is under development and no guarantees are made regarding reliability. 146 | -------------------------------------------------------------------------------- /apidoc.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "Interledger Reference Ledger API", 3 | "url": "https://acme-ledger.example", 4 | "order": [ 5 | "Transfer_Methods", 6 | "PutTransfer", 7 | "PutTransferFulfillment", 8 | "GetTransfer", 9 | "GetTransferState", 10 | "GetTransferFulfillment", 11 | "Account_Methods", 12 | "PutAccount", 13 | "GetAccount", 14 | "SubscribeAccountTransfers", 15 | "Metadata_Methods" 16 | ], 17 | "header": { 18 | "title": "Intro and Data Types", 19 | "filename": "apidoc-out/apidoc_intro.md" 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /circle.yml: -------------------------------------------------------------------------------- 1 | machine: 2 | services: 3 | - docker 4 | - mysql 5 | - postgresql 6 | node: 7 | version: 7.7.1 8 | environment: 9 | # Read-only 10 | AWS_SECRET_ACCESS_KEY: Q0SsTtjzt9zK3VpGpdp3VdGdYZshfdJ7IDtdr8BU 11 | AWS_ACCESS_KEY_ID: AKIAIVUX7BG4NS7A2UHQ 12 | dependencies: 13 | cache_directories: 14 | - "integration-test/node_modules" 15 | test: 16 | override: 17 | - scripts/ci-test.sh "$CIRCLE_NODE_INDEX" "$CIRCLE_NODE_TOTAL": 18 | parallel: true 19 | deployment: 20 | production: 21 | 
branch: master 22 | commands: 23 | # Necessary git config to run website deployment on CircleCI 24 | - git config --global user.email "info@circleci.com" 25 | - git config --global user.name "CircleCI" 26 | - git config --global push.default simple 27 | # Deployment tasks 28 | - scripts/ci-deploy.sh 29 | general: 30 | artifacts: 31 | - "coverage/lcov-report" 32 | - "apidoc-out" 33 | -------------------------------------------------------------------------------- /dockercfg-template: -------------------------------------------------------------------------------- 1 | {"https://index.docker.io/v1/":{"auth":"","email":""}} 2 | -------------------------------------------------------------------------------- /docs/README.md: -------------------------------------------------------------------------------- 1 | # five-bells-ledger Documentation 2 | 3 | This folder contains the **source files** for the [five-bells-ledger documentation](https://interledgerjs.github.io/five-bells-ledger/apidoc/). To see the rendered documentation, please visit https://interledgerjs.github.io/five-bells-ledger/apidoc/. 
4 | -------------------------------------------------------------------------------- /npmrc-env: -------------------------------------------------------------------------------- 1 | //registry.npmjs.org/:_authToken=${NPM_TOKEN} 2 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "five-bells-ledger", 3 | "version": "21.2.5", 4 | "description": "Five Bells ledger reference implementation", 5 | "keywords": [ 6 | "interledger", 7 | "five-bells", 8 | "ilp" 9 | ], 10 | "license": "Apache-2.0", 11 | "repository": { 12 | "type": "git", 13 | "url": "git@github.com:interledgerjs/five-bells-ledger.git" 14 | }, 15 | "scripts": { 16 | "start": "node src/app.js", 17 | "start-prof": "node --prof --logfile=${LEDGER_V8_LOGFILE:-v8.log} src/app.js", 18 | "lint": "eslint .", 19 | "test": "mocha", 20 | "coverage": "istanbul cover _mocha", 21 | "report-coverage": "codecov", 22 | "docs": "npm run docs:prep-intro && npm run docs:apidoc", 23 | "docs:prep-intro": "scripts/build_docs.js", 24 | "docs:apidoc": "apidoc -o apidoc-out -i src/controllers/", 25 | "integration": "integration-loader && integration all", 26 | "bump": "version-bump" 27 | }, 28 | "engines": { 29 | "node": ">=7.6.0" 30 | }, 31 | "dependencies": { 32 | "bignumber.js": "^4.0.1", 33 | "canonical-json": "0.0.4", 34 | "co": "^4.6.0", 35 | "co-body": "^5.1.1", 36 | "co-defer": "^1.0.0", 37 | "co-emitter": "^0.2.3", 38 | "deep-diff": "^0.3.4", 39 | "eslint-plugin-standard": "^3.0.1", 40 | "extensible-error": "^1.0.2", 41 | "five-bells-condition": "^5.0.1", 42 | "five-bells-shared": "^26.1.0", 43 | "http-errors": "^1.6.2", 44 | "jsonwebtoken": "^8.1.1", 45 | "kcors": "^2.2.1", 46 | "knex": "^0.14.4", 47 | "koa": "^2.2.0", 48 | "koa-compose": "^3.2.1", 49 | "koa-compress": "^2.0.0", 50 | "koa-cors": "0.0.16", 51 | "koa-json-error": "^3.1.2", 52 | "koa-passport": "^3.0.0", 53 | 
"koa-riverpig": "^2.0.0", 54 | "koa-router": "^7.1.1", 55 | "koa-static": "^3.0.0", 56 | "lodash": "^4.17.4", 57 | "methods": "^1.1.2", 58 | "moment": "^2.19.3", 59 | "passport-anonymous": "^1.0.1", 60 | "passport-client-certificate": "^0.1.1", 61 | "passport-http": "^0.3.0", 62 | "passport-http-signature": "^1.0.0", 63 | "passport-strategy": "^1.0.0", 64 | "pg-migrator": "^1.0.5", 65 | "priorityqueuejs": "^1.0.0", 66 | "promise-retry": "^1.1.1", 67 | "riverpig": "^1.1.2", 68 | "spdy": "^3.4.4", 69 | "tweetnacl": "^1.0.0-rc.1", 70 | "ws": "^3.3.1" 71 | }, 72 | "optionalDependencies": { 73 | "mysql": "^2.13.0", 74 | "pg": "^6.1.5", 75 | "tedious": "^2.0.0" 76 | }, 77 | "devDependencies": { 78 | "apidoc": "^0.17.5", 79 | "chai": "^3.5.0", 80 | "chalk": "^1.1.3", 81 | "codecov": "^2.1.0", 82 | "ejs": "^2.5.6", 83 | "eslint": "^3.19.0", 84 | "eslint-config-standard": "^10.2.0", 85 | "eslint-plugin-import": "^2.2.0", 86 | "eslint-plugin-node": "^4.2.2", 87 | "eslint-plugin-promise": "^3.5.0", 88 | "five-bells-integration-test-loader": "^1.3.0", 89 | "istanbul": "^1.1.0-alpha.1", 90 | "mocha": "~3.2.0", 91 | "mock-require": "^2.0.1", 92 | "nock": "^9.0.13", 93 | "sinon": "^2.1.0", 94 | "sinon-chai": "^2.9.0", 95 | "spec-xunit-file": "0.0.1-3", 96 | "sqlite3": "^3.1.8", 97 | "supertest": "^3.0.0", 98 | "through2": "^2.0.3" 99 | }, 100 | "config": { 101 | "five-bells-integration-test-loader": { 102 | "module": "five-bells-integration-test", 103 | "repo": "interledgerjs/five-bells-integration-test" 104 | } 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /scripts/bench_password_hash.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | /** 4 | * Benchmark for password hashing with varying number of iterations. 5 | * 6 | * Useful for deciding the number of iterations, depending on the desired 7 | * latency vs security trade-off. 
8 | * 9 | * To use, please run `npm install do-you-even-bench` and then 10 | * `node scripts/bench_password_hash.js`. 11 | */ 12 | require('do-you-even-bench')([ 13 | { 14 | name: 'pbkdf2-100000', 15 | fn: () => { 16 | const crypto = require('crypto') 17 | crypto.pbkdf2Sync('test', 'test', 100000, 512, 'sha512') 18 | } 19 | }, 20 | { 21 | name: 'pbkdf2-10000', 22 | fn: () => { 23 | const crypto = require('crypto') 24 | crypto.pbkdf2Sync('test', 'test', 10000, 512, 'sha512') 25 | } 26 | }, 27 | { 28 | name: 'pbkdf2-1000', 29 | fn: () => { 30 | const crypto = require('crypto') 31 | crypto.pbkdf2Sync('test', 'test', 1000, 512, 'sha512') 32 | } 33 | }, 34 | { 35 | name: 'pbkdf2-100', 36 | fn: () => { 37 | const crypto = require('crypto') 38 | crypto.pbkdf2Sync('test', 'test', 100, 512, 'sha512') 39 | } 40 | } 41 | // { 42 | // name: 'bcrypt-10', 43 | // fn: () => { 44 | // const bcrypt = require('bcrypt') 45 | // const salt = bcrypt.genSaltSync(10) 46 | // bcrypt.hashSync('test', salt) 47 | // } 48 | // } 49 | ]) 50 | -------------------------------------------------------------------------------- /scripts/build_docs.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | /* Forked from https://github.com/ripple/ripple-lib/blob/develop/scripts/build_docs.js 3 | * Copyright (c) 2012-2015 Ripple Labs Inc. 4 | * 5 | * Permission to use, copy, modify, and distribute this software for any 6 | * purpose with or without fee is hereby granted, provided that the above 7 | * copyright notice and this permission notice appear in all copies. 
8 | */ 9 | 10 | 'use strict' 11 | const fs = require('fs') 12 | const path = require('path') 13 | // const execSync = require('child_process').execSync; 14 | const ejs = require('ejs') 15 | const renderFromPaths = 16 | require('./json-schema-to-markdown-table2.js').renderFromPaths 17 | const ROOT = path.dirname(path.normalize(__dirname)) 18 | 19 | // function strip(string) { 20 | // return string.replace(/^\s+|\s+$/g, ''); 21 | // } 22 | // 23 | // function importFile(relativePath) { 24 | // const absolutePath = path.join(ROOT, relativePath); 25 | // return strip(fs.readFileSync(absolutePath).toString('utf-8')); 26 | // } 27 | 28 | // function renderFixture(fixtureRelativePath) { 29 | // const json = importFile(path.join('test', 'fixtures', fixtureRelativePath)); 30 | // return '\n```json\n' + json + '\n```\n'; 31 | // } 32 | 33 | function renderSchema (schemaRelativePath) { 34 | const schemasPath = path.join(path.dirname(require.resolve('five-bells-shared')), 'schemas') 35 | const schemaPath = path.join(schemasPath, schemaRelativePath) 36 | return renderFromPaths(schemaPath, schemasPath) 37 | } 38 | 39 | function main () { 40 | const locals = { 41 | // importFile: importFile, 42 | // renderFixture: renderFixture, 43 | renderSchema: renderSchema 44 | } 45 | 46 | const introDocPath = path.join(ROOT, 'docs', 'apidoc_intro.ejs.md') 47 | ejs.renderFile(introDocPath, locals, function (error, output) { 48 | if (error) { 49 | console.error(error) 50 | process.exit(1) 51 | } else { 52 | const outputPath = path.join(ROOT, 'apidoc-out', 'apidoc_intro.md') 53 | try { 54 | fs.mkdirSync('apidoc-out') 55 | } catch (e) { 56 | if (e.code !== 'EEXIST') throw e 57 | } 58 | fs.writeFileSync(outputPath, output) 59 | // execSync('npm run apidoc', {cwd: ROOT}); 60 | process.exit(0) 61 | } 62 | }) 63 | } 64 | 65 | main() 66 | -------------------------------------------------------------------------------- /scripts/ci-deploy.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | 3 | publishNpm() { 4 | # Push NPM package if not yet published 5 | mv npmrc-env .npmrc 6 | if [ "$(npm show five-bells-ledger version)" != "$(npm ls --depth=-1 2>/dev/null | head -1 | cut -f 1 -d " " | cut -f 2 -d @)" ]; then 7 | npm publish 8 | fi 9 | } 10 | 11 | pushDocker() { 12 | # Push Docker image tagged latest and tagged with commit descriptor 13 | sed "s//${DOCKER_TOKEN}/" < "dockercfg-template" > ~/.dockercfg 14 | docker tag interledger/five-bells-ledger:latest interledger/five-bells-ledger:"$(git describe)" 15 | docker push interledger/five-bells-ledger:latest 16 | docker push interledger/five-bells-ledger:"$(git describe)" 17 | } 18 | 19 | updateWebsite() { 20 | node scripts/publish_web.js 21 | } 22 | 23 | publishNpm 24 | pushDocker 25 | updateWebsite 26 | -------------------------------------------------------------------------------- /scripts/ci-test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | 3 | NODE_INDEX="$1" 4 | TOTAL_NODES="$2" 5 | 6 | lint() { 7 | npm run lint 8 | } 9 | 10 | integrationtest() { 11 | if git log -1 --pretty=%B | grep -qF "[skip tests]"; then true; else npm run integration; fi 12 | } 13 | 14 | docs() { 15 | npm run docs 16 | } 17 | 18 | dockerBuild() { 19 | docker build -t interledger/five-bells-ledger . 
20 | } 21 | 22 | mysqltest() { 23 | mysql -u ubuntu -e 'DROP DATABASE circle_test;' 24 | mysql -u ubuntu -e 'CREATE DATABASE circle_test;' 25 | docker run --name=ledger-test-mysql -it --net=host -e LEDGER_UNIT_DB_URI=mysql://ubuntu@localhost/circle_test interledger/five-bells-ledger npm test 26 | } 27 | 28 | postgrestest() { 29 | psql -U ubuntu -c 'DROP DATABASE circle_test;' 30 | psql -U ubuntu -c 'CREATE DATABASE circle_test;' 31 | LEDGER_UNIT_DB_URI=postgres://ubuntu@localhost/circle_test npm test 32 | } 33 | 34 | sqlitetest() { 35 | # Run tests with coverage (SQLite) 36 | mkdir coverage 37 | LEDGER_UNIT_DB_URI=sqlite:// XUNIT_FILE=coverage/xunit.xml npm run coverage -- -- -R spec-xunit-file 38 | 39 | # Extract test results 40 | cp coverage/xunit.xml "${CIRCLE_TEST_REPORTS}/" 41 | 42 | # Report coverage 43 | npm run report-coverage 44 | } 45 | 46 | oneNode() { 47 | lint 48 | dockerBuild 49 | sqlitetest 50 | integrationtest 51 | postgrestest 52 | docs 53 | } 54 | 55 | twoNodes() { 56 | case "$NODE_INDEX" in 57 | 0) lint; dockerBuild; sqlitetest; integrationtest;; 58 | 1) dockerBuild; postgrestest; docs;; 59 | *) echo "ERROR: invalid usage"; exit 2;; 60 | esac 61 | } 62 | 63 | threeNodes() { 64 | case "$NODE_INDEX" in 65 | 0) lint; dockerBuild; sqlitetest integrationtest;; 66 | 1) dockerBuild; postgrestest;; 67 | 2) docs;; 68 | *) echo "ERROR: invalid usage"; exit 2;; 69 | esac 70 | } 71 | 72 | fourNodes() { 73 | case "$NODE_INDEX" in 74 | 0) lint; dockerBuild; integrationtest;; 75 | 1) dockerBuild; postgrestest;; 76 | 2) dockerBuild; sqlitetest;; 77 | 3) docs;; 78 | *) echo "ERROR: invalid usage"; exit 2;; 79 | esac 80 | } 81 | 82 | case "$TOTAL_NODES" in 83 | "") oneNode;; 84 | 1) oneNode;; 85 | 2) twoNodes;; 86 | 3) threeNodes;; 87 | 4) fourNodes;; 88 | *) echo "ERROR: invalid usage"; exit 2;; 89 | esac 90 | -------------------------------------------------------------------------------- /scripts/drop-tables.sql: 
-------------------------------------------------------------------------------- 1 | DROP TABLE "L_ACCOUNTS"; 2 | DROP TABLE "L_FULFILLMENTS"; 3 | DROP TABLE "L_ENTRIES"; 4 | DROP TABLE "L_TRANSFERS"; 5 | DROP TABLE "L_TRANSFER_ADJUSTMENTS"; 6 | -------------------------------------------------------------------------------- /scripts/generate_keys.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const crypto = require('crypto') 4 | const ed25519 = require('ed25519') 5 | 6 | const secret = crypto.randomBytes(32) 7 | const pubkey = ed25519.MakeKeypair(secret).publicKey 8 | 9 | console.log('secret', secret.toString('base64')) 10 | console.log('public', pubkey.toString('base64')) 11 | -------------------------------------------------------------------------------- /scripts/json-schema-to-markdown-table2.js: -------------------------------------------------------------------------------- 1 | /* Forked from https://github.com/clark800/json-schema-to-markdown 2 | * Copyright (c) 2015, Ripple Labs 3 | * 4 | * Permission to use, copy, modify, and/or distribute this software for any 5 | * purpose with or without fee is hereby granted, provided that the above 6 | * copyright notice and this permission notice appear in all copies. 
7 | */ 8 | /* eslint no-use-before-define: [2, "nofunc"] */ 9 | 'use strict' 10 | const fs = require('fs') 11 | const join = require('path').join 12 | // const dirname = require('path').dirname 13 | 14 | // rows are represented as an array of three strings: name, type, description 15 | function includes (array, item) { 16 | return array && array.indexOf(item) !== -1 17 | } 18 | 19 | function flatten (arrays) { 20 | return [].concat.apply([], arrays) 21 | } 22 | 23 | function formatTable (rows) { 24 | const header = ['Name', 'Type', 'Description'] 25 | const divider = ['----', '----', '-----------'] 26 | const allRows = [header, divider].concat(rows) 27 | return allRows.map(row => row.join(' | ')).join('\n') 28 | } 29 | 30 | function formatName (path) { 31 | if (path.length === 0) { 32 | return '' 33 | } 34 | if (path.length === 1) { 35 | return path[0] 36 | } 37 | return '*' + path.slice(0, -1).join('.') + '.* ' + path.slice(-1)[0] 38 | } 39 | 40 | function formatType (schema) { 41 | if (schema.link) { 42 | const prefix = includes(schema.link, '://') ? '' : '#' 43 | return '[' + schema.title + '](' + prefix + schema.link + ')' 44 | } 45 | if (schema.format) { 46 | return schema.format + ' string' 47 | } 48 | if (schema.pattern) { 49 | return 'string' 50 | } 51 | if (schema.enum) { 52 | return 'string' 53 | } 54 | return schema.type || 'object' 55 | } 56 | 57 | function formatRow (schema, path, isRequired, typeOverride) { 58 | const description = (isRequired ? '' : '*Optional* ') + 59 | (schema.description || '') 60 | return [formatName(path), typeOverride || formatType(schema), description] 61 | } 62 | 63 | function sortKeys (keys, requiredKeys) { 64 | const result = requiredKeys ? 
requiredKeys.slice() : [] 65 | result.forEach(key => { 66 | if (!includes(keys, key)) { 67 | throw new Error('Property in "required" not in "properties": ' + key) 68 | } 69 | }) 70 | keys.sort().forEach(key => { 71 | if (!includes(result, key)) { 72 | result.push(key) 73 | } 74 | }) 75 | return result 76 | } 77 | 78 | function generateRowsForObject (schema, path, schemas, isRequired) { 79 | const keys = sortKeys(Object.keys(schema.properties), schema.required) 80 | const rows = flatten(keys.map(name => { 81 | const isRequiredField = includes(schema.required, name) 82 | return generateRowsForSchema( 83 | schema.properties[name], path.concat([name]), schemas, isRequiredField) 84 | })) 85 | return path.length > 0 ? [formatRow(schema, path, isRequired)].concat(rows) : rows 86 | } 87 | 88 | function generateRowsForArray (schema, path, schemas, isRequired) { 89 | const newPath = path.slice(0, -1).concat([path.slice(-1)[0] + '[]']) 90 | const rows = generateRowsForSchema(schema.items, newPath, schemas, true) 91 | if (rows.length === 1) { 92 | const typeOverride = 'array\\[' + rows[0][1] + '\\]' 93 | return [formatRow(schema, path, isRequired, typeOverride)] 94 | } 95 | const firstRow = formatRow(schema, path, isRequired) 96 | return [firstRow].concat(rows) 97 | } 98 | 99 | function removeDuplicates (rows) { 100 | const hash = {} 101 | const result = [] 102 | rows.forEach(row => { 103 | const key = row.join('|') 104 | if (!hash[key]) { 105 | result.push(row) 106 | hash[key] = true 107 | } 108 | }) 109 | return result 110 | } 111 | 112 | function overrideDescription (schema, description) { 113 | const override = description ? 
{description: description} : {} 114 | return assign(assign({}, schema), override) 115 | } 116 | 117 | function generateRowsForBranch (branchSchemas, path, description, schemas, 118 | isRequired) { 119 | const nonNullSchemas = branchSchemas.filter(schema => schema.type !== 'null') 120 | const rows = flatten(nonNullSchemas.map(branchSchema => 121 | generateRowsForSchema(overrideDescription(branchSchema, description), 122 | path, schemas, isRequired))) 123 | const result = removeDuplicates(rows) 124 | const hasNull = (nonNullSchemas.length < branchSchemas.length) 125 | if (hasNull) { 126 | for (var i = 0; i < result.length; i++) { 127 | result[i][1] = result[i][1] + ', null' 128 | } 129 | } 130 | return result 131 | } 132 | 133 | function generateRowsForCompleteSchema (schema, path, schemas, isRequired) { 134 | if (schema.link && path.length > 0) { 135 | return [formatRow(schema, path, isRequired)] 136 | } 137 | if (schema.type === 'array') { 138 | if (path.length > 0) { 139 | return generateRowsForArray(schema, path, schemas, isRequired) 140 | } 141 | return generateRowsForSchema(schema.items, path, schemas, true) 142 | } 143 | if (schema.properties) { 144 | return generateRowsForObject(schema, path, schemas, isRequired) 145 | } 146 | if (schema.additionalProperties) { 147 | return generateRowsForSchema(schema.additionalProperties, 148 | path.concat('\\*'), schemas, isRequired) 149 | } 150 | if (schema.oneOf) { 151 | return generateRowsForBranch(schema.oneOf, path, schema.description, 152 | schemas, isRequired) 153 | } 154 | if (schema.anyOf) { 155 | return generateRowsForBranch(schema.anyOf, path, schema.description, 156 | schemas, isRequired) 157 | } 158 | return [formatRow(schema, path, isRequired)] 159 | } 160 | 161 | function assign (destination, source) { 162 | for (let key in source) { // eslint-disable-line 163 | if (source.hasOwnProperty(key)) { 164 | destination[key] = source[key] 165 | } 166 | } 167 | return destination 168 | } 169 | 170 | function 
completeSchema (schema, schemas) { 171 | if (schema.$ref) { 172 | const refSchema = schemas[schema.$ref] 173 | if (!refSchema) { 174 | throw new Error('Could not find schema for: ' + schema.$ref) 175 | } 176 | return assign(assign({}, refSchema), schema) 177 | } 178 | return schema 179 | } 180 | 181 | function generateRowsForSchema (schema, path, schemas, isRequired) { 182 | const completedSchema = completeSchema(schema, schemas) 183 | return generateRowsForCompleteSchema(completedSchema, path, schemas, 184 | isRequired) 185 | } 186 | 187 | function recursivelyListDirectory (directory) { 188 | const filenames = fs.readdirSync(directory) 189 | let results = [] 190 | filenames.forEach(filename => { 191 | const filepath = join(directory, filename) 192 | const stat = fs.statSync(filepath) 193 | if (stat && stat.isDirectory()) { 194 | results = results.concat(recursivelyListDirectory(filepath)) 195 | } else { 196 | results.push([filename, filepath]) 197 | } 198 | }) 199 | return results 200 | } 201 | 202 | function loadSchema (filepath) { 203 | return JSON.parse(fs.readFileSync(filepath)) 204 | } 205 | 206 | function loadSchemas (schemaDirectory) { 207 | const filepaths = recursivelyListDirectory(schemaDirectory) 208 | const schemas = {} 209 | filepaths.forEach(fpair => { 210 | let filename = fpair[0] 211 | let filepath = fpair[1] 212 | if (filepath.endsWith('.json')) { 213 | const schema = loadSchema(filepath) 214 | schemas[filename] = schema 215 | } 216 | }) 217 | return schemas 218 | } 219 | 220 | function render (schema, schemas) { 221 | return formatTable(generateRowsForSchema(schema, [], schemas || {}, true)) 222 | } 223 | 224 | function renderFromPaths (schemaPath, schemasPath) { 225 | return render(loadSchema(schemaPath), loadSchemas(schemasPath)) 226 | } 227 | 228 | module.exports.render = render 229 | module.exports.renderFromPaths = renderFromPaths 230 | -------------------------------------------------------------------------------- /scripts/publish_web.js: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | 'use strict' 4 | 5 | const path = require('path') 6 | const exec = require('child_process').execSync 7 | 8 | let cwd = path.resolve(__dirname, '..') 9 | 10 | // Get current web branch 11 | console.log('\n# Cloning web branch') 12 | exec('rm -rf web', { cwd }) 13 | exec('git clone git@github.com:interledgerjs/five-bells-ledger.git --branch gh-pages --single-branch web', { cwd }) 14 | 15 | // Update apidoc 16 | console.log('\n# Updating API docs') 17 | exec('npm run docs', { cwd }) 18 | exec('mkdir -p web/apidoc', { cwd }) 19 | exec('cp -r apidoc-out/* web/apidoc/', { cwd }) 20 | 21 | // Update apidoc-template 22 | console.log('\n# Updating API doc template') 23 | exec('wget https://github.com/interledger/apidoc-template/archive/master.tar.gz -O - | tar xzf - --strip 1 -C web/apidoc', { cwd }) 24 | exec('rm web/apidoc/.gitignore') 25 | 26 | // Push changes 27 | console.log('\n# Pushing web branch') 28 | cwd = path.resolve(cwd, 'web') 29 | exec('cd web') 30 | exec('git add --all', { cwd }) 31 | 32 | const status = exec('git status --porcelain', { cwd }).toString('utf8') 33 | if (!status.length) { 34 | console.log('no changes') 35 | } else { 36 | console.log(status) 37 | exec('git commit -m \'chore: update gh-pages\'', { cwd }) 38 | exec('git push', { cwd }) 39 | } 40 | -------------------------------------------------------------------------------- /src/app.js: -------------------------------------------------------------------------------- 1 | /* @flow */ 2 | 'use strict' 3 | 4 | // SIGINT does not cleanup the DB connection pool 5 | // so we have an exitHandler to clean up the database connection 6 | // See: https://github.com/tgriesser/bookshelf/issues/405 7 | function exitHandler (error) { 8 | if (error) { 9 | console.error(error.stack) 10 | process.exit(1) 11 | } 12 | process.exit(0) 13 | } 14 | 15 | module.exports = require('./lib/app') 16 | 17 | if 
(!module.parent) { 18 | process.on('SIGINT', exitHandler) 19 | require('./services/app').start() 20 | } 21 | -------------------------------------------------------------------------------- /src/controllers/_apidoc.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @apiDefine InvalidUriParameterError 3 | * 4 | * @apiError InvalidUriParameterError (One of) the provided URI parameter(s) 5 | * was invalid. 6 | * 7 | * @apiErrorExample InvalidUriParameterError 8 | * HTTP/1.1 400 Bad Request 9 | * { 10 | * "id": "InvalidUriParameterError", 11 | * "message": "Error description here.", 12 | * "validationErrors": [ ... ] 13 | * } 14 | */ 15 | 16 | /** 17 | * @apiDefine InvalidBodyError 18 | * 19 | * @apiError InvalidBodyError The submitted JSON entity does not match the 20 | * required schema. 21 | * 22 | * @apiErrorExample InvalidBodyError 23 | * HTTP/1.1 400 Bad Request 24 | * { 25 | * "id": "InvalidBodyError", 26 | * "message": "Error description here.", 27 | * "validationErrors": [ ... ] 28 | * } 29 | */ 30 | 31 | /** 32 | * @apiDefine NotFoundError 33 | * 34 | * @apiError NotFoundError The requested resource could not be found. 35 | * 36 | * @apiErrorExample NotFoundError 37 | * HTTP/1.1 404 Not Found 38 | * { 39 | * "id": "NotFoundError", 40 | * "message": "Error description here." 41 | * } 42 | */ 43 | 44 | /** 45 | * @apiDefine UnprocessableEntityError 46 | * 47 | * @apiError UnprocessableEntityError The provided entity is syntactically 48 | * correct, but there is a generic semantic problem with it. 49 | * 50 | * @apiErrorExample UnprocessableEntityError 51 | * HTTP/1.1 422 Unprocessable Entity 52 | * { 53 | * "id": "UnprocessableEntityError", 54 | * "message": "Error description here." 55 | * } 56 | */ 57 | 58 | /** 59 | * @apiDefine InsufficientFundsError 60 | * 61 | * @apiError InsufficientFundsError The source account does not have sufficient 62 | * funds to satisfy the request. 
63 | * 64 | * @apiErrorExample InsufficientFundsError 65 | * HTTP/1.1 422 Unprocessable Entity 66 | * { 67 | * "id": "InsufficientFundsError", 68 | * "message": "Error description here.", 69 | * "owner": "bob" 70 | * } 71 | */ 72 | 73 | /** 74 | * @apiDefine AlreadyExistsError 75 | * 76 | * @apiError AlreadyExistsError The specified entity already exists and may not 77 | * be modified. 78 | * 79 | * @apiErrorExample AlreadyExistsError 80 | * HTTP/1.1 422 Unprocessable Entity 81 | * { 82 | * "id": "AlreadyExistsError", 83 | * "message": "Error description here." 84 | * } 85 | */ 86 | 87 | /** 88 | * @apiDefine UnauthorizedError 89 | * 90 | * @apiError UnauthorizedError You do not have permissions to access this resource. 91 | * 92 | * @apiErrorExample UnauthorizedError 93 | * HTTP/1.1 403 Forbidden 94 | * { 95 | * "id": "UnauthorizedError", 96 | * "message": "Error description here." 97 | * } 98 | */ 99 | 100 | /** 101 | * @apiDefine UnmetConditionError 102 | * 103 | * @apiError UnmetConditionError Execution Condition Not Met 104 | * 105 | * @apiErrorExample UnmetConditionError 106 | * HTTP/1.1 422 Unprocessable Entity 107 | * { 108 | * "id": "UnmetConditionError", 109 | * "message": "Error description here." 110 | * } 111 | */ 112 | 113 | /** 114 | * @apiDefine NoSubscriptionsError 115 | * 116 | * @apiError NoSubscriptionsError Destination account could not be reached 117 | * 118 | * @apiErrorExample NoSubscriptionsError 119 | * HTTP/1.1 422 Unprocessable Entity 120 | * { 121 | * "id": "NoSubscriptionsError", 122 | * "message": "Error description here." 
123 | * } 124 | */ 125 | -------------------------------------------------------------------------------- /src/controllers/authTokens.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const model = require('../models/authTokens') 4 | const config = require('../services/config') 5 | 6 | /** 7 | * @api {get} /auth_token Get Auth Token 8 | * @apiName GetAuthToken 9 | * @apiGroup Auth Tokens 10 | * @apiVersion 1.0.0 11 | * 12 | * @apiDescription Get a token that can be used to authenticate future requests. 13 | * 14 | * @apiExample {shell} Get Auth Token 15 | * curl -X GET -H "Authorization: Basic QWxhZGRpbjpPcGVuU2VzYW1l" 16 | * http://usd-ledger.example/auth_token 17 | * 18 | * @apiSuccessExample {json} 200 Token Response: 19 | * HTTP/1.1 200 OK 20 | * Content-Type: application/json 21 | * 22 | * { 23 | * "token": "9AtVZPN3t49Kx07stO813UHXv6pcES", 24 | * "token_max_age": 604800000 25 | * } 26 | * 27 | * @apiUse UnauthorizedError 28 | */ 29 | async function getAuthToken (ctx) { 30 | ctx.body = { 31 | token: await model.getAuthToken(ctx.state.user), 32 | token_max_age: config.authTokenMaxAge // milliseconds 33 | } 34 | } 35 | 36 | module.exports = { getAuthToken } 37 | -------------------------------------------------------------------------------- /src/controllers/health.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const model = require('../models/health') 4 | 5 | /** 6 | * @api {get} /health Get server health status 7 | * @apiName GetHealth 8 | * @apiGroup Health 9 | * @apiVersion 1.0.0 10 | * 11 | * @apiDescription This endpoint will perform a quick self-check to ensure the 12 | * server is still operating correctly. 13 | * 14 | * @apiIgnore For internal use. 
15 | */ 16 | /** 17 | * @returns {void} 18 | */ 19 | exports.getResource = async function health (ctx) { 20 | ctx.body = await model.getDbHealth() 21 | } 22 | -------------------------------------------------------------------------------- /src/controllers/messages.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const model = require('../models/messages') 4 | 5 | /** 6 | * @api {post} /messages Send Message 7 | * @apiName SendMessage 8 | * @apiGroup Message Methods 9 | * @apiVersion 1.0.0 10 | * 11 | * @apiDescription Send a message to another account. This is not a reliable delivery mechanism. 12 | * 13 | * @apiParam (Request Body) {Message} Object A [Message object](#message_object) to be 14 | * forwarded to the recipient. 15 | * 16 | * @apiExample {shell} Send a Message 17 | * curl -X POST -H "Content-Type: application/json" -d \ 18 | * '{ 19 | * "ledger": "http://usd-ledger.example", 20 | * "from": "http://usd-ledger.example/accounts/alice", 21 | * "to": "http://usd-ledger.example/accounts/bob", 22 | * "data": { "foo": "bar" } 23 | * }' \ 24 | * http://usd-ledger.example/messages 25 | * 26 | * @apiSuccessExample {json} 201 Message Accepted Response: 27 | * HTTP/1.1 201 CREATED 28 | * 29 | * @apiUse InvalidBodyError 30 | * @apiUse NoSubscriptionsError 31 | */ 32 | async function postMessage (ctx) { 33 | const message = ctx.body 34 | await model.sendMessage(message, ctx.state.user) 35 | ctx.status = 201 36 | } 37 | 38 | module.exports = { postMessage } 39 | -------------------------------------------------------------------------------- /src/controllers/metadata.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const accounts = require('../models/accounts') 4 | const version = require('../../package.json').version.split('.')[0] 5 | 6 | /** 7 | * @api {get} / Get Server Metadata 8 | * @apiName GetMetadata 9 | * @apiGroup Metadata Methods 10 | * 
@version 1.0.0 11 | * 12 | * @apiDescription This endpoint will return server metadata. 13 | * @apiExample {shell} Get Metadata 14 | * curl http://usd-ledger.example/ 15 | * 16 | * @apiSuccess (200 OK) {String} [currency_code] Three-letter ([ISO 4217](http://www.xe.com/iso4217.php)) code of the currency this ledger tracks. 17 | * @apiSuccess (200 OK) {String} [currency_symbol] Currency symbol to use in user interfaces for the currency represented in this ledger. For example, "$". 18 | * @apiSuccess (200 OK) {Integer} precision How many total decimal digits of precision this ledger uses to represent currency amounts. 19 | * @apiSuccess (200 OK) {Integer} scale How many digits after the decimal place this ledger supports in currency amounts. 20 | * @apiSuccess (200 OK) {Object} urls Paths to other methods exposed by this ledger 21 | * @apiSuccess (200 OK) {(Various)} ... The ledger may report additional arbitrary parameters as desired. 22 | * 23 | * @apiSuccessExample {json} 200 OK 24 | * HTTP/1.1 200 OK 25 | * 26 | * { 27 | * "currency_code": null, 28 | * "currency_symbol": null, 29 | * "condition_sign_public_key": "YNDefwo4LB+AjkCRzuCSGuAlDLvSCWUxPRX7lXLhV1I=", 30 | * "urls": { 31 | * "health": "http://usd-ledger.example/health", 32 | * "transfer": "http://usd-ledger.example/transfers/:id", 33 | * "transfer_fulfillment": "http://usd-ledger.example/transfers/:id/fulfillment", 34 | * "transfer_rejection": "http://usd-ledger.example/transfers/:id/rejection", 35 | * "transfer_state": "http://usd-ledger.example/transfers/:id/state", 36 | * "accounts": "http://usd-ledger.example/accounts", 37 | * "account": "http://usd-ledger.example/accounts/:name", 38 | * "auth_token": "http://usd-ledger.example/auth_token", 39 | * "websocket": "ws://usd-ledger.example/websocket", 40 | * "message": "http://usd-ledger.example/messages" 41 | * }, 42 | * "precision": 10, 43 | * "scale": 2, 44 | * "connectors": [ 45 | * { 46 | * "id": "http://usd-ledger.example/accounts/chloe", 47 | * 
"name": "chloe" 48 | * } 49 | * ] 50 | * } 51 | */ 52 | /* 53 | * @returns {void} 54 | */ 55 | module.exports = (config) => { 56 | const base = config.getIn(['server', 'base_uri']) 57 | const metadata = { 58 | currency_code: config.getIn(['currency', 'code']), 59 | currency_symbol: config.getIn(['currency', 'symbol']), 60 | ilp_prefix: config.getIn(['ilp', 'prefix']), 61 | condition_sign_public_key: config.getIn(['keys', 'ed25519', 'public']), 62 | urls: { 63 | health: base + '/health', 64 | transfer: base + '/transfers/:id', 65 | transfer_fulfillment: base + '/transfers/:id/fulfillment', 66 | transfer_fulfillment2: base + '/transfers/:id/fulfillment2', 67 | transfer_rejection: base + '/transfers/:id/rejection', 68 | transfer_state: base + '/transfers/:id/state', 69 | accounts: base + '/accounts', 70 | account: base + '/accounts/:name', 71 | auth_token: base + '/auth_token', 72 | websocket: base.replace(/^http/, 'ws') + '/websocket', 73 | message: base + '/messages' 74 | }, 75 | precision: config.get('amount.precision'), 76 | scale: config.get('amount.scale') 77 | } 78 | 79 | try { 80 | metadata.version = `five-bells@${version}` 81 | } catch (e) { 82 | } 83 | 84 | return { 85 | getResource: async function (ctx) { 86 | ctx.body = Object.assign({ 87 | connectors: await accounts.getConnectors(config) 88 | }, metadata) 89 | } 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /src/errors/already-rolled-back-error.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const ExtensibleError = require('extensible-error') 4 | 5 | class AlreadyRolledBackError extends ExtensibleError { 6 | constructor (message) { 7 | super(message) 8 | 9 | this.status = 422 10 | } 11 | } 12 | 13 | module.exports = AlreadyRolledBackError 14 | -------------------------------------------------------------------------------- /src/errors/expired-transfer-error.js: 
-------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const ExtensibleError = require('extensible-error') 4 | 5 | class ExpiredTransferError extends ExtensibleError { 6 | constructor (message, accountIdentifier) { 7 | super(message) 8 | 9 | this.status = 422 10 | this.accountIdentifier = accountIdentifier 11 | } 12 | } 13 | 14 | module.exports = ExpiredTransferError 15 | -------------------------------------------------------------------------------- /src/errors/insufficient-funds-error.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const ExtensibleError = require('extensible-error') 4 | 5 | class InsufficientFundsError extends ExtensibleError { 6 | constructor (message, accountIdentifier) { 7 | super(message) 8 | 9 | this.status = 422 10 | this.accountIdentifier = accountIdentifier 11 | } 12 | } 13 | 14 | module.exports = InsufficientFundsError 15 | -------------------------------------------------------------------------------- /src/errors/invalid-body-error.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const ExtensibleError = require('extensible-error') 4 | 5 | class InvalidBodyError extends ExtensibleError { 6 | constructor (message, validationErrors) { 7 | super(message) 8 | 9 | this.status = 400 10 | this.validationErrors = validationErrors 11 | } 12 | 13 | debugPrint (log, validationError, indent) { 14 | if (!validationError) { 15 | if (this.validationErrors) { 16 | for (let ve of this.validationErrors) { 17 | this.debugPrint(log, ve) 18 | } 19 | } else { 20 | return 21 | } 22 | } 23 | 24 | indent = indent || '' 25 | log.debug(indent + '-- ' + validationError) 26 | 27 | // For additionalProperties errors we want to show the name of the property 28 | // that violated the constraint. 
29 | if (validationError.code === 303) { 30 | log.debug(indent + ' ' + validationError.dataPath) 31 | } else { 32 | log.debug(indent + ' ' + validationError.schemaPath) 33 | } 34 | 35 | if (validationError.subErrors) { 36 | validationError.subErrors.forEach((subError) => { 37 | this.debugPrint(log, subError, ' ' + indent) 38 | }) 39 | } 40 | } 41 | } 42 | 43 | module.exports = InvalidBodyError 44 | -------------------------------------------------------------------------------- /src/errors/invalid-modification-error.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const ExtensibleError = require('extensible-error') 4 | 5 | class InvalidModificationError extends ExtensibleError { 6 | constructor (message, invalidDiffs) { 7 | super(message) 8 | 9 | this.status = 400 10 | this.invalidDiffs = invalidDiffs 11 | } 12 | 13 | formatDiff (diff) { 14 | if (typeof diff !== 'object') { 15 | return JSON.stringify(diff) 16 | } 17 | 18 | const path = diff.path ? 
' `' + diff.path.join('.') + '`' : '' 19 | switch (diff.kind) { 20 | case 'N': 21 | return 'added' + path + ', value: ' + JSON.stringify(diff.rhs) 22 | case 'D': 23 | return 'deleted' + path + ', was: ' + JSON.stringify(diff.lhs) 24 | case 'E': 25 | return 'changed' + path + ' from: ' + JSON.stringify(diff.lhs) + 26 | ' to: ' + JSON.stringify(diff.rhs) 27 | case 'A': 28 | return 'array' + path + ', index ' + diff.index + 29 | ' ' + this.formatDiff(diff.item) 30 | default: 31 | return JSON.stringify(diff) 32 | } 33 | } 34 | 35 | debugPrint (log) { 36 | for (let diff of this.invalidDiffs) { 37 | log.debug(' -- ' + this.formatDiff(diff)) 38 | } 39 | } 40 | } 41 | 42 | module.exports = InvalidModificationError 43 | -------------------------------------------------------------------------------- /src/errors/missing-fulfillment-error.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const ExtensibleError = require('extensible-error') 4 | 5 | class MissingFulfillmentError extends ExtensibleError { 6 | constructor (message) { 7 | super(message) 8 | 9 | this.status = 404 10 | } 11 | } 12 | 13 | module.exports = MissingFulfillmentError 14 | -------------------------------------------------------------------------------- /src/errors/no-subscriptions-error.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const ExtensibleError = require('extensible-error') 4 | 5 | class NoSubscriptionsError extends ExtensibleError { 6 | constructor (message) { 7 | super(message) 8 | 9 | this.status = 422 10 | } 11 | } 12 | 13 | module.exports = NoSubscriptionsError 14 | -------------------------------------------------------------------------------- /src/errors/transfer-not-conditional-error.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const ExtensibleError = require('extensible-error') 4 | 5 | class 
TransferNotConditionalError extends ExtensibleError { 6 | constructor (message) { 7 | super(message) 8 | 9 | this.status = 422 10 | } 11 | } 12 | 13 | module.exports = TransferNotConditionalError 14 | -------------------------------------------------------------------------------- /src/errors/transfer-not-found-error.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const ExtensibleError = require('extensible-error') 4 | 5 | class TransferNotFoundError extends ExtensibleError { 6 | constructor (message) { 7 | super(message) 8 | 9 | this.status = 404 10 | } 11 | } 12 | 13 | module.exports = TransferNotFoundError 14 | -------------------------------------------------------------------------------- /src/errors/unmet-condition-error.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const ExtensibleError = require('extensible-error') 4 | 5 | class UnmetConditionError extends ExtensibleError { 6 | constructor (message) { 7 | super(message) 8 | 9 | this.status = 422 10 | } 11 | } 12 | 13 | module.exports = UnmetConditionError 14 | -------------------------------------------------------------------------------- /src/lib/config.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const _ = require('lodash') 4 | const Config = require('five-bells-shared').Config 5 | const envPrefix = 'ledger' 6 | const log = require('../services/log').create('config') 7 | 8 | function isRunningTests () { 9 | return process.argv[0].endsWith('mocha') || 10 | (process.argv.length > 1 && process.argv[0].endsWith('node') && 11 | process.argv[1].endsWith('mocha')) 12 | } 13 | 14 | function useTestConfig () { 15 | return !Config.castBool(process.env.UNIT_TEST_OVERRIDE) && isRunningTests() 16 | } 17 | 18 | function parseAmountConfig () { 19 | return { 20 | precision: parseInt(Config.getEnv(envPrefix, 'AMOUNT_PRECISION'), 10) 
|| 19, 21 | scale: parseInt(Config.getEnv(envPrefix, 'AMOUNT_SCALE'), 10) || 9 22 | } 23 | } 24 | 25 | function parseCurrencyConfig () { 26 | return { 27 | code: Config.getEnv(envPrefix, 'CURRENCY_CODE') || null, 28 | symbol: Config.getEnv(envPrefix, 'CURRENCY_SYMBOL') || null 29 | } 30 | } 31 | 32 | function parseIlpConfig () { 33 | return { 34 | prefix: Config.getEnv(envPrefix, 'ILP_PREFIX') || null 35 | } 36 | } 37 | 38 | function parseAdminConfig () { 39 | const adminUser = Config.getEnv(envPrefix, 'ADMIN_USER') || 'admin' 40 | const adminPass = Config.getEnv(envPrefix, 'ADMIN_PASS') 41 | const adminFingerprint = Config.getEnv(envPrefix, 'ADMIN_TLS_FINGERPRINT') 42 | 43 | if (adminPass || adminFingerprint) { 44 | return _.omitBy({ 45 | user: adminUser, 46 | pass: adminPass, 47 | fingerprint: adminFingerprint 48 | }, _.isUndefined) 49 | } 50 | } 51 | 52 | function parseFeaturesConfig () { 53 | return { 54 | hasCreditAuth: Config.castBool(Config.getEnv(envPrefix, 'FEATURE_CREDIT_AUTH')) 55 | } 56 | } 57 | 58 | function parseKeysConfig () { 59 | if (useTestConfig()) { 60 | return { 61 | ed25519: { 62 | secret: 'lu+43o/0NUeF5iJTHXQQY6eqMaY06Xx6G1ABc6q1UQk=', 63 | public: 'YXg177AOkDlGGrBaoSET+UrMscbHGwFXHqfUMBZTtCY=' 64 | } 65 | } 66 | } else { 67 | return {} 68 | } 69 | } 70 | 71 | function parseRecommendedConnectors () { 72 | const connectorList = Config.getEnv(envPrefix, 'RECOMMENDED_CONNECTORS') 73 | if (!connectorList) return [] 74 | if (connectorList === '*') { 75 | log.warn('DEPRECATED: Ledger no longer supports autodetecting recommended connectors') 76 | return [] 77 | } 78 | return connectorList.split(',') 79 | } 80 | 81 | function getLogLevel () { 82 | if (useTestConfig()) { 83 | return 'debug' 84 | } else { 85 | // https://github.com/trentm/node-bunyan#levels 86 | return Config.getEnv(envPrefix, 'LOG_LEVEL') || 'info' 87 | } 88 | } 89 | 90 | function parseWebsocketConfig () { 91 | const intervalSeconds = parseInt(Config.getEnv(envPrefix, 
'WEBSOCKET_PING_INTERVAL'), 10) || 20 92 | return { 93 | pingInterval: intervalSeconds * 1000 94 | } 95 | } 96 | 97 | function isEmpty (value) { 98 | return _.isEmpty(value) && typeof value !== 'number' 99 | } 100 | 101 | function loadConfig () { 102 | const localConfig = {} 103 | 104 | localConfig.maxHttpPayload = '64kb' 105 | localConfig.features = parseFeaturesConfig() 106 | localConfig.amount = parseAmountConfig() 107 | localConfig.default_admin = parseAdminConfig() 108 | localConfig.ilp = parseIlpConfig() 109 | localConfig.recommendedConnectors = parseRecommendedConnectors() 110 | localConfig.logLevel = getLogLevel() 111 | localConfig.authTokenSecret = Config.generateSecret(envPrefix, 'authToken') 112 | localConfig.authTokenMaxAge = 7 * 24 * 60 * 60 * 1000 // 7 days in milliseconds 113 | localConfig.websocket = parseWebsocketConfig() 114 | 115 | // optional 116 | localConfig.currency = parseCurrencyConfig() 117 | localConfig.keys = parseKeysConfig() 118 | 119 | const config = Config.loadConfig(envPrefix, _.omitBy(localConfig, isEmpty)) 120 | return config 121 | } 122 | 123 | module.exports = loadConfig 124 | -------------------------------------------------------------------------------- /src/lib/db.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const _ = require('lodash') 4 | const fs = require('fs') 5 | const path = require('path') 6 | const assert = require('assert') 7 | const connection = require('./knex').config.connection 8 | const spawn = require('child_process').spawn 9 | const knex = require('./knex').knex 10 | const promiseRetry = require('promise-retry') 11 | const sequence = require('./utils').sequence 12 | const readRejectionReasons = require('../models/db/rejectionReasons') 13 | .readRejectionReasons 14 | const readTransferStatuses = require('../models/db/transferStatuses') 15 | .readTransferStatuses 16 | const config = require('../services/config') 17 | const pgMigratorPackageJson = 
require('pg-migrator/package.json') 18 | const pgMigratorPackageJsonPath = require.resolve('pg-migrator/package.json') 19 | const pgMigratorBinPath = path.resolve( 20 | path.dirname(pgMigratorPackageJsonPath), 21 | pgMigratorPackageJson.bin['pg-migrator'] 22 | ) 23 | const sqlDir = path.resolve(__dirname, '..', 'sql') 24 | const log = require('../services/log').create('db') 25 | 26 | const TABLE_NAMES = [ 27 | 'L_TRANSFER_ADJUSTMENTS', 28 | 'L_ACCOUNTS', 29 | 'L_FULFILLMENTS', 30 | 'L_ENTRIES', 31 | 'L_TRANSFERS', 32 | 'L_LU_REJECTION_REASON', 33 | 'L_LU_TRANSFER_STATUS' 34 | ] 35 | 36 | const DEFAULT_DB_RETRIES = 5 37 | 38 | const withTransaction = knex.transaction.bind(knex) 39 | 40 | function withSerializableTransaction (callback, retries = DEFAULT_DB_RETRIES) { 41 | const dbType = knex.client.config.client 42 | return promiseRetry(function (retry, attemptNo) { 43 | return withTransaction(async function (transaction) { 44 | // Set isolation level to avoid reading "prepared" transaction that is currently being 45 | // executed by another request. This ensures the transfer can be fulfilled only once. 
46 | assert(_.includes(['sqlite3', 'pg', 'mysql'], dbType), 47 | 'A valid client must be specified on the db object') 48 | if (dbType === 'pg') { 49 | await transaction.raw('SET TRANSACTION ISOLATION LEVEL SERIALIZABLE') 50 | } 51 | 52 | return callback(transaction) 53 | }).catch(err => { 54 | // 40001 is a postgres error code meaning the database could not complete the transaction 55 | // because this would interfere with other concurrent transactions 56 | if (err.code === '40001') { 57 | log.debug('retrying database query', `${attemptNo}/${retries}`, err) 58 | err.isDbRetry = true 59 | return retry(err) 60 | } 61 | throw err 62 | }) 63 | }, { 64 | minTimeout: 10, // milliseconds 65 | factor: 1.2, // consecutive retries have an increasing timeout 66 | retries, 67 | randomize: true // randomizing the duration after which the retry occurs helps if a batch 68 | // of transfers/fulfillments are submitted at the same time. Since the 69 | // retries do not happen all at the same time, they have a higher likelihood 70 | // of succeeding. 71 | }) 72 | } 73 | 74 | function executeStatements (sql) { 75 | const separator = ';\n' 76 | const statements = sql.split(separator) 77 | return sequence(statements.map((statement) => { 78 | const line = statement.replace(/\n$/, '') 79 | return line ? 
knex.raw(line) : Promise.resolve() 80 | })) 81 | } 82 | 83 | function executePSQL (sqlFilepath) { 84 | return new Promise((resolve, reject) => { 85 | const command = 'psql' 86 | const args = [ 87 | '--quiet', 88 | '--host=' + connection.host, 89 | '--port=' + (connection.port || 5432), 90 | '--dbname=' + connection.database, 91 | '--file=' + path.resolve(sqlFilepath), 92 | '--set=ON_ERROR_STOP=1' 93 | ] 94 | 95 | if (connection.user) { 96 | args.push('--username=' + connection.user) 97 | } 98 | 99 | const env = { 100 | PATH: process.env.PATH, 101 | PGPASSWORD: connection.password 102 | } 103 | const childProcess = spawn(command, args, {env}) 104 | childProcess.on('close', (code) => { 105 | return code === 0 ? resolve() : reject( 106 | new Error('psql exited with code ' + code)) 107 | }) 108 | childProcess.on('error', reject) 109 | }) 110 | } 111 | 112 | function executeScript (filename) { 113 | const dbType = knex.client.config.client 114 | const filepath = path.resolve(sqlDir, dbType, filename) 115 | 116 | if (dbType === 'pg') { 117 | return executePSQL(filepath) 118 | } else { 119 | const sql = fs.readFileSync(filepath, {encoding: 'utf8'}) 120 | return executeStatements(sql) 121 | } 122 | } 123 | 124 | async function createTables () { 125 | if (knex.client.config.client === 'pg') { 126 | await migratePostgres() 127 | } else { 128 | await executeScript('create.sql') 129 | } 130 | } 131 | 132 | async function dropTables () { 133 | if (knex.client.config.client === 'pg') { 134 | await migratePostgres('1') 135 | } else { 136 | await executeScript('drop.sql') 137 | } 138 | } 139 | 140 | function migratePostgres (step) { 141 | return new Promise((resolve, reject) => { 142 | const args = [config.db.uri] 143 | if (step) args.push(step) 144 | const childProcess = spawn(pgMigratorBinPath, args, {cwd: path.resolve(sqlDir, 'pg')}) 145 | let error = '' 146 | childProcess.on('error', reject) 147 | childProcess.stderr.on('data', (data) => { error += data.toString() }) 148 | 
/**
 * Ensure every account referenced by a transfer exists and is enabled.
 *
 * @param {Object} transaction - Database transaction the lookups run in.
 * @param {Object} transfer - Transfer whose debit/credit accounts are checked.
 * @throws {HttpErrors.UnprocessableEntity} When a referenced account is
 *   missing or disabled.
 */
async function validateNoDisabledAccounts (transaction, transfer) {
  // Each account only needs to be checked once, even if it shows up in
  // several debits/credits. Set preserves insertion order like _.uniq did.
  const accountNames = new Set()
  for (const funds of transfer.debits.concat(transfer.credits)) {
    accountNames.add(funds.account)
  }

  for (const name of accountNames) {
    const record = await getAccount(name, { transaction })
    if (record === null) {
      throw new HttpErrors.UnprocessableEntity('Account `' + name + '` does not exist.')
    }
    if (record.is_disabled) {
      throw new HttpErrors.UnprocessableEntity('Account `' + name + '` is disabled.')
    }
  }
}
/**
 * Convert a database URI into a knex connection object.
 *
 * Supports `sqlite://<filename>` plus standard
 * `scheme://user:pass@host:port/database` URIs (postgres, mysql).
 *
 * @param {String} uri - Database URI; falsy values yield `undefined`.
 * @returns {Object|undefined} knex-compatible connection settings.
 */
function parseKnexConnection (uri) {
  if (!uri) {
    return undefined
  }
  if (uri.startsWith('sqlite://')) {
    return {filename: uri.slice(9)}
  }
  const parsed = url.parse(uri)
  let user
  let password
  if (parsed.auth) {
    // Split on the *first* colon only, so passwords containing ':' survive.
    // (The previous `parsed.auth.split(':')` truncated such passwords.)
    const separator = parsed.auth.indexOf(':')
    if (separator === -1) {
      user = parsed.auth
    } else {
      user = parsed.auth.slice(0, separator)
      password = parsed.auth.slice(separator + 1)
    }
  }
  // NOTE(review): legacy url.parse leaves `auth` percent-encoded; confirm
  // whether credentials in LEDGER_DB_URI may be URL-encoded and need decoding.
  return {
    host: parsed.hostname,
    port: parsed.port,
    user,
    password,
    database: parsed.pathname ? parsed.pathname.slice(1) : undefined
  }
}
/**
 * Fans out ledger events (transfer state changes, messages) to listeners
 * registered per account name and event type.
 */
class NotificationBroadcaster {
  constructor (log) {
    this.log = log

    // Nested lookup of subscribers:
    //   { account → { type → Set[listener] } }
    // The special account name '*' receives events for every account.
    this.listeners = new Map()
  }

  /**
   * Notify every account involved in a transfer about its new state.
   * If the transfer was finalized by a fulfillment, that fulfillment is
   * attached as `related_resources`.
   *
   * @param {Object} transfer - Internal transfer record.
   * @param {Object} transaction - Database transaction for the fulfillment lookup.
   */
  async sendNotifications (transfer, transaction) {
    const affectedAccounts = _([transfer.debits, transfer.credits])
      .flatten().map('account').uniq().value()

    let relatedResources
    // If the transfer is finalized, see if it was finalized by a fulfillment
    if (isTransferFinalized(transfer)) {
      const fulfillment = await maybeGetFulfillment(transfer.id, { transaction })

      if (fulfillment) {
        const externalFulfillment = convertToExternalFulfillment(fulfillment)
        if (transfer.state === transferStates.TRANSFER_STATE_EXECUTED) {
          relatedResources = {
            execution_condition_fulfillment:
              externalFulfillment.condition_fulfillment,
            fulfillment_data: externalFulfillment.fulfillment_data
          }
        } else if (transfer.state === transferStates.TRANSFER_STATE_REJECTED) {
          relatedResources = {
            cancellation_condition_fulfillment:
              externalFulfillment.condition_fulfillment,
            fulfillment_data: externalFulfillment.fulfillment_data
          }
        }
      }
    }

    // A freshly prepared transfer is a "create"; everything else is an update.
    const eventName = transfer.state === transferStates.TRANSFER_STATE_PREPARED
      ? 'transfer.create' : 'transfer.update'
    await this.emitNotification(affectedAccounts, eventName,
      convertToExternalTransfer(transfer), relatedResources)
  }

  /**
   * Deliver a point-to-point message to one account's listeners.
   *
   * @returns {Promise<Boolean>} Whether any listener received it.
   */
  sendMessage (destinationAccount, message) {
    return this.emitNotification([destinationAccount], 'message.send', message)
  }

  /**
   * Invoke every listener whose (account, type) subscription matches.
   *
   * @param {String[]} affectedAccounts - Account names the event concerns.
   * @param {String} eventType - Concrete event name, e.g. 'transfer.create'.
   * @param {Object} resource - Event payload.
   * @param {Object} [relatedResources] - Optional extra resources.
   * @returns {Boolean} True when at least one listener was notified.
   */
  async emitNotification (affectedAccounts, eventType, resource, relatedResources) {
    // Always notify global listeners - as identified by the special "*" account name
    affectedAccounts = affectedAccounts.concat('*')

    // A listener registered for '*' or a wildcard prefix such as 'transfer.*'
    // must also fire for 'transfer.create', so collect all matching
    // subscription types.
    const eventTypes = ['*', eventType]
    const eventParts = eventType.split('.')
    for (let i = 1; i < eventParts.length; i++) {
      eventTypes.push(eventParts.slice(0, i).join('.') + '.*')
    }

    const notification = { event: eventType, resource }
    if (relatedResources) notification.related_resources = relatedResources

    this.log.debug('emitting notification:{' + affectedAccounts.join(',') + '}:' + eventType)

    // Deduplicate so a listener matched via several account/type combinations
    // is only invoked once per notification.
    const selectedListeners = new Set()
    for (const account of affectedAccounts) {
      const accountListeners = this.listeners.get(account)
      if (!accountListeners) continue

      // Fix: this loop variable used to be named `eventType`, shadowing the
      // parameter above; renamed for clarity.
      for (const subscribedType of eventTypes) {
        const typeListeners = accountListeners.get(subscribedType)
        if (!typeListeners) continue

        for (const listener of typeListeners) {
          selectedListeners.add(listener)
        }
      }
    }

    for (const listener of selectedListeners) {
      listener(notification)
    }

    return !!selectedListeners.size
  }

  /**
   * Register `listener` for events of `eventType` on `accountName`.
   * '*' is valid for both arguments (all accounts / all types).
   */
  addNotificationListener (accountName, eventType, listener) {
    let accountListeners = this.listeners.get(accountName)
    if (!accountListeners) {
      accountListeners = new Map()
      this.listeners.set(accountName, accountListeners)
    }

    let typeListeners = accountListeners.get(eventType)
    if (!typeListeners) {
      typeListeners = new Set()
      accountListeners.set(eventType, typeListeners)
    }

    typeListeners.add(listener)
  }

  /**
   * Unregister a listener, pruning now-empty Maps/Sets so the listener table
   * does not leak entries for idle accounts.
   */
  removeNotificationListener (accountName, eventType, listener) {
    const accountListeners = this.listeners.get(accountName)
    if (!accountListeners) return

    const typeListeners = accountListeners.get(eventType)
    if (!typeListeners) return

    typeListeners.delete(listener)

    if (!typeListeners.size) {
      accountListeners.delete(eventType)

      if (!accountListeners.size) {
        this.listeners.delete(accountName)
      }
    }
  }
}
  /**
   * Replace this connection's account subscriptions with a new set.
   *
   * Non-admin users may only subscribe to their own account; admins may
   * subscribe to any account.
   *
   * @param {String} eventType - Event type to listen for (e.g. 'transfer.*').
   * @param {URI[]} accounts - Account URIs to subscribe to.
   * @returns {Number} Number of requested account subscriptions.
   * @throws {RpcError} INVALID_PARAMS / INVALID_ACCOUNT_NAME / UNAUTHORIZED.
   */
  subscribeAccount (eventType, accounts) {
    if (typeof eventType !== 'string' || !Array.isArray(accounts)) {
      throw new RpcError(errors.INVALID_PARAMS, 'Invalid params')
    }

    // Convert URIs to lowercase account names; throws on malformed URIs.
    const accountNames = accounts.map(this._accountToName, this)
    this._validateAccountNames(accountNames)

    // Clear the old subscriptions.
    this.removeAccountSubscriptions()

    for (const accountName of accountNames) {
      this.log.info('new ws subscriber for ' + accountName + ':' + eventType)
      this.notificationBroadcaster.addNotificationListener(accountName, eventType, this.sendNotification)
      this.accountSubscriptions.push({ accountName, eventType })
    }

    // Updated number of active account subscriptions on this WebSocket connection.
    // NOTE(review): this counts the *requested* accounts; duplicates in the
    // input are subscribed and counted individually — confirm callers expect
    // that rather than a deduplicated count.
    return accounts.length
  }
/**
 * Error type for JSON-RPC failures sent over the websocket.
 *
 * `code` is one of the numeric codes in `errors`; `data` is attached to the
 * `error.data` field of the JSON-RPC response built in `handleMessage`.
 */
class RpcError extends ExtensibleError {
  constructor (code, message, data) {
    super(message)
    // `|| {}` rather than a default parameter so an explicit null/falsy value
    // also normalizes to an empty object.
    this.data = data || {}
    this.code = code
  }
}
/**
 * Priority queue of (date, item) pairs, ordered by earliest due date.
 *
 * Mixes in co-emitter: an 'insert' event fires for every queued item so the
 * timer worker (started in app.js) can reschedule itself.
 */
class TimeQueue {
  constructor () {
    // priorityqueuejs pops the highest-priority element, so compare reversed
    // to make the earliest date come out first.
    this._queue = new PriorityQueue(function (a, b) {
      return b.date - a.date
    })
    emitter(this)
  }

  /**
   * Queue `item` to come due at `date`.
   *
   * @param {String|Number|Date} date - When the item is due (moment-parsable).
   * @param {*} item - Payload, typically a transfer id.
   */
  async insert (date, item) {
    const dateValue = moment(date).valueOf()
    this._queue.enq({
      date: dateValue,
      item: item
    })

    // This event is used by the worker started in app.js
    await this.emit('insert', dateValue, item)
  }

  /**
   * @returns {Number|null} Epoch ms of the earliest queued item, or null when
   *   the queue is empty.
   */
  getEarliestDate () {
    if (this._queue.isEmpty()) {
      return null
    }
    return this._queue.peek().date
  }

  /**
   * Return all items that are due on or before the provided date.
   *
   * @param {Number} date Cutoff date
   * @return {Array} Queued items
   */
  popBeforeDate (date) {
    const dateValue = moment(date).valueOf()
    const arrayItems = []
    while (!this._queue.isEmpty() && this._queue.peek().date <= dateValue) {
      arrayItems.push(this._queue.deq().item)
    }
    return arrayItems
  }

  /** @returns {Boolean} Whether a deep-equal copy of `item` is queued. */
  includes (item) {
    return _.some(this._queue._elements, function (obj) {
      return _.isEqual(obj.item, item)
    })
  }

  /**
   * Remove every queued occurrence of `item`.
   *
   * Fix: now uses deep equality (`_.isEqual`) to stay consistent with
   * `includes()`. Previously this compared with `===`, so an item reported by
   * `includes()` could fail to be removed when an equal-but-not-identical
   * value was passed.
   */
  remove (item) {
    // NOTE(review): _.remove splices elements out of the queue's backing
    // array, which may violate priorityqueuejs's heap invariant. This is
    // pre-existing behavior (string transfer ids in practice) — confirm.
    _.remove(this._queue._elements, function (obj) {
      return _.isEqual(obj.item, item)
    })
  }
}
/**
 * Passport strategy that authenticates requests by bearer token.
 *
 * The token is taken from an `Authorization: Bearer <token>` header, falling
 * back to a `token` query parameter, and handed to the injected verify
 * callback.
 */
class TokenStrategy extends passport.Strategy {
  constructor (verify) {
    super()
    this.name = 'token'
    this._verify = verify
  }

  /**
   * Passport entry point: extract the token and verify it; fail with 401 when
   * no token is present or no matching user is found.
   */
  authenticate (req) {
    const token = this._extractToken(req)
    if (!token) {
      return this.fail(401)
    }

    this._verify(token, (err, user) => {
      if (err) {
        return this.error(err)
      }
      return user ? this.success(user) : this.fail(401)
    })
  }

  /**
   * Pull the bearer token out of the request.
   * NOTE(review): reads `req.header` (Koa-style) rather than `req.headers`
   * (plain Node/Express) — confirm the request object shape used here.
   */
  _extractToken (req) {
    // Prefer the Authorization header ("Bearer <token>") ...
    const authorization = req.header.authorization
    if (authorization) {
      const parts = authorization.split(' ')
      if (parts.length === 2 && parts[0] === 'Bearer') {
        return parts[1]
      }
    }

    // ... otherwise fall back to the query string.
    return req.query.token
  }
}
/**
 * Watches transfers that carry an expiry date and rejects them — returning
 * any held funds — once they expire.
 */
class TransferExpiryMonitor {
  constructor (timeQueue, notificationBroadcaster) {
    this.queue = timeQueue
    this.notificationBroadcaster = notificationBroadcaster
  }

  /**
   * Throw if the transfer's `expires_at` is already in the past.
   *
   * @param {Object} transfer
   * @returns {moment} The current time, so callers can reuse it.
   * @throws {ExpiredTransferError}
   */
  validateNotExpired (transfer) {
    const now = moment()
    if (transfer.expires_at &&
      now.isAfter(transfer.expires_at)) {
      throw new ExpiredTransferError('Cannot modify transfer ' +
        'after expires_at date')
    }
    return now
  }

  /**
   * Expire a single transfer inside a serializable transaction: return held
   * funds (if prepared), mark it rejected, persist and notify listeners.
   *
   * @param {String} transferId
   */
  async expireTransfer (transferId) {
    const _this = this

    await withSerializableTransaction(async function (transaction) {
      const transfer = await getTransfer(transferId, { transaction })

      if (!transfer) {
        log.error('trying to expire transfer that cannot be found ' +
          'in the database: ' + transferId)
        return
      }

      if (!isTransferFinalized(transfer)) {
        if (transfer.state === transferStates.TRANSFER_STATE_PREPARED) {
          // Return the money to the original senders.
          // Fix: this promise was previously not awaited, so the transaction
          // could proceed (and commit) before the balance adjustments and
          // entry inserts had completed.
          await holds.returnHeldFunds(transfer, transaction)
        }

        updateState(transfer, transferStates.TRANSFER_STATE_REJECTED)
        transfer.rejection_reason = 'expired'
        await updateTransfer(transfer, {transaction})

        log.debug('expired transfer: ' + transferId)

        await _this.notificationBroadcaster.sendNotifications(transfer, transaction)
      }
    }, DB_RETRIES) // retries if database is busy
  }

  /**
   * Start the expiry countdown for a transfer, or stop watching it when it
   * has already reached a final state.
   */
  async watch (transfer) {
    // Start the expiry countdown if we're not already watching it
    if (!this.queue.includes(transfer.id)) {
      const now = moment()
      const expiry = moment(transfer.expires_at)
      if (transfer.expires_at && now.isBefore(expiry)) {
        await this.queue.insert(expiry, transfer.id)

        log.debug('transfer ' + transfer.id +
          ' will expire in ' + expiry.diff(now, 'milliseconds') + 'ms')
      }
    } else if (transfer.state === transferStates.TRANSFER_STATE_EXECUTED ||
      transfer.state === transferStates.TRANSFER_STATE_REJECTED) {
      this.unwatch(transfer.id)
    }
  }

  /** Expire every queued transfer whose due date has passed. */
  async processExpiredTransfers () {
    log.debug('checking for transfers to expire')
    const transfersToExpire = this.queue.popBeforeDate(moment())
    for (const id of transfersToExpire) {
      await this.expireTransfer(id)
    }
  }

  /** Stop watching a transfer (e.g. once it is executed or rejected). */
  unwatch (transferId) {
    log.debug('unwatch transfer: ' + transferId)
    this.queue.remove(transferId)
  }
}
/**
 * Await an array of promises strictly one after another.
 *
 * Resolves to undefined once every promise has settled successfully; rejects
 * with the first rejection (later promises are then left unobserved, matching
 * the previous recursive implementation).
 *
 * @param {Promise[]} promises
 * @returns {Promise<undefined>}
 */
function sequence (promises) {
  return promises
    .reduce((chain, promise) => chain.then(() => promise), Promise.resolve())
    .then(() => undefined)
}
/**
 * Fetch one account, shaped according to who is asking.
 *
 * Anonymous requests never hit the database and only get public data
 * (id/name). Authenticated requests load the account; the full external
 * representation is returned only to the account owner or an admin,
 * otherwise just the public fields.
 *
 * @param {String} name - Account name.
 * @param {Object} [requestingUser] - Authenticated user, if any.
 * @returns {Object} External or public account data, plus the `ledger` base URI.
 * @throws {HttpErrors.NotFound} Authenticated lookup of an unknown account.
 * @throws {HttpErrors.Forbidden} Non-admin lookup of a disabled account.
 */
async function getAccount (name, requestingUser) {
  log.debug('fetching account name ' + name)

  let canExamine = false
  let account
  if (!requestingUser) { // anonymous request
    // No DB access: synthesize a minimal record for getPublicData below.
    account = {id: name, name: name}
  } else { // authenticated request
    // Owners and admins may see the full record.
    canExamine = requestingUser.name === name || requestingUser.is_admin
    account = await db.getAccount(name)
    if (!account) {
      throw new HttpErrors.NotFound('Unknown account')
    } else if (account.is_disabled &&
      (requestingUser && !requestingUser.is_admin)) {
      throw new HttpErrors.Forbidden('This account is disabled')
    }

    // TODO get rid of this when we start using biginteger math everywhere
    account.balance = Number(account.balance).toString()
    // Never leak the credential hash, even to admins.
    delete account.password_hash
  }

  const data = canExamine ? converters.convertToExternalAccount(account)
    : getPublicData(account)
  data.ledger = config.getIn(['server', 'base_uri'])
  return data
}
/**
 * Create multiple accounts from their external representations.
 *
 * Each plaintext password is hashed (stored base64) and removed from the
 * record before the rows are written.
 *
 * @param {Object[]} externalAccounts - External account objects.
 */
async function insertAccounts (externalAccounts) {
  const internalAccounts = externalAccounts.map(converters.convertToInternalAccount)

  // Hash passwords one at a time (hashing is deliberately expensive).
  for (const internalAccount of internalAccounts) {
    if (!internalAccount.password) continue
    const hash = await hashPassword(internalAccount.password)
    internalAccount.password_hash = hash.toString('base64')
    delete internalAccount.password
  }

  await db.insertAccounts(internalAccounts)
}
require('../services/config') 5 | const uri = require('../services/uriManager') 6 | 7 | function getAuthToken (requestingUser) { 8 | return new Promise((resolve, reject) => { 9 | jwt.sign({}, config.authTokenSecret, { 10 | algorithm: 'HS256', 11 | subject: uri.make('account', requestingUser.name.toLowerCase()), 12 | issuer: config.server.base_uri, 13 | expiresIn: config.authTokenMaxAge 14 | }, (err, token) => { 15 | if (err) return reject(err) 16 | resolve(token) 17 | }) 18 | }) 19 | } 20 | 21 | module.exports = { getAuthToken } 22 | -------------------------------------------------------------------------------- /src/models/converters/accounts.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const _ = require('lodash') 4 | const uri = require('../../services/uriManager') 5 | 6 | function convertToInternalAccount (data) { 7 | // ID is optional on the incoming side 8 | data = _.cloneDeep(data) 9 | if (data.id) { 10 | data.name = uri.parse(data.id, 'account').name.toLowerCase() 11 | delete data.id 12 | } else { 13 | data.name = data.name.toLowerCase() 14 | } 15 | 16 | if (data.balance) { 17 | data.balance = Number(data.balance) 18 | } 19 | 20 | // DEPRECATED: The connector field is deprecated and ignored if provided 21 | delete data.connector 22 | 23 | // Passing in a password hash is a potential DoS vector because the hash 24 | // specifies the number of iterations needed to verify it. So a malicious 25 | // client could set it to UINT32_MAX and make the server do an insane amount 26 | // of hashing work. 27 | // 28 | // There are other places in the code that should prevent users from setting 29 | // the hash directly, but it's a good idea to put an extra layer of 30 | // protection and prevent setting it here. 
31 | if (typeof data.password_hash !== 'undefined') { 32 | delete data.password_hash 33 | } 34 | 35 | if (data.minimum_allowed_balance) { 36 | if (data.minimum_allowed_balance === '-infinity') { 37 | data.minimum_allowed_balance = Number.NEGATIVE_INFINITY 38 | } else { 39 | data.minimum_allowed_balance = Number(data.minimum_allowed_balance) 40 | } 41 | } 42 | 43 | return data 44 | } 45 | 46 | function convertToExternalAccount (data) { 47 | data = _.cloneDeep(data) 48 | data.id = uri.make('account', data.name.toLowerCase()) 49 | data.balance = String(Number(data.balance)) 50 | 51 | // Never show any information about credentials 52 | delete data.password 53 | delete data.password_hash 54 | delete data.public_key 55 | delete data.fingerprint 56 | 57 | if (data.minimum_allowed_balance === Number.NEGATIVE_INFINITY) { 58 | data.minimum_allowed_balance = '-infinity' 59 | } else if (data.minimum_allowed_balance) { 60 | data.minimum_allowed_balance = String(Number(data.minimum_allowed_balance)) 61 | } else { 62 | data.minimum_allowed_balance = '0' 63 | } 64 | if (!data.is_admin) delete data.is_admin 65 | 66 | // DEPRECATED: The connector field is no longer supported 67 | delete data.connector 68 | 69 | return data 70 | } 71 | 72 | module.exports = { 73 | convertToExternalAccount, 74 | convertToInternalAccount 75 | } 76 | -------------------------------------------------------------------------------- /src/models/converters/fulfillments.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | function convertToExternalFulfillment (data) { 4 | return { 5 | condition_fulfillment: data.condition_fulfillment, 6 | fulfillment_data: data.fulfillment_data 7 | } 8 | } 9 | 10 | function convertToInternalFulfillment (data) { 11 | return data 12 | } 13 | 14 | module.exports = { 15 | convertToExternalFulfillment, 16 | convertToInternalFulfillment 17 | } 18 | 
-------------------------------------------------------------------------------- /src/models/converters/transfers.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const _ = require('lodash') 4 | const uri = require('../../services/uriManager') 5 | 6 | function convertToInternalTransfer (data) { 7 | // ID is optional on the incoming side 8 | data = _.cloneDeep(data) 9 | if (data.id && data.id.startsWith('http')) { 10 | data.id = uri.parse(data.id, 'transfer').id.toLowerCase() 11 | } 12 | data.debits = _.sortBy(data.debits, (debit) => debit.account) 13 | data.credits = _.sortBy(data.credits, (credit) => credit.account) 14 | for (let debit of data.debits) { 15 | debit.account = uri.parse(debit.account, 'account').name.toLowerCase() 16 | } 17 | for (let credit of data.credits) { 18 | credit.account = uri.parse(credit.account, 'account').name.toLowerCase() 19 | } 20 | 21 | if (typeof data.timeline === 'object') { 22 | data.proposed_at = data.timeline.proposed_at 23 | data.prepared_at = data.timeline.prepared_at 24 | data.executed_at = data.timeline.executed_at 25 | data.rejected_at = data.timeline.rejected_at 26 | delete data.timeline 27 | } 28 | 29 | if (typeof data.expires_at === 'string') { 30 | data.expires_at = new Date(data.expires_at) 31 | } 32 | 33 | return data 34 | } 35 | 36 | function convertToExternalTransfer (data) { 37 | data = _.cloneDeep(data) 38 | data.id = uri.make('transfer', data.id.toLowerCase()) 39 | 40 | for (let debit of data.debits) { 41 | debit.account = uri.make('account', debit.account) 42 | } 43 | for (let credit of data.credits) { 44 | credit.account = uri.make('account', credit.account) 45 | } 46 | 47 | const timelineProperties = [ 48 | 'proposed_at', 49 | 'prepared_at', 50 | 'executed_at', 51 | 'rejected_at' 52 | ] 53 | 54 | data.timeline = _.pick(data, timelineProperties) 55 | data = _.omit(data, timelineProperties) 56 | if (_.isEmpty(data.timeline)) delete data.timeline 57 | 
58 | if (data.expires_at instanceof Date) { 59 | data.expires_at = data.expires_at.toISOString() 60 | } 61 | 62 | return data 63 | } 64 | 65 | module.exports = { 66 | convertToExternalTransfer, 67 | convertToInternalTransfer 68 | } 69 | -------------------------------------------------------------------------------- /src/models/db/accounts.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const TABLE_NAME = 'L_ACCOUNTS' 4 | const _ = require('lodash') 5 | const db = require('./utils')(TABLE_NAME, 6 | convertToPersistent, convertFromPersistent) 7 | const config = require('../../services/config') 8 | const knex = require('../../lib/knex').knex 9 | const removeAuditFields = require('./audit').removeAuditFields 10 | 11 | function convertFromPersistent (data) { 12 | data = _.cloneDeep(data) 13 | data = _.mapKeys(data, (value, key) => key.toLowerCase()) 14 | delete data.account_id 15 | // some databases store booleans as 0 and 1, and knex does not convert 16 | data.is_disabled = Boolean(data.is_disabled) 17 | data.is_admin = Boolean(data.is_admin) 18 | data.balance = Number(Number(data.balance).toFixed(config.amount.scale)) 19 | if (data.minimum_allowed_balance === null) { 20 | data.minimum_allowed_balance = Number.NEGATIVE_INFINITY 21 | } else if (data.minimum_allowed_balance) { 22 | data.minimum_allowed_balance = Number(data.minimum_allowed_balance) 23 | } else { 24 | data.minimum_allowed_balance = 0 25 | } 26 | delete data.created_at 27 | delete data.updated_at 28 | return removeAuditFields(data) 29 | } 30 | 31 | function convertToPersistent (data) { 32 | data = _.cloneDeep(data) 33 | if (data.balance) { 34 | data.balance = Number(Number(data.balance).toFixed(config.amount.scale)) 35 | } 36 | if (data.minimum_allowed_balance) { 37 | if (data.minimum_allowed_balance === Number.NEGATIVE_INFINITY) { 38 | // A null value in the db column means the balance can go negative without limit 39 | 
data.minimum_allowed_balance = null 40 | } else { 41 | data.minimum_allowed_balance = Number(data.minimum_allowed_balance) 42 | } 43 | } // Otherwise the db defaults minimum_allowed_balance to 0 44 | if (!_.isUndefined(data.is_admin)) { 45 | data.is_admin = Number(data.is_admin) 46 | } 47 | if (!_.isUndefined(data.is_disabled)) { 48 | data.is_disabled = Number(data.is_disabled) 49 | } 50 | return _.mapKeys(data, (value, key) => key.toUpperCase()) 51 | } 52 | 53 | function getAccounts (options) { 54 | return db.select({}, options && options.transaction) 55 | } 56 | 57 | function getAccount (name, options) { 58 | return db.selectOne({NAME: name}, options && options.transaction) 59 | } 60 | 61 | function getAccountByFingerprint (fingerprint, options) { 62 | return db.selectOne({FINGERPRINT: fingerprint}, options && options.transaction) 63 | } 64 | 65 | function getAccountById (id, options) { 66 | return db.selectOne({ACCOUNT_ID: id}, options && options.transaction) 67 | } 68 | 69 | function getAccountId (name, options) { 70 | const transaction = options && options.transaction 71 | return (transaction || knex).from(TABLE_NAME).select() 72 | .where('NAME', name).then((accounts) => { 73 | return accounts.length === 1 ? accounts[0].ACCOUNT_ID : null 74 | }) 75 | } 76 | 77 | function adjustBalance (name, amount, options) { 78 | const updateSQL = 79 | 'UPDATE "L_ACCOUNTS" SET "BALANCE" = "BALANCE" + ? WHERE "NAME" = ?' 
80 | return db.getTransaction(options).raw(updateSQL, [amount, name]) 81 | } 82 | 83 | function updateAccount (account, options) { 84 | return db.update(account, {ACCOUNT_ID: account.id}, 85 | options && options.transaction) 86 | } 87 | 88 | function insertAccounts (accounts, options) { 89 | return db.insertAll(accounts, options && options.transaction) 90 | } 91 | 92 | function upsertAccount (account, options) { 93 | return db.upsert(account, {NAME: account.name}, 94 | options && options.transaction) 95 | } 96 | 97 | module.exports = { 98 | getAccounts, 99 | getAccountId, 100 | getAccount, 101 | getAccountByFingerprint, 102 | getAccountById, 103 | adjustBalance, 104 | updateAccount, 105 | upsertAccount, 106 | insertAccounts 107 | } 108 | -------------------------------------------------------------------------------- /src/models/db/adjustments.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const _ = require('lodash') 4 | const assert = require('assert') 5 | const TABLE_NAME = 'L_TRANSFER_ADJUSTMENTS' 6 | const getAccountId = require('./accounts').getAccountId 7 | const getAccountById = require('./accounts').getAccountById 8 | const db = require('./utils')() 9 | 10 | const DB_RETRIES = 5 11 | 12 | function isNil (x) { 13 | return x === null || x === undefined 14 | } 15 | 16 | function convertFromPersistentAdjustment (data, options) { 17 | return getAccountById(data.ACCOUNT_ID, options).then((account) => { 18 | return _.omitBy({ 19 | account: account.name, 20 | amount: Number(data.AMOUNT).toString(), 21 | authorized: Boolean(data.IS_AUTHORIZED) || null, 22 | rejected: Boolean(data.IS_REJECTED) || null, 23 | rejection_message: data.REJECTION_MESSAGE ? JSON.parse(data.REJECTION_MESSAGE) : null, 24 | memo: data.MEMO ? 
JSON.parse(data.MEMO) : null 25 | }, isNil) 26 | }) 27 | } 28 | 29 | function convertFromPersistent (rows, options) { 30 | const debits = Promise.all(rows.filter((row) => row.DEBIT_CREDIT === 'debit') 31 | .map((debit) => convertFromPersistentAdjustment(debit, options))) 32 | const credits = Promise.all(rows.filter((row) => row.DEBIT_CREDIT === 'credit') 33 | .map((credit) => convertFromPersistentAdjustment(credit, options))) 34 | return Promise.all([debits, credits]).then((results) => { 35 | if (results[0].length === 0 && results[1].length === 0) { 36 | return {} 37 | } 38 | return { 39 | debits: _.sortBy(results[0], (adjustment) => adjustment.account), 40 | credits: _.sortBy(results[1], (adjustment) => adjustment.account) 41 | } 42 | }) 43 | } 44 | 45 | function convertToPersistentAdjustment (transferId, type, data, options) { 46 | return getAccountId(data.account, options).then((accountId) => { 47 | return _.omitBy({ 48 | TRANSFER_ID: transferId, 49 | ACCOUNT_ID: accountId, 50 | DEBIT_CREDIT: type, 51 | AMOUNT: data.amount, 52 | IS_AUTHORIZED: isNil(data.authorized) ? null : Number(data.authorized), 53 | IS_REJECTED: isNil(data.rejected) ? null : Number(data.rejected), 54 | REJECTION_MESSAGE: isNil(data.rejection_message) ? null : JSON.stringify(data.rejection_message), 55 | MEMO: data.memo ? 
JSON.stringify(data.memo) : null 56 | }, (x) => isNil(x)) 57 | }) 58 | } 59 | 60 | function convertToPersistent (data, options) { 61 | const debits = Promise.all(data.debits.map((debit) => 62 | convertToPersistentAdjustment(data._id, 'debit', debit, options))) 63 | const credits = Promise.all(data.credits.map((credit) => 64 | convertToPersistentAdjustment(data._id, 'credit', credit, options))) 65 | return Promise.all([debits, credits]).then((results) => 66 | results[0].concat(results[1])) 67 | } 68 | 69 | function getAdjustments (transferId, options) { 70 | return db.getTransaction(options).from(TABLE_NAME).select() 71 | .where({TRANSFER_ID: transferId}).then((rows) => { 72 | return convertFromPersistent(rows, options) 73 | }) 74 | } 75 | 76 | function insertAdjustments (transfer, options) { 77 | return convertToPersistent(transfer, options).then((rows) => { 78 | const transaction = db.getTransaction(options) 79 | return Promise.all( 80 | rows.map((row) => transaction.into(TABLE_NAME).insert(row))) 81 | }) 82 | } 83 | 84 | function _upsertAdjustment (persistentAdjustment, transaction) { 85 | assert(transaction, 'transaction is required for upsert') 86 | const where = { 87 | TRANSFER_ID: persistentAdjustment.TRANSFER_ID, 88 | ACCOUNT_ID: persistentAdjustment.ACCOUNT_ID, 89 | DEBIT_CREDIT: persistentAdjustment.DEBIT_CREDIT 90 | } 91 | return transaction.from(TABLE_NAME) 92 | .select().where(where).then((rows) => { 93 | if (rows.length > 0) { 94 | return transaction.into(TABLE_NAME).update(persistentAdjustment) 95 | .where(where) 96 | } else { 97 | return transaction.into(TABLE_NAME).insert(persistentAdjustment) 98 | } 99 | }) 100 | } 101 | 102 | function upsertAdjustment (persistentAdjustment, options) { 103 | if (options && options.transaction) { 104 | return _upsertAdjustment(persistentAdjustment, options.transaction) 105 | } else { 106 | // second parameter is number of retries in case database is busy 107 | return db.withSerializableTransaction((transaction) => 
108 | _upsertAdjustment(persistentAdjustment, transaction), DB_RETRIES) 109 | } 110 | } 111 | 112 | function upsertAdjustments (transfer, options) { 113 | return convertToPersistent(transfer, options).then((rows) => { 114 | return Promise.all(rows.map((row) => upsertAdjustment(row, options))) 115 | }) 116 | } 117 | 118 | module.exports = { 119 | getAdjustments, 120 | insertAdjustments, 121 | upsertAdjustments 122 | } 123 | -------------------------------------------------------------------------------- /src/models/db/audit.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const _ = require('lodash') 4 | 5 | function removeAuditFields (data) { 6 | return _.omit(data, ['db_created_dttm', 'db_updated_dttm', 'db_updated_user']) 7 | } 8 | 9 | module.exports = { 10 | removeAuditFields 11 | } 12 | -------------------------------------------------------------------------------- /src/models/db/entries.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const TABLE_NAME = 'L_ENTRIES' 4 | const _ = require('lodash') 5 | const db = require('./utils')(TABLE_NAME, 6 | convertToPersistent, convertFromPersistent) 7 | const getTransferId = require('./transfers').getTransferId 8 | const removeAuditFields = require('./audit').removeAuditFields 9 | 10 | function convertToPersistent (data) { 11 | const result = _.cloneDeep(data) 12 | if (result.id) { 13 | result.entry_id = result.id 14 | delete result.id 15 | } 16 | if (result.created_at) { 17 | result.created_dttm = result.created_at 18 | delete result.created_at 19 | } 20 | return _.mapKeys(result, (value, key) => key.toUpperCase()) 21 | } 22 | 23 | function convertFromPersistent (data) { 24 | const result = _.mapKeys(_.cloneDeep(data), (value, key) => key.toLowerCase()) 25 | result.id = result.entry_id 26 | delete result.entry_id 27 | result.created_at = result.created_dttm 28 | delete result.created_dttm 29 | return 
removeAuditFields(result) 30 | } 31 | 32 | function insertEntry (entry, options) { 33 | return getTransferId(entry.transfer_id, options).then((transferId) => { 34 | const row = _.assign({}, entry, {transfer_id: transferId}) 35 | return db.insert(row, options && options.transaction) 36 | }) 37 | } 38 | 39 | module.exports = { 40 | insertEntry 41 | } 42 | -------------------------------------------------------------------------------- /src/models/db/fulfillments.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const _ = require('lodash') 4 | const moment = require('moment') 5 | const db = require('./utils')('L_FULFILLMENTS', 6 | convertToPersistent, convertFromPersistent) 7 | const getTransferId = require('./transfers').getTransferId 8 | const getTransferById = require('./transfers').getTransferById 9 | const removeAuditFields = require('./audit').removeAuditFields 10 | const TransferNotFoundError = require('../../errors/transfer-not-found-error') 11 | const MissingFulfillmentError = require('../../errors/missing-fulfillment-error') 12 | const TransferNotConditionalError = require('../../errors/transfer-not-conditional-error') 13 | const AlreadyRolledBackError = require('../../errors/already-rolled-back-error') 14 | const transferDictionary = require('five-bells-shared').TransferStateDictionary 15 | const transferStates = transferDictionary.transferStates 16 | 17 | function convertFromPersistent (data) { 18 | const result = _.mapKeys(_.cloneDeep(data), (value, key) => key.toLowerCase()) 19 | result.id = result.fulfillment_id 20 | delete result.fulfillment_id 21 | delete result.created_at 22 | delete result.updated_at 23 | return removeAuditFields(result) 24 | } 25 | 26 | function convertToPersistent (data) { 27 | const result = _.cloneDeep(data) 28 | if (result.id) { 29 | result.fulfillment_id = result.id 30 | delete result.id 31 | } 32 | return _.mapKeys(result, (value, key) => key.toUpperCase()) 33 | } 34 
/**
 * Replace the transfer UUID on a fulfillment with the integer row id used
 * as the foreign key in the database. Returns a new object.
 */
function convertToIntegerTransferId (fulfillment, options) {
  return getTransferId(fulfillment.transfer_id, options)
    .then((transferId) => _.assign({}, fulfillment, {transfer_id: transferId}))
}

/** Insert several fulfillment rows, sequentially. */
async function insertFulfillments (fulfillments, options) {
  for (const fulfillment of fulfillments) {
    const row = await convertToIntegerTransferId(fulfillment, options)
    await db.insert(row, options && options.transaction)
  }
}

/**
 * Fetch the fulfillment for a transfer.
 *
 * Throws TransferNotFoundError for unknown transfers,
 * AlreadyRolledBackError for rejected ones, TransferNotConditionalError
 * when the transfer has no conditions, and MissingFulfillmentError when
 * no fulfillment row exists (with a distinct message for expired transfers).
 */
async function getFulfillment (transferUuid, options) {
  const transferId = await getTransferId(transferUuid, options)
  if (!transferId) {
    throw new TransferNotFoundError('This transfer does not exist')
  }
  const transfer = await getTransferById(transferId, options)
  if (transfer.state === transferStates.TRANSFER_STATE_REJECTED) {
    throw new AlreadyRolledBackError('This transfer has already been rejected')
  }
  if (!transfer.execution_condition && !transfer.cancellation_condition) {
    throw new TransferNotConditionalError('Transfer does not have any conditions')
  }

  const row = await db.selectOne({TRANSFER_ID: transferId},
    options && options.transaction)
  if (!row) {
    if (transfer.expires_at && moment().isAfter(transfer.expires_at)) {
      throw new MissingFulfillmentError('This transfer expired before it was fulfilled')
    }
    throw new MissingFulfillmentError('This transfer has not yet been fulfilled')
  }
  // Callers expect the external UUID, not the internal integer id.
  row.transfer_id = transferUuid
  return row
}

/** Like getFulfillment, but resolves to null instead of throwing. */
async function maybeGetFulfillment (transferUuid, options) {
  try {
    return await getFulfillment(transferUuid, options)
  } catch (err) {
    return null
  }
}

/** Insert a single fulfillment, ignoring duplicate-key conflicts. */
async function insertFulfillment (fulfillment, options) {
  const row = await convertToIntegerTransferId(fulfillment, options)
  await db.insertIgnore(row, options && options.transaction)
}

module.exports = { 88 | maybeGetFulfillment, 89 | getFulfillment, 90 | insertFulfillments, 91 | insertFulfillment 92 | } 93 | -------------------------------------------------------------------------------- /src/models/db/rejectionReasons.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const knex = require('../../lib/knex').knex 4 | const assert = require('assert') 5 | 6 | const TABLE_NAME = 'L_LU_REJECTION_REASON' 7 | const ID_TO_NAME = new Map() 8 | const NAME_TO_ID = new Map() 9 | 10 | function readRejectionReasons () { 11 | return knex(TABLE_NAME).select().then((rows) => { 12 | rows.forEach((row) => { 13 | ID_TO_NAME.set(row.REJECTION_REASON_ID, row.NAME) 14 | NAME_TO_ID.set(row.NAME, row.REJECTION_REASON_ID) 15 | }) 16 | }) 17 | } 18 | 19 | function getRejectionReasonName (rejectionReasonId) { 20 | assert(ID_TO_NAME.has(rejectionReasonId), 'Unable to find name for id ' + 21 | rejectionReasonId + ' in ' + TABLE_NAME) 22 | return ID_TO_NAME.get(rejectionReasonId) 23 | } 24 | 25 | function getRejectionReasonId (rejectionReasonName) { 26 | assert(NAME_TO_ID.has(rejectionReasonName), 'Unable to find name ' + 27 | rejectionReasonName + ' in ' + TABLE_NAME) 28 | return NAME_TO_ID.get(rejectionReasonName) 29 | } 30 | 31 | module.exports = { 32 | readRejectionReasons, 33 | getRejectionReasonName, 34 | getRejectionReasonId 35 | } 36 | -------------------------------------------------------------------------------- /src/models/db/transferStatuses.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const knex = require('../../lib/knex').knex 4 | const assert = require('assert') 5 | 6 | const TABLE_NAME = 'L_LU_TRANSFER_STATUS' 7 | const ID_TO_NAME = new Map() 8 | const NAME_TO_ID = new Map() 9 | 10 | function readTransferStatuses () { 11 | return knex(TABLE_NAME).select().then((rows) => { 12 | rows.forEach((row) => { 13 | ID_TO_NAME.set(row.STATUS_ID, 
row.NAME)
      NAME_TO_ID.set(row.NAME, row.STATUS_ID)
    })
  })
}

/** Map a transfer-status id to its name; asserts the id is known. */
function getTransferStatusName (transferStatusId) {
  assert(ID_TO_NAME.has(transferStatusId), 'Unable to find name for id ' +
    transferStatusId + ' in ' + TABLE_NAME)
  return ID_TO_NAME.get(transferStatusId)
}

/** Map a transfer-status name to its id; asserts the name is known. */
function getTransferStatusId (transferStatusName) {
  assert(NAME_TO_ID.has(transferStatusName), 'Unable to find name ' +
    transferStatusName + ' in ' + TABLE_NAME)
  return NAME_TO_ID.get(transferStatusName)
}

module.exports = {
  readTransferStatuses,
  getTransferStatusName,
  getTransferStatusId
}
-------------------------------------------------------------------------------- /src/models/db/transfers.js: --------------------------------------------------------------------------------
'use strict'

const TABLE_NAME = 'L_TRANSFERS'
const _ = require('lodash')
const client = require('./utils').client
const db = require('./utils')(TABLE_NAME,
  convertToPersistent, convertFromPersistent)
const withSerializableTransaction = require('../../lib/db').withSerializableTransaction
const rejectionReasons = require('./rejectionReasons')
const transferStatuses = require('./transferStatuses')
const adjustments = require('./adjustments')
const removeAuditFields = require('./audit').removeAuditFields

/**
 * Convert a persistent L_TRANSFERS row (UPPERCASE columns) into the
 * internal transfer representation: lowercases keys, renames the UUID to
 * `id` (keeping the integer key as `_id`), parses JSON columns and turns
 * each *_DTTM timestamp into a Date under the *_at name.
 */
function convertFromPersistent (data) {
  data = _.cloneDeep(data)
  data = _.mapKeys(data, (value, key) => key.toLowerCase())
  data.id = data.transfer_uuid
  data._id = data.transfer_id
  delete data.transfer_id
  delete data.transfer_uuid
  delete data.created_at
  delete data.updated_at
  data.additional_info = JSON.parse(data.additional_info)
  if (data.expires_dttm) {
    data.expires_at = new Date(data.expires_dttm)
    delete data.expires_dttm
  }
  if (data.proposed_dttm) {
    data.proposed_at = new
Date(data.proposed_dttm) 30 | delete data.proposed_dttm 31 | } 32 | if (data.prepared_dttm) { 33 | data.prepared_at = new Date(data.prepared_dttm) 34 | delete data.prepared_dttm 35 | } 36 | if (data.executed_dttm) { 37 | data.executed_at = new Date(data.executed_dttm) 38 | delete data.executed_dttm 39 | } 40 | if (data.rejected_dttm) { 41 | data.rejected_at = new Date(data.rejected_dttm) 42 | delete data.rejected_dttm 43 | } 44 | if (data.rejection_reason_id !== null) { 45 | data.rejection_reason = rejectionReasons.getRejectionReasonName( 46 | data.rejection_reason_id) 47 | delete data.rejection_reason_id 48 | } 49 | data.state = transferStatuses.getTransferStatusName(data.status_id) 50 | delete data.status_id 51 | data = _.omitBy(data, _.isNull) 52 | return removeAuditFields(data) 53 | } 54 | 55 | function convertToPersistent (data) { 56 | data = _.cloneDeep(data) 57 | delete data.credits 58 | delete data.debits 59 | data.additional_info = JSON.stringify(data.additional_info) 60 | if (data.proposed_at) { 61 | data.proposed_dttm = new Date(data.proposed_at) 62 | delete data.proposed_at 63 | } 64 | if (data.prepared_at) { 65 | data.prepared_dttm = new Date(data.prepared_at) 66 | delete data.prepared_at 67 | } 68 | if (data.executed_at) { 69 | data.executed_dttm = new Date(data.executed_at) 70 | delete data.executed_at 71 | } 72 | if (data.rejected_at) { 73 | data.rejected_dttm = new Date(data.rejected_at) 74 | delete data.rejected_at 75 | } 76 | if (data.expires_at) { 77 | data.expires_dttm = new Date(data.expires_at) 78 | delete data.expires_at 79 | } 80 | if (data.rejection_reason) { 81 | data.rejection_reason_id = rejectionReasons.getRejectionReasonId( 82 | data.rejection_reason) 83 | delete data.rejection_reason 84 | } 85 | if (data.state) { 86 | data.status_id = transferStatuses.getTransferStatusId(data.state) 87 | delete data.state 88 | } 89 | data.transfer_uuid = data.id 90 | delete data.id 91 | return _.mapKeys(data, (value, key) => key.toUpperCase()) 92 | } 
93 | 94 | function getTransferWhere (where, options) { 95 | return db.selectOne(where, options && options.transaction) 96 | .then((transfer) => { 97 | if (transfer === null) { 98 | return null 99 | } 100 | return adjustments.getAdjustments(transfer._id, options) 101 | .then((adjustments) => { 102 | const result = _.assign({}, transfer, adjustments) 103 | return _.isEmpty(result) ? null : _.omit(result, '_id') 104 | }) 105 | }) 106 | } 107 | 108 | function getTransfer (uuid, options) { 109 | return getTransferWhere({TRANSFER_UUID: uuid}, options) 110 | } 111 | 112 | function getTransferId (uuid, options) { 113 | return db.selectOne({TRANSFER_UUID: uuid}, 114 | options && options.transaction).then( 115 | (transfer) => transfer ? transfer._id : null) 116 | } 117 | 118 | function getTransferById (id, options) { 119 | return getTransferWhere({TRANSFER_ID: id}, options) 120 | } 121 | 122 | async function updateTransfer (transfer, options) { 123 | const transaction = options && options.transaction 124 | const result = await db.update(transfer, {TRANSFER_UUID: transfer.id}, transaction) 125 | const dbTransfer = await db.selectOne({TRANSFER_UUID: transfer.id}, transaction) 126 | const transferWithId = _.assign({}, transfer, {'_id': dbTransfer._id}) 127 | await adjustments.upsertAdjustments(transferWithId, options) 128 | return result 129 | } 130 | 131 | function insertTransfer (transfer, options) { 132 | const transaction = options && options.transaction 133 | return db.insert(transfer, options).then(() => { 134 | return db.selectOne({TRANSFER_UUID: transfer.id}, transaction) 135 | }).then((dbTransfer) => { 136 | const transferWithId = _.assign({}, transfer, {'_id': dbTransfer._id}) 137 | return adjustments.insertAdjustments(transferWithId, options) 138 | }) 139 | } 140 | 141 | function insertTransfers (transfers, options) { 142 | return Promise.all(transfers.map( 143 | (transfer) => insertTransfer(transfer, options))) 144 | } 145 | 146 | function upsertTransfer (transfer, 
options) { 147 | const transaction = options && options.transaction 148 | return db.upsert(transfer, {TRANSFER_UUID: transfer.id}, transaction) 149 | .then((result) => { 150 | return db.selectOne({TRANSFER_UUID: transfer.id}, transaction) 151 | .then((dbTransfer) => { 152 | const transferWithId = _.assign({}, transfer, {'_id': dbTransfer._id}) 153 | return adjustments.upsertAdjustments(transferWithId, options) 154 | .then(() => result) 155 | }) 156 | }) 157 | } 158 | 159 | module.exports = { 160 | getTransfer, 161 | getTransferId, 162 | getTransferById, 163 | upsertTransfer, 164 | updateTransfer, 165 | insertTransfers, 166 | withSerializableTransaction, 167 | client 168 | } 169 | -------------------------------------------------------------------------------- /src/models/db/utils.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const assert = require('assert') 4 | const knex = require('../../lib/knex').knex 5 | const InvalidModificationError = require('../../errors/invalid-modification-error') 6 | const _ = require('lodash') 7 | 8 | function createModule (tableName, convertToPersistent, convertFromPersistent) { 9 | function getTransaction (options) { 10 | return options && options.transaction ? 
options.transaction : knex 11 | } 12 | 13 | function select (where, transaction) { 14 | return (transaction || knex).from(tableName).select().where(where) 15 | .then((results) => results.map(convertFromPersistent)) 16 | } 17 | 18 | function selectOne (where, transaction) { 19 | return (transaction || knex).from(tableName).select().where(where) 20 | .then((results) => { 21 | if (results.length === 1) { 22 | return convertFromPersistent(results[0]) 23 | } else if (results.length === 0) { 24 | return null 25 | } else { 26 | assert(false, 'Multiple rows in ' + tableName + ' match ' + 27 | JSON.stringify(where)) 28 | } 29 | }) 30 | } 31 | 32 | function update (data, where, transaction) { 33 | return (transaction || knex)(tableName) 34 | .update(convertToPersistent(data)).where(where) 35 | } 36 | 37 | function insert (data, transaction) { 38 | return (transaction || knex).insert(convertToPersistent(data)) 39 | .into(tableName) 40 | } 41 | 42 | function insertIgnore (data, transaction) { 43 | const dbType = knex.client.config.client 44 | if (dbType === 'pg') { 45 | const sql = knex('L_FULFILLMENTS').insert(convertToPersistent(data)).toString() + ' ON CONFLICT DO NOTHING' 46 | return (transaction || knex).raw(sql) 47 | } else { 48 | return insert(data, transaction) 49 | } 50 | } 51 | 52 | function insertAll (data, transaction) { 53 | return Promise.all(_.map(data.map(convertToPersistent), (tableRow) => { 54 | return (transaction || knex).insert(tableRow) 55 | .into(tableName) 56 | })) 57 | } 58 | 59 | function _upsert (data, where, transaction) { 60 | assert(transaction, 'transaction is required for upsert') 61 | return selectOne(where, transaction).then((existing) => { 62 | if (existing && existing.is_deleted) { 63 | throw new InvalidModificationError( 64 | 'Already deleted, please use a new ID: ' + JSON.stringify(where)) 65 | } 66 | const execute = existing 67 | ? 
update(data, where, transaction) : insert(data, transaction) 68 | return execute.then(() => Boolean(existing)) 69 | }) 70 | } 71 | 72 | function upsert (data, where, transaction) { 73 | if (transaction) { 74 | return _upsert(data, where, transaction) 75 | } else { 76 | return withTransaction((transaction) => _upsert(data, where, transaction)) 77 | } 78 | } 79 | 80 | function withTransaction (callback) { 81 | return knex.transaction((transaction) => callback(transaction)) 82 | } 83 | 84 | return { 85 | getTransaction, 86 | select, 87 | selectOne, 88 | update, 89 | insert, 90 | insertIgnore, 91 | insertAll, 92 | upsert, 93 | withTransaction 94 | } 95 | } 96 | 97 | module.exports = createModule 98 | module.exports.client = knex.client.config.client 99 | -------------------------------------------------------------------------------- /src/models/health.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const db = require('../lib/db') 4 | 5 | async function getDbHealth () { 6 | const isConnected = await db.isConnected() 7 | return isConnected ? 
{ 8 | status: 'OK' 9 | } : { 10 | status: 'NOT OK' 11 | } 12 | } 13 | 14 | module.exports = { 15 | getDbHealth 16 | } 17 | -------------------------------------------------------------------------------- /src/models/messages.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const InvalidBodyError = require('../errors/invalid-body-error') 4 | const NoSubscriptionsError = require('../errors/no-subscriptions-error') 5 | const uri = require('../services/uriManager') 6 | const validator = require('../services/validator') 7 | const notificationBroadcaster = require('../services/notificationBroadcaster') 8 | const log = require('../services/log').create('messages') 9 | 10 | async function sendMessage (message, requestingUser) { 11 | const validationResult = validator.create('Message')(message) 12 | if (validationResult.valid !== true) { 13 | const error = validationResult.schema 14 | ? 'Body did not match schema ' + validationResult.schema 15 | : 'Body did not pass validation' 16 | throw new InvalidBodyError(error, validationResult.errors) 17 | } 18 | 19 | // For backwards compatibility. 20 | if (message.account && !message.from && !message.to) { 21 | message.to = message.account 22 | message.from = uri.make('account', requestingUser.name) 23 | } 24 | 25 | const senderAccount = message.from 26 | const senderName = uri.parse(senderAccount, 'account').name.toLowerCase() 27 | const recipientName = uri.parse(message.to, 'account').name.toLowerCase() 28 | 29 | log.debug('%s -> %s: %o', senderName, recipientName, message.data) 30 | 31 | // Only admin can impersonate users. 
32 | if (!requestingUser.is_admin && senderName !== requestingUser.name) { 33 | throw new InvalidBodyError('You do not have permission to impersonate this user') 34 | } 35 | 36 | const messageDelivered = await notificationBroadcaster.sendMessage( 37 | recipientName, Object.assign({}, message, {account: senderAccount})) 38 | if (!messageDelivered) { 39 | throw new NoSubscriptionsError('Destination account could not be reached') 40 | } 41 | } 42 | 43 | module.exports = { sendMessage } 44 | -------------------------------------------------------------------------------- /src/services/app.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const App = require('../lib/app') 4 | module.exports = new App({ 5 | log: require('./log'), 6 | config: require('./config'), 7 | timerWorker: require('./timerWorker'), 8 | notificationBroadcaster: require('./notificationBroadcaster') 9 | }) 10 | -------------------------------------------------------------------------------- /src/services/auth.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const jwt = require('jsonwebtoken') 4 | const passport = require('koa-passport') 5 | const BasicStrategy = require('passport-http').BasicStrategy 6 | const ClientCertStrategy = require('passport-client-certificate').Strategy 7 | const HTTPSignatureStrategy = require('passport-http-signature') 8 | const AnonymousStrategy = require('passport-anonymous').Strategy 9 | const TokenStrategy = require('../lib/tokenStrategy') 10 | const getAccount = require('../models/db/accounts').getAccount 11 | const getAccountByFingerprint = require('../models/db/accounts') 12 | .getAccountByFingerprint 13 | const verifyPassword = require('five-bells-shared/utils/hashPassword').verifyPassword 14 | const HttpErrors = require('http-errors') 15 | const config = require('./config') 16 | const uri = require('./uriManager') 17 | 18 | passport.use(new 
BasicStrategy( 19 | function (username, password, done) { 20 | if (!config.getIn(['auth', 'basic_enabled'])) { 21 | return done(new HttpErrors.Unauthorized('Unsupported authentication method')) 22 | } 23 | 24 | // If no Authorization is provided we can still 25 | // continue without throwing an error 26 | if (!username) { 27 | return done(null, false) 28 | } 29 | 30 | getAccount(username) 31 | .then(function (userObj) { 32 | if (!userObj || userObj.is_disabled || !userObj.password_hash) { 33 | return done(new HttpErrors.Unauthorized( 34 | 'Unknown or invalid account / password')) 35 | } 36 | return verifyPassword(password, Buffer.from(userObj.password_hash, 'base64')) 37 | .then((valid) => { 38 | if (!valid) { 39 | return done(new HttpErrors.Unauthorized('Invalid password')) 40 | } 41 | 42 | return done(null, userObj) 43 | }) 44 | }) 45 | })) 46 | 47 | passport.use(new HTTPSignatureStrategy( 48 | function (username, done) { 49 | if (!config.getIn(['auth', 'http_signature_enabled'])) { 50 | return done(new HttpErrors.Unauthorized('Unsupported authentication method')) 51 | } 52 | 53 | getAccount(username) 54 | .then(function (userObj) { 55 | if (!userObj || userObj.is_disabled) { 56 | return done(new HttpErrors.Unauthorized('Unknown or invalid account')) 57 | } 58 | if (!userObj.public_key) { 59 | return done(new HttpErrors.Unauthorized('User doesn\'t have a public key')) 60 | } 61 | done(null, userObj, userObj.public_key) 62 | }) 63 | })) 64 | 65 | passport.use(new ClientCertStrategy((certificate, done) => { 66 | if (!config.getIn(['auth', 'client_certificates_enabled'])) { 67 | return done(new HttpErrors.Unauthorized('Unsupported authentication method')) 68 | } 69 | 70 | const fingerprint = certificate.fingerprint.toUpperCase() 71 | getAccountByFingerprint(fingerprint) 72 | .then(function (userObj) { 73 | if (!userObj || userObj.is_disabled || !userObj.fingerprint || 74 | userObj.fingerprint !== fingerprint) { 75 | return done(new HttpErrors.Unauthorized('Unknown 
or invalid account')) 76 | } 77 | done(null, userObj) 78 | }) 79 | })) 80 | 81 | passport.use(new TokenStrategy( 82 | function (tokenString, done) { 83 | jwt.verify(tokenString, config.authTokenSecret, { 84 | algorithms: ['HS256'], 85 | issuer: config.server.base_uri 86 | }, function (err, token) { 87 | if (err) { 88 | return done(new HttpErrors.Unauthorized( 89 | err.name === 'TokenExpiredError' ? 'Token has expired' : 'Invalid token')) 90 | } 91 | const username = uri.parse(token.sub, 'account').name.toLowerCase() 92 | getAccount(username) 93 | .then(function (userObj) { 94 | if (!userObj || userObj.is_disabled) { 95 | return done(new HttpErrors.Unauthorized('Unknown or invalid account')) 96 | } 97 | done(null, userObj) 98 | }) 99 | }) 100 | })) 101 | 102 | // Allow unauthenticated requests (transfers will just 103 | // be in the proposed state) 104 | passport.use(new AnonymousStrategy()) 105 | -------------------------------------------------------------------------------- /src/services/config.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const loadConfig = require('../lib/config') 4 | 5 | module.exports = loadConfig() 6 | -------------------------------------------------------------------------------- /src/services/log.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const riverpig = require('riverpig') 4 | 5 | const logStream = require('through2')() 6 | logStream.pipe(process.stdout) 7 | 8 | const create = (namespace) => { 9 | return riverpig('ledger:' + namespace, { 10 | stream: logStream 11 | }) 12 | } 13 | 14 | let outputStream = process.stdout 15 | const setOutputStream = (newOutputStream) => { 16 | logStream.unpipe(outputStream) 17 | logStream.pipe(newOutputStream) 18 | outputStream = newOutputStream 19 | } 20 | 21 | module.exports = { 22 | create, 23 | setOutputStream 24 | } 25 | 
-------------------------------------------------------------------------------- /src/services/makeRpcHandler.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const log = require('./log') 4 | const RpcHandler = require('../lib/rpcHandler') 5 | const config = require('./config') 6 | 7 | module.exports = function (websocket, requestingUser) { 8 | return new RpcHandler({ 9 | log: log.create('rpcHandler'), 10 | uriManager: require('./uriManager'), 11 | validator: require('./validator'), 12 | notificationBroadcaster: require('./notificationBroadcaster'), 13 | pingInterval: config.websocket.pingInterval 14 | }, websocket, requestingUser) 15 | } 16 | -------------------------------------------------------------------------------- /src/services/notificationBroadcaster.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const NotificationBroadcaster = require('../lib/notificationBroadcasterWebsocket') 4 | const log = require('./log') 5 | 6 | module.exports = new NotificationBroadcaster(log.create('notificationBroadcaster')) 7 | -------------------------------------------------------------------------------- /src/services/timeQueue.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const TimeQueue = require('../lib/timeQueue').TimeQueue 4 | 5 | module.exports = new TimeQueue() 6 | -------------------------------------------------------------------------------- /src/services/timerWorker.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const TimerWorker = require('../lib/timerWorker').TimerWorker 4 | const timeQueue = require('./timeQueue') 5 | const transferExpiryMonitor = require('./transferExpiryMonitor') 6 | 7 | module.exports = new TimerWorker(timeQueue, transferExpiryMonitor) 8 | 
-------------------------------------------------------------------------------- /src/services/transferExpiryMonitor.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const TransferExpiryMonitor = 4 | require('../lib/transferExpiryMonitor').TransferExpiryMonitor 5 | const timeQueue = require('./timeQueue') 6 | const notificationBroadcaster = require('./notificationBroadcaster') 7 | 8 | module.exports = new TransferExpiryMonitor(timeQueue, notificationBroadcaster) 9 | -------------------------------------------------------------------------------- /src/services/uriManager.js: -------------------------------------------------------------------------------- 1 | const UriManager = require('five-bells-shared/lib/uri-manager').UriManager 2 | const config = require('./config') 3 | 4 | const uri = module.exports = new UriManager(config.getIn(['server', 'base_uri'])) 5 | 6 | uri.addResource('account', '/accounts/:name') 7 | uri.addResource('transfer', '/transfers/:id') 8 | uri.addResource('subscription', '/subscriptions/:id') 9 | -------------------------------------------------------------------------------- /src/services/validator.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const Validator = require('five-bells-shared').Validator 4 | 5 | const validator = module.exports = new Validator() 6 | 7 | validator.loadSharedSchemas() 8 | -------------------------------------------------------------------------------- /src/sql/pg/2-1.sql: -------------------------------------------------------------------------------- 1 | 2 | DROP TRIGGER IF EXISTS "L_ACCOUNTS" ON "L_ACCOUNTS"; 3 | DROP TRIGGER IF EXISTS "L_LU_REJECTION_REASON" ON "L_LU_REJECTION_REASON"; 4 | DROP TRIGGER IF EXISTS "L_LU_TRANSFER_STATUS" ON "L_LU_TRANSFER_STATUS"; 5 | DROP TRIGGER IF EXISTS "L_ENTRIES" ON "L_ENTRIES"; 6 | DROP TRIGGER IF EXISTS "L_FULFILLMENTS" ON "L_FULFILLMENTS"; 7 | DROP 
TRIGGER IF EXISTS "L_TRANSFER_ADJUSTMENTS" ON "L_TRANSFER_ADJUSTMENTS"; 8 | DROP TRIGGER IF EXISTS "L_TRANSFERS" ON "L_TRANSFERS"; 9 | 10 | DROP FUNCTION IF EXISTS SYNC_DB_CREATED_DTTM(); 11 | 12 | DROP TABLE IF EXISTS "L_TRANSFER_ADJUSTMENTS" CASCADE; 13 | DROP TABLE IF EXISTS "L_ACCOUNTS" CASCADE; 14 | DROP TABLE IF EXISTS "L_LU_REJECTION_REASON" CASCADE; 15 | DROP TABLE IF EXISTS "L_LU_TRANSFER_STATUS" CASCADE; 16 | DROP TABLE IF EXISTS "L_ENTRIES" CASCADE; 17 | DROP TABLE IF EXISTS "L_FULFILLMENTS" CASCADE; 18 | DROP TABLE IF EXISTS "L_TRANSFERS" CASCADE; 19 | -------------------------------------------------------------------------------- /src/sql/pg/2-3.sql: -------------------------------------------------------------------------------- 1 | -- ILP fulfillment data can be 32KiB, plus the ILP packet envelope, encoded in 2 | -- Base64 (33% overhead). 64KiB should be safe. 3 | 4 | ALTER TABLE "L_FULFILLMENTS" 5 | ADD COLUMN "FULFILLMENT_DATA" CHARACTER VARYING(65535) NULL; 6 | 7 | -- Memos may contain ILP packets which can be slightly larger than 33KiB, 8 | -- encoded in Base64. 
9 | ALTER TABLE "L_TRANSFER_ADJUSTMENTS" 10 | ALTER COLUMN "MEMO" TYPE VARCHAR(65535), 11 | ALTER COLUMN "REJECTION_MESSAGE" TYPE VARCHAR(65535); 12 | -------------------------------------------------------------------------------- /src/sql/pg/3-2.sql: -------------------------------------------------------------------------------- 1 | ALTER TABLE "L_FULFILLMENTS" DROP COLUMN "FULFILLMENT_DATA"; 2 | 3 | ALTER TABLE "L_TRANSFER_ADJUSTMENTS" 4 | ALTER COLUMN "MEMO" TYPE VARCHAR(4000), 5 | ALTER COLUMN "REJECTION_MESSAGE" TYPE VARCHAR(4000); 6 | -------------------------------------------------------------------------------- /src/sql/sqlite3/create.sql: -------------------------------------------------------------------------------- 1 | create table if not exists "L_ACCOUNTS" ( 2 | "ACCOUNT_ID" integer not null primary key, 3 | "NAME" varchar(255), 4 | "BALANCE" float default 0 not null check ("BALANCE" >= "MINIMUM_ALLOWED_BALANCE"), 5 | "PASSWORD_HASH" varchar(1024), 6 | "PUBLIC_KEY" text, 7 | "IS_ADMIN" boolean default 0 not null, 8 | "IS_DISABLED" boolean default 0 not null, 9 | "FINGERPRINT" varchar(255), 10 | "MINIMUM_ALLOWED_BALANCE" float default 0 11 | ); 12 | 13 | create unique index accounts_name_unique on "L_ACCOUNTS" 14 | ("NAME"); 15 | create index fingerprint on "L_ACCOUNTS" 16 | ("FINGERPRINT"); 17 | 18 | 19 | create table if not exists "L_LU_REJECTION_REASON" ( 20 | "REJECTION_REASON_ID" integer not null primary key, 21 | "NAME" varchar(10) not null, 22 | "DESCRIPTION" varchar(255) null 23 | ); 24 | 25 | create unique index rejection_reason_name on "L_LU_REJECTION_REASON" 26 | ("NAME"); 27 | 28 | 29 | create table if not exists "L_LU_TRANSFER_STATUS" ( 30 | "STATUS_ID" integer not null primary key, 31 | "NAME" varchar(20) not null, 32 | "DESCRIPTION" varchar(255) null 33 | ); 34 | 35 | create unique index transfer_status_name on "L_LU_TRANSFER_STATUS" 36 | ("NAME"); 37 | 38 | 39 | create table if not exists "L_TRANSFERS" ( 40 | "TRANSFER_ID" integer 
not null primary key, 41 | "TRANSFER_UUID" char(36) not null unique, 42 | "LEDGER" varchar(1024), 43 | "ADDITIONAL_INFO" text, 44 | "STATUS_ID" integer not null, 45 | "REJECTION_REASON_ID" integer, 46 | "EXECUTION_CONDITION" text, 47 | "CANCELLATION_CONDITION" text, 48 | "EXPIRES_DTTM" datetime, 49 | "PROPOSED_DTTM" datetime, 50 | "PREPARED_DTTM" datetime, 51 | "EXECUTED_DTTM" datetime, 52 | "REJECTED_DTTM" datetime, 53 | FOREIGN KEY("REJECTION_REASON_ID") REFERENCES "L_LU_REJECTION_REASON" 54 | ("REJECTION_REASON_ID"), 55 | FOREIGN KEY("STATUS_ID") REFERENCES "L_LU_TRANSFER_STATUS" ("STATUS_ID") 56 | ); 57 | 58 | create table if not exists "L_TRANSFER_ADJUSTMENTS" 59 | ( 60 | "TRANSFER_ADJUSTMENT_ID" integer not null primary key, 61 | "TRANSFER_ID" integer not null, 62 | "ACCOUNT_ID" integer not null, 63 | "DEBIT_CREDIT" varchar(10) not null, 64 | "AMOUNT" float DEFAULT 0 not null, 65 | "IS_AUTHORIZED" boolean default 0 not null, 66 | "IS_REJECTED" boolean default 0 not null, 67 | "REJECTION_MESSAGE" text, 68 | "MEMO" varchar(4000) null, 69 | FOREIGN KEY("TRANSFER_ID") REFERENCES "L_TRANSFERS" ("TRANSFER_ID"), 70 | FOREIGN KEY("ACCOUNT_ID") REFERENCES "L_ACCOUNTS" ("ACCOUNT_ID") 71 | ); 72 | 73 | create table if not exists "L_ENTRIES" ( 74 | "ENTRY_ID" integer not null primary key, 75 | "TRANSFER_ID" integer not null, 76 | "ACCOUNT_ID" integer not null, 77 | "CREATED_DTTM" datetime default CURRENT_TIMESTAMP 78 | ); 79 | 80 | create table if not exists "L_FULFILLMENTS" ( 81 | "FULFILLMENT_ID" integer not null primary key, 82 | "TRANSFER_ID" integer, 83 | "CONDITION_FULFILLMENT" text, 84 | "FULFILLMENT_DATA" text 85 | ); 86 | 87 | create unique index fulfillments_transfer_id_index on "L_FULFILLMENTS" 88 | ("TRANSFER_ID"); 89 | 90 | 91 | INSERT INTO "L_LU_REJECTION_REASON" ("REJECTION_REASON_ID", "NAME", "DESCRIPTION") 92 | VALUES (0, 'cancelled', 'The transfer was cancelled'); 93 | INSERT INTO "L_LU_REJECTION_REASON" ("REJECTION_REASON_ID", "NAME", "DESCRIPTION") 94 
| VALUES (1, 'expired', 'The transfer expired automatically'); 95 | INSERT INTO "L_LU_TRANSFER_STATUS" ("STATUS_ID", "NAME") VALUES (0, 'proposed'); 96 | INSERT INTO "L_LU_TRANSFER_STATUS" ("STATUS_ID", "NAME") VALUES (1, 'prepared'); 97 | INSERT INTO "L_LU_TRANSFER_STATUS" ("STATUS_ID", "NAME") VALUES (2, 'executed'); 98 | INSERT INTO "L_LU_TRANSFER_STATUS" ("STATUS_ID", "NAME") VALUES (3, 'rejected'); 99 | -------------------------------------------------------------------------------- /src/sql/sqlite3/drop.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS L_TRANSFER_ADJUSTMENTS; 2 | DROP TABLE IF EXISTS L_ACCOUNTS; 3 | DROP TABLE IF EXISTS L_LU_REJECTION_REASON; 4 | DROP TABLE IF EXISTS L_LU_TRANSFER_STATUS; 5 | DROP TABLE IF EXISTS L_ENTRIES; 6 | DROP TABLE IF EXISTS L_FULFILLMENTS; 7 | DROP TABLE IF EXISTS L_TRANSFERS; 8 | -------------------------------------------------------------------------------- /test/.eslintrc: -------------------------------------------------------------------------------- 1 | extends: ../.eslintrc 2 | env: 3 | mocha: true 4 | -------------------------------------------------------------------------------- /test/configSpec.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const _ = require('lodash') 4 | const chai = require('chai') 5 | const expect = chai.expect 6 | const loadConfig = require('../src/lib/config') 7 | const logger = require('../src/services/log') 8 | const logHelper = require('./helpers/log') 9 | 10 | const originalEnv = _.cloneDeep(process.env) 11 | describe('loadConfig', () => { 12 | logHelper(logger) 13 | 14 | beforeEach(() => { 15 | process.env = _.cloneDeep(originalEnv) 16 | }) 17 | 18 | describe('config.default_admin', () => { 19 | const defaults = { 20 | user: 'admin' 21 | } 22 | 23 | it('LEDGER_ADMIN_PASS=foo', () => { 24 | process.env.LEDGER_ADMIN_PASS = 'foo' 25 | const admin = _.defaults({ 
26 | pass: 'foo' 27 | }, defaults) 28 | const _config = loadConfig() 29 | expect(_config.default_admin).to.deep.equal(admin) 30 | }) 31 | 32 | it('LEDGER_ADMIN_TLS_FINGERPRINT=foo', () => { 33 | process.env.LEDGER_ADMIN_TLS_FINGERPRINT = 'foo' 34 | const admin = _.defaults({ 35 | fingerprint: 'foo' 36 | }, defaults) 37 | const _config = loadConfig() 38 | expect(_config.default_admin).to.deep.equal(admin) 39 | }) 40 | }) 41 | 42 | describe('config.currency', () => { 43 | const defaults = { 44 | code: null, 45 | symbol: null 46 | } 47 | 48 | it('returns defaults', () => { 49 | const _config = loadConfig() 50 | expect(_config.currency).to.deep.equal(defaults) 51 | }) 52 | 53 | it('LEDGER_CURRENCY_CODE=foo', () => { 54 | process.env.LEDGER_CURRENCY_CODE = 'foo' 55 | const currency = _.defaults({ 56 | code: 'foo' 57 | }, defaults) 58 | const _config = loadConfig() 59 | expect(_config.currency).to.deep.equal(currency) 60 | }) 61 | 62 | it('LEDGER_CURRENCY_SYMBOL=foo', () => { 63 | process.env.LEDGER_CURRENCY_SYMBOL = 'foo' 64 | const currency = _.defaults({ 65 | symbol: 'foo' 66 | }, defaults) 67 | const _config = loadConfig() 68 | expect(_config.currency).to.deep.equal(currency) 69 | }) 70 | }) 71 | 72 | describe('config.authTokenSecret', () => { 73 | it('returns defaults', () => { 74 | const _config = loadConfig() 75 | expect(_config.authTokenSecret).to.be.an.instanceof(Buffer) 76 | expect(_config.authTokenSecret.length).to.equal(32) 77 | }) 78 | }) 79 | 80 | describe('config.ilp', () => { 81 | const defaults = { 82 | prefix: null 83 | } 84 | 85 | it('returns defaults', () => { 86 | const _config = loadConfig() 87 | expect(_config.ilp).to.deep.equal(defaults) 88 | }) 89 | 90 | it('LEDGER_ILP_PREFIX=example.red.', () => { 91 | process.env.LEDGER_ILP_PREFIX = 'example.red.' 92 | const ilp = { 93 | prefix: 'example.red.' 
94 | } 95 | const _config = loadConfig() 96 | expect(_config.ilp).to.deep.equal(ilp) 97 | }) 98 | }) 99 | 100 | describe('config.features', () => { 101 | const defaults = { 102 | hasCreditAuth: false 103 | } 104 | 105 | it('returns default features config when no env vars set', () => { 106 | const _config = loadConfig() 107 | expect(_config.features).to.deep.equal(defaults) 108 | }) 109 | 110 | it('LEDGER_FEATURE_CREDIT_AUTH=1', () => { 111 | process.env.LEDGER_FEATURE_CREDIT_AUTH = '1' 112 | const features = _.defaults({ 113 | hasCreditAuth: true 114 | }, defaults) 115 | const _config = loadConfig() 116 | expect(_config.features).to.deep.equal(features) 117 | }) 118 | }) 119 | 120 | describe('config.amount', () => { 121 | const defaults = { 122 | precision: 19, 123 | scale: 9 124 | } 125 | 126 | it('returns default amount config when no env vars set', () => { 127 | const _config = loadConfig() 128 | expect(_config.amount).to.deep.equal(defaults) 129 | }) 130 | 131 | it('LEDGER_AMOUNT_SCALE=7', () => { 132 | process.env.LEDGER_AMOUNT_SCALE = '7' 133 | const amount = _.defaults({ 134 | scale: 7 135 | }, defaults) 136 | const _config = loadConfig() 137 | expect(_config.amount).to.deep.equal(amount) 138 | }) 139 | 140 | it('LEDGER_AMOUNT_PRECISION=20', () => { 141 | process.env.LEDGER_AMOUNT_PRECISION = '20' 142 | const amount = _.defaults({ 143 | precision: 20 144 | }, defaults) 145 | const _config = loadConfig() 146 | expect(_config.amount).to.deep.equal(amount) 147 | }) 148 | }) 149 | 150 | describe('config.keys', () => { 151 | const testDefault = { 152 | ed25519: { 153 | secret: 'lu+43o/0NUeF5iJTHXQQY6eqMaY06Xx6G1ABc6q1UQk=', 154 | public: 'YXg177AOkDlGGrBaoSET+UrMscbHGwFXHqfUMBZTtCY=' 155 | } 156 | } 157 | 158 | describe('when testing', () => { 159 | it('returns test defaults', () => { 160 | const _config = loadConfig() 161 | expect(_config.keys).to.deep.equal(testDefault) 162 | }) 163 | }) 164 | }) 165 | 166 | describe('config.recommendedConnectors', () => { 167 
| it('defaults to undefined', () => { 168 | const _config = loadConfig() 169 | expect(_config.recommendedConnectors).to.equal(undefined) 170 | }) 171 | 172 | it('is undefined when recommendedConnectors is "*"', () => { 173 | process.env.LEDGER_RECOMMENDED_CONNECTORS = '*' 174 | const _config = loadConfig() 175 | expect(_config.recommendedConnectors).to.equal(undefined) 176 | }) 177 | 178 | it('is a list of connector names, otherwise', () => { 179 | process.env.LEDGER_RECOMMENDED_CONNECTORS = 'alice,bob,carl' 180 | const _config = loadConfig() 181 | expect(_config.recommendedConnectors).to.deep.equal(['alice', 'bob', 'carl']) 182 | }) 183 | }) 184 | }) 185 | -------------------------------------------------------------------------------- /test/data/accounts.json: -------------------------------------------------------------------------------- 1 | { 2 | "admin": { 3 | "id": "http://localhost/accounts/admin", 4 | "name": "admin", 5 | "balance": "0", 6 | "password": "admin", 7 | "is_admin": true, 8 | "is_disabled": false, 9 | "minimum_allowed_balance": "0" 10 | }, 11 | "alice": { 12 | "id": "http://localhost/accounts/alice", 13 | "name": "alice", 14 | "balance": "100", 15 | "password": "alice", 16 | "is_disabled": false, 17 | "minimum_allowed_balance": "0" 18 | }, 19 | "bob": { 20 | "id": "http://localhost/accounts/bob", 21 | "name": "bob", 22 | "balance": "0", 23 | "password": "bob", 24 | "is_disabled": false, 25 | "minimum_allowed_balance": "0" 26 | }, 27 | "candice": { 28 | "id": "http://localhost/accounts/candice", 29 | "name": "candice", 30 | "balance": "50", 31 | "password": "candice", 32 | "is_disabled": false, 33 | "minimum_allowed_balance": "0" 34 | }, 35 | "dave": { 36 | "id": "http://localhost/accounts/dave", 37 | "name": "dave", 38 | "balance": "0", 39 | "password": "dave", 40 | "is_disabled": false, 41 | "minimum_allowed_balance": "0" 42 | }, 43 | "eve": { 44 | "id": "http://localhost/accounts/eve", 45 | "name": "eve", 46 | "balance": "50", 47 | 
"is_disabled": false, 48 | "minimum_allowed_balance": "0" 49 | }, 50 | "trader": { 51 | "id": "http://localhost/accounts/trader", 52 | "name": "trader", 53 | "balance": "0", 54 | "is_disabled": false, 55 | "minimum_allowed_balance": "0" 56 | }, 57 | "disabledAccount": { 58 | "id": "http://localhost/accounts/disabled", 59 | "name": "disabled", 60 | "balance": "150", 61 | "password": "disabled", 62 | "is_disabled": true, 63 | "minimum_allowed_balance": "0" 64 | }, 65 | "infiniteMinBalance": { 66 | "id": "http://localhost/accounts/infiniteminbal", 67 | "name": "infiniteminbal", 68 | "balance": "0", 69 | "password": "infiniteminbal", 70 | "is_disabled": false, 71 | "minimum_allowed_balance": "-infinity" 72 | }, 73 | "finiteMinBalance": { 74 | "id": "http://localhost/accounts/finiteminbal", 75 | "name": "finiteminbal", 76 | "balance": "0", 77 | "password": "finiteminbal", 78 | "is_disabled": false, 79 | "minimum_allowed_balance": "-100" 80 | }, 81 | "unspecifiedMinBalance": { 82 | "id": "http://localhost/accounts/nominbal", 83 | "name": "nominbal", 84 | "balance": "0", 85 | "password": "nominbal", 86 | "is_disabled": false 87 | }, 88 | "noBalance": { 89 | "id": "http://localhost/accounts/nobalance", 90 | "name": "nobalance", 91 | "password": "nobalance", 92 | "is_disabled": false, 93 | "minimum_allowed_balance": "-100" 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /test/data/fulfillments/cancellation.json: -------------------------------------------------------------------------------- 1 | { 2 | "condition_fulfillment": "oAiABmNhbmNlbA", 3 | "fulfillment_data": "BCBC" 4 | } 5 | -------------------------------------------------------------------------------- /test/data/fulfillments/execution.json: -------------------------------------------------------------------------------- 1 | { 2 | "condition_fulfillment": "oAmAB2V4ZWN1dGU", 3 | "fulfillment_data": "ABAB" 4 | } 5 | 
-------------------------------------------------------------------------------- /test/data/fulfillments/executionInvalid.json: -------------------------------------------------------------------------------- 1 | { 2 | "condition_fulfillment": "oAmAB56LXsXnLrU", 3 | "fulfillment_data": "ABAB" 4 | } 5 | -------------------------------------------------------------------------------- /test/data/fulfillments/executionNoData.json: -------------------------------------------------------------------------------- 1 | { 2 | "condition_fulfillment": "oAmAB2V4ZWN1dGU" 3 | } 4 | -------------------------------------------------------------------------------- /test/data/fulfillments/executionTypeAnd.json: -------------------------------------------------------------------------------- 1 | { 2 | "condition_fulfillment": "ohugF6AJgAdleGVjdXRloAqACGV4ZWN1dGUyoQA", 3 | "fulfillment_data": "CDCD" 4 | } 5 | -------------------------------------------------------------------------------- /test/data/messages/fromto.json: -------------------------------------------------------------------------------- 1 | { 2 | "ledger": "http://localhost", 3 | "from": "http://localhost/accounts/alice", 4 | "to": "http://localhost/accounts/bob", 5 | "data": {"foo": "bar"} 6 | } 7 | -------------------------------------------------------------------------------- /test/data/messages/simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "ledger": "http://localhost", 3 | "account": "http://localhost/accounts/bob", 4 | "data": {"foo": "bar"} 5 | } 6 | -------------------------------------------------------------------------------- /test/data/notificationDatabaseEntry.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "89ae630b-959a-47cc-adcf-d7be85e310c0", 3 | "subscription_id": "f49697a6-d52c-4f46-84c8-9070a31feab7", 4 | "transfer_id": "155dff3f-4915-44df-a707-acc4b527bcbd" 5 | } 6 | 
-------------------------------------------------------------------------------- /test/data/notificationDeletedSubscription.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "4e702fea-7fd3-463f-97b2-3f00ad17cae5", 3 | "subscription_id": "a92e7294-29c5-4a49-9790-cb1fe7924e6f", 4 | "transfer_id": "155dff3f-4915-44df-a707-acc4b527bcbd" 5 | } 6 | -------------------------------------------------------------------------------- /test/data/notificationResponse.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "http://localhost/subscriptions/f49697a6-d52c-4f46-84c8-9070a31feab7/notifications/89ae630b-959a-47cc-adcf-d7be85e310c0", 3 | "subscription": "http://localhost/subscriptions/f49697a6-d52c-4f46-84c8-9070a31feab7", 4 | "event": "transfer.update", 5 | "resource": { 6 | "debits": [ 7 | { 8 | "account": "http://localhost/accounts/alice", 9 | "amount": "10", 10 | "authorized": true 11 | } 12 | ], 13 | "credits": [ 14 | { 15 | "account": "http://localhost/accounts/bob", 16 | "amount": "10" 17 | } 18 | ], 19 | "id": "http://localhost/transfers/155dff3f-4915-44df-a707-acc4b527bcbd", 20 | "ledger": "http://localhost", 21 | "state": "executed" 22 | }, 23 | "related_resources": { 24 | "execution_condition_fulfillment": "oAmAB2V4ZWN1dGU" 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /test/data/private.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | Proc-Type: 4,ENCRYPTED 3 | DEK-Info: DES-EDE3-CBC,264DE34D41E4B316 4 | 5 | cEI22ZJlBs+7PeM6shan2t/r+5JhjXjEVq3LioY5pUanFvkjBAk/h0jim+oK6Rb+ 6 | PtdB5mTduqcWnQauQXQWmIiGmDws0gw1OdrzRhDAw3hLr7BWHYj0gjgXq3j+mxyU 7 | 4xEPe+zsNP3L9v+X9viHfzSDzldnBRKfG68vJTCZ8Teaf3k3DKHOUdXa73Nv2/ke 8 | ZxH1R6pwaocFqdVYURm9JbSPESJ73J7HJCkvrnck1n3CMPRMb0BbHom2rW+Hu83U 9 | 
C0yuimpW3st0il+yFhsCX+WTEBr2nIRKRIKkyF219T3UhNlkZRnbaP39+X4HMimC 10 | 3tn9YXfwE3EJgWSLN2QLYAg++devIsknBXqkmzs9aGpRbJ/ZzDH/6TZ+9sLTJ7GA 11 | /DzqdrAKNb2D48iYsUtA0DwrO9t2DR1mWVPgztcmdjHC8xMWmFe0DFde/CS1ICFU 12 | zKnvH2mXJdGUygELH/ca9Qfc0X+oS3lASWi0pHj2MuUfyoE3/+sgipcg0aY/o1qj 13 | QUCnHdWXJOM/NeSINufi3RFcBBmBErnRHbnnjcs06I8Rb7+0a9Tci8XkGFmKDL+a 14 | Zyv6Yz3bmIoycMqCFLW2K+Vibb8VWeBedJkzKefq5+qRiXZgMe/aA/ebKlA2/2eY 15 | nXoHmmoCUpk02cWddyroGrCoTH1mKvJUMUZUlIy1hThloo/wV4xQPKu7KlhO7+XP 16 | TO48UjCsjmK5WYD8RQCvJnFJLsZuUTvNCdJE3knWUSr6QaGn0I41//F0Zh1uJ/Nv 17 | YiZWtWhrAG6S0rQ6snJVfEu4JCLA5MwEHiUAu1aeAu5fvTwGm4VUZLaU7UIXqiQI 18 | GxeV70STcZstzLLQcZg/pVSsTedksU3ycE54Betuns+dyA51xbV78AQpLLfQbhis 19 | hO+qSWZcHPHTtm4CZhhS5vtbLgyVUqQtZHA7TocizhvPtUPlzGS2ogauzm7clHS1 20 | cahQHPmDDnWrpcdZDbEoh8RHmT+5ryIUpUL2h1J5IdvjCiVvbm8cGov5BZlmoDB2 21 | CGdgIjsYQvMuG/ZYStyStNbq6zqcxbRWKFub3C8HHeo5ODnHyTAdVAKNifgAVeXl 22 | 6tECYtV5BwRRBQ4hB4ufqbh3zczUw0iCFp5CFSiPhbLoYyZiPLZD/rmf6FSN74Tw 23 | ViGtlo/Xa5NiADSZQf3G4Fr3m/kEx+733wKrHno+ggUBnAi+ziqMsSPdF9z2p3S3 24 | VoBpq9ketH8Sh4kU0sfWPGvPGqYZyGjvP6GxHlK3ZMPLNW88vLYuNep97HgtDj8/ 25 | Wp0yVTUpUYFX9UrMtE9CcfBdUws9ZayBdzwc1VzS4qkf8o7vZrIvCAmFv/dVnpeP 26 | BUXHtxXC5A7JMf9EwxJcZvlHeyIpMTqvVC5lVK7joLXno+FkqKNCfW3/617Pnmd6 27 | fjrgxpa2Qzi3Ro0B/TfS+XD7caFdLkg9gaMCbHFYST7pT2wBbZH7A8xsbe5fIMdi 28 | /MMZuknYTn6GvL7YecdT8W8NnxcjYEmOgi19Woe4B4Fz9Kz/t+6mTCQJiNjiN/m3 29 | I3HHJHmqzsH+zvONjnQWcmmrYrqFZ4mCzTwliNF4CulZcXCPHj1HCD6TcnsPK+yw 30 | -----END RSA PRIVATE KEY----- 31 | -------------------------------------------------------------------------------- /test/data/public.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN PUBLIC KEY----- 2 | MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA7RPQYrJY9+aPr55Nz0xE 3 | aQQaRdW28Wl9wcSAFhub7QBHrikGezgq8zlLz9WOOGpYBQ2GXHuNP4z87daWm7+e 4 | HbBNSb1KqGve7G3xbbxiKmYhIcuH9PjI6+fe7SrRgLZAqm3uu6LsnqJfrg8OmGNI 5 | TesbJXf4t+BQn8e+S5Haso4zdRxeKX9+8kgNBfoZgpTmKovT7voxgWTsDckmEADC 6 
| AIwO8HLdzRCPx1kZE5twb31VIEEbEEvfiI7Rx/M1zoVFrKkYFAeKBY4wcf9gK/7G 7 | kxsDR9KZ9ZIz8pGuYA3onpIFgGIs0B81FzEaknFk+MKkekF5EEOUhICNCJxQWVZc 8 | MQIDAQAB 9 | -----END PUBLIC KEY----- 10 | -------------------------------------------------------------------------------- /test/data/signKeyRSAPrv.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIEpAIBAAKCAQEAtVGc36A/gLrkf4I3audrSkW1RAGhRlOWM9ZTLMGUd33cq6vh 3 | Qh5uOJub5du0qKRWCAcV6sUM/xNDereVD6ES/GhUBtry/aQ1rWQ/nxqjr8CVFten 4 | 9icqK0FwTaHmYUlB5PL4rwhQ/sN33ANVE+TY99R0wsLCWB51xwxvZPSX/pjQsRKl 5 | NOKHhQa++AnEtxXVyGY46mIwlqHEzwnPesd5LRC62ZiQVBHYZ2mPePK9aftOBurL 6 | wrj9pcRouh3eaDnrNMawWfFXgsujE9hxiJTRSfsCJgmqbhR9onrCtxZORVS3AR6h 7 | RqJM92opqiAHzu+poO2FaPgnFpoEat7mUOYyawIDAQABAoIBAEYws8O16x9QOghK 8 | Rl2x/EyiM4uB/gaVcP3cKBKZVXn0VSp3vRm1YgGz62PN61+lTlCJPajGY0OTF3o3 9 | +xIzzn+7e1u0AoDBwq2+swpKa7crnAtdrH2Tp7Va/bhvxhk/HK0eibpoQ8J7tLiM 10 | 6bBr1LoLmq+SaV1nXTySycSREw3RPrPw+JJ3pjaSSDDD1RoaA0gzKUKatdlC7n0h 11 | RQqGYCmt0m/b/gTT2EHMobpVwNMuZKVtU+wkCSB+isvuT2kPeBBePw1yMbbUN7nS 12 | VUzBBW8LapnUc2CdPfM5IkbKLiq5tAK0JowwyyTh3hIlJLvRD5/1MD6MLHmZT5Gc 13 | nDeDQjECgYEA7wF/FHJn5ygxawdUJIplTsDIdfIFWeXxiv/kiggt/4jPsBGLxYWS 14 | /XRlPYmCVtdMEiNuQqXnnYa/us1Eh4FX5cTbNCmb0mrQJK58JT+1xckNzHch7SMf 15 | 7xVk8V8Pe+Vei7gD6tYeGNAoszM0lRAeZN/gfEygWEwea+JCjqApYmUCgYEAwjYR 16 | aTXAoOfBqeHGavovRbiKmETUeApD73Xn/8N9+zSCoVZYFwXvWuwvxnPVb9On80IE 17 | GZ28jkU+ul9OZdJ9qxt1OntGX0ij5rZ3+bZAMiZx90pKbx6r1n4pCRCzhhm7X5yS 18 | mOQAJNbIWFVtObQK7qZ77gzcFgNbHMCXhVwFjI8CgYA1DRz7GeXsTmShwC+yKLIS 19 | CAK3A3CiosK5utEsvmIPbXvENjDTODIHZlWukh5HTmTJReoMiyzdL3LRcQy8Ez7C 20 | dq69Zlr7GtB2w/A0iG69p0IT14HFVF2bQSF13kbelmwlEUR1VO+7TEEsfbo3OCu2 21 | 8mau0Idq8gJyqfdiFzT4bQKBgQC++aYDXF0wnNVetWqE9cIYgFChTUXVSNrnFX1u 22 | jxFKPaea+SokcayeLxUHxA4FKg/EcI1Rlz+8czEs2d1g2SzEXnrXaDOaLPcnPxUZ 23 | U/MqnY2CC+bIAFQf1u2JQYgXfSQ47+FkQQg3MZHO8yo2DOqXJNVHEBQ3/VLSIjyd 24 | 
YhD+1wKBgQCCE4dE5B0SlsapKEKvdPEWP20HyIAVUjUbp4OlKUOleLz6k52rK4hm 25 | h2b/ltPE8c201zw5ksgsEPSEkEFs2xjY79o71HZTSEXl09W62t96aF0ff03ynH/P 26 | KSJl2WsWFn30tqPtLk74kGlwpZnzgmjWikk3zER+hBnPSvBgjQmKLA== 27 | -----END RSA PRIVATE KEY----- 28 | -------------------------------------------------------------------------------- /test/data/signKeyRSAPub.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN PUBLIC KEY----- 2 | MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAtVGc36A/gLrkf4I3audr 3 | SkW1RAGhRlOWM9ZTLMGUd33cq6vhQh5uOJub5du0qKRWCAcV6sUM/xNDereVD6ES 4 | /GhUBtry/aQ1rWQ/nxqjr8CVFten9icqK0FwTaHmYUlB5PL4rwhQ/sN33ANVE+TY 5 | 99R0wsLCWB51xwxvZPSX/pjQsRKlNOKHhQa++AnEtxXVyGY46mIwlqHEzwnPesd5 6 | LRC62ZiQVBHYZ2mPePK9aftOBurLwrj9pcRouh3eaDnrNMawWfFXgsujE9hxiJTR 7 | SfsCJgmqbhR9onrCtxZORVS3AR6hRqJM92opqiAHzu+poO2FaPgnFpoEat7mUOYy 8 | awIDAQAB 9 | -----END PUBLIC KEY----- 10 | -------------------------------------------------------------------------------- /test/data/subscriptions/alice.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "http://localhost/subscriptions/f49697a6-d52c-4f46-84c8-9070a31feab7", 3 | "owner": "http://localhost/accounts/alice", 4 | "subject": "http://localhost/accounts/alice", 5 | "event": "transfer.update", 6 | "target": "http://subscriber.example/notifications" 7 | } 8 | -------------------------------------------------------------------------------- /test/data/subscriptions/bob.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "http://localhost/subscriptions/a92e7294-29c5-4a49-9790-cb1fe7924e6d", 3 | "owner": "http://localhost/accounts/bob", 4 | "subject": "http://localhost/accounts/bob", 5 | "event": "transfer.update", 6 | "target": "http://subscriber.example/notifications" 7 | } 8 | -------------------------------------------------------------------------------- /test/data/subscriptions/deleted.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "id": "http://localhost/subscriptions/a92e7294-29c5-4a49-9790-cb1fe7924e6f", 3 | "owner": "http://localhost/accounts/mark", 4 | "subject": "http://localhost/accounts/mark", 5 | "event": "transfer.update", 6 | "target": "http://subscriber.example/notifications", 7 | "is_deleted": true 8 | } 9 | -------------------------------------------------------------------------------- /test/data/transfers/executed.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "http://localhost/transfers/9e97a403-f604-44de-9223-4ec36aa466d9", 3 | "ledger": "http://localhost", 4 | "debits": [{ 5 | "account": "http://localhost/accounts/alice", 6 | "amount": "10", 7 | "authorized": true 8 | }], 9 | "credits": [{ 10 | "account": "http://localhost/accounts/bob", 11 | "amount": "10" 12 | }], 13 | "execution_condition": "ni:///sha-256;vmvf6B7EpFalN6RGDx9F4f4z0wtOIgsIdCmbgv06ceI?fpt=preimage-sha-256&cost=7", 14 | "cancellation_condition": "ni:///sha-256;I3TZF5S3n0-07JWH0s8ArsxPmVP6s-0d0SqxR6C3Ifk?fpt=preimage-sha-256&cost=6", 15 | "state": "executed" 16 | } 17 | -------------------------------------------------------------------------------- /test/data/transfers/fromDisabledAccount.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "http://localhost/transfers/ba6795b4-162a-4192-9eac-1a6f42a3afb3", 3 | "ledger": "http://localhost", 4 | "debits": [{ 5 | "account": "http://localhost/accounts/disabledAccount", 6 | "amount": "10", 7 | "authorized": true 8 | }], 9 | "credits": [{ 10 | "account": "http://localhost/accounts/bob", 11 | "amount": "10" 12 | }] 13 | } 14 | -------------------------------------------------------------------------------- /test/data/transfers/fromEve.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": 
"http://localhost/transfers/255dff3f-4915-44df-a707-acc4b527bcbf", 3 | "ledger": "http://localhost", 4 | "debits": [{ 5 | "account": "http://localhost/accounts/eve", 6 | "amount": "10", 7 | "authorized": true 8 | }], 9 | "credits": [{ 10 | "account": "http://localhost/accounts/bob", 11 | "amount": "10" 12 | }] 13 | } 14 | -------------------------------------------------------------------------------- /test/data/transfers/fromFiniteMinBalance.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "http://localhost/transfers/d5ffa750-cf02-4a6f-9e85-5f6ab0b0f151", 3 | "ledger": "http://localhost", 4 | "debits": [{ 5 | "account": "http://localhost/accounts/finiteminbal", 6 | "amount": "50", 7 | "authorized": true 8 | }], 9 | "credits": [{ 10 | "account": "http://localhost/accounts/bob", 11 | "amount": "50" 12 | }] 13 | } 14 | -------------------------------------------------------------------------------- /test/data/transfers/fromInfiniteMinBalance.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "http://localhost/transfers/4e961931-c822-4fc4-8691-abb6a4a0b920", 3 | "ledger": "http://localhost", 4 | "debits": [{ 5 | "account": "http://localhost/accounts/infiniteminbal", 6 | "amount": "100000", 7 | "authorized": true 8 | }], 9 | "credits": [{ 10 | "account": "http://localhost/accounts/bob", 11 | "amount": "100000" 12 | }] 13 | } 14 | -------------------------------------------------------------------------------- /test/data/transfers/fromNoBalanceAccount.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "http://localhost/transfers/d5ffa750-cf02-4a6f-9e85-5f6ab0b0f151", 3 | "ledger": "http://localhost", 4 | "debits": [{ 5 | "account": "http://localhost/accounts/nobalance", 6 | "amount": "50", 7 | "authorized": true 8 | }], 9 | "credits": [{ 10 | "account": "http://localhost/accounts/bob", 11 | "amount": "50" 12 | }] 
13 | } -------------------------------------------------------------------------------- /test/data/transfers/fromZeroMinBalance.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "http://localhost/transfers/f1ec03c9-4a81-4f36-bdc8-1e8da056b317", 3 | "ledger": "http://localhost", 4 | "debits": [{ 5 | "account": "http://localhost/accounts/alice", 6 | "amount": "200", 7 | "authorized": true 8 | }], 9 | "credits": [{ 10 | "account": "http://localhost/accounts/bob", 11 | "amount": "200" 12 | }] 13 | } 14 | -------------------------------------------------------------------------------- /test/data/transfers/multiCredit.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "http://localhost/transfers/3a8ce37e-3bb4-430d-82f5-713bafe4d9fc", 3 | "ledger": "http://localhost", 4 | "debits": [{ 5 | "account": "http://localhost/accounts/alice", 6 | "amount": "20", 7 | "authorized": true 8 | }], 9 | "credits": [{ 10 | "account": "http://localhost/accounts/bob", 11 | "amount": "10" 12 | }, { 13 | "account": "http://localhost/accounts/dave", 14 | "amount": "10" 15 | }] 16 | } 17 | -------------------------------------------------------------------------------- /test/data/transfers/multiDebit.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "http://localhost/transfers/0a95bb26-ff60-454e-9ec2-6621b78233ab", 3 | "ledger": "http://localhost", 4 | "debits": [{ 5 | "account": "http://localhost/accounts/alice", 6 | "amount": "10", 7 | "authorized": true 8 | }, { 9 | "account": "http://localhost/accounts/candice", 10 | "amount": "10", 11 | "authorized": true 12 | }], 13 | "credits": [{ 14 | "account": "http://localhost/accounts/bob", 15 | "amount": "20" 16 | }] 17 | } 18 | -------------------------------------------------------------------------------- /test/data/transfers/multiDebitAndCredit.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "id": "http://localhost/transfers/3a2a1d9e-8640-4d2d-b06c-84f2cd613204", 3 | "ledger": "http://localhost", 4 | "debits": [{ 5 | "account": "http://localhost/accounts/alice", 6 | "amount": "50", 7 | "authorized": true 8 | }, { 9 | "account": "http://localhost/accounts/candice", 10 | "amount": "20", 11 | "authorized": true 12 | }], 13 | "credits": [{ 14 | "account": "http://localhost/accounts/bob", 15 | "amount": "30" 16 | }, { 17 | "account": "http://localhost/accounts/dave", 18 | "amount": "40" 19 | }] 20 | } 21 | -------------------------------------------------------------------------------- /test/data/transfers/noAuthorization.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "http://localhost/transfers/44793135-7022-4fd7-bd00-3a14c1e896c6", 3 | "ledger": "http://localhost", 4 | "debits": [{ 5 | "account": "http://localhost/accounts/alice", 6 | "amount": "10" 7 | }], 8 | "credits": [{ 9 | "account": "http://localhost/accounts/bob", 10 | "amount": "10" 11 | }], 12 | "state": "proposed", 13 | "expires_at": "2015-06-16T00:00:10.000Z" 14 | } 15 | -------------------------------------------------------------------------------- /test/data/transfers/prepared.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "http://localhost/transfers/4e36fe38-8171-4aab-b60e-08d4b56fbbf1", 3 | "ledger": "http://localhost", 4 | "debits": [{ 5 | "account": "http://localhost/accounts/alice", 6 | "amount": "10", 7 | "authorized": true 8 | }], 9 | "credits": [{ 10 | "account": "http://localhost/accounts/bob", 11 | "amount": "10" 12 | }], 13 | "execution_condition": "ni:///sha-256;vmvf6B7EpFalN6RGDx9F4f4z0wtOIgsIdCmbgv06ceI?fpt=preimage-sha-256&cost=7", 14 | "cancellation_condition": "ni:///sha-256;I3TZF5S3n0-07JWH0s8ArsxPmVP6s-0d0SqxR6C3Ifk?fpt=preimage-sha-256&cost=6", 15 | "state": "prepared" 16 
| } 17 | -------------------------------------------------------------------------------- /test/data/transfers/proposed.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "http://localhost/transfers/25644640-d140-450e-b94b-badbe23d3389", 3 | "ledger": "http://localhost", 4 | "debits": [{ 5 | "account": "http://localhost/accounts/alice", 6 | "amount": "10" 7 | }], 8 | "credits": [{ 9 | "account": "http://localhost/accounts/bob", 10 | "amount": "10" 11 | }], 12 | "execution_condition": "ni:///sha-256;vmvf6B7EpFalN6RGDx9F4f4z0wtOIgsIdCmbgv06ceI?fpt=preimage-sha-256&cost=7", 13 | "cancellation_condition": "ni:///sha-256;I3TZF5S3n0-07JWH0s8ArsxPmVP6s-0d0SqxR6C3Ifk?fpt=preimage-sha-256&cost=6", 14 | "state": "proposed" 15 | } 16 | -------------------------------------------------------------------------------- /test/data/transfers/simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "http://localhost/transfers/155dff3f-4915-44df-a707-acc4b527bcbd", 3 | "ledger": "http://localhost", 4 | "debits": [{ 5 | "account": "http://localhost/accounts/alice", 6 | "amount": "10", 7 | "authorized": true 8 | }], 9 | "credits": [{ 10 | "account": "http://localhost/accounts/bob", 11 | "amount": "10" 12 | }], 13 | "state": "executed" 14 | } 15 | -------------------------------------------------------------------------------- /test/data/transfers/simpleWithExpiry.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "http://localhost/transfers/155dff3f-4915-44df-a707-acc4b527bcbd", 3 | "ledger": "http://localhost", 4 | "debits": [{ 5 | "account": "http://localhost/accounts/alice", 6 | "amount": "10", 7 | "authorized": true 8 | }], 9 | "credits": [{ 10 | "account": "http://localhost/accounts/bob", 11 | "amount": "10" 12 | }], 13 | "state": "executed", 14 | "execution_condition": 
"ni:///sha-256;vmvf6B7EpFalN6RGDx9F4f4z0wtOIgsIdCmbgv06ceI?fpt=preimage-sha-256&cost=7", 15 | "expires_at": "2015-06-16T00:00:01.000Z" 16 | } 17 | -------------------------------------------------------------------------------- /test/data/transfers/toDisabledAccount.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "http://localhost/transfers/ba6795b4-162a-4192-9eac-1a6f42a3afb3", 3 | "ledger": "http://localhost", 4 | "debits": [{ 5 | "account": "http://localhost/accounts/bob", 6 | "amount": "10", 7 | "authorized": true 8 | }], 9 | "credits": [{ 10 | "account": "http://localhost/accounts/disabledAccount", 11 | "amount": "10" 12 | }] 13 | } 14 | -------------------------------------------------------------------------------- /test/data/transfers/withAndCondition.json: -------------------------------------------------------------------------------- 1 | { 2 | "ledger": "http://localhost", 3 | "debits": [ 4 | { 5 | "amount": "10", 6 | "account": "http://localhost/accounts/alice", 7 | "authorized": true 8 | } 9 | ], 10 | "credits": [ 11 | { 12 | "account": "http://localhost/accounts/bob", 13 | "amount": "10" 14 | } 15 | ], 16 | "id": "http://localhost/transfers/9e97a403-f604-44de-9223-4ec36aa466d9", 17 | "additional_info": { 18 | "part_of_payment": "http://localhost:3002/payments/14abab3b-c13c-4567-b21f-0094348e9ba7", 19 | "cases": [ 20 | "http://localhost:3003/cases/b1f11915-1fb5-4c4d-9523-8267ecc7f15d" 21 | ] 22 | }, 23 | "execution_condition": "ni:///sha-256;3XlGNxVGkB4YN5hLRRzKK0uZlfMYa-ZT-mIKde_8E1c?fpt=threshold-sha-256&cost=2063&subtypes=preimage-sha-256", 24 | "cancellation_condition": "ni:///sha-256;I3TZF5S3n0-07JWH0s8ArsxPmVP6s-0d0SqxR6C3Ifk?fpt=preimage-sha-256&cost=6", 25 | "state": "proposed" 26 | } 27 | -------------------------------------------------------------------------------- /test/data/transfers/withExpiry.json: -------------------------------------------------------------------------------- 1 | 
{ 2 | "id": "http://localhost/transfers/3a2a1d9e-8640-4d2d-b06c-84f2cd613204", 3 | "ledger": "http://localhost", 4 | "debits": [{ 5 | "account": "http://localhost/accounts/alice", 6 | "amount": "50", 7 | "authorized": true 8 | }, { 9 | "account": "http://localhost/accounts/candice", 10 | "amount": "20", 11 | "authorized": true 12 | }], 13 | "credits": [{ 14 | "account": "http://localhost/accounts/bob", 15 | "amount": "30" 16 | }, { 17 | "account": "http://localhost/accounts/dave", 18 | "amount": "40" 19 | }], 20 | "execution_condition": "ni:///sha-256;Xk14jPQJs7vrbEZ9FkeZPGBr0YqVzkpOYjFp_tIZMgs?fpt=preimage-sha-256&cost=7", 21 | "expires_at": "2015-06-16T00:00:01.000Z", 22 | "state": "prepared" 23 | } 24 | -------------------------------------------------------------------------------- /test/getAuthTokenSpec.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const _ = require('lodash') 4 | const assert = require('assert') 5 | const jwt = require('jsonwebtoken') 6 | const app = require('../src/services/app') 7 | const logger = require('../src/services/log') 8 | const config = require('../src/services/config') 9 | const dbHelper = require('./helpers/db') 10 | const appHelper = require('./helpers/app') 11 | const logHelper = require('./helpers/log') 12 | const accounts = require('./data/accounts') 13 | 14 | describe('GET /auth_token', function () { 15 | logHelper(logger) 16 | 17 | before(async function () { 18 | await dbHelper.init() 19 | }) 20 | 21 | beforeEach(async function () { 22 | appHelper.create(this, app) 23 | await dbHelper.clean() 24 | // Store some example data 25 | await dbHelper.addAccounts(_.values(_.omit(accounts, 'noBalance'))) 26 | }) 27 | 28 | it('returns 200 and a token on success', async function () { 29 | await this.request() 30 | .get('/auth_token') 31 | .auth('alice', 'alice') 32 | .expect(200) 33 | .expect((res) => { 34 | assert.deepEqual(Object.keys(res.body), ['token', 
'token_max_age']) 35 | const token = jwt.verify(res.body.token, config.authTokenSecret) 36 | assert.equal(token.iss, 'http://localhost') 37 | assert.equal(token.sub, 'http://localhost/accounts/alice') 38 | assert.ok(typeof token.iat === 'number') 39 | assert.ok(typeof token.exp === 'number') 40 | assert.equal(res.body.token_max_age, 7 * 24 * 60 * 60 * 1000) 41 | }) 42 | }) 43 | 44 | it('returns 401 when not authenticated', async function () { 45 | await this.request() 46 | .get('/auth_token') 47 | .expect(401) 48 | }) 49 | }) 50 | -------------------------------------------------------------------------------- /test/getTransferSpec.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const _ = require('lodash') 4 | const nock = require('nock') 5 | nock.enableNetConnect(['localhost', '127.0.0.1']) 6 | const app = require('../src/services/app') 7 | const logger = require('../src/services/log') 8 | const dbHelper = require('./helpers/db') 9 | const appHelper = require('./helpers/app') 10 | const logHelper = require('./helpers/log') 11 | const sinon = require('sinon') 12 | const transferExpiryMonitor = require('../src/services/transferExpiryMonitor') 13 | const validator = require('./helpers/validator') 14 | const transferDictionary = require('five-bells-shared').TransferStateDictionary 15 | 16 | const transferStates = transferDictionary.transferStates 17 | 18 | const START_DATE = 1434412800000 // June 16, 2015 00:00:00 GMT 19 | 20 | describe('GET /transfers/:uuid', function () { 21 | logHelper(logger) 22 | 23 | before(async function () { 24 | await dbHelper.init() 25 | }) 26 | 27 | beforeEach(async function () { 28 | appHelper.create(this, app) 29 | await dbHelper.clean() 30 | this.clock = sinon.useFakeTimers(START_DATE, 'Date') 31 | 32 | // Define example data 33 | this.exampleTransfer = _.cloneDeep(require('./data/transfers/simple')) 34 | this.existingTransfer = 
_.cloneDeep(require('./data/transfers/noAuthorization')) 35 | this.multiCreditTransfer = _.cloneDeep(require('./data/transfers/multiCredit')) 36 | this.multiDebitTransfer = _.cloneDeep(require('./data/transfers/multiDebit')) 37 | this.multiDebitAndCreditTransfer = 38 | _.cloneDeep(require('./data/transfers/multiDebitAndCredit')) 39 | this.executedTransfer = _.cloneDeep(require('./data/transfers/executed')) 40 | this.transferWithExpiry = _.cloneDeep(require('./data/transfers/withExpiry')) 41 | 42 | // Store some example data 43 | await dbHelper.addAccounts(_.values(require('./data/accounts'))) 44 | await dbHelper.addTransfers([this.existingTransfer]) 45 | }) 46 | 47 | afterEach(async function () { 48 | nock.cleanAll() 49 | this.clock.restore() 50 | }) 51 | 52 | it('should return 200 for an existing transfer', async function () { 53 | const transfer = this.existingTransfer 54 | await this.request() 55 | .get(transfer.id) 56 | .auth('alice', 'alice') 57 | .expect(200) 58 | .expect(transfer) 59 | .expect(validator.validateTransfer) 60 | }) 61 | 62 | it('should return 404 when the transfer does not exist', async function () { 63 | await this.request() 64 | .get(this.exampleTransfer.id) 65 | .auth('admin', 'admin') 66 | .expect(404) 67 | }) 68 | 69 | it('should return a rejected transfer if the expiry date has passed', async function () { 70 | const transfer = this.transferWithExpiry 71 | delete transfer.debits[0].authorized 72 | delete transfer.debits[1].authorized 73 | 74 | await this.request() 75 | .put(transfer.id) 76 | .auth('alice', 'alice') 77 | .send(transfer) 78 | .expect(201) 79 | .expect(validator.validateTransfer) 80 | 81 | this.clock.tick(1000) 82 | 83 | // In production this function should be triggered by the worker started in app.js 84 | await transferExpiryMonitor.processExpiredTransfers() 85 | 86 | await this.request() 87 | .get(transfer.id) 88 | .auth('alice', 'alice') 89 | .expect(200, _.assign({}, transfer, { 90 | state: 
transferStates.TRANSFER_STATE_REJECTED, 91 | rejection_reason: 'expired', 92 | timeline: { 93 | proposed_at: '2015-06-16T00:00:00.000Z', 94 | rejected_at: transfer.expires_at 95 | } 96 | })) 97 | .expect(validator.validateTransfer) 98 | }) 99 | }) 100 | -------------------------------------------------------------------------------- /test/healthSpec.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const superagent = require('supertest') 4 | const nock = require('nock') 5 | nock.enableNetConnect(['localhost', '127.0.0.1']) 6 | const app = require('../src/services/app') 7 | const dbHelper = require('./helpers/db') 8 | const accounts = require('./data/accounts') 9 | const logger = require('../src/services/log') 10 | const logHelper = require('./helpers/log') 11 | const expect = require('chai').expect 12 | 13 | function request () { 14 | return superagent(app.koa.listen()) 15 | } 16 | 17 | describe('Health', function () { 18 | logHelper(logger) 19 | 20 | before(function * () { 21 | yield dbHelper.init() 22 | yield dbHelper.clean() 23 | yield dbHelper.addAccounts([ 24 | accounts.admin 25 | ]) 26 | }) 27 | 28 | describe('GET /health', function () { 29 | it('should return 200 for an authenticated request', async function () { 30 | await request() 31 | .get('/health') 32 | .auth('admin', 'admin') 33 | .expect(200) 34 | .expect((res) => { 35 | expect(res.body).to.deep.equal({ 36 | status: 'OK' 37 | }) 38 | }) 39 | }) 40 | 41 | it('should return 401 for an unauthenticated request', function * () { 42 | yield request() 43 | .get('/health') 44 | .expect(401) 45 | .end() 46 | }) 47 | }) 48 | }) 49 | -------------------------------------------------------------------------------- /test/helpers/app.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const superagent = require('supertest') 4 | const WebSocket = require('ws') 5 | const methods = require('methods') 
6 | const url = require('url') 7 | 8 | const TEST_HOSTNAME = 'localhost' 9 | const TEST_PORT = null 10 | 11 | /** 12 | * Rewrite a test URL to point to the temporary test app port. 13 | * 14 | * This method will take a URL like 'http://localhost/foo' and make it point to 15 | * the test endpoint, e.g. 'http://localhost:42810/foo'. 16 | * 17 | * @private 18 | */ 19 | function processUrl (inputUrl, port) { 20 | const parsedUrl = url.parse(inputUrl) 21 | 22 | if (parsedUrl.host === TEST_HOSTNAME && 23 | parsedUrl.port === TEST_PORT) { 24 | const path = url.format({ 25 | protocol: 'http', 26 | hostname: 'localhost', 27 | port, 28 | pathname: parsedUrl.pathname, 29 | search: parsedUrl.search 30 | }) 31 | return path 32 | } 33 | 34 | return inputUrl 35 | } 36 | 37 | /** 38 | * Wraps a method to preprocess url parameter. 39 | * 40 | * Modifies a method such that the first parameter is preprocessed into a local 41 | * URL if it matches the TEST_HOSTNAME and TEST_PORT set in this file. 42 | * 43 | * Used internally. 
44 | * 45 | * @private 46 | */ 47 | function wrapMethodUrlPreprocess (method) { 48 | return function (inputUrl, fn) { 49 | const parsedUrl = url.parse(inputUrl) 50 | 51 | // Replace local 52 | if (parsedUrl.host === TEST_HOSTNAME && 53 | parsedUrl.port === TEST_PORT) { 54 | const path = url.format({ 55 | pathname: parsedUrl.pathname, 56 | search: parsedUrl.search 57 | }) 58 | return method.call(this, path, fn) 59 | } 60 | 61 | return method.call(this, inputUrl, fn) 62 | } 63 | } 64 | 65 | methods.forEach(function (method) { 66 | const _previous = superagent.agent.prototype[method] 67 | superagent.agent.prototype[method] = wrapMethodUrlPreprocess(_previous) 68 | }) 69 | 70 | exports.create = function (context, app) { 71 | context.server = app.koa.listen() 72 | const port = context.port = context.server.address().port 73 | 74 | context.request = function () { 75 | const request = superagent(context.server) 76 | 77 | methods.forEach(function (method) { 78 | const _previous = request[method] 79 | request[method] = wrapMethodUrlPreprocess(_previous) 80 | }) 81 | 82 | return request 83 | } 84 | 85 | context.ws = function (uri, protocols, options) { 86 | const processedUri = processUrl(uri, port) 87 | return new WebSocket(processedUri, protocols, options) 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /test/helpers/db.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const db = require('../../src/lib/db') 4 | const insertTransfers = require('../../src/models/transfers').insertTransfers 5 | const insertAccounts = require('../../src/models/accounts').insertAccounts 6 | const insertFulfillments = require('../../src/models/db/fulfillments') 7 | .insertFulfillments 8 | 9 | // Only run migrations once during tests 10 | let init = false 11 | exports.init = async function () { 12 | if (init) { 13 | return 14 | } 15 | await db.dropTables() 16 | await db.createTables() 
17 | await db.readLookupTables() 18 | init = true 19 | } 20 | 21 | exports.clean = async function () { 22 | await db.truncateTables() 23 | } 24 | 25 | exports.addAccounts = async function (accounts) { 26 | if (!Array.isArray(accounts)) { 27 | throw new Error('Requires an array of accounts, got ' + accounts) 28 | } 29 | 30 | await insertAccounts(accounts) 31 | } 32 | 33 | exports.addTransfers = async function (transfers) { 34 | if (!Array.isArray(transfers)) { 35 | throw new Error('Requires an array of transfers, got ' + transfers) 36 | } 37 | await insertTransfers(transfers) 38 | } 39 | 40 | exports.addFulfillments = async function (fulfillments) { 41 | if (!Array.isArray(fulfillments)) { 42 | throw new Error('Requires an array of fulfillments, got ' + fulfillments) 43 | } 44 | await insertFulfillments(fulfillments) 45 | } 46 | -------------------------------------------------------------------------------- /test/helpers/dbAccountsMock.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | function createMock () { 4 | const mockGetAccount = function () { 5 | throw new Error('DB was queried') 6 | } 7 | let mock = {} 8 | mock.getAccount = mockGetAccount 9 | return mock 10 | } 11 | 12 | module.exports = createMock() 13 | -------------------------------------------------------------------------------- /test/helpers/dbFailureMock.js: -------------------------------------------------------------------------------- 1 | const mock = require('mock-require') 2 | 3 | const RealUtils = require('../../src/models/db/utils') 4 | const mockUtils = function () { 5 | const utils = RealUtils.apply(null, arguments) 6 | const realGetTransaction = utils.getTransaction 7 | utils.getTransaction = function (options) { 8 | const transaction = realGetTransaction(options); 9 | ['raw', 'from'].map(fn => { 10 | const tmp = transaction[fn].bind(transaction) 11 | transaction[fn] = function () { 12 | if 
(mockUtils.timesQueryShouldFail[JSON.stringify(arguments)]) { 13 | mockUtils.timesQueryShouldFail[JSON.stringify(arguments)]-- 14 | const err = new Error('Mock database error') 15 | err.code = '40001' 16 | if (fn === 'from') { 17 | return { 18 | select: () => { 19 | return { 20 | where: () => { 21 | return Promise.reject(err) 22 | } 23 | } 24 | } 25 | } 26 | } else { 27 | return Promise.reject(err) 28 | } 29 | } 30 | // console.log('Mockable call to database', fn, arguments, mockUtils.timesQueryShouldFail) 31 | return tmp.apply(transaction, arguments) 32 | } 33 | }) 34 | return transaction 35 | } 36 | return utils 37 | } 38 | mockUtils.timesQueryShouldFail = {} 39 | mock('../../src/models/db/utils', mockUtils) 40 | -------------------------------------------------------------------------------- /test/helpers/log.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | // This helper captures log output for each test and prints it in case of 4 | // failure. This means that a successful test run will only print mocha's output 5 | // whereas a failed run will include more information. 
6 | 7 | const through = require('through2') 8 | const chalk = require('chalk') 9 | 10 | module.exports = function (logger) { 11 | let buffer 12 | if (process.env['SHOW_STDOUT']) { 13 | return 14 | } 15 | beforeEach(function () { 16 | buffer = through() 17 | buffer.pause() 18 | logger.setOutputStream(buffer) 19 | }) 20 | 21 | afterEach(function (done) { 22 | const ARROW_UP = '\u2191' 23 | const ARROW_DOWN = '\u2193' 24 | function format (str, arrow) { 25 | return '\n' + chalk.red(arrow + ' ' + str + ' ' + arrow) + '\n\n' 26 | } 27 | if (this.currentTest.state !== 'passed') { 28 | process.stdout.write(format('stdout for failing test', ARROW_DOWN)) 29 | buffer.pipe(process.stdout, { end: false }) 30 | buffer.end() 31 | logger.setOutputStream(process.stdout) 32 | buffer.on('end', () => { 33 | process.stdout.write(format('stdout for failing test', ARROW_UP)) 34 | done() 35 | }) 36 | } else { 37 | done() 38 | } 39 | }) 40 | } 41 | -------------------------------------------------------------------------------- /test/helpers/timing.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | /** 4 | * Return a promise that resolves after the current event loop iteration. 5 | */ 6 | exports.defer = function () { 7 | return new Promise((resolve) => setImmediate(resolve)) 8 | } 9 | 10 | /** 11 | * Return a promise that resolves after a time. 
12 | * 13 | * @param {Number} duration Wait time in milliseconds 14 | */ 15 | exports.sleep = function (duration) { 16 | return new Promise((resolve) => setTimeout(resolve, duration)) 17 | } 18 | -------------------------------------------------------------------------------- /test/helpers/validator.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | const _ = require('lodash') 3 | const validator = require('../../src/services/validator') 4 | 5 | function validate (schema, json) { 6 | const validatorResult = validator.create(schema)(json) 7 | if (!validatorResult.valid) { 8 | throw new Error(schema + ' schema validation error: ' + JSON.stringify(_.omit(validatorResult.errors[0], ['stack']))) 9 | } 10 | } 11 | 12 | function validateTransfer (res) { 13 | validate('Transfer', res.body) 14 | } 15 | 16 | function validateTransfers (res) { 17 | res.body.forEach((transfer) => { 18 | validateTransfer({body: transfer}) 19 | }) 20 | } 21 | 22 | function validateAccount (res) { 23 | validate('Account', res.body) 24 | } 25 | 26 | function validateAccounts (res) { 27 | res.body.forEach((account) => { 28 | validateAccount({body: account}) 29 | }) 30 | } 31 | 32 | function validateNotification (res) { 33 | validate('Notification', res.body) 34 | } 35 | 36 | function validateFulfillment (res) { 37 | validate('ConditionFulfillment', res.text) 38 | } 39 | 40 | function validateFulfillmentModel (res) { 41 | validate('Fulfillment', res.body) 42 | } 43 | 44 | function validateSubscription (res) { 45 | validate('Subscription', res.body) 46 | } 47 | 48 | function validateTransferStateReceipt (res) { 49 | validate('TransferStateReceipt', res.body) 50 | } 51 | 52 | module.exports = { 53 | validateTransfer, 54 | validateTransfers, 55 | validateAccount, 56 | validateAccounts, 57 | validateFulfillment, 58 | validateFulfillmentModel, 59 | validateNotification, 60 | validateSubscription, 61 | validateTransferStateReceipt 62 | } 63 | 
-------------------------------------------------------------------------------- /test/metadataSpec.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const superagent = require('supertest') 4 | const nock = require('nock') 5 | const expect = require('chai').expect 6 | nock.enableNetConnect(['localhost', '127.0.0.1']) 7 | const App = require('../src/lib/app') 8 | const app = require('../src/services/app') 9 | const logger = require('../src/services/log') 10 | const logHelper = require('./helpers/log') 11 | 12 | function request () { 13 | return superagent(app.koa.listen()) 14 | } 15 | 16 | describe('Metadata', function () { 17 | logHelper(logger) 18 | 19 | delete process.env.LEDGER_AMOUNT_PRECISION 20 | delete process.env.UNIT_TEST_OVERRIDE 21 | 22 | describe('GET /', function () { 23 | it('should return metadata', async function () { 24 | await request() 25 | .get('/') 26 | .expect(200) 27 | .expect(function (res) { 28 | expect(res.body).to.deep.equal({ 29 | currency_code: null, 30 | currency_symbol: null, 31 | ilp_prefix: null, 32 | condition_sign_public_key: 'YXg177AOkDlGGrBaoSET+UrMscbHGwFXHqfUMBZTtCY=', 33 | urls: { 34 | health: 'http://localhost/health', 35 | transfer: 'http://localhost/transfers/:id', 36 | transfer_fulfillment: 'http://localhost/transfers/:id/fulfillment', 37 | transfer_fulfillment2: 'http://localhost/transfers/:id/fulfillment2', 38 | transfer_rejection: 'http://localhost/transfers/:id/rejection', 39 | transfer_state: 'http://localhost/transfers/:id/state', 40 | accounts: 'http://localhost/accounts', 41 | account: 'http://localhost/accounts/:name', 42 | auth_token: 'http://localhost/auth_token', 43 | websocket: 'ws://localhost/websocket', 44 | message: 'http://localhost/messages' 45 | }, 46 | version: 'five-bells@21', 47 | precision: 19, 48 | scale: 9, 49 | connectors: [] 50 | }) 51 | }) 52 | }) 53 | 54 | it('should return metadata when values are set', async function () { 55 | 
delete process.env.UNIT_TEST_OVERRIDE 56 | 57 | process.env.LEDGER_CURRENCY_CODE = 'USD' 58 | process.env.LEDGER_CURRENCY_SYMBOL = '$' 59 | process.env.LEDGER_ILP_PREFIX = 'example.red.' 60 | process.env.LEDGER_RECOMMENDED_CONNECTORS = 'trader' 61 | 62 | const newApp = new App({ 63 | log: require('../src/services/log'), 64 | // required in order to reload environment variables 65 | config: require('../src/lib/config')(), 66 | timerWorker: require('../src/services/timerWorker'), 67 | notificationBroadcaster: require('../src/services/notificationBroadcaster') 68 | }) 69 | const agent = superagent(newApp.koa.listen()) 70 | 71 | await agent 72 | .get('/') 73 | .expect(200) 74 | .expect(function (res) { 75 | expect(res.body).to.deep.equal({ 76 | currency_code: 'USD', 77 | currency_symbol: '$', 78 | ilp_prefix: 'example.red.', 79 | condition_sign_public_key: 'YXg177AOkDlGGrBaoSET+UrMscbHGwFXHqfUMBZTtCY=', 80 | urls: { 81 | health: 'http://localhost/health', 82 | transfer: 'http://localhost/transfers/:id', 83 | transfer_fulfillment: 'http://localhost/transfers/:id/fulfillment', 84 | transfer_fulfillment2: 'http://localhost/transfers/:id/fulfillment2', 85 | transfer_rejection: 'http://localhost/transfers/:id/rejection', 86 | transfer_state: 'http://localhost/transfers/:id/state', 87 | accounts: 'http://localhost/accounts', 88 | account: 'http://localhost/accounts/:name', 89 | auth_token: 'http://localhost/auth_token', 90 | websocket: 'ws://localhost/websocket', 91 | message: 'http://localhost/messages' 92 | }, 93 | version: 'five-bells@21', 94 | precision: 19, 95 | scale: 9, 96 | connectors: [ 97 | { 98 | id: 'http://localhost/accounts/trader', 99 | name: 'trader' 100 | } 101 | ] 102 | }) 103 | }) 104 | 105 | delete process.env.LEDGER_CURRENCY_CODE 106 | delete process.env.LEDGER_CURRENCY_SYMBOL 107 | delete process.env.LEDGER_ILP_PREFIX 108 | }) 109 | }) 110 | }) 111 | -------------------------------------------------------------------------------- /test/mocha.opts: 
-------------------------------------------------------------------------------- 1 | --timeout 150000 2 | --require test/helpers/dbFailureMock 3 | -------------------------------------------------------------------------------- /test/postMessageSpec.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const _ = require('lodash') 4 | const assert = require('assert') 5 | const sinon = require('sinon') 6 | const app = require('../src/services/app') 7 | const logger = require('../src/services/log') 8 | const dbHelper = require('./helpers/db') 9 | const appHelper = require('./helpers/app') 10 | const logHelper = require('./helpers/log') 11 | const accounts = require('./data/accounts') 12 | const timingHelper = require('./helpers/timing') 13 | 14 | describe('POST /messages', function () { 15 | logHelper(logger) 16 | 17 | before(async function () { 18 | await dbHelper.init() 19 | }) 20 | 21 | beforeEach(async function () { 22 | appHelper.create(this, app) 23 | await dbHelper.clean() 24 | this.exampleMessage = _.cloneDeep(require('./data/messages/simple')) 25 | this.fromToMessage = _.cloneDeep(require('./data/messages/fromto')) 26 | // Store some example data 27 | await dbHelper.addAccounts(_.values(_.omit(accounts, 'noBalance'))) 28 | 29 | this.socket = this.ws('http://localhost/websocket', { 30 | headers: { 31 | Authorization: 'Basic ' + Buffer.from('bob:bob', 'utf8').toString('base64') 32 | } 33 | }) 34 | 35 | // Wait until WS connection is established 36 | await new Promise((resolve) => { 37 | this.socket.once('message', (msg) => { 38 | assert.deepEqual(JSON.parse(msg), { jsonrpc: '2.0', id: null, method: 'connect' }) 39 | resolve() 40 | }) 41 | }) 42 | 43 | this.socket.send(JSON.stringify({ 44 | jsonrpc: '2.0', 45 | id: 1, 46 | method: 'subscribe_account', 47 | params: { eventType: 'message.send', accounts: ['http://localhost/accounts/bob'] } 48 | })) 49 | 50 | await new Promise((resolve) => { 51 | 
this.socket.once('message', (msg) => { 52 | assert.deepEqual(JSON.parse(msg), { jsonrpc: '2.0', id: 1, result: 1 }) 53 | resolve() 54 | }) 55 | }) 56 | }) 57 | 58 | afterEach(async function () { 59 | this.socket.terminate() 60 | }) 61 | 62 | it('returns 201 if the message is valid', async function () { 63 | const message = this.exampleMessage 64 | 65 | await this.request() 66 | .post('/messages') 67 | .auth('alice', 'alice') 68 | .send(message) 69 | .expect(201) 70 | }) 71 | 72 | it('returns 201 if the message has "from", "to", and "account', async function () { 73 | const message = this.fromToMessage 74 | message.account = message.from 75 | 76 | await this.request() 77 | .post('/messages') 78 | .auth('alice', 'alice') 79 | .send(message) 80 | .expect(201) 81 | }) 82 | 83 | it('returns 400 if the message is missing "ledger"', async function () { 84 | const message = this.exampleMessage 85 | delete message.ledger 86 | 87 | await this.request() 88 | .post('/messages') 89 | .auth('alice', 'alice') 90 | .send(message) 91 | .expect(400) 92 | }) 93 | 94 | it('returns 400 if the message is missing "account"', async function () { 95 | const message = this.exampleMessage 96 | delete message.account 97 | 98 | await this.request() 99 | .post('/messages') 100 | .auth('alice', 'alice') 101 | .send(message) 102 | .expect(400) 103 | }) 104 | 105 | it('returns 400 if the message has "from" but no "to"', async function () { 106 | const message = this.fromToMessage 107 | delete message.to 108 | 109 | await this.request() 110 | .post('/messages') 111 | .auth('alice', 'alice') 112 | .send(message) 113 | .expect(400) 114 | }) 115 | 116 | it('returns 400 if the message has "to" but no "from"', async function () { 117 | const message = this.fromToMessage 118 | delete message.from 119 | 120 | await this.request() 121 | .post('/messages') 122 | .auth('alice', 'alice') 123 | .send(message) 124 | .expect(400) 125 | }) 126 | 127 | it('returns 400 if the message is missing "data"', async 
function () { 128 | const message = this.exampleMessage 129 | delete message.data 130 | 131 | await this.request() 132 | .post('/messages') 133 | .auth('alice', 'alice') 134 | .send(message) 135 | .expect(400) 136 | }) 137 | 138 | it('returns 400 if "from" doesn\'t match the sender (when the sender isn\'t admin)', async function () { 139 | const message = this.fromToMessage 140 | message.from = 'http://localhost/accounts/carl' 141 | 142 | await this.request() 143 | .post('/messages') 144 | .auth('alice', 'alice') 145 | .send(message) 146 | .expect(400) 147 | .expect({ 148 | id: 'InvalidBodyError', 149 | message: 'You do not have permission to impersonate this user' 150 | }) 151 | }) 152 | 153 | it('returns 422 if the message recipient isn\'t listening', async function () { 154 | const message = Object.assign(this.exampleMessage, { 155 | account: 'http://localhost/accounts/carl' 156 | }) 157 | 158 | await this.request() 159 | .post('/messages') 160 | .auth('alice', 'alice') 161 | .send(message) 162 | .expect(422) 163 | .expect({ 164 | id: 'NoSubscriptionsError', 165 | message: 'Destination account could not be reached' 166 | }) 167 | }) 168 | 169 | it('relays a message with "account"', async function () { 170 | const message = this.exampleMessage 171 | const listener = sinon.spy() 172 | this.socket.on('message', (msg) => listener(JSON.parse(msg))) 173 | 174 | await timingHelper.sleep(50) 175 | await this.request() 176 | .post('/messages') 177 | .auth('alice', 'alice') 178 | .send(message) 179 | .expect(201) 180 | await timingHelper.sleep(50) 181 | 182 | sinon.assert.calledOnce(listener) 183 | sinon.assert.calledWith(listener.firstCall, { 184 | jsonrpc: '2.0', 185 | id: null, 186 | method: 'notify', 187 | params: { 188 | event: 'message.send', 189 | resource: { 190 | ledger: 'http://localhost', 191 | from: 'http://localhost/accounts/alice', 192 | to: 'http://localhost/accounts/bob', 193 | account: 'http://localhost/accounts/alice', 194 | data: {foo: 'bar'} 195 | } 
196 | } 197 | }) 198 | }) 199 | 200 | it('relays a message with "from"/"to"', async function () { 201 | const message = this.fromToMessage 202 | const listener = sinon.spy() 203 | this.socket.on('message', (msg) => listener(JSON.parse(msg))) 204 | 205 | await timingHelper.sleep(50) 206 | await this.request() 207 | .post('/messages') 208 | .auth('alice', 'alice') 209 | .send(message) 210 | .expect(201) 211 | await timingHelper.sleep(50) 212 | 213 | sinon.assert.calledOnce(listener) 214 | sinon.assert.calledWith(listener.firstCall, { 215 | jsonrpc: '2.0', 216 | id: null, 217 | method: 'notify', 218 | params: { 219 | event: 'message.send', 220 | resource: { 221 | ledger: 'http://localhost', 222 | from: 'http://localhost/accounts/alice', 223 | to: 'http://localhost/accounts/bob', 224 | account: 'http://localhost/accounts/alice', 225 | data: {foo: 'bar'} 226 | } 227 | } 228 | }) 229 | }) 230 | 231 | it('relays a message when the admin is impersonating another user', async function () { 232 | const message = this.fromToMessage 233 | const listener = sinon.spy() 234 | this.socket.on('message', (msg) => listener(JSON.parse(msg))) 235 | 236 | await timingHelper.sleep(50) 237 | await this.request() 238 | .post('/messages') 239 | .auth('admin', 'admin') 240 | .send(message) 241 | .expect(201) 242 | await timingHelper.sleep(50) 243 | 244 | sinon.assert.calledOnce(listener) 245 | sinon.assert.calledWith(listener.firstCall, { 246 | jsonrpc: '2.0', 247 | id: null, 248 | method: 'notify', 249 | params: { 250 | event: 'message.send', 251 | resource: { 252 | ledger: 'http://localhost', 253 | from: 'http://localhost/accounts/alice', 254 | to: 'http://localhost/accounts/bob', 255 | account: 'http://localhost/accounts/alice', 256 | data: {foo: 'bar'} 257 | } 258 | } 259 | }) 260 | }) 261 | }) 262 | -------------------------------------------------------------------------------- /test/rejectionSpec.js: -------------------------------------------------------------------------------- 1 | 
'use strict' 2 | 3 | const _ = require('lodash') 4 | const nock = require('nock') 5 | nock.enableNetConnect(['localhost', '127.0.0.1']) 6 | const expect = require('chai').expect 7 | const app = require('../src/services/app') 8 | const logger = require('../src/services/log') 9 | const dbHelper = require('./helpers/db') 10 | const appHelper = require('./helpers/app') 11 | const logHelper = require('./helpers/log') 12 | const sinon = require('sinon') 13 | const accounts = require('./data/accounts') 14 | const validator = require('./helpers/validator') 15 | const getAccount = require('../src/models/db/accounts').getAccount 16 | 17 | const START_DATE = 1434412800000 // June 16, 2015 00:00:00 GMT 18 | 19 | describe('PUT /rejection', function () { 20 | logHelper(logger) 21 | 22 | before(async function () { 23 | await dbHelper.init() 24 | }) 25 | 26 | beforeEach(async function () { 27 | appHelper.create(this, app) 28 | await dbHelper.clean() 29 | this.clock = sinon.useFakeTimers(START_DATE, 'Date') 30 | 31 | this.proposedTransfer = _.cloneDeep(require('./data/transfers/proposed')) 32 | this.preparedTransfer = _.cloneDeep(require('./data/transfers/prepared')) 33 | this.executedTransfer = _.cloneDeep(require('./data/transfers/executed')) 34 | this.multiCreditTransfer = _.cloneDeep(require('./data/transfers/multiCredit')) 35 | 36 | this.rejectionMessage1 = { 37 | code: '123', 38 | name: 'Error 1', 39 | message: 'error 1', 40 | triggered_by: 'example.red.bob', 41 | additional_info: {} 42 | } 43 | this.rejectionMessage2 = { 44 | code: '123', 45 | name: 'Error 2', 46 | message: 'error 2', 47 | triggered_by: 'example.red.bob', 48 | additional_info: {} 49 | } 50 | 51 | await dbHelper.addAccounts(_.values(accounts)) 52 | }) 53 | 54 | afterEach(async function () { 55 | nock.cleanAll() 56 | this.clock.restore() 57 | }) 58 | 59 | it('should return 401 if the request is not authenticated', async function () { 60 | await this.request() 61 | .put(this.preparedTransfer.id + '/rejection') 
62 | .expect(401) 63 | }) 64 | 65 | it('should return 404 when rejecting a non-existent transfer', async function () { 66 | const transfer = this.preparedTransfer 67 | await this.request() 68 | .put(transfer.id + '/rejection') 69 | .auth('bob', 'bob') 70 | .send(this.rejectionMessage1) 71 | .expect(404) 72 | }) 73 | 74 | it('should return 403 when rejecting a transfer as the wrong user', async function () { 75 | const transfer = this.preparedTransfer 76 | await this.request() 77 | .put(transfer.id) 78 | .auth('alice', 'alice') 79 | .send(transfer) 80 | .expect(201) 81 | .expect(validator.validateTransfer) 82 | 83 | await this.request() 84 | .put(transfer.id + '/rejection') 85 | .auth('alice', 'alice') 86 | .send(this.rejectionMessage1) 87 | .expect(403) 88 | .expect({ 89 | id: 'ForbiddenError', 90 | message: 'Invalid attempt to reject credit' 91 | }) 92 | }) 93 | 94 | it('should reject a prepared transfer', async function () { 95 | const transfer = this.preparedTransfer 96 | await this.request() 97 | .put(transfer.id) 98 | .auth('alice', 'alice') 99 | .send(transfer) 100 | .expect(201) 101 | .expect(validator.validateTransfer) 102 | 103 | // Check balances 104 | expect((await getAccount('alice')).balance).to.equal(90) 105 | expect((await getAccount('bob')).balance).to.equal(0) 106 | 107 | await this.request() 108 | .put(transfer.id + '/rejection') 109 | .auth('bob', 'bob') 110 | .send(this.rejectionMessage1) 111 | .expect(201) 112 | .expect(this.rejectionMessage1) 113 | 114 | // Check balances 115 | expect((await getAccount('alice')).balance).to.equal(100) 116 | expect((await getAccount('bob')).balance).to.equal(0) 117 | 118 | await this.request() 119 | .put(transfer.id + '/rejection') 120 | .auth('bob', 'bob') 121 | .send(this.rejectionMessage2) 122 | .expect(400) 123 | .expect(function (res) { 124 | expect(res.body.id).to.equal('InvalidModificationError') 125 | expect(res.body.message).to.equal('Transfer may not be modified in this way') 126 | }) 127 | 128 | 
await this.request() 129 | .get(transfer.id) 130 | .auth('alice', 'alice') 131 | .expect(200) 132 | .expect(Object.assign(transfer, { 133 | state: 'rejected', 134 | rejection_reason: 'cancelled', 135 | credits: [ 136 | Object.assign(transfer.credits[0], { 137 | rejected: true, 138 | rejection_message: this.rejectionMessage1 139 | }) 140 | ], 141 | timeline: { 142 | prepared_at: '2015-06-16T00:00:00.000Z', 143 | proposed_at: '2015-06-16T00:00:00.000Z', 144 | rejected_at: '2015-06-16T00:00:00.000Z' 145 | } 146 | })) 147 | }) 148 | 149 | it('rejects the transfer when a credit is rejected', async function () { 150 | const transfer = Object.assign(this.multiCreditTransfer, 151 | {execution_condition: 'ni:///sha-256;vmvf6B7EpFalN6RGDx9F4f4z0wtOIgsIdCmbgv06ceI?fpt=preimage-sha-256&cost=7'}) 152 | await this.request() 153 | .put(transfer.id) 154 | .auth('alice', 'alice') 155 | .send(transfer) 156 | .expect(201) 157 | .expect(validator.validateTransfer) 158 | 159 | // Check balances 160 | expect((await getAccount('alice')).balance).to.equal(80) 161 | expect((await getAccount('bob')).balance).to.equal(0) 162 | 163 | await this.request() 164 | .put(transfer.id + '/rejection') 165 | .auth('dave', 'dave') 166 | .send(this.rejectionMessage1) 167 | .expect(201) 168 | .expect(this.rejectionMessage1) 169 | 170 | // Check balances 171 | expect((await getAccount('alice')).balance).to.equal(100) 172 | expect((await getAccount('bob')).balance).to.equal(0) 173 | 174 | await this.request() 175 | .put(transfer.id + '/rejection') 176 | .auth('bob', 'bob') 177 | .send(this.rejectionMessage2) 178 | .expect(201) 179 | .expect(this.rejectionMessage2) 180 | 181 | await this.request() 182 | .get(transfer.id) 183 | .auth('alice', 'alice') 184 | .expect(200) 185 | .expect(Object.assign(transfer, { 186 | state: 'rejected', 187 | rejection_reason: 'cancelled', 188 | credits: [ 189 | Object.assign(transfer.credits[0], { // bob 190 | rejected: true, 191 | rejection_message: this.rejectionMessage2 192 
| }), 193 | Object.assign(transfer.credits[1], { // dave 194 | rejected: true, 195 | rejection_message: this.rejectionMessage1 196 | }) 197 | ], 198 | timeline: { 199 | prepared_at: '2015-06-16T00:00:00.000Z', 200 | proposed_at: '2015-06-16T00:00:00.000Z', 201 | rejected_at: '2015-06-16T00:00:00.000Z' 202 | } 203 | })) 204 | }) 205 | }) 206 | -------------------------------------------------------------------------------- /test/timeQueueSpec.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const chai = require('chai') 4 | const expect = chai.expect 5 | const sinon = require('sinon') 6 | const sinonChai = require('sinon-chai') 7 | chai.use(sinonChai) 8 | const logger = require('../src/services/log') 9 | const logHelper = require('./helpers/log') 10 | const TimeQueue = require('../src/lib/timeQueue').TimeQueue 11 | 12 | const START_DATE = 1434412800000 // June 16, 2015 00:00:00 GMT 13 | 14 | describe('TimeQueue', function () { 15 | logHelper(logger) 16 | 17 | beforeEach(function () { 18 | this.timeQueue = new TimeQueue() 19 | }) 20 | 21 | describe('.insert()', function () { 22 | it('should insert an item to the priority queue', async function () { 23 | const bananaItem = {'day-o': 'daaaay-o'} 24 | await this.timeQueue.insert(START_DATE, bananaItem) 25 | expect(this.timeQueue._queue.peek().item).to.deep.equal(bananaItem) 26 | }) 27 | it('should emit an "insert" event when adding items', async function () { 28 | const insertListener = sinon.spy() 29 | this.timeQueue.on('insert', async function () { 30 | insertListener() 31 | }) 32 | expect(this.timeQueue.listeners('insert')).to.have.length(1) 33 | expect(insertListener.called).to.equal(false) 34 | await this.timeQueue.insert(START_DATE, {}) 35 | expect(insertListener.calledOnce).to.equal(true) 36 | }) 37 | }) 38 | 39 | describe('.getEarliestDate()', function () { 40 | it('should return the earliest item', async function () { 41 | const bananaItem1 = {'day-o': 
'daaaay-o'} 42 | const bananaItem2 = {'daylight': 'come'} 43 | const bananaItem3 = {'and me': 'wanna go home'} 44 | await this.timeQueue.insert(START_DATE, bananaItem1) 45 | await this.timeQueue.insert(START_DATE + 100, bananaItem2) 46 | await this.timeQueue.insert(START_DATE + 100000, bananaItem3) 47 | expect(this.timeQueue.getEarliestDate()).to.equal(START_DATE) 48 | }) 49 | it('should return the earliest item even when added out of order', async function () { 50 | const bananaItem1 = {'day-o': 'daaaay-o'} 51 | const bananaItem2 = {'daylight': 'come'} 52 | const bananaItem3 = {'and me': 'wanna go home'} 53 | await this.timeQueue.insert(START_DATE + 100000, bananaItem1) 54 | await this.timeQueue.insert(START_DATE, bananaItem2) 55 | await this.timeQueue.insert(START_DATE + 100, bananaItem3) 56 | expect(this.timeQueue.getEarliestDate()).to.equal(START_DATE) 57 | }) 58 | }) 59 | 60 | describe('.popBeforeDate()', function () { 61 | it('should return all items before the cutoff date', async function () { 62 | const bananaItem1 = {'day-o': 'daaaay-o'} 63 | const bananaItem2 = {'daylight': 'come'} 64 | const bananaItem3 = {'and me': 'wanna go home'} 65 | await this.timeQueue.insert(START_DATE + 100000, bananaItem1) 66 | await this.timeQueue.insert(START_DATE, bananaItem2) 67 | await this.timeQueue.insert(START_DATE + 100, bananaItem3) 68 | expect(this.timeQueue.popBeforeDate(START_DATE + 101)).to.deep.equal([ 69 | bananaItem2, 70 | bananaItem3 71 | ]) 72 | }) 73 | it('or no items at all if there are none', async function () { 74 | const bananaItem1 = {'day-o': 'daaaay-o'} 75 | const bananaItem2 = {'daylight': 'come'} 76 | const bananaItem3 = {'and me': 'wanna go home'} 77 | await this.timeQueue.insert(START_DATE + 100000, bananaItem1) 78 | await this.timeQueue.insert(START_DATE, bananaItem2) 79 | await this.timeQueue.insert(START_DATE + 100, bananaItem3) 80 | expect(this.timeQueue.popBeforeDate(START_DATE - 1)).to.deep.equal([]) 81 | }) 82 | }) 83 | 84 | 
describe('.includes()', function () { 85 | it('should find an included item', async function () { 86 | const bananaItem1 = {'day-o': 'daaaay-o'} 87 | const bananaItem2 = {'daylight': 'come'} 88 | const bananaItem3 = {'and me': 'wanna go home'} 89 | await this.timeQueue.insert(START_DATE + 100000, bananaItem1) 90 | await this.timeQueue.insert(START_DATE, bananaItem2) 91 | await this.timeQueue.insert(START_DATE + 100, bananaItem3) 92 | expect(this.timeQueue.includes(bananaItem1)).to.equal(true) 93 | expect(this.timeQueue.includes(bananaItem2)).to.equal(true) 94 | expect(this.timeQueue.includes(bananaItem3)).to.equal(true) 95 | }) 96 | it('should not find a non-included item', async function () { 97 | const bananaItem1 = {'day-o': 'daaaay-o'} 98 | const bananaItem2 = {'daylight': 'come'} 99 | const bananaItem3 = {'and me': 'wanna go home'} 100 | await this.timeQueue.insert(START_DATE + 100000, bananaItem1) 101 | await this.timeQueue.insert(START_DATE, bananaItem2) 102 | expect(this.timeQueue.includes(bananaItem1)).to.equal(true) 103 | expect(this.timeQueue.includes(bananaItem2)).to.equal(true) 104 | expect(this.timeQueue.includes(bananaItem3)).to.equal(false) 105 | }) 106 | }) 107 | 108 | describe('.remove()', function () { 109 | it('should remove an included item', async function () { 110 | const bananaItem1 = {'day-o': 'daaaay-o'} 111 | const bananaItem2 = {'daylight': 'come'} 112 | const bananaItem3 = {'and me': 'wanna go home'} 113 | await this.timeQueue.insert(START_DATE + 100000, bananaItem1) 114 | await this.timeQueue.insert(START_DATE, bananaItem2) 115 | await this.timeQueue.insert(START_DATE + 100, bananaItem3) 116 | expect(this.timeQueue.includes(bananaItem1)).to.equal(true) 117 | expect(this.timeQueue.includes(bananaItem2)).to.equal(true) 118 | expect(this.timeQueue.includes(bananaItem3)).to.equal(true) 119 | this.timeQueue.remove(bananaItem2) 120 | expect(this.timeQueue.includes(bananaItem1)).to.equal(true) 121 | 
expect(this.timeQueue.includes(bananaItem2)).to.equal(false) 122 | expect(this.timeQueue.includes(bananaItem3)).to.equal(true) 123 | }) 124 | }) 125 | }) 126 | -------------------------------------------------------------------------------- /test/timerWorkerSpec.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const chai = require('chai') 4 | const expect = chai.expect 5 | const sinon = require('sinon') 6 | const sinonChai = require('sinon-chai') 7 | chai.use(sinonChai) 8 | const logger = require('../src/services/log') 9 | const logHelper = require('./helpers/log') 10 | const TimerWorker = require('../src/lib/timerWorker').TimerWorker 11 | const TransferExpiryMonitor = 12 | require('../src/lib/transferExpiryMonitor').TransferExpiryMonitor 13 | const TimeQueue = require('../src/lib/timeQueue').TimeQueue 14 | 15 | const START_DATE = 1434412800000 // June 16, 2015 00:00:00 GMT 16 | 17 | describe('TimerWorker', function () { 18 | logHelper(logger) 19 | 20 | beforeEach(function () { 21 | this.clock = sinon.useFakeTimers(START_DATE, 'Date', 'setTimeout', 'clearTimeout', 'setImmediate') 22 | 23 | this.timeQueue = new TimeQueue() 24 | this.transferExpiryMonitor = new TransferExpiryMonitor(this.timeQueue) 25 | sinon.stub( 26 | this.transferExpiryMonitor, 27 | 'processExpiredTransfers' 28 | ) 29 | .callsFake(async function () { }) 30 | this.timerWorker = new TimerWorker(this.timeQueue, this.transferExpiryMonitor) 31 | }) 32 | 33 | afterEach(function () { 34 | this.timerWorker.stop() 35 | this.clock.restore() 36 | }) 37 | 38 | describe('.start()', function () { 39 | it('should add a listener to the timeQueue to watch for newly inserted transfers', async function () { 40 | await this.timerWorker.start() 41 | expect(this.timeQueue.listeners('insert')).to.have.length(1) 42 | }) 43 | 44 | it('should trigger the transferExpiryMonitor to process expired transfers when called', async function () { 45 | await 
this.timerWorker.start() 46 | 47 | expect(this.transferExpiryMonitor.processExpiredTransfers).to.have.callCount(1) 48 | }) 49 | }) 50 | 51 | describe('.processTimeQueue()', function () { 52 | it('should trigger the transferExpiryMonitor to process expired transfers when called', async function () { 53 | await this.timerWorker.processTimeQueue() 54 | 55 | expect(this.transferExpiryMonitor.processExpiredTransfers).to.have.callCount(1) 56 | }) 57 | 58 | it('should set a timeout to trigger itself again at the expiry date of the earliest item in the timeQueue', async function () { 59 | await this.timerWorker.start() 60 | await this.timeQueue.insert(START_DATE + 100, 'hello') 61 | 62 | this.clock.tick(100) 63 | 64 | expect(this.transferExpiryMonitor.processExpiredTransfers).to.have.callCount(3) 65 | }) 66 | 67 | it('should be trigger the transferExpiryMonitor to process expired transfers each time a new item is inserted into the timeQueue', async function () { 68 | await this.timerWorker.start() 69 | await this.timeQueue.insert(START_DATE + 100, 'hello') 70 | await this.timeQueue.insert(START_DATE + 200, 'hello') 71 | 72 | // The function will be called on the next tick 73 | await Promise.resolve() 74 | expect(this.transferExpiryMonitor.processExpiredTransfers).to.have.callCount(3) 75 | }) 76 | 77 | it('should only have one timeQueue listener at a time, even if it is triggered by a timeout', async function () { 78 | await this.timerWorker.start() 79 | await this.timeQueue.insert(START_DATE + 100, 'hello') 80 | 81 | this.clock.tick(100) 82 | 83 | expect(this.timeQueue.listeners('insert')).to.have.length(1) 84 | }) 85 | 86 | it('should keep the timeQueue ordered from earliest date to latest', async function () { 87 | await this.timerWorker.start() 88 | await this.timeQueue.insert(START_DATE + 100, 'hello') 89 | await this.timeQueue.insert(START_DATE + 200, 'hello again') 90 | 91 | this.clock.tick(100) 92 | 93 | // The function will be called on the next tick 94 | await 
Promise.resolve() 95 | expect(this.transferExpiryMonitor.processExpiredTransfers).to.have.callCount(4) 96 | }) 97 | 98 | it('should work with a timeout that is greater than the maximum for setTimeout', 99 | async function () { 100 | const max32int = 2147483647 101 | 102 | await this.timerWorker.start() 103 | await this.timeQueue.insert(START_DATE + max32int + 1, 'hello') 104 | 105 | expect(this.transferExpiryMonitor.processExpiredTransfers).to.have.callCount(2) 106 | 107 | this.clock.tick(1) 108 | 109 | await Promise.resolve() 110 | expect(this.transferExpiryMonitor.processExpiredTransfers).to.have.callCount(2) 111 | 112 | this.clock.tick(max32int) 113 | 114 | await Promise.resolve() 115 | expect(this.transferExpiryMonitor.processExpiredTransfers).to.have.callCount(3) 116 | }) 117 | }) 118 | }) 119 | -------------------------------------------------------------------------------- /wallaby.js: -------------------------------------------------------------------------------- 1 | module.exports = function (wallaby) { 2 | return { 3 | files: [ 4 | 'src/**/*.js', 5 | 'app.js', 6 | 'test/helpers/*.js', 7 | 'test/data/*' 8 | ], 9 | 10 | tests: [ 11 | 'test/*Spec.js' 12 | ], 13 | 14 | testFramework: 'mocha', 15 | 16 | env: { 17 | type: 'node', 18 | runner: 'node', 19 | params: { 20 | env: 'NODE_ENV=unit' 21 | } 22 | }, 23 | 24 | bootstrap: function () { 25 | require('co-mocha')(wallaby.testFramework.constructor) 26 | } 27 | } 28 | } 29 | --------------------------------------------------------------------------------