├── .circleci └── config.yml ├── .eslintrc.json ├── .flowconfig ├── .gitignore ├── .npmignore ├── .nvmrc ├── .prettierrc.json ├── LICENSE.md ├── README.md ├── aws-lambda.js ├── claudia.json ├── config ├── aws-lambda.js ├── ci.js ├── custom-environment-variables.json ├── default.js ├── deploy.js ├── develop.js ├── staging.js └── test.js ├── default.nix ├── docker ├── .dockerignore └── Dockerfile ├── flow-typed └── npm │ ├── bunyan_v1.x.x.js │ └── mocha_v5.x.x.js ├── flow ├── libs │ ├── hippie.js │ └── pg_v7.x.x.js └── types.js ├── package-lock.json ├── package.json ├── scripts ├── docker │ ├── Readme.md │ ├── build-docker.sh │ ├── helper.sh │ └── launch-staging.sh ├── test │ ├── clean_test_db.js │ └── load_test_db.js └── tls │ ├── README.md │ ├── server.crt │ ├── server.csr │ ├── server.key │ ├── server_rootCA.csr.cnf │ ├── server_rootCA.key │ ├── server_rootCA.pem │ ├── server_rootCA.srl │ ├── tls.sh │ └── v3.ext ├── src ├── cleanup.js ├── db-api.js ├── db.js ├── importer-api.js ├── index.js ├── logger.js ├── routes.js ├── server.js └── ws-connections.js ├── test ├── integration │ ├── filter-used-addresses.integration-test.js │ ├── healthcheck.integration-test.js │ ├── test-db.sql │ ├── test-utils.js │ ├── transactions-history.integration-test.js │ ├── utxo-for-addresses.integration-test.js │ └── utxo-sum-for-addresses.integration-test.js └── unit │ └── routes.test.js ├── tls-files ├── develop │ ├── ca.pem │ ├── server.crt │ └── server.key └── staging │ ├── ca.pem │ ├── server.crt │ └── server.key └── yarn.lock /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | # Javascript Node CircleCI 2.0 configuration file 2 | # 3 | # Check https://circleci.com/docs/2.0/language-javascript/ for more details 4 | # 5 | version: 2 6 | jobs: 7 | build: 8 | docker: 9 | # specify the version you desire here 10 | - image: circleci/node:8-browsers 11 | 12 | - image: circleci/postgres:9.6.5-alpine-ram 13 | 14 | working_directory: ~/repo 15 | 16 | steps: 17 | - checkout 18 | 19 | # Install integration tests tool 20 | - run: sudo apt-get update 21 | - run: sudo apt-get install postgresql-client 22 | 23 | # Download and cache dependencies 24 | - restore_cache: 25 | keys: 26 | - v1-dependencies-{{ checksum "package.json" }} 27 | # fallback to using the latest cache if no exact match is found 28 | - v1-dependencies- 29 | 30 | - run: npm install 31 | 32 | - save_cache: 33 | paths: 34 | - node_modules 35 | key: v1-dependencies-{{ checksum "package.json" }} 36 | 37 | # code checks 38 | - run: npm run flow 39 | - run: npm run eslint 40 | 41 | # run tests! 
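# `all-tests` (defined in the scripts section of package.json) runs the unit tests first, then the
# Postgres-backed integration tests against the circleci/postgres container started above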
42 | - run: npm run all-tests 43 | 44 | 45 | -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "airbnb-base", 3 | "parser": "babel-eslint", 4 | "plugins": ["flowtype", "mocha"], 5 | "rules": { 6 | "arrow-parens": "off", 7 | "function-paren-newline": "off" 8 | }, 9 | "env": { 10 | "mocha": true 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /.flowconfig: -------------------------------------------------------------------------------- 1 | [ignore] 2 | .*/node_modules/npm/.* 3 | ./flow-typed 4 | 5 | [include] 6 | 7 | [libs] 8 | ./flow/ 9 | ./flow/libs/ 10 | ./flow-typed 11 | 12 | [lints] 13 | 14 | [options] 15 | 16 | [strict] 17 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | 8 | # Runtime data 9 | pids 10 | *.pid 11 | *.seed 12 | *.pid.lock 13 | 14 | # Directory for instrumented libs generated by jscoverage/JSCover 15 | lib-cov 16 | 17 | # Coverage directory used by tools like istanbul 18 | coverage 19 | 20 | # nyc test coverage 21 | .nyc_output 22 | 23 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 24 | .grunt 25 | 26 | # Bower dependency directory (https://bower.io/) 27 | bower_components 28 | 29 | # node-waf configuration 30 | .lock-wscript 31 | 32 | # Compiled binary addons (http://nodejs.org/api/addons.html) 33 | build/Release 34 | 35 | # Dependency directories 36 | node_modules/ 37 | jspm_packages/ 38 | 39 | # Typescript v1 declaration files 40 | typings/ 41 | 42 | # Optional npm cache directory 43 | .npm 44 | 45 | # Optional eslint cache 46 | .eslintcache 47 | 48 | # Optional REPL history 49 | .node_repl_history 50 | 51 | # Output of 'npm pack' 52 | *.tgz 53 | 54 | # Yarn Integrity file 55 | .yarn-integrity 56 | 57 | # dotenv environment variables file 58 | .env 59 | 60 | # Flow generated files 61 | flow-files 62 | 63 | # Claudia Production config file 64 | claudia-prod.json 65 | 66 | # nix 67 | result* 68 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/input-output-hk/project-icarus-backend-service/b9ad929d75187b5d5c73a32924d1cd5df0db12cc/.npmignore -------------------------------------------------------------------------------- /.nvmrc: -------------------------------------------------------------------------------- 1 | v8.9.4 2 | -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "singleQuote": true, 3 | "trailingComma": "all" 4 | } 5 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | Apache License 2 | 3 | Version 2.0, January 2004 4 | 5 | http://www.apache.org/licenses/ 6 | 7 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 8 | 9 | 1. Definitions. 
10 | "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. 11 | "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. 12 | "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. 13 | "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. 14 | "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. 15 | "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. 16 | "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). 17 | "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. 18 | "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." 19 | "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 20 | 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 21 | 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 22 | 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: 23 | You must give any other recipients of the Work or Derivative Works a copy of this License; and 24 | You must cause any modified files to carry prominent notices stating that You changed the files; and 25 | You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and 26 | If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. 27 | 28 | 29 | You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 30 | 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 31 | 6. Trademarks. 
This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 32 | 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 33 | 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 34 | 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. 35 | END OF TERMS AND CONDITIONS 36 | 37 | Copyright 2018 IOHK Ltd. 38 | 39 | Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 40 | 41 | You may obtain a copy of the License at 42 | 43 | http://www.apache.org/licenses/LICENSE-2.0 44 | 45 | Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 46 | 47 | See the License for the specific language governing permissions and limitations under the License. 48 | 49 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Project Icarus - Backend Service 2 | 3 | [![CircleCI](https://circleci.com/gh/input-output-hk/icarus-poc-backend-service.svg?style=svg)](https://circleci.com/gh/input-output-hk/icarus-poc-backend-service) 4 | 5 | 6 | Icarus is a reference implementation of a lightweight wallet, developed by the IOHK Engineering Team. This code base can be used as a point of reference to enable developers to create their own secure light and mobile wallets for Cardano.
Icarus is a fully open-source code base that will be the first step in a range of open source initiatives to provide developers with a suite of tools to integrate with Cardano. 7 | 8 | Icarus Backend Service will allow wallet users to access blockchain data. For a detailed architecture explanation, please refer to the [wiki](https://github.com/input-output-hk/icarus-poc/wiki/Architecture). 9 | 10 | # Setup 11 | 12 | ## Pre-requisites 13 | 14 | * NodeJS v8.9.4. We recommend [nvm](https://github.com/creationix/nvm) to install it 15 | * [Postgres](https://www.postgresql.org/) as DB engine. For development purposes we 16 | suggest using Docker, but a local installation can be used as well (not both, 17 | obviously) 18 | 19 | ## Configuration 20 | 21 | All the environment-specific configurations can be found in the `$PROJ_ROOT/config` folder. 22 | They are loaded using the [config](https://www.npmjs.com/package/config) package. 23 | 24 | ## Development environment 25 | 26 | We recommend using [Docker](https://hub.docker.com/_/postgres/) to quickly set up the DB in the dev environment: 27 | 28 | `docker run --name postgres -p 5432:5432 -e POSTGRES_PASSWORD=mysecretpassword -d postgres` 29 | 30 | Then, to create the DB, run: 31 | 32 | ``` 33 | docker exec -it postgres psql -U postgres; 34 | create database icaruspocbackendservice; 35 | ``` 36 | 37 | 1. Clone this repo, `git@github.com:input-output-hk/icaraus-poc-backend-service.git` 38 | 2. Select the correct NodeJS version, `nvm use` 39 | 3. Install dependencies, `npm install` 40 | 4. Start the app, `npm run dev`. 41 | 42 | In order to run against the staging DB from a local environment, you need to: 43 | 44 | 1. Create a file with the necessary environment variables set. E.g.: 45 | 46 | ``` 47 | export DB_USER=dbUser 48 | export DB_HOST=dbHost 49 | export DB=dbName 50 | export DB_PASSWORD=password 51 | export DB_PORT=8080 52 | ``` 53 | 54 | 2. Import the environment variables in your terminal, e.g.: `source ~/path/to/file` (to verify 55 | the variables were exported: `echo $DB`) 56 | 3. Go to the repository's path 57 | 4. Execute the following command: `npm run dev` 58 | 59 | ## Checks & Tests 60 | 61 | ### Flow and Eslint 62 | 63 | * Flow checks: `npm run flow` 64 | * Eslint checks: `npm run eslint` 65 | 66 | ### Unit tests 67 | 68 | To run unit tests, you just need to run 69 | 70 | `npm run unit-tests` 71 | 72 | ### Integration tests 73 | 74 | Integration tests will: 75 | 76 | 1. Create a new DB 77 | 2. Preload sample data 78 | 3. Start up the application 79 | 4. Exercise and assert several endpoints 80 | 81 | To do so, before running them, make sure a PostgreSQL instance is accessible from localhost 82 | using the following config: 83 | 84 | * Server: localhost 85 | * User: postgres 86 | * Password: mysecretpassword 87 | * Port: 5432 88 | 89 | Then, run `export NODE_ENV=test; npm run integration-tests` 90 | 91 | ### Coverage 92 | 93 | Istanbul is used to measure test coverage. It executes both unit and integration tests.
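For reference, the wiring lives in the `scripts` section of this repo's `package.json`, with [nyc](https://github.com/istanbuljs/nyc) acting as Istanbul's command-line client:

```
"all-tests": "npm run unit-tests && npm run integration-tests",
"coverage": "nyc npm run all-tests"
```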
94 | 95 | To run it, execute `npm run coverage` 96 | 97 | ## License 98 | 99 | Licensed under the [Apache License, Version 2.0](LICENSE.md) 100 | -------------------------------------------------------------------------------- /aws-lambda.js: -------------------------------------------------------------------------------- 1 | const config = require('config'); 2 | const ApiBuilder = require('claudia-api-builder'); 3 | const pg = require('pg'); 4 | const routes = require('./flow-files/routes'); 5 | const dbApi = require('./flow-files/db-api'); 6 | const importerApi = require('./flow-files/importer-api'); 7 | 8 | const serverConfig = config.get('server'); 9 | const { importerSendTxEndpoint } = serverConfig; 10 | const api = new ApiBuilder(); 11 | 12 | Object.values(routes).forEach(({ method, path, handler }) => { 13 | api[method](path, async req => { 14 | const db = new pg.Client(config.get('db')); 15 | await db.connect(); 16 | try { 17 | return await handler( 18 | dbApi(db), 19 | serverConfig, 20 | importerApi(importerSendTxEndpoint), 21 | )(req); 22 | } finally { 23 | db.end(); 24 | } 25 | }); 26 | }); 27 | 28 | module.exports = api; 29 | -------------------------------------------------------------------------------- /claudia.json: -------------------------------------------------------------------------------- 1 | { 2 | "lambda": { 3 | "role": "icarus-poc-backend-staging-executor", 4 | "name": "icarus-poc-backend-staging", 5 | "region": "us-east-1" 6 | }, 7 | "api": { 8 | "id": "lld5qq5jl5", 9 | "module": "aws-lambda" 10 | } 11 | } -------------------------------------------------------------------------------- /config/aws-lambda.js: -------------------------------------------------------------------------------- 1 | const { raw } = require('config/raw'); 2 | 3 | module.exports = { 4 | server: { 5 | logger: raw(console), 6 | importerSendTxEndpoint: 'http://ec2-18-206-30-1.compute-1.amazonaws.com:8200/api/txs/signed', 7 | }, 8 | db: { 9 | user: 'fake', 10 | host: 'fake', 11 | database: 'fake', 12 | password: 'fake', 13 | port: '5432', 14 | min: 0, 15 | max: 1, 16 | idleTimeoutMillis: 1000, 17 | connectionTimeoutMillis: 1000 * 10, 18 | }, 19 | }; 20 | -------------------------------------------------------------------------------- /config/ci.js: -------------------------------------------------------------------------------- 1 | const { raw } = require('config/raw'); 2 | const { consoleLogger } = require('../src/logger'); 3 | 4 | module.exports = { 5 | server: { 6 | corsEnabledFor: ['*'], 7 | logger: raw(consoleLogger('fatal')), 8 | importerSendTxEndpoint: 'https://localhost:8200/api/txs/signed', 9 | }, 10 | db: { 11 | user: 'root', 12 | host: 'localhost', 13 | database: 'icaruspocbackendservice-test', 14 | password: '', 15 | port: 5432, 16 | min: 4, 17 | max: 20, 18 | idleTimeoutMillis: 1000, 19 | connectionTimeoutMillis: 1000, 20 | }, 21 | }; 22 | -------------------------------------------------------------------------------- /config/custom-environment-variables.json: -------------------------------------------------------------------------------- 1 | { 2 | "db": { 3 | "user": "DB_USER", 4 | "host": "DB_HOST", 5 | "database": "DB", 6 | "password": "DB_PASSWORD", 7 | "port": "DB_PORT" 8 | }, 9 | "server": { 10 | "importerSendTxEndpoint": "IMPORTER_ENDPOINT" 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /config/default.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | 
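  // Defaults shared by every environment: node-config merges the file matching
  // NODE_ENV (develop.js, staging.js, test.js, ...) on top of these values, and
  // custom-environment-variables.json maps environment variables over the result.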
appName: 'icarus-poc-backend-service', 3 | server: { 4 | port: 8080, 5 | apiConfig: { 6 | addressesRequestLimit: 50, 7 | txHistoryResponseLimit: 20, 8 | }, 9 | }, 10 | }; 11 | -------------------------------------------------------------------------------- /config/deploy.js: -------------------------------------------------------------------------------- 1 | const { raw } = require('config/raw'); 2 | const { consoleLogger } = require('../src/logger'); 3 | 4 | const getEnv = function(name) { 5 | const ret = process.env[name]; 6 | if (!ret) 7 | throw new Error('Environment variable should be defined: ' + name); 8 | return ret; 9 | }; 10 | 11 | module.exports = { 12 | server: { 13 | corsEnabledFor: ['*'], 14 | logger: raw(consoleLogger('info')), 15 | port: 8080, 16 | importerSendTxEndpoint: getEnv("IMPORTER_ENDPOINT"), 17 | }, 18 | db: { 19 | user: getEnv("DB_USER"), 20 | host: getEnv("DB_HOST"), 21 | database: getEnv("DB"), 22 | password: getEnv("DB_PASSWORD"), 23 | port: '5432', 24 | min: 4, 25 | max: 50, 26 | idleTimeoutMillis: 1000, 27 | connectionTimeoutMillis: 5000, 28 | }, 29 | }; 30 | -------------------------------------------------------------------------------- /config/develop.js: -------------------------------------------------------------------------------- 1 | const { raw } = require('config/raw'); 2 | const { consoleLogger } = require('../src/logger'); 3 | 4 | module.exports = { 5 | server: { 6 | corsEnabledFor: ['*'], 7 | logger: raw(consoleLogger('debug')), 8 | https: { 9 | tlsDir: './tls-files', 10 | }, 11 | importerSendTxEndpoint: 'https://localhost:8200/api/txs/signed', 12 | }, 13 | db: { 14 | user: 'postgres', 15 | host: 'localhost', 16 | database: 'icaruspocbackendservice', 17 | password: 'mysecretpassword', 18 | port: 5432, 19 | min: 4, 20 | max: 20, 21 | idleTimeoutMillis: 1000, 22 | connectionTimeoutMillis: 5000, 23 | }, 24 | }; 25 | -------------------------------------------------------------------------------- /config/staging.js: -------------------------------------------------------------------------------- 1 | const { raw } = require('config/raw'); 2 | const { consoleLogger } = require('../src/logger'); 3 | 4 | module.exports = { 5 | server: { 6 | corsEnabledFor: ['*'], 7 | logger: raw(consoleLogger('error')), 8 | port: 443, 9 | https: { 10 | tlsDir: './tls-files', 11 | }, 12 | importerSendTxEndpoint: 'https://ec2-18-206-30-1.compute-1.amazonaws.com:8200/api/txs/signed', 13 | }, 14 | db: { 15 | user: 'fake', 16 | host: 'fake', 17 | database: 'fake', 18 | password: 'fake', 19 | port: '5432', 20 | min: 4, 21 | max: 50, 22 | idleTimeoutMillis: 1000, 23 | connectionTimeoutMillis: 5000, 24 | }, 25 | }; 26 | -------------------------------------------------------------------------------- /config/test.js: -------------------------------------------------------------------------------- 1 | const { raw } = require('config/raw'); 2 | const { consoleLogger } = require('../src/logger'); 3 | 4 | module.exports = { 5 | server: { 6 | corsEnabledFor: ['*'], 7 | logger: raw(consoleLogger('fatal')), 8 | importerSendTxEndpoint: 'https://localhost:8200/api/txs/signed', 9 | }, 10 | db: { 11 | user: 'postgres', 12 | host: 'localhost', 13 | database: 'icaruspocbackendservice-test', 14 | password: 'mysecretpassword', 15 | port: 5432, 16 | min: 4, 17 | max: 20, 18 | idleTimeoutMillis: 1000, 19 | connectionTimeoutMillis: 1000, 20 | }, 21 | }; 22 | -------------------------------------------------------------------------------- /default.nix:
-------------------------------------------------------------------------------- 1 | with (import {}); 2 | rec { 3 | project-icarus-backend = mkYarnPackage { 4 | name = "project-icarus-backend"; 5 | src = lib.cleanSource ./.; 6 | postInstall = '' 7 | mkdir -p $out/bin 8 | yarn run flow-remove-types 9 | cp -vir flow-files $out/ 10 | cat > $out/bin/icarus-backend-service <=v0.25.x 3 | 4 | declare module "bunyan" { 5 | declare var TRACE: 10; 6 | declare var DEBUG: 20; 7 | declare var INFO: 30; 8 | declare var WARN: 40; 9 | declare var ERROR: 50; 10 | declare var FATAL: 60; 11 | 12 | declare type BunyanLogLevels = 13 | | 60 // fatal 14 | | 50 // error 15 | | 40 // warn 16 | | 30 // info 17 | | 20 // debug 18 | | 10; // info 19 | declare type BunyanRecord = { 20 | v: number, 21 | level: BunyanLogLevels, 22 | name: string, 23 | hostname: string, 24 | pid: string, 25 | time: Date, 26 | msg: string, 27 | src: string, 28 | err?: { 29 | message: string, 30 | name: string, 31 | code: any, 32 | signal: any, 33 | stack: string 34 | }, 35 | [key: string]: any 36 | }; 37 | declare type Writable = { 38 | write(rec: BunyanRecord): void 39 | }; 40 | declare class Logger extends events$EventEmitter { 41 | constructor(options: LoggerOptions): any; 42 | addStream(stream: Stream): void; 43 | addSerializers(serializers: Serializers): void; 44 | child(opts?: LoggerOptions, simple?: boolean): Logger; 45 | reopenFileStreams(): void; 46 | level(): string | number; 47 | level(value: number | string): void; 48 | levels(name: number | string, value: number | string): void; 49 | trace(...params: Array): boolean; 50 | trace(error: Error, format?: any, ...params: Array): void; 51 | trace(buffer: Buffer, format?: any, ...params: Array): void; 52 | trace(obj: Object, format?: any, ...params: Array): void; 53 | trace(format: string, ...params: Array): void; 54 | debug(...params: Array): boolean; 55 | debug(error: Error, format?: any, ...params: Array): void; 56 | debug(buffer: Buffer, format?: any, ...params: Array): void; 57 | debug(obj: Object, format?: any, ...params: Array): void; 58 | debug(format: string, ...params: Array): void; 59 | info(...params: Array): boolean; 60 | info(error: Error, format?: any, ...params: Array): void; 61 | info(buffer: Buffer, format?: any, ...params: Array): void; 62 | info(obj: Object, format?: any, ...params: Array): void; 63 | info(format: string, ...params: Array): void; 64 | warn(...params: Array): boolean; 65 | warn(error: Error, format?: any, ...params: Array): void; 66 | warn(buffer: Buffer, format?: any, ...params: Array): void; 67 | warn(obj: Object, format?: any, ...params: Array): void; 68 | warn(format: string, ...params: Array): void; 69 | error(...params: Array): boolean; 70 | error(error: Error, format?: any, ...params: Array): void; 71 | error(buffer: Buffer, format?: any, ...params: Array): void; 72 | error(obj: Object, format?: any, ...params: Array): void; 73 | error(format: string, ...params: Array): void; 74 | fatal(...params: Array): boolean; 75 | fatal(error: Error, format?: any, ...params: Array): void; 76 | fatal(buffer: Buffer, format?: any, ...params: Array): void; 77 | fatal(obj: Object, format?: any, ...params: Array): void; 78 | fatal(format: string, ...params: Array): void; 79 | static stdSerializers: { 80 | req: ( 81 | req: http$ClientRequest 82 | ) => { 83 | method: string, 84 | url: string, 85 | headers: mixed, 86 | remoteAddress: string, 87 | remotePort: number 88 | }, 89 | res: ( 90 | res: http$IncomingMessage 91 | ) => { statusCode: number, header: 
string }, 92 | err: ( 93 | err: Error 94 | ) => { 95 | message: string, 96 | name: string, 97 | stack: string, 98 | code: string, 99 | signal: string 100 | } 101 | }; 102 | } 103 | declare interface LoggerOptions { 104 | streams?: Array; 105 | level?: BunyanLogLevels | string; 106 | stream?: stream$Writable; 107 | serializers?: Serializers; 108 | src?: boolean; 109 | } 110 | declare type Serializers = { 111 | [key: string]: (input: any) => mixed 112 | }; 113 | declare type Stream = { 114 | type?: string, 115 | level?: number | string, 116 | path?: string, 117 | stream?: stream$Writable | tty$WriteStream | Stream | Writable, 118 | closeOnExit?: boolean, 119 | period?: string, 120 | count?: number 121 | }; 122 | declare var stdSerializers: Serializers; 123 | declare function resolveLevel(value: number | string): number; 124 | declare function createLogger( 125 | options: LoggerOptions & { name: string } 126 | ): Logger; 127 | declare class RingBuffer extends events$EventEmitter { 128 | constructor(options: RingBufferOptions): any; 129 | writable: boolean; 130 | records: Array; 131 | write(record: BunyanRecord): void; 132 | end(record?: any): void; 133 | destroy(): void; 134 | destroySoon(): void; 135 | } 136 | declare interface RingBufferOptions { 137 | limit: number; 138 | } 139 | declare function safeCycles(): (key: string, value: any) => any; 140 | declare class ConsoleRawStream { 141 | write(rec: BunyanRecord): void; 142 | } 143 | declare var levelFromName: { 144 | trace: typeof TRACE, 145 | debug: typeof DEBUG, 146 | info: typeof INFO, 147 | warn: typeof WARN, 148 | error: typeof ERROR, 149 | fatal: typeof FATAL 150 | }; 151 | declare var nameFromLevel: { 152 | [key: BunyanLogLevels]: string 153 | }; 154 | declare var VERSION: string; 155 | declare var LOG_VERSION: string; 156 | } 157 | -------------------------------------------------------------------------------- /flow-typed/npm/mocha_v5.x.x.js: -------------------------------------------------------------------------------- 1 | // flow-typed signature: f58bffa67453f8927660cb5f142b2c7f 2 | // flow-typed version: 03669c2773/mocha_v5.x.x/flow_>=v0.28.x 3 | 4 | declare interface $npm$mocha$SetupOptions { 5 | slow?: number; 6 | timeout?: number; 7 | ui?: string; 8 | globals?: Array; 9 | reporter?: any; 10 | bail?: boolean; 11 | ignoreLeaks?: boolean; 12 | grep?: any; 13 | } 14 | 15 | declare type $npm$mocha$done = (error?: any) => any; 16 | 17 | // declare interface $npm$mocha$SuiteCallbackContext { 18 | // timeout(ms: number): void; 19 | // retries(n: number): void; 20 | // slow(ms: number): void; 21 | // } 22 | 23 | // declare interface $npm$mocha$TestCallbackContext { 24 | // skip(): void; 25 | // timeout(ms: number): void; 26 | // retries(n: number): void; 27 | // slow(ms: number): void; 28 | // [index: string]: any; 29 | // } 30 | 31 | declare interface $npm$mocha$Suite { 32 | parent: $npm$mocha$Suite; 33 | title: string; 34 | fullTitle(): string; 35 | } 36 | 37 | declare interface $npm$mocha$ContextDefinition { 38 | (description: string, callback: (/* this: $npm$mocha$SuiteCallbackContext */) => void): $npm$mocha$Suite; 39 | only(description: string, callback: (/* this: $npm$mocha$SuiteCallbackContext */) => void): $npm$mocha$Suite; 40 | skip(description: string, callback: (/* this: $npm$mocha$SuiteCallbackContext */) => void): void; 41 | timeout(ms: number): void; 42 | } 43 | 44 | declare interface $npm$mocha$TestDefinition { 45 | (expectation: string, callback?: (/* this: $npm$mocha$TestCallbackContext, */ done: $npm$mocha$done) 
=> mixed): $npm$mocha$Test; 46 | only(expectation: string, callback?: (/* this: $npm$mocha$TestCallbackContext, */ done: $npm$mocha$done) => mixed): $npm$mocha$Test; 47 | skip(expectation: string, callback?: (/* this: $npm$mocha$TestCallbackContext, */ done: $npm$mocha$done) => mixed): void; 48 | timeout(ms: number): void; 49 | state: 'failed' | 'passed'; 50 | } 51 | 52 | declare interface $npm$mocha$Runner {} 53 | 54 | declare class $npm$mocha$BaseReporter { 55 | stats: { 56 | suites: number; 57 | tests: number; 58 | passes: number; 59 | pending: number; 60 | failures: number; 61 | }; 62 | 63 | constructor(runner: $npm$mocha$Runner): $npm$mocha$BaseReporter; 64 | } 65 | 66 | declare class $npm$mocha$DocReporter extends $npm$mocha$BaseReporter {} 67 | declare class $npm$mocha$DotReporter extends $npm$mocha$BaseReporter {} 68 | declare class $npm$mocha$HTMLReporter extends $npm$mocha$BaseReporter {} 69 | declare class $npm$mocha$HTMLCovReporter extends $npm$mocha$BaseReporter {} 70 | declare class $npm$mocha$JSONReporter extends $npm$mocha$BaseReporter {} 71 | declare class $npm$mocha$JSONCovReporter extends $npm$mocha$BaseReporter {} 72 | declare class $npm$mocha$JSONStreamReporter extends $npm$mocha$BaseReporter {} 73 | declare class $npm$mocha$LandingReporter extends $npm$mocha$BaseReporter {} 74 | declare class $npm$mocha$ListReporter extends $npm$mocha$BaseReporter {} 75 | declare class $npm$mocha$MarkdownReporter extends $npm$mocha$BaseReporter {} 76 | declare class $npm$mocha$MinReporter extends $npm$mocha$BaseReporter {} 77 | declare class $npm$mocha$NyanReporter extends $npm$mocha$BaseReporter {} 78 | declare class $npm$mocha$ProgressReporter extends $npm$mocha$BaseReporter { 79 | constructor(runner: $npm$mocha$Runner, options?: { 80 | open?: string; 81 | complete?: string; 82 | incomplete?: string; 83 | close?: string; 84 | }): $npm$mocha$ProgressReporter; 85 | } 86 | declare class $npm$mocha$SpecReporter extends $npm$mocha$BaseReporter {} 87 | declare class $npm$mocha$TAPReporter extends $npm$mocha$BaseReporter {} 88 | declare class $npm$mocha$XUnitReporter extends $npm$mocha$BaseReporter { 89 | constructor(runner: $npm$mocha$Runner, options?: any): $npm$mocha$XUnitReporter; 90 | } 91 | 92 | declare class $npm$mocha$Mocha { 93 | currentTest: $npm$mocha$TestDefinition; 94 | constructor(options?: { 95 | grep?: RegExp; 96 | ui?: string; 97 | reporter?: string; 98 | timeout?: number; 99 | reporterOptions?: any; 100 | slow?: number; 101 | bail?: boolean; 102 | }): $npm$mocha$Mocha; 103 | setup(options: $npm$mocha$SetupOptions): this; 104 | bail(value?: boolean): this; 105 | addFile(file: string): this; 106 | reporter(name: string): this; 107 | reporter(reporter: (runner: $npm$mocha$Runner, options: any) => any): this; 108 | ui(value: string): this; 109 | grep(value: string): this; 110 | grep(value: RegExp): this; 111 | invert(): this; 112 | ignoreLeaks(value: boolean): this; 113 | checkLeaks(): this; 114 | throwError(error: Error): void; 115 | growl(): this; 116 | globals(value: string): this; 117 | globals(values: Array): this; 118 | useColors(value: boolean): this; 119 | useInlineDiffs(value: boolean): this; 120 | timeout(value: number): this; 121 | slow(value: number): this; 122 | enableTimeouts(value: boolean): this; 123 | asyncOnly(value: boolean): this; 124 | noHighlighting(value: boolean): this; 125 | run(onComplete?: (failures: number) => void): $npm$mocha$Runner; 126 | 127 | static reporters: { 128 | Doc: $npm$mocha$DocReporter, 129 | Dot: $npm$mocha$DotReporter, 130 | HTML: 
$npm$mocha$HTMLReporter, 131 | HTMLCov: $npm$mocha$HTMLCovReporter, 132 | JSON: $npm$mocha$JSONReporter, 133 | JSONCov: $npm$mocha$JSONCovReporter, 134 | JSONStream: $npm$mocha$JSONStreamReporter, 135 | Landing: $npm$mocha$LandingReporter, 136 | List: $npm$mocha$ListReporter, 137 | Markdown: $npm$mocha$MarkdownReporter, 138 | Min: $npm$mocha$MinReporter, 139 | Nyan: $npm$mocha$NyanReporter, 140 | Progress: $npm$mocha$ProgressReporter, 141 | }; 142 | } 143 | 144 | // declare interface $npm$mocha$HookCallbackContext { 145 | // skip(): void; 146 | // timeout(ms: number): void; 147 | // [index: string]: any; 148 | // } 149 | 150 | declare interface $npm$mocha$Runnable { 151 | title: string; 152 | fn: Function; 153 | async: boolean; 154 | sync: boolean; 155 | timedOut: boolean; 156 | } 157 | 158 | declare interface $npm$mocha$Test extends $npm$mocha$Runnable { 159 | parent: $npm$mocha$Suite; 160 | pending: boolean; 161 | state: 'failed' | 'passed' | void; 162 | fullTitle(): string; 163 | } 164 | 165 | // declare interface $npm$mocha$BeforeAndAfterContext extends $npm$mocha$HookCallbackContext { 166 | // currentTest: $npm$mocha$Test; 167 | // } 168 | 169 | declare var mocha: $npm$mocha$Mocha; 170 | declare var describe: $npm$mocha$ContextDefinition; 171 | declare var xdescribe: $npm$mocha$ContextDefinition; 172 | declare var context: $npm$mocha$ContextDefinition; 173 | declare var suite: $npm$mocha$ContextDefinition; 174 | declare var it: $npm$mocha$TestDefinition; 175 | declare var xit: $npm$mocha$TestDefinition; 176 | declare var test: $npm$mocha$TestDefinition; 177 | declare var specify: $npm$mocha$TestDefinition; 178 | 179 | declare function run(): void; 180 | 181 | declare function setup(callback: (/* this: $npm$mocha$BeforeAndAfterContext, */ done: $npm$mocha$done) => mixed): void; 182 | declare function teardown(callback: (/* this: $npm$mocha$BeforeAndAfterContext, */ done: $npm$mocha$done) => mixed): void; 183 | declare function suiteSetup(callback: (/* this: $npm$mocha$HookCallbackContext, */ done: $npm$mocha$done) => mixed): void; 184 | declare function suiteTeardown(callback: (/* this: $npm$mocha$HookCallbackContext, */ done: $npm$mocha$done) => mixed): void; 185 | declare function before(callback: (/* this: $npm$mocha$HookCallbackContext, */ done: $npm$mocha$done) => mixed): void; 186 | declare function before(description: string, callback: (/* this: $npm$mocha$HookCallbackContext, */ done: $npm$mocha$done) => mixed): void; 187 | declare function after(callback: (/* this: $npm$mocha$HookCallbackContext, */ done: $npm$mocha$done) => mixed): void; 188 | declare function after(description: string, callback: (/* this: $npm$mocha$HookCallbackContext, */ done: $npm$mocha$done) => mixed): void; 189 | declare function beforeEach(callback: (/* this: $npm$mocha$BeforeAndAfterContext, */ done: $npm$mocha$done) => mixed): void; 190 | declare function beforeEach(description: string, callback: (/* this: $npm$mocha$BeforeAndAfterContext, */ done: $npm$mocha$done) => mixed): void; 191 | declare function afterEach(callback: (/* this: $npm$mocha$BeforeAndAfterContext, */ done: $npm$mocha$done) => mixed): void; 192 | declare function afterEach(description: string, callback: (/* this: $npm$mocha$BeforeAndAfterContext, */ done: $npm$mocha$done) => mixed): void; 193 | 194 | declare module "mocha" { 195 | declare export var mocha: typeof mocha; 196 | declare export var describe: typeof describe; 197 | declare export var xdescribe: typeof xdescribe; 198 | declare export var context: typeof context; 199 | 
declare export var suite: typeof suite; 200 | declare export var it: typeof it; 201 | declare export var xit: typeof xit; 202 | declare export var test: typeof test; 203 | declare export var specify: typeof specify; 204 | 205 | declare export var run: typeof run; 206 | 207 | declare export var setup: typeof setup; 208 | declare export var teardown: typeof teardown; 209 | declare export var suiteSetup: typeof suiteSetup; 210 | declare export var suiteTeardown: typeof suiteTeardown; 211 | declare export var before: typeof before; 212 | declare export var before: typeof before; 213 | declare export var after: typeof after; 214 | declare export var after: typeof after; 215 | declare export var beforeEach: typeof beforeEach; 216 | declare export var beforeEach: typeof beforeEach; 217 | declare export var afterEach: typeof afterEach; 218 | declare export var afterEach: typeof afterEach; 219 | 220 | declare export default $npm$mocha$Mocha; 221 | } 222 | -------------------------------------------------------------------------------- /flow/libs/hippie.js: -------------------------------------------------------------------------------- 1 | declare module 'hippie' { 2 | declare type Hippie = { 3 | (string): Hippie, 4 | 5 | base: string => Hippie, 6 | 7 | post: string => Hippie, 8 | get: string => Hippie, 9 | send: Object => Hippie, 10 | end: () => Promise, 11 | 12 | expectValue: (string, any) => Hippie, 13 | expectBody: any => Hippie, 14 | // FIXME: implement (res, body, next) parameter types 15 | expect: ((Object, Object, Function) => void) => any, 16 | json: () => Hippie, 17 | }; 18 | } 19 | -------------------------------------------------------------------------------- /flow/libs/pg_v7.x.x.js: -------------------------------------------------------------------------------- 1 | declare module pg { 2 | // Note: Currently There are some issues in Function overloading. 3 | // https://github.com/facebook/flow/issues/2423 4 | // So i temporarily remove the 5 | // `((event: string, listener: Function) => EventEmitter );` 6 | // from all overloading for EventEmitter.on(). 7 | 8 | // `any` types exised in this file, cause of currently `mixed` did not work well 9 | // in Function Overloading. 10 | 11 | // `Function` types exised in this file, cause of they come from another 12 | // untyped npm lib. 13 | 14 | /* Cause of > 28 | /* 29 | * PgPoolConfig's properties are passed unchanged to both 30 | * the node-postgres Client constructor and the node-pool constructor 31 | * allowing you to fully configure the behavior of both 32 | * node-pool (https://github.com/coopernurse/node-pool) 33 | */ 34 | declare type PgPoolConfig = { 35 | // node-pool ---------------- 36 | name: string, 37 | create: Function, 38 | destroy: Function, 39 | max: number, 40 | min: number, 41 | refreshIdle: boolean, 42 | idleTimeoutMillis: number, 43 | connectionTimeoutMillis: number, 44 | reapIntervalMillis: number, 45 | returnToHead: boolean, 46 | priorityRange: number, 47 | validate: Function, 48 | validateAsync: Function, 49 | log: Function, 50 | 51 | // node-postgres Client ------ 52 | //database user's name 53 | user: string, 54 | //name of database to connect 55 | database: string, 56 | //database user's password 57 | password: string, 58 | //database port 59 | port: number, 60 | // database host. defaults to localhost 61 | host?: string, 62 | // whether to try SSL/TLS to connect to server. 
default value: false 63 | ssl?: boolean, 64 | // name displayed in the pg_stat_activity view and included in CSV log entries 65 | // default value: process.env.PGAPPNAME 66 | application_name?: string, 67 | // fallback value for the application_name configuration parameter 68 | // default value: false 69 | fallback_application_name?: string, 70 | // max milliseconds any query using this connection will execute for before timing out in error. false=unlimited 71 | // default value: false 72 | statement_timeout?: boolean | number, 73 | // pg-pool 74 | Client: mixed, 75 | Promise: mixed, 76 | onCreate: Function, 77 | }; 78 | 79 | /* 80 | * Not extends from Client, cause some of Client's functions(ex: connect and end) 81 | * should not be used by PoolClient (which returned from Pool.connect). 82 | */ 83 | declare type PoolClient = { 84 | release(error?: mixed): void, 85 | 86 | query: 87 | ( (query: QueryConfig|string, callback?: QueryCallback) => Query ) & 88 | ( (text: string, values: Array, callback?: QueryCallback) => Query ), 89 | 90 | on: 91 | ((event: 'drain', listener: () => void) => events$EventEmitter )& 92 | ((event: 'error', listener: (err: PG_ERROR) => void) => events$EventEmitter )& 93 | ((event: 'notification', listener: (message: any) => void) => events$EventEmitter )& 94 | ((event: 'notice', listener: (message: any) => void) => events$EventEmitter )& 95 | ((event: 'end', listener: () => void) => events$EventEmitter ), 96 | } 97 | 98 | declare type PoolConnectCallback = (error: PG_ERROR|null, 99 | client: PoolClient|null, done: DoneCallback) => void; 100 | declare type DoneCallback = (error?: mixed) => void; 101 | // https://github.com/facebook/flow/blob/master/lib/node.js#L581 102 | // on() returns a events$EventEmitter 103 | declare class Pool extends events$EventEmitter { 104 | constructor(options: $Shape, Client?: Class): void; 105 | connect(cb?: PoolConnectCallback): Promise; 106 | take(cb?: PoolConnectCallback): Promise; 107 | end(cb?: DoneCallback): Promise; 108 | 109 | // Note: not like the pg's Client, the Pool.query return a Promise, 110 | // not a Thenable Query which Client returned. 111 | // And there is a flow(<0.34) issue here, when Array, 112 | // the overloading will not work 113 | query: 114 | ( (query: QueryConfig|string, callback?: QueryCallback) => Promise ) & 115 | ( (text: string, values: Array, callback?: QueryCallback) => Promise); 116 | 117 | /* flow issue: https://github.com/facebook/flow/issues/2423 118 | * When this fixed, this overloading can be used. 
119 | */ 120 | /* 121 | on: 122 | ((event: 'connect', listener: (client: PoolClient) => void) => events$EventEmitter )& 123 | ((event: 'acquire', listener: (client: PoolClient) => void) => events$EventEmitter )& 124 | ((event: "error", listener: (err: PG_ERROR) => void) => events$EventEmitter )& 125 | ((event: string, listener: Function) => events$EventEmitter); 126 | */ 127 | 128 | // Added as a fix to the pg_v7.x.x.js Pool type schema 129 | ending: boolean; 130 | } 131 | 132 | // <<------------- copy from 'pg-pool' ------------------------------ 133 | 134 | 135 | // error 136 | declare type PG_ERROR = { 137 | name: string, 138 | length: number, 139 | severity: string, 140 | code: string, 141 | detail: string|void, 142 | hint: string|void, 143 | position: string|void, 144 | internalPosition: string|void, 145 | internalQuery: string|void, 146 | where: string|void, 147 | schema: string|void, 148 | table: string|void, 149 | column: string|void, 150 | dataType: string|void, 151 | constraint: string|void, 152 | file: string|void, 153 | line: string|void, 154 | routine: string|void 155 | }; 156 | 157 | declare type ClientConfig = { 158 | //database user's name 159 | user?: string, 160 | //name of database to connect 161 | database?: string, 162 | //database user's password 163 | password?: string, 164 | //database port 165 | port?: number, 166 | // database host. defaults to localhost 167 | host?: string, 168 | // whether to try SSL/TLS to connect to server. default value: false 169 | ssl?: boolean, 170 | // name displayed in the pg_stat_activity view and included in CSV log entries 171 | // default value: process.env.PGAPPNAME 172 | application_name?: string, 173 | // fallback value for the application_name configuration parameter 174 | // default value: false 175 | fallback_application_name?: string, 176 | } 177 | 178 | declare type Row = { 179 | [key: string]: mixed, 180 | }; 181 | declare type ResultSet = { 182 | command: string, 183 | rowCount: number, 184 | oid: number, 185 | rows: Array, 186 | }; 187 | declare type ResultBuilder = { 188 | command: string, 189 | rowCount: number, 190 | oid: number, 191 | rows: Array, 192 | addRow: (row: Row) => void, 193 | }; 194 | declare type QueryConfig = { 195 | name?: string, 196 | text: string, 197 | values?: any[], 198 | }; 199 | 200 | declare type QueryCallback = (err: PG_ERROR|null, result: ResultSet|void) => void; 201 | declare type ClientConnectCallback = (err: PG_ERROR|null, client: Client|void) => void; 202 | 203 | /* 204 | * lib/query.js 205 | * Query extends from EventEmitter in source code. 206 | * but in Flow there is no multiple extends. 207 | * And in Flow await is a `declare function $await(p: Promise | T): T;` 208 | * seems can not resolve a Thenable's value type directly 209 | * so `Query extends Promise` to make thing temporarily work. 210 | * like this: 211 | * const q = client.query('select * from some'); 212 | * q.on('row',cb); // Event 213 | * const result = await q; // or await 214 | * 215 | * ToDo: should find a better way. 216 | */ 217 | declare class Query extends Promise { 218 | then( onFulfill?: null | (value: ResultSet) => Promise | U, 219 | onReject?: null | (error: PG_ERROR) => Promise | U 220 | ): Promise; 221 | // Because then and catch return a Promise, 222 | // .then.catch will lose catch's type information PG_ERROR. 
223 | catch( onReject?: null | (error: PG_ERROR) => ?Promise | U ): Promise; 224 | 225 | on : 226 | ((event: 'row', listener: (row: Row, result: ResultBuilder) => void) => events$EventEmitter )& 227 | ((event: 'end', listener: (result: ResultBuilder) => void) => events$EventEmitter )& 228 | ((event: 'error', listener: (err: PG_ERROR) => void) => events$EventEmitter ); 229 | } 230 | 231 | /* 232 | * lib/client.js 233 | * Note: not extends from EventEmitter, for This Type returned by on(). 234 | * Flow's EventEmitter force return a EventEmitter in on(). 235 | * ToDo: Not sure in on() if return events$EventEmitter or this will be more suitable 236 | * return this will restrict event to given literial when chain on().on().on(). 237 | * return a events$EventEmitter will fallback to raw EventEmitter, when chains 238 | */ 239 | declare class Client { 240 | constructor(config?: string | ClientConfig): void; 241 | connect(callback?: ClientConnectCallback):void; 242 | end(): void; 243 | 244 | escapeLiteral(str: string): string; 245 | escapeIdentifier(str: string): string; 246 | 247 | query: 248 | ( (query: QueryConfig|string, callback?: QueryCallback) => Query ) & 249 | ( (text: string, values: Array, callback?: QueryCallback) => Query ); 250 | 251 | on: 252 | ((event: 'drain', listener: () => void) => this )& 253 | ((event: 'error', listener: (err: PG_ERROR) => void) => this )& 254 | ((event: 'notification', listener: (message: any) => void) => this )& 255 | ((event: 'notice', listener: (message: any) => void) => this )& 256 | ((event: 'end', listener: () => void) => this ); 257 | } 258 | 259 | /* 260 | * require('pg-types') 261 | */ 262 | declare type TypeParserText = (value: string) => any; 263 | declare type TypeParserBinary = (value: Buffer) => any; 264 | declare type Types = { 265 | getTypeParser: 266 | ((oid: number, format?: 'text') => TypeParserText )& 267 | ((oid: number, format: 'binary') => TypeParserBinary ); 268 | 269 | setTypeParser: 270 | ((oid: number, format?: 'text', parseFn: TypeParserText) => void )& 271 | ((oid: number, format: 'binary', parseFn: TypeParserBinary) => void)& 272 | ((oid: number, parseFn: TypeParserText) => void), 273 | } 274 | 275 | /* 276 | * lib/index.js ( class PG) 277 | */ 278 | declare class PG extends events$EventEmitter { 279 | types: Types; 280 | Client: Class; 281 | Pool: Class; 282 | Connection: mixed; //Connection is used internally by the Client. 283 | constructor(client: Client): void; 284 | native: { // native binding, have the same capability like PG 285 | types: Types; 286 | Client: Class; 287 | Pool: Class; 288 | Connection: mixed; 289 | }; 290 | // The end(),connect(),cancel() in PG is abandoned ? 291 | } 292 | 293 | // These class are not exposed by pg. 
294 | declare type PoolType = Pool; 295 | declare type PGType = PG; 296 | declare type QueryType = Query; 297 | // module export, keep same structure with index.js 298 | declare module.exports: PG; 299 | } 300 | -------------------------------------------------------------------------------- /flow/types.js: -------------------------------------------------------------------------------- 1 | import type { Logger } from 'bunyan'; 2 | import type { ResultSet } from 'pg'; 3 | import type { AxiosPromise } from 'axios'; 4 | 5 | declare module 'icarus-backend' { 6 | declare type ServerConfig = { 7 | logger: Logger, 8 | apiConfig: ApiConfig 9 | }; 10 | 11 | declare type ApiConfig = { 12 | addressesRequestLimit: number, 13 | txHistoryResponseLimit: number, 14 | }; 15 | 16 | declare type Request = { 17 | body: { 18 | addresses: Array, 19 | }, 20 | }; 21 | 22 | declare type Response = { 23 | send: Function, 24 | }; 25 | 26 | declare type TxHistoryRequest = { 27 | body: { 28 | addresses: Array, 29 | dateFrom: Date, 30 | }, 31 | }; 32 | 33 | declare type SignedTxRequest = { 34 | body: SignedTx 35 | }; 36 | 37 | declare type SignedTx = { 38 | signedTx: string, 39 | }; 40 | 41 | declare type DbApi = { 42 | filterUsedAddresses: (addresses: Array) => Promise, 43 | unspentAddresses: () => Promise, 44 | utxoForAddresses: (addresses: Array) => Promise, 45 | utxoSumForAddresses: (addresses: Array) => Promise, 46 | transactionsHistoryForAddresses: ( 47 | limit: number, 48 | addresses: Array, 49 | dateFrom: Date, 50 | txHash: ?string, 51 | ) => Promise, 52 | }; 53 | 54 | declare type ImporterApi = { 55 | sendTx: (tx: SignedTx) => AxiosPromise 56 | }; 57 | 58 | declare type ImporterResponse = { 59 | status: number, 60 | data: any 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "icarus-poc-backend-service", 3 | "version": "0.4.0", 4 | "description": "Icarus backend service that will allow wallet users to access blockchain data", 5 | "main": "./src/index.js", 6 | "scripts": { 7 | "flow": "flow", 8 | "flow-remove-types": "flow-remove-types ./src/ -d ./flow-files/ --all --pretty", 9 | "dev": "npm run flow-remove-types && export NODE_ENV=develop && nodemon ./flow-files/index.js | bunyan", 10 | "staging": "npm run flow-remove-types && export NODE_ENV=staging && nodemon ./flow-files/index.js | bunyan", 11 | "eslint": "eslint ./src", 12 | "load-test-db": "node ./scripts/test/load_test_db.js", 13 | "clean-test-db": "node ./scripts/test/clean_test_db.js", 14 | "unit-tests": "export NODE_ENV=test && mocha -r flow-remove-types/register \"test/unit/**/*.js\"", 15 | "integration-tests": "npm run load-test-db && export NODE_ENV=test && mocha -r flow-remove-types/register \"test/integration/**/*.js\"", 16 | "all-tests": "npm run unit-tests && npm run integration-tests", 17 | "coverage": "nyc npm run all-tests", 18 | "postintegration-tests": "npm run clean-test-db", 19 | "build-docker": "./scripts/docker/build-docker.sh", 20 | "launch-staging": "./scripts/docker/launch-staging.sh", 21 | "claudia": "flow-remove-types ./src/ -d ./flow-files/ --all --pretty && claudia" 22 | }, 23 | "repository": { 24 | "type": "git", 25 | "url": "git+https://github.com/input-output-hk/icaraus-poc-backend-service.git" 26 | }, 27 | "author": "", 28 | "license": "ISC", 29 | "bugs": { 30 | "url": "https://github.com/input-output-hk/icaraus-poc-backend-service/issues" 31 | }, 32 | 
"homepage": "https://github.com/input-output-hk/icaraus-poc-backend-service#readme", 33 | "dependencies": { 34 | "axios": "0.18.0", 35 | "bunyan": "1.8.12", 36 | "claudia-api-builder": "4.0.2", 37 | "config": "1.30.0", 38 | "lodash": "4.17.10", 39 | "moment": "2.22.1", 40 | "pg": "7.4.1", 41 | "restify": "7.1.1", 42 | "restify-bunyan-logger": "2.0.7", 43 | "restify-cors-middleware": "1.1.0", 44 | "restify-errors": "6.1.0", 45 | "ws": "5.2.0" 46 | }, 47 | "devDependencies": { 48 | "babel-eslint": "8.2.3", 49 | "chai": "4.1.2", 50 | "chai-as-promised": "7.1.1", 51 | "claudia": "5.0.0", 52 | "eslint": "4.19.1", 53 | "eslint-config-airbnb": "16.1.0", 54 | "eslint-config-airbnb-base": "12.1.0", 55 | "eslint-plugin-flowtype": "2.48.0", 56 | "eslint-plugin-import": "2.11.0", 57 | "eslint-plugin-mocha": "5.0.0", 58 | "flow-bin": "0.73.0", 59 | "flow-remove-types": "1.2.3", 60 | "hippie": "0.5.2", 61 | "mocha": "5.2.0", 62 | "nodemon": "1.17.5", 63 | "npm": "6.1.0", 64 | "nyc": "12.0.1", 65 | "shuffle-array": "1.0.1", 66 | "sinon": "5.0.10" 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /scripts/docker/Readme.md: -------------------------------------------------------------------------------- 1 | # About 2 | 3 | This scripts can be used to: 4 | 5 | * Build a docker image 6 | * Launch the container in staging 7 | 8 | _Note: this is a first version, the process should be improved_ 9 | 10 | ## Build a docker image 11 | 12 | 1. Checkout the corresponding branch 13 | 2. As docker image tag will be set as the package.json version field, if necessary, bump 14 | the version number 15 | 3. In $PROJECT_ROOT folder, run `sudo npm run build-docker` 16 | 4. A new image in the repository `icarus/icarus-poc-backend-service` with tag `version` 17 | will be created 18 | 19 | **Important: If there is already an image with the same version number, it will overwrite it** 20 | 21 | ## Deploy in staging 22 | 23 | 1. Access (ssh) into the server 24 | 2. Checkout branch to deploy 25 | 3. Execute [build](# Build a docker image) 26 | 4. In the $PROJECT_ROOT folder, run `sudo npm run launch-staging -- "$VERSION"` where `$VERSION` 27 | corresponds to the one generated in build step. 28 | 29 | ### Important Note about staging config 30 | 31 | Staging database configuration (or any other sensitive information) **will never be pushed** 32 | to the repository. In order to be able to configure it we will use environment variables. 33 | 34 | To do so, there is a file `$PROJECT_ROOT/config/custom-environment-variables.json` where 35 | the mapping to `node-config` library is defined. In the server, this enviroment variables 36 | are loaded from `~/icarus-backend-staging-env` so that file must exist and look like: 37 | 38 | ``` 39 | export DB_USER=the_user 40 | export DB_HOST=the_host 41 | export DB=the_db 42 | export DB_PASSWORD=the_password 43 | export DB_PORT=the_port 44 | ``` 45 | -------------------------------------------------------------------------------- /scripts/docker/build-docker.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | MY_DIR="$(dirname "$0")" 4 | source "$MY_DIR/helper.sh"; 5 | 6 | # Version key/value should be on his own line 7 | PACKAGE_VERSION=$(readPackageJSON 'version'); 8 | NAME=$(readPackageJSON 'name'); 9 | 10 | docker build -t "icarus/$NAME:$PACKAGE_VERSION" -f ./docker/Dockerfile . 
/scripts/docker/build-docker.sh: 1 | #! /bin/bash 2 | 3 | MY_DIR="$(dirname "$0")" 4 | source "$MY_DIR/helper.sh"; 5 | 6 | # Version key/value should be on its own line 7 | PACKAGE_VERSION=$(readPackageJSON 'version'); 8 | NAME=$(readPackageJSON 'name'); 9 | 10 | docker build -t "icarus/$NAME:$PACKAGE_VERSION" -f ./docker/Dockerfile . 11 | --------------------------------------------------------------------------------
/scripts/docker/helper.sh: 1 | #! /bin/bash 2 | 3 | function readPackageJSON() { 4 | echo $(cat package.json \ 5 | | grep "$1" \ 6 | | head -1 \ 7 | | awk -F: '{ print $2 }' \ 8 | | sed 's/[",]//g' \ 9 | | sed 's/\ //g') 10 | } 11 | --------------------------------------------------------------------------------
/scripts/docker/launch-staging.sh: 1 | #! /bin/bash 2 | 3 | MY_DIR="$(dirname "$0")" 4 | source "$MY_DIR/helper.sh"; 5 | 6 | TAG=$1 7 | NAME=$(readPackageJSON 'name'); 8 | 9 | docker stop $NAME || true && docker rm $NAME || true 10 | 11 | source ~/icarus-backend-staging-env 12 | 13 | docker run -d -e DB_USER=$DB_USER \ 14 | -e DB_HOST=$DB_HOST \ 15 | -e DB=$DB \ 16 | -e DB_PASSWORD=$DB_PASSWORD \ 17 | -e DB_PORT=$DB_PORT \ 18 | -e NODE_ENV=staging \ 19 | --name $NAME \ 20 | -p 443:443 icarus/$NAME:$TAG --------------------------------------------------------------------------------
/scripts/test/clean_test_db.js: 1 | #! /bin/node 2 | 3 | const config = require('config'); 4 | const { execFileSync } = require('child_process'); 5 | 6 | const { host, password, user, database } = config.get('db'); 7 | 8 | process.env.PGPASSWORD = password; 9 | 10 | execFileSync('dropdb', [`-U${user}`, `-h${host}`, database]); 11 | --------------------------------------------------------------------------------
/scripts/test/load_test_db.js: 1 | #! /bin/node 2 | 3 | const config = require('config'); 4 | const { execFileSync, execSync } = require('child_process'); 5 | 6 | const { host, password, user, database } = config.get('db'); 7 | 8 | process.env.PGPASSWORD = password; 9 | 10 | const runDBCommand = fn => { 11 | try { 12 | fn(); 13 | } catch (err) { 14 | // ignore: the command is allowed to fail (e.g. the DB already exists) 15 | } 16 | }; 17 | 18 | try { 19 | console.log('Creating DB'); 20 | runDBCommand(() => execFileSync('createdb', [`-U${user}`, `-h${host}`, database])); 21 | console.log('Loading Data'); 22 | runDBCommand(() => execSync(`psql -U ${user} -h ${host} ${database} < ./test/integration/test-db.sql`)); 23 | } catch (err) { 24 | // DB is already present, it's fine 25 | } 26 | --------------------------------------------------------------------------------
/scripts/tls/README.md: 1 | # About 2 | 3 | This script creates a self-signed cert to be used in the dev environment to allow 4 | HTTPS connections. 5 | 6 | # How to run 7 | 8 | 1. Execute `bash ./tls.sh` 9 | 10 | * When asked for _Common Name_ enter localhost 11 | 12 | 2. Update chrome 13 | 1. chrome://settings/certificates?search=ssl 14 | 2. authorities > import 15 | 3. Select ca.pem 16 | 17 | 3. 
Restart chrome 18 | -------------------------------------------------------------------------------- /scripts/tls/server.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDwTCCAqmgAwIBAgIJAJ506F8lSFNEMA0GCSqGSIb3DQEBCwUAMFkxCzAJBgNV 3 | BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX 4 | aWRnaXRzIFB0eSBMdGQxEjAQBgNVBAMMCWxvY2FsaG9zdDAeFw0xODA0MjYxNzI0 5 | NDBaFw0yODA0MjMxNzI0NDBaMIGTMQswCQYDVQQGEwJERTEPMA0GA1UECAwGQmVy 6 | bGluMRIwEAYDVQQHDAlOZXVLb2VsbG4xFTATBgNVBAoMDFdlaXNlc3RyYXNzZTEV 7 | MBMGA1UECwwMbG9jYWxfUm9vdENBMR0wGwYJKoZIhvcNAQkBFg5pa2tlQGxvY2Fs 8 | aG9zdDESMBAGA1UEAwwJbG9jYWxob3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A 9 | MIIBCgKCAQEApn1tfMT9wpp3yIE86Dt3EicXuBPre8Ktc9DqQr3+9W7dLM5A61ay 10 | jdEiiQVCMhpJV8eeSAEkBZG6pCKenRwqp7DoCwQuMOP7UjkalGNlC91Tma2akjVA 11 | MAjiFgPvOCwAIcUzWrCiU6OtlkvrkDDJI7zt13PFgumRzdHDOyQzb3TIEQm45ZUX 12 | bK+MWh0IeM49u1p4GqBjXKNi97Wm41BHStpXNkXAi2xeVY4hFP/w1sGzjen9uDwj 13 | 9v29M/jChjlnc28fHsgrMqBIX7NIRKR8AYZbdCGw04ItqC3ZF0dT9emfQkLPnwCd 14 | 4bpIM61vYfDaxIU6aiBp0AHCkQhzt+jsiwIDAQABo1EwTzAfBgNVHSMEGDAWgBTJ 15 | uSQQ5X6O8lKPFhSPOHUtf+IKUzAJBgNVHRMEAjAAMAsGA1UdDwQEAwIE8DAUBgNV 16 | HREEDTALgglsb2NhbGhvc3QwDQYJKoZIhvcNAQELBQADggEBAETHi3+fobbomL5n 17 | HIN0qSWfjZ9JUiVxl+WBlw7Iyx9PSh5177KUTpqr/1wcaP1JpIYHewKtlZj8QnTD 18 | aIHd+Z6Jz9RzhlQCPwi2Qmii41J30Zj2DuehH0WDvGNacHNHtIsAQ2Y9x8UV2n2E 19 | eA+ZxeEUbSg0CXn1viN0AbvpJs3bJb+CbVGaqnP+wpa8ySnqMFmwIpUWameGVRO7 20 | jNpxIPHX9aGtal2M4FGXV4GN7680JXq+wi4j02RIkJRN00XfOZAXWOWi4rUoT4OD 21 | vEHnn7mSpoIfKk/TwGN/DOsGWl4bH0+KBGYhOvPET+JUj/QFJWGxS6rgdUgw+ltn 22 | yw41YQI= 23 | -----END CERTIFICATE----- 24 | -------------------------------------------------------------------------------- /scripts/tls/server.csr: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE REQUEST----- 2 | MIIC2TCCAcECAQAwgZMxCzAJBgNVBAYTAkRFMQ8wDQYDVQQIDAZCZXJsaW4xEjAQ 3 | BgNVBAcMCU5ldUtvZWxsbjEVMBMGA1UECgwMV2Vpc2VzdHJhc3NlMRUwEwYDVQQL 4 | DAxsb2NhbF9Sb290Q0ExHTAbBgkqhkiG9w0BCQEWDmlra2VAbG9jYWxob3N0MRIw 5 | EAYDVQQDDAlsb2NhbGhvc3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB 6 | AQCmfW18xP3CmnfIgTzoO3cSJxe4E+t7wq1z0OpCvf71bt0szkDrVrKN0SKJBUIy 7 | GklXx55IASQFkbqkIp6dHCqnsOgLBC4w4/tSORqUY2UL3VOZrZqSNUAwCOIWA+84 8 | LAAhxTNasKJTo62WS+uQMMkjvO3Xc8WC6ZHN0cM7JDNvdMgRCbjllRdsr4xaHQh4 9 | zj27WngaoGNco2L3tabjUEdK2lc2RcCLbF5VjiEU//DWwbON6f24PCP2/b0z+MKG 10 | OWdzbx8eyCsyoEhfs0hEpHwBhlt0IbDTgi2oLdkXR1P16Z9CQs+fAJ3hukgzrW9h 11 | 8NrEhTpqIGnQAcKRCHO36OyLAgMBAAGgADANBgkqhkiG9w0BAQsFAAOCAQEADqSG 12 | gls2wFWIJQZ+yQyQWX9EWhVk+DhN9QeWdiNJg3jHLlexBQFcxAARF7+UePucKZpY 13 | tTPut7kqSWpXhxOyGy26sPzcsuiBnzK/NzhsCxVtCFv26NklDin10l0cz/IgpFnS 14 | MbZqS5wWGTwIWyG1Tw9Q6GXOcckX4YKaprrFsJKbgYQbvmVOzFJH4SToj3MzfxqQ 15 | snG4/IAmNCqFlS5DfTHb2zTEkefnBE4+mm+RTihRwxKRncuQwADxewY1ZoNRT8El 16 | e15xYOqRPxYgQE1rxyCQWnLu07VPkDnJVLuRDivhEqB7e8lvmTZ19vuMscNAdb/l 17 | ICZZjqp04gp3Fl0zpw== 18 | -----END CERTIFICATE REQUEST----- 19 | -------------------------------------------------------------------------------- /scripts/tls/server.key: -------------------------------------------------------------------------------- 1 | -----BEGIN PRIVATE KEY----- 2 | MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQCmfW18xP3CmnfI 3 | gTzoO3cSJxe4E+t7wq1z0OpCvf71bt0szkDrVrKN0SKJBUIyGklXx55IASQFkbqk 4 | Ip6dHCqnsOgLBC4w4/tSORqUY2UL3VOZrZqSNUAwCOIWA+84LAAhxTNasKJTo62W 5 | S+uQMMkjvO3Xc8WC6ZHN0cM7JDNvdMgRCbjllRdsr4xaHQh4zj27WngaoGNco2L3 6 | 
tabjUEdK2lc2RcCLbF5VjiEU//DWwbON6f24PCP2/b0z+MKGOWdzbx8eyCsyoEhf 7 | s0hEpHwBhlt0IbDTgi2oLdkXR1P16Z9CQs+fAJ3hukgzrW9h8NrEhTpqIGnQAcKR 8 | CHO36OyLAgMBAAECggEARNmN05ErV7TFYFeBtpAQ16Lu/iLOuSlyzXhRyDQGIb0t 9 | HgBVfH0n7dnUrFFOZKXWOsTEQgamvN7dDk96xIIHab2Q2C1sZK+DGSegREkuWk+2 10 | 6NCQIXKADt09kdkirloT2+NPL2voVSpU+NiAm+pw46SXvweTYGXy8+iREf4Qf0Bj 11 | NB/EyLb5ZCQPtKC0h4fxR1k8zuU3ZJK35G+R9HhFKECo8jZtOpDTTcJzpJrqEPEN 12 | Z04tqDvWzKthGL+TMcNJeh+QIIf4ayakY5PZ1g6tGCx8mRT1L4MKS0KoQxSMLHCV 13 | HGlFyiiB/RETB5OqILxWWbjt3gdecYL9K0b87YuiwQKBgQDWb/MHEV1mEQ+KFCX0 14 | /HIz/7LsVniMdahV61VmImEKp62Fu1UCOkKeEndLml7QLyF+cOYrujW0hzndy2K3 15 | uzsVRnV9lIkxOOdJKJjAweQjNbq0DsglTmY/sk3H9D/C961EdWvVdaj6ntdY18i5 16 | bTIrRF/7BZ3V0kv56O5sZpNdkQKBgQDGwmd8REjm7IdbVzWT1UJH9/hwM79/my/B 17 | DjHVJZFjDtcKA7iyhPHGxi4m2qGX8Autsewj2H4HOcpHdlU07W2jMDvJedjgl+4C 18 | 1yHO4lGwGuEzsF9ccAr3ep4VRs12f22g0mi+KstPQR6X/4HHPPGzlKF7jaTv/BwT 19 | S3mGDm8KWwKBgCibBlAeT6tWK7Uf35gmfZGvJ7htH2KpnPi4GXG/rNSRr+AU+yHy 20 | 2k6IfKKKVs3hPtxskIlTN3w55xFCxCy6fZOnxfPDvr7dYZ6O224UyRiJwhL6gZfe 21 | 4qnJH8VvUOugbuT5vzOqlhbvISQnqFDEhq1+wxYLmDm3Gr/2Bb/tJJahAoGAC3X8 22 | R6McXQx4haDSuxKJ/uJilf215iLJryUlZHgFGSQq7C3owYp+8opM8PqcZMl3su1W 23 | g36hTcNcUaCWKzYho05NtEFtaw33sHw9O9O7CB+UqslZxlfDmh+c6/coM9QHO0XG 24 | umh3Trufiv41uopeGY/Re/Ff+GKeL9KjUEKWzo8CgYBg3s3tAPFhJkh9kvqoHv7f 25 | r+sgDWjHhVmIpSJ8wWXxIINcjsVcdjBou24QlzfS2xg8x7qDeRSTL+OgyJQ24akT 26 | gaUHOaD3dUVxuq0pJry5TR1f50s/6MmScP7d2qT/XrFXxxDluDQgqRqFc60E/vBo 27 | UlFTXfdIOKTcTErf7wpc7Q== 28 | -----END PRIVATE KEY----- 29 | -------------------------------------------------------------------------------- /scripts/tls/server_rootCA.csr.cnf: -------------------------------------------------------------------------------- 1 | # server_rootCA.csr.cnf 2 | [req] 3 | default_bits = 2048 4 | prompt = no 5 | default_md = sha256 6 | distinguished_name = dn 7 | 8 | [dn] 9 | C=DE 10 | ST=Berlin 11 | L=NeuKoelln 12 | O=Weisestrasse 13 | OU=local_RootCA 14 | emailAddress=ikke@localhost 15 | CN = localhost -------------------------------------------------------------------------------- /scripts/tls/server_rootCA.key: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIEpAIBAAKCAQEAqMXyNpkl/MzbGFd453r7FfWkLzMSnpf+uMcVF4r/XIqXKEkj 3 | AfbijnxW1/m4BJ6OwY5b52OtVbrZc90ohXej21GTyHbUTigQawAH2T7OJl22RDMi 4 | nlkRkC62KgjRNPKzxhmgFj8LYPShrllY55UZWblm/HVgbRmgPh+SWfFIDR8ev0Ho 5 | YfywYgQS7DH/xSZxesnzH7p51bMQtFZz6jjMtpBtzIKlivXT2soxyxBYvAXyy7fV 6 | l0MKNsdu+/6KyLZ5Q1UaNPdBYi5FjsoAHV/zcr9hbuOCmrPN4sv3+v2GnCBEJFmD 7 | IRce77SG+YfnnI6tpNEAnoJUyCyGQVKVTaOPnwIDAQABAoIBAQCguRkny4K0kzXW 8 | oX5/GsvAJtR7Km0B4QdvWg+JAxKaQ1UdxUB0X3Nzc4+UT7hhlcmnI/X7jh4EtVLe 9 | Fu++4qeFddOBpUAbSeYQr8/JoBjbyoSHwBaoq3OhVgqeTxVEEhkHf2HHrbQdWfoa 10 | ufWDXKbunjQoGXOBklTatGBADRmre7pqQD1gAJ4vYzbWk8YoBs7UatSBWoAAAy/H 11 | Td5g2Y1hwOGuSLWf4SI0wvPUrGPHPz48+UDMb/w2GIGWT1jgSzgqZnjqJuELWLi+ 12 | 3Pz6EapkWST7nzKMnqppPmT5nVOVDR9NwvrB8+5qi22Qz4senUPvWgoBM/gsAIGr 13 | zOgAkhaBAoGBANO36lAfK1ZtLTkFKYLIBrn8v9T043uisKd3JArjZq7vDfN/EQpA 14 | qgadzfwRriC50zDcyql6VfPtj8YP6YmZGDqkUJ00naGzYLcB+0GjoQgD1vqA51JJ 15 | /bk07ZEj7TK6BSM9uJ1DGzFhO9/QfHlRTIzsXOMBiVZLS4QcH5jlWLnfAoGBAMwS 16 | m3P7u5+IJJeqs2WKE1q6cMZiEjKOoahgbdfX17j7yVwg8gX37p+6jeBIypR/ll1a 17 | 81WPK+ZljMyI/oBMp5lZ8arxixU8jzd4OtDfQWjeEt8FKwCZbHgRMQhL9f44BS6l 18 | ihGs0QzGEG7VNwJTHakChlSbBjzGPd8NhFFtq2JBAoGANd5uxPIZUcU3o3F3zWnd 19 | pEb1/FZO5y9O6V08E3UmtE3o69Kh4EzxtftC5OcOug7t+FnFSvfwlL8WPNvhnPwS 20 | 
GSGjzUwM6467hfUAHARpPaPUeCqCMQLYht47sBlHCgggtIXIymK/sYG+/USL10Zj 21 | RtwBClViv00rKrMJKxr3x1sCgYEAsckYIzBRIcfS0UsGpVRoTieVLk+sON1n2q+/ 22 | okZJir/Yiemt0CNSgVdeWOIxkrOgxM4Cpixn+33G7z3+MroVXRUt5FYwbBzpTFLz 23 | Mpr9eiXWXPM8pnYDOIsJo4fB1GrroNrY8VMUgMNg953LIbxVX7NyYez4vyOaLWOy 24 | PdqEb8ECgYBxFdxhFypt780r83u6qpBpStz8wdK+/busA51sg5zwWqg+ddOlIU8Q 25 | sQLLO14QaXAxCBA0VBO/5IMUB4f4k07QUZVOLoPkP6c2N5mgWCm/0dLdt1klhBzt 26 | mQov7ErJo3wjMeRBfCawgg2P9MAegEcjXhBpPxD3KLk7ci19QAReuw== 27 | -----END RSA PRIVATE KEY----- 28 | -------------------------------------------------------------------------------- /scripts/tls/server_rootCA.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDhTCCAm2gAwIBAgIJALummY/PlhhcMA0GCSqGSIb3DQEBCwUAMFkxCzAJBgNV 3 | BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX 4 | aWRnaXRzIFB0eSBMdGQxEjAQBgNVBAMMCWxvY2FsaG9zdDAeFw0xODA0MjYxNzI0 5 | MzlaFw0yODA0MjMxNzI0MzlaMFkxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21l 6 | LVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQxEjAQBgNV 7 | BAMMCWxvY2FsaG9zdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKjF 8 | 8jaZJfzM2xhXeOd6+xX1pC8zEp6X/rjHFReK/1yKlyhJIwH24o58Vtf5uASejsGO 9 | W+djrVW62XPdKIV3o9tRk8h21E4oEGsAB9k+ziZdtkQzIp5ZEZAutioI0TTys8YZ 10 | oBY/C2D0oa5ZWOeVGVm5Zvx1YG0ZoD4fklnxSA0fHr9B6GH8sGIEEuwx/8UmcXrJ 11 | 8x+6edWzELRWc+o4zLaQbcyCpYr109rKMcsQWLwF8su31ZdDCjbHbvv+isi2eUNV 12 | GjT3QWIuRY7KAB1f83K/YW7jgpqzzeLL9/r9hpwgRCRZgyEXHu+0hvmH55yOraTR 13 | AJ6CVMgshkFSlU2jj58CAwEAAaNQME4wHQYDVR0OBBYEFMm5JBDlfo7yUo8WFI84 14 | dS1/4gpTMB8GA1UdIwQYMBaAFMm5JBDlfo7yUo8WFI84dS1/4gpTMAwGA1UdEwQF 15 | MAMBAf8wDQYJKoZIhvcNAQELBQADggEBADh/dwz7bUeCQHz1+E3xkdQ3pOy5yv/8 16 | tPdSVmLcLAcXuY+GbCjfksruGS2VWwy2R7GJlIZyWp5S8jmohTcq57cPPR9OvzB2 17 | LzJuBE8GXqCoCs9eGY7sgvIzXv6PuOJlDWXIh8apJ9hBY3s4jI5NAIrmGwTe3mgF 18 | B/UW9A8SuJOYnUf5ozh+f8ibPl84JSQS7A2e+h4Ffutp8IbtrSIW9zbLAe+urrwV 19 | qpFnZuOVgr+N78AYhyXm/QgMdFtfBRsosDGkiBcto5S+zz+yyjI1RMquKeoICMng 20 | AS1cnDcTlNEY+3zuqaqmTyYo+AYdDMlEQSGksNoZqNiBPnO3C2HMnxQ= 21 | -----END CERTIFICATE----- 22 | -------------------------------------------------------------------------------- /scripts/tls/server_rootCA.srl: -------------------------------------------------------------------------------- 1 | 9E74E85F25485344 2 | -------------------------------------------------------------------------------- /scripts/tls/tls.sh: -------------------------------------------------------------------------------- 1 | # Create CA key and cert 2 | openssl genrsa -out server_rootCA.key 2048 3 | openssl req -x509 -new -nodes -key server_rootCA.key -sha256 -days 3650 -out server_rootCA.pem 4 | 5 | # Create server key 6 | openssl req -new -sha256 -nodes -out server.csr -newkey rsa:2048 -keyout server.key -config <( cat server_rootCA.csr.cnf ) 7 | 8 | # Create server cert 9 | openssl x509 -req -in server.csr -CA server_rootCA.pem -CAkey server_rootCA.key -CAcreateserial -out server.crt -days 3650 -sha256 -extfile v3.ext 10 | 11 | cp server_rootCA.pem ../../tls-files/develop/ca.pem 12 | cp server.crt ../../tls-files/develop/server.crt 13 | cp server.key ../../tls-files/develop/server.key -------------------------------------------------------------------------------- /scripts/tls/v3.ext: -------------------------------------------------------------------------------- 1 | # v3.ext 2 | authorityKeyIdentifier=keyid,issuer 3 | basicConstraints=CA:FALSE 4 | keyUsage = digitalSignature, nonRepudiation, keyEncipherment, dataEncipherment 5 | subjectAltName = 
@alt_names 6 | 7 | [alt_names] 8 | DNS.1 = localhost --------------------------------------------------------------------------------
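An aside on the TLS material above: v3.ext is what gives the dev certificate a `subjectAltName` of `localhost`; modern clients, Chrome included, reject certificates that lack a SAN. A hedged sketch of a Node client that trusts the generated dev CA (the port mirrors the `-p 443:443` mapping in launch-staging.sh and is an assumption for local runs; the healthcheck path comes from src/routes.js below):

```js
// Sketch only: pin the dev CA produced by tls.sh instead of disabling TLS checks.
const https = require('https');
const fs = require('fs');

https.get({
  hostname: 'localhost',
  port: 443, // assumption; use whatever the local server config actually sets
  path: '/api/healthcheck',
  ca: fs.readFileSync('./tls-files/develop/ca.pem'),
}, res => res.pipe(process.stdout));
```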
/src/cleanup.js: 1 | // @flow 2 | 3 | import type { Pool } from 'pg'; 4 | import type { Logger } from 'bunyan'; 5 | 6 | const exitHandler = (db: Pool, logger: Logger) => options => () => { 7 | if (!db.ending) { 8 | logger.info('Cleaning the APP'); 9 | db.end().then(() => { 10 | logger.info('DB Pool released!'); 11 | if (options.exit) process.exit(); 12 | }).catch(err => logger.error(err)); 13 | } else if (options.exit) process.exit(); 14 | }; 15 | 16 | function config(db: Pool, logger: Logger) { 17 | const onExit = options => exitHandler(db, logger)(options); 18 | 19 | // release the DB pool when the app is closing 20 | process.on('exit', onExit({ caller: 'exit' })); 21 | 22 | // catches ctrl+c event 23 | process.on('SIGINT', onExit({ caller: 'SIGINT', exit: true })); 24 | 25 | // catches "kill pid" (for example: nodemon restart) 26 | process.on('SIGUSR1', onExit({ exit: true })); 27 | process.on('SIGUSR2', onExit({ caller: 'SIGUSR2', exit: true })); 28 | } 29 | 30 | module.exports = config; 31 | --------------------------------------------------------------------------------
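An aside on cleanup.js above: it registers `exit`, `SIGINT`, `SIGUSR1` and `SIGUSR2`, but not `SIGTERM`, which is what `docker stop` (used by launch-staging.sh) sends before escalating to SIGKILL. If draining the pg pool on container shutdown matters, the missing hook is a one-liner reusing the same factory; a sketch under that assumption:

```js
// Sketch only: would sit inside config() in src/cleanup.js, next to the
// existing hooks, so the DB pool is also released on `docker stop`.
process.on('SIGTERM', onExit({ caller: 'SIGTERM', exit: true }));
```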
/src/db-api.js: 1 | // @flow 2 | 3 | import type { Pool, ResultSet } from 'pg'; 4 | import type { DbApi } from 'icarus-backend'; // eslint-disable-line 5 | 6 | /** 7 | * Returns the list of addresses that were used at least once (as input or output) 8 | * @param {Db Object} db 9 | * @param {Array<String>} addresses 10 | */ 11 | const filterUsedAddresses = (db: Pool) => async ( 12 | addresses: Array<string>, 13 | ): Promise<ResultSet> => 14 | db.query({ 15 | text: 'SELECT DISTINCT address FROM "tx_addresses" WHERE address = ANY($1)', 16 | values: [addresses], 17 | rowMode: 'array', 18 | }); 19 | 20 | const unspentAddresses = (db: Pool) => async (): Promise<ResultSet> => 21 | db.query({ 22 | text: 'SELECT DISTINCT utxos.receiver FROM utxos', 23 | rowMode: 'array', 24 | }); 25 | 26 | /** 27 | * Queries UTXO table looking for unspents for given addresses 28 | * 29 | * @param {Db Object} db 30 | * @param {Array<String>} addresses 31 | */ 32 | const utxoForAddresses = (db: Pool) => async (addresses: Array<string>) => 33 | db.query('SELECT * FROM "utxos" WHERE receiver = ANY($1)', [addresses]); 34 | 35 | const utxoSumForAddresses = (db: Pool) => async (addresses: Array<string>) => 36 | db.query('SELECT SUM(amount) FROM "utxos" WHERE receiver = ANY($1)', [ 37 | addresses, 38 | ]); 39 | 40 | // Cached queries 41 | const txHistoryQuery = (limit: number) => ` 42 | SELECT * 43 | FROM "txs" 44 | LEFT JOIN (SELECT * from "bestblock" LIMIT 1) f ON true 45 | WHERE 46 | hash = ANY ( 47 | SELECT tx_hash 48 | FROM "tx_addresses" 49 | where address = ANY ($1) 50 | ) 51 | AND last_update >= $2 52 | ORDER BY last_update ASC 53 | LIMIT ${limit} 54 | ` 55 | 56 | /** 57 | * Queries DB looking for transactions including (either inputs or outputs) 58 | * for the given addresses 59 | * 60 | * @param {Db Object} db 61 | * @param {Array<String>} addresses 62 | */ 63 | const transactionsHistoryForAddresses = (db: Pool) => async ( 64 | limit: number, 65 | addresses: Array<string>, 66 | dateFrom: Date, 67 | ): Promise<ResultSet> => db.query(txHistoryQuery(limit), [addresses, dateFrom]); 68 | 69 | module.exports = (db: Pool): DbApi => ({ 70 | filterUsedAddresses: filterUsedAddresses(db), 71 | unspentAddresses: unspentAddresses(db), 72 | utxoForAddresses: utxoForAddresses(db), 73 | utxoSumForAddresses: utxoSumForAddresses(db), 74 | transactionsHistoryForAddresses: transactionsHistoryForAddresses(db), 75 | }); 76 | --------------------------------------------------------------------------------
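An aside on `txHistoryQuery` above: it splices `limit` into the SQL string via a template literal. That is injection-safe here only because the value always comes from `apiConfig.txHistoryResponseLimit` (server config, see src/routes.js below), never from the request body. If the limit ever became caller-controlled, it should be bound as a parameter; Postgres accepts a placeholder in `LIMIT`, so the variant is mechanical:

```js
// Sketch only: the same query with the limit bound as $3 rather than interpolated.
const txHistoryQueryParameterized = `
  SELECT *
  FROM "txs"
  LEFT JOIN (SELECT * from "bestblock" LIMIT 1) f ON true
  WHERE
    hash = ANY (
      SELECT tx_hash
      FROM "tx_addresses"
      where address = ANY ($1)
    )
    AND last_update >= $2
  ORDER BY last_update ASC
  LIMIT $3
`;

// usage: db.query(txHistoryQueryParameterized, [addresses, dateFrom, limit]);
```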
/src/db.js: 1 | // @flow 2 | 3 | import type { PgPoolConfig, Pool } from 'pg'; 4 | 5 | const PG = require('pg'); 6 | 7 | module.exports = (dbSettings: PgPoolConfig): Pool => new PG.Pool(dbSettings); 8 | --------------------------------------------------------------------------------
/src/importer-api.js: 1 | // @flow 2 | const axios = require('axios'); 3 | 4 | import type { ImporterApi } from 'icarus-backend'; // eslint-disable-line 5 | 6 | module.exports = (importerSendTxEndpoint: string): ImporterApi => ({ 7 | sendTx: tx => axios.post(importerSendTxEndpoint, tx), 8 | }); 9 | --------------------------------------------------------------------------------
/src/index.js: 1 | const server = require('./server'); 2 | // Don't check client certs 3 | process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0'; 4 | 5 | server(); 6 | --------------------------------------------------------------------------------
/src/logger.js: 1 | // @flow 2 | const Logger = require('bunyan'); 3 | const defer = require('config/defer').deferConfig; 4 | 5 | // $FlowFixMe if setting types here, `conf` library fails when parsing 6 | const consoleLogger = (level = 'debug') => 7 | // eslint-disable-next-line new-cap 8 | new Logger.createLogger({ 9 | // $FlowFixMe `this` global object comes from defer 10 | name: defer(() => this.appName), 11 | level, 12 | }); 13 | 14 | module.exports = { 15 | consoleLogger, 16 | }; 17 | --------------------------------------------------------------------------------
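An aside on logger.js above: `defer(() => this.appName)` relies on node-config's deferred values, which are resolved only after every config source has been merged and are called with the merged config available (node-config's documented pattern uses a regular `function` so that `this` binds to it, which is also why the `$FlowFixMe` is needed around the arrow version). A hypothetical fragment showing the mechanism; the real config/default.js is not part of this dump:

```js
// Assumed illustration of config/defer usage, not a copy of config/default.js.
const { deferConfig: defer } = require('config/defer');

module.exports = {
  appName: 'icarus-backend',
  greeting: defer(function greeting() {
    // `this` is the fully merged config, so appName is visible here
    return `hello from ${this.appName}`;
  }),
};
```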
/src/routes.js: 1 | // @flow 2 | 3 | import type { Logger } from 'bunyan'; 4 | import type { 5 | ServerConfig, 6 | Request, 7 | TxHistoryRequest, 8 | SignedTxRequest, 9 | DbApi, 10 | ImporterApi, 11 | } from 'icarus-backend'; // eslint-disable-line 12 | 13 | const moment = require('moment'); 14 | const { version } = require('../package.json'); 15 | const errs = require('restify-errors'); 16 | 17 | const withPrefix = route => `/api${route}`; 18 | 19 | /** 20 | * This method validates addresses request body 21 | * @param {Array[String]} addresses 22 | */ 23 | function validateAddressesReq(addressRequestLimit: number, { addresses } = {}) { 24 | if (!addresses || addresses.length > addressRequestLimit || addresses.length === 0) { 25 | throw new Error(`Addresses request length should be (0, ${addressRequestLimit}]`); 26 | } 27 | // TODO: Add address validation 28 | return true; 29 | } 30 | 31 | /** 32 | * This method validates that the dateFrom sent in the request body is a valid datetime 33 | * @param {String} dateFrom DateTime as String 34 | */ 35 | function validateDatetimeReq({ dateFrom } = {}) { 36 | if (!dateFrom || !moment(dateFrom).isValid()) { 37 | throw new Error('DateFrom should be a valid datetime'); 38 | } 39 | return true; 40 | } 41 | 42 | /** 43 | * This method validates signedTransaction endpoint body in order to check 44 | * if signedTransaction is received ok and is valid 45 | * @param {Object} Signed Transaction Payload 46 | */ 47 | function validateSignedTransactionReq({ signedTx } = {}) { 48 | if (!signedTx) { 49 | throw new Error('Signed transaction missing'); 50 | } 51 | // TODO: Add Transaction signature validation or other validations 52 | return true; 53 | } 54 | 55 | /** 56 | * Endpoint to handle getting UTXOs for given addresses 57 | * @param {*} db Database 58 | * @param {*} Server Server Config object 59 | */ 60 | const utxoForAddresses = (dbApi: DbApi, { logger, apiConfig }: ServerConfig) => async ( 61 | req: Request, 62 | ) => { 63 | validateAddressesReq(apiConfig.addressesRequestLimit, req.body); 64 | logger.debug('[utxoForAddresses] request is valid'); 65 | const result = await dbApi.utxoForAddresses(req.body.addresses); 66 | logger.debug('[utxoForAddresses] result calculated'); 67 | return result.rows; 68 | }; 69 | 70 | /** 71 | * This endpoint filters the given addresses returning the ones that were 72 | * used at least once 73 | * @param {*} db Database 74 | * @param {*} Server Server Config Object 75 | */ 76 | const filterUsedAddresses = (dbApi: DbApi, { logger, apiConfig }: ServerConfig) => async ( 77 | req: Request, 78 | ) => { 79 | validateAddressesReq(apiConfig.addressesRequestLimit, req.body); 80 | logger.debug('[filterUsedAddresses] request is valid'); 81 | const result = await dbApi.filterUsedAddresses(req.body.addresses); 82 | logger.debug('[filterUsedAddresses] result calculated'); 83 | return result.rows.reduce((acc, row) => acc.concat(row), []); 84 | }; 85 | 86 | /** 87 | * Endpoint to get the sum of UTXOs (the balance) for given addresses 88 | * @param {*} db Database 89 | * @param {*} Server Server Config Object 90 | */ 91 | const utxoSumForAddresses = (dbApi: DbApi, { logger, apiConfig }: ServerConfig) => async ( 92 | req: Request, 93 | ) => { 94 | validateAddressesReq(apiConfig.addressesRequestLimit, req.body); 95 | logger.debug('[utxoSumForAddresses] request is valid'); 96 | const result = await dbApi.utxoSumForAddresses(req.body.addresses); 97 | logger.debug('[utxoSumForAddresses] result calculated'); 98 | return result.rows[0]; 99 | }; 100 | 101 | /** 102 | * Endpoint to handle getting Tx History for given addresses and Date Filter 103 | * @param {*} db Database 104 | * @param {*} Server Server Config Object 105 | */ 106 | const transactionsHistory = (dbApi: DbApi, { logger, apiConfig }: ServerConfig) => async ( 107 | req: TxHistoryRequest, 108 | ) => { 109 | validateAddressesReq(apiConfig.addressesRequestLimit, req.body); 110 | validateDatetimeReq(req.body); 111 | logger.debug('[transactionsHistory] request is valid'); 112 | const result = await dbApi.transactionsHistoryForAddresses( 113 | apiConfig.txHistoryResponseLimit, 114 | req.body.addresses, 115 | moment(req.body.dateFrom).toDate(), 116 | ); 117 | logger.debug('[transactionsHistory] result calculated'); 118 | return result.rows; 119 | }; 120 | 121 | /** 122 | * Broadcasts a signed transaction to the block-importer node 123 | * @param {*} db Database 124 | * @param {*} Server Server Config object 125 | */ 126 | const signedTransaction = ( 127 | dbApi: DbApi, 128 | { 129 | logger, 130 | }: { logger: Logger }, 131 | importerApi: ImporterApi, 132 | ) => async (req: 
SignedTxRequest) => { 133 | validateSignedTransactionReq(req.body); 134 | logger.debug('[signedTransaction] request start'); 135 | let response; 136 | try { 137 | response = await importerApi.sendTx(req.body); 138 | } catch (err) { 139 | logger.debug('[signedTransaction] Error trying to connect with importer'); 140 | throw new errs.InternalError('Error trying to connect with importer', err); 141 | } 142 | logger.debug('[signedTransaction] transaction sent to backend, response:', response); 143 | if (response.status === 200) { 144 | const parsedBody = response.data; 145 | if (parsedBody.Right) { 146 | // "Right" means 200 ok (success) -> also handle if Right: false (boolean response) 147 | return parsedBody.Right; 148 | } else if (parsedBody.Left) { 149 | // "Left" means error case 150 | if (parsedBody.Left.includes('witness doesn\'t match address') || 151 | parsedBody.Left.includes('witness doesn\'t pass verification')) { 152 | logger.debug('[signedTransaction] Invalid witness'); 153 | throw new errs.InvalidContentError( 154 | 'Invalid witness', 155 | parsedBody.Left, 156 | ); 157 | } 158 | logger.debug('[signedTransaction] Error processing transaction'); 159 | throw new errs.InvalidContentError( 160 | 'Error processing transaction', 161 | parsedBody.Left, 162 | ); 163 | } 164 | logger.debug('[signedTransaction] Unknown response from backend'); 165 | throw new errs.InternalServerError('Unknown response from backend.', parsedBody); 166 | } 167 | logger.error( 168 | '[signedTransaction] Error while doing request to backend', 169 | response, 170 | ); 171 | throw new Error(`Error trying to send transaction ${response.data}`); 172 | }; 173 | 174 | /** 175 | * This endpoint returns the current deployed version. The goal of this endpoint is to 176 | * be used by monitoring tools to check service availability. 
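// Aside on signedTransaction above: the importer answers with a Haskell-style
// Either serialized as JSON, { Right: ... } on success and { Left: 'reason' } on
// failure, which is why the handler branches on parsedBody.Right / parsedBody.Left,
// and why a literal `Right: false` would fall through to the "Unknown response"
// error (the inline comment on line 146 flags that case). Hedged stubs that
// exercise both branches, mirroring the fakes in test/unit/routes.test.js below:
//   sendTx: () => Promise.resolve({ status: 200, data: { Right: [] } })
//   sendTx: () => Promise.resolve({ status: 200, data: { Left: "witness doesn't match address" } })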
177 | * @param {*} req 178 | * @param {*} res 179 | * @param {*} next 180 | */ 181 | const healthCheck = () => () => Promise.resolve({ version }); 182 | 183 | module.exports = { 184 | healthCheck: { 185 | method: 'get', 186 | path: withPrefix('/healthcheck'), 187 | handler: healthCheck, 188 | }, 189 | filterUsedAddresses: { 190 | method: 'post', 191 | path: withPrefix('/addresses/filterUsed'), 192 | handler: filterUsedAddresses, 193 | }, 194 | utxoForAddresses: { 195 | method: 'post', 196 | path: withPrefix('/txs/utxoForAddresses'), 197 | handler: utxoForAddresses, 198 | }, 199 | utxoSumForAddresses: { 200 | method: 'post', 201 | path: withPrefix('/txs/utxoSumForAddresses'), 202 | handler: utxoSumForAddresses, 203 | }, 204 | transactionsHistory: { 205 | method: 'post', 206 | path: withPrefix('/txs/history'), 207 | handler: transactionsHistory, 208 | }, 209 | signedTransaction: { 210 | method: 'post', 211 | path: withPrefix('/txs/signed'), 212 | handler: signedTransaction, 213 | }, 214 | }; 215 | -------------------------------------------------------------------------------- /src/server.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | import type { Pool } from 'pg'; // eslint-disable-line 3 | import type { DbApi } from 'icarus-backend'; // eslint-disable-line 4 | 5 | const fs = require('fs'); 6 | const pathLib = require('path'); 7 | const restify = require('restify'); 8 | const WebSocket = require('ws'); 9 | const corsMiddleware = require('restify-cors-middleware'); 10 | const restifyBunyanLogger = require('restify-bunyan-logger'); 11 | const config = require('config'); 12 | const routes = require('./routes'); 13 | const createDB = require('./db'); 14 | const dbApi = require('./db-api'); 15 | const importerApi = require('./importer-api'); 16 | const configCleanup = require('./cleanup'); 17 | const manageConnections = require('./ws-connections'); 18 | 19 | const serverConfig = config.get('server'); 20 | const { logger, importerSendTxEndpoint } = serverConfig; 21 | 22 | function addHttps(defaultRestifyConfig) { 23 | const TLS_DIR = pathLib.join( 24 | serverConfig.https.tlsDir, 25 | process.env.NODE_ENV ? process.env.NODE_ENV : '', 26 | ); 27 | const httpsConfig = { 28 | certificate: fs.readFileSync(`${TLS_DIR}/server.crt`), 29 | key: fs.readFileSync(`${TLS_DIR}/server.key`), 30 | ca: fs.readFileSync(`${TLS_DIR}/ca.pem`), 31 | }; 32 | return Object.assign({}, defaultRestifyConfig, httpsConfig); 33 | } 34 | 35 | async function createServer() { 36 | const db = await createDB(config.get('db')); 37 | logger.info('Connected to db'); 38 | 39 | const defaultRestifyConfig = { 40 | log: logger, 41 | }; 42 | 43 | const restifyConfig = serverConfig.https 44 | ? 
addHttps(defaultRestifyConfig) 45 | : defaultRestifyConfig; 46 | 47 | const server = restify.createServer(restifyConfig); 48 | 49 | const cors = corsMiddleware({ origins: serverConfig.corsEnabledFor }); 50 | server.pre(cors.preflight); 51 | server.use(cors.actual); 52 | server.use(restify.plugins.bodyParser()); 53 | server.on('after', restifyBunyanLogger()); 54 | 55 | Object.values(routes).forEach(({ method, path, handler }: any) => { 56 | server[method](path, async (req, res, next) => { 57 | try { 58 | const result = await handler( 59 | dbApi(db), 60 | serverConfig, 61 | importerApi(importerSendTxEndpoint), 62 | )(req); 63 | res.send(result); 64 | next(); 65 | } catch (err) { 66 | next(err); 67 | } 68 | }); 69 | }); 70 | 71 | const wss = new WebSocket.Server({ server }); 72 | wss.on('connection', manageConnections(dbApi(db), serverConfig)); 73 | 74 | configCleanup(db, logger); 75 | 76 | server.listen(serverConfig.port, () => { 77 | logger.info('%s listening at %s', server.name, server.url); 78 | }); 79 | 80 | return server; 81 | } 82 | 83 | module.exports = createServer; 84 | -------------------------------------------------------------------------------- /src/ws-connections.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | import type { 3 | ServerConfig, 4 | DbApi, 5 | } from 'icarus-backend'; // eslint-disable-line 6 | const _ = require('lodash'); 7 | 8 | const fromMessage: any = JSON.parse; 9 | const toMessage = JSON.stringify; 10 | 11 | const MSG_TYPE_RESTORE = 'RESTORE'; 12 | 13 | async function handleRestore( 14 | dbApi: DbApi, 15 | { logger }: ServerConfig, 16 | ws: any, 17 | ) { 18 | try { 19 | logger.debug('[WS::handleRestore] Start'); 20 | const result = await dbApi.unspentAddresses(); 21 | logger.debug('[WS::handleRestore] Db result ready'); 22 | logger.debug('[WS::handleRestore] Addresses processing start'); 23 | const addresses = _.flatten(result.rows); 24 | logger.debug('[WS::handleRestore] About to send the addresses'); 25 | ws.send(toMessage({ 26 | msg: MSG_TYPE_RESTORE, 27 | addresses, 28 | })); 29 | logger.debug('[WS::handleRestore] End'); 30 | } catch (err) { 31 | logger.error('[WS::handleRestore]', err); 32 | } 33 | } 34 | 35 | module.exports = (dbApi: DbApi, { logger, apiConfig }: ServerConfig) => (ws: any) => { 36 | ws.on('message', (msg) => { 37 | logger.debug(`[WS::onMessage] ${msg}`); 38 | const data = fromMessage(msg); 39 | switch (data.msg) { 40 | case MSG_TYPE_RESTORE: 41 | handleRestore(dbApi, { logger, apiConfig }, ws); 42 | break; 43 | default: 44 | break; 45 | } 46 | }); 47 | }; 48 | -------------------------------------------------------------------------------- /test/integration/filter-used-addresses.integration-test.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | const shuffle = require('shuffle-array'); 3 | const { expect } = require('chai'); 4 | const { runInServer, assertOnResults } = require('./test-utils'); 5 | 6 | const ENDPOINT = '/addresses/filterUsed'; 7 | 8 | describe('FilterUsedAddresses endpoint', () => { 9 | it('should return empty if addresses do not exist', async () => 10 | runInServer(api => 11 | api 12 | .post(ENDPOINT) 13 | .send({ 14 | addresses: [ 15 | 'DdzFFzCqrhsfYMUNRxtQ5NNKbWVw3ZJBNcMLLZSoqmD5trHHPBDwsjonoBgw1K6e8Qi8bEMs5Y62yZfReEVSFFMncFYDUHUTMM436KjQ', 16 | 'DdzFFzCqrht4s7speawymCPkm9waYHFSv2zwxhmFqHHQK5FDFt7fd9EBVvm64CrELzxaRGMcygh3gnBrXCtJzzodvzJqVR8VTZqW4rKJ', 17 | ], 18 | }) 19 | .expectBody([]) 20 | .end(), 21 | )); 22 
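// Aside on src/ws-connections.js above: the WebSocket channel speaks a single
// message type, RESTORE. A hedged client sketch (the `ws` package is already a
// dependency; the URL and port are illustrative, not taken from config):
//   const WebSocket = require('ws');
//   const ws = new WebSocket('wss://localhost:443');
//   ws.on('open', () => ws.send(JSON.stringify({ msg: 'RESTORE' })));
//   ws.on('message', raw => {
//     const { msg, addresses } = JSON.parse(raw);
//     if (msg === 'RESTORE') console.log(`${addresses.length} unspent addresses`);
//   });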
| 23 | it('should return used addresses just once', async () => { 24 | const usedAddresses = [ 25 | 'DdzFFzCqrht4wFnWC5TJA5UUVE54JC9xZWq589iKyCrWa6hek3KKevyaXzQt6FsdunbkZGzBFQhwZi1MDpijwRoC7kj1MkEPh2Uu5Ssz', 26 | 'DdzFFzCqrht4wFnWC5TJA5UUVE54JC9xZWq589iKyCrWa6hek3KKevyaXzQt6FsdunbkZGzBFQhwZi1MDpijwRoC7kj1MkEPh2Uu5Ssz', 27 | ]; 28 | 29 | return runInServer(api => 30 | api 31 | .post(ENDPOINT) 32 | .send({ addresses: usedAddresses }) 33 | .expectBody([usedAddresses[0]]) 34 | .end(), 35 | ); 36 | }); 37 | 38 | it('should filter unused addresses', async () => { 39 | const usedAddresses = [ 40 | 'DdzFFzCqrht4wFnWC5TJA5UUVE54JC9xZWq589iKyCrWa6hek3KKevyaXzQt6FsdunbkZGzBFQhwZi1MDpijwRoC7kj1MkEPh2Uu5Ssz', 41 | 'DdzFFzCqrhtBBX4VvncQ6Zxn8UHawaqSB4jf9EELRBuWUT9gZTmCDWCNTVMotEdof1g26qbrDc8qcHZvtntxR4FaBN1iKxQ5ttjZSZoj', 42 | 'DdzFFzCqrhsvrpQgsnTxPsCAeEUcGTwxUtBv94F2jGGW8s3ZT7V2xPYBAL4renccQQv6bnVtuSr5a5N6cJuAh8Nw58dzZDJTesodN2kV', 43 | 'DdzFFzCqrht9eptGZnVrBCcoLn6fWJF4CS1Dvs8KCKutDXgQ9hdNTEPxFqWwfM3gwpVv3zrLQf7dV7xsUpxLPQKGagGX3CscjWeeTEXz', 44 | ]; 45 | 46 | const unusedAddresses = [ 47 | 'DdzFFzCqrhsfYMUNRxtQ5NNKbWVw3ZJBNcMLLZSoqmD5trHHPBDwsjonoBgw1K6e8Qi8bEMs5Y62yZfReEVSFFMncFYDUHUTMM436KjQ', 48 | 'DdzFFzCqrht4s7speawymCPkm9waYHFSv2zwxhmFqHHQK5FDFt7fd9EBVvm64CrELzxaRGMcygh3gnBrXCtJzzodvzJqVR8VTZqW4rKJ', 49 | 'DdzFFzCqrht8d5FeU62PpBw1e3JLUP48LKfDfNtUyfuBJjBEqmgfYpwcbNHCh3csA4DEzu7SYquoUdmkcknR1E1D6zz5byvpMx632VJx', 50 | ]; 51 | 52 | const addresses = shuffle(usedAddresses.concat(unusedAddresses)); 53 | return runInServer(api => 54 | api 55 | .post(ENDPOINT) 56 | .send({ addresses }) 57 | .expect( 58 | assertOnResults((res, body) => { 59 | expect(body).to.have.same.members(usedAddresses); 60 | }), 61 | ) 62 | .end(), 63 | ); 64 | }); 65 | }); 66 | -------------------------------------------------------------------------------- /test/integration/healthcheck.integration-test.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | const { runInServer } = require('./test-utils'); 3 | const packageJson = require('../../package.json'); 4 | 5 | describe('Healthcheck endpoint', () => { 6 | it('Should return package.json version', async () => 7 | runInServer(api => 8 | api 9 | .get('/healthcheck') 10 | .expectValue('version', packageJson.version) 11 | .end(), 12 | )); 13 | }); 14 | -------------------------------------------------------------------------------- /test/integration/test-utils.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | import type { Hippie } from 'hippie'; 3 | 4 | // $FlowFixMe Fix this assignment as it throws an error 5 | const hippie: Hippie = require('hippie'); 6 | const createServer = require('../../src/server'); 7 | 8 | function api(server): Hippie { 9 | return hippie(server) 10 | .json() 11 | .base('http://localhost:8080/api'); 12 | } 13 | 14 | /** 15 | * This function starts a server and executes test endpoint function on it 16 | * @param {function} testEndpoint Hippie functions to be called 17 | */ 18 | async function runInServer(testEndpoint: Hippie => Promise) { 19 | const server = await createServer(); 20 | let promise; 21 | try { 22 | await testEndpoint(api(server)); 23 | } finally { 24 | promise = new Promise(resolve => { 25 | server.close(() => resolve(true)); 26 | }); 27 | } 28 | return promise; 29 | } 30 | 31 | /** 32 | * Helper in order to use chai assertions with hippie expect function 33 | * It will call next if no errors, next(err) if an assertion thrown 34 | * @param 
{function} assertionsFn Set of assertions. 35 | */ 36 | const assertOnResults = (assertionsFn: (Object, Object, Function) => void) => ( 37 | res: Object, 38 | body: Object, 39 | next: Function, 40 | ) => { 41 | try { 42 | assertionsFn(res, body, next); 43 | next(); 44 | } catch (err) { 45 | next(err); 46 | } 47 | }; 48 | 49 | module.exports = { 50 | runInServer, 51 | assertOnResults, 52 | }; 53 | -------------------------------------------------------------------------------- /test/integration/transactions-history.integration-test.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | const shuffle = require('shuffle-array'); 3 | const { expect } = require('chai'); 4 | const { runInServer, assertOnResults } = require('./test-utils'); 5 | const moment = require('moment'); 6 | 7 | const ENDPOINT = '/txs/history'; 8 | 9 | // To avoid Possible EventEmitter memory leak detected message 10 | process.setMaxListeners(0); 11 | 12 | describe('Transaction History endpoint', () => { 13 | it('should return empty if addresses do not exist', async () => 14 | runInServer(api => 15 | api 16 | .post(ENDPOINT) 17 | .send({ 18 | addresses: [ 19 | 'DdzFFzCqrhsfYMUNRxtQ5NNKbWVw3ZJBNcMLLZSoqmD5trHHPBDwsjonoBgw1K6e8Qi8bEMs5Y62yZfReEVSFFMncFYDUHUTMM436KjQ', 20 | 'DdzFFzCqrht4s7speawymCPkm9waYHFSv2zwxhmFqHHQK5FDFt7fd9EBVvm64CrELzxaRGMcygh3gnBrXCtJzzodvzJqVR8VTZqW4rKJ', 21 | ], 22 | dateFrom: moment('1995-12-25').toISOString(), 23 | }) 24 | .expectBody([]) 25 | .end(), 26 | )); 27 | 28 | it('should return empty if there are no tx after the given address', async () => 29 | runInServer(api => 30 | api 31 | .post(ENDPOINT) 32 | .send({ 33 | addresses: [ 34 | 'DdzFFzCqrhsqFM8QxHC4ASk4QLfuoWqbY65GeprG8ezEY6VFkP4jz4C4fcDT57fkUUrPN8E2gaPXiWQxjD3BryptceQEx98ALsrYMoSi', 35 | ], 36 | dateFrom: moment('2050-12-25').toISOString(), 37 | }) 38 | .expectBody([]) 39 | .end(), 40 | )); 41 | 42 | it('should return history for input and output addresses', async () => { 43 | const usedAddresses = [ 44 | // Input and Output 45 | 'DdzFFzCqrhsgBCt25t6JArdDHfJZkzzebapE2qqrg1yoquLZzeEyxzhLAb9x7rVf5aby9jwLvL65hH9zTWjbekwzbeYCjJ5pUKn1rYgB', 46 | // Output 47 | 'DdzFFzCqrhsqFM8QxHC4ASk4QLfuoWqbY65GeprG8ezEY6VFkP4jz4C4fcDT57fkUUrPN8E2gaPXiWQxjD3BryptceQEx98ALsrYMoSi', 48 | ]; 49 | 50 | return runInServer(api => 51 | api 52 | .post(ENDPOINT) 53 | .send({ 54 | addresses: usedAddresses, 55 | dateFrom: moment('1995-12-25').toISOString(), 56 | }) 57 | .expectBody([ 58 | { 59 | hash: 'e1a958d42f7a064ef447feee5859fd45b8c925de825a7460819f67e8a4f320d0', 60 | inputs_address: [ 61 | 'DdzFFzCqrhsjRXHaGcwLdb82izDN3WNzJpyXnLQ2XPa7PKsuqVWbccKLHymdhgzys117xwosU7Kg8XrqHihHHJNNLDte6WrKq5zJ2Njk', 62 | 'DdzFFzCqrhsjRXHaGcwLdb82izDN3WNzJpyXnLQ2XPa7PKsuqVWbccKLHymdhgzys117xwosU7Kg8XrqHihHHJNNLDte6WrKq5zJ2Njk', 63 | ], 64 | inputs_amount: [ 65 | '10000000', 66 | '10000000', 67 | ], 68 | outputs_address: [ 69 | 'DdzFFzCqrhsqFM8QxHC4ASk4QLfuoWqbY65GeprG8ezEY6VFkP4jz4C4fcDT57fkUUrPN8E2gaPXiWQxjD3BryptceQEx98ALsrYMoSi', 70 | 'DdzFFzCqrhsxi87yX3WBKVJ37n7frUZjiVTwoc7qxdVeEAoqiRiLUecLngUhgYbc1hfTyzxtwvwSRtGNeWKJfaqMefs4dwybgHmwBj8c', 71 | ], 72 | outputs_amount: [ 73 | '4821151', 74 | '15000000', 75 | ], 76 | block_num: '116147', 77 | time: '2017-10-23T18:03:53.000Z', 78 | tx_state: 'Successful', 79 | last_update: '2018-07-13T20:06:04.197Z', 80 | best_block_num: '1266738', 81 | }, 82 | { 83 | hash: 'a8fb2c6cce6d68ea4c65b8301eb26636178c40d3c65071e738d5a4e5cde4d91d', 84 | inputs_address: [ 85 | 
'DdzFFzCqrht9SryvcbmahwFFbXkDGzDtuA26Qccf1nQ9bWPmkej9i7q6e9A2bbEVEs2szYJtUupPAQLbh9fANEh1zBLikREmL3XubFAr', 86 | ], 87 | inputs_amount: [ 88 | '96599520', 89 | ], 90 | outputs_address: [ 91 | 'DdzFFzCqrhsgBCt25t6JArdDHfJZkzzebapE2qqrg1yoquLZzeEyxzhLAb9x7rVf5aby9jwLvL65hH9zTWjbekwzbeYCjJ5pUKn1rYgB', 92 | 'DdzFFzCqrhszk2XG2vdMcB3JhkpGTTnMeWvwoE5wHacAu1H38bp5Smr6pxEvJDk5KzeKsTaPSmBVJ24hp2FfqxGDdgH7hp1H1bt5U8Hk', 93 | 'DdzFFzCqrhszk2XG2vdMcB3JhkpGTTnMeWvwoE5wHacAu1H38bp5Smr6pxEvJDk5KzeKsTaPSmBVJ24hp2FfqxGDdgH7hp1H1bt5U8Hk', 94 | 'DdzFFzCqrhszk2XG2vdMcB3JhkpGTTnMeWvwoE5wHacAu1H38bp5Smr6pxEvJDk5KzeKsTaPSmBVJ24hp2FfqxGDdgH7hp1H1bt5U8Hk', 95 | ], 96 | outputs_amount: [ 97 | '96421943', 98 | '1', 99 | '1', 100 | '1', 101 | ], 102 | block_num: '872076', 103 | time: '2018-04-17T04:24:13.000Z', 104 | tx_state: 'Successful', 105 | last_update: '2018-07-13T21:08:55.778Z', 106 | best_block_num: '1266738', 107 | }, 108 | { 109 | hash: 'de5dbbed46ef5c69f52b3a77ee74585bef07aebcd90383de28348159c697b568', 110 | inputs_address: [ 111 | 'DdzFFzCqrhsgBCt25t6JArdDHfJZkzzebapE2qqrg1yoquLZzeEyxzhLAb9x7rVf5aby9jwLvL65hH9zTWjbekwzbeYCjJ5pUKn1rYgB', 112 | ], 113 | inputs_amount: [ 114 | '96421943', 115 | ], 116 | outputs_address: [ 117 | 'DdzFFzCqrhsrDmGpSbh2LBRmStmMyGznXaeBLoDMSKjLfRuf9DWpLMEzbXw9eQcFsSwNX5sunRuxsJnSZFbu8pTe1qLerrWwwiinEzVe', 118 | 'DdzFFzCqrhstXeWBtWKg1Z189XE5uwwwfbKeUHdacmnD1qMaNqs6Qk3ctZF1frH1wT5PnnJXzLC2fumc9qVWLFp9aMGPEfVzzL6eyKjM', 119 | 'DdzFFzCqrhstXeWBtWKg1Z189XE5uwwwfbKeUHdacmnD1qMaNqs6Qk3ctZF1frH1wT5PnnJXzLC2fumc9qVWLFp9aMGPEfVzzL6eyKjM', 120 | 'DdzFFzCqrhstXeWBtWKg1Z189XE5uwwwfbKeUHdacmnD1qMaNqs6Qk3ctZF1frH1wT5PnnJXzLC2fumc9qVWLFp9aMGPEfVzzL6eyKjM', 121 | ], 122 | outputs_amount: [ 123 | '96244366', 124 | '1', 125 | '1', 126 | '1', 127 | ], 128 | block_num: '872089', 129 | time: '2018-04-17T04:28:33.000Z', 130 | tx_state: 'Successful', 131 | last_update: '2018-07-13T21:08:55.794Z', 132 | best_block_num: '1266738', 133 | }, 134 | ]) 135 | .end(), 136 | ); 137 | }); 138 | 139 | it('should history once even if addresses sent twice', async () => { 140 | const usedAddresses = [ 141 | 'DdzFFzCqrhsqFM8QxHC4ASk4QLfuoWqbY65GeprG8ezEY6VFkP4jz4C4fcDT57fkUUrPN8E2gaPXiWQxjD3BryptceQEx98ALsrYMoSi', 142 | 'DdzFFzCqrhsqFM8QxHC4ASk4QLfuoWqbY65GeprG8ezEY6VFkP4jz4C4fcDT57fkUUrPN8E2gaPXiWQxjD3BryptceQEx98ALsrYMoSi', 143 | ]; 144 | 145 | return runInServer(api => 146 | api 147 | .post(ENDPOINT) 148 | .send({ 149 | addresses: usedAddresses, 150 | dateFrom: moment('1995-12-25').toISOString(), 151 | }) 152 | .expectBody([ 153 | { 154 | hash: 155 | 'e1a958d42f7a064ef447feee5859fd45b8c925de825a7460819f67e8a4f320d0', 156 | inputs_address: [ 157 | 'DdzFFzCqrhsjRXHaGcwLdb82izDN3WNzJpyXnLQ2XPa7PKsuqVWbccKLHymdhgzys117xwosU7Kg8XrqHihHHJNNLDte6WrKq5zJ2Njk', 158 | 'DdzFFzCqrhsjRXHaGcwLdb82izDN3WNzJpyXnLQ2XPa7PKsuqVWbccKLHymdhgzys117xwosU7Kg8XrqHihHHJNNLDte6WrKq5zJ2Njk', 159 | ], 160 | inputs_amount: ['10000000', '10000000'], 161 | outputs_address: [ 162 | 'DdzFFzCqrhsqFM8QxHC4ASk4QLfuoWqbY65GeprG8ezEY6VFkP4jz4C4fcDT57fkUUrPN8E2gaPXiWQxjD3BryptceQEx98ALsrYMoSi', 163 | 'DdzFFzCqrhsxi87yX3WBKVJ37n7frUZjiVTwoc7qxdVeEAoqiRiLUecLngUhgYbc1hfTyzxtwvwSRtGNeWKJfaqMefs4dwybgHmwBj8c', 164 | ], 165 | outputs_amount: ['4821151', '15000000'], 166 | block_num: '116147', 167 | time: '2017-10-23T18:03:53.000Z', 168 | best_block_num: '1266738', 169 | tx_state: 'Successful', 170 | last_update: '2018-07-13T20:06:04.197Z', 171 | }, 172 | ]) 173 | .end(), 174 | ); 175 | }); 176 | 177 | it('should history once even if addresses is present in 
input and output', async () => { 178 | const usedAddresses = [ 179 | 'CYhGP86nCaiEEEUSLWTS3gvAzmLTWM8Nj5CuJyqg5y2iJ1jNhwrZWsNE9n9xsmk5HFDa6DdZcPoXTUEYKddVsqJ1Y', 180 | ]; 181 | 182 | return runInServer(api => 183 | api 184 | .post(ENDPOINT) 185 | .send({ 186 | addresses: usedAddresses, 187 | dateFrom: moment('1995-12-25').toISOString(), 188 | }) 189 | .expect( 190 | assertOnResults((res, body) => { 191 | // https://explorer.iohkdev.io/address/CYhGP86nCaiEEEUSLWTS3gvAzmLTWM8Nj5CuJyqg5y2iJ1jNhwrZWsNE9n9xsmk5HFDa6DdZcPoXTUEYKddVsqJ1Y 192 | expect(body.length).to.equal(3); 193 | }), 194 | ) 195 | .end(), 196 | ); 197 | }); 198 | 199 | it('should filter unused addresses', async () => { 200 | const usedAddresses = [ 201 | 'DdzFFzCqrhsqFM8QxHC4ASk4QLfuoWqbY65GeprG8ezEY6VFkP4jz4C4fcDT57fkUUrPN8E2gaPXiWQxjD3BryptceQEx98ALsrYMoSi', 202 | ]; 203 | 204 | const unusedAddresses = [ 205 | 'DdzFFzCqrhsfYMUNRxtQ5NNKbWVw3ZJBNcMLLZSoqmD5trHHPBDwsjonoBgw1K6e8Qi8bEMs5Y62yZfReEVSFFMncFYDUHUTMM436KjQ', 206 | 'DdzFFzCqrht4s7speawymCPkm9waYHFSv2zwxhmFqHHQK5FDFt7fd9EBVvm64CrELzxaRGMcygh3gnBrXCtJzzodvzJqVR8VTZqW4rKJ', 207 | 'DdzFFzCqrht8d5FeU62PpBw1e3JLUP48LKfDfNtUyfuBJjBEqmgfYpwcbNHCh3csA4DEzu7SYquoUdmkcknR1E1D6zz5byvpMx632VJx', 208 | ]; 209 | 210 | const addresses = shuffle(usedAddresses.concat(unusedAddresses)); 211 | return runInServer(api => 212 | api 213 | .post(ENDPOINT) 214 | .send({ addresses, dateFrom: moment('1995-12-25').toISOString() }) 215 | .expectBody([ 216 | { 217 | hash: 218 | 'e1a958d42f7a064ef447feee5859fd45b8c925de825a7460819f67e8a4f320d0', 219 | inputs_address: [ 220 | 'DdzFFzCqrhsjRXHaGcwLdb82izDN3WNzJpyXnLQ2XPa7PKsuqVWbccKLHymdhgzys117xwosU7Kg8XrqHihHHJNNLDte6WrKq5zJ2Njk', 221 | 'DdzFFzCqrhsjRXHaGcwLdb82izDN3WNzJpyXnLQ2XPa7PKsuqVWbccKLHymdhgzys117xwosU7Kg8XrqHihHHJNNLDte6WrKq5zJ2Njk', 222 | ], 223 | inputs_amount: ['10000000', '10000000'], 224 | outputs_address: [ 225 | 'DdzFFzCqrhsqFM8QxHC4ASk4QLfuoWqbY65GeprG8ezEY6VFkP4jz4C4fcDT57fkUUrPN8E2gaPXiWQxjD3BryptceQEx98ALsrYMoSi', 226 | 'DdzFFzCqrhsxi87yX3WBKVJ37n7frUZjiVTwoc7qxdVeEAoqiRiLUecLngUhgYbc1hfTyzxtwvwSRtGNeWKJfaqMefs4dwybgHmwBj8c', 227 | ], 228 | outputs_amount: ['4821151', '15000000'], 229 | block_num: '116147', 230 | time: '2017-10-23T18:03:53.000Z', 231 | best_block_num: '1266738', 232 | last_update: '2018-07-13T20:06:04.197Z', 233 | tx_state: 'Successful', 234 | }, 235 | ]) 236 | .end(), 237 | ); 238 | }); 239 | 240 | it('should paginate responses', async () => { 241 | const addresses = [ 242 | 'DdzFFzCqrhsjyFvzVsaahmL93VEno1PRkXxUFqJAxRpA52VAyTHVKRGBFyGvGQmr9Ya8kiQF4bmXqTqMZ8G84Krp4xmHkJSvt6txEMXA', 243 | ]; 244 | 245 | let lastDateFrom; 246 | 247 | await runInServer(api => 248 | api 249 | .post(ENDPOINT) 250 | .send({ addresses, dateFrom: moment('1995-12-25').toISOString() }) 251 | .expect( 252 | assertOnResults((res, body) => { 253 | expect(body.length).to.equal(20); 254 | const lastElem = body[body.length - 1]; 255 | expect(lastElem.hash).to.equal( 256 | 'a3f8d071d027b44571fc9dd50d17edb8c55768d8a7cb8a6709256f146d228ca8', 257 | ); 258 | lastDateFrom = lastElem.last_update; 259 | }), 260 | ) 261 | .end(), 262 | ); 263 | 264 | return runInServer(api => 265 | api 266 | .post(ENDPOINT) 267 | .send({ 268 | addresses, 269 | // Paginate from previous response 270 | dateFrom: lastDateFrom, 271 | }) 272 | .expect( 273 | assertOnResults((res, body) => { 274 | expect(body.length).to.equal(20); 275 | expect(body[0].hash).to.equal( 276 | 'a3f8d071d027b44571fc9dd50d17edb8c55768d8a7cb8a6709256f146d228ca8', 277 | ); 278 | }), 279 | ) 280 | 
.end(), 281 | ); 282 | }); 283 | }); 284 | -------------------------------------------------------------------------------- /test/integration/utxo-for-addresses.integration-test.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | const shuffle = require('shuffle-array'); 3 | const { expect } = require('chai'); 4 | const { runInServer, assertOnResults } = require('./test-utils'); 5 | 6 | const ENDPOINT = '/txs/utxoForAddresses'; 7 | 8 | describe('UtxoForAddresses endpoint', () => { 9 | it('should return empty if addresses do not exist', async () => 10 | runInServer(api => 11 | api 12 | .post(ENDPOINT) 13 | .send({ 14 | addresses: [ 15 | 'DdzFFzCqrhsfYMUNRxtQ5NNKbWVw3ZJBNcMLLZSoqmD5trHHPBDwsjonoBgw1K6e8Qi8bEMs5Y62yZfReEVSFFMncFYDUHUTMM436KjQ', 16 | 'DdzFFzCqrht4s7speawymCPkm9waYHFSv2zwxhmFqHHQK5FDFt7fd9EBVvm64CrELzxaRGMcygh3gnBrXCtJzzodvzJqVR8VTZqW4rKJ', 17 | ], 18 | }) 19 | .expectBody([]) 20 | .end(), 21 | )); 22 | 23 | it('should return data for addresses balance once even if sent twice', async () => { 24 | const usedAddresses = [ 25 | 'DdzFFzCqrhshvqw9GrHmSw6ySwViBj5cj2njWj5mbnLu4uNauJCKuXhHS3wNUoGRNBGGTkyTFDQNrUWMumZ3mxarAjoXiYvyhead7yKQ', 26 | 'DdzFFzCqrhshvqw9GrHmSw6ySwViBj5cj2njWj5mbnLu4uNauJCKuXhHS3wNUoGRNBGGTkyTFDQNrUWMumZ3mxarAjoXiYvyhead7yKQ', 27 | ]; 28 | 29 | return runInServer(api => 30 | api 31 | .post(ENDPOINT) 32 | .send({ addresses: usedAddresses }) 33 | .expectBody([ 34 | { 35 | utxo_id: 36 | '6cc6d736e3a4395acabfae4c7cfe409b65d8c7c6bbf9ff85a0bd4a95334b7a5f0', 37 | tx_hash: 38 | '6cc6d736e3a4395acabfae4c7cfe409b65d8c7c6bbf9ff85a0bd4a95334b7a5f', 39 | tx_index: 0, 40 | receiver: 41 | 'DdzFFzCqrhshvqw9GrHmSw6ySwViBj5cj2njWj5mbnLu4uNauJCKuXhHS3wNUoGRNBGGTkyTFDQNrUWMumZ3mxarAjoXiYvyhead7yKQ', 42 | amount: '1463071700828754', 43 | }, 44 | ]) 45 | .end(), 46 | ); 47 | }); 48 | 49 | it('should filter unused addresses', async () => { 50 | const usedAddresses = [ 51 | 'DdzFFzCqrhshvqw9GrHmSw6ySwViBj5cj2njWj5mbnLu4uNauJCKuXhHS3wNUoGRNBGGTkyTFDQNrUWMumZ3mxarAjoXiYvyhead7yKQ', 52 | 'DdzFFzCqrhskrzzPrXynkZ3gteGy8GmWYrswqz9SueoFP9PV5suFnGv9sQqg3o5pxzFpDTJ2HFJzHrThxBYarQi8guzMUhuiePB1T6ff', 53 | ]; 54 | 55 | const unusedAddresses = [ 56 | 'DdzFFzCqrhsfYMUNRxtQ5NNKbWVw3ZJBNcMLLZSoqmD5trHHPBDwsjonoBgw1K6e8Qi8bEMs5Y62yZfReEVSFFMncFYDUHUTMM436KjQ', 57 | 'DdzFFzCqrht4s7speawymCPkm9waYHFSv2zwxhmFqHHQK5FDFt7fd9EBVvm64CrELzxaRGMcygh3gnBrXCtJzzodvzJqVR8VTZqW4rKJ', 58 | 'DdzFFzCqrht8d5FeU62PpBw1e3JLUP48LKfDfNtUyfuBJjBEqmgfYpwcbNHCh3csA4DEzu7SYquoUdmkcknR1E1D6zz5byvpMx632VJx', 59 | ]; 60 | 61 | const expectedUTOXs = [ 62 | { 63 | utxo_id: 64 | '6cc6d736e3a4395acabfae4c7cfe409b65d8c7c6bbf9ff85a0bd4a95334b7a5f0', 65 | tx_hash: 66 | '6cc6d736e3a4395acabfae4c7cfe409b65d8c7c6bbf9ff85a0bd4a95334b7a5f', 67 | tx_index: 0, 68 | receiver: 69 | 'DdzFFzCqrhshvqw9GrHmSw6ySwViBj5cj2njWj5mbnLu4uNauJCKuXhHS3wNUoGRNBGGTkyTFDQNrUWMumZ3mxarAjoXiYvyhead7yKQ', 70 | amount: '1463071700828754', 71 | }, 72 | { 73 | utxo_id: 74 | 'aba9ad6b8360542698038dea31ca23037ad933c057abc18c5c17c2c63dbc3d131', 75 | tx_hash: 76 | 'aba9ad6b8360542698038dea31ca23037ad933c057abc18c5c17c2c63dbc3d13', 77 | tx_index: 1, 78 | receiver: 79 | 'DdzFFzCqrhskrzzPrXynkZ3gteGy8GmWYrswqz9SueoFP9PV5suFnGv9sQqg3o5pxzFpDTJ2HFJzHrThxBYarQi8guzMUhuiePB1T6ff', 80 | amount: '9829100', 81 | }, 82 | ]; 83 | 84 | const addresses = shuffle(usedAddresses.concat(unusedAddresses)); 85 | return runInServer(api => 86 | api 87 | .post(ENDPOINT) 88 | .send({ addresses }) 89 | .expect( 90 | assertOnResults((res, body) 
=> { 91 | expect(body).to.have.same.deep.members(expectedUTOXs); 92 | }), 93 | ) 94 | .end(), 95 | ); 96 | }); 97 | }); 98 | -------------------------------------------------------------------------------- /test/integration/utxo-sum-for-addresses.integration-test.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | const shuffle = require('shuffle-array'); 3 | const { runInServer } = require('./test-utils'); 4 | 5 | const ENDPOINT = '/txs/utxoSumForAddresses'; 6 | 7 | describe('UtxoSumForAddresses endpoint', () => { 8 | it('should return empty if addresses do not exist', async () => 9 | runInServer(api => 10 | api 11 | .post(ENDPOINT) 12 | .send({ 13 | addresses: [ 14 | 'DdzFFzCqrhsfYMUNRxtQ5NNKbWVw3ZJBNcMLLZSoqmD5trHHPBDwsjonoBgw1K6e8Qi8bEMs5Y62yZfReEVSFFMncFYDUHUTMM436KjQ', 15 | 'DdzFFzCqrht4s7speawymCPkm9waYHFSv2zwxhmFqHHQK5FDFt7fd9EBVvm64CrELzxaRGMcygh3gnBrXCtJzzodvzJqVR8VTZqW4rKJ', 16 | ], 17 | }) 18 | .expectValue('sum', null) 19 | .end(), 20 | )); 21 | 22 | it('should sum addresses balance once even if sent twice', async () => { 23 | const usedAddresses = [ 24 | 'DdzFFzCqrht4wFnWC5TJA5UUVE54JC9xZWq589iKyCrWa6hek3KKevyaXzQt6FsdunbkZGzBFQhwZi1MDpijwRoC7kj1MkEPh2Uu5Ssz', 25 | 'DdzFFzCqrht4wFnWC5TJA5UUVE54JC9xZWq589iKyCrWa6hek3KKevyaXzQt6FsdunbkZGzBFQhwZi1MDpijwRoC7kj1MkEPh2Uu5Ssz', 26 | ]; 27 | 28 | return runInServer(api => 29 | api 30 | .post(ENDPOINT) 31 | .send({ addresses: usedAddresses }) 32 | .expectValue('sum', '621894750') 33 | .end(), 34 | ); 35 | }); 36 | 37 | it('should filter unused addresses', async () => { 38 | const usedAddresses = [ 39 | 'DdzFFzCqrht4wFnWC5TJA5UUVE54JC9xZWq589iKyCrWa6hek3KKevyaXzQt6FsdunbkZGzBFQhwZi1MDpijwRoC7kj1MkEPh2Uu5Ssz', 40 | ]; 41 | 42 | const unusedAddresses = [ 43 | 'DdzFFzCqrhsfYMUNRxtQ5NNKbWVw3ZJBNcMLLZSoqmD5trHHPBDwsjonoBgw1K6e8Qi8bEMs5Y62yZfReEVSFFMncFYDUHUTMM436KjQ', 44 | 'DdzFFzCqrht4s7speawymCPkm9waYHFSv2zwxhmFqHHQK5FDFt7fd9EBVvm64CrELzxaRGMcygh3gnBrXCtJzzodvzJqVR8VTZqW4rKJ', 45 | 'DdzFFzCqrht8d5FeU62PpBw1e3JLUP48LKfDfNtUyfuBJjBEqmgfYpwcbNHCh3csA4DEzu7SYquoUdmkcknR1E1D6zz5byvpMx632VJx', 46 | ]; 47 | 48 | const addresses = shuffle(usedAddresses.concat(unusedAddresses)); 49 | return runInServer(api => 50 | api 51 | .post(ENDPOINT) 52 | .send({ addresses }) 53 | .expectValue('sum', '621894750') 54 | .end(), 55 | ); 56 | }); 57 | }); 58 | -------------------------------------------------------------------------------- /test/unit/routes.test.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | const assert = require('assert'); 3 | const chai = require('chai'); 4 | const chaiAsPromised = require('chai-as-promised'); 5 | const sinon = require('sinon'); 6 | const Bunyan = require('bunyan'); 7 | const routes = require('../../src/routes'); 8 | const packageJson = require('../../package.json'); 9 | 10 | chai.use(chaiAsPromised); 11 | const { expect } = chai; 12 | 13 | // eslint-disable-next-line new-cap 14 | const logger = new Bunyan.createLogger({ 15 | name: 'test', 16 | // $FlowFixMe Doesn't like string literal 17 | level: 'fatal', 18 | }); 19 | 20 | const apiConfig = { addressesRequestLimit: 50, txHistoryResponseLimit: 20 }; 21 | 22 | describe('Routes', () => { 23 | // This returns fake data. 
It's ok if they are not real objects (for example utxo or txs) 24 | // as we are checking the response is being returned, not the queries 25 | const dbApi = { 26 | filterUsedAddresses: sinon.fake.resolves({ rows: [['a1', 'a2']] }), 27 | utxoForAddresses: sinon.fake.resolves({ rows: ['utxo1', 'utxo2'] }), 28 | utxoSumForAddresses: sinon.fake.resolves({ rows: [10, 20] }), 29 | transactionsHistoryForAddresses: sinon.fake.resolves({ 30 | rows: ['tx1', 'tx2'], 31 | }), 32 | unspentAddresses: sinon.fake.resolves([]), 33 | }; 34 | 35 | function validateMethodAndPath(endpoint, methodToCheck, pathToCheck) { 36 | const { method, path } = endpoint; 37 | assert.equal(methodToCheck, method); 38 | assert.equal(pathToCheck, path); 39 | } 40 | 41 | function assertInvalidAddressesPayload(handler) { 42 | it('should reject bodies without addresses', () => { 43 | // $FlowFixMe Ignore this as we are trying invalid payloads 44 | const response = handler({}); 45 | return expect(response).to.be.rejectedWith( 46 | Error, 47 | `Addresses request length should be (0, ${ 48 | apiConfig.addressesRequestLimit 49 | }]`, 50 | ); 51 | }); 52 | 53 | it(`should reject bodies with more than ${ 54 | apiConfig.addressesRequestLimit 55 | } addresses`, () => { 56 | const response = handler( 57 | // $FlowFixMe Ignore this as we are trying invalid payloads 58 | { body: { addresses: Array(apiConfig.addressesRequestLimit + 1).fill('an_address') } }, 59 | ); 60 | return expect(response).to.be.rejectedWith( 61 | Error, 62 | `Addresses request length should be (0, ${ 63 | apiConfig.addressesRequestLimit 64 | }]`, 65 | ); 66 | }); 67 | } 68 | 69 | describe('Healthcheck', () => { 70 | it('should have GET as method and /api/healthcheck as path', () => { 71 | validateMethodAndPath(routes.healthCheck, 'get', '/api/healthcheck'); 72 | }); 73 | 74 | it('should return package.json version as response', async () => { 75 | const handler = routes.healthCheck.handler(); 76 | const response = await handler(); 77 | return expect(response).to.eql({ version: packageJson.version }); 78 | }); 79 | }); 80 | 81 | describe('Filter Used Addresses', () => { 82 | it('should have POST as method and /api/addresses/filterUsed as path', () => { 83 | validateMethodAndPath( 84 | routes.filterUsedAddresses, 85 | 'post', 86 | '/api/addresses/filterUsed', 87 | ); 88 | }); 89 | 90 | assertInvalidAddressesPayload( 91 | routes.filterUsedAddresses.handler(dbApi, { logger, apiConfig }), 92 | ); 93 | 94 | it('should accept bodies with 20 addresses', async () => { 95 | const handler = routes.filterUsedAddresses.handler(dbApi, { 96 | logger, 97 | apiConfig, 98 | }); 99 | const response = await handler({ 100 | body: { addresses: Array(20).fill('an_address') }, 101 | }); 102 | return expect(response).to.eql(['a1', 'a2']); 103 | }); 104 | }); 105 | 106 | describe('UTXO for addresses', () => { 107 | it('should have POST as method and /txs/utxoForAddresses as path', () => { 108 | validateMethodAndPath( 109 | routes.utxoForAddresses, 110 | 'post', 111 | '/api/txs/utxoForAddresses', 112 | ); 113 | }); 114 | 115 | assertInvalidAddressesPayload( 116 | routes.utxoForAddresses.handler(dbApi, { logger, apiConfig }), 117 | ); 118 | 119 | it('should accept bodies with 20 addresses', async () => { 120 | const handler = routes.utxoForAddresses.handler(dbApi, { 121 | logger, 122 | apiConfig, 123 | }); 124 | const response = await handler({ 125 | body: { addresses: Array(20).fill('an_address') }, 126 | }); 127 | return expect(response).to.eql(['utxo1', 'utxo2']); 128 | }); 129 | }); 130 | 131 
| describe('UTXO Sum for addresses', () => { 132 | it('should have POST as method and /txs/utxoSumForAddresses as path', () => { 133 | validateMethodAndPath( 134 | routes.utxoSumForAddresses, 135 | 'post', 136 | '/api/txs/utxoSumForAddresses', 137 | ); 138 | }); 139 | 140 | assertInvalidAddressesPayload( 141 | routes.utxoSumForAddresses.handler(dbApi, { logger, apiConfig }), 142 | ); 143 | 144 | it('should accept bodies with 20 addresses', async () => { 145 | const handler = routes.utxoSumForAddresses.handler(dbApi, { 146 | logger, 147 | apiConfig, 148 | }); 149 | const response = await handler({ 150 | body: { addresses: Array(20).fill('an_address') }, 151 | }); 152 | return expect(response).to.equal(10); 153 | }); 154 | }); 155 | 156 | describe('Transactions history', () => { 157 | it('should have POST as method and /txs/history as path', () => { 158 | validateMethodAndPath( 159 | routes.transactionsHistory, 160 | 'post', 161 | '/api/txs/history', 162 | ); 163 | }); 164 | 165 | assertInvalidAddressesPayload( 166 | routes.transactionsHistory.handler(dbApi, { logger, apiConfig }), 167 | ); 168 | 169 | it('should fail if no dateFrom sent', async () => { 170 | const handler = routes.transactionsHistory.handler(dbApi, { 171 | logger, 172 | apiConfig, 173 | }); 174 | const response = handler({ 175 | body: { 176 | addresses: ['an_address'], 177 | // $FlowFixMe ignore this line as we are testing invalid dateFrom 178 | dateFrom: undefined, 179 | }, 180 | }); 181 | return expect(response).to.be.rejectedWith( 182 | Error, 183 | 'DateFrom should be a valid datetime', 184 | ); 185 | }); 186 | }); 187 | 188 | describe('Signed Transaction', () => { 189 | it('should have POST as method and /txs/signed as path', () => { 190 | validateMethodAndPath( 191 | routes.signedTransaction, 192 | 'post', 193 | '/api/txs/signed', 194 | ); 195 | }); 196 | 197 | it('should send a given signed tx', async () => { 198 | const importerApi = { 199 | sendTx: sinon.fake.resolves({ status: 200, data: { Right: [] } }), 200 | }; 201 | const handler = routes.signedTransaction.handler(dbApi, { 202 | logger, 203 | }, importerApi); 204 | const response = await handler({ body: { signedTx: 'signedTx' } }); 205 | return expect(response.length).to.equal(0); 206 | }); 207 | 208 | it('should reject empty bodies', async () => { 209 | const importerApi = { 210 | sendTx: sinon.fake.resolves(), 211 | }; 212 | const handler = routes.signedTransaction.handler(dbApi, { 213 | logger, 214 | }, importerApi); 215 | // $FlowFixMe Ignore this error as we are testing invalid payload 216 | const request = handler({ body: { signedTx: undefined } }); 217 | return expect(request).to.be.rejectedWith( 218 | Error, 219 | 'Signed transaction missing', 220 | ); 221 | }); 222 | 223 | it('should reject on importer error', async () => { 224 | const importerApi = { 225 | sendTx: sinon.fake.rejects(), 226 | }; 227 | const handler = routes.signedTransaction.handler(dbApi, { 228 | logger, 229 | }, importerApi); 230 | // $FlowFixMe Ignore this error as we are testing invalid payload 231 | const request = handler({ body: { signedTx: 'fakeSignedTx' } }); 232 | return expect(request).to.be.rejectedWith( 233 | Error, 234 | 'Error trying to connect with importer', 235 | ); 236 | }); 237 | 238 | it('should reject on invalid transaction', async () => { 239 | const importerApi = { 240 | sendTx: sinon.fake.resolves({ status: 200, data: { Left: 'Error' } }), 241 | }; 242 | const handler = routes.signedTransaction.handler(dbApi, { 243 | logger, 244 | }, importerApi); 245 | // 
$FlowFixMe Ignore this error as we are testing invalid payload 246 | const request = handler({ body: { signedTx: 'fakeSignedTx' } }); 247 | return expect(request).to.be.rejectedWith( 248 | Error, 249 | 'Error processing transaction', 250 | ); 251 | }); 252 | 253 | it('should reject on invalid witness', async () => { 254 | const invalidWitnessError = 'Tx not broadcasted 3cb8547f391537ba: input #0\'s witness' 255 | + ' doesn\'t pass verification:\n witness: PkWitness: key = pub:0ff1c324, key' 256 | + ' hash = 04666a4a, sig = \n reason: the signature in the witness doesn\'t' 257 | + ' pass validation'; 258 | const importerApi = { 259 | sendTx: sinon.fake.resolves({ status: 200, data: { Left: invalidWitnessError } }), 260 | }; 261 | const handler = routes.signedTransaction.handler(dbApi, { 262 | logger, 263 | }, importerApi); 264 | const request = handler({ body: { signedTx: 'fakeSignedTx' } }); 265 | return expect(request).to.be.rejectedWith( 266 | Error, 267 | 'Invalid witness', 268 | ); 269 | }); 270 | }); 271 | }); 272 | -------------------------------------------------------------------------------- /tls-files/develop/ca.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDhTCCAm2gAwIBAgIJALummY/PlhhcMA0GCSqGSIb3DQEBCwUAMFkxCzAJBgNV 3 | BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX 4 | aWRnaXRzIFB0eSBMdGQxEjAQBgNVBAMMCWxvY2FsaG9zdDAeFw0xODA0MjYxNzI0 5 | MzlaFw0yODA0MjMxNzI0MzlaMFkxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21l 6 | LVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQxEjAQBgNV 7 | BAMMCWxvY2FsaG9zdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKjF 8 | 8jaZJfzM2xhXeOd6+xX1pC8zEp6X/rjHFReK/1yKlyhJIwH24o58Vtf5uASejsGO 9 | W+djrVW62XPdKIV3o9tRk8h21E4oEGsAB9k+ziZdtkQzIp5ZEZAutioI0TTys8YZ 10 | oBY/C2D0oa5ZWOeVGVm5Zvx1YG0ZoD4fklnxSA0fHr9B6GH8sGIEEuwx/8UmcXrJ 11 | 8x+6edWzELRWc+o4zLaQbcyCpYr109rKMcsQWLwF8su31ZdDCjbHbvv+isi2eUNV 12 | GjT3QWIuRY7KAB1f83K/YW7jgpqzzeLL9/r9hpwgRCRZgyEXHu+0hvmH55yOraTR 13 | AJ6CVMgshkFSlU2jj58CAwEAAaNQME4wHQYDVR0OBBYEFMm5JBDlfo7yUo8WFI84 14 | dS1/4gpTMB8GA1UdIwQYMBaAFMm5JBDlfo7yUo8WFI84dS1/4gpTMAwGA1UdEwQF 15 | MAMBAf8wDQYJKoZIhvcNAQELBQADggEBADh/dwz7bUeCQHz1+E3xkdQ3pOy5yv/8 16 | tPdSVmLcLAcXuY+GbCjfksruGS2VWwy2R7GJlIZyWp5S8jmohTcq57cPPR9OvzB2 17 | LzJuBE8GXqCoCs9eGY7sgvIzXv6PuOJlDWXIh8apJ9hBY3s4jI5NAIrmGwTe3mgF 18 | B/UW9A8SuJOYnUf5ozh+f8ibPl84JSQS7A2e+h4Ffutp8IbtrSIW9zbLAe+urrwV 19 | qpFnZuOVgr+N78AYhyXm/QgMdFtfBRsosDGkiBcto5S+zz+yyjI1RMquKeoICMng 20 | AS1cnDcTlNEY+3zuqaqmTyYo+AYdDMlEQSGksNoZqNiBPnO3C2HMnxQ= 21 | -----END CERTIFICATE----- 22 | -------------------------------------------------------------------------------- /tls-files/develop/server.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDwTCCAqmgAwIBAgIJAJ506F8lSFNEMA0GCSqGSIb3DQEBCwUAMFkxCzAJBgNV 3 | BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX 4 | aWRnaXRzIFB0eSBMdGQxEjAQBgNVBAMMCWxvY2FsaG9zdDAeFw0xODA0MjYxNzI0 5 | NDBaFw0yODA0MjMxNzI0NDBaMIGTMQswCQYDVQQGEwJERTEPMA0GA1UECAwGQmVy 6 | bGluMRIwEAYDVQQHDAlOZXVLb2VsbG4xFTATBgNVBAoMDFdlaXNlc3RyYXNzZTEV 7 | MBMGA1UECwwMbG9jYWxfUm9vdENBMR0wGwYJKoZIhvcNAQkBFg5pa2tlQGxvY2Fs 8 | aG9zdDESMBAGA1UEAwwJbG9jYWxob3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A 9 | MIIBCgKCAQEApn1tfMT9wpp3yIE86Dt3EicXuBPre8Ktc9DqQr3+9W7dLM5A61ay 10 | jdEiiQVCMhpJV8eeSAEkBZG6pCKenRwqp7DoCwQuMOP7UjkalGNlC91Tma2akjVA 11 | MAjiFgPvOCwAIcUzWrCiU6OtlkvrkDDJI7zt13PFgumRzdHDOyQzb3TIEQm45ZUX 12 | 
bK+MWh0IeM49u1p4GqBjXKNi97Wm41BHStpXNkXAi2xeVY4hFP/w1sGzjen9uDwj 13 | 9v29M/jChjlnc28fHsgrMqBIX7NIRKR8AYZbdCGw04ItqC3ZF0dT9emfQkLPnwCd 14 | 4bpIM61vYfDaxIU6aiBp0AHCkQhzt+jsiwIDAQABo1EwTzAfBgNVHSMEGDAWgBTJ 15 | uSQQ5X6O8lKPFhSPOHUtf+IKUzAJBgNVHRMEAjAAMAsGA1UdDwQEAwIE8DAUBgNV 16 | HREEDTALgglsb2NhbGhvc3QwDQYJKoZIhvcNAQELBQADggEBAETHi3+fobbomL5n 17 | HIN0qSWfjZ9JUiVxl+WBlw7Iyx9PSh5177KUTpqr/1wcaP1JpIYHewKtlZj8QnTD 18 | aIHd+Z6Jz9RzhlQCPwi2Qmii41J30Zj2DuehH0WDvGNacHNHtIsAQ2Y9x8UV2n2E 19 | eA+ZxeEUbSg0CXn1viN0AbvpJs3bJb+CbVGaqnP+wpa8ySnqMFmwIpUWameGVRO7 20 | jNpxIPHX9aGtal2M4FGXV4GN7680JXq+wi4j02RIkJRN00XfOZAXWOWi4rUoT4OD 21 | vEHnn7mSpoIfKk/TwGN/DOsGWl4bH0+KBGYhOvPET+JUj/QFJWGxS6rgdUgw+ltn 22 | yw41YQI= 23 | -----END CERTIFICATE----- 24 | -------------------------------------------------------------------------------- /tls-files/develop/server.key: -------------------------------------------------------------------------------- 1 | -----BEGIN PRIVATE KEY----- 2 | MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQCmfW18xP3CmnfI 3 | gTzoO3cSJxe4E+t7wq1z0OpCvf71bt0szkDrVrKN0SKJBUIyGklXx55IASQFkbqk 4 | Ip6dHCqnsOgLBC4w4/tSORqUY2UL3VOZrZqSNUAwCOIWA+84LAAhxTNasKJTo62W 5 | S+uQMMkjvO3Xc8WC6ZHN0cM7JDNvdMgRCbjllRdsr4xaHQh4zj27WngaoGNco2L3 6 | tabjUEdK2lc2RcCLbF5VjiEU//DWwbON6f24PCP2/b0z+MKGOWdzbx8eyCsyoEhf 7 | s0hEpHwBhlt0IbDTgi2oLdkXR1P16Z9CQs+fAJ3hukgzrW9h8NrEhTpqIGnQAcKR 8 | CHO36OyLAgMBAAECggEARNmN05ErV7TFYFeBtpAQ16Lu/iLOuSlyzXhRyDQGIb0t 9 | HgBVfH0n7dnUrFFOZKXWOsTEQgamvN7dDk96xIIHab2Q2C1sZK+DGSegREkuWk+2 10 | 6NCQIXKADt09kdkirloT2+NPL2voVSpU+NiAm+pw46SXvweTYGXy8+iREf4Qf0Bj 11 | NB/EyLb5ZCQPtKC0h4fxR1k8zuU3ZJK35G+R9HhFKECo8jZtOpDTTcJzpJrqEPEN 12 | Z04tqDvWzKthGL+TMcNJeh+QIIf4ayakY5PZ1g6tGCx8mRT1L4MKS0KoQxSMLHCV 13 | HGlFyiiB/RETB5OqILxWWbjt3gdecYL9K0b87YuiwQKBgQDWb/MHEV1mEQ+KFCX0 14 | /HIz/7LsVniMdahV61VmImEKp62Fu1UCOkKeEndLml7QLyF+cOYrujW0hzndy2K3 15 | uzsVRnV9lIkxOOdJKJjAweQjNbq0DsglTmY/sk3H9D/C961EdWvVdaj6ntdY18i5 16 | bTIrRF/7BZ3V0kv56O5sZpNdkQKBgQDGwmd8REjm7IdbVzWT1UJH9/hwM79/my/B 17 | DjHVJZFjDtcKA7iyhPHGxi4m2qGX8Autsewj2H4HOcpHdlU07W2jMDvJedjgl+4C 18 | 1yHO4lGwGuEzsF9ccAr3ep4VRs12f22g0mi+KstPQR6X/4HHPPGzlKF7jaTv/BwT 19 | S3mGDm8KWwKBgCibBlAeT6tWK7Uf35gmfZGvJ7htH2KpnPi4GXG/rNSRr+AU+yHy 20 | 2k6IfKKKVs3hPtxskIlTN3w55xFCxCy6fZOnxfPDvr7dYZ6O224UyRiJwhL6gZfe 21 | 4qnJH8VvUOugbuT5vzOqlhbvISQnqFDEhq1+wxYLmDm3Gr/2Bb/tJJahAoGAC3X8 22 | R6McXQx4haDSuxKJ/uJilf215iLJryUlZHgFGSQq7C3owYp+8opM8PqcZMl3su1W 23 | g36hTcNcUaCWKzYho05NtEFtaw33sHw9O9O7CB+UqslZxlfDmh+c6/coM9QHO0XG 24 | umh3Trufiv41uopeGY/Re/Ff+GKeL9KjUEKWzo8CgYBg3s3tAPFhJkh9kvqoHv7f 25 | r+sgDWjHhVmIpSJ8wWXxIINcjsVcdjBou24QlzfS2xg8x7qDeRSTL+OgyJQ24akT 26 | gaUHOaD3dUVxuq0pJry5TR1f50s/6MmScP7d2qT/XrFXxxDluDQgqRqFc60E/vBo 27 | UlFTXfdIOKTcTErf7wpc7Q== 28 | -----END PRIVATE KEY----- 29 | -------------------------------------------------------------------------------- /tls-files/staging/ca.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDhTCCAm2gAwIBAgIJALummY/PlhhcMA0GCSqGSIb3DQEBCwUAMFkxCzAJBgNV 3 | BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX 4 | aWRnaXRzIFB0eSBMdGQxEjAQBgNVBAMMCWxvY2FsaG9zdDAeFw0xODA0MjYxNzI0 5 | MzlaFw0yODA0MjMxNzI0MzlaMFkxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21l 6 | LVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQxEjAQBgNV 7 | BAMMCWxvY2FsaG9zdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKjF 8 | 8jaZJfzM2xhXeOd6+xX1pC8zEp6X/rjHFReK/1yKlyhJIwH24o58Vtf5uASejsGO 9 | W+djrVW62XPdKIV3o9tRk8h21E4oEGsAB9k+ziZdtkQzIp5ZEZAutioI0TTys8YZ 10 
| oBY/C2D0oa5ZWOeVGVm5Zvx1YG0ZoD4fklnxSA0fHr9B6GH8sGIEEuwx/8UmcXrJ 11 | 8x+6edWzELRWc+o4zLaQbcyCpYr109rKMcsQWLwF8su31ZdDCjbHbvv+isi2eUNV 12 | GjT3QWIuRY7KAB1f83K/YW7jgpqzzeLL9/r9hpwgRCRZgyEXHu+0hvmH55yOraTR 13 | AJ6CVMgshkFSlU2jj58CAwEAAaNQME4wHQYDVR0OBBYEFMm5JBDlfo7yUo8WFI84 14 | dS1/4gpTMB8GA1UdIwQYMBaAFMm5JBDlfo7yUo8WFI84dS1/4gpTMAwGA1UdEwQF 15 | MAMBAf8wDQYJKoZIhvcNAQELBQADggEBADh/dwz7bUeCQHz1+E3xkdQ3pOy5yv/8 16 | tPdSVmLcLAcXuY+GbCjfksruGS2VWwy2R7GJlIZyWp5S8jmohTcq57cPPR9OvzB2 17 | LzJuBE8GXqCoCs9eGY7sgvIzXv6PuOJlDWXIh8apJ9hBY3s4jI5NAIrmGwTe3mgF 18 | B/UW9A8SuJOYnUf5ozh+f8ibPl84JSQS7A2e+h4Ffutp8IbtrSIW9zbLAe+urrwV 19 | qpFnZuOVgr+N78AYhyXm/QgMdFtfBRsosDGkiBcto5S+zz+yyjI1RMquKeoICMng 20 | AS1cnDcTlNEY+3zuqaqmTyYo+AYdDMlEQSGksNoZqNiBPnO3C2HMnxQ= 21 | -----END CERTIFICATE----- 22 | -------------------------------------------------------------------------------- /tls-files/staging/server.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDwTCCAqmgAwIBAgIJAJ506F8lSFNEMA0GCSqGSIb3DQEBCwUAMFkxCzAJBgNV 3 | BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX 4 | aWRnaXRzIFB0eSBMdGQxEjAQBgNVBAMMCWxvY2FsaG9zdDAeFw0xODA0MjYxNzI0 5 | NDBaFw0yODA0MjMxNzI0NDBaMIGTMQswCQYDVQQGEwJERTEPMA0GA1UECAwGQmVy 6 | bGluMRIwEAYDVQQHDAlOZXVLb2VsbG4xFTATBgNVBAoMDFdlaXNlc3RyYXNzZTEV 7 | MBMGA1UECwwMbG9jYWxfUm9vdENBMR0wGwYJKoZIhvcNAQkBFg5pa2tlQGxvY2Fs 8 | aG9zdDESMBAGA1UEAwwJbG9jYWxob3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A 9 | MIIBCgKCAQEApn1tfMT9wpp3yIE86Dt3EicXuBPre8Ktc9DqQr3+9W7dLM5A61ay 10 | jdEiiQVCMhpJV8eeSAEkBZG6pCKenRwqp7DoCwQuMOP7UjkalGNlC91Tma2akjVA 11 | MAjiFgPvOCwAIcUzWrCiU6OtlkvrkDDJI7zt13PFgumRzdHDOyQzb3TIEQm45ZUX 12 | bK+MWh0IeM49u1p4GqBjXKNi97Wm41BHStpXNkXAi2xeVY4hFP/w1sGzjen9uDwj 13 | 9v29M/jChjlnc28fHsgrMqBIX7NIRKR8AYZbdCGw04ItqC3ZF0dT9emfQkLPnwCd 14 | 4bpIM61vYfDaxIU6aiBp0AHCkQhzt+jsiwIDAQABo1EwTzAfBgNVHSMEGDAWgBTJ 15 | uSQQ5X6O8lKPFhSPOHUtf+IKUzAJBgNVHRMEAjAAMAsGA1UdDwQEAwIE8DAUBgNV 16 | HREEDTALgglsb2NhbGhvc3QwDQYJKoZIhvcNAQELBQADggEBAETHi3+fobbomL5n 17 | HIN0qSWfjZ9JUiVxl+WBlw7Iyx9PSh5177KUTpqr/1wcaP1JpIYHewKtlZj8QnTD 18 | aIHd+Z6Jz9RzhlQCPwi2Qmii41J30Zj2DuehH0WDvGNacHNHtIsAQ2Y9x8UV2n2E 19 | eA+ZxeEUbSg0CXn1viN0AbvpJs3bJb+CbVGaqnP+wpa8ySnqMFmwIpUWameGVRO7 20 | jNpxIPHX9aGtal2M4FGXV4GN7680JXq+wi4j02RIkJRN00XfOZAXWOWi4rUoT4OD 21 | vEHnn7mSpoIfKk/TwGN/DOsGWl4bH0+KBGYhOvPET+JUj/QFJWGxS6rgdUgw+ltn 22 | yw41YQI= 23 | -----END CERTIFICATE----- 24 | -------------------------------------------------------------------------------- /tls-files/staging/server.key: -------------------------------------------------------------------------------- 1 | -----BEGIN PRIVATE KEY----- 2 | MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQCmfW18xP3CmnfI 3 | gTzoO3cSJxe4E+t7wq1z0OpCvf71bt0szkDrVrKN0SKJBUIyGklXx55IASQFkbqk 4 | Ip6dHCqnsOgLBC4w4/tSORqUY2UL3VOZrZqSNUAwCOIWA+84LAAhxTNasKJTo62W 5 | S+uQMMkjvO3Xc8WC6ZHN0cM7JDNvdMgRCbjllRdsr4xaHQh4zj27WngaoGNco2L3 6 | tabjUEdK2lc2RcCLbF5VjiEU//DWwbON6f24PCP2/b0z+MKGOWdzbx8eyCsyoEhf 7 | s0hEpHwBhlt0IbDTgi2oLdkXR1P16Z9CQs+fAJ3hukgzrW9h8NrEhTpqIGnQAcKR 8 | CHO36OyLAgMBAAECggEARNmN05ErV7TFYFeBtpAQ16Lu/iLOuSlyzXhRyDQGIb0t 9 | HgBVfH0n7dnUrFFOZKXWOsTEQgamvN7dDk96xIIHab2Q2C1sZK+DGSegREkuWk+2 10 | 6NCQIXKADt09kdkirloT2+NPL2voVSpU+NiAm+pw46SXvweTYGXy8+iREf4Qf0Bj 11 | NB/EyLb5ZCQPtKC0h4fxR1k8zuU3ZJK35G+R9HhFKECo8jZtOpDTTcJzpJrqEPEN 12 | Z04tqDvWzKthGL+TMcNJeh+QIIf4ayakY5PZ1g6tGCx8mRT1L4MKS0KoQxSMLHCV 13 | HGlFyiiB/RETB5OqILxWWbjt3gdecYL9K0b87YuiwQKBgQDWb/MHEV1mEQ+KFCX0 14 | 
/HIz/7LsVniMdahV61VmImEKp62Fu1UCOkKeEndLml7QLyF+cOYrujW0hzndy2K3 15 | uzsVRnV9lIkxOOdJKJjAweQjNbq0DsglTmY/sk3H9D/C961EdWvVdaj6ntdY18i5 16 | bTIrRF/7BZ3V0kv56O5sZpNdkQKBgQDGwmd8REjm7IdbVzWT1UJH9/hwM79/my/B 17 | DjHVJZFjDtcKA7iyhPHGxi4m2qGX8Autsewj2H4HOcpHdlU07W2jMDvJedjgl+4C 18 | 1yHO4lGwGuEzsF9ccAr3ep4VRs12f22g0mi+KstPQR6X/4HHPPGzlKF7jaTv/BwT 19 | S3mGDm8KWwKBgCibBlAeT6tWK7Uf35gmfZGvJ7htH2KpnPi4GXG/rNSRr+AU+yHy 20 | 2k6IfKKKVs3hPtxskIlTN3w55xFCxCy6fZOnxfPDvr7dYZ6O224UyRiJwhL6gZfe 21 | 4qnJH8VvUOugbuT5vzOqlhbvISQnqFDEhq1+wxYLmDm3Gr/2Bb/tJJahAoGAC3X8 22 | R6McXQx4haDSuxKJ/uJilf215iLJryUlZHgFGSQq7C3owYp+8opM8PqcZMl3su1W 23 | g36hTcNcUaCWKzYho05NtEFtaw33sHw9O9O7CB+UqslZxlfDmh+c6/coM9QHO0XG 24 | umh3Trufiv41uopeGY/Re/Ff+GKeL9KjUEKWzo8CgYBg3s3tAPFhJkh9kvqoHv7f 25 | r+sgDWjHhVmIpSJ8wWXxIINcjsVcdjBou24QlzfS2xg8x7qDeRSTL+OgyJQ24akT 26 | gaUHOaD3dUVxuq0pJry5TR1f50s/6MmScP7d2qT/XrFXxxDluDQgqRqFc60E/vBo 27 | UlFTXfdIOKTcTErf7wpc7Q== 28 | -----END PRIVATE KEY----- 29 | --------------------------------------------------------------------------------
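Editor's note on the contract exercised by test/unit/routes.test.js above: each entry in src/routes.js exposes { method, path, handler }, where handler is a factory that receives a dbApi plus a server context ({ logger, apiConfig }) and, for the signed-transaction endpoint, an importerApi. Below is a minimal illustrative sketch of that shape, inferred solely from the assertions in the tests; the handler body and any helper behavior shown here are assumptions, not the actual src/routes.js implementation.

// Illustrative sketch only — shapes inferred from test/unit/routes.test.js; not repository code.
const filterUsedAddresses = {
  method: 'post',
  path: '/api/addresses/filterUsed',
  // The handler factory returns the async request handler that the tests invoke directly.
  handler: (dbApi, { logger, apiConfig }) => async (req) => {
    const addresses = req.body && req.body.addresses;
    // Matches the error message asserted by assertInvalidAddressesPayload.
    if (!addresses || addresses.length === 0 || addresses.length > apiConfig.addressesRequestLimit) {
      throw new Error(`Addresses request length should be (0, ${apiConfig.addressesRequestLimit}]`);
    }
    logger.debug('[filterUsedAddresses] processing request'); // logger API assumed (bunyan-style)
    const result = await dbApi.filterUsedAddresses(addresses);
    // The fake resolves { rows: [['a1', 'a2']] } and the test expects ['a1', 'a2'],
    // so the handler presumably unwraps the first row of the query result.
    return result.rows[0];
  },
};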