├── apidoc.json ├── production └── docker │ ├── .env │ ├── cleanup-images.sh │ ├── restore-auto.sh │ ├── docker-compose.yml │ ├── start-production.sh │ ├── README.md │ └── Dockerfile ├── leveldb ├── current │ └── README.md └── zips │ └── README.md ├── util ├── README.md ├── wipe-db.md ├── index │ ├── misc │ │ ├── README.md │ │ └── getSimpleNfts.js │ ├── getAllUtxos.js │ ├── getAllTxData.js │ ├── getAllAddresses.js │ ├── getOneTx.js │ ├── getOneAddr.js │ ├── getAllTokens.js │ ├── getOneToken.js │ ├── getAllAddressesWithTxid.js │ ├── getAllPinClaims.js │ ├── getGroupTokens.js │ ├── getNFTTokens.js │ ├── getAllTxs.js │ └── create-tx-map.js ├── data │ └── get-data.js └── wipe-test-db.js ├── index.js ├── swarm.key ├── examples ├── README.md ├── create-proof-of-burn.js └── pin-ipfs-content.js ├── logs └── README.md ├── .on-save.json ├── test ├── unit │ ├── mocks │ │ ├── app-mock.js │ │ ├── ipfs-coord-mock.js │ │ ├── leveldb-mock.js │ │ ├── ipfs-mock.js │ │ ├── log-api-mock.js │ │ ├── use-cases │ │ │ └── index.js │ │ ├── adapters │ │ │ └── index.js │ │ ├── utils-mock.js │ │ └── ctx-mock.js │ ├── README.md │ ├── controllers │ │ ├── rest-api │ │ │ ├── README.md │ │ │ ├── logs │ │ │ │ ├── logs.rest.controller.unit.js │ │ │ │ └── logs.rest.router.unit.js │ │ │ ├── rest.controller.unit.js │ │ │ ├── contact │ │ │ │ ├── contact.rest.router.unit.js │ │ │ │ └── contact.rest.controller.unit.js │ │ │ └── slp │ │ │ │ └── slp.rest.router.unit.js │ │ ├── json-rpc │ │ │ ├── about.json-rpc.controller.unit.js │ │ │ └── a14-rate-limits.js │ │ ├── controllers.unit.js │ │ └── timer-controllers.unit.js │ ├── adapters │ │ ├── wlogger.adapter.unit.js │ │ ├── webhook.adapter.unit.js │ │ ├── slp-indexer │ │ │ └── lib │ │ │ │ ├── level-db.unit.js │ │ │ │ ├── blacklist.unit.js │ │ │ │ ├── start-stop.unit.js │ │ │ │ ├── query.unit.js │ │ │ │ ├── ptxdb.unit.js │ │ │ │ ├── zmq.unit.js │ │ │ │ └── db-backup.unit.js │ │ ├── adapters-index-unit.js │ │ ├── ipfs-index.adapter.unit.js │ │ ├── 
ipfs.adapter.unit.js │ │ ├── contact.adapter.unit.js │ │ └── ipfs-coord.adapter.unit.js │ ├── misc │ │ ├── config.js │ │ └── server-unit.js │ └── use-cases │ │ └── index.use-case.unit.js └── integration │ └── adapters │ └── slp-indexer │ ├── lib │ ├── cache-integration.js │ └── rpc.integration.js │ └── tx-types │ ├── send.integration.js │ ├── genesis.integration.js │ └── mint.integration.js ├── .eslintrc.json ├── backup-leveldb.sh ├── wipe-db.sh ├── install-mongo.sh ├── config ├── env │ ├── development.js │ ├── test.js │ └── production.js └── index.js ├── slp-indexer.sh ├── shell-scripts ├── ipfs-service-provider-generic.sh ├── local-external-ipfs-node.sh └── ipfs-service-provider-relay.sh ├── src ├── use-cases │ └── index.js ├── adapters │ ├── webhook.js │ ├── slp-indexer │ │ ├── lib │ │ │ ├── blacklist.js │ │ │ ├── query.js │ │ │ ├── start-stop.js │ │ │ ├── cache.js │ │ │ ├── ptxdb.js │ │ │ ├── level-db.js │ │ │ ├── retry-queue.js │ │ │ └── zmq.js │ │ └── tx-maps │ │ │ ├── get-tx-map.js │ │ │ └── combine-maps.js │ ├── contact.js │ ├── ipfs │ │ ├── index.js │ │ ├── ipfs-coord.js │ │ └── ipfs.js │ ├── json-files.js │ ├── index.js │ ├── wlogger.js │ └── fullstack-jwt.js └── controllers │ ├── rest-api │ ├── logs │ │ ├── index.js │ │ └── controller.js │ ├── contact │ │ ├── index.js │ │ └── controller.js │ ├── index.js │ └── slp │ │ └── index.js │ ├── json-rpc │ ├── about │ │ └── index.js │ └── rate-limit.js │ ├── timer-controllers.js │ └── index.js ├── .editorconfig ├── CONTRIBUTING.md ├── .gitignore ├── dev-docs └── dev-notes.md ├── package.json ├── bin └── server.js └── README.md /apidoc.json: -------------------------------------------------------------------------------- 1 | { 2 | "sampleUrl": null 3 | 4 | } -------------------------------------------------------------------------------- /production/docker/.env: -------------------------------------------------------------------------------- 1 | COMPOSE_PROJECT_NAME=slp-indexer 2 | 
-------------------------------------------------------------------------------- /leveldb/current/README.md: -------------------------------------------------------------------------------- 1 | This directory stores the current database. 2 | -------------------------------------------------------------------------------- /leveldb/zips/README.md: -------------------------------------------------------------------------------- 1 | This directory hold snapshots of the database. 2 | -------------------------------------------------------------------------------- /util/README.md: -------------------------------------------------------------------------------- 1 | This directory contains utility functions for managing the database. 2 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | import Server from './bin/server.js' 2 | const server = new Server() 3 | 4 | server.startServer() 5 | -------------------------------------------------------------------------------- /swarm.key: -------------------------------------------------------------------------------- 1 | /key/swarm/psk/1.0.0/ 2 | /base16/ 3 | bbd935b70105b03ebd0c6a3c2d2730cd22fc0d18c490ccf689b6c8a22e1bed2a -------------------------------------------------------------------------------- /examples/README.md: -------------------------------------------------------------------------------- 1 | # Examples 2 | This directory contains helpful code examples for working with the psf-slp-indexer. 3 | -------------------------------------------------------------------------------- /logs/README.md: -------------------------------------------------------------------------------- 1 | This directory will hold the Winston daily logs. Any files saved to this directory will be ignored by Git. 
2 | -------------------------------------------------------------------------------- /util/wipe-db.md: -------------------------------------------------------------------------------- 1 | 2 | Here's how to wipe the db: 3 | 1. mongo 4 | 2. use koa-server-dev 5 | 3. db.dropDatabase() 6 | 4. exit 7 | -------------------------------------------------------------------------------- /.on-save.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "srcDir": "", 4 | "destDir": "", 5 | "files": "**/*.js", 6 | "command": "npm run lint" 7 | } 8 | ] 9 | -------------------------------------------------------------------------------- /test/unit/mocks/app-mock.js: -------------------------------------------------------------------------------- 1 | /* 2 | Mocks for Koa 'app' object. 3 | */ 4 | 5 | const app = { 6 | use: () => {} 7 | } 8 | 9 | export default app; 10 | -------------------------------------------------------------------------------- /production/docker/cleanup-images.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Remove all untagged docker images. 4 | docker rmi $(docker images | grep "^" | awk '{print $3}') 5 | 6 | -------------------------------------------------------------------------------- /util/index/misc/README.md: -------------------------------------------------------------------------------- 1 | # Misc Indexer Scripts 2 | 3 | These scripts were useful enough to hang on to as examples. This is a sort of 'junk drawer' for useful query scripts. 
4 | -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "standard", 3 | "env": { 4 | "node": true, 5 | "mocha": true 6 | }, 7 | "parserOptions": { 8 | "ecmaVersion": 8 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /backup-leveldb.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo What is the block height? 4 | 5 | read height 6 | 7 | zip -r slp-indexer-$height.zip leveldb/current 8 | 9 | #mv slp-indexer-$height.zip ~/tmp/ 10 | mv slp-indexer-$height.zip leveldb/zips/ 11 | -------------------------------------------------------------------------------- /wipe-db.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | rm -r leveldb/current/addrs 4 | rm -r leveldb/current/status 5 | rm -r leveldb/current/tokens 6 | rm -r leveldb/current/txs 7 | rm -r leveldb/backup 8 | rm -r leveldb/current/ptxs 9 | rm -r leveldb/current/utxos 10 | -------------------------------------------------------------------------------- /test/unit/mocks/ipfs-coord-mock.js: -------------------------------------------------------------------------------- 1 | /* 2 | Mocks for the ipfs-coord library 3 | */ 4 | 5 | class IPFSCoord { 6 | async isReady () { 7 | return true 8 | } 9 | 10 | async start () {} 11 | 12 | async subscribeToChat() {} 13 | } 14 | 15 | export default IPFSCoord; 16 | -------------------------------------------------------------------------------- /production/docker/restore-auto.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | rm -rf /home/safeuser/psf-slp-indexer/leveldb/current/* 4 | 5 | cp -r 
/home/safeuser/psf-slp-indexer/leveldb/zips/home/safeuser/psf-slp-indexer/src/adapters/slp-indexer/lib/leveldb/current/* /home/safeuser/psf-slp-indexer/leveldb/current/ 6 | -------------------------------------------------------------------------------- /install-mongo.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | curl -fsSL https://www.mongodb.org/static/pgp/server-4.4.asc | sudo apt-key add - 3 | echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-4.4.list 4 | sudo apt-get update 5 | sudo apt-get install -y mongodb-org 6 | sudo service mongod start 7 | sudo systemctl enable mongod 8 | -------------------------------------------------------------------------------- /config/env/development.js: -------------------------------------------------------------------------------- 1 | /* 2 | These are the environment settings for the DEVELOPMENT environment. 3 | This is the environment run by default with `npm start` if KOA_ENV is not 4 | specified. 5 | */ 6 | 7 | export default { 8 | session: 'secret-boilerplate-token', 9 | token: 'secret-jwt-token', 10 | database: 'mongodb://localhost:27017/psf-slp-indexer-dev', 11 | env: 'dev' 12 | } 13 | -------------------------------------------------------------------------------- /config/env/test.js: -------------------------------------------------------------------------------- 1 | /* 2 | These are the environment settings for the TEST environment. 3 | This is the environment run with `npm start` if KOA_ENV=test. 4 | This is the environment run by the test suite. 
5 | */ 6 | 7 | export default { 8 | session: 'secret-boilerplate-token', 9 | token: 'secret-jwt-token', 10 | database: 'mongodb://localhost:27017/psf-slp-indexer-test', 11 | env: 'test' 12 | } 13 | -------------------------------------------------------------------------------- /test/unit/README.md: -------------------------------------------------------------------------------- 1 | # Unit Tests 2 | Unit tests are defined as testing the smallest possible unit of a function. They also do not make any live network calls. 3 | 4 | Unit tests are broken up by directory: 5 | 6 | - [biz-logic](./biz-logic) tests the business logic libraries. 7 | - [rest-api](./rest-api) tests the REST API specific handling of the router. 8 | - json-rpc (coming soon) tests the JSON-RPC routing using ipfs-coord library. 9 | -------------------------------------------------------------------------------- /test/unit/mocks/leveldb-mock.js: -------------------------------------------------------------------------------- 1 | /* 2 | Mocks for LevelDB. 3 | */ 4 | 5 | class MockLevel { 6 | get () { 7 | return {} 8 | } 9 | 10 | put () { 11 | return {} 12 | } 13 | 14 | del () { 15 | return {} 16 | } 17 | 18 | createReadStream () { 19 | const stream = { 20 | on: () => {} 21 | } 22 | 23 | return stream 24 | } 25 | 26 | async close() { 27 | return {} 28 | } 29 | 30 | async open() { 31 | return {} 32 | } 33 | } 34 | 35 | export default MockLevel 36 | -------------------------------------------------------------------------------- /test/unit/controllers/rest-api/README.md: -------------------------------------------------------------------------------- 1 | # REST API Unit Tests 2 | 3 | The tests in this directory are unit tests of REST API. These tests are not 4 | concerned with the business logic behind the endpoints. They are only concerned 5 | with the handling of the REST API endpoint. 
These tests answer questions like: 6 | 7 | - Is the endpoint responding properly when the business logic throws an error? 8 | - When returning an error, is it returning the proper HTTP response? 9 | - When returning success, is it returning the correct payload? 10 | -------------------------------------------------------------------------------- /slp-indexer.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ## Customize these environment variables for your own full node. 4 | export RPC_IP=172.17.0.1 5 | export RPC_PORT=8332 6 | export ZMQ_PORT=28332 7 | export RPC_USER=bitcoin 8 | export RPC_PASS=password 9 | 10 | ## Uncomment this if you do not want the indexer to automatically old 11 | ## backup zip files. 12 | #export DELETE_BACKUP=1 13 | 14 | # Normal indexing, scanning every block and starting at SLP genesis. 15 | npm start 16 | 17 | # Fast reindex using a tx-map of SLP transactions. 18 | #npm run reindex 19 | -------------------------------------------------------------------------------- /util/index/getAllUtxos.js: -------------------------------------------------------------------------------- 1 | /* 2 | Utility tool to retrieve all token keys in the token DB. 3 | */ 4 | 5 | const level = require('level') 6 | 7 | const tokenDb = level(`${__dirname.toString()}/../../leveldb/current/utxos`, { 8 | valueEncoding: 'json' 9 | }) 10 | 11 | async function getTokens () { 12 | try { 13 | const stream = tokenDb.createReadStream() 14 | 15 | stream.on('data', function (data) { 16 | console.log(data.key, ' = ', data.value) 17 | }) 18 | } catch (err) { 19 | console.error(err) 20 | } 21 | } 22 | getTokens() 23 | -------------------------------------------------------------------------------- /util/index/getAllTxData.js: -------------------------------------------------------------------------------- 1 | /* 2 | Utility tool to retrieve all token keys in the token DB. 
3 | */ 4 | 5 | const level = require('level') 6 | 7 | const txDb = level(`${__dirname.toString()}/../../leveldb/current/txs`, { 8 | valueEncoding: 'json' 9 | }) 10 | 11 | async function getTxs () { 12 | try { 13 | const stream = txDb.createReadStream() 14 | 15 | stream.on('data', function (data) { 16 | console.log(data.key, ' = ', JSON.stringify(data.value, null, 2)) 17 | }) 18 | } catch (err) { 19 | console.error(err) 20 | } 21 | } 22 | getTxs() 23 | -------------------------------------------------------------------------------- /util/index/getAllAddresses.js: -------------------------------------------------------------------------------- 1 | /* 2 | Utility tool to retrieve all token keys in the token DB. 3 | */ 4 | 5 | const level = require('level') 6 | 7 | const addrDb = level(`${__dirname.toString()}/../../leveldb/current/addrs`, { 8 | valueEncoding: 'json' 9 | }) 10 | 11 | async function getAddrs () { 12 | try { 13 | const stream = addrDb.createReadStream() 14 | 15 | stream.on('data', function (data) { 16 | // console.log(data.key, ' = ', data.value) 17 | console.log(`${data.key} = ${JSON.stringify(data.value, null, 2)}`) 18 | }) 19 | } catch (err) { 20 | console.error(err) 21 | } 22 | } 23 | getAddrs() 24 | -------------------------------------------------------------------------------- /config/index.js: -------------------------------------------------------------------------------- 1 | import common from './env/common.js' 2 | 3 | import development from './env/development.js' 4 | import production from './env/production.js' 5 | import test from './env/test.js' 6 | 7 | const env = process.env.SVC_ENV || 'development' 8 | console.log(`Loading config for this environment: ${env}`) 9 | 10 | let config = development 11 | if (env === 'test') { 12 | config = test 13 | } else if (env === 'prod') { 14 | config = production 15 | } 16 | 17 | // const importStr = `./env/${env}.js` 18 | // console.log('importStr: ', importStr) 19 | // import config from importStr 20 | 
21 | export default Object.assign({}, common, config) 22 | -------------------------------------------------------------------------------- /util/index/getOneTx.js: -------------------------------------------------------------------------------- 1 | /* 2 | Utility tool to retrieve a single TX from the TX database. 3 | */ 4 | 5 | // const TXID = '0717811273b8f2a517ccfad9ff70cb839b6a190146ec911dc96975497b61f399' 6 | const TXID = '662bc5b6fe6fa2ab7ee3257ee31549d8490f3a7d591c52eecc053e97fc4c3a1c' 7 | 8 | const level = require('level') 9 | 10 | const txDb = level(`${__dirname.toString()}/../../leveldb/current/txs`, { 11 | valueEncoding: 'json' 12 | }) 13 | 14 | async function getTx () { 15 | try { 16 | const txData = await txDb.get(TXID) 17 | 18 | console.log(`${JSON.stringify(txData, null, 2)}`) 19 | } catch (err) { 20 | console.error(err) 21 | } 22 | } 23 | getTx() 24 | -------------------------------------------------------------------------------- /test/unit/mocks/ipfs-mock.js: -------------------------------------------------------------------------------- 1 | /* 2 | Mocks for the js-ipfs 3 | */ 4 | 5 | // class IPFS { 6 | // constructor () { 7 | // this.ipfs = {} 8 | // } 9 | // 10 | // static create () { 11 | // const mockIpfs = new MockIpfsInstance() 12 | // 13 | // return mockIpfs 14 | // } 15 | // 16 | // async start () {} 17 | // } 18 | 19 | function create () { 20 | const mockIpfs = new MockIpfsInstance() 21 | 22 | return mockIpfs 23 | } 24 | 25 | class MockIpfsInstance { 26 | constructor () { 27 | this.config = { 28 | profiles: { 29 | apply: () => {} 30 | } 31 | } 32 | } 33 | 34 | stop () {} 35 | } 36 | 37 | export default create; 38 | -------------------------------------------------------------------------------- /config/env/production.js: -------------------------------------------------------------------------------- 1 | /* 2 | These are the environment settings for the PRODUCTION environment. 
3 | This is the environment run with `npm start` if KOA_ENV=production. 4 | This is the environment run inside the Docker container. 5 | 6 | It is assumed the MonogDB Docker container is accessed by port 5555 7 | so as not to conflict with the default host port of 27017 for MongoDB. 8 | */ 9 | 10 | export default { 11 | session: 'secret-boilerplate-token', 12 | token: 'secret-jwt-token', 13 | // database: 'mongodb://172.17.0.1:5555/ipfs-service-prod', 14 | database: process.env.DBURL 15 | ? process.env.DBURL 16 | : 'mongodb://172.17.0.1:5555/psf-slp-indexer-prod', 17 | env: 'prod' 18 | } 19 | -------------------------------------------------------------------------------- /production/docker/docker-compose.yml: -------------------------------------------------------------------------------- 1 | # Start the service with the command 'docker-compose up -d' 2 | 3 | version: '3.9' 4 | 5 | services: 6 | slp-indexer: 7 | build: . 8 | container_name: slp-indexer 9 | logging: 10 | driver: 'json-file' 11 | options: 12 | max-size: '10m' 13 | max-file: '10' 14 | #mem_limit: 500mb 15 | #links: 16 | # - mongo-slp-indexer 17 | ports: 18 | - '5010:5010' # : 19 | volumes: 20 | - ../data/ipfsdata:/home/safeuser/psf-slp-indexer/.ipfsdata 21 | - ../data/leveldb:/home/safeuser/psf-slp-indexer/leveldb 22 | - ./start-production.sh:/home/safeuser/psf-slp-indexer/start-production.sh 23 | restart: always 24 | -------------------------------------------------------------------------------- /shell-scripts/ipfs-service-provider-generic.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # This script is an example for running a generic ipfs-service-provider instance. 4 | 5 | # Ports 6 | export PORT=5001 # REST API port 7 | export IPFS_TCP_PORT=5268 8 | export IPFS_WS_PORT=5269 9 | 10 | # The human-readible name that is used when displaying data about this node. 
11 | export COORD_NAME=ipfs-service-provider-generic 12 | 13 | # This is used for end-to-end encryption (e2ee). 14 | export MNEMONIC="churn aisle shield silver ladder swear hunt slim pen demand spoil veteran" 15 | 16 | # 0 = less verbose. 3 = most verbose 17 | export DEBUG_LEVEL=1 18 | 19 | # MongoDB connection string. 20 | #export DBURL=mongodb://localhost:27017/bch-service-dev 21 | 22 | npm start 23 | -------------------------------------------------------------------------------- /src/use-cases/index.js: -------------------------------------------------------------------------------- 1 | /* 2 | This is a top-level library that encapsulates all the additional Use Cases. 3 | The concept of Use Cases comes from Clean Architecture: 4 | https://troutsblog.com/blog/clean-architecture 5 | */ 6 | 7 | class UseCases { 8 | constructor (localConfig = {}) { 9 | this.adapters = localConfig.adapters 10 | if (!this.adapters) { 11 | throw new Error( 12 | 'Instance of adapters must be passed in when instantiating Use Cases library.' 13 | ) 14 | } 15 | } 16 | 17 | // Run any startup Use Cases at the start of the app. 18 | async start () { 19 | // try { 20 | console.log('Async Use Cases have been started.') 21 | 22 | return true 23 | } 24 | } 25 | 26 | export default UseCases 27 | -------------------------------------------------------------------------------- /util/index/getOneAddr.js: -------------------------------------------------------------------------------- 1 | /* 2 | Utility tool to retrieve a single TX from the TX database. 
3 | */ 4 | 5 | let addr = 'bitcoincash:qp5zflad4y9vk7q7m7l4j4cqtnvxkl7nh5y79lprka' 6 | // let addr = 'bitcoincash:qqwmwye0udasr7m92nxx6attxhramh5qj5xg3ejk49' 7 | 8 | const level = require('level') 9 | const BCHJS = require('@psf/bch-js') 10 | const bchjs = new BCHJS() 11 | 12 | const addrDb = level(`${__dirname.toString()}/../../leveldb/current/addrs`, { 13 | valueEncoding: 'json' 14 | }) 15 | 16 | async function getAddr () { 17 | try { 18 | addr = bchjs.SLP.Address.toCashAddress(addr) 19 | 20 | const addrData = await addrDb.get(addr) 21 | 22 | console.log(`${JSON.stringify(addrData, null, 2)}`) 23 | } catch (err) { 24 | console.error(err) 25 | } 26 | } 27 | getAddr() 28 | -------------------------------------------------------------------------------- /test/unit/mocks/log-api-mock.js: -------------------------------------------------------------------------------- 1 | // Mocks representing an array of logs for the 2 | // Unit tests of logapi 3 | 4 | const data = [ 5 | { 6 | message: 'Error in lib/nodemailer.js/validateEmailArray()', 7 | level: 'error', 8 | timestamp: '2020-11-14T12:15:55.230Z' 9 | }, 10 | { 11 | message: 'Error in lib/nodemailer.js/validateEmailArray()', 12 | level: 'error', 13 | timestamp: '2020-11-14T12:15:55.231Z' 14 | }, 15 | { 16 | message: 'Error in lib/nodemailer.js/validateEmailArray()', 17 | level: 'error', 18 | timestamp: '2020-11-14T12:15:55.230Z' 19 | }, 20 | { 21 | message: 'Error in lib/nodemailer.js/validateEmailArray()', 22 | level: 'error', 23 | timestamp: '2020-11-14T12:15:55.231Z' 24 | } 25 | ] 26 | 27 | export default { 28 | data 29 | }; 30 | -------------------------------------------------------------------------------- /test/unit/mocks/use-cases/index.js: -------------------------------------------------------------------------------- 1 | /* 2 | Mocks for the use cases. 
3 | */ 4 | /* eslint-disable */ 5 | 6 | class UserUseCaseMock { 7 | async createUser(userObj) { 8 | return {} 9 | } 10 | 11 | async getAllUsers() { 12 | return true 13 | } 14 | 15 | async getUser(params) { 16 | return true 17 | } 18 | 19 | async updateUser(existingUser, newData) { 20 | return true 21 | } 22 | 23 | async deleteUser(user) { 24 | return true 25 | } 26 | 27 | async authUser(login, passwd) { 28 | return { 29 | generateToken: () => {} 30 | } 31 | } 32 | } 33 | 34 | class UseCasesMock { 35 | constuctor(localConfig = {}) { 36 | // this.user = new UserUseCaseMock(localConfig) 37 | } 38 | 39 | user = new UserUseCaseMock() 40 | } 41 | 42 | export default UseCasesMock; 43 | -------------------------------------------------------------------------------- /util/index/getAllTokens.js: -------------------------------------------------------------------------------- 1 | /* 2 | Utility tool to retrieve all token keys in the token DB. 3 | */ 4 | 5 | const level = require('level') 6 | 7 | const tokenDb = level(`${__dirname.toString()}/../../leveldb/current/tokens`, { 8 | valueEncoding: 'json' 9 | }) 10 | 11 | async function getTokens () { 12 | try { 13 | const stream = tokenDb.createReadStream() 14 | 15 | stream.on('data', function (data) { 16 | // console.log(data.key, ' = ', data.value) 17 | 18 | if (data.value.totalBurned !== '0' && data.value.totalBurned !== data.value.totalMinted) { 19 | data.value.totalTxs = data.value.txs.length 20 | data.value.txs = [] 21 | console.log(data.key, ' = ', data.value) 22 | } 23 | }) 24 | } catch (err) { 25 | console.error(err) 26 | } 27 | } 28 | getTokens() 29 | -------------------------------------------------------------------------------- /util/index/getOneToken.js: -------------------------------------------------------------------------------- 1 | /* 2 | Utility tool to retrieve token stats on a specific token 3 | */ 4 | 5 | // Group 6 | const tokenId = 7 | 'b0f842e4170fc2e3a0a178990509914e02bf5e20a3f395b32a3a3d96fe428eff' 8 
| 9 | // NFT 10 | // const tokenId = 11 | // '6f5c47c1a0d22781e6b28c5f119affd73de287b958ce8760ee02211626d4734e' 12 | 13 | const level = require('level') 14 | 15 | const tokenDb = level(`${__dirname.toString()}/../../leveldb/current/tokens`, { 16 | valueEncoding: 'json' 17 | }) 18 | 19 | async function getOneToken () { 20 | try { 21 | const tokenData = await tokenDb.get(tokenId) 22 | 23 | tokenData.txsTotal = tokenData.txs.length 24 | // tokenData.txs = [] 25 | 26 | console.log(`${JSON.stringify(tokenData, null, 2)}`) 27 | } catch (err) { 28 | console.error(err) 29 | } 30 | } 31 | getOneToken() 32 | -------------------------------------------------------------------------------- /test/integration/adapters/slp-indexer/lib/cache-integration.js: -------------------------------------------------------------------------------- 1 | /* 2 | Integration tests for the Cache library 3 | */ 4 | 5 | const assert = require('chai').assert 6 | 7 | const BCHJS = require('@psf/bch-js') 8 | const bchjs = new BCHJS() 9 | 10 | const Cache = require('../../../../../src/adapters/slp-indexer/lib/cache') 11 | 12 | describe('#cache.js', () => { 13 | let uut 14 | 15 | beforeEach(() => { 16 | uut = new Cache({ bchjs }) 17 | }) 18 | 19 | describe('#get', () => { 20 | it('should get tx data from bch-js on the first call', async () => { 21 | const txid = 22 | '6bc111fbf5b118021d68355ca19a0e77fa358dd931f284b2550f79a51ab4792a' 23 | 24 | let result = await uut.get(txid) 25 | // console.log('result: ', result) 26 | 27 | result = await uut.get(txid) 28 | 29 | assert.equal(result.blockheight, 543957) 30 | }) 31 | }) 32 | }) 33 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # http://editorconfig.org 2 | 3 | # A special property that should be specified at the top of the file outside of 4 | # any sections. 
Set to true to stop .editor config file search on current file 5 | root = true 6 | 7 | [*] 8 | # Indentation style 9 | # Possible values - tab, space 10 | indent_style = space 11 | 12 | # Indentation size in single-spaced characters 13 | # Possible values - an integer, tab 14 | indent_size = 2 15 | 16 | # Line ending file format 17 | # Possible values - lf, crlf, cr 18 | end_of_line = lf 19 | 20 | # File character encoding 21 | # Possible values - latin1, utf-8, utf-16be, utf-16le 22 | charset = utf-8 23 | 24 | # Denotes whether to trim whitespace at the end of lines 25 | # Possible values - true, false 26 | trim_trailing_whitespace = true 27 | 28 | # Denotes whether file should end with a newline 29 | # Possible values - true, false 30 | insert_final_newline = true 31 | -------------------------------------------------------------------------------- /util/data/get-data.js: -------------------------------------------------------------------------------- 1 | /* 2 | Generate the 'data' object that is passed around to the various libraries that 3 | process and index SLP transactions. This object is composed of three parts: 4 | - SLP data from decodeOpReturn() 5 | - block height 6 | - hydrated transaction data 7 | 8 | This data can then be used in unit tests. 
9 | */ 10 | 11 | const TXID = 'a98686df3ced24b5ce7f47762631b8a8dc87086aa7b9ee2be59bcdde7886264a' 12 | 13 | const Transaction = require('../../src/adapters/slp-indexer/lib/transaction') 14 | const transaction = new Transaction() 15 | 16 | async function getData (txid) { 17 | try { 18 | const slpData = await transaction.decodeOpReturn(txid) 19 | console.log(`slpData: ${JSON.stringify(slpData, null, 2)}`) 20 | 21 | const txData = await transaction.get(txid) 22 | console.log(`txData: ${JSON.stringify(txData, null, 2)}`) 23 | } catch (err) { 24 | console.error(err) 25 | } 26 | } 27 | getData(TXID) 28 | -------------------------------------------------------------------------------- /util/index/getAllAddressesWithTxid.js: -------------------------------------------------------------------------------- 1 | /* 2 | Utility tool to retrieve all token keys in the token DB that have a TX history 3 | that includes a given TXID. 4 | */ 5 | 6 | const TXID = 'c895e072e723a8c228db7e509dfa08684acdcd61973c2e1e26b1d34f42e023d5' 7 | 8 | const level = require('level') 9 | 10 | const addrDb = level(`${__dirname.toString()}/../../leveldb/current/addrs`, { 11 | valueEncoding: 'json' 12 | }) 13 | 14 | async function getAddrs () { 15 | try { 16 | const stream = addrDb.createReadStream() 17 | 18 | stream.on('data', function (data) { 19 | const txHistory = data.value.txs 20 | 21 | const hasTxid = txHistory.filter(x => x.txid === TXID) 22 | if (hasTxid.length) { 23 | // console.log(data.key, ' = ', data.value) 24 | console.log(`${data.key} = ${JSON.stringify(data.value, null, 2)}`) 25 | } 26 | }) 27 | } catch (err) { 28 | console.error(err) 29 | } 30 | } 31 | getAddrs() 32 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | This code repository welcomes code contributions from other developers, but any submissions must satisfy the following 
constraints: 4 | 5 | ## Tests 6 | Because thie code is *infrastructure* that runs the SLP token economy, 100% unit test coverage must be maintained. Any code submissions must include unit tests, and any submissions must not decrease the overall percentage of code coverage. 7 | 8 | ## Code Reviews 9 | The GitHub repository has a branch protection rules on the `master` branch to prevent pull requests from being merged without a code review. Any developer who has landed a PR on a PSF code repository can review submissions. 10 | 11 | ## Index Regression 12 | Prior to merging a PR, it's a good idea to run the indexer from SLP genesis to the current chain tip, to ensure that changes do not cause a regression error. 13 | 14 | # Questions 15 | If you have any questions or need guidence, reach on on the [Telegram channel](https://t.me/psf_slp). -------------------------------------------------------------------------------- /test/unit/adapters/wlogger.adapter.unit.js: -------------------------------------------------------------------------------- 1 | import { assert } from 'chai' 2 | import { Wlogger } from '../../../src/adapters/wlogger.js' 3 | import sinon from 'sinon' 4 | 5 | let uut 6 | let sandbox 7 | 8 | describe('#wlogger', () => { 9 | beforeEach(() => { 10 | sandbox = sinon.createSandbox() 11 | }) 12 | 13 | afterEach(() => { 14 | sandbox.restore() 15 | 16 | uut = new Wlogger() 17 | }) 18 | 19 | describe('#constructor', () => { 20 | it('should create a new wlogger instance', () => { 21 | uut = new Wlogger() 22 | // console.log('uut: ', uut) 23 | 24 | assert.property(uut, 'transport') 25 | }) 26 | }) 27 | 28 | describe('#notifyRotation', () => { 29 | it('should notify of a log rotation', () => { 30 | uut.notifyRotation() 31 | }) 32 | }) 33 | 34 | describe('#envronment', () => { 35 | it('should write to console in non-test environment', () => { 36 | uut.outputToConsole() 37 | }) 38 | }) 39 | }) 40 | 
#!/bin/bash

# This script is an example for running a production environment, which is
# defined by running an external go-ipfs node.

# Ports
export PORT=5010 # REST API port

# The human-readable name that is used when displaying data about this node.
export COORD_NAME=ipfs-service-provider-generic

# This is used for end-to-end encryption (e2ee).
export MNEMONIC="churn aisle shield silver ladder swear hunt slim pen demand spoil veteran"

# 0 = less verbose. 3 = most verbose
export DEBUG_LEVEL=0

# Production settings that use external IPFS node.
# https://github.com/christroutner/docker-ipfs
export SVC_ENV=production
export IPFS_HOST=localhost
export IPFS_API_PORT=5001

# Configure IPFS ports
export IPFS_TCP_PORT=4001
#export IPFS_WS_PORT=5269

# MongoDB connection string.
export DBURL=mongodb://localhost:27017/ipfs-service-dev

npm start
// Scan the token DB and print every token that looks like a simple NFT:
// total minted quantity of 1, zero decimals, and no active minting baton.
//
// Stream errors are emitted asynchronously and are NOT caught by the
// try/catch, so an explicit 'error' handler is attached to keep an async
// failure from crashing the process.
async function getTokens () {
  try {
    const stream = tokenDb.createReadStream()

    stream.on('data', function (data) {
      // console.log(data.key, ' = ', data.value)

      const hasQty1 = data.value.totalMinted === '1'
      const has0Decimals = data.value.decimals === 0
      const hasNoBaton = data.value.mintBatonIsActive === false

      if (hasQty1 && has0Decimals && hasNoBaton) {
        // Summarize the TX history rather than dumping the whole array.
        data.value.totalTxs = data.value.txs.length
        // data.value.txs = []
        console.log(data.key, ' = ', data.value)
      }
    })

    stream.on('error', function (err) {
      console.error('Read stream error: ', err)
    })
  } catch (err) {
    console.error(err)
  }
}
// Wipe every collection in the test database, then close the connection.
async function cleanDb () {
  // Connect to the Mongo Database.
  mongoose.Promise = global.Promise
  mongoose.set('useCreateIndex', true) // Stop deprecation warning.
  await mongoose.connect(config.database, { useNewUrlParser: true })

  console.log(`mongoose.connection.collections: ${JSON.stringify(mongoose.connection.collections, null, 2)}`)

  const collections = mongoose.connection.collections

  // Bug fix: the previous loop checked `collections.collection` (the literal
  // key 'collection', which never exists) and then called deleteMany() on the
  // key *string* — so no documents were ever deleted. Look up each collection
  // object by its name and delete its documents.
  for (const collectionName in collections) {
    await collections[collectionName].deleteMany()
  }

  mongoose.connection.close()
}
class Blacklist {
  /**
   * Controls the token blacklist. Token IDs on the list (loaded from the
   * config) are tokens the indexer should refuse to track.
   */
  constructor () {
    // Encapsulate dependencies
    this.config = config

    // Array of token-ID strings from the config.
    this.blacklist = this.config.blacklist
  }

  // This function expects a token ID as input. It compares that token ID
  // against the list of token IDs in the blacklist. It returns true if there
  // is a match. Otherwise it returns false.
  checkBlacklist (tokenId) {
    try {
      // Array.includes() replaces the previous manual index loop: it is the
      // idiomatic membership test and short-circuits on the first match.
      // A null/undefined blacklist still surfaces as a TypeError whose
      // message contains 'Cannot read', preserving existing error behavior.
      return this.blacklist.includes(tokenId)
    } catch (err) {
      console.error('Error in checkBlacklist()')
      throw err
    }
  }
}
#!/bin/bash

# This script is an example for running an ipfs-service-provider as a Circuit Relay.
# Circuit Relays help other nodes on the network communicate. They are
# critical for reliable functioning of the network, and for circumventing
# censorship.

# Ports
export PORT=5001 # REST API port
export IPFS_TCP_PORT=5268
export IPFS_WS_PORT=5269

# The human-readable name that is used when displaying data about this node.
export COORD_NAME=ipfs-service-provider-generic

# This is used for end-to-end encryption (e2ee).
export MNEMONIC="churn aisle shield silver ladder swear hunt slim pen demand spoil veteran"

# 0 = less verbose. 3 = most verbose
export DEBUG_LEVEL=1

# MongoDB connection string.
#export DBURL=mongodb://localhost:27017/bch-service-dev

# Comment to disable circuit relay functionality. Or set to 1 to enable.
export ENABLE_CIRCUIT_RELAY=1
# For browsers to use your circuit relay, you must set up a domain, SSL certificate,
# and you must forward that subdomain to the IPFS_WS_PORT.
#export CR_DOMAIN=subdomain.yourdomain.com

npm start
instrumented libs generated by jscoverage/JSCover 18 | lib-cov 19 | 20 | # Coverage directory used by tools like istanbul 21 | coverage 22 | 23 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 24 | .grunt 25 | 26 | # node-waf configuration 27 | .lock-wscript 28 | 29 | # Compiled binary addons (http://nodejs.org/api/addons.html) 30 | build/Release 31 | 32 | # Dependency directory 33 | node_modules 34 | 35 | # Optional npm cache directory 36 | .npm 37 | 38 | # Optional REPL history 39 | .node_repl_history 40 | 41 | 42 | ### SublimeText ### 43 | # cache files for sublime text 44 | *.tmlanguage.cache 45 | *.tmPreferences.cache 46 | *.stTheme.cache 47 | 48 | # workspace files are user-specific 49 | *.sublime-workspace 50 | 51 | # project files should be checked into the repository, unless a significant 52 | # proportion of contributors will probably not be using SublimeText 53 | *.sublime-project 54 | 55 | # sftp configuration file 56 | sftp-config.json 57 | 58 | #Documentation 59 | docs 60 | 61 | .nyc_output 62 | coverage 63 | database/ 64 | system-user-*.json 65 | orbitdb 66 | ipfsdata 67 | .ipfsdata 68 | leveldb/ 69 | logs/ 70 | tx-map.json 71 | slp-tx-map.zip 72 | data/ 73 | 74 | !README.md 75 | -------------------------------------------------------------------------------- /test/unit/mocks/adapters/index.js: -------------------------------------------------------------------------------- 1 | /* 2 | Mocks for the Adapter library. 
// Scan the token DB and collect all NFT Group tokens (type 0x81 = 129).
// The collected tokens are printed as JSON when the stream closes.
async function getNFTGroupTokens () {
  try {
    const stream = tokenDb.createReadStream()
    const tokens = []

    // Collect tokens whose SLP type marks them as an NFT Group token.
    function filterTokens (tokenData) {
      try {
        if (tokenData.type === 129) {
          tokens.push(tokenData)
        }
      } catch (err) {
        console.error('Error in filterTokens: ', err)
        throw err
      }
    }

    stream.on('data', function (data) {
      try {
        // Replace the (potentially huge) TX array with just its length.
        data.value.txLength = data.value.txs.length
        delete data.value.txs

        filterTokens(data.value)
      } catch (err) {
        console.error('Error in "data" read stream: ', err)
      }
    })

    stream.on('close', function () {
      try {
        // Bug fix: this script collects Group tokens (type 129), not child
        // NFTs, so label the output accordingly.
        console.log(`Group tokens: ${JSON.stringify(tokens, null, 2)}`)
      } catch (err) {
        console.error('Error in "close" read stream: ', err)
      }
    })

    stream.on('end', function () {
      console.log('Stream ended')
    })

    // Without this handler an async stream error would crash the process
    // as an unhandled 'error' event.
    stream.on('error', function (err) {
      console.error('Read stream error: ', err)
    })
  } catch (err) {
    console.error(err)
  }
}
describe('#config', () => {
  before(() => {
    // Backup the current environment setting.
    currentEnv = process.env.SVC_ENV
  })

  after(() => {
    // Restore the environment setting before starting these tests.
    process.env.SVC_ENV = currentEnv
  })

  it('Should return development environment config by default', async () => {
    const importedConfig = await import('../../../config/index.js')
    const config = importedConfig.default
    // console.log('config: ', config)

    assert.equal(config.env, 'dev')
  })

  it('Should return test environment config', async () => {
    // Hack to dynamically import a library multiple times:
    // https://github.com/denoland/deno/issues/6946

    process.env.SVC_ENV = 'test'

    const importedConfig2 = await import('../../../config/index.js?foo=bar1')
    const config = importedConfig2.default
    // console.log('config: ', config)

    assert.equal(config.env, 'test')
  })

  // Bug fix: this test description was a copy-paste of the previous one;
  // it exercises the *production* environment config.
  it('Should return production environment config', async () => {
    process.env.SVC_ENV = 'prod'

    const importedConfig3 = await import('../../../config/index.js?foo=bar2')
    const config = importedConfig3.default
    // console.log('config: ', config)

    assert.equal(config.env, 'prod')
  })
})
/**
 * Router library for the /logs REST API endpoint. Wires the Logs REST
 * controller into a koa-router instance and attaches its routes to a Koa app.
 */
class LogsRouter {
  constructor (localConfig = {}) {
    // Dependency Injection.
    const { adapters, useCases } = localConfig

    if (!adapters) {
      throw new Error(
        'Instance of Adapters library required when instantiating Logs REST Controller.'
      )
    }
    if (!useCases) {
      throw new Error(
        'Instance of Use Cases library required when instantiating Logs REST Controller.'
      )
    }

    this.adapters = adapters
    this.useCases = useCases

    // Encapsulate dependencies.
    this.logsRESTController = new LogsRESTControllerLib({
      adapters: this.adapters,
      useCases: this.useCases
    })

    // Instantiate the router and set the base route.
    this.router = new Router({ prefix: '/logs' })
  }

  /**
   * Attach the /logs routes to the given Koa app instance.
   * Throws if the app object is not provided.
   */
  attach (app) {
    if (!app) {
      throw new Error(
        'Must pass app object when attaching REST API controllers.'
      )
    }

    // Define the routes and attach the controller.
    this.router.post('/', this.logsRESTController.getLogs)

    // Attach the Controller routes to the Koa app.
    app.use(this.router.routes())
    app.use(this.router.allowedMethods())
  }
}
// Download the zipped SLP tx-map from IPFS (via a dweb.link gateway) and
// unzip it into the working directory.
async function getTxMap () {
  try {
    // Download `url` into the local file `dest`. Resolves true only after
    // the file handle has been closed. `cb`, if given, fires after close.
    const download = function (url, dest, cb) {
      return new Promise((resolve, reject) => {
        const file = fs.createWriteStream(dest)

        https
          .get(url, function (response) {
            // Bug fix: reject on non-200 responses. Previously a 404 or
            // redirect body would silently be saved as the zip file.
            if (response.statusCode !== 200) {
              file.close()
              fs.unlink(dest, () => {}) // Best-effort cleanup.
              return reject(
                new Error(`Download failed. Status code: ${response.statusCode}`)
              )
            }

            response.pipe(file)

            file.on('finish', function () {
              // close() is async. Bug fix: resolve only after close completes
              // (the old code resolved immediately after scheduling close).
              file.close(function () {
                if (cb) cb()
                resolve(true)
              })
            })
          })
          .on('error', function (err) {
            // Delete the partial file. Bug fix: fs.unlink() requires a
            // callback in modern Node; cleanup errors are ignored on purpose.
            fs.unlink(dest, () => {})
            return reject(err)
          })
      })
    }

    console.log(
      'Downloading tx-map. It\'s a big file (over 100MB), it can take a while...'
    )

    // const dest = 'tx-map.json'
    const dest = 'slp-tx-map.zip'
    await download(url, dest, function () {
      console.log('done')
    })

    // Unzip the tx map.
    shell.exec(`unzip ${dest}`)
  } catch (err) {
    console.error('Error in getTxMap(): ', err)
  }
}
class AboutRPC {
  constructor (localConfig) {
    // Encapsulate dependencies
    this.jsonrpc = jsonrpc
  }

  /**
   * @api {JSON} /about About IPFS Node
   * @apiPermission public
   * @apiName About
   * @apiGroup JSON About
   *
   * @apiExample Example usage:
   * {"jsonrpc":"2.0","id":"555","method":"about"}
   *
   * @apiDescription
   * This endpoint can be customized so that users can retrieve information about
   * your IPFS node and Service Provider application. This is a great place to
   * put a website URL, an IPFS hash, an other basic information.
   */

  // Top-level router for this library. Unlike the other JSON-RPC routers
  // there is only a single response: a JSON string describing this node,
  // taken from the config's announcement object.
  async aboutRouter (rpcData) {
    console.log('debugging: aboutRouter from ipfs-service-provider triggered')

    const response = {
      success: true,
      status: 200,
      message: JSON.stringify(config.announceJsonLd),
      endpoint: 'about'
    }

    return response
  }
}
// Build a mocked Koa Context object.
//
// `req` and `res` are optional plain objects merged over minimal fake
// request/response objects that share a duplex socket. `app` is an optional
// Koa instance; a fresh one is created when omitted. The real
// app.createContext() is used, so the returned ctx behaves like a genuine
// Koa context for unit tests.
const context = (req, res, app) => {
  const socket = new Stream.Duplex()

  // Fake IncomingMessage: default headers + shared socket, with the Readable
  // stream methods mixed in. Caller-supplied fields win (merged last).
  req = Object.assign(
    { headers: {}, socket },
    Stream.Readable.prototype,
    req || {}
  )
  // Fake ServerResponse: `_headers` backs the header get/set shims below.
  res = Object.assign(
    { _headers: {}, socket },
    Stream.Writable.prototype,
    res || {}
  )
  req.socket.remoteAddress = req.socket.remoteAddress || '127.0.0.1'
  app = app || new Koa()
  // Minimal header API that Koa calls; header names are case-insensitive.
  res.getHeader = k => res._headers[k.toLowerCase()]
  res.setHeader = (k, v) => (res._headers[k.toLowerCase()] = v)
  res.removeHeader = (k, v) => delete res._headers[k.toLowerCase()]

  const retApp = app.createContext(req, res)

  return retApp
}

// Convenience helpers returning just the request or response portion of the
// mocked context.
const request = (req, res, app) => context(req, res, app).request

const response = (req, res, app) => context(req, res, app).response

export {
  context,
  request,
  response
};
/**
 * Router library for the /contact REST API endpoint. Wires the Contact REST
 * controller into a koa-router instance and attaches its routes to a Koa app.
 */
class ContactRouter {
  constructor (localConfig = {}) {
    // Dependency Injection.
    const { adapters, useCases } = localConfig

    if (!adapters) {
      throw new Error(
        'Instance of Adapters library required when instantiating Contact REST Controller.'
      )
    }
    if (!useCases) {
      throw new Error(
        'Instance of Use Cases library required when instantiating Contact REST Controller.'
      )
    }

    this.adapters = adapters
    this.useCases = useCases

    // Encapsulate dependencies.
    this.contactRESTController = new ContactRESTControllerLib({
      adapters: this.adapters,
      useCases: this.useCases
    })

    // Instantiate the router and set the base route.
    this.router = new Router({ prefix: '/contact' })
  }

  /**
   * Attach the /contact routes to the given Koa app instance.
   * Throws if the app object is not provided.
   */
  attach (app) {
    if (!app) {
      throw new Error(
        'Must pass app object when attaching REST API controllers.'
      )
    }

    // Define the routes and attach the controller.
    this.router.post('/email', this.contactRESTController.email)

    // Attach the Controller routes to the Koa app.
    app.use(this.router.routes())
    app.use(this.router.allowedMethods())
  }
}
16 | sandbox = sinon.createSandbox() 17 | 18 | uut = new Blacklist() 19 | }) 20 | 21 | afterEach(() => sandbox.restore()) 22 | 23 | describe('#checkBlacklist', () => { 24 | it('should return true if a token ID is on the blacklist', () => { 25 | // Use a token ID that is on the blacklist (FlexUSD) 26 | const tokenId = 'dd21be4532d93661e8ffe16db6535af0fb8ee1344d1fef81a193e2b4cfa9fbc9' 27 | 28 | const result = uut.checkBlacklist(tokenId) 29 | 30 | assert.equal(result, true) 31 | }) 32 | 33 | it('should return false if a token ID is not in the blacklist', () => { 34 | // Use a token ID that is on the blacklist (FlexUSD) 35 | const tokenId = 'dd21be4532d93661e8ffe16db6535af0fb8ee1344d1fef81a193e2b4cfa9faaa' 36 | 37 | const result = uut.checkBlacklist(tokenId) 38 | 39 | assert.equal(result, false) 40 | }) 41 | 42 | it('should catch and throw errors', () => { 43 | try { 44 | // Force an error 45 | uut.blacklist = null 46 | 47 | uut.checkBlacklist() 48 | 49 | assert.fail('Unexpected result') 50 | } catch (err) { 51 | // console.log('err: ', err) 52 | assert.include(err.message, 'Cannot read') 53 | } 54 | }) 55 | }) 56 | }) 57 | -------------------------------------------------------------------------------- /test/unit/use-cases/index.use-case.unit.js: -------------------------------------------------------------------------------- 1 | /* 2 | Unit tests for the index.js file that aggregates all use-cases. 3 | */ 4 | 5 | // Public npm libraries 6 | import { assert } from 'chai' 7 | 8 | import sinon from 'sinon' 9 | 10 | // Local support libraries 11 | // const testUtils = require('../../utils/test-utils') 12 | 13 | // Unit under test (uut) 14 | import UseCases from '../../../src/use-cases/index.js' 15 | 16 | import adapters from '../mocks/adapters/index.js' 17 | 18 | describe('#use-cases', () => { 19 | let uut 20 | let sandbox 21 | 22 | before(async () => { 23 | // Delete all previous users in the database. 
24 | // await testUtils.deleteAllUsers() 25 | }) 26 | 27 | beforeEach(() => { 28 | sandbox = sinon.createSandbox() 29 | 30 | uut = new UseCases({ adapters }) 31 | }) 32 | 33 | afterEach(() => sandbox.restore()) 34 | 35 | describe('#constructor', () => { 36 | it('should throw an error if adapters are not passed in', () => { 37 | try { 38 | uut = new UseCases() 39 | 40 | assert.fail('Unexpected code path') 41 | 42 | // This is here to prevent the linter from complaining. 43 | assert.isOk(uut) 44 | } catch (err) { 45 | assert.include( 46 | err.message, 47 | 'Instance of adapters must be passed in when instantiating Use Cases library.' 48 | ) 49 | } 50 | }) 51 | }) 52 | 53 | describe('#start', () => { 54 | it('should initialize async use cases', async () => { 55 | const result = await uut.start() 56 | 57 | assert.equal(result, true) 58 | }) 59 | 60 | // it('should catch and throw errors', async () => { 61 | // // Force an error 62 | // sandbox.stub() 63 | // }) 64 | }) 65 | }) 66 | -------------------------------------------------------------------------------- /src/controllers/rest-api/index.js: -------------------------------------------------------------------------------- 1 | /* 2 | This index file for the Clean Architecture Controllers loads dependencies, 3 | creates instances, and attaches the controller to REST API endpoints for 4 | Koa. 5 | */ 6 | 7 | // Public npm libraries. 8 | 9 | // Local libraries 10 | import ContactRESTController from './contact/index.js' 11 | import LogsRESTController from './logs/index.js' 12 | import SlpRESTController from './slp/index.js' 13 | 14 | class RESTControllers { 15 | constructor (localConfig = {}) { 16 | // Dependency Injection. 17 | this.adapters = localConfig.adapters 18 | if (!this.adapters) { 19 | throw new Error( 20 | 'Instance of Adapters library required when instantiating REST Controller libraries.' 
21 | ) 22 | } 23 | this.useCases = localConfig.useCases 24 | if (!this.useCases) { 25 | throw new Error( 26 | 'Instance of Use Cases library required when instantiating REST Controller libraries.' 27 | ) 28 | } 29 | 30 | // console.log('Controllers localConfig: ', localConfig) 31 | } 32 | 33 | attachRESTControllers (app) { 34 | const dependencies = { 35 | adapters: this.adapters, 36 | useCases: this.useCases 37 | } 38 | 39 | // Attach the REST API Controllers associated with the /contact route 40 | const contactRESTController = new ContactRESTController(dependencies) 41 | contactRESTController.attach(app) 42 | 43 | // Attach the REST API Controllers associated with the /logs route 44 | const logsRESTController = new LogsRESTController(dependencies) 45 | logsRESTController.attach(app) 46 | 47 | const slpRESTController = new SlpRESTController(dependencies) 48 | slpRESTController.attach(app) 49 | 50 | return true 51 | } 52 | } 53 | 54 | export default RESTControllers 55 | -------------------------------------------------------------------------------- /test/integration/adapters/slp-indexer/tx-types/send.integration.js: -------------------------------------------------------------------------------- 1 | /* 2 | Integration tests for the send.js library 3 | */ 4 | 5 | // Public npm libraries 6 | 7 | // Local libraries 8 | const Transaction = require('../../../../../src/adapters/slp-indexer/lib/transaction') 9 | const transaction = new Transaction() 10 | const Send = require('../../../../../src/adapters/slp-indexer/tx-types/send') 11 | const MockLevel = require('../../../../unit/mocks/leveldb-mock') 12 | const Cache = require('../../../../../src/adapters/slp-indexer/lib/cache') 13 | 14 | describe('#send.js', () => { 15 | let uut 16 | 17 | beforeEach(() => { 18 | const addrDb = new MockLevel() 19 | const tokenDb = new MockLevel() 20 | const txDb = new MockLevel() 21 | const utxoDb = new MockLevel() 22 | txDb.get = () => { 23 | throw new Error('not in db') 24 | } 25 | 26 | 
const cache = new Cache({ txDb }) 27 | 28 | uut = new Send({ cache, addrDb, tokenDb, txDb, utxoDb }) 29 | }) 30 | 31 | describe('#processTx', () => { 32 | it('should processes problematic tx', async () => { 33 | const txid = '8e577799f9366f41880f53fb4dcca12af3a69cae5f4a6c6bf6f8dd7dc43ef564' 34 | 35 | const data = await getData(txid) 36 | 37 | const result = await uut.processTx(data) 38 | console.log('result: ', result) 39 | }) 40 | }) 41 | }) 42 | 43 | // Get the data needed to process a TXID. 44 | async function getData (txid) { 45 | const slpData = await transaction.decodeOpReturn(txid) 46 | console.log(`slpData: ${JSON.stringify(slpData, null, 2)}`) 47 | 48 | const txData = await transaction.get(txid) 49 | console.log(`txData: ${JSON.stringify(txData, null, 2)}`) 50 | 51 | const blockHeight = txData.blockheight 52 | 53 | const data = { slpData, txData, blockHeight } 54 | 55 | return data 56 | } 57 | -------------------------------------------------------------------------------- /production/docker/start-production.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # BEGIN: Optional configuration settings 4 | 5 | # This mnemonic is used to set up persistent public key for e2ee 6 | # Replace this with your own 12-word mnemonic. 7 | # You can get one at https://wallet.fullstack.cash. 8 | #export MNEMONIC="olive two muscle bottom coral ancient wait legend bronze useful process session" 9 | 10 | # The human readable name this IPFS node identifies as. 11 | #export COORD_NAME=ipfs-service-provider-generic 12 | 13 | # Allow this node to function as a circuit relay. It must not be behind a firewall. 14 | #export ENABLE_CIRCUIT_RELAY=true 15 | # For browsers to use your circuit realy, you must set up a domain, SSL certificate, 16 | # and you must forward that subdomain to the IPFS_WS_PORT. 17 | #export CR_DOMAIN=subdomain.yourdomain.com 18 | 19 | # Debug level. 0 = minimal info. 2 = max info. 
20 | export DEBUG_LEVEL=1 21 | 22 | # END: Optional configuration settings 23 | 24 | 25 | # Production database connection string. 26 | export DBURL=mongodb://172.17.0.1:5555/psf-slp-indexer-prod 27 | 28 | # Configure REST API port 29 | export PORT=5010 30 | 31 | # Production settings using external go-ipfs node. 32 | export SVC_ENV=production 33 | #export IPFS_HOST=172.17.0.1 34 | #export IPFS_API_PORT=5001 35 | #export IPFS_TCP_PORT=4001 36 | #export IPFS_WS_PORT=5269 37 | 38 | # RPC settings for the full node 39 | export RPC_IP=172.17.0.1 40 | export RPC_PORT=8332 41 | export ZMQ_PORT=28332 42 | export RPC_USER=bitcoin 43 | export RPC_PASS=password 44 | 45 | # Delete backups as it syncs. 46 | export DELETE_BACKUP=1 47 | 48 | export PIN_API_URL=http://172.17.0.1:5031 49 | 50 | # make directories 51 | mkdir leveldb 52 | mkdir leveldb/current 53 | mkdir leveldb/backup 54 | mkdir leveldb/zips 55 | cp restore-auto.sh leveldb/zips/ 56 | 57 | npm start 58 | -------------------------------------------------------------------------------- /test/integration/adapters/slp-indexer/tx-types/genesis.integration.js: -------------------------------------------------------------------------------- 1 | /* 2 | Integration tests for the genesis.js library 3 | */ 4 | 5 | // Public npm libraries 6 | 7 | // Local libraries 8 | const Transaction = require('../../../../../src/adapters/slp-indexer/lib/transaction') 9 | const transaction = new Transaction() 10 | const Genesis = require('../../../../../src/adapters/slp-indexer/tx-types/genesis') 11 | const MockLevel = require('../../../../unit/mocks/leveldb-mock') 12 | // const Cache = require('../../../../../src/adapters/slp-indexer/lib/cache') 13 | 14 | describe('#genesis.js', () => { 15 | let uut 16 | 17 | beforeEach(() => { 18 | const addrDb = new MockLevel() 19 | const tokenDb = new MockLevel() 20 | const txDb = new MockLevel() 21 | const utxoDb = new MockLevel() 22 | txDb.get = () => { 23 | throw new Error('not in db') 24 | } 25 | 26 
| // const cache = new Cache({ txDb }) 27 | 28 | uut = new Genesis({ addrDb, tokenDb, utxoDb }) 29 | }) 30 | 31 | describe('#addBatonAddress', () => { 32 | it('should processes problematic tx', async () => { 33 | const txid = '805b85ae1a7e1c1a770429a1158a8364cc8f6f1421115bcd0557cca9437d2769' 34 | 35 | const data = await getData(txid) 36 | 37 | const result = await uut.addBatonAddress(data) 38 | console.log('result: ', result) 39 | }) 40 | }) 41 | }) 42 | 43 | // Get the data needed to process a TXID. 44 | async function getData (txid) { 45 | const slpData = await transaction.decodeOpReturn(txid) 46 | console.log(`slpData: ${JSON.stringify(slpData, null, 2)}`) 47 | 48 | const txData = await transaction.get(txid) 49 | console.log(`txData: ${JSON.stringify(txData, null, 2)}`) 50 | 51 | const blockHeight = txData.blockheight 52 | 53 | const data = { slpData, txData, blockHeight } 54 | 55 | return data 56 | } 57 | -------------------------------------------------------------------------------- /test/unit/misc/server-unit.js: -------------------------------------------------------------------------------- 1 | /* 2 | Unit tests for the bin/server.js file 3 | */ 4 | 5 | // Public npm libraries 6 | import { assert } from 'chai' 7 | import sinon from 'sinon' 8 | 9 | // Local libraries 10 | import Server from '../../../bin/server.js' 11 | 12 | describe('#server', () => { 13 | let uut, sandbox 14 | 15 | beforeEach(() => { 16 | sandbox = sinon.createSandbox() 17 | 18 | uut = new Server() 19 | }) 20 | 21 | afterEach(() => sandbox.restore()) 22 | 23 | describe('#startServer', () => { 24 | it('should start the server', async () => { 25 | // Mock dependencies 26 | sandbox.stub(uut.controllers, 'initAdapters').resolves() 27 | sandbox.stub(uut.controllers, 'initUseCases').resolves() 28 | sandbox.stub(uut.controllers, 'attachRESTControllers').resolves() 29 | sandbox.stub(uut.controllers, 'attachControllers').resolves() 30 | sandbox.stub(uut.controllers.adapters.slpIndexer, 
'start').resolves() 31 | uut.config.env = 'dev' 32 | 33 | const result = await uut.startServer() 34 | // console.log('result: ', result) 35 | 36 | assert.property(result, 'env') 37 | 38 | // Turn off the server. 39 | uut.server.close() 40 | 41 | // Restor config env 42 | uut.config.env = 'test' 43 | }) 44 | 45 | it('should exit on failure', async () => { 46 | // Force an error 47 | sandbox.stub(uut.controllers, 'initAdapters').rejects(new Error('test error')) 48 | 49 | // Prevent default behavior of exiting the program. 50 | sandbox.stub(uut, 'sleep').resolves() 51 | sandbox.stub(uut.process, 'exit').returns() 52 | 53 | await uut.startServer() 54 | 55 | // Not throwing an error is a success 56 | }) 57 | }) 58 | 59 | describe('#sleep', () => { 60 | it('should execute', async () => { 61 | await uut.sleep(1) 62 | }) 63 | }) 64 | }) 65 | -------------------------------------------------------------------------------- /test/unit/adapters/adapters-index-unit.js: -------------------------------------------------------------------------------- 1 | /* 2 | Unit tests for the adapters index.js library 3 | */ 4 | 5 | // Global npm libraries 6 | import { assert } from 'chai' 7 | import sinon from 'sinon' 8 | 9 | // Local libraries 10 | import Adapters from '../../../src/adapters/index.js' 11 | 12 | describe('#adapters', () => { 13 | let uut, sandbox 14 | 15 | beforeEach(() => { 16 | uut = new Adapters() 17 | 18 | sandbox = sinon.createSandbox() 19 | }) 20 | 21 | afterEach(() => { 22 | sandbox.restore() 23 | }) 24 | 25 | describe('#start', () => { 26 | it('should start the async adapters', async () => { 27 | // Mock dependencies 28 | uut.config.getJwtAtStartup = true 29 | uut.config.useIpfs = true 30 | uut.config.env = 'not-a-test' 31 | sandbox.stub(uut.fullStackJwt, 'instanceBchjs').resolves() 32 | sandbox.stub(uut, 'initIndexer').returns() 33 | 34 | const result = await uut.start() 35 | 36 | assert.equal(result, true) 37 | }) 38 | 39 | it('should catch and throw an error', 
async () => { 40 | try { 41 | // Force an error 42 | uut.config.getJwtAtStartup = false 43 | uut.config.env = 'dev' 44 | sandbox.stub(uut, 'initIndexer').throws(new Error('test error')) 45 | 46 | await uut.start() 47 | 48 | assert.fail('Unexpected result') 49 | } catch (err) { 50 | // console.log('err: ', err) 51 | assert.include(err.message, 'test error') 52 | } 53 | }) 54 | }) 55 | 56 | describe('#initIndexer', () => { 57 | it('should initialize the indexer', () => { 58 | // Mock dependencies 59 | sandbox.stub(uut.slpIndexer, 'openDatabases').returns() 60 | sandbox.stub(uut.slpIndexer, 'encapsulateDeps').returns() 61 | 62 | const result = uut.initIndexer() 63 | 64 | assert.equal(result, true) 65 | }) 66 | }) 67 | }) 68 | -------------------------------------------------------------------------------- /src/adapters/contact.js: -------------------------------------------------------------------------------- 1 | /* 2 | Business logic for the /contact endpoint. 3 | */ 4 | 5 | /* eslint-disable no-useless-escape */ 6 | import config from '../../config/index.js' 7 | import NodeMailer from '../adapters/nodemailer.js' 8 | import wlogger from '../adapters/wlogger.js' 9 | 10 | process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0' 11 | const nodemailer = new NodeMailer() 12 | 13 | let _this 14 | 15 | class ContactLib { 16 | constructor () { 17 | _this = this 18 | _this.config = config 19 | _this.nodemailer = nodemailer 20 | } 21 | 22 | async sendEmail (emailObj) { 23 | try { 24 | // Validate input 25 | if (!emailObj.email || typeof emailObj.email !== 'string') { 26 | throw new Error("Property 'email' must be a string!") 27 | } 28 | 29 | if (!emailObj.formMessage || typeof emailObj.formMessage !== 'string') { 30 | throw new Error("Property 'formMessage' must be a string!") 31 | } 32 | 33 | // If an email list exists, the email will be sended to that list 34 | // otherwhise will be sended by default to the variable "_this.config.emailUser" 35 | let _to = [_this.config.emailUser] 36 
| 37 | // Email list is optional 38 | if (emailObj.emailList) { 39 | if ( 40 | !Array.isArray(emailObj.emailList) || 41 | !emailObj.emailList.length > 0 42 | ) { 43 | throw new Error("Property 'emailList' must be a array of emails!") 44 | } else { 45 | _to = emailObj.emailList 46 | } 47 | } 48 | 49 | console.log(`Trying send message to : ${_to}`) 50 | 51 | emailObj.subject = 'Someone wants contact with you.' 52 | emailObj.to = _to 53 | 54 | const result = await _this.nodemailer.sendEmail(emailObj) 55 | return result 56 | } catch (err) { 57 | wlogger.error('Error in lib/contact.js/sendEmail()') 58 | throw err 59 | } 60 | } 61 | } 62 | export default ContactLib 63 | -------------------------------------------------------------------------------- /src/adapters/slp-indexer/lib/query.js: -------------------------------------------------------------------------------- 1 | /* 2 | A library for querying the LevelDB entries. 3 | */ 4 | 5 | class Query { 6 | constructor (localConfig = {}) { 7 | const { addrDb, tokenDb, txDb, statusDb, pTxDb } = localConfig 8 | this.addrDb = addrDb 9 | this.tokenDb = tokenDb 10 | this.txDb = txDb 11 | this.statusDb = statusDb 12 | this.pTxDb = pTxDb 13 | 14 | if (!this.addrDb) throw new Error('addrDb missing when instantiating Query library') 15 | if (!this.tokenDb) throw new Error('tokenDb missing when instantiating Query library') 16 | if (!this.txDb) throw new Error('txDb missing when instantiating Query library') 17 | if (!this.statusDb) throw new Error('statusDb missing when instantiating Query library') 18 | if (!this.pTxDb) throw new Error('pTxDb missing when instantiating Query library') 19 | } 20 | 21 | // Query the state of an address from the database. 
22 | async getAddress (addr) { 23 | try { 24 | if (!addr) throw new Error('Address required when calling getAddress()') 25 | 26 | const result = await this.addrDb.get(addr) 27 | 28 | return result 29 | } catch (err) { 30 | console.log('Error in getAddress()') 31 | throw err 32 | } 33 | } 34 | 35 | async getTx (txid) { 36 | try { 37 | if (!txid) throw new Error('txid required when calling getTx()') 38 | 39 | const result = await this.txDb.get(txid) 40 | 41 | return result 42 | } catch (err) { 43 | console.log('Error in getTx(): ', err) 44 | throw err 45 | } 46 | } 47 | 48 | async getToken (tokenId) { 49 | try { 50 | if (!tokenId) throw new Error('tokenId required when calling getToken()') 51 | 52 | const result = await this.tokenDb.get(tokenId) 53 | 54 | return result 55 | } catch (err) { 56 | console.log('Error in getToken()') 57 | throw err 58 | } 59 | } 60 | } 61 | 62 | // module.exports = Query 63 | export default Query 64 | -------------------------------------------------------------------------------- /test/unit/controllers/controllers.unit.js: -------------------------------------------------------------------------------- 1 | /* 2 | Unit tests for controllers index.js file. 
3 | */ 4 | 5 | // Public npm libraries 6 | import { assert } from 'chai' 7 | import sinon from 'sinon' 8 | 9 | import Controllers from '../../../src/controllers/index.js' 10 | 11 | describe('#Controllers', () => { 12 | let uut 13 | let sandbox 14 | 15 | beforeEach(() => { 16 | sandbox = sinon.createSandbox() 17 | 18 | uut = new Controllers() 19 | }) 20 | 21 | afterEach(() => sandbox.restore()) 22 | 23 | describe('#attachControllers', () => { 24 | it('should attach the controllers', async () => { 25 | // mock IPFS 26 | sandbox.stub(uut.adapters, 'start').resolves({}) 27 | uut.adapters.ipfs.ipfsCoordAdapter = { 28 | attachRPCRouter: () => {} 29 | } 30 | 31 | // Mock the timer controllers 32 | sandbox.stub(uut.timerControllers, 'startTimers').returns() 33 | 34 | const app = { 35 | use: () => {} 36 | } 37 | 38 | await uut.attachControllers(app) 39 | }) 40 | }) 41 | 42 | describe('#initAdapters', () => { 43 | it('should initialize adapters', async () => { 44 | // Mock dependencies 45 | sandbox.stub(uut.adapters, 'start').resolves() 46 | 47 | const result = await uut.initAdapters() 48 | 49 | assert.equal(result, true) 50 | }) 51 | }) 52 | 53 | describe('#initUseCases', () => { 54 | it('should initialize use cases', async () => { 55 | // Mock dependencies 56 | sandbox.stub(uut.useCases, 'start').resolves() 57 | 58 | const result = await uut.initUseCases() 59 | 60 | assert.equal(result, true) 61 | }) 62 | }) 63 | 64 | describe('#attachRESTControllers', () => { 65 | it('should attach REST controllers', () => { 66 | const app = { 67 | use: () => {} 68 | } 69 | 70 | const result = uut.attachRESTControllers(app) 71 | 72 | assert.equal(result, true) 73 | }) 74 | }) 75 | }) 76 | -------------------------------------------------------------------------------- /src/adapters/ipfs/index.js: -------------------------------------------------------------------------------- 1 | /* 2 | top-level IPFS library that combines the individual IPFS-based libraries. 
3 | */ 4 | 5 | // Local libraries 6 | import IpfsAdapter from './ipfs.js' 7 | 8 | import IpfsCoordAdapter from './ipfs-coord.js' 9 | import config from '../../../config/index.js' 10 | 11 | class IPFS { 12 | constructor (localConfig = {}) { 13 | // Encapsulate dependencies 14 | this.ipfsAdapter = new IpfsAdapter() 15 | this.IpfsCoordAdapter = IpfsCoordAdapter 16 | this.process = process 17 | this.config = config 18 | 19 | this.ipfsCoordAdapter = {} // placeholder 20 | 21 | // Properties of this class instance. 22 | this.isReady = false 23 | } 24 | 25 | // Provides a global start() function that triggers the start() function in 26 | // the underlying libraries. 27 | async start () { 28 | try { 29 | // Start IPFS 30 | await this.ipfsAdapter.start() 31 | console.log('IPFS is ready.') 32 | 33 | // this.ipfs is a Promise that will resolve into an instance of an IPFS node. 34 | this.ipfs = this.ipfsAdapter.ipfs 35 | 36 | // Start ipfs-coord 37 | this.ipfsCoordAdapter = new this.IpfsCoordAdapter({ 38 | ipfs: this.ipfs, 39 | tcpPort: this.config.ipfsTcpPort, 40 | wsPort: this.config.ipfsWsPort 41 | }) 42 | await this.ipfsCoordAdapter.start() 43 | console.log('ipfs-coord is ready.') 44 | 45 | // Subscribe to the chat pubsub channel 46 | await this.ipfsCoordAdapter.subscribeToChat() 47 | 48 | return true 49 | } catch (err) { 50 | console.error('Error in adapters/ipfs/index.js/start()') 51 | 52 | // If error is due to a lock file issue. Kill the process, so that 53 | // Docker or pm2 has a chance to restart the service. 54 | if (err.message.includes('Lock already being held')) { 55 | this.process.exit(1) 56 | } 57 | 58 | throw err 59 | } 60 | } 61 | } 62 | 63 | export default IPFS 64 | -------------------------------------------------------------------------------- /src/controllers/rest-api/slp/index.js: -------------------------------------------------------------------------------- 1 | /* 2 | REST API library for /slp route. 3 | */ 4 | 5 | // Public npm libraries. 
6 | import Router from 'koa-router' 7 | 8 | // Local libraries. 9 | import SlpRESTControllerLib from './controller.js' 10 | 11 | // let _this 12 | 13 | class SlpRouter { 14 | constructor (localConfig = {}) { 15 | // Dependency Injection. 16 | this.adapters = localConfig.adapters 17 | if (!this.adapters) { 18 | throw new Error( 19 | 'Instance of Adapters library required when instantiating SLP REST Controller.' 20 | ) 21 | } 22 | this.useCases = localConfig.useCases 23 | if (!this.useCases) { 24 | throw new Error( 25 | 'Instance of Use Cases library required when instantiating SLP REST Controller.' 26 | ) 27 | } 28 | 29 | const dependencies = { 30 | adapters: this.adapters, 31 | useCases: this.useCases 32 | } 33 | 34 | // Encapsulate dependencies. 35 | this.slpRESTController = new SlpRESTControllerLib(dependencies) 36 | // this.validators = new Validators() 37 | 38 | // Instantiate the router and set the base route. 39 | const baseUrl = '/slp' 40 | this.router = new Router({ prefix: baseUrl }) 41 | 42 | // _this = this 43 | } 44 | 45 | attach (app) { 46 | if (!app) { 47 | throw new Error( 48 | 'Must pass app object when attaching REST API controllers.' 49 | ) 50 | } 51 | 52 | // Define the routes and attach the controller. 53 | this.router.get('/status', this.slpRESTController.status) 54 | this.router.post('/address', this.slpRESTController.address) 55 | this.router.post('/tx', this.slpRESTController.tx) 56 | this.router.post('/token', this.slpRESTController.token) 57 | 58 | // Attach the Controller routes to the Koa app. 
59 | app.use(this.router.routes()) 60 | app.use(this.router.allowedMethods()) 61 | } 62 | } 63 | 64 | // module.exports = SlpRouter 65 | export default SlpRouter 66 | -------------------------------------------------------------------------------- /src/controllers/timer-controllers.js: -------------------------------------------------------------------------------- 1 | /* 2 | This Controller library is concerned with timer-based functions that are 3 | kicked off periodicially. 4 | */ 5 | 6 | import config from '../../config/index.js' 7 | 8 | class TimerControllers { 9 | constructor (localConfig = {}) { 10 | // Dependency Injection. 11 | this.adapters = localConfig.adapters 12 | if (!this.adapters) { 13 | throw new Error( 14 | 'Instance of Adapters library required when instantiating Timer Controller libraries.' 15 | ) 16 | } 17 | this.useCases = localConfig.useCases 18 | if (!this.useCases) { 19 | throw new Error( 20 | 'Instance of Use Cases library required when instantiating Timer Controller libraries.' 21 | ) 22 | } 23 | 24 | this.debugLevel = localConfig.debugLevel 25 | 26 | // Encapsulate dependencies 27 | this.config = config 28 | 29 | // Bind 'this' object to all subfunctions. 30 | this.exampleTimerFunc = this.exampleTimerFunc.bind(this) 31 | 32 | // this.startTimers() 33 | } 34 | 35 | // Start all the time-based controllers. 36 | startTimers () { 37 | // Any new timer control functions can be added here. They will be started 38 | // when the server starts. 39 | this.optimizeWalletHandle = setInterval(this.exampleTimerFunc, 60000 * 10) 40 | 41 | return true 42 | } 43 | 44 | stopTimers () { 45 | clearInterval(this.optimizeWalletHandle) 46 | } 47 | 48 | // Replace this example function with your own timer handler. 
49 | exampleTimerFunc (negativeTest) { 50 | try { 51 | console.log('Example timer controller executed.') 52 | 53 | if (negativeTest) throw new Error('test error') 54 | 55 | return true 56 | } catch (err) { 57 | console.error('Error in exampleTimerFunc(): ', err) 58 | 59 | // Note: Do not throw an error. This is a top-level function. 60 | return false 61 | } 62 | } 63 | } 64 | 65 | export default TimerControllers 66 | -------------------------------------------------------------------------------- /test/unit/adapters/ipfs-index.adapter.unit.js: -------------------------------------------------------------------------------- 1 | /* 2 | Unit tests for the index.js file for the IPFS and ipfs-coord libraries. 3 | */ 4 | 5 | import { assert } from 'chai' 6 | 7 | import sinon from 'sinon' 8 | import IPFSLib from '../../../src/adapters/ipfs/index.js' 9 | // import create from '../mocks/ipfs-mock.js' 10 | import IPFSCoordMock from '../mocks/ipfs-coord-mock.js' 11 | 12 | describe('#IPFS-adapter-index', () => { 13 | let uut 14 | let sandbox 15 | 16 | beforeEach(() => { 17 | uut = new IPFSLib() 18 | 19 | sandbox = sinon.createSandbox() 20 | }) 21 | 22 | afterEach(() => sandbox.restore()) 23 | 24 | describe('#start', () => { 25 | it('should return a promise that resolves into an instance of IPFS.', async () => { 26 | // Mock dependencies. 
27 | uut.ipfsAdapter = { 28 | start: async () => {} 29 | } 30 | uut.IpfsCoordAdapter = IPFSCoordMock 31 | 32 | const result = await uut.start() 33 | 34 | assert.equal(result, true) 35 | }) 36 | 37 | it('should catch and throw an error', async () => { 38 | try { 39 | // Force an error 40 | sandbox.stub(uut.ipfsAdapter, 'start').rejects(new Error('test error')) 41 | 42 | await uut.start() 43 | 44 | assert.fail('Unexpected code path.') 45 | } catch (err) { 46 | // console.log(err) 47 | assert.include(err.message, 'test error') 48 | } 49 | }) 50 | 51 | it('should handle lock-file errors', async () => { 52 | try { 53 | // Force an error 54 | sandbox 55 | .stub(uut.ipfsAdapter, 'start') 56 | .rejects(new Error('Lock already being held')) 57 | 58 | // Prevent process from exiting 59 | sandbox.stub(uut.process, 'exit').returns() 60 | 61 | await uut.start() 62 | 63 | assert.fail('Unexpected code path.') 64 | } catch (err) { 65 | assert.include(err.message, 'Lock already being held') 66 | } 67 | }) 68 | }) 69 | }) 70 | -------------------------------------------------------------------------------- /test/unit/controllers/rest-api/logs/logs.rest.controller.unit.js: -------------------------------------------------------------------------------- 1 | /* 2 | Unit tests for the REST API handler for the /users endpoints. 3 | */ 4 | 5 | // Public npm libraries 6 | import { assert } from 'chai' 7 | 8 | import sinon from 'sinon' 9 | import LogsApiController from '../../../../../src/controllers/rest-api/logs/controller.js' 10 | 11 | import { context as mockContext } from '../../../../unit/mocks/ctx-mock.js' 12 | let uut 13 | let sandbox 14 | let ctx 15 | 16 | describe('Logapi', () => { 17 | before(async () => { 18 | }) 19 | 20 | beforeEach(() => { 21 | uut = new LogsApiController() 22 | 23 | sandbox = sinon.createSandbox() 24 | 25 | // Mock the context object. 
26 | ctx = mockContext() 27 | }) 28 | 29 | afterEach(() => sandbox.restore()) 30 | 31 | describe('#POST /logapi', () => { 32 | it('should return 422 status on biz logic error', async () => { 33 | try { 34 | await uut.getLogs(ctx) 35 | 36 | assert.fail('Unexpected result') 37 | } catch (err) { 38 | assert.equal(err.status, 422) 39 | assert.include(err.message, 'Cannot read') 40 | } 41 | }) 42 | 43 | it('should return 500 status on biz logic Unhandled error', async () => { 44 | try { 45 | // eslint-disable 46 | sandbox 47 | .stub(uut.logsApiLib, 'getLogs') 48 | .returns(Promise.reject(new Error())) 49 | 50 | ctx.request.body = { 51 | password: 'test' 52 | } 53 | 54 | await uut.getLogs(ctx) 55 | 56 | assert.fail('Unexpected result') 57 | } catch (err) { 58 | assert.equal(err.status, 500) 59 | assert.include(err.message, 'Unhandled error') 60 | } 61 | }) 62 | 63 | it('should return 200 status on success', async () => { 64 | // Mock dependencies 65 | sandbox.stub(uut.logsApiLib, 'getLogs').resolves({}) 66 | 67 | ctx.request.body = { 68 | password: 'test' 69 | } 70 | 71 | await uut.getLogs(ctx) 72 | 73 | assert.isOk(ctx.body) 74 | }) 75 | }) 76 | }) 77 | -------------------------------------------------------------------------------- /src/controllers/rest-api/logs/controller.js: -------------------------------------------------------------------------------- 1 | import LogsApiLib from '../../../adapters/logapi.js' 2 | const logsApiLib = new LogsApiLib() 3 | let _this 4 | 5 | class LogsApi { 6 | constructor () { 7 | _this = this 8 | _this.logsApiLib = logsApiLib 9 | } 10 | 11 | /** 12 | * @api {post} /logapi Parse and return the log files. 
13 | * @apiPermission public 14 | * @apiName LogApi 15 | * @apiGroup Logs 16 | * 17 | * @apiExample Example usage: 18 | * curl -H "Content-Type: application/json" -X POST -d '{ "password": "secretpasas" }' localhost:5000/logapi 19 | * 20 | * @apiParam {String} password Password (required) 21 | * 22 | * @apiSuccess {Array} users User object 23 | * @apiSuccess {ObjectId} users._id User id 24 | * @apiSuccess {String} user.type User type (admin or user) 25 | * @apiSuccess {String} users.name User name 26 | * @apiSuccess {String} users.username User username 27 | * 28 | * @apiSuccessExample {json} Success-Response: 29 | * HTTP/1.1 200 OK 30 | * { 31 | * "user": { 32 | * "_id": "56bd1da600a526986cf65c80" 33 | * "name": "John Doe" 34 | * "username": "johndoe" 35 | * } 36 | * } 37 | * 38 | * @apiError UnprocessableEntity Missing required parameters 39 | * 40 | * @apiErrorExample {json} Error-Response: 41 | * HTTP/1.1 422 Unprocessable Entity 42 | * { 43 | * "status": 422, 44 | * "error": "Unprocessable Entity" 45 | * } 46 | */ 47 | async getLogs (ctx) { 48 | try { 49 | // console.log('entering getLogs()') 50 | 51 | // Get the user-provided password. 52 | const password = ctx.request.body.password 53 | const result = await _this.logsApiLib.getLogs(password) 54 | ctx.body = result 55 | } catch (err) { 56 | if (err && err.message) { 57 | ctx.throw(422, err.message) 58 | } else { 59 | ctx.throw(500, 'Unhandled error') 60 | } 61 | } 62 | } 63 | } 64 | 65 | export default LogsApi 66 | -------------------------------------------------------------------------------- /test/unit/controllers/rest-api/rest.controller.unit.js: -------------------------------------------------------------------------------- 1 | /* 2 | Unit tests for the REST API controllers/rest-api/index.js library. 
3 | */ 4 | 5 | // Public npm libraries 6 | import { assert } from 'chai' 7 | import sinon from 'sinon' 8 | 9 | // Local libraries 10 | import RESTControllers from '../../../../src/controllers/rest-api/index.js' 11 | import adapters from '../../mocks/adapters/index.js' 12 | import UseCasesMock from '../../mocks/use-cases/index.js' 13 | 14 | describe('#RESTControllers', () => { 15 | let uut 16 | let sandbox 17 | // let ctx 18 | 19 | before(async () => {}) 20 | 21 | beforeEach(() => { 22 | const useCases = new UseCasesMock() 23 | uut = new RESTControllers({ adapters, useCases }) 24 | 25 | sandbox = sinon.createSandbox() 26 | 27 | // Mock the context object. 28 | // ctx = mockContext() 29 | }) 30 | 31 | afterEach(() => sandbox.restore()) 32 | 33 | describe('#constructor', () => { 34 | it('should throw an error if adapters are not passed in', () => { 35 | try { 36 | uut = new RESTControllers() 37 | 38 | assert.fail('Unexpected code path') 39 | } catch (err) { 40 | assert.include( 41 | err.message, 42 | 'Instance of Adapters library required when instantiating REST Controller libraries.' 43 | ) 44 | } 45 | }) 46 | 47 | it('should throw an error if useCases are not passed in', () => { 48 | try { 49 | uut = new RESTControllers({ adapters }) 50 | 51 | assert.fail('Unexpected code path') 52 | 53 | // use to prevent complaints from linter. 54 | console.log('uut: ', uut) 55 | } catch (err) { 56 | assert.include( 57 | err.message, 58 | 'Instance of Use Cases library required when instantiating REST Controller libraries.' 
59 | ) 60 | } 61 | }) 62 | }) 63 | 64 | describe('#attachRESTControllers', () => { 65 | it('should attach controllers to the app', () => { 66 | const app = { 67 | use: () => {} 68 | } 69 | 70 | const result = uut.attachRESTControllers(app) 71 | 72 | assert.equal(result, true) 73 | }) 74 | }) 75 | }) 76 | -------------------------------------------------------------------------------- /src/adapters/json-files.js: -------------------------------------------------------------------------------- 1 | /* 2 | A utility file for reading and writing JSON files. 3 | */ 4 | 5 | import fs from 'fs' 6 | 7 | let _this 8 | 9 | class JsonFiles { 10 | constructor () { 11 | this.fs = fs 12 | 13 | _this = this 14 | } 15 | 16 | // Writes out a JSON file of any object passed to the function. 17 | // This is used for testing. 18 | writeJSON (obj, fileName) { 19 | return new Promise(function (resolve, reject) { 20 | try { 21 | if (!obj) { 22 | throw new Error('obj property is required') 23 | } 24 | if (!fileName || typeof fileName !== 'string') { 25 | throw new Error('fileName property must be a string') 26 | } 27 | const fileStr = JSON.stringify(obj, null, 2) 28 | 29 | _this.fs.writeFile(fileName, fileStr, function (err) { 30 | if (err) { 31 | console.error('Error while trying to write file: ') 32 | throw err 33 | } else { 34 | // console.log(`${fileName} written successfully!`) 35 | return resolve() 36 | } 37 | }) 38 | } catch (err) { 39 | console.error('Error trying to write out object in util.js/_writeJSON().') 40 | return reject(err) 41 | } 42 | }) 43 | } 44 | 45 | readJSON (fileName) { 46 | return new Promise(function (resolve, reject) { 47 | try { 48 | if (!fileName || typeof fileName !== 'string') { 49 | throw new Error('fileName property must be a string') 50 | } 51 | 52 | _this.fs.readFile(fileName, (err, data) => { 53 | if (err) { 54 | if (err.code === 'ENOENT') { 55 | console.log('Admin .json file not found!') 56 | } else { 57 | console.log(`err: ${JSON.stringify(err, null, 
/*
  Controller for the /contact REST API endpoints.
*/

/* eslint-disable no-useless-escape */
import ContactLib from '../../../adapters/contact.js'

// NOTE(review): this disables TLS certificate validation process-wide, for
// every outbound HTTPS request the app makes — not just the mail server.
// Confirm this is intentional.
process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0'

const contactLib = new ContactLib()

// Module-level reference to the controller instance, so the handlers keep
// working when the router invokes them as unbound callbacks.
let _this

class ContactController {
  constructor () {
    _this = this
    _this.contactLib = contactLib
  }

  /**
   * @api {post} /contact/email Send Email
   * @apiName SendMail
   * @apiGroup Contact
   *
   * @apiExample Example usage:
   * curl -H "Content-Type: application/json" -X POST -d '{ "obj": { "email": "email@format.com", "formMessage": "a message" } }' localhost:5001/contact/email
   *
   * @apiParam {Object} obj object (required)
   * @apiParam {String} obj.email Sender Email.
   * @apiParam {String} obj.formMessage Message.
   *
   * @apiSuccessExample {json} Success-Response:
   * HTTP/1.1 200 OK
   * {
   *   "success": true
   * }
   *
   * @apiError UnprocessableEntity Missing required parameters
   *
   * @apiErrorExample {json} Error-Response:
   * HTTP/1.1 422 Unprocessable Entity
   * {
   *   "status": 422,
   *   "error": "Unprocessable Entity"
   * }
   */
  // POST /contact/email handler. Forwards the email object from the request
  // body to the contact adapter, then reports success.
  async email (ctx) {
    try {
      const body = ctx.request.body
      const emailObj = body.obj

      await _this.contactLib.sendEmail(emailObj)

      ctx.body = { success: true }
    } catch (err) {
      _this.handleError(ctx, err)
    }
  }

  // DRY error handler shared by the route handlers above.
  handleError (ctx, err) {
    // If an HTTP status is specified by the business logic, use that.
    if (err.status) {
      if (err.message) {
        ctx.throw(err.status, err.message)
      } else {
        ctx.throw(err.status)
      }
    } else {
      // By default use a 422 error when no HTTP status is specified.
      ctx.throw(422, err.message)
    }
  }
}
export default ContactController
/*
  This is a top-level library that encapsulates all the additional Adapters.
  The concept of Adapters comes from Clean Architecture:
  https://troutsblog.com/blog/clean-architecture
*/

// Public NPM libraries
import BCHJS from '@psf/bch-js'

// Load individual adapter libraries.
import IPFSAdapter from './ipfs/index.js'
import LogsAPI from './logapi.js'
import Nodemailer from './nodemailer.js'
import JSONFiles from './json-files.js'
import FullStackJWT from './fullstack-jwt.js'
import SlpIndexer from './slp-indexer/index.js'
import config from '../../config/index.js'

// Aggregates every adapter the app depends on behind a single object.
class Adapters {
  constructor (localConfig = {}) {
    // Encapsulate dependencies.
    this.ipfs = new IPFSAdapter()
    this.logapi = new LogsAPI()
    this.nodemailer = new Nodemailer()
    this.jsonFiles = new JSONFiles()
    this.bchjs = new BCHJS()
    this.config = config
    this.slpIndexer = new SlpIndexer()

    // JWT helper; start() may use it to swap in an authenticated bch-js.
    this.fullStackJwt = new FullStackJWT(config)
  }

  // Asynchronous startup: optionally obtain a JWT-backed bch-js instance,
  // then wire up the SLP indexer. Returns true on success.
  async start () {
    try {
      if (this.config.getJwtAtStartup) {
        // Instantiate bch-js with a JWT token and overwrite the placeholder
        // created in the constructor, so controllers and adapters downstream
        // all receive the authenticated instance.
        this.bchjs = await this.fullStackJwt.instanceBchjs()
      }

      // Wire up the SLP indexer.
      this.initIndexer()

      return true
    } catch (err) {
      console.error('Error in adapters/index.js/start()')
      throw err
    }
  }

  // Open the indexer databases and inject them into the SLP indexer.
  initIndexer () {
    console.log('Instantiating SlpIndexer() in adapters/index.js')

    const databases = this.slpIndexer.openDatabases()
    this.slpIndexer.encapsulateDeps(databases)

    return true
  }
}

export default Adapters
41 | ) 42 | } 43 | }) 44 | 45 | it('should throw an error if useCases are not passed in', () => { 46 | try { 47 | uut = new TimerControllers({ adapters }) 48 | 49 | assert.fail('Unexpected code path') 50 | } catch (err) { 51 | assert.include( 52 | err.message, 53 | 'Instance of Use Cases library required when instantiating Timer Controller libraries.' 54 | ) 55 | } 56 | }) 57 | }) 58 | 59 | describe('#startTimers', () => { 60 | it('should start the timers', () => { 61 | const result = uut.startTimers() 62 | 63 | uut.stopTimers() 64 | 65 | assert.equal(result, true) 66 | }) 67 | }) 68 | 69 | describe('#exampleTimerFunc', () => { 70 | it('should kick off the Use Case', async () => { 71 | const result = await uut.exampleTimerFunc() 72 | 73 | assert.equal(result, true) 74 | }) 75 | 76 | it('should return false on error', async () => { 77 | const result = await uut.exampleTimerFunc(true) 78 | 79 | assert.equal(result, false) 80 | }) 81 | }) 82 | }) 83 | -------------------------------------------------------------------------------- /test/unit/controllers/rest-api/logs/logs.rest.router.unit.js: -------------------------------------------------------------------------------- 1 | /* 2 | Unit tests for the REST API handler for the /users endpoints. 
3 | */ 4 | 5 | // Public npm libraries 6 | import { assert } from 'chai' 7 | 8 | import sinon from 'sinon' 9 | 10 | // Local support libraries 11 | import adapters from '../../../mocks/adapters/index.js' 12 | 13 | import UseCasesMock from '../../../mocks/use-cases/index.js' 14 | 15 | // const app = require('../../../mocks/app-mock') 16 | 17 | import LogsRouter from '../../../../../src/controllers/rest-api/logs/index.js' 18 | 19 | let uut 20 | let sandbox 21 | // let ctx 22 | 23 | // const mockContext = require('../../../../unit/mocks/ctx-mock').context 24 | 25 | describe('#Contact-REST-Router', () => { 26 | // const testUser = {} 27 | 28 | beforeEach(() => { 29 | const useCases = new UseCasesMock() 30 | uut = new LogsRouter({ adapters, useCases }) 31 | 32 | sandbox = sinon.createSandbox() 33 | 34 | // Mock the context object. 35 | // ctx = mockContext() 36 | }) 37 | 38 | afterEach(() => sandbox.restore()) 39 | 40 | describe('#constructor', () => { 41 | it('should throw an error if adapters are not passed in', () => { 42 | try { 43 | uut = new LogsRouter() 44 | 45 | assert.fail('Unexpected code path') 46 | } catch (err) { 47 | assert.include( 48 | err.message, 49 | 'Instance of Adapters library required when instantiating Logs REST Controller.' 50 | ) 51 | } 52 | }) 53 | 54 | it('should throw an error if useCases are not passed in', () => { 55 | try { 56 | uut = new LogsRouter({ adapters }) 57 | 58 | assert.fail('Unexpected code path') 59 | } catch (err) { 60 | assert.include( 61 | err.message, 62 | 'Instance of Use Cases library required when instantiating Logs REST Controller.' 63 | ) 64 | } 65 | }) 66 | }) 67 | 68 | describe('#attach', () => { 69 | it('should throw an error if app is not passed in.', () => { 70 | try { 71 | uut.attach() 72 | 73 | assert.fail('Unexpected code path') 74 | } catch (err) { 75 | assert.include( 76 | err.message, 77 | 'Must pass app object when attaching REST API controllers.' 
/*
  Unit tests for the REST API handler for the /contact endpoints.
*/

// Public npm libraries
import { assert } from 'chai'
import sinon from 'sinon'

// Local support libraries
import adapters from '../../../mocks/adapters/index.js'
import UseCasesMock from '../../../mocks/use-cases/index.js'
import ContactRouter from '../../../../../src/controllers/rest-api/contact/index.js'

let uut
let sandbox

describe('#Contact-REST-Router', () => {
  beforeEach(() => {
    sandbox = sinon.createSandbox()

    uut = new ContactRouter({ adapters, useCases: new UseCasesMock() })
  })

  afterEach(() => sandbox.restore())

  describe('#constructor', () => {
    it('should throw an error if adapters are not passed in', () => {
      try {
        uut = new ContactRouter()

        assert.fail('Unexpected code path')
      } catch (err) {
        assert.include(
          err.message,
          'Instance of Adapters library required when instantiating Contact REST Controller.'
        )
      }
    })

    it('should throw an error if useCases are not passed in', () => {
      try {
        uut = new ContactRouter({ adapters })

        assert.fail('Unexpected code path')
      } catch (err) {
        assert.include(
          err.message,
          'Instance of Use Cases library required when instantiating Contact REST Controller.'
        )
      }
    })
  })

  describe('#attach', () => {
    it('should throw an error if app is not passed in.', () => {
      try {
        uut.attach()

        assert.fail('Unexpected code path')
      } catch (err) {
        assert.include(
          err.message,
          'Must pass app object when attaching REST API controllers.'
        )
      }
    })
  })
})
/*
  Unit tests for the start-stop.js library
*/

import { assert } from 'chai'
import sinon from 'sinon'

import StartStop from '../../../../../src/adapters/slp-indexer/lib/start-stop.js'

// Stand-in for the global `process` object so the tests never touch the
// real stdin or exit the test runner.
const mockProcess = {
  stdin: {
    setRawMode: () => {},
    on: () => {}
  },
  exit: () => {}
}

describe('#start-stop', () => {
  let uut
  let sandbox

  beforeEach(() => {
    sandbox = sinon.createSandbox()

    uut = new StartStop()
  })

  afterEach(() => sandbox.restore())

  describe('#stopStatus', () => {
    it('should return false by default', () => {
      assert.equal(uut.stopStatus(), false)
    })

    it('should return true if stopIndexing is true', () => {
      uut.stopIndexing = true

      assert.equal(uut.stopStatus(), true)
    })
  })

  describe('#initStartStop', () => {
    it('should initialize stdin hooks', () => {
      // Swap in the mock process so the test completes without raw stdin.
      uut.process = mockProcess

      assert.equal(uut.initStartStop(), true)
    })

    it('should set raw mode if stdin is TTY', () => {
      uut.process = mockProcess
      uut.process.stdin.isTTY = true

      assert.equal(uut.initStartStop(), true)
    })
  })

  describe('#qDetected', () => {
    it('should set the stop flag if the q key is detected', () => {
      const key = { name: 'q' }

      assert.equal(uut.qDetected('', key), true)
      assert.equal(uut.stopIndexing, true)
    })

    it('should exit immediately if ctrl-c is detected', () => {
      uut.process = mockProcess

      const key = { name: 'c', ctrl: true }

      assert.equal(uut.qDetected('', key), true)
    })
  })
})
7 | */ 8 | 9 | const assert = require('chai').assert 10 | 11 | const RPC = require('../../../../../src/adapters/slp-indexer/lib/rpc') 12 | let uut 13 | 14 | describe('#rpc.js', () => { 15 | beforeEach(() => { 16 | uut = new RPC() 17 | }) 18 | 19 | describe('#getBlockCount', () => { 20 | it('should get current block height', async () => { 21 | const result = await uut.getBlockCount() 22 | // console.log('result: ', result) 23 | 24 | assert.isNumber(result) 25 | }) 26 | }) 27 | 28 | describe('#getBlockHeader', () => { 29 | it('should get the a block header', async () => { 30 | const hash = 31 | '0000000000000000008e8d83cba6d45a9314bc2ef4538d4e0577c6bed8593536' 32 | 33 | const result = await uut.getBlockHeader(hash) 34 | // console.log('result: ', result) 35 | 36 | assert.equal(result.height, 600000) 37 | }) 38 | }) 39 | 40 | describe('#getBlock', () => { 41 | it('should get the contents of a block', async () => { 42 | const hash = 43 | '0000000000000000008e8d83cba6d45a9314bc2ef4538d4e0577c6bed8593536' 44 | 45 | const result = await uut.getBlock(hash) 46 | // console.log('result: ', result) 47 | 48 | assert.equal(result.height, 600000) 49 | }) 50 | }) 51 | 52 | describe('#getBlockHash', () => { 53 | it('should get the contents of a block', async () => { 54 | const height = 600000 55 | const hash = 56 | '0000000000000000008e8d83cba6d45a9314bc2ef4538d4e0577c6bed8593536' 57 | 58 | const result = await uut.getBlockHash(height) 59 | // console.log('result: ', result) 60 | 61 | assert.equal(result, hash) 62 | }) 63 | }) 64 | 65 | describe('#getRawTransaction', () => { 66 | it('should get details on a transaction', async () => { 67 | const txid = 68 | 'ee9d3cf5153599c134147e3fac9844c68e216843f4452a1ce15a29452af6db34' 69 | 70 | const result = await uut.getRawTransaction(txid) 71 | // console.log('result: ', result) 72 | 73 | assert.equal(result.txid, txid) 74 | }) 75 | }) 76 | }) 77 | -------------------------------------------------------------------------------- 
/production/docker/README.md: -------------------------------------------------------------------------------- 1 | # Docker Containers 2 | 3 | The 'production' environment is assumed to be a set of Docker containers orchestrated with Docker Compose. The files in this directory will stand up three Docker containers: 4 | 5 | 1 An instance of go-ipfs. 6 | 2 An instance of MongoDB. 7 | 3 The JavaScript software in this repository. 8 | 9 | The software in this repository depends on the first two containers, so if they aren't running correctly, the application won't run correctly either. 10 | 11 | ## IPFS 12 | 13 | IPFS can be a little tricky to set up. By default, the container uses the following ports: 14 | 15 | - 4001 for TCP connections, exposed publicly. 16 | - 5001 for control by the application, exposed privately. 17 | - 8080 for an IPFS gateway, consumed by the application, exposed privately. 18 | 19 | If you already have an IPFS node running on the computer, you will need to change the ports to avoid a conflict. To change the ports from the default, you'll need to perform a series of steps, and the order of the steps matters. 20 | 21 | 1. Edit the `docker-compose.yml` file and change the ports. Then save the file. Here is an example: 22 | 23 | ``` 24 | ports: 25 | - 4101:4101 26 | - 172.17.0.1:5101:5101 27 | - 172.17.0.1:8180:8180 28 | ``` 29 | 30 | 2. Bring the Docker containers up, and then back down. This will allow the IPFS container to create the config file that you'll need to edit. 31 | 32 | - `docker-compose up -d` 33 | - Wait a few seconds. 34 | - `docker-compose down` 35 | 36 | 3.
Update the generated config file at `../data/go-ipfs/data/config`, to update the ports in the config file, like this: 37 | 38 | ``` 39 | "Addresses": { 40 | "API": "/ip4/0.0.0.0/tcp/5101", 41 | "Announce": [], 42 | "AppendAnnounce": [], 43 | "Gateway": "/ip4/0.0.0.0/tcp/8180", 44 | "NoAnnounce": [], 45 | "Swarm": [ 46 | "/ip4/0.0.0.0/tcp/4101", 47 | "/ip6/::/tcp/4101", 48 | "/ip4/0.0.0.0/udp/4101/quic", 49 | "/ip6/::/udp/4101/quic" 50 | ] 51 | }, 52 | 53 | ``` 54 | 55 | 4. Update the port changes in the `start-production.sh` shell script. This tells the application which ports to use, in order to control the IPFS node, are are used when signaling other nodes. 56 | 57 | 5. Quickly rebuild the containers, to add the modified `start-production.sh` shell script to the application Docker container: 58 | 59 | - `docker-compose build` 60 | 61 | 6. Now start the containers, and the port changes to IPFS should be complete. 62 | -------------------------------------------------------------------------------- /test/unit/controllers/rest-api/contact/contact.rest.controller.unit.js: -------------------------------------------------------------------------------- 1 | /* 2 | Unit tests for the REST API handler for the /users endpoints. 3 | */ 4 | 5 | // Public npm libraries 6 | import { assert } from 'chai' 7 | 8 | import sinon from 'sinon' 9 | import ContactController from '../../../../../src/controllers/rest-api/contact/controller.js' 10 | 11 | import { context as mockContext } from '../../../../unit/mocks/ctx-mock.js' 12 | let uut 13 | let sandbox 14 | let ctx 15 | 16 | describe('Contact', () => { 17 | before(async () => { 18 | }) 19 | 20 | beforeEach(() => { 21 | uut = new ContactController() 22 | 23 | sandbox = sinon.createSandbox() 24 | 25 | // Mock the context object. 
26 | ctx = mockContext() 27 | }) 28 | 29 | afterEach(() => sandbox.restore()) 30 | 31 | describe('#POST /contact', () => { 32 | it('should return 422 status on biz logic error', async () => { 33 | try { 34 | await uut.email(ctx) 35 | 36 | assert.fail('Unexpected result') 37 | } catch (err) { 38 | // console.log(err) 39 | assert.equal(err.status, 422) 40 | assert.include(err.message, 'Cannot read') 41 | } 42 | }) 43 | 44 | it('should return 200 status on success', async () => { 45 | sandbox.stub(uut.contactLib, 'sendEmail').resolves(true) 46 | 47 | ctx.request.body = { 48 | email: 'test02@test.com', 49 | formMessage: 'test' 50 | } 51 | 52 | await uut.email(ctx) 53 | 54 | // Assert the expected HTTP response 55 | assert.equal(ctx.status, 200) 56 | 57 | // Assert that expected properties exist in the returned data. 58 | assert.property(ctx.response.body, 'success') 59 | assert.isTrue(ctx.response.body.success) 60 | }) 61 | }) 62 | 63 | describe('#handleError', () => { 64 | it('should pass an error message', () => { 65 | try { 66 | const err = { 67 | status: 422, 68 | message: 'Unprocessable Entity' 69 | } 70 | 71 | uut.handleError(ctx, err) 72 | } catch (err) { 73 | assert.include(err.message, 'Unprocessable Entity') 74 | } 75 | }) 76 | 77 | it('should still throw error if there is no message', () => { 78 | try { 79 | const err = { 80 | status: 404 81 | } 82 | 83 | uut.handleError(ctx, err) 84 | } catch (err) { 85 | assert.include(err.message, 'Not Found') 86 | } 87 | }) 88 | }) 89 | }) 90 | -------------------------------------------------------------------------------- /src/adapters/slp-indexer/lib/start-stop.js: -------------------------------------------------------------------------------- 1 | /* 2 | This library controls the biz logic around starting and stopping the app. 3 | It handles graceful shutdown, detecting all the different SIG signals for 4 | shutting down. 
5 | */ 6 | 7 | // Public npm libraries 8 | // const readline = require('readline') 9 | import readline from 'readline' 10 | 11 | class StartStop { 12 | constructor () { 13 | // Encapsulate dependencies 14 | this.process = process 15 | 16 | this.stopIndexing = false 17 | 18 | // Bind 'this' object to all subfunctions. 19 | this.initStartStop = this.initStartStop.bind(this) 20 | this.stopStatus = this.stopStatus.bind(this) 21 | this.qDetected = this.qDetected.bind(this) 22 | } 23 | 24 | // Returns the value of the stopIndexing state variable. 25 | // The main app polls this function to determine if it should shut down. 26 | stopStatus () { 27 | return this.stopIndexing 28 | } 29 | 30 | initStartStop () { 31 | // Detect 'q' key to stop indexing. 32 | console.log("Press the 'q' key to stop indexing.") 33 | 34 | readline.emitKeypressEvents(process.stdin) 35 | 36 | if (this.process.stdin.isTTY) { 37 | this.process.stdin.setRawMode(true) 38 | } 39 | 40 | this.process.stdin.on('keypress', this.qDetected) 41 | // this.process.stdin.on('keypress', (str, key) => { 42 | // if (key.name === 'q') { 43 | // console.log( 44 | // 'q key detected. Will stop indexing after processing current block.' 45 | // ) 46 | // this.stopIndexing = true 47 | // } 48 | 49 | // // Exit immediately if Ctrl+C is pressed. 50 | // if (key.ctrl && key.name === 'c') { 51 | // this.process.exit(0) 52 | // } 53 | // }) 54 | 55 | // Return true to signal the function exited successfully. 56 | // return true 57 | // } 58 | 59 | return true 60 | } 61 | 62 | // This is a callback function that is called by the keypress event. It checks 63 | // to see if the 'q' key has been pressed. 64 | qDetected (str, key) { 65 | if (key.name === 'q') { 66 | console.log( 67 | 'q key detected. Will stop indexing after processing current block.' 68 | ) 69 | this.stopIndexing = true 70 | } 71 | 72 | // Exit immediately if Ctrl+C is pressed. 
/*
  Rate-limit adapter for the JSON-RPC interface, wrapping the koa2-ratelimit
  library so it can be driven without a real Koa HTTP context.
*/

// Local libraries
import { RateLimit as RateLimitLib } from 'koa2-ratelimit'

class RateLimit {
  constructor (options) {
    // Encapsulate dependencies
    this.RateLimitLib = RateLimitLib

    // Default rate limit: 60 requests per 1-minute interval.
    this.defaultOptions = {
      interval: { min: 1 },
      max: 60,
      onLimitReached: this.onLimitReached
    }

    // Minimal Koa-like context, reused for every call. The JSON-RPC
    // transport has no real Koa ctx, so limiter() populates this stand-in
    // with the caller's identity before each rate-limit check.
    this.context = {
      state: {
        user: ''
      },
      request: {
        ip: ''
      },
      user: '',
      set: () => {}
    }

    // Set rate limit settings. Default values are overwritten if the user
    // passes in an options object.
    this.rateLimitOptions = Object.assign({}, this.defaultOptions, options)
    this.rateLimit = this.RateLimitLib.middleware(this.rateLimitOptions)
  }

  // Called by koa2-ratelimit when a caller exceeds their rate limit.
  // Always throws a 429 error.
  // (Simplified: the previous implementation wrapped this in a try/catch
  // that only rethrew the same error — flagged by eslint's no-useless-catch.)
  onLimitReached () {
    const error = new Error('Too many requests, please try again later.')
    error.status = 429
    throw error
  }

  // This is the middleware function called by the router. `from` identifies
  // the caller. Returns true when the caller is under their limit; throws
  // (via onLimitReached) when the limit is exceeded.
  async limiter (from) {
    try {
      if (!from || typeof from !== 'string') {
        throw new Error('from must be a string')
      }

      // Set context.limiter
      // This overrides the default koa behavior and adapts the rate limiter
      // to work with the JSON RPC over IPFS: the caller is identified by
      // their 'from' string rather than an HTTP IP address.
      this.context.state.user = from
      this.context.request.ip = from
      this.context.user = from

      await this.rateLimit(this.context, () => {})

      return true
    } catch (error) {
      console.error('Error in rate-limit.js/limiter()')
      throw error
    }
  }
}

export default RateLimit
// Get the data needed to process a TXID.
// Retrieves the decoded SLP OP_RETURN data and the hydrated transaction data
// from the network via the module-level Transaction instance, then bundles
// them with the block height into the object shape expected by processTx().
async function getData (txid) {
  // Decode the SLP OP_RETURN output of the transaction.
  const slpData = await transaction.decodeOpReturn(txid)
  console.log(`slpData: ${JSON.stringify(slpData, null, 2)}`)

  // Get the full transaction data from the full node.
  const txData = await transaction.get(txid)
  console.log(`txData: ${JSON.stringify(txData, null, 2)}`)

  // NOTE(review): assumes transaction.get() attaches a lowercase
  // 'blockheight' property to the returned tx data - confirm.
  const blockHeight = txData.blockheight

  const data = { slpData, txData, blockHeight }

  return data
}
/*
  This class is used to generate a simple key-value (in-memory) cache of TX data.
  The 'key' is the txid. The 'value' is the tx data.

  Lookup order for get():
  1. The in-memory cache.
  2. The transaction LevelDB (txDb).
  3. The full node (via the Transaction library). Results from the full node
     are added to the in-memory cache, but NOT to the LevelDB.
*/

// Local libraries
import Transaction from './transaction.js'

class Cache {
  constructor (localConfig = {}) {
    // LevelDB instance holding validated transaction data. Required.
    this.txDb = localConfig.txDb
    if (!this.txDb) {
      // Fixed typo in the error message ('instantiationg').
      throw new Error(
        'Must include txDb when instantiating Transaction library'
      )
    }

    // Encapsulate dependencies
    this.transaction = new Transaction(localConfig)

    // State
    this.cache = {} // txid -> tx data
    this.cacheCnt = 0 // Number of entries added since the last flush.
  }

  // Save a new entry into the cache.
  put (key, value) {
    if (typeof key !== 'string') throw new Error('key must be a string')

    this.cache[key] = value
  }

  // Get the tx data for a txid. Checks the in-memory cache first, then the
  // database, and finally falls back to querying the full node.
  async get (key) {
    // Try to retrieve it from the cache.
    let txData = this.cache[key]

    // If the data existed in the cache, this function is done.
    if (txData) return txData

    // Try to get txData from the database.
    try {
      txData = await this.txDb.get(key)

      return txData
    } catch (err) {
      /* exit quietly - a DB miss is expected; fall through to the full node */
    }

    // Get TX Data from full node if it's not in the cache or database.
    txData = await this.transaction.get(key)

    // Save the data to the *local* cache.
    this.put(key, txData)

    // Dev note: Do not store the TX data in the TX Level DB at this point. A
    // determination about its SLP validity has not yet been made. That data
    // is assumed to be in any entry coming out of the LevelDB.

    this.cacheCnt++
    if (this.cacheCnt % 100 === 0) {
      console.log(`tx cache has ${this.cacheCnt} cached txs`)
    }

    // Flush the cache once it gets too big, to save on memory.
    if (this.cacheCnt > 1000000) {
      this.cache = {}
      this.cacheCnt = 0
    }

    return txData
  }

  // Delete an entry from the cache
  delete (key) {
    delete this.cache[key]
  }
}

export default Cache
34 | TXID 43cf6410d2b41a2087f38d83ba5340547a32dd99a5778e347667df2379708eee, 619563 35 | - uncontrolled burn. Some of these are used in txid 23279 36 | 37 | These are some of the TXIDs in-between: 38 | // const txid = '57e76d0d3d3b76f66ca4276642557eddc8e5c1b92355add6866da958ec39afe5' 39 | // const txid = '23279ab149da98673cf3677c9c6d90bbc4bad4a8de2c0baea7181e8fba08cccc' 40 | // const txid = 'de30610b68be8dae2d1627cd0e7f7c0e18d916bc8881bbbff074c4c2c8e58e73' 41 | // const txid = 'e74ed9a8593d521eb64e527ac12d1ab00c689c8440931079f6e50d37097d2f7c' 42 | // const txid = '58bbb866dd09bd348f20a367c706dd7c48cc8c642a28f4f9c442cb469f99aefb' 43 | const txid = '43cf6410d2b41a2087f38d83ba5340547a32dd99a5778e347667df2379708eee' 44 | // const TXID = 'ab406b2ddac910067e987c2d32bf5acf01396be4f5982555483e55a2975d609d' 45 | // const TXID = 'f36b94aa9e282d71ad9d578e4818c2d401eb928168793a8b04c3c2bb591d892b' 46 | // const TXID = 'c94faf77fc2cd7057eb78d258c9bed007266c212e18b8d12254daa69a1e4bed1' 47 | 48 | **UIOP2 token** 49 | - Token ID: 3257135d7c351f8b2f46ab2b5e610620beb7a957f3885ce1787cffa90582f503 50 | 51 | There is a pretty big discrepancy in the `totalBurned` value between psf-slp-indexer and SLPDB. This discrepancy is worth investigating.
52 | -------------------------------------------------------------------------------- /test/unit/adapters/slp-indexer/lib/query.unit.js: -------------------------------------------------------------------------------- 1 | /* 2 | Unit tests for the start-stop.js library 3 | */ 4 | 5 | import { assert } from 'chai' 6 | import sinon from 'sinon' 7 | 8 | import Query from '../../../../../src/adapters/slp-indexer/lib/query.js' 9 | import MockLevel from '../../../../unit/mocks/leveldb-mock.js' 10 | 11 | describe('#query', () => { 12 | let uut, sandbox 13 | 14 | beforeEach(() => { 15 | sandbox = sinon.createSandbox() 16 | 17 | const addrDb = new MockLevel() 18 | const tokenDb = new MockLevel() 19 | const txDb = new MockLevel() 20 | const statusDb = new MockLevel() 21 | const pTxDb = new MockLevel() 22 | const localConfig = { addrDb, tokenDb, txDb, statusDb, pTxDb } 23 | 24 | uut = new Query(localConfig) 25 | }) 26 | 27 | afterEach(() => sandbox.restore()) 28 | 29 | describe('#getAddress', () => { 30 | it('should get an address from the database', async () => { 31 | sandbox.stub(uut.addrDb, 'get').resolves(true) 32 | 33 | const result = await uut.getAddress('fake-addr') 34 | 35 | assert.equal(result, true) 36 | }) 37 | 38 | it('should throw an error if address is not passed in', async () => { 39 | try { 40 | await uut.getAddress() 41 | 42 | assert.fail('Unexpected result') 43 | } catch (err) { 44 | assert.equal(err.message, 'Address required when calling getAddress()') 45 | } 46 | }) 47 | }) 48 | 49 | describe('#getTx', () => { 50 | it('should get a transaction from the database', async () => { 51 | sandbox.stub(uut.txDb, 'get').resolves(true) 52 | 53 | const result = await uut.getTx('fake-txid') 54 | 55 | assert.equal(result, true) 56 | }) 57 | 58 | it('should throw an error if txid is not passed in', async () => { 59 | try { 60 | await uut.getTx() 61 | 62 | assert.fail('Unexpected result') 63 | } catch (err) { 64 | assert.equal(err.message, 'txid required when calling 
/*
  Instantiates and configures the Winston logging library. This utility library
  can be called by other parts of the application to conveniently tap into the
  logging library.
*/

'use strict'

// Global npm libraries
import winston from 'winston'
import 'winston-daily-rotate-file'

// Local libraries
import config from '../../config/index.js'

// Hack to get __dirname back.
// https://blog.logrocket.com/alternatives-dirname-node-js-es-modules/
import * as url from 'url'
const __dirname = url.fileURLToPath(new URL('.', import.meta.url))

class Wlogger {
  constructor (localConfig = {}) {
    this.config = config

    // Bind 'this' to all subfunctions *before* any of them are registered as
    // event handlers. BUG FIX: previously the binds happened at the end of the
    // constructor, after the unbound notifyRotation() had already been attached
    // to the 'rotate' event - so 'this.wlogger' would be undefined when the
    // rotation event actually fired.
    this.notifyRotation = this.notifyRotation.bind(this)
    this.outputToConsole = this.outputToConsole.bind(this)

    // Configure daily-rotation transport.
    this.transport = new winston.transports.DailyRotateFile({
      filename: `${__dirname.toString()}/../../logs/koa-${
        this.config.env
      }-%DATE%.log`,
      datePattern: 'YYYY-MM-DD',
      zippedArchive: false,
      maxSize: '1m', // 1 megabyte
      maxFiles: '5d', // 5 days
      format: winston.format.combine(
        winston.format.timestamp(),
        winston.format.json()
      )
    })

    // Log a message whenever the log files rotate.
    this.transport.on('rotate', this.notifyRotation)

    // This controls what goes into the log FILES
    this.wlogger = winston.createLogger({
      level: 'verbose',
      format: winston.format.json(),
      transports: [
        //
        // - Write to all logs with level `info` and below to `combined.log`
        // - Write all logs error (and below) to `error.log`.
        //
        this.transport
      ]
    })
  }

  // Event handler for the transport's 'rotate' event.
  notifyRotation (oldFilename, newFilename) {
    this.wlogger.info('Rotating log files')
  }

  // Add a console transport, so log output also appears on stdout.
  outputToConsole () {
    this.wlogger.add(
      new winston.transports.Console({
        format: winston.format.simple(),
        level: 'info'
      })
    )
  }
}

const logger = new Wlogger()

// Allow the logger to write to the console.
logger.outputToConsole()

const wlogger = logger.wlogger

export { wlogger as default, Wlogger }
/*
  This is a top-level library that encapsulates all the additional Controllers.
  The concept of Controllers comes from Clean Architecture:
  https://troutsblog.com/blog/clean-architecture
*/

// Local libraries
import Adapters from '../adapters/index.js'
// import JSONRPC from './json-rpc/index.js'
import UseCases from '../use-cases/index.js'
import RESTControllers from './rest-api/index.js'
import TimerControllers from './timer-controllers.js'
import config from '../../config/index.js'

class Controllers {
  constructor (localConfig = {}) {
    // Encapsulate dependencies
    this.adapters = new Adapters()
    this.useCases = new UseCases({ adapters: this.adapters })
    this.timerControllers = new TimerControllers({
      adapters: this.adapters,
      useCases: this.useCases
    })
    this.config = config
  }

  // Spin up any adapter libraries that have async startup needs.
  async initAdapters () {
    await this.adapters.start()

    return true
  }

  // Run any Use Cases to startup the app.
  async initUseCases () {
    await this.useCases.start()

    return true
  }

  // Top-level function for this library.
  // Attach the REST API Controllers associated with the boilerplate code to
  // the Koa app.
  attachRESTControllers (app) {
    const restControllers = new RESTControllers({
      adapters: this.adapters,
      useCases: this.useCases
    })

    restControllers.attachRESTControllers(app)

    return true
  }

  // Attach any other controllers other than REST API controllers.
  async attachControllers (app) {
    if (this.config.useIpfs) {
      // Attach JSON RPC controllers
      this.attachRPCControllers()
    }

    // Attach and start the timer controllers
    this.timerControllers.startTimers()

    // Return true for consistency with the other init/attach methods.
    return true
  }

  // Add the JSON RPC router to the ipfs-coord adapter.
  // NOTE: currently a no-op placeholder; the JSON RPC wiring below is
  // intentionally disabled until the JSONRPC controller is re-enabled.
  attachRPCControllers () {
    // const jsonRpcController = new JSONRPC({
    //   adapters: this.adapters,
    //   useCases: this.useCases
    // })
    //
    // // Attach the input of the JSON RPC router to the output of ipfs-coord.
    // this.adapters.ipfs.ipfsCoordAdapter.attachRPCRouter(
    //   jsonRpcController.router
    // )
  }
}

export default Controllers
20 | RUN useradd -ms /bin/bash safeuser 21 | RUN adduser safeuser sudo 22 | 23 | #Set password to 'password' change value below if you want a different password 24 | RUN echo safeuser:password | chpasswd 25 | 26 | #Set the working directory to be the home directory 27 | WORKDIR /home/safeuser 28 | 29 | #Setup NPM for non-root global install 30 | RUN mkdir /home/safeuser/.npm-global 31 | RUN chown -R safeuser .npm-global 32 | RUN echo "export PATH=~/.npm-global/bin:$PATH" >> /home/safeuser/.profile 33 | RUN runuser -l safeuser -c "npm config set prefix '~/.npm-global'" 34 | 35 | # Update to the latest version of npm. 36 | #RUN npm install -g npm@8.3.0 37 | 38 | # npm mirror to prevent direct dependency on npm. 39 | #RUN npm set registry http://94.130.170.209:4873/ 40 | 41 | # Switch to user account. 42 | #USER safeuser 43 | # Prep 'sudo' commands. 44 | #RUN echo 'abcd8765' | sudo -S pwd 45 | 46 | #RUN npm install -g node-gyp 47 | 48 | # Clone the rest.bitcoin.com repository 49 | WORKDIR /home/safeuser 50 | RUN git clone https://github.com/Permissionless-Software-Foundation/psf-slp-indexer 51 | 52 | # Switch to the desired branch. `master` is usually stable, 53 | # and `stage` has the most up-to-date changes. 54 | WORKDIR /home/safeuser/psf-slp-indexer 55 | 56 | # For development: switch to unstable branch 57 | #RUN git checkout pin-ipfs 58 | 59 | # Install dependencies 60 | RUN npm install 61 | 62 | # Generate the API docs 63 | RUN npm run docs 64 | 65 | VOLUME /home/safeuser/keys 66 | 67 | # Make leveldb folders 68 | #RUN mkdir leveldb 69 | #WORKDIR /home/safeuser/psf-slp-indexer/leveldb 70 | #RUN mkdir current 71 | #RUN mkdir zips 72 | #RUN mkdir backup 73 | #WORKDIR /home/safeuser/psf-slp-indexer/leveldb/zips 74 | COPY restore-auto.sh restore-auto.sh 75 | #WORKDIR /home/safeuser/psf-slp-indexer 76 | 77 | # Expose the port the API will be served on. 78 | EXPOSE 5010 79 | 80 | # Start the application. 
/*
  This library contains utility functions for cleaning up the processed tx db.
  'processed' transactions are txs that have already been processed. This lets
  the indexer safely transition between phase 1 (bulk indexing) and phase 2
  (ZMQ real-time indexing), and maintain a consistent state.
*/

class ManagePTXDB {
  constructor (localConfig = {}) {
    // LevelDB instance tracking processed txs. Required dependency.
    this.pTxDb = localConfig.pTxDb
    if (!this.pTxDb) {
      throw new Error(
        'Must pass instance of pTxDb when instantiating ManagePTXDB lib'
      )
    }

    // State
    this.keys = [] // Keys (txids) collected from the database stream.
    this.cleanCnt = 0 // Entries deleted during the current clean pass.

    // Add 'this' object to all subfunctions
    this.getAllTxs = this.getAllTxs.bind(this)
    this.cleanPTXDB = this.cleanPTXDB.bind(this)
    this.readFromStream = this.readFromStream.bind(this)
    this.endStream = this.endStream.bind(this)
  }

  // Return a promise, which resolves to true when all txs have been collected
  // from the database and stored in this.keys array.
  getAllTxs (isTest = false) {
    return new Promise((resolve) => {
      const stream = this.pTxDb.createReadStream()

      stream.on('data', this.readFromStream)

      // BUG FIX: the handler must be *deferred* until the 'end' event fires.
      // Previously `this.endStream(resolve)` was invoked immediately at
      // registration time, resolving the promise before the stream had
      // delivered any keys - so cleanPTXDB() iterated an empty key list.
      stream.on('end', () => this.endStream(resolve))

      // Allows unit tests to resolve immediately without a real stream.
      if (isTest) return resolve(true)
    })
  }

  // Stream 'data' handler: collect each database key.
  readFromStream (data) {
    this.keys.push(data.key)
  }

  // Stream 'end' handler: resolve the getAllTxs() promise.
  endStream (resolve) {
    return resolve(true)
  }

  // Remove entries in the DB that are old and not needed.
  // Deletes any processed-tx entry whose recorded block height is more than
  // 10 blocks older than the given blockHeight. Returns true on success.
  async cleanPTXDB (blockHeight) {
    try {
      // Get all TX keys in the database.
      await this.getAllTxs()

      // Keep a 10-block buffer of recent entries.
      const cutoff = blockHeight - 10

      // Loop through each TX in the database.
      for (let i = 0; i < this.keys.length; i++) {
        const thisKey = this.keys[i]

        let value
        try {
          value = await this.pTxDb.get(thisKey)
        } catch (err) {
          // Skip if value can't be found.
          continue
        }

        // If the value is older than the cutoff, delete the db entry.
        if (value <= cutoff) {
          try {
            await this.pTxDb.del(thisKey)
            this.cleanCnt++
          } catch (err) {
            console.log(`Could not delete ${thisKey} from the pTxDB`)
          }
        }
      }

      console.log(`Cleaned ${this.cleanCnt} entries from the pTxDb.`)
      this.cleanCnt = 0
      this.keys = []

      return true
    } catch (err) {
      console.error('Error in cleanPTXDB()')
      throw err
    }
  }
}

export default ManagePTXDB
/*
  This stand-alone app is used to compare two transaction maps, and combine
  them.

  Run this command by increasing the memory allocation for node.js:
  node --max_old_space_size=28000 combine-maps.js
*/

const map1 = require('./tx-map.json')
const map2 = require('./tx-map-new.json')

const fs = require('fs')

const combinedMap = []

// Build a Map of height -> entry, keeping the FIRST entry for any duplicate
// height (matching the original 'first match wins' filter()[0] behavior).
function indexByHeight (txMap) {
  const idx = new Map()
  for (const entry of txMap) {
    if (!idx.has(entry.height)) idx.set(entry.height, entry)
  }
  return idx
}

// Merge the two tx maps into combinedMap and write the result to disk.
async function combineMaps () {
  try {
    // Index each map by block height for O(1) lookup. The previous
    // implementation ran filter() over both full arrays for every height,
    // which was accidentally O(n^2) on large maps.
    const map1ByHeight = indexByHeight(map1)
    const map2ByHeight = indexByHeight(map2)

    // Combine both height lists and remove duplicates. Set preserves
    // insertion order: all of map1's heights first, then map2's new ones.
    const allHeights = [...new Set([...map1ByHeight.keys(), ...map2ByHeight.keys()])]

    // Loop through all block heights.
    for (let i = 0; i < allHeights.length; i++) {
      const thisHeight = allHeights[i]

      const entry1 = map1ByHeight.get(thisHeight)
      const entry2 = map2ByHeight.get(thisHeight)

      // If map1 has no txs for this height, just use map2.
      if (!entry1) {
        combinedMap.push({ height: thisHeight, txs: entry2.txs })
        continue
      }

      // If map2 has no txs for this height, just use map1.
      if (!entry2) {
        combinedMap.push({ height: thisHeight, txs: entry1.txs })
        continue
      }

      // Combine transactions from both maps and remove duplicates.
      const allTxs = [...new Set(entry1.txs.concat(entry2.txs))]

      combinedMap.push({ height: thisHeight, txs: allTxs })
    }

    console.log(`map1Heights: ${map1ByHeight.size}, map2Heights: ${map2ByHeight.size}, combinedMap: ${combinedMap.length}`)

    fs.writeFileSync('./out-combined-map.json', JSON.stringify(combinedMap, null, 2))
  } catch (err) {
    console.error(err)
  }
}
combineMaps()
31 | this.txDb = this.level(`${__dirname.toString()}/../../../../leveldb/current/txs`, { 32 | valueEncoding: 'json', 33 | cacheSize: 1 * 1024 * 1024 * 1024 // 1 GB 34 | }) 35 | 36 | // Token Stats database. 37 | this.tokenDb = this.level( 38 | `${__dirname.toString()}/../../../../leveldb/current/tokens`, 39 | { 40 | valueEncoding: 'json' 41 | } 42 | ) 43 | 44 | // Tracks the sync status of the indexer. 45 | this.statusDb = this.level( 46 | `${__dirname.toString()}/../../../../leveldb/current/status`, 47 | { 48 | valueEncoding: 'json' 49 | } 50 | ) 51 | 52 | // Processed transaction database. Used to detect transactions that have 53 | // already been processed. 54 | this.pTxDb = this.level(`${__dirname.toString()}/../../../../leveldb/current/ptxs`, { 55 | valueEncoding: 'json' 56 | }) 57 | 58 | // The UTXO database is used as a sort of reverse-lookup. The key is the TXID 59 | // plus vout, in this format: 'txid:vout'. 60 | // and the value is the vout and address. This can be used to lookup what 61 | // address possesses the UTXO. This makes handling of 'controlled burn' txs 62 | // much faster. 63 | this.utxoDb = this.level(`${__dirname.toString()}/../../../../leveldb/current/utxos`, { 64 | valueEncoding: 'json' 65 | }) 66 | 67 | // Pin Claims are on-chain payments for pinning IPFS content. 68 | this.pinClaimDb = this.level(`${__dirname.toString()}/../../../../leveldb/current/pinClaim`, { 69 | valueEncoding: 'json' 70 | }) 71 | 72 | return { 73 | addrDb: this.addrDb, 74 | txDb: this.txDb, 75 | tokenDb: this.tokenDb, 76 | statusDb: this.statusDb, 77 | pTxDb: this.pTxDb, 78 | utxoDb: this.utxoDb, 79 | pinClaimDb: this.pinClaimDb 80 | } 81 | } 82 | 83 | // Cleanly close the open databases. 
/*
  A library of utility functions for working with FullStack.cash JWT tokens.

  Feel free to copy this library into your own app, as well as the unit tests
  for this file.
*/

import JwtLib from 'jwt-bch-lib'

import BCHJS from '@psf/bch-js'

class FullStackJWT {
  constructor (localConfig = {}) {
    // Input Validation
    this.authServer = localConfig.authServer
    if (!this.authServer || typeof this.authServer !== 'string') {
      throw new Error(
        'Must pass a url for the AUTH server when instantiating FullStackJWT class.'
      )
    }
    this.apiServer = localConfig.apiServer
    if (!this.apiServer || typeof this.apiServer !== 'string') {
      throw new Error(
        'Must pass a url for the API server when instantiating FullStackJWT class.'
      )
    }
    this.login = localConfig.fullstackLogin
    if (!this.login || typeof this.login !== 'string') {
      // Fixed grammar in the error message (was missing 'when').
      throw new Error(
        'Must pass a FullStack.cash login (email) when instantiating FullStackJWT class.'
      )
    }
    this.password = localConfig.fullstackPassword
    if (!this.password || typeof this.password !== 'string') {
      throw new Error(
        'Must pass a FullStack.cash account password when instantiating FullStackJWT class.'
      )
    }

    // Encapsulate dependencies
    this.jwtLib = new JwtLib({
      // Overwrite default values with the values in the config file.
      server: this.authServer,
      login: this.login,
      password: this.password
    })

    // State
    this.apiToken = '' // Default value.
    this.bchjs = {}
  }

  // Gets a JWT token from FullStack.cash. Logs into the auth server,
  // validates any existing token, and renews it if it has expired.
  // Returns the (validated) JWT token string.
  async getJWT () {
    try {
      // Log into the auth server.
      await this.jwtLib.register()

      this.apiToken = this.jwtLib.userData.apiToken
      if (!this.apiToken) {
        throw new Error('This account does not have a JWT')
      }
      console.log(`Retrieved JWT token: ${this.apiToken}\n`)

      // Ensure the JWT token is valid to use.
      const isValid = await this.jwtLib.validateApiToken()

      // Get a new token with the same API level, if the existing token is not
      // valid (probably expired).
      if (!isValid.isValid) {
        this.apiToken = await this.jwtLib.getApiToken(
          this.jwtLib.userData.apiLevel
        )
        console.log(
          `The JWT token was not valid. Retrieved new JWT token: ${this.apiToken}\n`
        )
      } else {
        console.log('JWT token is valid.\n')
      }

      return this.apiToken
    } catch (err) {
      // BUG FIX: this previously referenced this.server, which is never set
      // by the constructor (it sets this.authServer), so the log printed
      // 'undefined'.
      console.error(
        `Error trying to log into ${this.authServer} and retrieve JWT token.`
      )
      throw err
    }
  }

  // Create an instance of bchjs with the validated JWT token. Returns this
  // instance of bch-js.
  instanceBchjs () {
    this.bchjs = new BCHJS({
      restURL: this.apiServer,
      apiToken: this.apiToken
    })

    return this.bchjs
  }
}

export default FullStackJWT
/*
  Unit tests for the JSON RPC rate-limit middleware.

  TODO: ensureTargetUserOrAdmin: it should exit quietly if user is an admin.
*/

// Public npm libraries
import sinon from 'sinon'

import { assert } from 'chai'

// Local libraries
import RateLimit from '../../../../src/controllers/json-rpc/rate-limit.js'

// Set the environment variable to signal this is a test.
process.env.SVC_ENV = 'test'

describe('#rate-limit', () => {
  let uut
  let sandbox

  beforeEach(() => {
    sandbox = sinon.createSandbox()

    uut = new RateLimit()
  })

  afterEach(() => sandbox.restore())

  describe('#constructor', () => {
    // Verifies that options passed to the constructor override the defaults,
    // and that the custom onLimitReached handler fires once 'max' is exceeded.
    // The happy path runs inside the try; the assertions on the thrown error
    // happen in the catch block.
    it('should use the options provided', async () => {
      try {
        const options = {
          interval: { min: 10 },
          delayAfter: 1,
          timeWait: { sec: 5 },
          max: 2,
          onLimitReached: () => {
            throw new Error('custom message error')
          }
        }
        const _uut = new RateLimit(options)

        // Assert options
        assert.equal(_uut.rateLimitOptions.interval.min, options.interval.min)
        assert.equal(_uut.rateLimitOptions.delayAfter, options.delayAfter)
        assert.equal(_uut.rateLimitOptions.timeWait.sec, options.timeWait.sec)

        const from = 'constructor test'
        const firstRequest = await _uut.limiter(from)
        assert.isTrue(firstRequest)

        const secondRequest = await _uut.limiter(from)
        assert.isTrue(secondRequest)

        // Third call exceeds max (2); the custom onLimitReached should throw.
        await _uut.limiter(from)
        assert.fail('unexpected error')
      } catch (error) {
        assert.include(error.message, 'custom message error')
      }
    })
  })

  describe('#onLimitReached', () => {
    // The handler should throw a 429 error with a user-facing message.
    it('should throw error', async () => {
      try {
        uut.onLimitReached()
        assert.fail('unexpected error')
      } catch (error) {
        assert.equal(error.status, 429)
        assert.include(
          error.message,
          'Too many requests, please try again later.'
        )
      }
    })
  })

  describe('#limiter', () => {
    // Input validation: 'from' is required and must be a string.
    it('should throw error if "from" input is not provider', async () => {
      try {
        await uut.limiter()
        assert.fail('unexpected error')
      } catch (error) {
        assert.include(error.message, 'from must be a string')
      }
    })

    // With the default onLimitReached handler, exceeding 'max' should produce
    // the standard 429 'Too many requests' error.
    it('should throw error 429', async () => {
      try {
        const _uut = new RateLimit({ max: 1 })
        const from = 'Origin request'

        const firstRequest = await _uut.limiter(from)
        assert.isTrue(firstRequest)

        const secondRequest = await _uut.limiter(from)
        assert.isTrue(secondRequest)

        await _uut.limiter(from)
        assert.fail('unexpected error')
      } catch (error) {
        assert.include(
          error.message,
          'Too many requests, please try again later.'
        )
      }
    })
  })
})
3 | */ 4 | 5 | import { assert } from 'chai' 6 | 7 | import sinon from 'sinon' 8 | import IPFSLib from '../../../src/adapters/ipfs/ipfs.js' 9 | import create from '../mocks/ipfs-mock.js' 10 | import config from '../../../config/index.js' 11 | 12 | // config.isProduction = true; 13 | describe('#IPFS-adapter', () => { 14 | let uut 15 | let sandbox 16 | 17 | beforeEach(() => { 18 | uut = new IPFSLib() 19 | 20 | sandbox = sinon.createSandbox() 21 | }) 22 | 23 | afterEach(() => { 24 | sandbox.restore() 25 | }) 26 | 27 | describe('#constructor', () => { 28 | it('should instantiate IPFS Lib in dev mode.', async () => { 29 | const _uut = new IPFSLib() 30 | assert.exists(_uut) 31 | assert.isFunction(_uut.start) 32 | assert.isFunction(_uut.stop) 33 | }) 34 | 35 | it('should instantiate dev IPFS Lib in production mode.', async () => { 36 | config.isProduction = true 37 | const _uut = new IPFSLib() 38 | assert.exists(_uut) 39 | assert.isFunction(_uut.start) 40 | assert.isFunction(_uut.stop) 41 | config.isProduction = false 42 | }) 43 | }) 44 | 45 | describe('#start', () => { 46 | it('should return a promise that resolves into an instance of IPFS.', async () => { 47 | // Mock dependencies. 48 | uut.create = create 49 | 50 | const result = await uut.start() 51 | // console.log('result: ', result) 52 | 53 | assert.equal(uut.isReady, true) 54 | 55 | assert.property(result, 'config') 56 | }) 57 | 58 | it('should return a promise that resolves into an instance of IPFS in production mode.', async () => { 59 | // Mock dependencies. 
60 | uut.create = create 61 | uut.config.isProduction = true 62 | const result = await uut.start() 63 | // console.log('result: ', result) 64 | 65 | assert.equal(uut.isReady, true) 66 | 67 | assert.property(result, 'config') 68 | }) 69 | 70 | it('should catch and throw an error', async () => { 71 | try { 72 | // Force an error 73 | sandbox.stub(uut, 'create').rejects(new Error('test error')) 74 | 75 | await uut.start() 76 | 77 | assert.fail('Unexpected code path.') 78 | } catch (err) { 79 | // console.log(err) 80 | assert.include(err.message, 'test error') 81 | } 82 | }) 83 | }) 84 | 85 | describe('#stop', () => { 86 | it('should stop the IPFS node', async () => { 87 | // Mock dependencies 88 | uut.ipfs = { 89 | stop: () => { 90 | } 91 | } 92 | 93 | const result = await uut.stop() 94 | 95 | assert.equal(result, true) 96 | }) 97 | }) 98 | 99 | // describe('#rmBlocksDir', () => { 100 | // it('should delete the /blocks directory', () => { 101 | // const result = uut.rmBlocksDir() 102 | // 103 | // assert.equal(result, true) 104 | // }) 105 | // 106 | // it('should catch and throw an error', () => { 107 | // try { 108 | // // Force an error 109 | // sandbox.stub(uut.fs, 'rmdirSync').throws(new Error('test error')) 110 | // 111 | // uut.rmBlocksDir() 112 | // 113 | // assert.fail('Unexpected code path') 114 | // } catch (err) { 115 | // assert.equal(err.message, 'test error') 116 | // } 117 | // }) 118 | // }) 119 | }) 120 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "psf-slp-indexer", 3 | "version": "3.0.7", 4 | "description": "Indexer for validating SLP transactions. 
Uses LevelDB.", 5 | "main": "index.js", 6 | "type": "module", 7 | "scripts": { 8 | "start": "node --max_old_space_size=16000 index.js", 9 | "test": "npm run test:all", 10 | "test:all": "export SVC_ENV=test && c8 --reporter=text mocha --exit --timeout 15000 --recursive test/unit/", 11 | "test:unit": "export SVC_ENV=test && c8 --reporter=text mocha --exit --timeout 15000 --recursive test/unit/", 12 | "test:temp": "export SVC_ENV=test && mocha --exit --timeout 15000 -g '#rate-limit' test/unit/json-rpc/", 13 | "lint": "standard --env mocha --fix", 14 | "docs": "./node_modules/.bin/apidoc -i src/ -o docs", 15 | "coverage": "c8 report --reporter=text-lcov | coveralls", 16 | "coverage:report": "export SVC_ENV=test && c8 --reporter=html mocha --exit --timeout 15000 --recursive test/unit/ test/e2e/automated/", 17 | "reindex": "node --max_old_space_size=16000 src/adapters/slp-indexer/re-index.js" 18 | }, 19 | "author": "Chris Troutner ", 20 | "license": "GPL-2.0", 21 | "apidoc": { 22 | "title": "psf-slp-indexer", 23 | "url": "localhost:5000" 24 | }, 25 | "repository": "Permissionless-Software-Foundation/psf-slp-indexer", 26 | "dependencies": { 27 | "@chris.troutner/retry-queue-commonjs": "1.0.8", 28 | "@chris.troutner/slp-validate": "1.2.2", 29 | "@psf/bch-js": "6.7.3", 30 | "@psf/bitcoincash-zmq-decoder": "0.1.5", 31 | "axios": "0.27.2", 32 | "bcryptjs": "2.4.3", 33 | "bignumber.js": "9.0.1", 34 | "bitcoin-rpc-promise-retry": "1.3.0", 35 | "glob": "7.1.6", 36 | "ipfs-coord-esm": "9.1.13", 37 | "ipfs-http-client": "58.0.0", 38 | "jsonrpc-lite": "2.2.0", 39 | "jsonwebtoken": "8.5.1", 40 | "jwt-bch-lib": "1.3.0", 41 | "kcors": "2.2.2", 42 | "koa": "2.13.1", 43 | "koa-bodyparser": "4.3.0", 44 | "koa-convert": "2.0.0", 45 | "koa-generic-session": "2.1.1", 46 | "koa-logger": "3.2.1", 47 | "koa-mount": "4.0.0", 48 | "koa-passport": "4.1.3", 49 | "koa-router": "10.0.0", 50 | "koa-static": "5.0.0", 51 | "koa2-ratelimit": "0.9.1", 52 | "level": "7.0.1", 53 | "line-reader": "0.4.0", 
54 | "minimal-slp-wallet": "5.11.1", 55 | "mongoose": "5.13.14", 56 | "node-fetch": "npm:@achingbrain/node-fetch@2.6.7", 57 | "nodemailer": "6.7.5", 58 | "p-queue": "7.4.1", 59 | "p-retry": "6.0.0", 60 | "passport-local": "1.0.0", 61 | "public-ip": "4.0.4", 62 | "readline": "1.3.0", 63 | "shelljs": "0.8.4", 64 | "slp-parser": "0.0.4", 65 | "winston": "3.3.3", 66 | "winston-daily-rotate-file": "4.5.0", 67 | "zeromq": "6.5.0" 68 | }, 69 | "devDependencies": { 70 | "apidoc": "0.51.1", 71 | "c8": "7.12.0", 72 | "chai": "4.3.0", 73 | "coveralls": "3.1.0", 74 | "husky": "4.3.8", 75 | "lodash.clonedeep": "^4.5.0", 76 | "mocha": "10.0.0", 77 | "semantic-release": "19.0.3", 78 | "sinon": "9.2.4", 79 | "standard": "17.0.0", 80 | "uuid": "8.3.2" 81 | }, 82 | "release": { 83 | "publish": [ 84 | { 85 | "path": "@semantic-release/npm", 86 | "npmPublish": false 87 | } 88 | ] 89 | }, 90 | "husky": { 91 | "hooks": { 92 | "pre-commit": "npm run lint" 93 | } 94 | }, 95 | "standard": { 96 | "ignore": [ 97 | "/test/unit/mocks/**/*.js" 98 | ] 99 | } 100 | } 101 | -------------------------------------------------------------------------------- /util/index/getAllTxs.js: -------------------------------------------------------------------------------- 1 | /* 2 | Utility tool to retrieve all token TXs the indexer indexed, organized by 3 | block height. 4 | */ 5 | 6 | const BCHJS = require('@psf/bch-js') 7 | const bchjs = new BCHJS({ restURL: 'http://192.168.0.36:3000/v5/' }) 8 | 9 | const level = require('level') 10 | 11 | const txDb = level(`${__dirname.toString()}/../../leveldb/current/txs`, { 12 | valueEncoding: 'json' 13 | }) 14 | 15 | const txs = [] 16 | 17 | async function getTxs () { 18 | try { 19 | const promiseArray = [] 20 | const stream = txDb.createReadStream() 21 | 22 | // const txData = [] 23 | 24 | // Add block height to the transaction data and add it to the txs array. 
25 | async function getTxDataWithHeight (txData) { 26 | try { 27 | const blockhash = txData.blockhash 28 | const blockHeader = await bchjs.Blockchain.getBlockHeader(blockhash) 29 | const blockHeight = blockHeader.height 30 | 31 | txData.blockHeight = blockHeight 32 | txs.push(txData) 33 | } catch (err) { 34 | console.error('Error in getTxDataWithHeight') 35 | throw err 36 | } 37 | } 38 | 39 | stream.on('data', async function (data) { 40 | try { 41 | // console.log(data.key, ' = ', JSON.stringify(data.value, null, 2)) 42 | // console.log(data.key) 43 | // txs.push(data.key) 44 | 45 | promiseArray.push(getTxDataWithHeight(data.value)) 46 | } catch (err) { 47 | console.error('Error in "data" read steam: ', err) 48 | } 49 | }) 50 | 51 | stream.on('close', async function () { 52 | try { 53 | // console.log(`const txs = ${JSON.stringify(txs, null, 2)}`) 54 | 55 | console.log(`Waiting for ${promiseArray.length} promises`) 56 | await Promise.all(promiseArray) 57 | 58 | // Sort transactions by blockHeight. (oldest first) 59 | txs.sort(function (a, b) { 60 | return a.blockHeight - b.blockHeight 61 | }) 62 | console.log(`There are ${txs.length} txs`) 63 | // console.log(`txs: ${JSON.stringify(txs, null, 2)}`) 64 | 65 | const outAry = [] 66 | 67 | let currentBlock = txs[0].blockHeight 68 | let currentObj = { 69 | height: currentBlock, 70 | txs: [] 71 | } 72 | 73 | for (let i = 0; i < txs.length; i++) { 74 | const elem = txs[i] 75 | console.log( 76 | `elem.txid: ${elem.txid}, elem.blockHeight: ${elem.blockHeight}` 77 | ) 78 | 79 | if (elem.blockHeight !== currentBlock) { 80 | // Save the current block data to the output array. 81 | outAry.push(currentObj) 82 | 83 | // Create a new block object 84 | currentBlock = elem.blockHeight 85 | currentObj = { 86 | height: currentBlock, 87 | txs: [] 88 | } 89 | } 90 | 91 | // Add the current transaction to the block object. 
92 | currentObj.txs.push(elem.txid) 93 | } 94 | 95 | // Push the final element 96 | outAry.push(currentObj) 97 | 98 | console.log(`${JSON.stringify(outAry, null, 2)}`) 99 | } catch (err) { 100 | console.error('Error in "close" read steam: ', err) 101 | } 102 | }) 103 | 104 | stream.on('end', function () { 105 | console.log('Stream ended') 106 | }) 107 | } catch (err) { 108 | console.error(err) 109 | } 110 | } 111 | getTxs() 112 | -------------------------------------------------------------------------------- /src/adapters/ipfs/ipfs-coord.js: -------------------------------------------------------------------------------- 1 | /* 2 | Clean Architecture Adapter for ipfs-coord. 3 | This library deals with ipfs-coord library so that the apps business logic 4 | doesn't need to have any specific knowledge of the library. 5 | */ 6 | 7 | // Global npm libraries 8 | import IpfsCoord from 'ipfs-coord-esm' 9 | 10 | // import BCHJS from '@psf/bch-js'; 11 | import SlpWallet from 'minimal-slp-wallet' 12 | import publicIp from 'public-ip' 13 | 14 | // Local libraries 15 | import config from '../../../config/index.js' 16 | 17 | // const JSONRPC = require('../../controllers/json-rpc/') 18 | 19 | let _this 20 | 21 | class IpfsCoordAdapter { 22 | constructor (localConfig = {}) { 23 | // Dependency injection. 24 | this.ipfs = localConfig.ipfs 25 | if (!this.ipfs) { 26 | throw new Error( 27 | 'Instance of IPFS must be passed when instantiating ipfs-coord.' 28 | ) 29 | } 30 | 31 | // Encapsulate dependencies 32 | this.IpfsCoord = IpfsCoord 33 | this.ipfsCoord = {} 34 | // this.bchjs = new BCHJS() 35 | this.wallet = new SlpWallet() 36 | this.config = config 37 | this.publicIp = publicIp 38 | 39 | // Properties of this class instance. 40 | this.isReady = false 41 | 42 | _this = this 43 | } 44 | 45 | async start () { 46 | const circuitRelayInfo = {} 47 | 48 | // Wait for the BCH wallet to create the wallet. 
49 | await this.wallet.walletInfoPromise 50 | 51 | // If configured as a Circuit Relay, get the public IP addresses for this node. 52 | if (this.config.isCircuitRelay) { 53 | try { 54 | const ip4 = await this.publicIp.v4() 55 | // const ip6 = await publicIp.v6() 56 | 57 | circuitRelayInfo.ip4 = ip4 58 | circuitRelayInfo.tcpPort = this.config.ipfsTcpPort 59 | 60 | // Domain used by browser-based secure websocket connections. 61 | circuitRelayInfo.crDomain = this.config.crDomain 62 | } catch (err) { 63 | /* exit quietly */ 64 | } 65 | } 66 | 67 | const ipfsCoordOptions = { 68 | ipfs: this.ipfs, 69 | type: 'node.js', 70 | // type: 'browser', 71 | wallet: this.wallet, 72 | privateLog: console.log, // Default to console.log 73 | isCircuitRelay: this.config.isCircuitRelay, 74 | circuitRelayInfo, 75 | apiInfo: this.config.apiInfo, 76 | announceJsonLd: this.config.announceJsonLd, 77 | debugLevel: this.config.debugLevel 78 | } 79 | 80 | // Production env uses external go-ipfs node. 81 | if (this.config.isProduction) { 82 | ipfsCoordOptions.nodeType = 'external' 83 | } 84 | 85 | this.ipfsCoord = new this.IpfsCoord(ipfsCoordOptions) 86 | 87 | // Wait for the ipfs-coord library to signal that it is ready. 88 | await this.ipfsCoord.start() 89 | 90 | // Signal that this adapter is ready. 91 | this.isReady = true 92 | 93 | return this.isReady 94 | } 95 | 96 | // Expects router to be a function, which handles the input data from the 97 | // pubsub channel. It's expected to be capable of routing JSON RPC commands. 
98 | attachRPCRouter (router) { 99 | try { 100 | _this.ipfsCoord.privateLog = router 101 | _this.ipfsCoord.adapters.pubsub.privateLog = router 102 | } catch (err) { 103 | console.error('Error in attachRPCRouter()') 104 | throw err 105 | } 106 | } 107 | 108 | // Subscribe to the chat pubsub channel 109 | async subscribeToChat () { 110 | await this.ipfsCoord.adapters.pubsub.subscribeToPubsubChannel( 111 | this.config.chatPubSubChan, 112 | console.log, 113 | this.ipfsCoord.thisNode 114 | ) 115 | } 116 | } 117 | 118 | export default IpfsCoordAdapter 119 | -------------------------------------------------------------------------------- /test/unit/adapters/slp-indexer/lib/ptxdb.unit.js: -------------------------------------------------------------------------------- 1 | /* 2 | Unit tests for the start-stop.js library 3 | */ 4 | 5 | import { assert } from 'chai' 6 | import sinon from 'sinon' 7 | 8 | import ManagePTXDB from '../../../../../src/adapters/slp-indexer/lib/ptxdb.js' 9 | import MockLevel from '../../../mocks/leveldb-mock.js' 10 | 11 | describe('#ManagePTXDB', () => { 12 | let uut, sandbox 13 | 14 | beforeEach(() => { 15 | sandbox = sinon.createSandbox() 16 | 17 | const pTxDb = new MockLevel() 18 | const localConfig = { pTxDb } 19 | 20 | uut = new ManagePTXDB(localConfig) 21 | }) 22 | 23 | afterEach(() => sandbox.restore()) 24 | 25 | describe('#constructor', () => { 26 | it('should throw error pTxDb instance is not included', () => { 27 | try { 28 | uut = new ManagePTXDB({}) 29 | 30 | assert.fail('Unexpected result') 31 | } catch (err) { 32 | assert.equal(err.message, 'Must pass instance of pTxDb when instantiating ManagePTXDB lib') 33 | } 34 | }) 35 | }) 36 | 37 | describe('#getAllTxs', () => { 38 | it('should get all transactions in the database', async () => { 39 | const isTest = true 40 | const result = await uut.getAllTxs(isTest) 41 | 42 | assert.equal(result, true) 43 | }) 44 | }) 45 | 46 | describe('#readFromStream', () => { 47 | it('should add key to the 
keys array', () => { 48 | const data = { 49 | key: 'a', 50 | value: 'b' 51 | } 52 | 53 | uut.readFromStream(data) 54 | 55 | assert.equal(uut.keys[0], 'a') 56 | }) 57 | }) 58 | 59 | describe('#endStream', () => { 60 | it('should call promise resolve() function', () => { 61 | const resolve = () => true 62 | 63 | const result = uut.endStream(resolve) 64 | 65 | assert.equal(result, true) 66 | }) 67 | }) 68 | 69 | describe('#cleanPTXDB', () => { 70 | it('should clean entries from the ptxdb', async () => { 71 | // Mock dependencies 72 | sandbox.stub(uut, 'getAllTxs').resolves() 73 | uut.keys.push({ 74 | key: 'a', 75 | value: 'b' 76 | }) 77 | sandbox.stub(uut.pTxDb, 'get').resolves(100) 78 | 79 | const result = await uut.cleanPTXDB(110) 80 | 81 | assert.equal(result, true) 82 | }) 83 | 84 | it('should throw error if key is not found in pTxDb', async () => { 85 | // Mock dependencies 86 | sandbox.stub(uut, 'getAllTxs').resolves() 87 | uut.keys.push({ 88 | key: 'a', 89 | value: 'b' 90 | }) 91 | sandbox.stub(uut.pTxDb, 'get').rejects(new Error('entry not found')) 92 | 93 | const result = await uut.cleanPTXDB(110) 94 | 95 | assert.equal(result, true) 96 | }) 97 | 98 | it('should throw error if entry can not be deleted from the database', async () => { 99 | // Mock dependencies 100 | sandbox.stub(uut, 'getAllTxs').resolves() 101 | uut.keys.push({ 102 | key: 'a', 103 | value: 'b' 104 | }) 105 | sandbox.stub(uut.pTxDb, 'get').resolves(100) 106 | 107 | // Force error 108 | sandbox.stub(uut.pTxDb, 'del').rejects(new Error('Could not delete entry')) 109 | 110 | const result = await uut.cleanPTXDB(110) 111 | 112 | assert.equal(result, true) 113 | }) 114 | 115 | it('should catch and throw unhandled errors', async () => { 116 | try { 117 | // Force error 118 | sandbox.stub(uut, 'getAllTxs').rejects(new Error('test error')) 119 | 120 | await uut.cleanPTXDB(110) 121 | 122 | assert.fail('Unexpected result') 123 | } catch (err) { 124 | assert.equal(err.message, 'test error') 125 | } 126 | 
}) 127 | }) 128 | }) 129 | -------------------------------------------------------------------------------- /bin/server.js: -------------------------------------------------------------------------------- 1 | /* 2 | This Koa server has two interfaces: 3 | - REST API over HTTP 4 | - JSON RPC over IPFS 5 | 6 | The architecture of the code follows the Clean Architecture pattern: 7 | https://troutsblog.com/blog/clean-architecture 8 | */ 9 | 10 | // npm libraries 11 | import Koa from 'koa' 12 | 13 | import bodyParser from 'koa-bodyparser' 14 | import convert from 'koa-convert' 15 | import logger from 'koa-logger' 16 | import session from 'koa-generic-session' 17 | import mount from 'koa-mount' 18 | import serve from 'koa-static' 19 | import cors from 'kcors' 20 | 21 | // Local libraries 22 | import config from '../config/index.js' // this first. 23 | import wlogger from '../src/adapters/wlogger.js' 24 | import Controllers from '../src/controllers/index.js' 25 | 26 | class Server { 27 | constructor () { 28 | // Encapsulate dependencies 29 | this.controllers = new Controllers() 30 | this.config = config 31 | this.process = process 32 | } 33 | 34 | async startServer () { 35 | try { 36 | // Create a Koa instance. 37 | const app = new Koa() 38 | app.keys = [this.config.session] 39 | 40 | console.log(`Starting environment: ${this.config.env}`) 41 | console.log(`Debug level: ${this.config.debugLevel}`) 42 | console.log('Version 3.0.7') 43 | 44 | // MIDDLEWARE START 45 | 46 | app.use(convert(logger())) 47 | app.use(bodyParser()) 48 | app.use(session()) 49 | 50 | // Used to generate the docs. 51 | app.use(mount('/', serve(`${process.cwd()}/docs`))) 52 | 53 | // Mount the page for displaying logs. 54 | app.use(mount('/logs', serve(`${process.cwd()}/config/logs`))) 55 | 56 | // Enable CORS for testing 57 | // THIS IS A SECURITY RISK. 
COMMENT OUT FOR PRODUCTION 58 | // Dev Note: This line must come BEFORE controllers.attachRESTControllers() 59 | app.use(cors({ origin: '*' })) 60 | 61 | // Wait for any adapters to initialize. 62 | await this.controllers.initAdapters() 63 | 64 | // Wait for any use-libraries to initialize. 65 | await this.controllers.initUseCases() 66 | 67 | // Attach REST API and JSON RPC controllers to the app. 68 | await this.controllers.attachRESTControllers(app) 69 | 70 | app.controllers = this.controllers 71 | 72 | // MIDDLEWARE END 73 | 74 | console.log(`Running server in environment: ${this.config.env}`) 75 | wlogger.info(`Running server in environment: ${this.config.env}`) 76 | 77 | this.server = await app.listen(this.config.port) 78 | console.log(`Server started on ${this.config.port}`) 79 | 80 | // Attach the other IPFS controllers. 81 | // Skip if this is a test environment. 82 | if (this.config.env !== 'test') { 83 | await this.controllers.attachControllers(app) 84 | } 85 | 86 | // Start the SLP Indexer 87 | app.controllers.adapters.slpIndexer.start() 88 | // app.controllers.adapters.start() 89 | 90 | // Display configuration settings 91 | console.log('\nConfiguration:') 92 | console.log(`Circuit Relay: ${this.config.isCircuitRelay}`) 93 | console.log(`IPFS TCP port: ${this.config.ipfsTcpPort}`) 94 | console.log(`IPFS WS port: ${this.config.ipfsWsPort}\n`) 95 | 96 | return app 97 | } catch (err) { 98 | console.error('Could not start server. Error: ', err) 99 | 100 | console.log( 101 | 'Exiting after 5 seconds. Depending on process manager to restart.' 
102 | ) 103 | await this.sleep(5000) 104 | this.process.exit(1) 105 | } 106 | } 107 | 108 | sleep (ms) { 109 | return new Promise((resolve) => setTimeout(resolve, ms)) 110 | } 111 | } 112 | 113 | export default Server 114 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # psf-slp-indexer 2 | 3 | [![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg)](http://standardjs.com) [![semantic-release](https://img.shields.io/badge/%20%20%F0%9F%93%A6%F0%9F%9A%80-semantic--release-e10079.svg)](https://github.com/semantic-release/semantic-release) 4 | 5 | This application crawls the Bitcoin Cash (BCH) blockchain and indexes SLP token transactions. This code base is intended to be a replacement for [SLPDB](https://github.com/Permissionless-Software-Foundation/docker-slpdb). The work is based on [this report](https://gist.github.com/christroutner/77c46f1fa9adaf593074d41a508a6401) and the work was funded by [this Flipstarter](https://flipstarter.fullstack.cash/). 6 | 7 | This indexer is one part of a collection of blockchain infrastructure. To understand how all the pieces fit together, read the [Cash Stack Documentation](https://cashstack.info). 8 | 9 | If you have question or need help, ask in the [community support Telegram channel](https://t.me/bch_js_toolkit). 10 | 11 | ## Videos 12 | 13 | - [Installing the psf-slp-indexer](https://youtu.be/5gF4ON9lRHI) 14 | - Note: The 'CashStrap' databases are no longer available. You'll need to sync from genesis. 15 | - [Additional Infrastructure Videos](https://psfoundation.cash/video/) in the 'Dev Ops & Infrastructure' section. 16 | 17 | ## Installation and Usage 18 | 19 | This software is intended to be run inside a Docker container, controlled with Docker Compose, on a Ubuntu Linux OS. 20 | 21 | - Ensure you have a BCHN full node running and **fully synced**. 
[docker-bchn](https://github.com/Permissionless-Software-Foundation/docker-bchn) is recommended for running a full node. 22 | - Enter the `production/docker` directory. 23 | - Customize the `start-production.sh` file to match your full node settings 24 | - Build the image with `docker-compose build --no-cache` 25 | - Start the indexer with `docker-compose up -d` 26 | 27 | 28 | ## Features 29 | 30 | - Written in [standard JavaScript](https://www.npmjs.com/package/standard), using the [Clean Architecture](https://christroutner.github.io/trouts-blog/blog/clean-architecture) design pattern. 31 | - 100% unit test coverage. This allows for operational reliability and easy code collaboration. 32 | - [GPLv2](https://www.gnu.org/licenses/old-licenses/gpl-2.0.en.html) Licensed to encourage wide adoption and free use throughout the crypto ecosystem. 33 | - [LevelDB](https://github.com/google/leveldb) used for fast, efficient indexing and querying. 34 | - Drastically reduced memory usage, compared to SLPDB. 35 | - Docker container for easy deployment and horizontal scaling. 36 | 37 | ## Development Environment 38 | 39 | **See the [developer documentation](./dev-docs) for more information.** 40 | 41 | ### Requirements 42 | 43 | - Ubuntu Linux OS v20.4+ 44 | - node **^16.17.0** 45 | - npm **^8.15.0** 46 | 47 | ### Dev Environment Installation 48 | 49 | Customize the [slp-indexer.sh](./slp-indexer.sh) bash shell script to point to a BCH full node with the standard JSON RPC. [docker-bchn](https://github.com/Permissionless-Software-Foundation/docker-bchn) is recommended for running a full node.
50 | 51 | ``` 52 | git clone https://github.com/Permissionless-Software-Foundation/psf-slp-indexer 53 | cd psf-slp-indexer 54 | npm install 55 | ./slp-indexer.sh 56 | ``` 57 | 58 | **See the [developer documentation](./dev-docs) for more information.** 59 | 60 | ## Usage 61 | 62 | - `npm start` Start server on live mode 63 | - `npm run docs` Generate API documentation 64 | - `npm test` Run mocha tests 65 | - `docker-compose build` Build a 'production' Docker container 66 | - `docker-compose up` Run the docker container 67 | 68 | ## License 69 | 70 | [GPLv2](./LICENSE.md) 71 | 72 | ## Contributing 73 | 74 | Contributions are welcome! Check out the [Contribution guide](./CONTRIBUTING.md) for guidance on contributing to this repository. 75 | -------------------------------------------------------------------------------- /test/unit/adapters/contact.adapter.unit.js: -------------------------------------------------------------------------------- 1 | import { assert } from 'chai' 2 | import sinon from 'sinon' 3 | import ContactLib from '../../../src/adapters/contact.js' 4 | let uut 5 | let sandbox 6 | 7 | describe('Contact', () => { 8 | beforeEach(() => { 9 | uut = new ContactLib() 10 | 11 | sandbox = sinon.createSandbox() 12 | }) 13 | 14 | afterEach(() => sandbox.restore()) 15 | 16 | describe('sendEmail()', () => { 17 | it('should throw error if email property is not provided', async () => { 18 | try { 19 | const data = { 20 | formMessage: 'test msg' 21 | } 22 | await uut.sendEmail(data) 23 | assert(false, 'Unexpected result') 24 | } catch (err) { 25 | assert.include(err.message, "Property 'email' must be a string!") 26 | } 27 | }) 28 | 29 | it('should throw error if formMessage property is not provided', async () => { 30 | try { 31 | const data = { 32 | email: 'test@email.com' 33 | } 34 | await uut.sendEmail(data) 35 | assert(false, 'Unexpected result') 36 | } catch (err) { 37 | assert.include(err.message, "Property 'formMessage' must be a string!") 38 | } 39 | }) 40 | 
41 | it('should throw error if email list provided is not a array', async () => { 42 | try { 43 | sandbox.stub(uut.nodemailer, 'sendEmail').resolves(true) 44 | 45 | const data = { 46 | formMessage: 'test msg', 47 | email: 'test@email.com', 48 | emailList: 'test@email.com' 49 | } 50 | await uut.sendEmail(data) 51 | assert(false, 'Unexpected result') 52 | } catch (err) { 53 | assert.include( 54 | err.message, 55 | "Property 'emailList' must be a array of emails!" 56 | ) 57 | } 58 | }) 59 | 60 | it('should throw error if email list provided is a empty array', async () => { 61 | try { 62 | sandbox.stub(uut.nodemailer, 'sendEmail').resolves(true) 63 | 64 | const data = { 65 | formMessage: 'test msg', 66 | email: 'test@email.com', 67 | emailList: [] 68 | } 69 | await uut.sendEmail(data) 70 | assert(false, 'Unexpected result') 71 | } catch (err) { 72 | assert.include( 73 | err.message, 74 | "Property 'emailList' must be a array of emails!" 75 | ) 76 | } 77 | }) 78 | 79 | it('should send email to default server email', async () => { 80 | try { 81 | sandbox.stub(uut.nodemailer, 'sendEmail').resolves(true) 82 | 83 | const data = { 84 | formMessage: 'test msg', 85 | email: 'test@email.com' 86 | } 87 | const result = await uut.sendEmail(data) 88 | assert.isTrue(result) 89 | } catch (err) { 90 | assert(false, 'Unexpected result') 91 | } 92 | }) 93 | 94 | it('should catch and throw nodemailer lib error', async () => { 95 | try { 96 | // Force an error with the database. 
97 | sandbox 98 | .stub(uut.nodemailer, 'sendEmail') 99 | .throws(new Error('test error')) 100 | 101 | const data = { 102 | formMessage: 'test msg', 103 | email: 'test@email.com' 104 | } 105 | await uut.sendEmail(data) 106 | assert(false, 'Unexpected result') 107 | } catch (err) { 108 | assert.include(err.message, 'test error') 109 | } 110 | }) 111 | 112 | it('should send email to specifics email list', async () => { 113 | try { 114 | sandbox.stub(uut.nodemailer, 'sendEmail').resolves(true) 115 | 116 | const data = { 117 | formMessage: 'test msg', 118 | email: 'test@email.com', 119 | emailList: ['testcontact@email.com'] 120 | } 121 | const result = await uut.sendEmail(data) 122 | assert.isTrue(result) 123 | } catch (err) { 124 | assert(false, 'Unexpected result') 125 | } 126 | }) 127 | }) 128 | }) 129 | -------------------------------------------------------------------------------- /src/adapters/slp-indexer/lib/retry-queue.js: -------------------------------------------------------------------------------- 1 | /* 2 | This library leverages the p-retry and p-queue libraries, to create a 3 | validation queue with automatic retry. 4 | 5 | New nodes syncing will attempt to rapidly validate a lot of entries. 6 | A promise-based queue allows this to happen while respecting rate-limits 7 | of the blockchain service provider. 8 | 9 | pay-to-write-access-controller.js depends on this library. 10 | */ 11 | 12 | // Global npm libraries 13 | import PQueue from 'p-queue' 14 | import pRetry from 'p-retry' 15 | 16 | // Local libraries 17 | import Util from './utils.js' 18 | 19 | class RetryQueue { 20 | constructor (localConfig = {}) { 21 | // Encapsulate dependencies 22 | this.queue = new PQueue({ concurrency: 1 }) 23 | this.pRetry = pRetry 24 | this.util = new Util() 25 | 26 | // Note: Retry has exponential back-off, so 6-10 is the right number. 
27 | this.attempts = 6 28 | this.retryPeriod = 3000 29 | 30 | // Bind 'this' object to all subfunctions 31 | this.addToQueue = this.addToQueue.bind(this) 32 | this.retryWrapper = this.retryWrapper.bind(this) 33 | this.handleValidationError = this.handleValidationError.bind(this) 34 | this.sleep = this.util.sleep 35 | } 36 | 37 | // Add an async function to the queue, and execute it with the input object. 38 | async addToQueue (funcHandle, inputObj) { 39 | try { 40 | // console.log('addToQueue inputObj: ', inputObj) 41 | 42 | if (!funcHandle) { 43 | throw new Error('function handler is required') 44 | } 45 | if (!inputObj) { 46 | throw new Error('input object is required') 47 | } 48 | 49 | const returnVal = await this.queue.add(() => 50 | this.retryWrapper(funcHandle, inputObj) 51 | ) 52 | return returnVal 53 | } catch (err) { 54 | console.log('addToQueue() err: ', err) 55 | 56 | if (err.message.includes('500')) { 57 | console.log('Error code 500 typically indicates a TXID that does not exist. This is expected, and indexing can continue.') 58 | } else { 59 | console.error('Error in addToQueue(): ', err) 60 | } 61 | 62 | throw err 63 | } 64 | } 65 | 66 | // Wrap the p-retry library. 67 | // This function returns a promise that will resolve to the output of the 68 | // function 'funcHandle'. 69 | async retryWrapper (funcHandle, inputObj) { 70 | try { 71 | // console.log('retryWrapper inputObj: ', inputObj) 72 | 73 | if (!funcHandle) { 74 | throw new Error('function handler is required') 75 | } 76 | if (!inputObj) { 77 | throw new Error('input object is required') 78 | } 79 | // console.log('Entering retryWrapper()') 80 | 81 | // Add artificial delay to prevent 429 errors. 
82 | // await this.sleep(this.retryPeriod) 83 | 84 | return this.pRetry( 85 | async () => { 86 | return await funcHandle(inputObj) 87 | }, 88 | { 89 | onFailedAttempt: this.handleValidationError, 90 | retries: this.attempts // Retry 5 times 91 | } 92 | ) 93 | } catch (err) { 94 | console.error('Error in retryWrapper()') 95 | throw err 96 | } 97 | } 98 | 99 | // Notifies the user that an error occured and that a retry will be attempted. 100 | // It tracks the number of retries until it fails. 101 | async handleValidationError (error) { 102 | try { 103 | // console.log('handleValidationError() error: ', error) 104 | 105 | const errorMsg = `Attempt ${error.attemptNumber} to validate entry. There are ${error.retriesLeft} retries left. Waiting before trying again.` 106 | console.log(errorMsg) 107 | 108 | const SLEEP_TIME = this.retryPeriod 109 | console.log(`Waiting ${SLEEP_TIME} milliseconds before trying again.\n`) 110 | await this.sleep(SLEEP_TIME) // 30 sec 111 | } catch (err) { 112 | console.error('Error in handleValidationError()') 113 | throw err 114 | } 115 | } 116 | } 117 | 118 | // module.exports = RetryQueue 119 | export default RetryQueue 120 | -------------------------------------------------------------------------------- /test/unit/adapters/slp-indexer/lib/zmq.unit.js: -------------------------------------------------------------------------------- 1 | /* 2 | Unit tests for the zmq.js library 3 | */ 4 | 5 | import { assert } from 'chai' 6 | import sinon from 'sinon' 7 | 8 | import ZMQ from '../../../../../src/adapters/slp-indexer/lib/zmq.js' 9 | import mockData from '../../../mocks/zmq-mocks.js' 10 | 11 | describe('#zmq.js', () => { 12 | let uut, sandbox 13 | 14 | beforeEach(() => { 15 | uut = new ZMQ() 16 | 17 | sandbox = sinon.createSandbox() 18 | }) 19 | 20 | afterEach(() => { 21 | // uut.disconnect() // Ensure the socket is disconnected. 
22 | 23 | sandbox.restore() 24 | }) 25 | 26 | describe('#connect', () => { 27 | it('should initialize a connection', async () => { 28 | console.log('uut.sock: ', uut.sock) 29 | 30 | // Mock network calls. 31 | uut.sock = { 32 | connect: () => {}, 33 | subscribe: () => {} 34 | } 35 | sandbox.stub(uut, 'monitorZmq').returns() 36 | 37 | const result = await uut.connect() 38 | 39 | assert.equal(result, true) 40 | }) 41 | 42 | it('should catch and throw an error', async () => { 43 | try { 44 | // Force and error 45 | uut.sock = { 46 | connect: () => { throw new Error('test error') } 47 | } 48 | 49 | await uut.connect() 50 | 51 | assert.fail('Unexpected code path') 52 | } catch (err) { 53 | assert.equal(err.message, 'test error') 54 | } 55 | }) 56 | }) 57 | 58 | describe('#decodeMsg', () => { 59 | it('should decode an SLP transaction', () => { 60 | // Assert that the TX queue is empty at the start of the test. 61 | assert.equal(uut.txQueue.length, 0) 62 | 63 | const topic = Buffer.from(mockData.topic01, 'hex') 64 | const message = Buffer.from(mockData.msg01, 'hex') 65 | 66 | const result = uut.decodeMsg(topic, message) 67 | 68 | assert.equal(result, true) 69 | 70 | // Assert that the queue now has a transaction in it. 71 | assert.equal(uut.txQueue.length, 1) 72 | }) 73 | 74 | it('should decode a new block', () => { 75 | // Assert that the TX queue is empty at the start of the test. 76 | assert.equal(uut.txQueue.length, 0) 77 | 78 | const topic = Buffer.from(mockData.blockTopic, 'hex') 79 | const message = Buffer.from(mockData.blockMsg, 'hex') 80 | 81 | const result = uut.decodeMsg(topic, message) 82 | 83 | assert.equal(result, true) 84 | 85 | // Assert that the queue now has a transaction in it. 
86 | assert.equal(uut.blockQueue.length, 1) 87 | }) 88 | 89 | it('should catch errors and return false', async () => { 90 | const topic = Buffer.from(mockData.topic01, 'hex') 91 | const message = Buffer.from(mockData.msg01, 'hex') 92 | 93 | // Force an error 94 | sandbox.stub(uut.bchZmqDecoder, 'decodeTransaction').throws(new Error('test error')) 95 | 96 | const result = uut.decodeMsg(topic, message) 97 | 98 | assert.equal(result, false) 99 | }) 100 | }) 101 | 102 | describe('#getTx', () => { 103 | it('should return false if the queue is empty', () => { 104 | const result = uut.getTx() 105 | // console.log('result: ', result) 106 | 107 | assert.equal(result, false) 108 | }) 109 | 110 | it('should return the oldest element in the queue', () => { 111 | uut.txQueue.push('a') 112 | uut.txQueue.push('b') 113 | uut.txQueue.push('c') 114 | 115 | const result = uut.getTx() 116 | 117 | assert.equal(result, 'a') 118 | }) 119 | }) 120 | 121 | describe('#getBlock', () => { 122 | it('should return false if the queue is empty', () => { 123 | const result = uut.getBlock() 124 | // console.log('result: ', result) 125 | 126 | assert.equal(result, false) 127 | }) 128 | 129 | it('should return the oldest element in the queue', () => { 130 | uut.blockQueue.push('a') 131 | uut.blockQueue.push('b') 132 | uut.blockQueue.push('c') 133 | 134 | const result = uut.getBlock() 135 | 136 | assert.equal(result, 'a') 137 | }) 138 | }) 139 | }) 140 | -------------------------------------------------------------------------------- /examples/pin-ipfs-content.js: -------------------------------------------------------------------------------- 1 | /* 2 | The create-proof-of-burn.js example should be run before this example. 3 | 4 | The TXID generated by the first example will be used in this example to 5 | generate a Claim that will pin IPFS content. 
6 | 7 | First TX: 8 | TXID: 09555a14fd2de71a54c0317a8a22ae17bc43512116b063e263e41b3fc94f8905 9 | Block: 825,467 10 | */ 11 | 12 | // The IPFS CID that should be pinned. 13 | // BCH Address: bitcoincash:qqkg30ryje97al52htqwvveha538y7gttywut3cdqv 14 | // SLP Address: simpleledger:qqkg30ryje97al52htqwvveha538y7gttyz8q2dd7j 15 | 16 | import Wallet from 'minimal-slp-wallet' 17 | const CID = 'bafybeicd455l7c6mxiogptqcg6md474qmzzmzobgzu4vfms4wnek2hxguy' 18 | const POB_TXID = '5bfcdca588830245dcd9353f45bb1d06640d7fada0000160ae2789a887b23766' 19 | 20 | // Replace this private key and public address with your own. You can generate 21 | // new values at wallet.fullstack.cash. 22 | const WIF = 'L1tcvcqa5PztqqDH4ZEcUmHA9aSHhTau5E2Zwp1xEK5CrKBrjP3m' 23 | 24 | async function start () { 25 | try { 26 | // Initialize the wallet. 27 | const wallet = new Wallet(WIF, { 28 | interface: 'consumer-api', 29 | restURL: 'https://free-bch.fullstack.cash' 30 | }) 31 | await wallet.initialize() 32 | 33 | // Get info and libraries from the wallet. 34 | const addr = wallet.walletInfo.address 35 | const bchjs = wallet.bchjs 36 | 37 | // Sign a message with the private key 38 | // const sig = bchjs.BitcoinCash.signMessageWithPrivKey(privKey, POB_TXID) 39 | 40 | // Generate the object that will be included in the Claim. 
41 | const claimObj = { 42 | pow: POB_TXID, 43 | cid: `ipfs://${CID}` 44 | } 45 | const opReturnStr = JSON.stringify(claimObj) 46 | console.log(opReturnStr) 47 | 48 | let utxos = await wallet.getUtxos() 49 | utxos = utxos.bchUtxos 50 | // console.log('utxos: ', utxos) 51 | 52 | const utxo = bchjs.Utxo.findBiggestUtxo(utxos) 53 | // console.log('utxo: ', utxo) 54 | 55 | // instance of transaction builder 56 | const transactionBuilder = new bchjs.TransactionBuilder() 57 | 58 | const originalAmount = utxo.value 59 | const vout = utxo.tx_pos 60 | const txid = utxo.tx_hash 61 | 62 | // add input with txid and index of vout 63 | transactionBuilder.addInput(txid, vout) 64 | 65 | // TODO: Compute the 1 sat/byte fee. 66 | const fee = 500 67 | 68 | // BEGIN - Construction of OP_RETURN transaction. 69 | 70 | // Add the OP_RETURN to the transaction. 71 | const script = [ 72 | bchjs.Script.opcodes.OP_RETURN, 73 | Buffer.from('00510000', 'hex'), // Makes message comply with the memo.cash protocol. 74 | Buffer.from(POB_TXID, 'hex'), 75 | Buffer.from(CID) 76 | ] 77 | 78 | // Compile the script array into a bitcoin-compliant hex encoded string. 79 | const data = bchjs.Script.encode(script) 80 | 81 | // Add the OP_RETURN output. 82 | transactionBuilder.addOutput(data, 0) 83 | 84 | // END - Construction of OP_RETURN transaction. 85 | 86 | // Send the same amount - fee. 87 | transactionBuilder.addOutput(addr, originalAmount - fee) 88 | 89 | // Create an EC Key Pair from the user-supplied WIF. 90 | const ecPair = bchjs.ECPair.fromWIF(WIF) 91 | 92 | // Sign the transaction with the HD node. 
93 | let redeemScript 94 | transactionBuilder.sign( 95 | 0, 96 | ecPair, 97 | redeemScript, 98 | transactionBuilder.hashTypes.SIGHASH_ALL, 99 | originalAmount 100 | ) 101 | 102 | // build tx 103 | const tx = transactionBuilder.build() 104 | 105 | // output rawhex 106 | const hex = tx.toHex() 107 | // console.log(`TX hex: ${hex}`); 108 | // console.log(` `); 109 | 110 | // Broadcast transation to the network 111 | // const txidStr = await bchjs.RawTransactions.sendRawTransaction(hex) 112 | const txidStr = await wallet.broadcast({ hex }) 113 | console.log(`Claim Transaction ID: ${txidStr}`) 114 | console.log(`https://blockchair.com/bitcoin-cash/transaction/${txidStr}`) 115 | } catch (err) { 116 | console.error(err) 117 | } 118 | } 119 | start() 120 | -------------------------------------------------------------------------------- /src/adapters/slp-indexer/lib/zmq.js: -------------------------------------------------------------------------------- 1 | /* 2 | A library for working with the ZMQ/websocket connection of a full node. This 3 | is used to get notifications of new mempool transactions and newly mined 4 | blocks. 
5 | */ 6 | 7 | // Public npm libraries 8 | import BitcoinCashZmqDecoder from '@psf/bitcoincash-zmq-decoder' 9 | import * as zmq from 'zeromq' 10 | 11 | // Local libraries 12 | import config from '../../../../config/index.js' 13 | 14 | class ZMQ { 15 | constructor () { 16 | // Encapsulate dependencies 17 | // this.sock = zmq.socket('sub') 18 | this.sock = new zmq.Subscriber() 19 | this.bchZmqDecoder = new BitcoinCashZmqDecoder('mainnet') 20 | this.config = config 21 | 22 | // State 23 | this.txQueue = [] 24 | this.blockQueue = [] 25 | 26 | // Bind 'this' object to subfunctions 27 | this.connect = this.connect.bind(this) 28 | this.monitorZmq = this.monitorZmq.bind(this) 29 | this.disconnect = this.disconnect.bind(this) 30 | this.decodeMsg = this.decodeMsg.bind(this) 31 | this.getTx = this.getTx.bind(this) 32 | this.getBlock = this.getBlock.bind(this) 33 | } 34 | 35 | // Connect to the ZMQ port of the full node. 36 | async connect () { 37 | try { 38 | this.sock.connect(`tcp://${this.config.rpcIp}:${this.config.zmqPort}`) 39 | this.sock.subscribe('raw') 40 | 41 | // Send incoming messages to the decodeMsg() function. 42 | // this.sock.on('message', this.decodeMsg) 43 | 44 | // Do not await. Fire and forget. 45 | this.monitorZmq() 46 | 47 | // Return true to signal that the function has completed successfully. 
48 | return true 49 | } catch (err) { 50 | console.error('Error in zmq.js/connect()') 51 | throw err 52 | } 53 | } 54 | 55 | async monitorZmq () { 56 | try { 57 | for await (const [topic, msg] of this.sock) { 58 | // console.log( 59 | // "received a message related to:", 60 | // topic, 61 | // "containing message:", 62 | // msg, 63 | // ) 64 | 65 | this.decodeMsg(topic, msg) 66 | } 67 | } catch (err) { 68 | console.error('Error in zmq.js/monitorZmq()') 69 | throw err 70 | } 71 | } 72 | 73 | disconnect () { 74 | // this.sock.disconnect(`tcp://${this.config.rpcIp}:${this.config.zmqPort}`) 75 | this.sock.close() 76 | } 77 | 78 | // Decode message coming through ZMQ connection. 79 | decodeMsg (topic, message) { 80 | try { 81 | // console.log('topic: ', topic) 82 | 83 | const decoded = topic.toString('ascii') 84 | // console.log('decoded topic: ', decoded) 85 | 86 | if (decoded === 'rawtx') { 87 | // Process new transactions. 88 | 89 | const txd = this.bchZmqDecoder.decodeTransaction(message) 90 | // console.log(`txd: ${JSON.stringify(txd, null, 2)}`) 91 | // console.log(`txd.format.txid: ${txd.format.txid}`) 92 | this.txQueue.push(txd.format.txid) 93 | // console.log(`txQueue length: ${this.txQueue.length}`) 94 | } else if (decoded === 'rawblock') { 95 | // Process new blocks 96 | 97 | const blk = this.bchZmqDecoder.decodeBlock(message) 98 | console.log(`blk: ${JSON.stringify(blk, null, 2)}`) 99 | this.blockQueue.push(blk) 100 | } 101 | 102 | return true 103 | } catch (err) { 104 | console.error('Error in decodeMsg: ', err) 105 | 106 | // This is a top-level function. Do not throw an error. 
107 | return false 108 | } 109 | } 110 | 111 | // Get the next TX in the queue 112 | getTx () { 113 | // console.log(`this.txQueue.length: ${this.txQueue.length}`) 114 | let nextTx = this.txQueue.shift() 115 | // console.log(`nextTx: ${JSON.stringify(nextTx, null, 2)}`) 116 | 117 | if (nextTx === undefined) nextTx = false 118 | 119 | return nextTx 120 | } 121 | 122 | // Get the next block in the queue 123 | getBlock () { 124 | // console.log(`this.blockQueue.length: ${this.blockQueue.length}`) 125 | let nextBlock = this.blockQueue.shift() 126 | 127 | if (nextBlock === undefined) nextBlock = false 128 | 129 | return nextBlock 130 | } 131 | } 132 | 133 | // module.exports = ZMQ 134 | export default ZMQ 135 | -------------------------------------------------------------------------------- /test/unit/adapters/ipfs-coord.adapter.unit.js: -------------------------------------------------------------------------------- 1 | /* 2 | Unit tests for the IPFS Adapter. 3 | */ 4 | 5 | // Global npm libraries 6 | import { assert } from 'chai' 7 | import sinon from 'sinon' 8 | 9 | // Local libraries 10 | import IPFSCoordAdapter from '../../../src/adapters/ipfs/ipfs-coord.js' 11 | import create from '../mocks/ipfs-mock.js' 12 | import IPFSCoordMock from '../mocks/ipfs-coord-mock.js' 13 | import config from '../../../config/index.js' 14 | 15 | describe('#IPFS', () => { 16 | let uut 17 | let sandbox 18 | 19 | beforeEach(() => { 20 | const ipfs = create() 21 | uut = new IPFSCoordAdapter({ ipfs }) 22 | 23 | sandbox = sinon.createSandbox() 24 | }) 25 | 26 | afterEach(() => sandbox.restore()) 27 | 28 | describe('#constructor', () => { 29 | it('should throw an error if ipfs instance is not included', () => { 30 | try { 31 | uut = new IPFSCoordAdapter() 32 | 33 | assert.fail('Unexpected code path') 34 | } catch (err) { 35 | assert.include( 36 | err.message, 37 | 'Instance of IPFS must be passed when instantiating ipfs-coord.' 
38 | ) 39 | } 40 | }) 41 | }) 42 | 43 | describe('#start', () => { 44 | it('should return a promise that resolves into an instance of IPFS.', async () => { 45 | // Mock dependencies. 46 | uut.IpfsCoord = IPFSCoordMock 47 | 48 | const result = await uut.start() 49 | // console.log('result: ', result) 50 | 51 | assert.equal(result, true) 52 | }) 53 | 54 | it('should get the public IP address if this node is a Circuit Relay', async () => { 55 | // Mock dependencies. 56 | uut.IpfsCoord = IPFSCoordMock 57 | sandbox.stub(uut.publicIp, 'v4').resolves('123') 58 | 59 | // Force Circuit Relay 60 | uut.config.isCircuitRelay = true 61 | 62 | const result = await uut.start() 63 | // console.log('result: ', result) 64 | 65 | assert.equal(result, true) 66 | }) 67 | 68 | it('should exit quietly if this node is a Circuit Relay and there is an issue getting the IP address', async () => { 69 | // Mock dependencies. 70 | uut.IpfsCoord = IPFSCoordMock 71 | sandbox.stub(uut.publicIp, 'v4').rejects(new Error('test error')) 72 | 73 | // Force Circuit Relay 74 | uut.config.isCircuitRelay = true 75 | 76 | const result = await uut.start() 77 | // console.log('result: ', result) 78 | 79 | assert.equal(result, true) 80 | }) 81 | 82 | it('should return a promise that resolves into an instance of IPFS in production mode', async () => { 83 | uut.config.isProduction = true 84 | // Mock dependencies. 
85 | uut.IpfsCoord = IPFSCoordMock 86 | 87 | const result = await uut.start() 88 | // console.log('result: ', result) 89 | assert.equal(result, true) 90 | config.isProduction = false 91 | }) 92 | }) 93 | 94 | describe('#attachRPCRouter', () => { 95 | it('should attached a router output', async () => { 96 | // Mock dependencies 97 | uut.ipfsCoord = { 98 | privateLog: {}, 99 | ipfs: { 100 | orbitdb: { 101 | privateLog: {} 102 | } 103 | }, 104 | adapters: { 105 | pubsub: { 106 | privateLog: () => { 107 | } 108 | } 109 | } 110 | } 111 | 112 | const router = console.log 113 | 114 | uut.attachRPCRouter(router) 115 | }) 116 | 117 | it('should catch and throw an error', () => { 118 | try { 119 | // Force an error 120 | delete uut.ipfsCoord.adapters 121 | 122 | const router = console.log 123 | 124 | uut.attachRPCRouter(router) 125 | 126 | assert.fail('Unexpected code path') 127 | } catch (err) { 128 | assert.include(err.message, 'Cannot read') 129 | } 130 | }) 131 | }) 132 | 133 | describe('#subscribeToChat', () => { 134 | it('should subscribe to the chat channel', async () => { 135 | // Mock dependencies 136 | uut.ipfsCoord = { 137 | adapters: { 138 | pubsub: { 139 | subscribeToPubsubChannel: async () => { 140 | } 141 | } 142 | } 143 | } 144 | 145 | await uut.subscribeToChat() 146 | }) 147 | }) 148 | }) 149 | -------------------------------------------------------------------------------- /test/unit/adapters/slp-indexer/lib/db-backup.unit.js: -------------------------------------------------------------------------------- 1 | /* 2 | Unit tests for the db-backup.js library 3 | */ 4 | 5 | import { assert } from 'chai' 6 | import sinon from 'sinon' 7 | 8 | import DbBackup from '../../../../../src/adapters/slp-indexer/lib/db-backup.js' 9 | import MockLevel from '../../../../unit/mocks/leveldb-mock.js' 10 | 11 | describe('#db-backup', () => { 12 | let uut, sandbox 13 | 14 | beforeEach(() => { 15 | sandbox = sinon.createSandbox() 16 | 17 | const addrDb = new MockLevel() 18 | 
const tokenDb = new MockLevel() 19 | const txDb = new MockLevel() 20 | const statusDb = new MockLevel() 21 | const pTxDb = new MockLevel() 22 | const utxoDb = new MockLevel() 23 | const pinClaimDb = new MockLevel() 24 | const localConfig = { addrDb, tokenDb, txDb, statusDb, pTxDb, utxoDb, pinClaimDb } 25 | 26 | uut = new DbBackup(localConfig) 27 | }) 28 | 29 | afterEach(() => sandbox.restore()) 30 | 31 | describe('#backupDb', () => { 32 | it('should backup the databases', async () => { 33 | // Mock dependencies 34 | sandbox.stub(uut.shell, 'rm').returns() 35 | sandbox.stub(uut.shell, 'mkdir').returns() 36 | sandbox.stub(uut.shell, 'cp').returns() 37 | 38 | const result = await uut.backupDb() 39 | // console.log('result: ', result) 40 | 41 | assert.equal(result, true) 42 | }) 43 | 44 | it('should catch and throw errors', async () => { 45 | try { 46 | // Force an error 47 | sandbox.stub(uut.addrDb, 'close').rejects(new Error('test error')) 48 | 49 | await uut.backupDb() 50 | // console.log('result: ', result) 51 | 52 | assert.fail('Unexpected result') 53 | } catch (err) { 54 | assert.equal(err.message, 'test error') 55 | } 56 | }) 57 | }) 58 | 59 | describe('#restoreDb', () => { 60 | it('should restore databases', async () => { 61 | // Mock dependencies 62 | sandbox.stub(uut.shell, 'rm').returns() 63 | sandbox.stub(uut.shell, 'cp').returns() 64 | 65 | const result = await uut.restoreDb() 66 | 67 | assert.equal(result, true) 68 | }) 69 | 70 | it('should catch and throw errors', async () => { 71 | try { 72 | // Force an error 73 | sandbox.stub(uut.addrDb, 'close').rejects(new Error('test error')) 74 | 75 | await uut.restoreDb() 76 | // console.log('result: ', result) 77 | 78 | assert.fail('Unexpected result') 79 | } catch (err) { 80 | assert.equal(err.message, 'test error') 81 | } 82 | }) 83 | }) 84 | 85 | describe('#zipDb', () => { 86 | it('should zip databases', async () => { 87 | // Mock dependencies 88 | sandbox.stub(uut.shell, 'rm').returns() 89 | 
sandbox.stub(uut.shell, 'exec').returns() 90 | 91 | const result = await uut.zipDb() 92 | 93 | assert.equal(result, true) 94 | }) 95 | 96 | it('should delete old zip database', async () => { 97 | // Mock dependencies 98 | sandbox.stub(uut.shell, 'rm').returns() 99 | sandbox.stub(uut.shell, 'exec').returns() 100 | 101 | const result = await uut.zipDb(1, 1) 102 | 103 | assert.equal(result, true) 104 | }) 105 | 106 | it('should catch and throw errors', async () => { 107 | try { 108 | // Force an error 109 | sandbox.stub(uut.addrDb, 'close').rejects(new Error('test error')) 110 | 111 | await uut.zipDb() 112 | // console.log('result: ', result) 113 | 114 | assert.fail('Unexpected result') 115 | } catch (err) { 116 | assert.equal(err.message, 'test error') 117 | } 118 | }) 119 | }) 120 | 121 | describe('#unzipDb', () => { 122 | it('should unzip databases', async () => { 123 | // Mock dependencies 124 | sandbox.stub(uut.shell, 'rm').returns() 125 | sandbox.stub(uut.shell, 'exec').returns() 126 | sandbox.stub(uut.shell, 'cd').returns() 127 | 128 | const result = await uut.unzipDb() 129 | 130 | assert.equal(result, true) 131 | }) 132 | 133 | it('should return false if there is an error', async () => { 134 | // Force an error 135 | sandbox.stub(uut.addrDb, 'close').rejects(new Error('test error')) 136 | 137 | const result = await uut.unzipDb() 138 | // console.log('result: ', result) 139 | 140 | assert.equal(result, false) 141 | }) 142 | }) 143 | }) 144 | -------------------------------------------------------------------------------- /util/index/create-tx-map.js: -------------------------------------------------------------------------------- 1 | /* 2 | Utility tool to retrieve all token TXs the indexer indexed, organized by 3 | block height. 
4 | 5 | Run this command by increasing the memory allocation for node.js: 6 | node --max_old_space_size=28000 create-tx-map.js 7 | */ 8 | 9 | const fs = require('fs') 10 | 11 | const level = require('level') 12 | 13 | const txDb = level(`${__dirname.toString()}/../../leveldb/current/txs`, { 14 | valueEncoding: 'json' 15 | }) 16 | 17 | // const txs = [] 18 | 19 | async function getTxs () { 20 | try { 21 | // const promiseArray = [] 22 | const stream = txDb.createReadStream() 23 | 24 | const txids = [] 25 | 26 | // const txData = [] 27 | 28 | // Add block height to the transaction data and add it to the txs array. 29 | // async function getTxData (txid) { 30 | // try { 31 | // await bchjs.Util.sleep(200) 32 | // 33 | // const txData = await bchjs.Transaction.get3(txid) 34 | // return txData 35 | // } catch (err) { 36 | // console.error('Error in getTxData') 37 | // throw err 38 | // } 39 | // } 40 | 41 | stream.on('data', async function (data) { 42 | try { 43 | // console.log(data.key, ' = ', JSON.stringify(data.value, null, 2)) 44 | // console.log(data.key) 45 | // txs.push(data.key) 46 | 47 | // promiseArray.push(getTxDataWithHeight(data.value)) 48 | 49 | // Get the TXID from the database. 
50 | txids.push(data.value) 51 | } catch (err) { 52 | console.error('Error in "data" read steam: ', err) 53 | } 54 | }) 55 | 56 | stream.on('close', async function () { 57 | try { 58 | console.log('Stream closed.') 59 | // console.log(`const txs = ${JSON.stringify(txs, null, 2)}`) 60 | 61 | console.log(`txids: ${txids.length}`) 62 | } catch (err) { 63 | console.error('Error in "close" read steam: ', err) 64 | } 65 | }) 66 | 67 | stream.on('end', function () { 68 | console.log('Stream ended') 69 | console.log(`txids: ${txids.length}`) 70 | 71 | processTxids() 72 | }) 73 | 74 | async function processTxids () { 75 | try { 76 | // console.log(`Waiting for ${promiseArray.length} promises`) 77 | // await Promise.all(promiseArray) 78 | 79 | // Loop through each txid and get the TX data for it. 80 | // const txData = [] 81 | // for (let i = 0; i < txids.length; i++) { 82 | // console.log(`Getting data on txid ${i} out of ${txids.length}`) 83 | // 84 | // const txid = txids[i] 85 | // const data = await getTxData(txid) 86 | // txData.push(data) 87 | // } 88 | 89 | const txData = txids 90 | // console.log(`txData: ${JSON.stringify(txData, null, 2)}`) 91 | 92 | // Sort transactions by blockHeight. (oldest first) 93 | txData.sort(function (a, b) { 94 | return a.blockheight - b.blockheight 95 | }) 96 | console.log(`There are ${txData.length} txs`) 97 | // console.log(`txs: ${JSON.stringify(txs, null, 2)}`) 98 | 99 | const outAry = [] 100 | 101 | let currentBlock = txData[0].blockHeight 102 | let currentObj = { 103 | height: currentBlock, 104 | txs: [] 105 | } 106 | 107 | for (let i = 0; i < txData.length; i++) { 108 | const elem = txData[i] 109 | // console.log( 110 | // `elem.txid: ${elem.txid}, elem.blockHeight: ${elem.blockheight}` 111 | // ) 112 | 113 | if (elem.blockheight !== currentBlock) { 114 | // Save the current block data to the output array. 
115 | outAry.push(currentObj) 116 | 117 | // Create a new block object 118 | currentBlock = elem.blockheight 119 | currentObj = { 120 | height: currentBlock, 121 | txs: [] 122 | } 123 | } 124 | 125 | // Add the current transaction to the block object. 126 | currentObj.txs.push(elem.txid) 127 | } 128 | 129 | // Push the final element 130 | outAry.push(currentObj) 131 | 132 | const outJsonStr = JSON.stringify(outAry, null, 2) 133 | // console.log(`${outJsonStr}`) 134 | fs.writeFileSync('./tx-map-new.json', outJsonStr) 135 | } catch (err) { 136 | console.error(err) 137 | } 138 | } 139 | } catch (err) { 140 | console.error(err) 141 | } 142 | } 143 | getTxs() 144 | -------------------------------------------------------------------------------- /src/adapters/ipfs/ipfs.js: -------------------------------------------------------------------------------- 1 | /* 2 | Clean Architecture Adapter for IPFS. 3 | This library deals with IPFS so that the apps business logic doesn't need 4 | to have any specific knowledge of the js-ipfs library. 5 | 6 | TODO: Add the external IP address to the list of multiaddrs advertised by 7 | this node. See this GitHub Issue for details: 8 | https://github.com/Permissionless-Software-Foundation/ipfs-service-provider/issues/38 9 | */ 10 | 11 | // Global npm libraries 12 | // const IPFS = require('ipfs') 13 | // const IPFS = require('@chris.troutner/ipfs') 14 | // import IPFSembedded from 'ipfs'; 15 | 16 | import { create } from 'ipfs-http-client' 17 | import fs from 'fs' 18 | import http from 'http' 19 | 20 | // Local libraries 21 | import config from '../../../config/index.js' 22 | 23 | const IPFS_DIR = './.ipfsdata/ipfs' 24 | 25 | class IpfsAdapter { 26 | constructor (localConfig) { 27 | // Encapsulate dependencies 28 | this.config = config 29 | this.fs = fs 30 | this.create = create 31 | 32 | // Choose the IPFS constructor based on the config settings. 
33 | // this.IPFS = IPFSembedded // default 34 | // if (this.config.isProduction) { 35 | // this.IPFS = IPFSexternal 36 | // } 37 | 38 | // Properties of this class instance. 39 | this.isReady = false 40 | } 41 | 42 | // Start an IPFS node. 43 | async start () { 44 | try { 45 | // Ipfs Options 46 | const ipfsOptionsEmbedded = { 47 | repo: IPFS_DIR, 48 | start: true, 49 | config: { 50 | relay: { 51 | enabled: true, // enable circuit relay dialer and listener 52 | hop: { 53 | enabled: config.isCircuitRelay // enable circuit relay HOP (make this node a relay) 54 | } 55 | }, 56 | pubsub: true, // enable pubsub 57 | Swarm: { 58 | ConnMgr: { 59 | HighWater: 30, 60 | LowWater: 10 61 | } 62 | }, 63 | Addresses: { 64 | Swarm: [ 65 | `/ip4/0.0.0.0/tcp/${this.config.ipfsTcpPort}`, 66 | `/ip4/0.0.0.0/tcp/${this.config.ipfsWsPort}/ws` 67 | ] 68 | }, 69 | Datastore: { 70 | StorageMax: '2GB', 71 | StorageGCWatermark: 50, 72 | GCPeriod: '15m' 73 | } 74 | } 75 | } 76 | 77 | const ipfsOptionsExternal = { 78 | host: this.config.ipfsHost, 79 | port: this.config.ipfsApiPort, 80 | agent: http.Agent({ keepAlive: true, maxSockets: 2000 }) 81 | } 82 | 83 | let ipfsOptions = ipfsOptionsEmbedded 84 | if (this.config.isProduction) { 85 | ipfsOptions = ipfsOptionsExternal 86 | } 87 | 88 | // Create a new IPFS node. 89 | this.ipfs = await this.create(ipfsOptions) 90 | 91 | // Set the 'server' profile so the node does not scan private networks. 92 | await this.ipfs.config.profiles.apply('server') 93 | 94 | // Debugging: Display IPFS config settings. 95 | // const configSettings = await this.ipfs.config.getAll() 96 | // console.log(`configSettings: ${JSON.stringify(configSettings, null, 2)}`) 97 | 98 | // Signal that this adapter is ready. 99 | this.isReady = true 100 | 101 | return this.ipfs 102 | } catch (err) { 103 | console.error('Error in ipfs.js/start()') 104 | 105 | // If IPFS crashes because the /blocks directory is full, wipe the directory. 
106 | // if (err.message.includes('No space left on device')) { 107 | // this.rmBlocksDir() 108 | // } 109 | 110 | throw err 111 | } 112 | } 113 | 114 | async stop () { 115 | await this.ipfs.stop() 116 | 117 | return true 118 | } 119 | 120 | // Remove the '/blocks' directory that is used to store IPFS data. 121 | // Dev Note: It's assumed this node is not pinning any data and that 122 | // everything in this directory is transient. This folder will regularly 123 | // fill up and prevent IPFS from starting. 124 | // rmBlocksDir () { 125 | // try { 126 | // const dir = `${IPFS_DIR}/blocks` 127 | // console.log(`Deleting ${dir} directory...`) 128 | // 129 | // this.fs.rmdirSync(dir, { recursive: true }) 130 | // 131 | // console.log(`${dir} directory is deleted!`) 132 | // 133 | // return true // Signal successful execution. 134 | // } catch (err) { 135 | // console.log('Error in rmBlocksDir()') 136 | // throw err 137 | // } 138 | // } 139 | } 140 | 141 | export default IpfsAdapter 142 | --------------------------------------------------------------------------------