├── .codeclimate.yml ├── .docker ├── conf │ ├── regtest01.conf │ └── regtest02.conf └── docker-compose.yml ├── .dockerignore ├── .eslintrc.json ├── .github └── workflows │ └── node.js.yml ├── .gitignore ├── .nvmrc ├── CODE_OF_CONDUCT.md ├── Dockerfile ├── INSTALL.md ├── LICENSE ├── README.md ├── app.js ├── init.sh ├── package-lock.json ├── package.json ├── pool_config_sample.json ├── server_config_sample.json ├── src ├── api │ ├── collect_blocks_data.js │ ├── collect_payments_data.js │ ├── collect_workers_data.js │ ├── index.js │ └── v1 │ │ ├── blocks.js │ │ ├── combined.js │ │ ├── history.js │ │ ├── index.js │ │ ├── payments.js │ │ ├── statistics.js │ │ ├── utils.js │ │ ├── wallets.js │ │ └── workers.js ├── cnc │ └── index.js ├── daemon │ ├── index.js │ └── validate_address.js ├── logger │ ├── colorize_log.js │ ├── format_log.js │ ├── index.js │ └── utils.js ├── payments │ ├── calculate_payments │ │ ├── build_worker_records │ │ │ ├── fill_worker_records.js │ │ │ └── index.js │ │ ├── index.js │ │ ├── prepare_payments_update.js │ │ └── send_payments.js │ ├── coin_utils.js │ ├── fix_failed_payments.js │ ├── index.js │ ├── init_payments.js │ ├── initialize_payouts │ │ ├── fetch_pending_blocks.js │ │ ├── fetch_unpaid_workers.js │ │ ├── find_duplicate_blocks.js │ │ ├── find_invalid_blocks.js │ │ ├── index.js │ │ └── move_invalid_blocks.js │ ├── manage_sent_payments │ │ ├── index.js │ │ ├── send_redis_commands.js │ │ ├── update_worker_payout_balances.js │ │ └── update_worker_shares.js │ ├── process_payments.js │ ├── process_share_blocks │ │ ├── check_payment_funds.js │ │ ├── confirmed_block │ │ │ ├── adjust_round_times.js │ │ │ ├── compute_shared_payouts.js │ │ │ ├── index.js │ │ │ └── shared_round_total.js │ │ ├── fetch_round_shares.js │ │ ├── fetch_round_times.js │ │ ├── immature_block │ │ │ ├── index.js │ │ │ └── shared_round_total.js │ │ ├── index.js │ │ ├── list_unspent.js │ │ ├── lost_shares.js │ │ ├── move_manual_rounds.js │ │ ├── process_auto_rounds.js │ │ └── 
separate_rounds.js │ ├── start_payments.js │ ├── update_rounds │ │ ├── convert_transaction.js │ │ ├── fetch_transactions.js │ │ ├── flag_deletable_rounds.js │ │ └── index.js │ └── utils.js ├── redis │ ├── clients.js │ ├── fetch_version_num.js │ ├── index.js │ └── is_valid_version.js ├── shares │ ├── fetch_times_shares.js │ ├── handle_share.js │ ├── index.js │ ├── persist_hashrate_data.js │ ├── process_block_data.js │ └── process_share_data.js ├── startup │ ├── index.js │ ├── spawn_api.js │ ├── spawn_payments.js │ ├── spawn_process.js │ ├── spawn_workers.js │ ├── start_cnc.js │ └── utils.js ├── stats │ ├── coin_balances.js │ ├── coin_stats │ │ ├── compute_hashrates.js │ │ ├── fetch_raw_stats.js │ │ ├── index.js │ │ ├── initialize_workers.js │ │ ├── parse_stats.js │ │ ├── persist_history.js │ │ └── process_stats.js │ ├── index.js │ ├── total_shares.js │ └── utils.js ├── utils │ ├── finalize_pool_config.js │ ├── promised_redis.js │ ├── require_deps.js │ ├── retry.js │ └── set_posix_limit.js └── worker │ ├── auth.js │ ├── index.js │ ├── log_share.js │ └── pool_auth_callback.js └── test ├── api ├── collect_blocks_data_test.js ├── collect_payments_data_test.js ├── collect_workers_data_test.js ├── index_test.js └── v1 │ ├── blocks_test.js │ ├── combined_test.js │ ├── history_test.js │ ├── index_test.js │ ├── payments_test.js │ ├── statistics_test.js │ ├── wallets_test.js │ └── workers_test.js ├── carrot.json ├── chai-local.js ├── cnc └── index_test.js ├── daemon └── index.js ├── helpers.js ├── logger ├── colorize_log_test.js ├── format_log_test.js └── index_test.js ├── payments ├── calculate_payments │ ├── build_worker_records │ │ ├── fill_worker_records_test.js │ │ └── index_test.js │ ├── index_test.js │ ├── prepare_payments_update_test.js │ └── send_payments_test.js ├── fix_failed_payments_test.js ├── index_test.js ├── init_payments_test.js ├── initialize_payouts │ ├── fetch_pending_blocks_test.js │ ├── fetch_unpaid_workers_test.js │ ├── find_duplicate_blocks_test.js │ 
├── find_invalid_blocks_test.js │ ├── index_test.js │ └── move_invalid_blocks_test.js ├── manage_sent_payments │ ├── index_test.js │ ├── send_redis_commands_test.js │ ├── update_worker_payout_balances_test.js │ └── update_worker_shares_test.js ├── process_payments_test.js ├── process_share_blocks │ ├── check_payment_funds_test.js │ ├── confirmed_block │ │ ├── compute_shared_payouts_test.js │ │ ├── index_test.js │ │ └── shared_round_total_test.js │ ├── fetch_round_shares_test.js │ ├── fetch_round_times_test.js │ ├── immature_block │ │ ├── index_test.js │ │ └── shared_round_total_test.js │ ├── index_test.js │ ├── list_unspent_test.js │ ├── lost_shares_test.js │ ├── move_manual_rounds_test.js │ ├── process_auto_rounds_test.js │ └── separate_rounds_test.js ├── start_payments_test.js ├── update_rounds │ ├── convert_transaction_test.js │ ├── fetch_transactions_test.js │ ├── flag_deletable_rounds_test.js │ └── index_test.js └── utils_test.js ├── redis ├── clients_test.js ├── fetch_version_num_test.js ├── index_test.js └── is_valid_version_test.js ├── shares ├── fetch_times_shares_test.js ├── handle_share_test.js ├── process_block_data_test.js └── process_share_data_test.js ├── startup ├── index_test.js ├── spawn_api_test.js ├── spawn_payments_test.js ├── spawn_process_test.js ├── spawn_workers_test.js └── start_cnc_test.js ├── stats ├── coin_balances_test.js ├── coin_stats │ ├── compute_hashrates_test.js │ ├── fetch_raw_stats_test.js │ ├── initialize_workers_test.js │ ├── parse_stats_test.js │ └── persist_history_test.js ├── index_test.js └── total_shares_test.js ├── utils ├── finalize_pool_config_test.js ├── key_dep.js ├── promised_redis_test.js ├── require_deps_test.js ├── retry_test.js └── set_posix_limit_test.js └── worker ├── auth_test.js ├── index_test.js ├── log_share_test.js └── pool_auth_callback_test.js /.codeclimate.yml: -------------------------------------------------------------------------------- 1 | version: "2" 2 | plugins: 3 | duplication: 4 | enabled: 
true 5 | config: 6 | languages: 7 | javascript: 8 | mass_threshold: 75 9 | eslint: 10 | enabled: true 11 | channel: "eslint-7" 12 | checks: 13 | argument-count: 14 | config: 15 | threshold: 4 16 | complex-logic: 17 | config: 18 | threshold: 4 19 | file-lines: 20 | config: 21 | threshold: 250 22 | method-complexity: 23 | config: 24 | threshold: 15 25 | method-count: 26 | config: 27 | threshold: 20 28 | method-lines: 29 | config: 30 | threshold: 35 31 | nested-control-flow: 32 | config: 33 | threshold: 4 34 | return-statements: 35 | config: 36 | threshold: 4 37 | similar-code: 38 | config: 39 | threshold: # language-specific defaults. an override will affect all languages. 40 | identical-code: 41 | config: 42 | threshold: # language-specific defaults. an override will affect all languages. 43 | -------------------------------------------------------------------------------- /.docker/conf/regtest01.conf: -------------------------------------------------------------------------------- 1 | regtest=1 2 | port=8333 3 | connect=regtest02:8333 4 | listen=1 5 | rpcuser=regtestuser 6 | rpcpassword=regtestpassword 7 | rpcallowip=::/0 8 | rpcport=8332 9 | maxstackmemoryusageconsensus=7000000000 10 | excessiveblocksize=10000000000 11 | -------------------------------------------------------------------------------- /.docker/conf/regtest02.conf: -------------------------------------------------------------------------------- 1 | regtest=1 2 | port=8333 3 | connect=regtest01:8333 4 | listen=1 5 | rpcuser=regtestuser 6 | rpcpassword=regtestpassword 7 | rpcallowip=::/0 8 | rpcport=8332 9 | maxstackmemoryusageconsensus=7000000000 10 | excessiveblocksize=10000000000 11 | -------------------------------------------------------------------------------- /.docker/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.9" 2 | services: 3 | app: 4 | build: ../. 
5 | ports: 6 | - "3010:3010" 7 | configs: 8 | - source: app_pool 9 | target: "/src/pool_config.json" 10 | - source: app_server 11 | target: "/src/server_config.json" 12 | 13 | redis: 14 | image: redis 15 | 16 | regtest01: 17 | image: bitcoinsv/bitcoin-sv 18 | entrypoint: "bitcoind -conf=/etc/bitcoin.conf -data-dir=/data" 19 | configs: 20 | - source: regtest01 21 | target: "/etc/bitcoin.conf" 22 | volumes: 23 | - "regtest01:/data/" 24 | 25 | regtest02: 26 | image: bitcoinsv/bitcoin-sv 27 | entrypoint: "bitcoind -conf=/etc/bitcoin.conf -data-dir=/data" 28 | configs: 29 | - source: regtest02 30 | target: "/etc/bitcoin.conf" 31 | volumes: 32 | - "regtest02:/data/" 33 | 34 | cpuminer: 35 | image: tpruvot/cpuminer-multi 36 | command: [ 37 | "-a", "sha256d", 38 | "-o", "stratum+tcp://app:3010", 39 | "-u", "mg3Brq4Kj9wDhEq4gkMFbTyJwp5yipKrkw.worker.1", 40 | "-p", "pw.worker.1", 41 | "-t", "4", 42 | "--no-longpoll", 43 | "--no-getwork", 44 | "--no-gbt", 45 | "-D", 46 | "-P"] 47 | 48 | volumes: 49 | regtest01: {} 50 | regtest02: {} 51 | configs: 52 | regtest01: 53 | file: ./conf/regtest01.conf 54 | regtest02: 55 | file: ./conf/regtest02.conf 56 | app_pool: 57 | file: ./conf/pool_config.json 58 | app_server: 59 | file: ./conf/server_config.json 60 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | .git/ 2 | node_modules/ 3 | npm-debug.log 4 | *.json 5 | !package.json 6 | !package-lock.json 7 | configs/*.json 8 | README.md 9 | .gitignore 10 | .eslintrc.js 11 | 12 | -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "node": true 4 | }, 5 | "extends": [ 6 | "airbnb-base" 7 | ], 8 | "parserOptions": { 9 | "ecmaVersion": 12 10 | }, 11 | "rules": { 12 | "comma-dangle": ["error", "only-multiline"], 13 | 
"default-case": "off", 14 | "func-names": ["error", "never"], 15 | "global-require": "off", 16 | "import/no-extraneous-dependencies": ["error", {"devDependencies": true}], 17 | "import/no-unresolved": ["error", { "ignore": ["posix"], "commonjs": true, "caseSensitive": true }], 18 | "no-bitwise": "off", 19 | "no-console": "off", 20 | "no-new": "off", 21 | "no-param-reassign": "off", 22 | "no-underscore-dangle": "off", 23 | "no-unused-vars": ["error", { "argsIgnorePattern": "^_", "varsIgnorePattern": "^_" }], 24 | "object-curly-newline": ["error", { 25 | "ObjectExpression": { "multiline": true, "minProperties": 4 }, 26 | "ExportDeclaration": { "multiline": true, "minProperties": 4 } 27 | }] 28 | }, 29 | "overrides": [ 30 | { 31 | "files": ["*_test.js"], 32 | "rules": { "no-unused-expressions": "off" } 33 | } 34 | ] 35 | } 36 | -------------------------------------------------------------------------------- /.github/workflows/node.js.yml: -------------------------------------------------------------------------------- 1 | name: Node.js CI 2 | 3 | on: [push] 4 | 5 | jobs: 6 | test: 7 | strategy: 8 | matrix: 9 | node-version: [14.x, 16.x] 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@master 13 | - name: Run ssh-agent 14 | uses: webfactory/ssh-agent@v0.4.1 15 | with: 16 | ssh-private-key: ${{ secrets.SSH_PRIVATE_KEY }} 17 | - name: Use Node.js ${{ matrix.node-version }} 18 | uses: actions/setup-node@master 19 | with: 20 | node-version: ${{ matrix.node-version }} 21 | - run: npm ci 22 | - run: npm test 23 | coverage: 24 | needs: [ test ] 25 | name: coverage 26 | runs-on: ubuntu-latest 27 | steps: 28 | - uses: actions/checkout@master 29 | - name: Run ssh-agent 30 | uses: webfactory/ssh-agent@v0.4.1 31 | with: 32 | ssh-private-key: ${{ secrets.SSH_PRIVATE_KEY }} 33 | - uses: actions/setup-node@master 34 | with: 35 | node-version: '14' 36 | - run: npm ci 37 | - uses: paambaati/codeclimate-action@v2.7.5 38 | env: 39 | CC_TEST_REPORTER_ID: 
ab6be7df89409a87a7e4a6096da9fb75b9200c2a3fecbe43da7fd63aec48dcc1 40 | with: 41 | coverageCommand: npm run cover 42 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | .idea/ 3 | npm-debug.log 4 | config.json 5 | configs/* 6 | .nyc_output/ 7 | coverage/ 8 | !configs/example.json 9 | partners/* 10 | !partners/example.json 11 | pool_config.json 12 | server_config.json 13 | -------------------------------------------------------------------------------- /.nvmrc: -------------------------------------------------------------------------------- 1 | 14.17.3 2 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # syntax = docker/dockerfile:experimental 2 | 3 | # Stage 1: Build deps from source including private github repos 4 | 5 | FROM node:14 6 | 7 | # Create working directory 8 | WORKDIR /src 9 | 10 | # Update and download system dependencies 11 | RUN apt-get update && apt-get install -y openssh-client 12 | 13 | # Create ssh directory 14 | RUN mkdir -p -m 0700 ~/.ssh && ssh-keyscan github.com >> ~/.ssh/known_hosts 15 | 16 | # Copy source excluding files in .dockerignore 17 | COPY . . 
const fs = require('fs');
const cluster = require('cluster');

const { PoolLogger } = require('./src/logger');
const { PoolStartup } = require('./src/startup');

const { setPosixLimit } = require('./src/utils/set_posix_limit');
const { finalizePoolConfig } = require('./src/utils/finalize_pool_config');

// Read and parse a JSON config file located alongside this script.
const readJsonConfig = (name) => {
  const raw = fs.readFileSync(`${__dirname}/${name}`, { encoding: 'utf8' });
  return JSON.parse(raw);
};

// Initialize our base config environments: server-wide settings first (the
// logger needs them), then the pool config, finalized against the portal
// config.
const portalConfig = readJsonConfig('server_config.json');
const baseLogger = new PoolLogger(portalConfig);
const poolConfig = finalizePoolConfig({
  portalConfig,
  poolConfig: readJsonConfig('pool_config.json'),
  baseLogger,
});

// Ensure our process has an expanded file handle limit.
setPosixLimit({ baseLogger, isMaster: cluster.isMaster });

// Initialize Pool startup.
PoolStartup({
  cluster, baseLogger, poolConfig, portalConfig
});
Aborting" >&2 8 | exit 1 9 | fi 10 | 11 | aws s3 cp "s3://hr-pool/${APP_ENVIRONMENT}/config.json" . 12 | aws s3 cp "s3://hr-pool/${APP_ENVIRONMENT}/configs" configs/ --recursive 13 | fi 14 | 15 | exec npm run start 16 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "carrot-pool", 3 | "version": "0.5.0", 4 | "description": "Enterprise proof-of-work infrastructure & API for blockchain mining.", 5 | "keywords": [ 6 | "stratum", 7 | "mining", 8 | "pool", 9 | "server", 10 | "poolserver", 11 | "bitcoin", 12 | "bitcoin-sv" 13 | ], 14 | "license": "GPL-2.0", 15 | "author": "HashRabbit, Inc. (https://hashrabbit.com)", 16 | "main": "app.js", 17 | "private": true, 18 | "homepage": "https://carrot-pool.com/", 19 | "repository": "github:hashrabbit/carrot-pool", 20 | "bugs": "https://github.com/hashrabbit/carrot-pool/issues", 21 | "scripts": { 22 | "start": "nodemon app.js", 23 | "clean": "eslint . 
--ext .js --ignore-pattern node_modules/", 24 | "test": "mocha --exit 'test/**/**_test.js'", 25 | "dot": "mocha --exit --reporter dot 'test/**/**_test.js'", 26 | "cover": "nyc --reporter=lcov --reporter=text-summary npm run dot", 27 | "prod:start": "forever start -l carrot-pool-log.txt --append app.js", 28 | "prod:stop": "forever stop app.js" 29 | }, 30 | "dependencies": { 31 | "apicache": "^1.6.2", 32 | "babel-eslint": "^10.1.0", 33 | "bignum": "^0.13.1", 34 | "body-parser": "^1.19.0", 35 | "colors": "^1.4.0", 36 | "compression": "^1.7.4", 37 | "cors": "^2.8.5", 38 | "dateformat": "^4.5.1", 39 | "express": "^4.17.1", 40 | "forever": "^3.0.4", 41 | "node-watch": "^0.7.1", 42 | "nodemon": "^2.0.7", 43 | "redis": "^3.1.2", 44 | "redis-clustr": "^1.7.0", 45 | "stratum-pool": "git+https://github.com/hashrabbit/carrot-pool-stratum.git" 46 | }, 47 | "engines": { 48 | "node": ">= 14.0.0", 49 | "npm": ">= 7.0.0" 50 | }, 51 | "devDependencies": { 52 | "chai": "^4.2.0", 53 | "chai-as-promised": "^7.1.1", 54 | "chai-http": "^4.3.0", 55 | "decache": "^4.6.0", 56 | "eslint": "^7.17.0", 57 | "eslint-config-airbnb-base": "^14.2.1", 58 | "eslint-plugin-import": "^2.22.1", 59 | "mocha": "^8.2.1", 60 | "nyc": "^15.1.0", 61 | "redis-mock": "^0.52.0", 62 | "sinon": "^9.2.3", 63 | "sinon-chai": "^3.5.0" 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /server_config_sample.json: -------------------------------------------------------------------------------- 1 | { 2 | "cliPort": 42320, 3 | "clustering": { 4 | "enabled": true, 5 | "forks": "auto" 6 | }, 7 | "logger": { 8 | "colors": true, 9 | "level": "debug", 10 | "tty": true 11 | }, 12 | "logColors": true, 13 | "logLevel": "debug", 14 | "defaultPoolConfigs": { 15 | "blockRefreshInterval": 1000, 16 | "jobRebroadcastTimeout": 55, 17 | "connectionTimeout": 600, 18 | "emitInvalidBlockHashes": false, 19 | "validateWorkerUsername": true, 20 | "tcpProxyProtocol": false, 21 | "banning": { 22 
// Returns a predicate that keeps every block when no address filter is given,
// otherwise keeps only blocks found by that worker address.
const isBlockAddress = (address) => (block) => {
  if (!address || address.length === 0) return true;
  return block.worker === address;
};

// Count blocks whose `time` is more recent than the given cutoff. Times are
// compared against Date.now()-based values, i.e. epoch milliseconds.
const blocksSinceX = (blocks, time) => blocks.filter((b) => b.time > time).length;

const MS_PER_HOUR = 60 * 60 * 1000;

// Collect pending + confirmed block data from pool stats, annotated with pool
// identity and confirmation status, optionally filtered by worker address.
// Returns { blocks, statistics } where statistics counts blocks found in the
// last hour / 24 hours / 7 days.
const collectBlocksData = ({ stats, address }) => {
  const poolEntries = {
    pool: stats.name,
    symbol: stats.symbol,
    algorithm: stats.algorithm
  };
  const confirms = stats.blocks.confirmations;
  const types = ['pending', 'confirmed'];
  const blocks = types.flatMap((type) => {
    const isPending = type === 'pending';
    return stats.blocks[type].map((p) => JSON.parse(p))
      .map((entries) => ({
        ...poolEntries,
        ...entries,
        confirmed: !isPending,
        // Pending blocks report their live confirmation count (default 1);
        // confirmed blocks are pinned at 100.
        confirmations: isPending ? (confirms[entries.blockHash] || 1) : 100
      }));
  }).filter(isBlockAddress(address));

  // Calculate blocks found in the last "X" hours/days. Use fixed offsets from
  // a single `now` snapshot: the previous implementation chained mutating Date
  // setters on one object, so each cutoff compounded the earlier shifts
  // ("last 7 days" actually meant "last 7 days + 1 day + 1 hour").
  const now = Date.now();
  const statistics = {
    lastHour: blocksSinceX(blocks, now - MS_PER_HOUR),
    last24Hours: blocksSinceX(blocks, now - 24 * MS_PER_HOUR),
    last7Days: blocksSinceX(blocks, now - 7 * 24 * MS_PER_HOUR),
  };

  return { blocks, statistics };
};

// Guarded so the module also loads under an ESM test harness; unchanged under
// Node's CommonJS require.
if (typeof module !== 'undefined' && module.exports) {
  module.exports = { collectBlocksData };
}
// Returns a predicate that accepts every payment when no address filter is
// given, otherwise keeps payments whose totals.amounts map lists the address.
const paymentMatches = (address) => (payment) => {
  if (!address || address.length === 0) return true;
  return Object.keys(payment.totals.amounts).includes(address);
};

// Collect Current Payments Data: parse each serialized payment record, merge
// in pool identity fields, and optionally filter by worker address.
const collectPaymentsData = ({ stats, address }) => {
  const base = {
    pool: stats.name,
    symbol: stats.symbol,
    algorithm: stats.algorithm
  };
  const keep = paymentMatches(address);
  const payments = [];
  for (const raw of stats.payments) {
    const record = { ...base, ...JSON.parse(raw) };
    if (keep(record)) payments.push(record);
  }
  return { payments };
};

// Guarded so the module also loads under an ESM test harness; unchanged under
// Node's CommonJS require.
if (typeof module !== 'undefined' && module.exports) {
  module.exports = { collectPaymentsData };
}
// Returns a predicate that accepts every worker when no address filter is
// given, otherwise keeps workers with a matching address.
const isWorkerAddress = (address) => (worker) => {
  if (!address || address.length === 0) return true;
  return worker.address === address;
};

// Collect Workers Data from Pool Stats and filter by worker address, if
// supplied. Shared workers are listed before solo workers.
const collectWorkersData = ({ stats, address }) => {
  const { workersShared, workersSolo } = stats.workers;

  // Flatten one [address, entry] pair into the API's worker record shape.
  const toRecord = ([addr, entry]) => ({
    pool: stats.name,
    symbol: stats.symbol,
    algorithm: stats.algorithm,
    address: addr,
    difficulty: entry.difficulty,
    validShares: entry.validShares,
    invalidShares: entry.invalidShares,
    hashrate: entry.hashrate,
    hashrateType: entry.hashrateType,
    soloMining: entry.soloMining,
  });

  const workers = Object.entries(workersShared)
    .concat(Object.entries(workersSolo))
    .map(toRecord)
    .filter(isWorkerAddress(address));
  return { workers };
};

// Guarded so the module also loads under an ESM test harness; unchanged under
// Node's CommonJS require.
if (typeof module !== 'undefined' && module.exports) {
  module.exports = { collectWorkersData };
}
// Express HTTP server constructor function, for managing the pool API.
// Instantiated as `new PoolApi(env)` where env supplies { baseLogger,
// poolConfig, portalConfig }. Exposes `app` (the express instance) and
// `listen()`, which warms the stats cache, starts a periodic refresh, and
// binds the HTTP listener.
const _PoolApi = (deps) => function (env) {
  const { express, apicache, compression, cors, PoolStats, v1 } = deps;
  const { baseLogger, poolConfig, portalConfig } = env;
  const logger = baseLogger.cached('Api', 'Server');
  const cache = apicache.middleware;
  let intervalId;

  const poolStats = new PoolStats({ baseLogger, poolConfig, portalConfig });

  // Response caching (2 minutes), gzip compression, and CORS headers apply to
  // every route mounted below.
  const app = express();
  app.use(express.json());
  app.use(cache('2 minutes'));
  app.use(compression());
  app.use(cors());

  // Configure and mount v1 routes under the '/api' prefix.
  v1({ logger, poolStats })({ app, prefix: '/api' });

  // Handle server errors: delegate to express defaults once headers are sent,
  // otherwise log the stack and return a generic 500 payload.
  app.use((err, req, res, next) => {
    if (res.headersSent) return next(err);
    logger.error(err.stack);
    return res.status(500).json({ error: 'API Server Error' });
  });

  // External properties
  this.app = app;
  // NOTE(review): `intervalId` is still undefined at this point; listen()
  // reassigns the local variable but never updates this property, so
  // `this.intervalId` stays undefined — confirm whether callers rely on it.
  this.intervalId = intervalId;
  this.listen = () => {
    const { port, host } = portalConfig.server;
    const { updateInterval } = portalConfig.stats;

    // Warm the PoolStats cache once before the refresh interval kicks in.
    poolStats.getGlobalStats();

    // Refresh global statistics every `updateInterval` seconds; errors are
    // logged and the interval keeps running.
    intervalId = setInterval(async () => {
      try {
        await poolStats.getGlobalStats();
      } catch (e) {
        logger.error(e.toString());
      }
    }, updateInterval * 1000);

    app.listen(port, () => {
      logger.debug(`API server listening on ${host}:${port}`);
    }).on('error', (err) => {
      // Stop the stats refresh loop if the HTTP listener fails.
      clearInterval(intervalId);
      logger.error(`API server error: ${err.toString()}`);
    });
  };
};

module.exports = {
  _defaultDeps,
  _PoolApi,
  PoolApi: _PoolApi(requireDeps(_defaultDeps))
};
| const defaultDeps = [ 4 | ['collectBlocksData', `${__dirname}/../collect_blocks_data`], 5 | ['utils', `${__dirname}/utils`, false] 6 | ]; 7 | 8 | // Blocks route handler 9 | const _blocks = (deps) => ({ poolStats }) => (req, res) => { 10 | const { collectBlocksData } = deps; 11 | const { isInvalidPool, invalidPoolError } = deps.utils; 12 | const { pool, worker } = req.query; 13 | const { stats } = poolStats; 14 | const isInvalid = isInvalidPool({ pool, stats }); 15 | const endpoint = 'blocks'; 16 | 17 | if (isInvalid) return invalidPoolError({ res, pool, endpoint }); 18 | 19 | const { blocks } = collectBlocksData({ pool, address: worker }); 20 | res.status(200); 21 | return res.json({ endpoint, blocks }); 22 | }; 23 | 24 | module.exports = { 25 | _blocks, 26 | blocks: _blocks(requireDeps(defaultDeps)) 27 | }; 28 | -------------------------------------------------------------------------------- /src/api/v1/combined.js: -------------------------------------------------------------------------------- 1 | const { requireDeps } = require('../../utils/require_deps'); 2 | 3 | const defaultDeps = [ 4 | ['collectBlocksData', `${__dirname}/../collect_blocks_data`], 5 | ['collectPaymentsData', `${__dirname}/../collect_payments_data`], 6 | ['collectWorkersData', `${__dirname}/../collect_workers_data`], 7 | ['utils', `${__dirname}/utils`, false], 8 | ]; 9 | 10 | // Combined stats route handler 11 | const _combined = (deps) => ({ poolStats }) => (req, res) => { 12 | const { collectBlocksData, collectPaymentsData, collectWorkersData } = deps; 13 | const { isInvalidPool, invalidPoolError, renderStatistics } = deps.utils; 14 | const { pool } = req.query; 15 | const { stats } = poolStats; 16 | const isInvalid = isInvalidPool({ pool, stats }); 17 | const endpoint = 'combined'; 18 | 19 | if (isInvalid) return invalidPoolError({ res, pool, endpoint }); 20 | 21 | const { blocks, statistics } = collectBlocksData({ stats }); 22 | const { payments } = collectPaymentsData({ stats }); 23 | 
const { workers } = collectWorkersData({ stats }); 24 | const combined = { 25 | ...renderStatistics({ stats, statistics }), 26 | history: stats.history, 27 | blocks, 28 | payments, 29 | workers 30 | }; 31 | 32 | res.status(200); 33 | return res.json({ endpoint, combined }); 34 | }; 35 | 36 | module.exports = { 37 | _combined, 38 | combined: _combined(requireDeps(defaultDeps)) 39 | }; 40 | -------------------------------------------------------------------------------- /src/api/v1/history.js: -------------------------------------------------------------------------------- 1 | const { requireDeps } = require('../../utils/require_deps'); 2 | 3 | const defaultDeps = [ 4 | ['utils', `${__dirname}/utils`, false] 5 | ]; 6 | 7 | // History route handler 8 | const _history = (deps) => ({ poolStats }) => (req, res) => { 9 | const { isInvalidPool, invalidPoolError } = deps.utils; 10 | const { pool } = req.query; 11 | const { stats } = poolStats; 12 | const isInvalid = isInvalidPool({ pool, stats }); 13 | const endpoint = 'history'; 14 | 15 | if (isInvalid) return invalidPoolError({ res, pool, endpoint }); 16 | 17 | res.status(200); 18 | return res.json({ endpoint, history: stats.history }); 19 | }; 20 | 21 | module.exports = { 22 | _history, 23 | history: _history(requireDeps(defaultDeps)), 24 | }; 25 | -------------------------------------------------------------------------------- /src/api/v1/index.js: -------------------------------------------------------------------------------- 1 | const { requireDeps } = require('../../utils/require_deps'); 2 | 3 | const _defaultDeps = [ 4 | ['express', 'express', false], 5 | ['blocks', `${__dirname}/blocks`], 6 | ['combined', `${__dirname}/combined`], 7 | ['history', `${__dirname}/history`], 8 | ['payments', `${__dirname}/payments`], 9 | ['statistics', `${__dirname}/statistics`], 10 | ['wallets', `${__dirname}/wallets`], 11 | ['workers', `${__dirname}/workers`], 12 | ]; 13 | 14 | // Defines the /v1 routes section of our API. 
This function takes a reference to the 15 | // express app, and a path prefix to mount itself, once all routes have been defined. 16 | // Also defines a 404 route, for this mounted area. 17 | const _v1 = (deps) => (env) => ({ app, prefix = '' }) => { 18 | const { express, blocks, combined, history, payments, 19 | statistics, wallets, workers } = deps; 20 | const router = express.Router(); 21 | 22 | router.get('/blocks', blocks(env)); 23 | router.get('/combined', combined(env)); 24 | router.get('/history', history(env)); 25 | router.get('/payments', payments(env)); 26 | router.get('/statistics', statistics(env)); 27 | router.get('/workers', workers(env)); 28 | 29 | router.get('/wallets', (req, res, next) => { 30 | // Directly resolve wallets returned promise, so we can correctly pass errors 31 | // to next(). 32 | Promise.resolve(wallets(env)(req, res)).catch(next); 33 | }); 34 | 35 | // 404 (Not Found) handler for this route section 36 | router.use((req, res) => ( 37 | res.status(404).json({ error: 'Invalid API route' }) 38 | )); 39 | 40 | // Mount the router entpoints to our app, at path. 
41 | app.use(`${prefix}/v1`, router); 42 | 43 | return router; 44 | }; 45 | 46 | module.exports = { 47 | _defaultDeps, 48 | _v1, 49 | v1: _v1(requireDeps(_defaultDeps)) 50 | }; 51 | -------------------------------------------------------------------------------- /src/api/v1/payments.js: -------------------------------------------------------------------------------- 1 | const { requireDeps } = require('../../utils/require_deps'); 2 | 3 | const defaultDeps = [ 4 | ['collectPaymentsData', `${__dirname}/../collect_payments_data`], 5 | ['utils', `${__dirname}/utils`, false] 6 | ]; 7 | 8 | // Payments route handler 9 | const _payments = (deps) => ({ poolStats }) => (req, res) => { 10 | const { collectPaymentsData } = deps; 11 | const { isInvalidPool, invalidPoolError } = deps.utils; 12 | const { pool, worker } = req.query; 13 | const { stats } = poolStats; 14 | const isInvalid = isInvalidPool({ pool, stats }); 15 | const endpoint = 'payments'; 16 | 17 | if (isInvalid) return invalidPoolError({ res, pool, endpoint }); 18 | 19 | const { payments } = collectPaymentsData({ pool, address: worker }); 20 | res.status(200); 21 | return res.json({ endpoint, payments }); 22 | }; 23 | 24 | module.exports = { 25 | _payments, 26 | payments: _payments(requireDeps(defaultDeps)) 27 | }; 28 | -------------------------------------------------------------------------------- /src/api/v1/statistics.js: -------------------------------------------------------------------------------- 1 | const { requireDeps } = require('../../utils/require_deps'); 2 | 3 | const defaultDeps = [ 4 | ['collectBlocksData', `${__dirname}/../collect_blocks_data`], 5 | ['utils', `${__dirname}/utils`, false], 6 | ]; 7 | 8 | // Combined stats route handler 9 | const _statistics = (deps) => ({ poolStats }) => (req, res) => { 10 | const { collectBlocksData } = deps; 11 | const { isInvalidPool, invalidPoolError, renderStatistics } = deps.utils; 12 | const { pool } = req.query; 13 | const { stats } = poolStats; 14 | const 
isInvalid = isInvalidPool({ pool, stats }); 15 | const endpoint = 'statistics'; 16 | 17 | if (isInvalid) return invalidPoolError({ res, pool, endpoint }); 18 | 19 | const { statistics } = collectBlocksData({ stats }); 20 | const data = renderStatistics({ stats, statistics }); 21 | 22 | res.status(200); 23 | return res.json({ endpoint, statistics: data }); 24 | }; 25 | 26 | module.exports = { 27 | _statistics, 28 | statistics: _statistics(requireDeps(defaultDeps)) 29 | }; 30 | -------------------------------------------------------------------------------- /src/api/v1/utils.js: -------------------------------------------------------------------------------- 1 | const isInvalidPool = ({ pool, stats }) => ( 2 | pool && pool.toLowerCase() !== stats.name.toLowerCase() 3 | ); 4 | 5 | const invalidPoolError = ({ res, pool, endpoint }) => { 6 | const error = `Invalid pool name: ${pool}`; 7 | res.status(400); 8 | return res.json({ endpoint, error }); 9 | }; 10 | 11 | const renderStatistics = ({ stats, statistics }) => ( 12 | { 13 | pool: stats.name, 14 | symbol: stats.symbol, 15 | algorithm: stats.algorithm, 16 | featured: stats.featured, 17 | ports: stats.ports, 18 | statistics: { 19 | hashrateType: stats.statistics.hashrateType, 20 | invalidShares: stats.statistics.invalidShares, 21 | lastPaid: stats.statistics.lastPaid, 22 | paymentFees: stats.fees, 23 | paymentTime: stats.statistics.paymentTime, 24 | paymentMinimum: stats.statistics.paymentMinimum, 25 | totalPaid: stats.statistics.totalPaid, 26 | validShares: stats.statistics.validShares, 27 | validBlocks: stats.statistics.validBlocks, 28 | blocks: statistics, 29 | hashrate: { 30 | hashrate: stats.hashrate.hashrate, 31 | hashrateShared: stats.hashrate.hashrateShared, 32 | hashrateSolo: stats.hashrate.hashrateSolo, 33 | }, 34 | workers: { 35 | workers: stats.workers.workersCount, 36 | workersShared: stats.workers.workersSharedCount, 37 | workersSolo: stats.workers.workersSoloCount, 38 | } 39 | } 40 | } 41 | ); 42 | 43 | 
module.exports = { isInvalidPool, invalidPoolError, renderStatistics }; 44 | -------------------------------------------------------------------------------- /src/api/v1/wallets.js: -------------------------------------------------------------------------------- 1 | const { requireDeps } = require('../../utils/require_deps'); 2 | 3 | const defaultDeps = [ 4 | ['collectBlocksData', `${__dirname}/../collect_blocks_data`], 5 | ['collectPaymentsData', `${__dirname}/../collect_payments_data`], 6 | ['collectWorkersData', `${__dirname}/../collect_workers_data`], 7 | ]; 8 | 9 | // /v1/wallets route handler. This function returns a promise due to the reliance 10 | // upon poolStats.getBalanceByAddress. 11 | const _wallets = (deps) => ({ poolStats }) => async (req, res) => { 12 | const endpoint = 'wallets'; 13 | const { worker } = req.query; 14 | 15 | if (!worker || worker.length === 0) { 16 | const error = 'Invalid "worker" parameter. Verify inputs and try again.'; 17 | res.status(400); 18 | return res.json({ endpoint, error }); 19 | } 20 | 21 | const { collectBlocksData, collectPaymentsData, collectWorkersData } = deps; 22 | const { stats } = poolStats; 23 | 24 | const { balances } = await poolStats.getBalanceByAddress(worker); 25 | const combined = Object.values(balances).reduce((acc, s) => acc + s, 0); 26 | 27 | const { blocks } = collectBlocksData({ stats, address: worker }); 28 | const { payments } = collectPaymentsData({ stats, address: worker }); 29 | const { workers } = collectWorkersData({ stats, address: worker }); 30 | 31 | const wallets = { 32 | pool: stats.name, 33 | symbol: stats.symbol, 34 | algorithm: stats.algorithm, 35 | worker, 36 | balance: balances.totalBalance.toFixed(8), 37 | immature: balances.totalImmature.toFixed(8), 38 | paid: balances.totalPaid.toFixed(8), 39 | unpaid: balances.totalUnpaid.toFixed(8), 40 | total: combined.toFixed(8), 41 | blocks, 42 | payments, 43 | workers 44 | }; 45 | 46 | res.status(200); 47 | return res.json({ endpoint, 
wallets }); 48 | }; 49 | 50 | module.exports = { 51 | _wallets, 52 | wallets: _wallets(requireDeps(defaultDeps)) 53 | }; 54 | -------------------------------------------------------------------------------- /src/api/v1/workers.js: -------------------------------------------------------------------------------- 1 | const { requireDeps } = require('../../utils/require_deps'); 2 | 3 | const defaultDeps = [ 4 | ['collectWorkersData', `${__dirname}/../collect_workers_data`], 5 | ['utils', `${__dirname}/utils`, false] 6 | ]; 7 | 8 | // /v1/workers route handler 9 | const _workers = (deps) => ({ poolStats }) => (req, res) => { 10 | const { collectWorkersData } = deps; 11 | const { isInvalidPool, invalidPoolError } = deps.utils; 12 | const { pool } = req.query; 13 | const { stats } = poolStats; 14 | const isInvalid = isInvalidPool({ pool, stats }); 15 | const endpoint = 'workers'; 16 | 17 | if (isInvalid) return invalidPoolError({ res, pool, endpoint }); 18 | 19 | const { workers } = collectWorkersData({ stats }); 20 | res.status(200); 21 | return res.json({ endpoint, workers }); 22 | }; 23 | 24 | module.exports = { 25 | _workers, 26 | workers: _workers(requireDeps(defaultDeps)) 27 | }; 28 | -------------------------------------------------------------------------------- /src/cnc/index.js: -------------------------------------------------------------------------------- 1 | const { requireDeps } = require('../utils/require_deps'); 2 | 3 | const defaultDeps = [ 4 | ['net', 'net', false], 5 | ['EventEmitter', 'events'] 6 | ]; 7 | 8 | // Pool Command/Control Server contructor. Outer function recevies the overridable 9 | // dependencies. Inner constructor encapsulates the TCP/IP server. We separate their 10 | // definitions so we can apply EventEmitter to the inner constructor. 
11 | const _poolCnC = ({ net, EventEmitter }) => { 12 | const cnc = function (port) { 13 | const _this = this; 14 | 15 | const server = net.createServer(); 16 | server.on('connection', (socket) => { 17 | let data = ''; 18 | // Receives text data that should be part (or all) of a command, in JSON format. 19 | socket.on('data', (d) => { 20 | // Capture the JSON fragment into our data buffer 21 | data += d; 22 | // Process the command JSON when we have all the fragments 23 | if (data.slice(-1) === '\n') { 24 | try { 25 | const { command, params, options } = JSON.parse(data); 26 | _this.emit('command', command, params, options, socket.end); 27 | } catch (e) { 28 | _this.emit('log', 'error', `CLI listener failed to parse message ${data}`); 29 | } 30 | } 31 | }); 32 | socket.on('end', () => {}); 33 | socket.on('error', () => {}); 34 | }); 35 | 36 | // Set the server to listen on the supplied port 37 | this.listen = () => { 38 | server.listen(port, '127.0.0.1', () => { 39 | _this.emit('log', 'debug', `Command/Control listening on port ${port}`); 40 | }); 41 | }; 42 | }; 43 | // Return our constructor function, after inheriting EventEmitter's prototype. 
44 | Object.setPrototypeOf(cnc.prototype, EventEmitter.prototype); 45 | return cnc; 46 | }; 47 | 48 | module.exports = { 49 | _poolCnC, 50 | PoolCnC: _poolCnC(requireDeps(defaultDeps)) 51 | }; 52 | -------------------------------------------------------------------------------- /src/daemon/index.js: -------------------------------------------------------------------------------- 1 | const { requireDeps } = require('../utils/require_deps'); 2 | 3 | const defaultDeps = [ 4 | ['promisify', 'util'], 5 | ['Interface', 'stratum-pool/scripts/daemon.js'], 6 | ['validateAddress', `${__dirname}/validate_address`] 7 | ]; 8 | 9 | const _daemonWrapper = (deps) => function (daemon, logger) { 10 | const { promisify, validateAddress } = deps; 11 | this.rpcBatch = promisify(daemon.batchCmd); 12 | 13 | daemon.cmd[promisify.custom] = (method, params, streamResults, returnRawData) => ( 14 | new Promise((resolve, reject) => { 15 | daemon.cmd(method, params, (result) => { 16 | if (result.error) { 17 | reject(new Error(result.error.message)); 18 | } else { 19 | resolve(result); 20 | } 21 | }, streamResults, returnRawData); 22 | }) 23 | ); 24 | this.rpcCmd = promisify(daemon.cmd); 25 | 26 | const _this = this; 27 | this.isValidAddress = (address) => validateAddress({ daemon: _this, logger })(address); 28 | }; 29 | 30 | // Module wrapper providing promisify-ed versions of DaemonIterface module. 31 | // Need for this should go away, once DaemonIterface is converted to natively 32 | // support Promises. 33 | const _daemon = (deps) => function ({ configs, logger }) { 34 | const { Interface } = deps; 35 | // Instantiate Stratum Coin Daemon Interface. 
36 | const callback = (severity, message) => { logger[severity](message); }; 37 | const daemon = new Interface(configs, callback); 38 | const DaemonWrapper = _daemonWrapper(deps); 39 | 40 | return new DaemonWrapper(daemon, logger); 41 | }; 42 | 43 | module.exports = { 44 | _daemonWrapper, 45 | DaemonWrapper: _daemonWrapper(requireDeps(defaultDeps)), 46 | _daemon, 47 | Daemon: _daemon(requireDeps(defaultDeps)), 48 | }; 49 | -------------------------------------------------------------------------------- /src/daemon/validate_address.js: -------------------------------------------------------------------------------- 1 | // This is a heavily modified version of payments/utils.validateAddress. We want to 2 | // ensure that the pool's wallet address (poolOptions.addresses.address) belongs to 3 | // our pool, since all payments are processed through it. This is done via the 4 | // https://bitcoincore.org/en/doc/0.20.0/rpc/wallet/getaddressinfo/ RPC command. 5 | 6 | const validateAddress = ({ daemon, logger }) => async (address) => ( 7 | daemon.rpcCmd('getaddressinfo', [address], true) 8 | .then((reply) => { 9 | const { response: { ismine } } = reply; 10 | return ismine; 11 | }) 12 | .catch((error) => { 13 | const msg = `validateAddress: getaddressinfo failed with: ${error.message}`; 14 | // TODO(rschifflin): Hack since the wrapped daemon doesn't expose its logger yet 15 | // We should be able to re-use the wrapped daemon's logger which always exists 16 | if (logger) { 17 | logger.error(msg); 18 | } else { 19 | console.log(`error: ${msg}`); 20 | } 21 | 22 | return false; 23 | }) 24 | ); 25 | 26 | module.exports = { validateAddress }; 27 | -------------------------------------------------------------------------------- /src/logger/colorize_log.js: -------------------------------------------------------------------------------- 1 | const { requireDeps } = require('../utils/require_deps'); 2 | 3 | const defaultDeps = [ 4 | ['colors', 'colors/safe', false], 5 | 
['severityColor', `${__dirname}/utils`] 6 | ]; 7 | 8 | // Uses the colors module to apply TTY-based colors and styles to the individual 9 | // log message items. Each item can have a separate color/style applied, with 10 | // ts (timestamp) and system getting their color from the log severity. 11 | const _colorizeLog = (deps) => (severity, items) => { 12 | const { colors, severityColor } = deps; 13 | const colorValue = severityColor(severity); 14 | 15 | colors.setTheme({ 16 | ts: [...colorValue, 'italic'], 17 | system: [...colorValue, 'italic'], 18 | component: 'italic', 19 | text: 'grey', 20 | subcat: ['grey', 'bold'], 21 | }); 22 | Object.entries(items).forEach(([key, value]) => { 23 | if (value) items[key] = colors[key](value); 24 | }); 25 | }; 26 | 27 | module.exports = { 28 | _colorizeLog, 29 | colorizeLog: _colorizeLog(requireDeps(defaultDeps)) 30 | }; 31 | -------------------------------------------------------------------------------- /src/logger/format_log.js: -------------------------------------------------------------------------------- 1 | const { requireDeps } = require('../utils/require_deps'); 2 | 3 | const defaultDeps = [ 4 | ['dateFormat', 'dateformat', false], 5 | ['colorizeLog', `${__dirname}/colorize_log`] 6 | ]; 7 | 8 | // Takes the log message items, colorizes them, if necessary, and then returns 9 | // a joined message string, representing the log's output. 10 | const _formatLog = (deps) => (env) => (args) => { 11 | const { dateFormat, colorizeLog } = deps; 12 | const { severityLevels, level, logColors } = env; 13 | const { severity, system, component } = args; 14 | let { text, subcat } = args; 15 | 16 | // Only report logs at or above the overall severity level 17 | if (severityLevels[severity] < level) return false; 18 | 19 | const ts = dateFormat(new Date(), 'yyyy-mm-dd HH:MM:ss'); 20 | 21 | // If the "subcat" argument is supplied, it, positionally, represents the "text" 22 | // argument, and vice-versa. This swaps their values. 
23 | if (subcat) [subcat, text] = [text, subcat]; 24 | 25 | // Decorate component and subcat (if present), prior to colorizing. 26 | const items = { 27 | ts, 28 | system, 29 | component: `[${component}]`, 30 | text 31 | }; 32 | if (subcat) items.subcat = `(${subcat})`; 33 | 34 | if (logColors) colorizeLog(severity, items); 35 | 36 | // Join the items into a single string, for output. Making sure to place the subcat 37 | // vaule *before* the text value, if present. 38 | let formatted = `${items.ts} ${items.system}\t${items.component} `; 39 | formatted += subcat ? `${items.subcat} ${items.text}` : `${items.text}`; 40 | 41 | return formatted; 42 | }; 43 | 44 | module.exports = { 45 | _formatLog, 46 | formatLog: _formatLog(requireDeps(defaultDeps)) 47 | }; 48 | -------------------------------------------------------------------------------- /src/logger/index.js: -------------------------------------------------------------------------------- 1 | const { requireDeps } = require('../utils/require_deps'); 2 | 3 | const defaultDeps = [ 4 | ['formatLog', `${__dirname}/format_log`], 5 | ['utils', `${__dirname}/utils`, false], 6 | ]; 7 | 8 | const severityLevels = { 9 | debug: 1, warning: 2, error: 3, special: 4 10 | }; 11 | 12 | // Pool Logger 13 | const _poolLogger = (deps) => function ({ logLevel, logColors, tty = true }) { 14 | const { formatLog, utils: { severityFunctions, cachedLogger } } = deps; 15 | const level = severityLevels[logLevel]; 16 | const env = { severityLevels, level, logColors }; 17 | 18 | // Base Logging Functon 19 | const log = (severity, system, component, text, subcat) => { 20 | const logString = formatLog(env)({ 21 | severity, system, component, text, subcat 22 | }); 23 | 24 | // Print Formatted Logger Message 25 | if (tty) console.log(logString); 26 | return (logString); 27 | }; 28 | 29 | // Add severity functions 30 | const _this = this; 31 | env.logger = _this; 32 | severityFunctions(env)(log); 33 | 34 | this.cached = (system, component, subcat) 
=> { 35 | const CachedLogger = cachedLogger(env); 36 | return new CachedLogger(system, component, subcat); 37 | }; 38 | }; 39 | 40 | // Export Pool Logger 41 | module.exports = { 42 | _poolLogger, 43 | PoolLogger: _poolLogger(requireDeps(defaultDeps)), 44 | }; 45 | -------------------------------------------------------------------------------- /src/logger/utils.js: -------------------------------------------------------------------------------- 1 | const severityColor = (name) => { 2 | const severityMap = { 3 | special: ['cyan', 'underline'], 4 | debug: ['green'], 5 | warning: ['yellow'], 6 | error: ['red'] 7 | }; 8 | 9 | const color = severityMap[name] || []; 10 | // TODO: I think we need a cleaner way to handle unknown log severity levels. 11 | // Possiby throwing an error, or if we managed to switch to a more Result-style, 12 | // this should probably return a Failure object. 13 | if (color.length === 0) { 14 | color.push('italic'); 15 | console.log(`Unknown severity: ${name}`); 16 | } 17 | return color; 18 | }; 19 | 20 | // Dynamically defines methods, on the supplied logger, corresponding to each 21 | // named severity level. That severity level is added to the front of the array 22 | // of arguments, and then sent to an "apply"-ed version of the base log function. 23 | const severityFunctions = ({ logger, severityLevels }) => (log) => { 24 | Object.keys(severityLevels).forEach((severity) => { 25 | logger[severity] = (...args) => { 26 | args.unshift(severity); 27 | return log.apply(this, args); 28 | }; 29 | }); 30 | }; 31 | 32 | // A constructor function that provides caching of the "system", "component", and 33 | // "subcat" arguemnts that are regulary repeated when a module is outputting log 34 | // messages. We initialize our constructor with the severiyLevels, and the logger 35 | // instance requesting the caching service. 
36 | const cachedLogger = (env) => function (system, component, subcat) { 37 | const _this = this; 38 | const { severityLevels, logger } = env; 39 | 40 | Object.keys(severityLevels).forEach((logType) => { 41 | _this[logType] = function (...args) { 42 | if (subcat) args = [subcat, ...args]; 43 | logger[logType](system, component, ...args); 44 | }; 45 | }); 46 | }; 47 | 48 | module.exports = { 49 | severityColor, 50 | severityFunctions, 51 | cachedLogger, 52 | }; 53 | -------------------------------------------------------------------------------- /src/payments/calculate_payments/send_payments.js: -------------------------------------------------------------------------------- 1 | const sendPayments = (env) => (args) => { 2 | const { daemon, logger, coinUtils, withholdPercent } = env; 3 | const { addressAccount, amountsRecords, totalSent } = args; 4 | const { satoshisToCoins } = coinUtils; 5 | 6 | // Send Payments Through Daemon 7 | const rpccallTracking = `sendmany "" ${JSON.stringify(amountsRecords)}`; 8 | const hasInsufficientFunds = (result) => result.error 9 | && result.error.code === -6 10 | && result.error.message 11 | && result.error.message.includes('insufficient funds'); 12 | 13 | return daemon.rpcCmd('sendmany', [addressAccount || '', amountsRecords], true, true).then((result) => { 14 | if (hasInsufficientFunds(result)) { 15 | const higherPercent = withholdPercent + 0.001; 16 | logger.warning(rpccallTracking); 17 | logger.warning(`Insufficient funds (??) 
for payments (${satoshisToCoins(totalSent)}), decreasing rewards by ${(higherPercent * 100).toFixed(1)}% and retrying`); 18 | // TODO: Use custom error class signalling insufficient funds instead 19 | throw new Error('Retry!'); 20 | } else if (result.error) { 21 | logger.warning(rpccallTracking); 22 | logger.error(`Error sending payments ${JSON.stringify(result.error)}`); 23 | throw new Error(`Error sending payments ${JSON.stringify(result.error)}`); 24 | } 25 | 26 | return result; 27 | }); 28 | }; 29 | 30 | module.exports = { sendPayments }; 31 | -------------------------------------------------------------------------------- /src/payments/coin_utils.js: -------------------------------------------------------------------------------- 1 | const { roundTo } = require('./utils.js'); 2 | 3 | function CoinUtils(coinInfo) { 4 | const { minPaymentSatoshis, magnitude, coinPrecision } = coinInfo; 5 | 6 | this.satoshisToCoins = (satoshis) => roundTo((satoshis / magnitude), coinPrecision); 7 | 8 | // Convert Coins to Satoshis 9 | this.coinsToSatoshies = (coins) => Math.round(coins * magnitude); 10 | 11 | // Round Coins to Nearest Value Given Precision 12 | this.coinsRound = (number) => roundTo(number, coinPrecision); 13 | 14 | this.minPaymentSatoshis = minPaymentSatoshis; 15 | } 16 | 17 | module.exports = { CoinUtils }; 18 | -------------------------------------------------------------------------------- /src/payments/index.js: -------------------------------------------------------------------------------- 1 | const { requireDeps } = require('../utils/require_deps'); 2 | 3 | const defaultDeps = [ 4 | ['initPayments', `${__dirname}/init_payments`] 5 | ]; 6 | 7 | // Uses initPayments to start the payment process background timers 8 | const _poolPayments = ({ initPayments }) => function (env) { 9 | const { baseLogger, poolConfig, portalConfig } = env; 10 | const coin = poolConfig.coin.name; 11 | const logger = baseLogger.cached('Payments', coin); 12 | 13 | this.start = () => ( 
14 | initPayments({ logger, poolConfig, portalConfig }) 15 | .then((didStart) => { 16 | if (!didStart) return; 17 | 18 | const { paymentInterval: secs, 19 | daemon: { user, host, port } } = poolConfig.paymentProcessing; 20 | const daemonUrl = `${user}@${host}:${port}`; 21 | const redisUrl = `${portalConfig.redis.host}:${portalConfig.redis.port}`; 22 | const msg = `Payment processing running every ${secs} second(s) with daemon (${ 23 | daemonUrl}) and redis (${redisUrl})`; 24 | logger.debug(msg); 25 | }) 26 | ); 27 | }; 28 | 29 | // Export Pool Payments 30 | module.exports = { 31 | _poolPayments, 32 | PoolPayments: _poolPayments(requireDeps(defaultDeps)) 33 | }; 34 | -------------------------------------------------------------------------------- /src/payments/init_payments.js: -------------------------------------------------------------------------------- 1 | const { requireDeps } = require('../utils/require_deps'); 2 | 3 | const defaultDeps = [ 4 | ['Daemon', `${__dirname}/../daemon/`], 5 | ['Redis', `${__dirname}/../redis`], 6 | ['CoinUtils', `${__dirname}/coin_utils`], 7 | ['startPayments', `${__dirname}/start_payments`] 8 | ]; 9 | 10 | // Intitialize our daemon/node connection and our Redis client connection. 11 | // Verify that our configured pool wallet address is "ours", which is required 12 | // to process payments. If valid, setup the payments process env, and start the 13 | // payment processes. 14 | const _initPayments = (deps) => (env) => { 15 | const { Daemon, Redis, CoinUtils, startPayments } = deps; 16 | const { logger, poolConfig, portalConfig } = env; 17 | 18 | const { address } = poolConfig.addresses; 19 | const { daemon: daemonConfig, minimumPayment } = poolConfig.paymentProcessing; 20 | 21 | // Connect to coin/node daemon(s) from daemon config. 
22 | const daemon = new Daemon({ configs: [daemonConfig], logger }); 23 | 24 | return daemon.isValidAddress(address).then((isValid) => { 25 | // If validating our wallet address fails, log the invalid 26 | // address and warn the user. 27 | if (!isValid) { 28 | const msg = `initPayments: Could not validate pool address "${address}" - Ensure the pool owns this address!`; 29 | logger.warning(msg); 30 | } 31 | 32 | const coin = poolConfig.coin.name; 33 | const redis = new Redis(portalConfig.redis); 34 | const coinPrecision = poolConfig.satoshiPrecision || 8; 35 | const magnitude = 10 ** coinPrecision; 36 | const minPaymentSatoshis = minimumPayment * magnitude; 37 | const coinUtils = new CoinUtils({ coinPrecision, magnitude, minPaymentSatoshis }); 38 | const startEnv = { 39 | logger, 40 | coin, 41 | client: redis.client, 42 | daemon, 43 | coinUtils, 44 | // Todo: poolOptions is a holdover from multi-coin days. This should be poolConfig, 45 | // but requires changing many downstream functions. That will happen later. 
46 | poolOptions: poolConfig 47 | }; 48 | 49 | startPayments(startEnv); 50 | return true; 51 | }); 52 | }; 53 | 54 | module.exports = { 55 | _initPayments, 56 | initPayments: _initPayments(requireDeps(defaultDeps)) 57 | }; 58 | -------------------------------------------------------------------------------- /src/payments/initialize_payouts/fetch_pending_blocks.js: -------------------------------------------------------------------------------- 1 | const { requireDeps } = require('../../utils/require_deps'); 2 | 3 | const defaultDeps = [ 4 | ['promiseCmd', `${__dirname}/../../utils/promised_redis`] 5 | ]; 6 | 7 | const _fetchPendingBlocks = ({ promiseCmd }) => async (env) => { 8 | const smembers = promiseCmd('smembers')(env); 9 | const failMsg = 'fetchUnpaidPending: Error retrieving pending blocks'; 10 | const pending = await smembers({ args: [`${env.coin}:blocks:pending`], failMsg }); 11 | 12 | // Pending is Redis SET of JSON encoded strings, containing block-finding shares. 13 | // Each share represents a payment "round". We parse the entries into an array of 14 | // "round" objects, preserving the original JSON in the "serialized" key. 15 | // TODO(bh): Why do we replace/rename the "worker" key to "workerAddress"? 
16 | const rounds = pending.map((entry) => { 17 | const round = JSON.parse(entry); 18 | round.serialized = entry; 19 | round.workerAddress = round.worker; 20 | delete round.worker; 21 | return round; 22 | }); 23 | // Sort Rounds by Block Height 24 | rounds.sort((a, b) => a.height - b.height); 25 | 26 | return rounds; 27 | }; 28 | 29 | module.exports = { 30 | _fetchPendingBlocks, 31 | fetchPendingBlocks: _fetchPendingBlocks(requireDeps(defaultDeps)) 32 | }; 33 | -------------------------------------------------------------------------------- /src/payments/initialize_payouts/fetch_unpaid_workers.js: -------------------------------------------------------------------------------- 1 | const { requireDeps } = require('../../utils/require_deps'); 2 | 3 | const defaultDeps = [ 4 | ['promiseCmd', `${__dirname}/../../utils/promised_redis`] 5 | ]; 6 | 7 | const _fetchUnpaidWorkers = ({ promiseCmd }) => async (env) => { 8 | const { coin, coinUtils } = env; 9 | const hgetall = promiseCmd('hgetall')(env); 10 | const failMsg = 'fetchUnpaidWorkers: Error retrieving unpaid payments'; 11 | const unpaid = await hgetall({ args: [`${coin}:payments:unpaid`], failMsg }); 12 | 13 | const workers = {}; 14 | Object.entries(unpaid || {}).forEach(([addr, bal]) => { 15 | workers[addr] = { balance: coinUtils.coinsToSatoshies(parseFloat(bal)) }; 16 | }); 17 | 18 | return workers; 19 | }; 20 | 21 | module.exports = { 22 | _fetchUnpaidWorkers, 23 | fetchUnpaidWorkers: _fetchUnpaidWorkers(requireDeps(defaultDeps)) 24 | }; 25 | -------------------------------------------------------------------------------- /src/payments/initialize_payouts/find_duplicate_blocks.js: -------------------------------------------------------------------------------- 1 | const findDuplicateBlocks = (blocks) => { 2 | // Count the occurences of each block height. They should be unique, within 3 | // a payment processing run. 
4 | const heights = {}; 5 | blocks.forEach((b) => { 6 | if (!heights[b.height]) heights[b.height] = 0; 7 | heights[b.height] += 1; 8 | }); 9 | 10 | // Use the height counts to flag "duplicate" blocks, while collecting them 11 | // into a separate array, for further processing. 12 | const duplicteBlocks = blocks.filter((b) => { 13 | b.duplicate = heights[b.height] > 1; 14 | return heights[b.height] > 1; 15 | }); 16 | 17 | return duplicteBlocks; 18 | }; 19 | 20 | module.exports = { findDuplicateBlocks }; 21 | -------------------------------------------------------------------------------- /src/payments/initialize_payouts/find_invalid_blocks.js: -------------------------------------------------------------------------------- 1 | // Take the set of duplicate round blocks and verify each of them against the 2 | // blockchain. If the blockchain verifies them as "invalid", prep those rounds 3 | // to be removed from payment processing. 4 | const findInvalidBlocks = (env) => async (dups) => { 5 | if (dups.length === 0) return []; 6 | 7 | const { daemon, logger } = env; 8 | const rpcArgs = dups.map((r) => ['getblock', [r.blockHash]]); 9 | // Retrieve the on-chain block data for each "duplicate" block's blockHash. 10 | const blocks = await daemon.rpcBatch(rpcArgs) 11 | .catch((dupError) => { 12 | const msg = `findInvalidBlocks: Error fetching 'getblock' data: ${dupError.message}`; 13 | logger.error(msg); 14 | throw new Error(msg); 15 | }); 16 | 17 | // Use the on-chain block data to determine which "duplicate" blocks are invalid. 
// Take the set of duplicate round blocks and verify each of them against the
// blockchain. A round is collected as invalid when the chain reports negative
// confirmations, or when its blockHash repeats an already-validated hash.
// Returns the serialized forms of the invalid rounds.
const findInvalidBlocks = (env) => async (dups) => {
  if (dups.length === 0) return [];

  const { daemon, logger } = env;
  const rpcArgs = dups.map((r) => ['getblock', [r.blockHash]]);
  // Retrieve the on-chain block data for each "duplicate" block's blockHash.
  const blocks = await daemon.rpcBatch(rpcArgs)
    .catch((dupError) => {
      const msg = `findInvalidBlocks: Error fetching 'getblock' data: ${dupError.message}`;
      logger.error(msg);
      throw new Error(msg);
    });

  // Use the on-chain block data to determine which "duplicate" blocks are invalid.
  const validBlocks = [];
  const invalidBlocks = [];
  blocks.forEach((block, i) => {
    if (block && block.result) {
      const r = block.result;
      const msg = `findInvalidBlocks: Duplicate block ${r.height}(${r.hash})`;
      if (r.confirmations < 0) {
        logger.warning(`${msg} is invalid.`);
        invalidBlocks.push(dups[i].serialized);
      } else if (validBlocks.includes(dups[i].blockHash)) {
        logger.warning(`${msg} is non-unique.`);
        invalidBlocks.push(dups[i].serialized);
      } else {
        validBlocks.push(dups[i].blockHash);
        // Fixed: a stray ')' previously produced "...(hash)) is *valid*."
        logger.debug(`${msg} is *valid*.`);
      }
    }
  });
  return invalidBlocks;
};
// Initial step of the paymentProcessing pipeline. Retrieves, and parses, the
// current set of payable entities, for the rest of the pipeline to process.
const _initializePayouts = (deps) => async (env) => {
  const { fetchUnpaidWorkers, fetchPendingBlocks, findDuplicateBlocks } = deps;
  const { findInvalidBlocks, moveInvalidBlocks } = deps;

  // Get the entries that represent potential payouts from the database: unpaid
  // worker balances, from previous paymentProcessing runs, and the current,
  // pending blocks that haven't been paid (aka. "rounds").
  const workers = await fetchUnpaidWorkers(env);
  const rounds = await fetchPendingBlocks(env);

  // Identify and invalidate any duplicate entries in "rounds".
  // Fixed: local-name typo `duplicteBlocks`.
  const duplicateBlocks = findDuplicateBlocks(rounds);
  const invalidBlocks = await findInvalidBlocks(env)(duplicateBlocks);
  await moveInvalidBlocks(env)(invalidBlocks);

  // Pass the workers and non-duplicate rounds to the next paymentProcessing step.
  const uniqueRounds = rounds.filter((round) => !round.duplicate);
  return { workers, rounds: uniqueRounds };
};
// Move each invalid block's serialized round out of the pending set and into
// the duplicate set, excluding it from payment processing. An empty input is
// logged (see TODO) and leaves the pending set untouched.
const moveInvalidBlocks = ({ client, logger, coin }) => async (invalidBlocks) => {
  if (invalidBlocks.length === 0) {
    // TODO(bh): Why is this being logged. We should expect no duplicate blocks.
    logger.error('Unable to detect invalid duplicate blocks, duplicate block payments on hold.');
    return;
  }

  const failMsg = 'moveInvalidBlocks: Error moving invalid blocks.';
  const commands = invalidBlocks.map((serialized) => [
    'smove', `${coin}:blocks:pending`, `${coin}:blocks:duplicate`, serialized
  ]);

  await promiseExec({ client, logger })({ commands, failMsg });
};
// Manage Sent Payments: assemble the final batch of redis commands produced by
// the payout-balance and share updates, then submit them to the database.
const baseManageSentPayments = (deps) => (env) => async (args) => {
  const { updateWorkerPayoutBalances, updateWorkerShares, sendRedisCommands } = deps;
  const { paymentMode, coin, lastInterval } = env;
  const { workers, rounds, paymentsUpdate } = args;

  const [
    totalPaid,
    workerPayoutsCommand,
    balanceUpdateCommands,
    immatureUpdateCommands
  ] = updateWorkerPayoutBalances(env)(workers);

  const [
    movePendingCommands,
    orphanMergeCommands,
    roundsToDelete,
    confirmsUpdate,
    confirmsToDelete
  ] = updateWorkerShares(env)(rounds);

  // Conditional command groups: only emitted when there is something to write.
  const deleteCmds = roundsToDelete.length > 0
    ? [['del'].concat(roundsToDelete)]
    : [];

  const totalPaidCmds = totalPaid !== 0
    ? [['hincrbyfloat', `${coin}:statistics:basic`, 'totalPaid', totalPaid]]
    : [];

  const lastPaidCmds = (paymentMode === 'start' || paymentMode === 'payment')
    ? [['hset', `${coin}:statistics:basic`, 'lastPaid', lastInterval]]
    : [];

  // Update Main Database
  const finalRedisCommands = [
    ...movePendingCommands, ...orphanMergeCommands, ...immatureUpdateCommands,
    ...balanceUpdateCommands, ...workerPayoutsCommand, ...deleteCmds, ...confirmsUpdate,
    ...confirmsToDelete, ...paymentsUpdate, ...totalPaidCmds, ...lastPaidCmds
  ];

  return sendRedisCommands(env)(finalRedisCommands);
};
// Submit the final batch of payment redis commands. On redis failure, stop the
// payment intervals (preventing possible double-payouts) and persist the
// unapplied commands to `<coin>_finalRedisCommands.txt` for manual replay;
// the redis error is rethrown either way. Returns null when there is nothing
// to send.
const sendRedisCommands = (env) => async (commands) => {
  if (commands.length === 0) { return null; }
  const { coin, logger, signalStop, client, fs } = env;

  const redisEnv = { client, logger };
  const redisArgs = {
    commands,
    failMsg: `Payments sent but could not update redis. 
    Disabling payment processing to prevent possible double-payouts. The redis commands in 
    ${coin}_finalRedisCommands.txt must be ran manually`
  };

  try {
    await promiseExec(redisEnv)(redisArgs);
  } catch (redisErr) {
    // TODO: This should instead return a semantic error that bubbles up
    // to the interval to stop rescheduling
    signalStop(); // Single cb to clear both intervals
    const fsWriteFile = promisify(fs.writeFile).bind(fs);
    try {
      await fsWriteFile(`${coin}_finalRedisCommands.txt`, JSON.stringify(commands));
    } catch (fsErr) {
      const fsErrString = `${fsErr}: Could not write finalRedisCommands.txt, you are fucked.`;
      logger.error(fsErrString);
      throw new Error(fsErrString);
    }
    throw redisErr;
  }
  return logger.debug('Finished sending all confirmed payments to users');
};
// Build the redis commands that record worker payouts and balance changes for
// this run. Returns [totalPaid, workerPayoutsCommand, balanceUpdateCommands,
// immatureUpdateCommands]; amounts written to redis are in COINS, while
// worker.balanceChange / worker.reward / worker.immature arrive in SATOSHIS.
const updateWorkerPayoutBalances = (env) => (workers) => {
  const { paymentMode, coin, coinUtils } = env;
  const { coinsRound, satoshisToCoins } = coinUtils;

  let totalPaid = 0;
  const workerPayoutsCommand = [];
  const balanceUpdateCommands = [];
  const immatureUpdateCommands = [];

  Object.entries(workers).forEach(([address, worker]) => {
    if (paymentMode === 'payment') {
      // Real payout run: record balance deltas and amounts actually sent.
      if (worker.balanceChange !== 0) {
        balanceUpdateCommands.push([
          'hincrbyfloat',
          `${coin}:payments:unpaid`,
          address,
          satoshisToCoins(worker.balanceChange),
        ]);
      }
      if ((worker.sent || 0) > 0) {
        workerPayoutsCommand.push(['hincrbyfloat', `${coin}:payments:payouts`, address, coinsRound(worker.sent)]);
        totalPaid = coinsRound(totalPaid + worker.sent);
      }
    } else if ((worker.reward || 0) > 0) {
      // Non-payment runs: snapshot the would-be reward as the worker balance.
      balanceUpdateCommands.push(['hset', `${coin}:payments:balances`, address, coinsRound(satoshisToCoins(worker.reward))]);
    } else {
      balanceUpdateCommands.push(['hset', `${coin}:payments:balances`, address, 0]);
    }

    // Immature rewards are always re-written, zeroing any stale entries.
    if ((worker.immature || 0) > 0) {
      immatureUpdateCommands.push(['hset', `${coin}:payments:immature`, address, coinsRound(satoshisToCoins(worker.immature))]);
    } else {
      immatureUpdateCommands.push(['hset', `${coin}:payments:immature`, address, 0]);
    }
  });

  return [totalPaid, workerPayoutsCommand, balanceUpdateCommands, immatureUpdateCommands];
};
// Build the redis commands that relocate finished rounds and reconcile worker
// shares, dispatching on round.category. Returns [movePendingCommands,
// orphanMergeCommands, roundsToDelete, confirmsUpdate, confirmsToDelete].
// Rounds with an unknown category are left untouched.
const updateWorkerShares = (env) => (rounds) => {
  const { logger, paymentMode, coin } = env;
  const movePendingCommands = [];
  const orphanMergeCommands = [];
  const roundsToDelete = [];
  const confirmsUpdate = [];
  const confirmsToDelete = [];

  // Credit an orphaned round's shares back to the current round, so the work
  // is not lost.
  const moveSharesToCurrent = (round) => {
    const { workerShares } = round;
    if (workerShares == null) return;
    logger.warning(`Moving shares from orphaned block ${round.height} to current round.`);
    Object.entries(workerShares).forEach(([worker, shares]) => {
      orphanMergeCommands.push(['hincrby', `${coin}:shares:roundCurrent`,
        worker, shares]);
    });
  };

  // 'kicked'/'orphan': drop the round, optionally recycling its shares.
  const handleKicked = (r) => {
    confirmsToDelete.push(['hdel', `${coin}:blocks:pendingConfirms`, r.blockHash]);
    movePendingCommands.push(['smove', `${coin}:blocks:pending`, `${coin}:blocks:kicked`, r.serialized]);
    if (r.canDeleteShares) {
      moveSharesToCurrent(r);
      roundsToDelete.push(`${coin}:shares:round${r.height}`);
      roundsToDelete.push(`${coin}:times:times${r.height}`);
    }
  };

  // 'immature': refresh the stored confirmation count.
  const handleImmature = (r) => {
    confirmsUpdate.push(['hset', `${coin}:blocks:pendingConfirms`, r.blockHash, (r.confirmations || 0)]);
  };

  // 'generate': on a real payment run, confirm the round and clean up.
  const handleGenerate = (r) => {
    if (paymentMode !== 'payment') return;
    confirmsToDelete.push(['hdel', `${coin}:blocks:pendingConfirms`, r.blockHash]);
    movePendingCommands.push(['smove', `${coin}:blocks:pending`, `${coin}:blocks:confirmed`, r.serialized]);
    roundsToDelete.push(`${coin}:shares:round${r.height}`);
    roundsToDelete.push(`${coin}:times:times${r.height}`);
  };

  const handlers = {
    kicked: handleKicked,
    orphan: handleKicked,
    immature: handleImmature,
    generate: handleGenerate
  };

  Object.values(rounds).forEach((r) => {
    const handler = handlers[r.category];
    if (handler) handler(r);
  });

  return [
    movePendingCommands,
    orphanMergeCommands,
    roundsToDelete,
    confirmsUpdate,
    confirmsToDelete
  ];
};
// Periodic background process that runs all share and worker data through the
// payment pipeline: initialize payouts, update rounds, process share blocks,
// calculate and send payments, then repair any failed payments.
const baseProcessPayments = (deps) => (env) => async (paymentMode, lastInterval) => {
  const { initializePayouts, updateRounds, processShareBlocks } = deps;
  const { calculatePayments, manageSentPayments, fixFailedPayments } = deps;

  // Each stage sees the env plus the current run's mode and timestamp.
  const subEnv = { ...env, paymentMode, lastInterval };

  const { workers, rounds } = await initializePayouts(subEnv);
  await updateRounds(subEnv)(rounds);
  await processShareBlocks(subEnv)({ workers, rounds });
  const paymentsResult = await calculatePayments(subEnv)({ workers, rounds });
  const sentResult = await manageSentPayments(subEnv)(paymentsResult);
  return fixFailedPayments(subEnv)(sentResult);
};
// Verify the pool wallet holds enough unspent funds to cover everything owed
// to workers and confirmed rounds. If funds are insufficient (or nothing is
// owed), every 'generate' round is demoted to 'immature', deferring payment.
// NOTE: mutates round.category in place; all sums are in satoshis.
const _checkPaymentFunds = (deps) => (env) => async ({ workers, rounds }) => {
  const { listUnspent } = deps;
  const { logger, feeSatoshi } = env;
  const { satoshisToCoins, coinsToSatoshies } = env.coinUtils;

  // Total "owed" payouts: confirmed round rewards (net of the per-round fee)
  // plus carried-over worker balances.
  const roundsOwed = rounds
    .filter((r) => r.category === 'generate')
    .reduce((sum, r) => sum + coinsToSatoshies(r.reward) - feeSatoshi, 0);
  const workersOwed = Object.values(workers)
    .reduce((sum, w) => sum + (w.balance || 0), 0);
  const owed = roundsOwed + workersOwed;

  // Check the owed amount against the unspent wallet balance.
  let balance;
  try {
    balance = await listUnspent(env)(false);
  } catch (e) {
    logger.error('Error checking pool balance before processing payments.');
    throw e;
  }

  const insufficient = balance < owed;
  if (insufficient) {
    const funds = satoshisToCoins(balance);
    const amount = satoshisToCoins(owed);
    logger.error(`Insufficient funds: (${funds}) < (${amount}); possibly waiting for txs.`);
  }

  // If we can't pay out now, defer by marking confirmed rounds immature.
  if ((owed <= 0) || insufficient) {
    rounds.forEach((r) => {
      if (r.category === 'generate') r.category = 'immature';
    });
  }
};
// Calculates the adjusted time each worker is credited with for the round,
// capped at the round's maxTime.
const adjustRoundTimes = (times, maxTime) => {
  const workerTimes = {};
  Object.entries(times).forEach(([addr, rawTime]) => {
    const time = parseFloat(rawTime);
    if (addr in workerTimes) {
      // Duplicate address entry: blend the two observations.
      // NOTE(review): unclear why this offset is computed — a duplicate key
      // should not be possible in `times` (original author's note); confirm
      // before relying on this branch.
      if (workerTimes[addr] < time) {
        workerTimes[addr] = workerTimes[addr] * 0.5 + time;
      } else {
        workerTimes[addr] += time * 0.5;
      }
    } else {
      workerTimes[addr] = time;
    }
    // Never credit more than the round's maximum time.
    if (workerTimes[addr] > maxTime) workerTimes[addr] = maxTime;
  });
  return workerTimes;
};
// For "shared" worker shares, compute the % of the round reward each worker
// receives, based on their portion of contributed shares. A computed share of
// more than 100% is logged as an error and that worker receives no payout.
// NOTE: worker.reward is in SATOSHIS; record.amounts is in COINS.
const _computeSharedPayouts = ({ findOrNew }) => (env) => (args) => {
  const { logger, coinUtils } = env;
  const { workers, shared, round, totalShares, reward } = args;

  Object.keys(shared).forEach((addr) => {
    const worker = findOrNew(workers, addr);
    const percent = parseFloat(worker.roundShares) / totalShares;

    if (percent > 1.0) {
      logger.error(`${addr} share % > 100% in round: ${round.height} blockHash: ${round.blockHash}`);
      return;
    }

    const rewardInSatoshis = Math.round(reward * percent);
    worker.records[round.height].amounts = coinUtils.satoshisToCoins(rewardInSatoshis);
    worker.reward = (worker.reward || 0) + rewardInSatoshis;
  });
};
// Compute payouts for a confirmed ('generate') block. Solo-mined rounds assign
// the full fee-adjusted reward to the mining worker; shared rounds split the
// reward across contributors by adjusted share counts.
// NOTE: worker.reward is in SATOSHIS; record.amounts is in COINS.
const _confirmedBlock = (deps) => (env) => (args) => {
  const { findOrNew, adjustRoundTimes, sharedRoundTotal, computeSharedPayouts } = deps;
  const { coinUtils, feeSatoshi } = env;
  const { coinsToSatoshies, satoshisToCoins } = coinUtils;
  const { workers, round, shared, solo, times, maxTime } = args;

  const reward = Math.round(coinsToSatoshies(round.reward) - feeSatoshi);

  if (round.soloMined) {
    // Solo round: the single worker receives the entire reward.
    const addr = round.workerAddress;
    const worker = findOrNew(workers, addr);
    const shares = parseFloat(solo[addr] || 0);
    worker.records = worker.records || {};
    worker.records[round.height] = { shares, amounts: satoshisToCoins(reward), times: 1 };
    worker.roundShares = shares;
    worker.totalShares = parseFloat(worker.totalShares || 0) + shares;
    worker.reward = (worker.reward || 0) + reward;
    return;
  }

  // Shared round: adjust per-worker times, total the adjusted shares, then
  // split the reward proportionally.
  const workerTimes = adjustRoundTimes(times, maxTime);
  const totalShares = sharedRoundTotal(env)({
    workers, round, shared, times: workerTimes, maxTime
  });
  computeSharedPayouts(env)({
    workers, round, shared, totalShares, reward
  });
};
// Adjust the share count credited to each worker in a confirmed shared round,
// deducting "lost" shares for partial participation, and return the round's
// total adjusted shares. A worker credited with more than 100% of the round
// time is logged as an error and receives zero shares.
const _sharedRoundTotal = (deps) => (env) => (args) => {
  const { findOrNew, lostShares } = deps;
  const { logger } = env;
  const { workers, round, shared, times, maxTime } = args;

  return Object.entries(shared).reduce((totalShares, [addr, rawShares]) => {
    const worker = findOrNew(workers, addr);
    const shares = parseFloat(rawShares || 0);
    worker.records = worker.records || {};
    const record = { shares, amounts: 0, times: 0 };
    worker.records[round.height] = record;

    // lostShares also fills record.times with the credited time fraction.
    const lostCount = lostShares(shares, times[addr], maxTime, record);
    let shareCount = maxTime > 0 ? Math.max(shares - lostCount, 0) : 0;

    // record.times is the fraction of the round this worker is credited for;
    // over 100% indicates corrupt data, so the worker gets zero shares.
    if (record.times > 1.0) {
      logger.error(`Worker: ${addr} time share > 1.0 in round: ${
        round.height} blockHash: ${round.blockHash}`);
      shareCount = 0;
    }

    worker.roundShares = shareCount;
    worker.totalShares = parseFloat(worker.totalShares || 0) + shareCount;
    return totalShares + shareCount;
  }, 0);
};
// For one round's share hash, split per-worker difficulty totals into solo and
// shared buckets. Keys are JSON share records; values are difficulty strings.
// Malformed entries raise a descriptive error.
const accumulateRoundDiffs = (round, logger) => {
  const data = { soloDiffs: {}, sharedDiffs: {} };
  try {
    Object.entries(round).forEach(([json, value]) => {
      const { worker, soloMined } = JSON.parse(json);
      const bucket = soloMined ? data.soloDiffs : data.sharedDiffs;
      bucket[worker] = (bucket[worker] || 0) + parseFloat(value);
    });
  } catch (e) {
    const msg = `fetchRoundShares: Invalid round shares format ${e}`;
    logger.error(msg);
    throw new Error(msg);
  }
  return data;
};

// Fetch each round's share hash from redis and accumulate per-worker
// difficulties, returning parallel arrays of solo and shared diffs (one entry
// per round, in the same order).
const _fetchRoundShares = ({ promiseExec }) => (env) => async (rounds) => {
  const { client, logger, coin } = env;
  const commands = rounds.map((r) => ['hgetall', `${coin}:shares:round${r.height}`]);
  const failMsg = 'fetchRoundShares: Error retrieving round shares';
  const roundShares = await promiseExec({ client, logger })({ commands, failMsg });

  const solo = [];
  const shared = [];
  roundShares.forEach((round) => {
    const { soloDiffs, sharedDiffs } = accumulateRoundDiffs(round, logger);
    solo.push(soloDiffs);
    shared.push(sharedDiffs);
  });
  return { solo, shared };
};
// Redis stores all numeric hash values as strings; convert every time entry to
// a float in place. A null/malformed entry raises a descriptive error.
const parseTimes = (times, logger) => {
  try {
    times.forEach((entry) => {
      Object.keys(entry).forEach((key) => { entry[key] = parseFloat(entry[key]); });
    });
  } catch (e) {
    const msg = `fetchRoundTimes: Invalid round times format ${e}`;
    logger.error(msg);
    throw new Error(msg);
  }
};

// Fetch each round's worker-time hash from redis, with values parsed to
// floats for downstream consumers.
const _fetchRoundTimes = ({ promiseExec }) => (env) => async (rounds) => {
  const { client, logger, coin } = env;
  const commands = rounds.map((r) => ['hgetall', `${coin}:times:times${r.height}`]);
  const failMsg = 'fetchRoundTimes: Error retrieving round times';

  const roundTimes = await promiseExec({ client, logger })({ commands, failMsg });
  parseTimes(roundTimes, logger);
  return roundTimes;
};
// Calculate the immature (unconfirmed) block reward owed per worker.
// Solo-mined blocks credit the full fee-adjusted reward to the one worker;
// shared blocks split it proportionally to adjusted round shares.
// NOTE: worker.immature is in SATOSHIS.
const _immatureBlock = (deps) => (env) => (args) => {
  const { findOrNew, sharedRoundTotal } = deps;
  const { coinUtils, feeSatoshi } = env;
  const { workers, round, shared, solo, times, maxTime } = args;
  const reward = Math.round(coinUtils.coinsToSatoshies(round.reward) - feeSatoshi);

  if (round.soloMined) {
    const worker = findOrNew(workers, round.workerAddress);
    worker.roundShares = parseFloat(solo[round.workerAddress] || 0);
    worker.immature = (worker.immature || 0) + reward;
    return;
  }

  // Shared round: total the adjusted shares, then credit each contributor
  // with their proportion of the reward.
  const totalShared = sharedRoundTotal({
    workers, shared, times, maxTime
  });
  Object.keys(shared).forEach((addr) => {
    const worker = workers[addr];
    const percent = worker.roundShares / totalShared;
    worker.immature = (worker.immature || 0) + Math.round(reward * percent);
  });
};
// Sum each shared worker's adjusted share count for an immature round,
// deducting "lost" shares for partial participation time, and stash the
// per-worker adjusted count on worker.roundShares.
const _sharedRoundTotal = (deps) => ({ workers, shared, times, maxTime }) => {
  const { findOrNew, lostShares } = deps;

  let totalShares = 0;
  Object.entries(shared).forEach(([addr, rawShares]) => {
    const shares = parseFloat(rawShares || 0);
    const worker = findOrNew(workers, addr);

    // Reduce this worker's shares by their "lost" portion; a round with no
    // recorded time (maxTime <= 0) credits zero shares.
    const lostCount = lostShares(shares, times[addr], maxTime);
    const shareCount = maxTime > 0 ? Math.max(shares - lostCount, 0) : 0;

    worker.roundShares = shareCount;
    totalShares += shareCount;
  });
  return totalShares;
};
// Calculate the number of "lost" shares when a worker has a recorded start or
// stop timestamp within the current block's round: workers credited for less
// than 51% of the round time forfeit the uncredited portion of their shares.
// When `record` is provided, its `times` field is set to the credited fraction.
const _lostShares = ({ roundTo }) => (shares, time, maxTime, record) => {
  const workerTime = parseFloat(time || 0);
  if (workerTime === 0) return 0;

  const timePeriod = roundTo(workerTime / maxTime, 2);
  if (record) record.times = timePeriod;
  if (timePeriod <= 0 || timePeriod >= 0.51) return 0;

  return shares - shares * timePeriod;
};
// Log a manual-payout notice for one round. Currently logged as an error.
// TODO: upgrade this to a proper notification on an external channel.
const logManualRound = (logger) => (round) => {
  const msg = `No worker shares for round: ${round.height} blockHash: ${round.blockHash}.`;
  logger.error(`${msg} Manual payout required.`);
};

// Move the pending blocks of every round that recorded no shared or solo work
// into the manual set for operator review. After the redis commands resolve,
// an error message is logged for each block moved.
const _moveManualRounds = ({ promiseExec }) => (env) => async (rounds) => {
  const { client, logger, coin } = env;
  const failMsg = 'moveManualRounds: Error';
  const commands = rounds.map((round) => [
    'smove', `${coin}:blocks:pending`, `${coin}:blocks:manual`, round.serialized
  ]);

  const result = await promiseExec({ client, logger })({ commands, failMsg });
  rounds.forEach(logManualRound(logger));
  return result;
};
// Delegate 'immature' and 'generate' category rounds to their respective
// sub-functions, to calculate worker payout amounts. 'kicked' and 'orphan'
// rounds require no payout work, so they map to a no-op.
const _processAutoRounds = ({ confirmedBlock, immatureBlock }) => (env) => (args) => {
  const { workers, rounds, times, solo, shared } = args;
  const noop = () => {};
  const processors = {
    generate: confirmedBlock(env),
    immature: immatureBlock(env),
    kicked: noop,
    orphan: noop
  };

  rounds.forEach((round, i) => {
    const roundTimes = times[i];
    // Longest individual work time for this round; used to scale payouts.
    const maxTime = Math.max(...Object.values(roundTimes).map((t) => parseFloat(t)));
    processors[round.category]({
      workers, round, shared: shared[i], solo: solo[i], times: roundTimes, maxTime
    });
  });
};
// Extracted from initPayments. Using processPayments, we start the async timers
// that manage payment processing: a one-shot 'start' pass shortly after boot,
// plus recurring 'check' and 'payment' passes. Returns all three timer ids.
const _startPayments = ({ processPayments }) => (env) => {
  const { logger, poolOptions } = env;
  const { checkInterval, paymentInterval } = poolOptions.paymentProcessing;

  let checkTimerId;
  let paymentTimerId;
  // Passed into processPayments so it can cancel the recurring timers.
  const signalStop = () => {
    clearInterval(checkTimerId);
    clearInterval(paymentTimerId);
  };

  const process = processPayments({ ...env, signalStop });

  // Builds a timer callback that runs one payment pass of the given type,
  // logging (rather than propagating) any failure.
  const runPass = (type) => async () => {
    try {
      await process(type, Date.now());
    } catch (e) {
      logger.error(e.toString());
    }
  };

  const startTimerId = setTimeout(runPass('start'), 100);
  checkTimerId = setInterval(runPass('check'), checkInterval * 1000);
  paymentTimerId = setInterval(runPass('payment'), paymentInterval * 1000);

  return { startTimerId, checkTimerId, paymentTimerId };
};
// Simple wrapper to handle retrieving block transaction details from the coin
// daemon: one 'gettransaction' call per round, issued as a single RPC batch.
// Logs and rethrows any batch failure.
const fetchTransactions = (env) => async (rounds) => {
  const { daemon, logger } = env;
  const batch = rounds.map(({ txHash }) => ['gettransaction', [txHash]]);
  try {
    return await daemon.rpcBatch(batch);
  } catch (err) {
    logger.error(`Error with batch gettransaction call ${JSON.stringify(err)}`);
    throw err;
  }
};
// Update each pending round with fresh transaction data from the coin daemon,
// then mark which 'orphan'/'kicked' rounds may have their shares deleted.
// Note: convertTransaction is applied to every transaction *before* any round
// is updated, preserving a two-phase build-then-apply ordering.
const _updateRounds = (deps) => (env) => async (rounds) => {
  const { fetchTransactions, convertTransaction, flagDeletableRounds } = deps;

  // One 'gettransaction' result per round, in round order.
  const transactions = await fetchTransactions(env)(rounds);
  // Build an updater per transaction, then apply each to its matching round.
  const updaters = transactions.map((tx) => convertTransaction(env)(tx));
  updaters.forEach((updater, idx) => updater(rounds[idx]));

  // Sets round.canDeleteShares on deletable-category rounds (mutates in place).
  flagDeletableRounds(rounds);
};
// Queries the redis server ('INFO server') and extracts its version number as
// a float (major.minor only, e.g. "6.2.14" -> 6.2). Returns false when exactly
// one 'redis_version:' line cannot be found in the response.
const _fetchVersionNum = ({ promiseCmd }) => async ({ client, logger }) => {
  const runInfo = promiseCmd('info')({ client, logger });
  const response = await runInfo({ args: ['server'] });

  const matches = response.split('\r\n').filter((line) => line.match(/^redis_version:/));
  if (matches.length !== 1) return false;

  const [, rawVersion] = matches[0].split(':');
  return parseFloat(rawVersion);
};
logger.debug(`Share processing setup with redis (${config.host}:${config.port})`); 14 | }).on('error', (error) => { 15 | logger.error(`Redis client had an error: ${error.toString()}`); 16 | }).on('end', () => { 17 | logger.error('Connection to redis database has been ended'); 18 | }); 19 | return client; 20 | }; 21 | 22 | // Wrapper function/constructor for our Redis client instance. Provides both a "plain" 23 | // client, as well as one with several event listeners that output log messages. 24 | const _redis = (deps) => function (config) { 25 | const { clients, isValidVersion } = deps; 26 | 27 | // Assign our client property to either a standard, or cluster-ed, setup. 28 | this.client = config.cluster ? clients.cluster(config) : clients.standard(config); 29 | 30 | const _this = this; 31 | 32 | // Attaches the "standard" event listeners to our initialized client. 33 | // To maintain existing compatibility, we hardcode the 2.6 minumumum version. 34 | // Throws errors if the Redis version is invalid. 
// Validates that the connected redis server is at least version `num`.
// Resolves true on success; logs and throws when the version lookup fails or
// the detected version is missing/too old.
const _isValidVersion = ({ fetchVersionNum }) => ({ client, logger }) => async (num) => {
  // Log, then rethrow, with a uniform message shape.
  const fail = (msg) => {
    logger.error(msg);
    throw new Error(msg);
  };

  let version;
  try {
    version = await fetchVersionNum({ client, logger });
  } catch (err) {
    fail(`Redis version check failed: ${err.toString()}`);
  }

  // fetchVersionNum returns false when no version line was found.
  if (!version || version < num) {
    fail(`Redis version invalid: v${num} or later required to operate pool.`);
  }
  return true;
};
// PoolShares.handleShare() process function. Retrieves all existing times and
// data shares from redis in a single transaction.
const _fetchTimesShares = ({ promiseExec }) => async ({ client, logger, coin }) => {
  const commands = [
    ['hgetall', `${coin}:times:timesStart`],
    ['hgetall', `${coin}:times:timesShare`],
    ['hgetall', `${coin}:shares:roundCurrent`],
    ['hgetall', `${coin}:times:timesCurrent`],
  ];
  const failMsg = 'fetchTimesShares: Error retrieving times/shares data';

  const results = await promiseExec({ client, logger })({ commands, failMsg });
  // Redis returns null for missing hashes; normalize those to empty objects.
  const [startTimes, shareTimes, currentShares, currentTimes] = results
    .map((entry) => (entry === null ? {} : entry));

  return {
    startTimes, shareTimes, currentShares, currentTimes
  };
};
// Process a single submitted share: fetch the existing times/shares data,
// update the share records, persist any block data, then record hashrate info.
const _handleShare = (deps) => (env) => async (args) => {
  const {
    fetchTimesShares, processShareData, processBlockData, persistHashrateData
  } = deps;
  const { poolConfig, isCluster } = env;
  const { isValidShare, isValidBlock, shareData } = args;
  const timestamp = Date.now();

  // A share counts as solo-mined when its port is configured for solo mining.
  const portInfo = poolConfig.ports[shareData.port];
  const isSoloMining = Boolean(portInfo && portInfo.soloMining);

  const shareEnv = {
    ...env, timestamp, shareData, isValidShare, isValidBlock, isSoloMining
  };

  // fetchTimesShares returns an object with 4 data entries. All 4 are consumed
  // by processShareData (which also mutates currentTimes/currentShares), while
  // processBlockData only needs the mutated currentTimes and currentShares.
  const timesShares = await fetchTimesShares(env);
  processShareData(shareEnv)(timesShares);
  const { currentTimes, currentShares } = timesShares;

  await processBlockData(shareEnv)({ isCluster, currentShares, currentTimes });
  await persistHashrateData(shareEnv);
  return true;
};
// Builds a 'hashrate' representation of the submitted share. Persists this
// data to <coin>:statistics:hashrate, using the time, in seconds, as the score.
// Invalid shares are recorded with a negated difficulty so downstream stats can
// separate valid from invalid work.
const _persistHashrateData = ({ promiseExec }) => (env) => {
  const {
    coin, client, logger, timestamp, shareData, isValidShare, isSoloMining
  } = env;

  // Negative difficulty marks an invalid share.
  const difficulty = (isValidShare ? shareData.difficulty : -shareData.difficulty);

  const data = JSON.stringify({
    time: timestamp, difficulty, worker: shareData.worker, soloMined: isSoloMining
  });

  // zadd score is in seconds; `timestamp` is in milliseconds.
  const cmd = ['zadd', `${coin}:statistics:hashrate`, timestamp / 1000, data];
  // Fixed: the previous message said "block data" (copy-paste), but this
  // function persists hashrate data.
  const failMsg = 'persistHashrateData: Error persisting hashrate data';

  return promiseExec({ client, logger })({ commands: [cmd], failMsg });
};
// Handles forking and launching the PoolApi service. In a worker process this
// constructs the API server and starts listening; in the primary process it
// forks the dedicated 'api' sub-process and returns its worker handle.
const _spawnAPI = ({ spawnProcess, PoolApi }) => (env) => {
  const { cluster, baseLogger } = env;
  const logger = baseLogger.cached('Startup', 'API');

  // Sub-process branch: build the API server and start serving requests.
  if (cluster.isWorker) {
    new PoolApi(env).listen();
    return false;
  }

  // Primary-process branch: fork the API sub-process with a re-spawn handler.
  return spawnProcess({ cluster, logger })({
    type: 'api',
    failMsg: 'API server process died, re-spawning...',
  });
};
// Forks a cluster sub-process of the given type, tags the worker handle with
// that type, attaches a standard 2-second re-spawn-on-exit handler plus any
// caller-supplied event handlers, and returns the worker.
const spawnProcess = (env) => (opts) => {
  const { cluster, logger } = env;
  const { type, events = {}, forkId = 0 } = opts;
  const failMsg = opts.failMsg || 'Spawned process failed, re-spawning';

  const worker = cluster.fork({ workerType: type, forkId });
  worker.type = type;

  // Standard handler: log the failure, then re-spawn after a short delay.
  const respawn = () => {
    logger.error(failMsg);
    setTimeout(() => { spawnProcess(env)(opts); }, 2000);
  };
  worker.on('exit', respawn);

  // Attach additional caller-supplied event handlers.
  for (const [name, callback] of Object.entries(events)) {
    worker.on(name, callback);
  }

  return worker;
};
// Handles forking and launching the PoolWorkers services cluster.
const _spawnWorkers = ({ utils, spawnProcess, PoolWorker }) => (env) => {
  const { cluster, baseLogger, portalConfig, poolConfig } = env;
  const logger = baseLogger.cached('Startup', 'Worker');

  // Spawn worker listeners only if we have a valid coin daemon configuration.
  const { daemons = [] } = poolConfig;
  if (!Array.isArray(daemons) || daemons.length === 0) {
    logger.error('No daemons configured. No worker listeners started.');
    return false;
  }

  // In a 'worker' sub-process: construct a PoolWorker and start listening.
  if (cluster.isWorker) {
    new PoolWorker(env);
    return false;
  }

  // In the primary process: spawn the cluster of 'worker' sub-processes.
  const numForks = utils.workerForks(portalConfig.clustering);
  const baseOpts = {
    type: 'worker',
    failMsg: 'Worker process died, re-spawning...',
    // Attach a 'message' event handler for 'banIP' message payloads.
    events: { message: utils.messageHandler }
  };
  const spawnOne = spawnProcess({ cluster, logger });
  const workers = Array.from({ length: numForks },
    (_, forkId) => spawnOne({ ...baseOpts, forkId }));
  logger.debug(`Started ${numForks} PoolWorker thread(s)`);

  return workers;
};
10 | const _startCnC = (deps) => ({ portalConfig, baseLogger }) => { 11 | const { cluster, PoolCnC } = deps; 12 | const logger = baseLogger.cached('Startup', 'CnC'); 13 | 14 | // TODO: Rename this server config entry to 'controlPort' or 'commandPort' 15 | const { cliPort: port } = portalConfig; 16 | 17 | // Initialize our Command/Control server instance 18 | const cnc = new PoolCnC(port); 19 | 20 | // Handle 'log' events. Output the { [severity] : text } through our logger 21 | cnc.on('log', (severity, text) => { logger[severity](text); }); 22 | 23 | // Handle 'command' events. Process the named command, if known, or return an error. 24 | cnc.on('command', (command, params, options, reply) => { 25 | switch (command) { 26 | case 'reloadpool': 27 | Object.values(cluster.workers).forEach((worker) => { 28 | worker.send({ type: 'reloadpool', coin: params[0] }); 29 | }); 30 | reply(`reloaded pool ${params[0]}`); 31 | break; 32 | default: 33 | reply(`unknown command: ${command}`); 34 | break; 35 | } 36 | }); 37 | 38 | // Launch the Command/Control server 39 | cnc.listen(); 40 | }; 41 | 42 | module.exports = { 43 | _startCnC, 44 | startCnC: _startCnC(requireDeps(defaultDeps)) 45 | }; 46 | -------------------------------------------------------------------------------- /src/startup/utils.js: -------------------------------------------------------------------------------- 1 | const cluster = require('cluster'); 2 | const os = require('os'); 3 | 4 | const messageHandler = (msg) => { 5 | // Only handle type: 'banIP' message payloads. 6 | if (msg.type !== 'banIP') return; 7 | // Relay the 'banIP' command to each of our PoolWorker sub-processes 8 | Object.keys(cluster.workers).filter((w) => w.type === 'worker') 9 | .forEach((w) => w.send({ type: 'banIP', ip: msg.ip })); 10 | }; 11 | 12 | // Calculate the size of the PoolWorker process cluster, based off the clustering 13 | // config setting and the numner of available CPUs. 
14 | const workerForks = (config = {}) => { 15 | const { enabled = false, forks = 1 } = config; 16 | 17 | if (!enabled) return 1; 18 | if (forks === 'auto') return os.cpus().length; 19 | return (forks > 0 ? forks : 1); 20 | }; 21 | 22 | module.exports = { 23 | messageHandler, 24 | workerForks 25 | }; 26 | -------------------------------------------------------------------------------- /src/stats/coin_balances.js: -------------------------------------------------------------------------------- 1 | const { requireDeps } = require('../utils/require_deps'); 2 | 3 | const defaultDeps = [ 4 | ['promiseExec', `${__dirname}/../utils/promised_redis`], 5 | ['sumScanValues', `${__dirname}/utils`] 6 | ]; 7 | 8 | const _coinBalances = (deps) => (env) => (address) => { 9 | const { promiseExec, sumScanValues } = deps; 10 | const { client, logger, coin } = env; 11 | const scanArgs = [0, 'match', `${address}*`, 'count', 10000]; 12 | const keys = ['totalBalance', 'totalImmature', 'totalPaid', 'totalUnpaid']; 13 | const commands = [ 14 | ['hscan', `${coin}:payments:balances`, ...scanArgs], 15 | ['hscan', `${coin}:payments:immature`, ...scanArgs], 16 | ['hscan', `${coin}:payments:payouts`, ...scanArgs], 17 | ['hscan', `${coin}:payments:unpaid`, ...scanArgs], 18 | ]; 19 | const failMsg = 'Stats.getBalanceByAddress'; 20 | 21 | return promiseExec({ client, logger })({ commands, failMsg }) 22 | .then((results) => { 23 | const pairs = results.map((r) => sumScanValues(r[1])) 24 | // Zip keys with their summed values 25 | .map((sum, i) => [keys[i], sum]); 26 | return Object.fromEntries(pairs); 27 | }); 28 | }; 29 | 30 | module.exports = { 31 | _coinBalances, 32 | coinBalances: _coinBalances(requireDeps(defaultDeps)) 33 | }; 34 | -------------------------------------------------------------------------------- /src/stats/coin_stats/compute_hashrates.js: -------------------------------------------------------------------------------- 1 | const { requireDeps } = 
const objectSize = (obj) => Object.keys(obj).length;

// Computes pool and worker roundShares and adjusted hashrates from entries in
// stats.shares.roundShares, then derives pool-wide worker counts. All updates
// are applied in place on the passed-in stats object.
const _computeHashrates = ({ shareMultiplier }) => ({ statsConfig, stats }) => {
  const { workers } = stats.workers;

  // Accumulate roundShares for workers with matching shares.roundShares entries.
  for (const [addr, count] of Object.entries(stats.shares.roundShares)) {
    if (addr in workers) workers[addr].roundShares += parseFloat(count);
  }

  // Calculate each worker's adjusted hashrate. Accumulate adjusted hashrates
  // pool-wide and by mining type, and copy the worker entry into its
  // mining-type sub-object.
  for (const [address, worker] of Object.entries(workers)) {
    const type = worker.soloMining ? 'Solo' : 'Shared';
    const multiplier = shareMultiplier(stats.algorithm);
    const adjustedRate = (multiplier * worker.validShares) / statsConfig.hashrateWindow;

    worker.hashrate = adjustedRate;
    stats.workers[`workers${type}`][address] = worker;
    stats.hashrate.hashrate += adjustedRate;
    stats.hashrate[`hashrate${type}`] += adjustedRate;
  }

  // Compute the various workers "count" entries.
  stats.workers = {
    ...stats.workers,
    workersCount: objectSize(stats.workers.workers),
    workersSharedCount: objectSize(stats.workers.workersShared),
    workersSoloCount: objectSize(stats.workers.workersSolo)
  };
};
// Fetches the full range of statistics associated with our coin/pool. Returns
// an object with each retrieved "stat" matched to its appropriate key name.
const _fetchRawStats = ({ promiseExec }) => async (env) => {
  const { statsConfig, coin, timestamp } = env;
  // Lower score bound for the hashrate window (scores are in seconds).
  const offsetTime = (timestamp - statsConfig.hashrateWindow).toString();
  const keys = [
    'hashrates', 'roundCurrent', 'timesCurrent', 'pendingCount', 'pendingBlocks',
    'confirmedCount', 'confirmedBlocks', 'pendingConfirms', 'orphanedCount',
    'basicStats', 'history', 'payments'
  ];
  const commands = [
    ['zrangebyscore', `${coin}:statistics:hashrate`, offsetTime, '+inf'],
    ['hgetall', `${coin}:shares:roundCurrent`],
    ['hgetall', `${coin}:times:timesCurrent`],
    ['scard', `${coin}:blocks:pending`],
    ['smembers', `${coin}:blocks:pending`],
    ['scard', `${coin}:blocks:confirmed`],
    ['smembers', `${coin}:blocks:confirmed`],
    ['hgetall', `${coin}:blocks:pendingConfirms`],
    ['scard', `${coin}:blocks:kicked`],
    ['hgetall', `${coin}:statistics:basic`],
    ['hgetall', `${coin}:statistics:history`],
    ['zrange', `${coin}:payments:payments`, -100, -1],
  ];
  const failMsg = `coinStats: Error fetching stats for ${coin}`;

  const results = await promiseExec(env)({ commands, failMsg });
  // Null results (missing redis keys) become empty objects; zip each value
  // with its key name and build the stats object.
  const entries = results.map((val, i) => [keys[i], val === null ? {} : val]);
  return Object.fromEntries(entries);
};
r : {})) 38 | .map((val, i) => [keys[i], val]); 39 | return Object.fromEntries(entries); 40 | }); 41 | }; 42 | 43 | module.exports = { 44 | _fetchRawStats, 45 | fetchRawStats: _fetchRawStats(requireDeps(defaultDeps)) 46 | }; 47 | -------------------------------------------------------------------------------- /src/stats/coin_stats/index.js: -------------------------------------------------------------------------------- 1 | const { requireDeps } = require('../../utils/require_deps'); 2 | 3 | const defaultDeps = [ 4 | ['fetchRawStats', `${__dirname}/fetch_raw_stats`], 5 | ['parseStats', `${__dirname}/parse_stats`], 6 | ['processStats', `${__dirname}/process_stats`], 7 | ]; 8 | 9 | // A very simple wrapper function, that coordinates the individual steps in 10 | // fetching and producing the current stats breakdown for the coin. 11 | // No tests needed, since the individual steps are fully tested. 12 | const _coinStats = (deps) => (env) => { 13 | const { fetchRawStats, parseStats, processStats } = deps; 14 | 15 | return fetchRawStats(env) 16 | .then(parseStats(env)) 17 | .then(processStats(env)); 18 | }; 19 | 20 | module.exports = { 21 | _coinStats, 22 | coinStats: _coinStats(requireDeps(defaultDeps)), 23 | }; 24 | -------------------------------------------------------------------------------- /src/stats/coin_stats/initialize_workers.js: -------------------------------------------------------------------------------- 1 | const baseWorker = ({ soloMining, hashrateType }) => ( 2 | { 3 | validShares: 0, invalidShares: 0, roundShares: 0, hashrateType, soloMining 4 | } 5 | ); 6 | 7 | // Intitalize worker records, from JSON entries in stats.hashrate.hashrates. If 8 | // any workers have multiple entries, we accumulate their valid/invalid shares. 9 | // After all JSON entries are processed, we remove stats.hashrate.hashrates. 
10 | const initializeWorkers = ({ poolConfig, stats }) => { 11 | const { hashrateType } = poolConfig.coin; 12 | const { workers } = stats.workers; 13 | 14 | stats.hashrate.hashrates.forEach((rateJson) => { 15 | const rate = JSON.parse(rateJson); 16 | const { worker: addr, soloMined: soloMining } = rate; 17 | const shares = parseFloat(rate.difficulty || 0); 18 | const hasShares = (shares || 0) > 0; 19 | 20 | // Accumulate a pool-wide total of shares found. 21 | stats.shares.shares += shares; 22 | 23 | if (!workers[addr]) workers[addr] = baseWorker({ soloMining, hashrateType }); 24 | 25 | workers[addr].difficulty = Math.round(rate.difficulty || 0); 26 | workers[addr].validShares += hasShares ? shares : 0; 27 | workers[addr].invalidShares -= !hasShares ? shares : 0; 28 | }); 29 | 30 | // Remove the raw hashrates entry from the global stats, after processing. 31 | delete stats.hashrate.hashrates; 32 | }; 33 | 34 | module.exports = { initializeWorkers }; 35 | -------------------------------------------------------------------------------- /src/stats/coin_stats/persist_history.js: -------------------------------------------------------------------------------- 1 | const { requireDeps } = require('../../utils/require_deps'); 2 | 3 | const defaultDeps = [ 4 | ['promiseCmd', `${__dirname}/../../utils/promised_redis`], 5 | ]; 6 | 7 | const _persistHistory = ({ promiseCmd }) => async (env) => { 8 | const promiseHset = promiseCmd('hset')(env); 9 | const { stats, statsConfig, timestamp } = env; 10 | const oldestTime = timestamp - statsConfig.historicalRetention; 11 | const interval = timestamp - statsConfig.historicalInterval; 12 | 13 | const history = { 14 | time: timestamp, 15 | hashrateSolo: stats.hashrate.hashrateSolo, 16 | hashrateShared: stats.hashrate.hashrateShared, 17 | workersSolo: stats.workers.workersSoloCount, 18 | workersShared: stats.workers.workersSharedCount, 19 | }; 20 | stats.history.push(history); 21 | 22 | // Filter out history entries whose time is too old 
(default is 30 days). 23 | const hists = stats.history.filter((h) => h.time >= oldestTime); 24 | 25 | // TODO: why is this entry named 'history' and not 'counts'? Having an entry 26 | // at statistics:history:counts seems better than statistics:history:history. 27 | const args = [`${stats.name}:statistics:history`, 'history', JSON.stringify(hists)]; 28 | const failMsg = 'coinStats: Error persisting history data'; 29 | 30 | // If this is our only history entry, or the previous "last" entry is within the 31 | // interval time (default is 10 minutes), we persist the updated history data. 32 | // 33 | // TODO: Based on the setInterval command in src/server/index.js, the pool 34 | // stats are cached every 60 seconds. That's how frequently our hashrate and worker 35 | // counts are calculated (and persisted). Why would we care that the earliest, previous 36 | // history entry is from more than 10 minutes ago? 37 | if (hists.length === 1 || interval > hists[hists.length - 2].time) { 38 | await promiseHset({ args, failMsg }); 39 | return true; 40 | } 41 | return false; 42 | }; 43 | 44 | module.exports = { 45 | _persistHistory, 46 | persistHistory: _persistHistory(requireDeps(defaultDeps)) 47 | }; 48 | -------------------------------------------------------------------------------- /src/stats/coin_stats/process_stats.js: -------------------------------------------------------------------------------- 1 | const { requireDeps } = require('../../utils/require_deps'); 2 | 3 | const defaultDeps = [ 4 | ['initializeWorkers', `${__dirname}/initialize_workers`], 5 | ['computeHashrates', `${__dirname}/compute_hashrates`], 6 | ['persistHistory', `${__dirname}/persist_history`], 7 | ]; 8 | 9 | // Takes the parsed stats data and processes it into more specific worker and 10 | // hashrate values. Also, creates a new "history" entry that should be persisted. 
11 | const _processStats = (deps) => (env) => async (stats) => { 12 | const { initializeWorkers, computeHashrates, persistHistory } = deps; 13 | const subEnv = { ...env, stats }; 14 | 15 | initializeWorkers(subEnv); 16 | computeHashrates(subEnv); 17 | await persistHistory(subEnv); 18 | return stats; 19 | }; 20 | 21 | module.exports = { 22 | _processStats, 23 | processStats: _processStats(requireDeps(defaultDeps)), 24 | }; 25 | -------------------------------------------------------------------------------- /src/stats/index.js: -------------------------------------------------------------------------------- 1 | const { requireDeps } = require('../utils/require_deps'); 2 | 3 | const _defaultDeps = [ 4 | ['Redis', `${__dirname}/../redis`], 5 | ['coinBalances', `${__dirname}/coin_balances`], 6 | ['totalShares', `${__dirname}/total_shares`], 7 | ['coinStats', `${__dirname}/coin_stats/`], 8 | ]; 9 | 10 | // Function constructor that encloses the statistics generating functions. 11 | // Each function is a thin wrapper around extracted functions performing the 12 | // stats generation. Also contains a cached version of the coin's full 13 | // stats breakdown. 
14 | const _poolStats = (deps) => function (env) { 15 | const { Redis, coinBalances, totalShares, coinStats } = deps; 16 | const { baseLogger, poolConfig, portalConfig } = env; 17 | const coin = poolConfig.coin.name; 18 | const redis = new Redis(portalConfig.redis); 19 | 20 | redis.attachEvents(baseLogger.cached('Stats', 'Redis')); 21 | 22 | const subEnv = { client: redis.client, coin, poolConfig }; 23 | 24 | this.stats = {}; 25 | const _this = this; 26 | 27 | // Return coin balances for the supplied address 28 | this.getBalanceByAddress = async (address) => { 29 | const logger = baseLogger.cached('Stats', 'Balances'); 30 | const balances = await coinBalances({ ...subEnv, logger })(address); 31 | return balances; 32 | }; 33 | 34 | // Return total shares for the supplied address 35 | this.getTotalSharesByAddress = async (address) => { 36 | const logger = baseLogger.cached('Stats', 'Shares'); 37 | const total = await totalShares({ ...subEnv, logger })(address); 38 | return total; 39 | }; 40 | 41 | // Cache coin stats from Pool/Database 42 | this.getGlobalStats = async () => { 43 | const logger = baseLogger.cached('Stats', 'Global'); 44 | // Converts the current milliseconds time to seconds, with no remainder. 
45 | const timestamp = Math.trunc(Date.now() / 1000); 46 | const statsEnv = { 47 | ...subEnv, logger, timestamp, statsConfig: portalConfig.stats 48 | }; 49 | _this.stats = await coinStats(statsEnv); 50 | }; 51 | }; 52 | 53 | module.exports = { 54 | _defaultDeps, 55 | _poolStats, 56 | PoolStats: _poolStats(requireDeps(_defaultDeps)), 57 | }; 58 | -------------------------------------------------------------------------------- /src/stats/total_shares.js: -------------------------------------------------------------------------------- 1 | const { requireDeps } = require('../utils/require_deps'); 2 | 3 | const defaultDeps = [ 4 | ['promiseCmd', `${__dirname}/../utils/promised_redis`], 5 | ['sumScanValues', `${__dirname}/utils`] 6 | ]; 7 | 8 | const _totalShares = (deps) => (env) => (address) => { 9 | const { promiseCmd, sumScanValues } = deps; 10 | const { client, logger, coin } = env; 11 | const promiseHscan = promiseCmd('hscan')({ client, logger }); 12 | 13 | const scanKey = `${coin}:shares:roundCurrent`; 14 | const args = [scanKey, 0, 'match', `${address}*`, 'count', 10000]; 15 | const failMsg = 'Stats.getTotalSharesByAddress'; 16 | 17 | return promiseHscan({ args, failMsg }) 18 | .then((results) => sumScanValues(results[1])); 19 | }; 20 | 21 | module.exports = { 22 | _totalShares, 23 | totalShares: _totalShares(requireDeps(defaultDeps)), 24 | }; 25 | -------------------------------------------------------------------------------- /src/utils/finalize_pool_config.js: -------------------------------------------------------------------------------- 1 | const { requireDeps } = require('./require_deps'); 2 | 3 | const defaultDeps = [ 4 | ['algorithms', 'stratum-pool/scripts/algorithms.js', false], 5 | ]; 6 | 7 | // Read and Combine ALL Pool Configurations 8 | const _finalizePoolConfig = ({ algorithms }) => (env) => { 9 | let { poolConfig } = env; 10 | const { baseLogger, portalConfig } = env; 11 | const logger = baseLogger.cached('Startup', 'Finalize Config'); 12 | const 
{ algorithm } = poolConfig.coin; 13 | 14 | // Check to Ensure Algorithm is Supported 15 | if (!Object.keys(algorithms).includes(algorithm)) { 16 | const msg = `Pool Startup: Unsupported algorithm "${algorithm}"`; 17 | logger.error(msg); 18 | throw new Error(msg); 19 | } 20 | 21 | // Copy default configs into pool config 22 | poolConfig = { ...portalConfig.defaultPoolConfigs, ...poolConfig }; 23 | 24 | const toBuffer = (value) => Buffer.from(value, 'hex'); 25 | const initNetwork = (network) => { 26 | network.bip32.public = toBuffer(network.bip32.public).readUInt32LE(0); 27 | network.pubKeyHash = toBuffer(network.pubKeyHash).readUInt8(0); 28 | network.scriptHash = toBuffer(network.scriptHash).readUInt8(0); 29 | }; 30 | 31 | // Establish Mainnet/Testnet 32 | if (poolConfig.coin.mainnet) initNetwork(poolConfig.coin.mainnet); 33 | if (poolConfig.coin.testnet) initNetwork(poolConfig.coin.testnet); 34 | 35 | return poolConfig; 36 | }; 37 | 38 | module.exports = { 39 | _finalizePoolConfig, 40 | finalizePoolConfig: _finalizePoolConfig(requireDeps(defaultDeps)) 41 | }; 42 | -------------------------------------------------------------------------------- /src/utils/promised_redis.js: -------------------------------------------------------------------------------- 1 | const { promisify } = require('util'); 2 | 3 | const errorMessage = (what, where) => `Redis ${what} Failed: ${where}`; 4 | 5 | const failed = ({ logger, failMsg }) => (err) => { 6 | const msg = `${failMsg}: ${err.toString()}`; 7 | logger.error(msg); 8 | throw new Error(msg); 9 | }; 10 | 11 | const promiseCmd = (name) => ({ client, logger }) => ({ args, fullName, failMsg }) => { 12 | const promise = promisify(client[name]).bind(client); 13 | failMsg = `Redis ${fullName || name} Failed: ${failMsg}`; 14 | return promise(...args).catch(failed({ logger, failMsg })); 15 | }; 16 | 17 | // TODO: Refactor to use promiseCmd to wrap exec 18 | const promiseExec = ({ client, logger }) => ({ commands, failMsg }) => { 19 | 
const name = 'exec'; 20 | failMsg = errorMessage(name, failMsg); 21 | const multi = client.multi(commands); 22 | return promiseCmd(name)({ client: multi, logger })({ args: [], failMsg }); 23 | }; 24 | 25 | module.exports = { 26 | promiseCmd, 27 | promiseExec, 28 | }; 29 | -------------------------------------------------------------------------------- /src/utils/require_deps.js: -------------------------------------------------------------------------------- 1 | // Provides a mechanism for specifying a module's dependencies, in a manner 2 | // that allows for overrides, much like traditional dependency injection. 3 | // Dependencies are provided as an array of arrays, where each element is a 4 | // series of arguments that determine how a given dependency will be loaded. 5 | // 6 | // Dependencies can be a key in a module object, or a reference to an entire 7 | // module. You can also choose to suppress the normal exception that is thrown 8 | // when the specified dependency path is invalid. 9 | const requireDeps = (deps) => { 10 | /* eslint-disable import/no-dynamic-require */ 11 | const safeRequire = (path, reThrow = true) => { 12 | try { 13 | return require(path); 14 | } catch (e) { 15 | if (reThrow) throw e; 16 | return undefined; 17 | } 18 | }; 19 | 20 | const resolveDep = ([name, path, isKey = true, shouldThrow = true]) => { 21 | let dep = safeRequire(path, shouldThrow); 22 | if (isKey && !dep) dep = {}; 23 | return { [name]: isKey ? dep[name] : dep }; 24 | }; 25 | 26 | return deps.reduce((acc, args) => ({ ...acc, ...resolveDep(args) }), {}); 27 | }; 28 | 29 | module.exports = { requireDeps }; 30 | -------------------------------------------------------------------------------- /src/utils/retry.js: -------------------------------------------------------------------------------- 1 | // A function, `retry`, which tries to resolve a promise-producing fn a set number of times. 2 | // 3 | // Promises whose failures return true for a given predicate will be retried. 
4 | // Promises which fail for unrelated reasons will be rejected. 5 | // Promises which succeed will be resolved 6 | // If the promise does not succeed after a set number of tries, retry will reject with a RetryError 7 | // containing a list of each failure in order 8 | class RetryError extends Error { 9 | constructor(n, failures) { 10 | super(`Attempt failed after ${n} tries.`); 11 | this.name = this.constructor.name; 12 | Error.captureStackTrace(this, this.constructor); 13 | 14 | this.failures = failures; 15 | } 16 | } 17 | 18 | const retry = (n, attemptFn, pred) => { 19 | let tries = 0; 20 | 21 | return new Promise((resolve, reject) => { 22 | const attempt = (fn, failures) => { 23 | if (tries >= n) { 24 | reject(new RetryError(n, failures)); 25 | } else { 26 | fn(tries) 27 | .then(resolve) 28 | .catch((e) => { 29 | if (pred(e)) { 30 | tries += 1; 31 | failures.push(e); 32 | attempt(fn, failures); 33 | } else { 34 | reject(e); 35 | } 36 | }); 37 | } 38 | }; 39 | 40 | attempt(attemptFn, []); 41 | }); 42 | }; 43 | 44 | module.exports = { 45 | retry, 46 | RetryError 47 | }; 48 | -------------------------------------------------------------------------------- /src/utils/set_posix_limit.js: -------------------------------------------------------------------------------- 1 | const { requireDeps } = require('./require_deps'); 2 | 3 | const defaultDeps = [ 4 | ['posix', 'posix', false, false], 5 | ]; 6 | 7 | // Wraps a function that would throw on exception to one that returns false on exception 8 | const orFalse = (proc) => (...args) => { 9 | let result; 10 | try { 11 | result = proc(...args); 12 | } catch { 13 | result = false; 14 | } 15 | return result; 16 | }; 17 | 18 | // Raises the "nofile" connection limit on POSIX-compatibile operating systems. 
19 | const _setPosixLimit = (deps) => ({ baseLogger, isMaster }) => { 20 | const { posix } = deps; 21 | const logger = baseLogger.cached('POSIX', 'Connection Limit'); 22 | 23 | const setLimit = orFalse(() => { 24 | posix.setrlimit('nofile', { soft: 100000, hard: 100000 }); // Might throw 25 | return true; 26 | }); 27 | 28 | const setUid = orFalse((uid) => { 29 | process.setuid(uid); // Might throw 30 | return true; 31 | }); 32 | 33 | // If we're not on a POSIX operating system, we log and exit out. 34 | let msg; 35 | if (!posix) { 36 | msg = '(Safe to ignore) POSIX module not installed. Connection limit not raised'; 37 | if (isMaster) logger.debug(msg); 38 | return false; 39 | } 40 | 41 | if (!setLimit()) { 42 | // If we fail, we log the failure with a notice about needing root-level access. 43 | msg = '(Safe to ignore) Must be a root user to increase file connection limit'; 44 | if (isMaster) logger.warning(msg); 45 | 46 | // If we have a root user's UID, try to switch 47 | // to that UID then try to raise the file limit. 
48 | const rootUid = parseInt(process.env.SUDO_UID, 10); 49 | if (!(rootUid && setUid(rootUid) && setLimit())) { return false; } 50 | } 51 | 52 | msg = `File connection limit, for user: ${process.getuid()}, raised to 100K `; 53 | logger.debug(msg); 54 | return true; 55 | }; 56 | 57 | module.exports = { 58 | _setPosixLimit, 59 | setPosixLimit: _setPosixLimit(requireDeps(defaultDeps)), 60 | }; 61 | -------------------------------------------------------------------------------- /src/worker/auth.js: -------------------------------------------------------------------------------- 1 | // Establish Worker Authentication 2 | // Returns a promise resolving True when auth succeeds and false otherwise 3 | // Takes a constructor to wrap a daemon in a Promise interface 4 | const { requireDeps } = require('../utils/require_deps'); 5 | 6 | const defaultDeps = [ 7 | ['DaemonWrapper', `${__dirname}/../daemon/`] 8 | ]; 9 | 10 | // TODO(rschifflin): Remove daemon wrapper once the pool daemons in the codebase get promisified 11 | // NOTE(rschifflin): The `pool` environment arg is initially null and is initialized later, 12 | // due to the mutually recursive nature of the auth callback 13 | // (a pool requires an auth callback which requires a pool which requires...) 14 | const baseAuth = (deps) => (env) => async (port, workerName, _password) => { 15 | const { DaemonWrapper } = deps; 16 | const { pool, poolConfig } = env; 17 | const daemon = new DaemonWrapper(pool.daemon); 18 | 19 | if (poolConfig.validateWorkerUsername !== true) { 20 | return true; 21 | } 22 | 23 | if (workerName.length === 40) { 24 | try { 25 | // TODO(rschifflin): So as long as the worker name is the right format it's valid? 
26 | // I assume port/password/etc are for integration with different pluggable auths 27 | const validName = Buffer.from(workerName, 'hex').toString('hex') === workerName; 28 | return validName; 29 | } catch (e) { 30 | return false; 31 | } 32 | } 33 | 34 | return daemon.rpcCmd('validateaddress', [workerName]).then((results) => { 35 | const isValid = results.filter((r) => r.response.isvalid).length > 0; 36 | return isValid; 37 | }); 38 | }; 39 | 40 | module.exports = { 41 | _auth: baseAuth, 42 | auth: baseAuth(requireDeps(defaultDeps)) 43 | }; 44 | -------------------------------------------------------------------------------- /src/worker/log_share.js: -------------------------------------------------------------------------------- 1 | const logShare = ({ logger }) => (isValidShare, isValidBlock, data) => { 2 | const shareData = JSON.stringify(data); 3 | // Checks for Block Data 4 | if (data.blockHash && !isValidBlock) logger.debug(`We thought a block was found but it was rejected by the daemon, share data: ${shareData}`); 5 | else if (isValidBlock) logger.debug(`Block found: ${data.blockHash} by ${data.worker}`); 6 | 7 | // Checks for Share Data 8 | if (isValidShare) { 9 | if (data.shareDiff > 1000000000) logger.debug('Share was found with diff higher than 1.000.000.000!'); 10 | else if (data.shareDiff > 1000000) logger.debug('Share was found with diff higher than 1.000.000!'); 11 | logger.debug(`Share accepted at diff ${data.difficulty}/${data.shareDiff} by ${data.worker} [${data.ip}]`); 12 | } else { 13 | logger.debug(`Share rejected: ${shareData}`); 14 | } 15 | }; 16 | 17 | module.exports = { logShare }; 18 | -------------------------------------------------------------------------------- /src/worker/pool_auth_callback.js: -------------------------------------------------------------------------------- 1 | // Glue between callback interface and promise interface 2 | // TODO(rschifflin): Remove this callback layer once the stratum side also becomes promise-ified 
3 | // Eventually we will simply pass a promise-producing function and the callbacks will be localized. 4 | 5 | const { requireDeps } = require('../utils/require_deps'); 6 | 7 | const defaultDeps = [ 8 | ['auth', `${__dirname}/auth`] 9 | ]; 10 | 11 | const basePoolAuthCallback = ({ auth }) => (env) => (ip, port, workerName, password, callback) => { 12 | const { logger, ...authEnv } = env; 13 | return auth(authEnv)(port, workerName, password).then((authorized) => { 14 | const authString = authorized ? 'Authorized' : 'Unauthorized'; 15 | logger.debug(`${authString} ${workerName}:${password} [${ip}]`); 16 | callback({ 17 | error: null, 18 | authorized, 19 | disconnect: false, 20 | }); 21 | }); 22 | }; 23 | 24 | module.exports = { 25 | poolAuthCallback: basePoolAuthCallback(requireDeps(defaultDeps)), 26 | _poolAuthCallback: basePoolAuthCallback 27 | }; 28 | -------------------------------------------------------------------------------- /test/api/collect_blocks_data_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | 3 | const { expect } = require('../chai-local'); 4 | 5 | const { collectBlocksData } = require('../../src/api/collect_blocks_data'); 6 | 7 | describe('collectBlocksData()', () => { 8 | const stats = { 9 | name: 'carrot', 10 | symbol: 'CRRT', 11 | algorithm: 'scrypt', 12 | blocks: { 13 | pending: [], 14 | confirmed: [], 15 | confirmations: {}, 16 | pendingCount: 0, 17 | confirmedCount: 0, 18 | orphanedCount: 0 19 | } 20 | }; 21 | 22 | describe('with no "pending" and no "confirmed" blocks', () => { 23 | it('retuns a data object with an empty blocks array', () => { 24 | const result = collectBlocksData({ stats }); 25 | 26 | expect(result.blocks).to.eql([]); 27 | }); 28 | }); 29 | 30 | describe('with 1 "pending" block', () => { 31 | const pending = { 32 | time: Date.now(), 33 | height: 10, 34 | blockHash: 'AABB', 35 | blockReward: 6.25, 36 | txHash: '', 37 | difficulty: 1, 
38 | worker: 'CCDD', 39 | soloMined: false, 40 | }; 41 | 42 | it('retuns a data object with 1 pending entry in the blocks array', () => { 43 | stats.blocks.pending.push(JSON.stringify(pending)); 44 | stats.blocks.confirmations.AABB = 5; 45 | const result = collectBlocksData({ stats }); 46 | 47 | expect(result.blocks.length).to.eql(1); 48 | expect(result.blocks[0].confirmed).to.eql(false); 49 | }); 50 | }); 51 | }); 52 | -------------------------------------------------------------------------------- /test/api/collect_payments_data_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it, before } = require('mocha'); 2 | 3 | const { expect } = require('../chai-local'); 4 | 5 | const { collectPaymentsData } = require('../../src/api/collect_payments_data'); 6 | 7 | describe('collectPaymentsData()', () => { 8 | const stats = { 9 | name: 'carrot', 10 | symbol: 'CRRT', 11 | algorithm: 'scrypt', 12 | payments: [] 13 | }; 14 | 15 | describe('with no payments in the pool stats', () => { 16 | it('retuns an empty array', () => { 17 | const result = collectPaymentsData({ stats }); 18 | 19 | expect(result.payments).to.eql([]); 20 | }); 21 | }); 22 | 23 | describe('with 1 payment in the pool stats', () => { 24 | const payment = { 25 | time: Date.now(), 26 | txid: '', 27 | paid: 6.25, 28 | shares: 1, 29 | workers: 1, 30 | records: {}, 31 | unpaid: {}, 32 | totals: { 33 | amounts: { AABB: 0 }, 34 | shares: {} 35 | } 36 | }; 37 | 38 | before(() => stats.payments.push(JSON.stringify(payment))); 39 | 40 | describe('with no address to filter', () => { 41 | it('retuns arrary of just the payment object', () => { 42 | const result = collectPaymentsData({ stats }); 43 | 44 | expect(result.payments.length).to.eql(1); 45 | expect(result.payments[0].paid).to.eql(6.25); 46 | }); 47 | }); 48 | 49 | describe('with a filter address', () => { 50 | it('retuns an empty arrary', () => { 51 | const result = collectPaymentsData({ stats, 
address: 'AABB' }); 52 | 53 | expect(result.payments.length).to.eql(1); 54 | expect(result.payments[0].paid).to.eql(6.25); 55 | }); 56 | }); 57 | }); 58 | }); 59 | -------------------------------------------------------------------------------- /test/api/collect_workers_data_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | 3 | const { expect } = require('../chai-local'); 4 | 5 | const { collectWorkersData } = require('../../src/api/collect_workers_data'); 6 | 7 | describe('collectWorkersData()', () => { 8 | const stats = { 9 | name: 'carrot', 10 | symbol: 'CRRT', 11 | algorithm: 'scrypt', 12 | workers: { 13 | workersShared: {}, 14 | workersSolo: {}, 15 | } 16 | }; 17 | const addr = 'AABB'; 18 | const shared = { 19 | difficulty: 10, 20 | validShares: 0, 21 | invalidShares: 0, 22 | hashrate: 1111, 23 | hashrateType: 'sols', 24 | soloMining: false, 25 | }; 26 | const solo = { 27 | difficulty: 10, 28 | validShares: 0, 29 | invalidShares: 0, 30 | hashrate: 2222, 31 | hashrateType: 'sols', 32 | soloMining: true, 33 | }; 34 | 35 | describe('with no "shared" and no "solo" workers', () => { 36 | it('retuns an empty workers array', () => { 37 | const result = collectWorkersData({ stats }); 38 | 39 | expect(result.workers).to.eql([]); 40 | }); 41 | }); 42 | 43 | describe('with 1 "shared" worker', () => { 44 | it('retuns 1 entry in the workers array', () => { 45 | stats.workers.workersShared[addr] = shared; 46 | const result = collectWorkersData({ stats }); 47 | 48 | expect(result.workers.length).to.eql(1); 49 | expect(result.workers[0].address).to.eql(addr); 50 | }); 51 | }); 52 | 53 | describe('with 1 "shared" and 1 "solo" workers, filtered by address', () => { 54 | it('retuns the filtered entry in the workers array', () => { 55 | const addr2 = 'BBCC'; 56 | stats.workers.workersShared[addr] = shared; 57 | stats.workers.workersSolo[addr2] = solo; 58 | const result = 
collectWorkersData({ stats, address: addr2 }); 59 | 60 | expect(result.workers.length).to.eql(1); 61 | expect(result.workers[0].address).to.eql(addr2); 62 | }); 63 | }); 64 | }); 65 | -------------------------------------------------------------------------------- /test/api/index_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../chai-local'); 5 | 6 | const { requireDeps } = require('../../src/utils/require_deps'); 7 | const { _defaultDeps, _PoolApi } = require('../../src/api/index'); 8 | 9 | describe('PoolApi -- express HTTP server for API', () => { 10 | const logger = { 11 | debug: sinon.stub().returnsArg(0), 12 | error: sinon.stub().returnsArg(0), 13 | }; 14 | const baseLogger = { cached: () => logger }; 15 | const getGlobalStats = sinon.stub().resolves(true); 16 | const PoolStats = function () { return { getGlobalStats }; }; 17 | const v1 = () => sinon.stub(); 18 | const deps = requireDeps(_defaultDeps); 19 | deps.PoolStats = PoolStats; 20 | deps.v1 = v1; 21 | const PoolApi = _PoolApi(deps); 22 | const poolConfig = {}; 23 | 24 | describe('when constructed', () => { 25 | const portalConfig = {}; 26 | const env = { baseLogger, portalConfig, poolConfig }; 27 | 28 | it('configures the express app', () => { 29 | const poolApi = new PoolApi(env); 30 | expect(poolApi.app).to.have.property('mountpath', '/'); 31 | }); 32 | }); 33 | 34 | describe('.listen()', () => { 35 | const portalConfig = { 36 | stats: { updateInterval: 1 }, 37 | server: {} 38 | }; 39 | 40 | describe('with a valid port', () => { 41 | portalConfig.server.port = 8080; 42 | // const env = { baseLogger, portalConfig, poolConfig }; 43 | 44 | it('starts the getGlobalStats interval, and starts listening', () => { 45 | // Not sure how to test this, if necessary. Would like to verify that 46 | // the server and the interval starts up. 
47 | }); 48 | }); 49 | 50 | describe('with an invalid port', () => { 51 | portalConfig.server.port = 80; 52 | // const env = { baseLogger, portalConfig, poolConfig }; 53 | 54 | it('throws an error', () => { 55 | // Would like to verify that the `error` event is caught and the interval 56 | // is cleared. 57 | }); 58 | }); 59 | }); 60 | }); 61 | -------------------------------------------------------------------------------- /test/api/v1/blocks_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../../chai-local'); 5 | const { responseStub } = require('../../helpers.js'); 6 | 7 | const utils = require('../../../src/api/v1/utils'); 8 | const { _blocks } = require('../../../src/api/v1/blocks'); 9 | 10 | describe('blocks() - Route handler for the /blocks endpoint.', () => { 11 | const collectBlocksData = sinon.stub().returns({ blocks: [] }); 12 | const poolStats = { stats: { name: 'coin' } }; 13 | const blocks = _blocks({ collectBlocksData, utils })({ poolStats }); 14 | 15 | describe('when providing no pool', () => { 16 | const res = responseStub(); 17 | const req = { query: {} }; 18 | 19 | it('sets a 200 status, with empty blocks payload', () => { 20 | const result = blocks(req, res); 21 | expect(res.status).to.have.been.calledOnceWith(200); 22 | expect(result.blocks).to.eql([]); 23 | }); 24 | }); 25 | 26 | describe('when the provided pool does not match', () => { 27 | const res = responseStub(); 28 | const req = { query: { pool: 'invalid' } }; 29 | 30 | it('sets a 400 status, with error payload', () => { 31 | const result = blocks(req, res); 32 | expect(res.status).to.have.been.calledOnceWith(400); 33 | expect(Object.keys(result)).to.eql(['endpoint', 'error']); 34 | }); 35 | }); 36 | }); 37 | -------------------------------------------------------------------------------- /test/api/v1/combined_test.js: 
-------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../../chai-local'); 5 | const { responseStub } = require('../../helpers.js'); 6 | 7 | const utils = require('../../../src/api/v1/utils'); 8 | const { _combined } = require('../../../src/api/v1/combined'); 9 | 10 | describe('combined() - Route handler for the /combined endpoint.', () => { 11 | const empty = []; 12 | const collectBlocksData = sinon.stub().returns({ blocks: [] }); 13 | const collectPaymentsData = sinon.stub().returns({ payments: [] }); 14 | const collectWorkersData = sinon.stub().returns({ workers: empty }); 15 | const deps = { 16 | collectBlocksData, collectPaymentsData, collectWorkersData, utils 17 | }; 18 | const poolStats = { 19 | stats: { 20 | name: 'coin', 21 | statistics: {}, 22 | hashrate: {}, 23 | workers: {} 24 | } 25 | }; 26 | const combined = _combined(deps)({ poolStats }); 27 | 28 | describe('when providing the correct pool', () => { 29 | const res = responseStub(); 30 | const req = { query: { pool: 'coin' } }; 31 | 32 | it('sets a 200 status, with the combined payload', () => { 33 | const result = combined(req, res); 34 | 35 | expect(res.status).to.have.been.calledOnceWith(200); 36 | expect(result.combined).to.include({ pool: 'coin', workers: empty }); 37 | }); 38 | }); 39 | }); 40 | -------------------------------------------------------------------------------- /test/api/v1/history_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | 3 | const { expect } = require('../../chai-local'); 4 | const { responseStub } = require('../../helpers.js'); 5 | 6 | const { history } = require('../../../src/api/v1/history'); 7 | 8 | describe('history() - Route handler for the /history endpoint.', () => { 9 | const poolStats = { stats: { name: 'coin', history: [] } }; 10 
| 11 | describe('when providing no pool', () => { 12 | const res = responseStub(); 13 | const req = { query: {} }; 14 | 15 | it('sets a 200 status, with empty history payload', () => { 16 | const result = history({ poolStats })(req, res); 17 | expect(res.status).to.have.been.calledOnceWith(200); 18 | expect(result.history).to.eql([]); 19 | }); 20 | }); 21 | }); 22 | -------------------------------------------------------------------------------- /test/api/v1/payments_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../../chai-local'); 5 | const { responseStub } = require('../../helpers.js'); 6 | 7 | const utils = require('../../../src/api/v1/utils'); 8 | const { _payments } = require('../../../src/api/v1/payments'); 9 | 10 | describe('payments() - Route handler for the /payments endpoint.', () => { 11 | const collectPaymentsData = sinon.stub().returns({ payments: [] }); 12 | const poolStats = { stats: { name: 'coin' } }; 13 | const payments = _payments({ collectPaymentsData, utils })({ poolStats }); 14 | 15 | describe('when providing no pool', () => { 16 | const res = responseStub(); 17 | const req = { query: {} }; 18 | 19 | it('sets a 200 status, with empty payments payload', () => { 20 | const result = payments(req, res); 21 | expect(res.status).to.have.been.calledOnceWith(200); 22 | expect(result.payments).to.eql([]); 23 | }); 24 | }); 25 | }); 26 | -------------------------------------------------------------------------------- /test/api/v1/statistics_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../../chai-local'); 5 | const { responseStub } = require('../../helpers.js'); 6 | 7 | const utils = require('../../../src/api/v1/utils'); 8 | const { 
_combined } = require('../../../src/api/v1/combined'); // NOTE(review): this file is statistics_test.js but duplicates combined_test.js verbatim — it requires and exercises _combined, never _statistics; confirm the intended statistics test was not lost. 9 | 10 | describe('combined() - Route handler for the /combined endpoint.', () => { 11 | const empty = []; 12 | const collectBlocksData = sinon.stub().returns({ blocks: [] }); 13 | const collectPaymentsData = sinon.stub().returns({ payments: [] }); 14 | const collectWorkersData = sinon.stub().returns({ workers: empty }); 15 | const deps = { 16 | collectBlocksData, collectPaymentsData, collectWorkersData, utils 17 | }; 18 | const poolStats = { 19 | stats: { 20 | name: 'coin', 21 | statistics: {}, 22 | hashrate: {}, 23 | workers: {} 24 | } 25 | }; 26 | const combined = _combined(deps)({ poolStats }); 27 | 28 | describe('when providing the correct pool', () => { 29 | const res = responseStub(); 30 | const req = { query: { pool: 'coin' } }; 31 | 32 | it('sets a 200 status, with the combined payload', () => { 33 | const result = combined(req, res); 34 | 35 | expect(res.status).to.have.been.calledOnceWith(200); 36 | expect(result.combined).to.include({ pool: 'coin', workers: empty }); 37 | }); 38 | }); 39 | }); 40 | -------------------------------------------------------------------------------- /test/api/v1/wallets_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../../chai-local'); 5 | const { responseStub } = require('../../helpers.js'); 6 | 7 | const { _wallets } = require('../../../src/api/v1/wallets'); 8 | 9 | describe('wallets() - Route handler for the /wallets endpoint.', () => { 10 | const collectBlocksData = sinon.stub().returns({ blocks: [] }); 11 | const collectPaymentsData = sinon.stub().returns({ payments: [] }); 12 | const collectWorkersData = sinon.stub().returns({ workers: [] }); 13 | const deps = { collectBlocksData, collectPaymentsData, collectWorkersData }; 14 | const balances = { 15 | totalBalance: 0.111, totalImmature: 0, totalPaid:
0.222, totalUnpaid: 0 16 | }; 17 | const getBalanceByAddress = sinon.stub().resolves({ balances }); 18 | const poolStats = { 19 | getBalanceByAddress, 20 | stats: { 21 | name: 'coin', 22 | statistics: {}, 23 | hashrate: {}, 24 | workers: {} 25 | } 26 | }; 27 | const wallets = _wallets(deps)({ poolStats }); 28 | 29 | describe('when providing a worker address', () => { 30 | const res = responseStub(); 31 | const req = { query: { worker: 'worker' } }; 32 | 33 | it('sets a 200 status, with the wallets payload', async () => { 34 | const result = await wallets(req, res); 35 | 36 | expect(res.status).to.have.been.calledOnceWith(200); 37 | expect(result.wallets).to.include({ total: '0.33300000' }); 38 | }); 39 | }); 40 | 41 | describe('when not providing a worker address', () => { 42 | const res = responseStub(); 43 | const req = { query: {} }; 44 | 45 | it('sets a 400 status, returning an Invalid "worker" error', async () => { 46 | const result = await wallets(req, res); 47 | 48 | expect(res.status).to.have.been.calledOnceWith(400); 49 | expect(result.error).to.include('Invalid "worker" parameter'); 50 | }); 51 | }); 52 | }); 53 | -------------------------------------------------------------------------------- /test/api/v1/workers_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../../chai-local'); 5 | const { responseStub } = require('../../helpers.js'); 6 | 7 | const utils = require('../../../src/api/v1/utils'); 8 | const { _workers } = require('../../../src/api/v1/workers'); 9 | 10 | describe('workers() - Route handler for the /workers endpoint.', () => { 11 | const collectWorkersData = sinon.stub().returns({ workers: [] }); 12 | const poolStats = { stats: { name: 'coin' } }; 13 | const workers = _workers({ collectWorkersData, utils })({ poolStats }); 14 | 15 | describe('when providing no pool', () => { 16 | 
const res = responseStub(); 17 | const req = { query: {} }; 18 | 19 | it('sets a 200 status, with empty workers payload', () => { 20 | const result = workers(req, res); 21 | expect(res.status).to.have.been.calledOnceWith(200); 22 | expect(result.workers).to.eql([]); 23 | }); 24 | }); 25 | }); 26 | -------------------------------------------------------------------------------- /test/chai-local.js: -------------------------------------------------------------------------------- 1 | const chai = require('chai'); 2 | const chaiHttp = require('chai-http'); 3 | const chaiAsPromised = require('chai-as-promised'); 4 | const sinonChai = require('sinon-chai'); 5 | 6 | chai.use(chaiHttp); 7 | chai.use(chaiAsPromised); 8 | chai.use(sinonChai); 9 | const { expect } = chai; 10 | 11 | module.exports = { chai, expect }; 12 | -------------------------------------------------------------------------------- /test/daemon/index.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | const { promisify } = require('util'); 4 | 5 | const { expect } = require('../chai-local'); 6 | const { logger } = require('../helpers'); 7 | 8 | const { _daemonWrapper } = require('../../src/daemon'); 9 | 10 | describe('DaemonWrapper() -- wraps Stratum daemon interface in promises', () => { 11 | const daemonStub = { 12 | batchCmd: () => {}, 13 | cmd: (m, p, c, _s, _r) => c(m) 14 | }; 15 | const Interface = sinon.stub(); 16 | const validateAddress = () => sinon.stub(); 17 | const deps = { promisify, Interface, validateAddress }; 18 | const DaemonWrapper = _daemonWrapper(deps); 19 | 20 | it('correctly wraps the daemon cmd function', async () => { 21 | const wrapper = new DaemonWrapper(daemonStub, logger); 22 | const batch = await wrapper.rpcCmd(1); 23 | expect(batch).to.eql(1); 24 | }); 25 | }); 26 | -------------------------------------------------------------------------------- 
/test/helpers.js: -------------------------------------------------------------------------------- 1 | const redismock = require('redis-mock'); 2 | const { promisify } = require('util'); 3 | 4 | const { PoolLogger } = require('../src/logger'); 5 | 6 | // Ensure we only create a single instance of the redis-mock client. 7 | const mockClient = (() => { 8 | const options = { 9 | detect_buffers: true, 10 | url: process.env.REDIS_URL || 'redis://127.0.0.1:6379' 11 | }; 12 | return redismock.createClient(options); 13 | })(); 14 | 15 | const createClient = () => mockClient; 16 | 17 | const promisedClient = (client = createClient()) => ( 18 | { 19 | flushall: promisify(client.flushall).bind(client), 20 | hincrby: promisify(client.hincrby).bind(client), 21 | hincrbyfloat: promisify(client.hincrbyfloat).bind(client), 22 | sadd: promisify(client.sadd).bind(client), 23 | smembers: promisify(client.smembers).bind(client), 24 | } 25 | ); 26 | 27 | const logger = new PoolLogger({ 28 | logLevel: 'debug', 29 | logColors: false, 30 | tty: false 31 | }); 32 | 33 | const metricCoinInfo = { minPaymentSatoshis: 1.0, magnitude: 10.0, coinPrecision: 10 }; 34 | 35 | const swapProcess = (opts) => { 36 | const origProcess = {}; 37 | Object.keys(opts).forEach((key) => { 38 | origProcess[key] = process[key]; 39 | process[key] = opts[key]; 40 | }); 41 | return origProcess; 42 | }; 43 | 44 | const restoreProcess = (origProcess) => { 45 | Object.entries(origProcess).forEach(([key, value]) => { 46 | process[key] = value; 47 | }); 48 | }; 49 | 50 | const responseStub = () => { 51 | const sinon = require('sinon'); 52 | const res = {}; 53 | res.status = sinon.stub().returns(res); 54 | res.json = sinon.stub().returnsArg(0); 55 | return res; 56 | }; 57 | 58 | module.exports = { 59 | createClient, 60 | promisedClient, 61 | logger, 62 | metricCoinInfo, 63 | swapProcess, 64 | restoreProcess, 65 | responseStub 66 | }; 67 | -------------------------------------------------------------------------------- 
/test/logger/colorize_log_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../chai-local'); 5 | const { _colorizeLog } = require('../../src/logger/colorize_log'); 6 | 7 | describe('colorizeLog() - color/styling for log message items', () => { 8 | const severityColor = () => ['green']; 9 | const colors = { 10 | setTheme: sinon.stub().returns(true), 11 | system: sinon.stub().returnsArg(0), 12 | }; 13 | const colorizeLog = _colorizeLog({ colors, severityColor }); 14 | const items = { system: 'test' }; 15 | 16 | it('applies the correct colors/styling', () => { 17 | colorizeLog('error', items); 18 | expect(colors.setTheme).to.have.callCount(1); 19 | expect(colors.system).to.have.been.calledWith('test'); 20 | }); 21 | }); 22 | -------------------------------------------------------------------------------- /test/logger/format_log_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | const dateFormat = require('dateformat'); 4 | 5 | const { expect } = require('../chai-local'); 6 | const { _formatLog } = require('../../src/logger/format_log'); 7 | 8 | describe('formatLog() - formatting/combining log message items', () => { 9 | const colorizeLog = sinon.stub().returnsArg(0); 10 | const formatLog = _formatLog({ dateFormat, colorizeLog }); 11 | const [system, component, text, subcat] = ['test', 'one', 'two', 'three']; 12 | 13 | describe('when the severity equals the level', () => { 14 | const env = { severityLevels: { error: 4 }, level: 4 }; 15 | const severity = 'error'; 16 | const args = { 17 | severity, system, component, text, subcat 18 | }; 19 | const logPat = new RegExp(`\\(${text}\\) ${subcat}`); 20 | 21 | it('returns a properly formatted log string', () => { 22 | const log = 
formatLog(env)(args); 23 | expect(log).to.match(logPat); 24 | }); 25 | }); 26 | 27 | describe('when the severity is below the level', () => { 28 | const env = { severityLevels: { warning: 3 }, level: 4 }; 29 | const severity = 'warning'; 30 | const args = { severity }; 31 | 32 | it('returns false', () => { 33 | const log = formatLog(env)(args); 34 | expect(log).to.eql(false); 35 | }); 36 | }); 37 | }); 38 | -------------------------------------------------------------------------------- /test/logger/index_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../chai-local'); 5 | 6 | const { severityFunctions, cachedLogger } = require('../../src/logger/utils'); 7 | const { _poolLogger } = require('../../src/logger'); 8 | 9 | // Provides a properly initialized set of stubs and a logger instance 10 | // with supplied dependencies. 11 | const loggerWithDeps = () => { 12 | const formatLog = sinon.stub().returnsArg(0); 13 | const deps = { 14 | formatLog: () => formatLog, 15 | utils: { severityFunctions, cachedLogger } 16 | }; 17 | const PoolLogger = _poolLogger(deps); 18 | const logger = new PoolLogger({ logLevel: 'warning', tty: false }); 19 | return { formatLog, logger }; 20 | }; 21 | 22 | describe('PoolLogger() - output formatted log messages, from their severity', () => { 23 | describe('when calling a valid serverity function', () => { 24 | const { formatLog, logger } = loggerWithDeps(); 25 | const undefItems = { component: undefined, text: undefined, subcat: undefined }; 26 | const items = { severity: 'error', system: 'test', ...undefItems }; 27 | 28 | it('correctly calls formatLog to produce the log message', () => { 29 | logger.error('test'); 30 | expect(formatLog).to.have.been.calledOnceWith(items); 31 | }); 32 | }); 33 | 34 | describe('when calling an invalid serverity function', () => { 35 | const { logger } 
= loggerWithDeps(); 36 | const invalid = () => logger.invalid(); 37 | 38 | it('throws a TypeError', () => { 39 | expect(invalid).to.throw(TypeError); 40 | }); 41 | }); 42 | 43 | describe('when using a cached logger', () => { 44 | const { formatLog, logger } = loggerWithDeps(); 45 | const severity = 'error'; 46 | const [system, component, subcat] = ['sys', 'comp', 'subcat']; 47 | const cached = logger.cached(system, component, subcat); 48 | const items = { 49 | severity, system, component, subcat: 'test', text: 'subcat' 50 | }; 51 | 52 | it('supplies the cached values to the parent logger instance', () => { 53 | cached[severity]('test'); 54 | expect(formatLog).to.have.been.calledOnceWith(items); 55 | }); 56 | }); 57 | }); 58 | -------------------------------------------------------------------------------- /test/payments/index_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../chai-local'); 5 | 6 | const { _poolPayments } = require('../../src/payments'); 7 | 8 | describe('PoolPayments() - starts payment processing background timers', () => { 9 | const poolConfig = { 10 | coin: { name: 'coin' }, 11 | paymentProcessing: { daemon: {}, paymentInterval: 0 } 12 | }; 13 | const portalConfig = { redis: {} }; 14 | 15 | describe('when initPayments returns true', () => { 16 | const initPayments = sinon.stub().resolves(true); 17 | const PoolPayments = _poolPayments({ initPayments }); 18 | const logger = { debug: sinon.stub() }; 19 | const baseLogger = { cached: () => logger }; 20 | const env = { baseLogger, poolConfig, portalConfig }; 21 | 22 | it('logs the payment processing started message', async () => { 23 | const poolPayments = new PoolPayments(env); 24 | await poolPayments.start(); 25 | expect(logger.debug).to.have.been.calledOnce; 26 | }); 27 | }); 28 | 29 | describe('when initPayments returns false', () => { 30 
| const initPayments = sinon.stub().resolves(false); 31 | const PoolPayments = _poolPayments({ initPayments }); 32 | const logger = { debug: sinon.stub() }; 33 | const baseLogger = { cached: () => logger }; 34 | const env = { baseLogger, poolConfig, portalConfig }; 35 | 36 | it('does not log payment processing started message', async () => { 37 | const poolPayments = new PoolPayments(env); 38 | await poolPayments.start(); 39 | expect(logger.debug).not.to.been.called; 40 | }); 41 | }); 42 | }); 43 | -------------------------------------------------------------------------------- /test/payments/init_payments_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../chai-local'); 5 | 6 | const { _initPayments } = require('../../src/payments/init_payments'); 7 | 8 | describe('initPayments() - start the async payment processing timers', () => { 9 | const client = sinon.stub(); 10 | const Redis = function () { return { client }; }; 11 | const coinUtils = sinon.stub().returns(0); 12 | const CoinUtils = function () { return coinUtils; }; 13 | const startPayments = sinon.stub(); 14 | const deps = { Redis, CoinUtils, startPayments }; 15 | 16 | const logger = { warning: sinon.stub() }; 17 | const poolConfig = { 18 | coin: { name: 'coin' }, 19 | addresses: { address: 'AAAAAA' }, 20 | satoshiPrecision: 1, 21 | paymentProcessing: { daemon: {}, minimumPayment: 0 } 22 | }; 23 | const portalConfig = { redis: {} }; 24 | const env = { logger, poolConfig, portalConfig }; 25 | 26 | describe('when the pool address is valid', () => { 27 | const daemon = { isValidAddress: sinon.stub().resolves(true) }; 28 | const Daemon = function () { return daemon; }; 29 | const initPayments = _initPayments({ ...deps, Daemon }); 30 | const startEnv = { 31 | logger, coin: 'coin', client, daemon, coinUtils, poolOptions: poolConfig 32 | }; 33 | 34 | 
it('starts payment processing and resolves to true', () => ( 35 | expect(initPayments(env)).to.eventually.eql(true).then(() => ( 36 | expect(startPayments).to.have.been.calledOnceWith(startEnv) 37 | )) 38 | )); 39 | }); 40 | 41 | describe('when the pool address is invalid', () => { 42 | const daemon = { isValidAddress: sinon.stub().resolves(false) }; 43 | const Daemon = function () { return daemon; }; 44 | const initPayments = _initPayments({ ...deps, Daemon }); 45 | 46 | it('warns the user, but continues payment processing', () => ( 47 | expect(initPayments(env)).to.eventually.eql(true).then(() => ( 48 | expect(logger.warning).to.have.been.calledOnce 49 | )) 50 | )); 51 | }); 52 | }); 53 | -------------------------------------------------------------------------------- /test/payments/initialize_payouts/fetch_pending_blocks_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it, beforeEach, after } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../../chai-local'); 5 | const { createClient, promisedClient } = require('../../helpers'); 6 | 7 | const { fetchPendingBlocks } = require( 8 | '../../../src/payments/initialize_payouts/fetch_pending_blocks' 9 | ); 10 | 11 | describe('fetchPendingBlocks() - initializePayouts pipeline function', () => { 12 | const client = createClient(); 13 | const promised = promisedClient(); 14 | const coin = 'carrot'; 15 | 16 | after(() => { client.quit(); }); 17 | 18 | beforeEach(async () => { 19 | await promised.flushall(); 20 | }); 21 | 22 | describe('with 0 pending blocks', () => { 23 | const logger = { error: sinon.stub().returnsArg(0) }; 24 | const env = { coin, client, logger }; 25 | 26 | it('returns an empty array of rounds', async () => { 27 | const rounds = await fetchPendingBlocks(env); 28 | expect(rounds).to.eql([]); 29 | }); 30 | }); 31 | 32 | describe('with 1 pending block', () => { 33 | const logger = { error: 
sinon.stub().returnsArg(0) }; 34 | const env = { coin, client, logger }; 35 | const pending = { worker: 'AAAAAA' }; 36 | const json = JSON.stringify(pending); 37 | 38 | beforeEach(async () => { 39 | await promised.sadd(`${coin}:blocks:pending`, json); 40 | }); 41 | 42 | it('returns the correct rounds array', async () => { 43 | const rounds = await fetchPendingBlocks(env); 44 | expect(rounds.length).to.eql(1); 45 | expect(rounds[0]).to.eql({ workerAddress: 'AAAAAA', serialized: json }); 46 | }); 47 | }); 48 | }); 49 | -------------------------------------------------------------------------------- /test/payments/initialize_payouts/fetch_unpaid_workers_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it, beforeEach, after } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../../chai-local'); 5 | const { createClient, promisedClient } = require('../../helpers'); 6 | 7 | const { fetchUnpaidWorkers } = require( 8 | '../../../src/payments/initialize_payouts/fetch_unpaid_workers' 9 | ); 10 | 11 | describe('fetchUnpaidWorkers() - initializePayouts pipeline function', () => { 12 | const client = createClient(); 13 | const promised = promisedClient(); 14 | const coin = 'carrot'; 15 | const coinsToSatoshies = (coins) => coins; 16 | 17 | after(() => { client.quit(); }); 18 | 19 | beforeEach(async () => { 20 | await promised.flushall(); 21 | }); 22 | 23 | describe('with 0 unpaid workers', () => { 24 | const logger = { error: sinon.stub().returnsArg(0) }; 25 | const env = { 26 | coin, client, logger, coinUtils: { coinsToSatoshies } 27 | }; 28 | 29 | it('returns an empty set of workers', async () => { 30 | const workers = await fetchUnpaidWorkers(env); 31 | expect(workers).to.eql({}); 32 | }); 33 | }); 34 | 35 | describe('with 1 unpaid worker', () => { 36 | const logger = { error: sinon.stub().returnsArg(0) }; 37 | const env = { 38 | coin, client, logger, coinUtils: { 
coinsToSatoshies } 39 | }; 40 | const unpaid = ['addr', '1.111']; 41 | 42 | beforeEach(async () => { 43 | await promised.hincrbyfloat(`${coin}:payments:unpaid`, ...unpaid); 44 | }); 45 | 46 | it('returns 1 worker object, with the correct balance', async () => { 47 | const workers = await fetchUnpaidWorkers(env); 48 | expect(workers).to.eql({ addr: { balance: 1.111 } }); 49 | }); 50 | }); 51 | }); 52 | -------------------------------------------------------------------------------- /test/payments/initialize_payouts/find_duplicate_blocks_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const { expect } = require('../../chai-local'); 3 | 4 | const { findDuplicateBlocks } = require( 5 | '../../../src/payments/initialize_payouts/find_duplicate_blocks' 6 | ); 7 | 8 | describe('findDuplicateBlocks() - identify and flag duplicate records in rounds', () => { 9 | describe('with no dulicate rounds', () => { 10 | const rounds = [{ height: 1 }, { height: 2 }]; 11 | 12 | it('retuns an empty set of duplicates', () => { 13 | const dups = findDuplicateBlocks(rounds); 14 | expect(dups.length).to.eql(0); 15 | }); 16 | }); 17 | 18 | describe('with at least one pair of dulicate rounds', () => { 19 | const rounds = [{ height: 1 }, { height: 2 }, { height: 1 }]; 20 | 21 | it('retuns a set of 2 correctly flagged duplicates', () => { 22 | const dups = findDuplicateBlocks(rounds); 23 | expect(dups.length).to.eql(2); 24 | expect(dups[0].duplicate).to.eql(true); 25 | }); 26 | }); 27 | }); 28 | -------------------------------------------------------------------------------- /test/payments/initialize_payouts/index_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it, beforeEach, after } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../../chai-local'); 5 | const { createClient, promisedClient } = 
require('../../helpers'); 6 | 7 | const { initializePayouts } = require('../../../src/payments/initialize_payouts'); 8 | 9 | describe('initializePayouts() - processPayments pipeline function', () => { 10 | const client = createClient(); 11 | const promised = promisedClient(); 12 | const daemon = { rpcBatch: sinon.stub().resolves([]) }; 13 | const logger = { error: sinon.stub().returnsArg(0) }; 14 | const coinsToSatoshies = (coins) => coins; 15 | const coin = 'carrot'; 16 | const env = { 17 | coin, client, daemon, logger, coinUtils: { coinsToSatoshies } 18 | }; 19 | 20 | after(() => { client.quit(); }); 21 | 22 | describe('with 1 unpaid, 1 pending, and no duplicates', () => { 23 | const address = 'AAAAAA'; 24 | const unpaid = [address, '1.111']; 25 | const pending = { height: 1, worker: address }; 26 | const json = JSON.stringify(pending); 27 | 28 | beforeEach(async () => { 29 | await promised.flushall(); 30 | await promised.hincrbyfloat(`${coin}:payments:unpaid`, ...unpaid); 31 | await promised.sadd(`${coin}:blocks:pending`, json); 32 | }); 33 | 34 | it('retuns the correct workers and rounds', async () => { 35 | const { workers, rounds } = await initializePayouts(env); 36 | expect(workers[address].balance).to.eql(1.111); 37 | expect(rounds.length).to.eql(1); 38 | expect(rounds[0].workerAddress).to.eql(address); 39 | expect(rounds[0].duplicate).to.eql(false); 40 | }); 41 | }); 42 | }); 43 | -------------------------------------------------------------------------------- /test/payments/initialize_payouts/move_invalid_blocks_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it, beforeEach, after } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../../chai-local'); 5 | const { createClient, promisedClient } = require('../../helpers'); 6 | 7 | const { moveInvalidBlocks } = require( 8 | '../../../src/payments/initialize_payouts/move_invalid_blocks' 9 | ); 10 | 11 | 
describe('moveInvalidBlocks() - initializePayouts pipeline function', () => { 12 | const client = createClient(); 13 | const promised = promisedClient(); 14 | const coin = 'carrot'; 15 | 16 | after(() => { client.quit(); }); 17 | 18 | beforeEach(async () => { 19 | await promised.flushall(); 20 | }); 21 | 22 | describe('with an invalid block', () => { 23 | const logger = { error: sinon.stub().returnsArg(0) }; 24 | const env = { coin, client, logger }; 25 | const pending = { worker: 'AAAAAA' }; 26 | const json = JSON.stringify(pending); 27 | 28 | beforeEach(async () => { 29 | await promised.sadd(`${coin}:blocks:pending`, json); 30 | }); 31 | 32 | it('moves the blocks:pending entry to blocks:duplicate', async () => { 33 | await moveInvalidBlocks(env)([json]); 34 | const dups = await promised.smembers(`${coin}:blocks:duplicate`); 35 | expect(dups).to.eql([json]); 36 | }); 37 | }); 38 | }); 39 | -------------------------------------------------------------------------------- /test/payments/process_share_blocks/confirmed_block/compute_shared_payouts_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../../../chai-local'); 5 | 6 | const { computeSharedPayouts } = require( 7 | '../../../../src/payments/process_share_blocks/confirmed_block/compute_shared_payouts' 8 | ); 9 | 10 | describe('computeSharedPayouts() - confirmedBlock support function', () => { 11 | const coinUtils = { satoshisToCoins: sinon.stub().returnsArg(0) }; 12 | const round = { height: 3, blockHash: 'HASH' }; 13 | const addr = 'AAAAAA'; 14 | const totalShares = 100; 15 | const reward = 10.0; 16 | 17 | describe('for a worker who contributed 60% of the total shares', () => { 18 | const logger = { error: sinon.stub().returnsArg(0) }; 19 | const worker = { roundShares: 60, records: { [round.height]: {} } }; 20 | const env = { logger, coinUtils }; 21 | 
const args = { 22 | workers: { [addr]: worker }, shared: { [addr]: 1 }, round, totalShares, reward 23 | }; 24 | 25 | it('sets a reward amount reduced by 60%', () => { 26 | computeSharedPayouts(env)(args); 27 | expect(worker.reward).to.eql(6); 28 | }); 29 | }); 30 | 31 | describe('for a worker who contributed > 100% of the total shares', () => { 32 | const logger = { error: sinon.stub().returnsArg(0) }; 33 | const worker = { roundShares: 101 }; 34 | const env = { logger, coinUtils }; 35 | const args = { 36 | workers: { [addr]: worker }, shared: { [addr]: 1 }, round, totalShares, reward 37 | }; 38 | 39 | it('logs an error', () => { 40 | computeSharedPayouts(env)(args); 41 | expect(logger.error).to.have.been.calledOnce; 42 | }); 43 | }); 44 | }); 45 | -------------------------------------------------------------------------------- /test/payments/process_share_blocks/confirmed_block/index_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../../../chai-local'); 5 | 6 | const { CoinUtils } = require('../../../../src/payments/coin_utils'); 7 | const { metricCoinInfo } = require('../../../helpers'); 8 | const { confirmedBlock } = require( 9 | '../../../../src/payments/process_share_blocks/confirmed_block' 10 | ); 11 | 12 | // Integation tests to verify that confirmed_block/index collaborates with its 13 | // utility functions properly. 
14 | describe('confirmedBlock() - prepareRounds category function', () => { 15 | const feeSatoshi = 1; 16 | const coinUtils = new CoinUtils(metricCoinInfo); 17 | const reward = 0.5; 18 | const addr = 'AAAAAA'; 19 | 20 | describe('for a solo round, with 1 worker', () => { 21 | const round = { 22 | soloMined: true, workerAddress: addr, reward, height: 1 23 | }; 24 | const solo = { [addr]: 1 }; 25 | const workers = { [addr]: {} }; 26 | const env = { coinUtils, feeSatoshi }; 27 | const args = { workers, round, solo }; 28 | 29 | it('correctly updates the worker object', () => { 30 | confirmedBlock(env)(args); 31 | expect(workers[addr].roundShares).to.eql(1); 32 | expect(workers[addr].reward).to.eql(4); 33 | }); 34 | }); 35 | 36 | describe('for a shared round, with 1 worker', () => { 37 | const round = { reward: 0.5, height: 1 }; 38 | const shared = { [addr]: 10 }; 39 | const workers = { [addr]: {} }; 40 | const logger = { error: sinon.stub().returnsArg(0) }; 41 | const env = { logger, coinUtils, feeSatoshi }; 42 | const args = { 43 | workers, round, shared, times: {}, maxTime: 1 44 | }; 45 | 46 | it('correctly updates the worker object', () => { 47 | confirmedBlock(env)(args); 48 | expect(workers[addr].reward).to.eql(4); 49 | expect(workers[addr].records[1].amounts).to.eql(0.4); 50 | }); 51 | }); 52 | }); 53 | -------------------------------------------------------------------------------- /test/payments/process_share_blocks/confirmed_block/shared_round_total_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../../../chai-local'); 5 | 6 | const { sharedRoundTotal } = require( 7 | '../../../../src/payments/process_share_blocks/confirmed_block/shared_round_total' 8 | ); 9 | 10 | describe('sharedRoundTotal() - calculate adjusted shares for workers in a round', () => { 11 | const round = { height: 11 }; 12 | const 
addrs = ['AAAAAA', 'BBBBBB']; 13 | const logger = { error: sinon.stub().returnsArg(0) }; 14 | const workers = {}; 15 | const env = { logger }; 16 | 17 | describe('for a round with 1 worker and no maxTime', () => { 18 | const shared = { [addrs[0]]: 10 }; 19 | const args = { 20 | workers, round, shared, times: {}, maxTime: 0 21 | }; 22 | 23 | it('returns zero for total shares and worker.roundShares', () => { 24 | const total = sharedRoundTotal(env)(args); 25 | expect(total).to.eql(0); 26 | expect(workers[addrs[0]].roundShares).to.eql(0); 27 | }); 28 | }); 29 | 30 | describe('for a round with 2 workers getting partial credit', () => { 31 | const shared = { [addrs[0]]: 10, [addrs[1]]: 10 }; 32 | const times = { [addrs[0]]: 40, [addrs[1]]: 50 }; 33 | const args = { 34 | workers, round, shared, times, maxTime: 100 35 | }; 36 | 37 | it('returns the adjusted total shares and sets worker.roundShares', () => { 38 | const total = sharedRoundTotal(env)(args); 39 | expect(total).to.eql(9); 40 | expect(Object.keys(workers).length).to.eql(2); 41 | expect(workers[addrs[0]].roundShares).to.eql(4); 42 | }); 43 | }); 44 | 45 | describe('for a round with 1 worker, with an invalid time', () => { 46 | const shared = { [addrs[0]]: 10 }; 47 | const times = { [addrs[0]]: 101 }; 48 | const args = { 49 | workers, round, shared, times, maxTime: 100 50 | }; 51 | 52 | it('returns 0 total shares and logs an error message', () => { 53 | const total = sharedRoundTotal(env)(args); 54 | expect(total).to.eql(0); 55 | expect(logger.error).to.have.been.calledOnce; 56 | }); 57 | }); 58 | }); 59 | -------------------------------------------------------------------------------- /test/payments/process_share_blocks/fetch_round_shares_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it, beforeEach, after } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../../chai-local'); 5 | const { createClient } = 
require('../../helpers'); 6 | 7 | const { fetchRoundShares } = require( 8 | '../../../src/payments/process_share_blocks/fetch_round_shares' 9 | ); 10 | 11 | describe('fetchRoundShares() - processShareBlocks pipeline function', () => { 12 | const client = createClient(); 13 | const coin = 'carrot'; 14 | 15 | after(() => { client.quit(); }); 16 | 17 | describe('for a round with 2 shares, 1 that is soloMined', () => { 18 | const rounds = [{ height: 111 }]; 19 | const shares = [ 20 | JSON.stringify({ time: 111, worker: 'AAAAAA' }), 21 | JSON.stringify({ time: 222, worker: 'BBBBBB', soloMined: true }), 22 | ]; 23 | const logger = { error: sinon.stub().returnsArg(0) }; 24 | const env = { coin, client, logger }; 25 | 26 | beforeEach(() => { 27 | client.flushall(() => {}); 28 | client.hincrby(`${coin}:shares:round111`, shares[0], 101); 29 | client.hincrby(`${coin}:shares:round111`, shares[1], 222); 30 | client.hincrby(`${coin}:shares:round112`, shares[2], 333); 31 | }); 32 | 33 | it('retuns the solo and shared lists', () => ( 34 | fetchRoundShares(env)(rounds) 35 | .then(({ solo, shared }) => { 36 | expect(shared.length).to.eql(1); 37 | expect(Object.keys(solo[0])).to.eql(['BBBBBB']); 38 | }) 39 | )); 40 | }); 41 | 42 | describe('for a round with 0 shares', () => { 43 | const rounds = [{ height: 111 }]; 44 | const logger = { error: sinon.stub().returnsArg(0) }; 45 | const env = { coin, client, logger }; 46 | 47 | beforeEach(() => client.flushall(() => {})); 48 | 49 | it('throws an "Invalid round shares" error', () => { 50 | expect(fetchRoundShares(env)(rounds)) 51 | .to.be.rejectedWith(Error, /Invalid round shares/); 52 | }); 53 | }); 54 | }); 55 | -------------------------------------------------------------------------------- /test/payments/process_share_blocks/fetch_round_times_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it, beforeEach, after } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 
4 | const { expect } = require('../../chai-local'); 5 | const { createClient } = require('../../helpers'); 6 | 7 | const { fetchRoundTimes } = require( 8 | '../../../src/payments/process_share_blocks/fetch_round_times' 9 | ); 10 | 11 | describe('fetchRoundTimes() - processShareBlocks pipeline function', () => { 12 | const client = createClient(); 13 | const coin = 'carrot'; 14 | const addrs = ['AAAAAA', 'BBBBBB']; 15 | 16 | after(() => { client.quit(); }); 17 | 18 | describe('with multiple round times entries', () => { 19 | const rounds = [{ height: 111 }, { height: 112 }]; 20 | const logger = { error: sinon.stub().returnsArg(0) }; 21 | const env = { coin, client, logger }; 22 | 23 | beforeEach(() => { 24 | client.flushall(() => {}); 25 | client.hincrbyfloat(`${coin}:times:times111`, addrs[0], 0.111); 26 | client.hincrbyfloat(`${coin}:times:times111`, addrs[1], 0.222); 27 | client.hincrbyfloat(`${coin}:times:times112`, addrs[1], 0.333); 28 | }); 29 | 30 | it('retuns the parsed times for the round addresses', async () => { 31 | const times = await fetchRoundTimes(env)(rounds); 32 | expect(times.length).to.eql(2); 33 | expect(times[0].AAAAAA).to.eql(0.111); 34 | }); 35 | }); 36 | 37 | describe('with an invalid round time entry', () => { 38 | const rounds = [{ height: 111 }]; 39 | const logger = { error: sinon.stub().returnsArg(0) }; 40 | const env = { 41 | coin, client, logger, rounds 42 | }; 43 | 44 | beforeEach(() => client.flushall(() => {})); 45 | 46 | it('throws an "Invalid round times" error', () => { 47 | expect(fetchRoundTimes(env)(rounds)).to.be.rejectedWith(Error, /Invalid round/); 48 | }); 49 | }); 50 | }); 51 | -------------------------------------------------------------------------------- /test/payments/process_share_blocks/immature_block/shared_round_total_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | 3 | const { expect } = require('../../../chai-local'); 4 | 
5 | const { findOrNew } = require('../../../../src/payments/utils'); 6 | const { _sharedRoundTotal } = require( 7 | '../../../../src/payments/process_share_blocks/immature_block/shared_round_total' 8 | ); 9 | 10 | describe('sharedRoundTotal() - calculate adjusted shares for workers in a round', () => { 11 | const addrs = ['AAAAAA', 'BBBBBB']; 12 | const lostShares = () => 0; 13 | const sharedRoundTotal = _sharedRoundTotal({ findOrNew, lostShares }); 14 | 15 | describe('for a round with 1 worker and no maxTime', () => { 16 | const shared = { [addrs[0]]: 10 }; 17 | const workers = {}; 18 | const args = { 19 | workers, shared, times: {}, maxTime: 0 20 | }; 21 | 22 | it('returns zero for total shares and worker.roundShares', () => { 23 | const total = sharedRoundTotal(args); 24 | expect(total).to.eql(0); 25 | expect(workers[addrs[0]].roundShares).to.eql(0); 26 | }); 27 | }); 28 | 29 | describe('for a round with 2 workers', () => { 30 | const shared = { [addrs[0]]: 10, [addrs[1]]: 5 }; 31 | const workers = { [addrs[0]]: {} }; 32 | const args = { 33 | workers, shared, times: {}, maxTime: 1 34 | }; 35 | 36 | it('returns the total round shares and sets worker.roundShares', () => { 37 | const total = sharedRoundTotal(args); 38 | expect(total).to.eql(15); 39 | expect(Object.keys(workers).length).to.eql(2); 40 | expect(workers[addrs[0]].roundShares).to.eql(10); 41 | }); 42 | }); 43 | }); 44 | -------------------------------------------------------------------------------- /test/payments/process_share_blocks/lost_shares_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | 3 | const { expect } = require('../../chai-local'); 4 | 5 | const { lostShares } = require( 6 | '../../../src/payments/process_share_blocks/lost_shares' 7 | ); 8 | 9 | describe('lostShares() - based on time mining, calculate amount contributed shares "lost"', () => { 10 | const shares = 10; 11 | const maxTime = 10; 12 | 13 
| describe('when shares were contributed for >= 51% of round time', () => { 14 | const time = 6; 15 | 16 | it('returns zero "lost" shares adjustment', () => { 17 | const result = lostShares(shares, time, maxTime); 18 | expect(result).to.eql(0); 19 | }); 20 | }); 21 | 22 | describe('when shares were contributed for only 40% of round time', () => { 23 | const time = 4; 24 | 25 | it('returns a "lost" shares amount of 60%', () => { 26 | const result = lostShares(shares, time, maxTime); 27 | expect(result).to.eql(6); 28 | }); 29 | }); 30 | }); 31 | -------------------------------------------------------------------------------- /test/payments/process_share_blocks/move_manual_rounds_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it, beforeEach, after } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../../chai-local'); 5 | const { createClient, promisedClient } = require('../../helpers'); 6 | 7 | const { moveManualRounds } = require( 8 | '../../../src/payments/process_share_blocks/move_manual_rounds' 9 | ); 10 | 11 | describe('moveManualRounds() - processShareBlocks pipeline function', () => { 12 | const client = createClient(); 13 | const promised = promisedClient(); 14 | const coin = 'carrot'; 15 | 16 | after(() => { client.quit(); }); 17 | 18 | describe('with a pending manual round', () => { 19 | const rounds = [{ height: 111, serialized: 'round_1' }]; 20 | const logger = { error: sinon.stub().returnsArg(0) }; 21 | const env = { coin, client, logger }; 22 | 23 | beforeEach(async () => { 24 | await promised.flushall(); 25 | await promised.sadd(`${coin}:blocks:pending`, rounds[0].serialized); 26 | }); 27 | 28 | it('moves the round to the "manual" location', async () => { 29 | await moveManualRounds(env)(rounds); 30 | const moved = await promised.smembers(`${coin}:blocks:manual`); 31 | expect(moved).to.eql(['round_1']); 32 | 
expect(logger.error).to.have.been.calledOnce; 33 | }); 34 | }); 35 | }); 36 | -------------------------------------------------------------------------------- /test/payments/process_share_blocks/process_auto_rounds_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../../chai-local'); 5 | 6 | const { _processAutoRounds } = require( 7 | '../../../src/payments/process_share_blocks/process_auto_rounds' 8 | ); 9 | 10 | describe('processAutoRounds() - delegate payable rounds to payout calculators', () => { 11 | const buildDeps = () => { 12 | const spies = { generate: sinon.spy(), immature: sinon.spy() }; 13 | const confirmedBlock = () => spies.generate; 14 | const immatureBlock = () => spies.immature; 15 | const deps = { confirmedBlock, immatureBlock }; 16 | return { spies, deps }; 17 | }; 18 | 19 | describe('with 1 "generate" round and 1 "orphan" round', () => { 20 | const { spies, deps } = buildDeps(); 21 | const processAutoRounds = _processAutoRounds(deps)({}); 22 | const rounds = [{ category: 'generate' }, { category: 'orphan' }]; 23 | const args = { 24 | rounds, times: [{}, {}], solo: [], shared: [], workers: {} 25 | }; 26 | 27 | it('delegates to confirmedBlock', () => { 28 | processAutoRounds(args); 29 | expect(spies.generate).to.have.been.calledOnce; 30 | expect(spies.generate.args[0][0].round).to.eql(rounds[0]); 31 | expect(spies.immature).not.to.have.been.called; 32 | }); 33 | }); 34 | 35 | describe('with 1 "immature" round', () => { 36 | const { spies, deps } = buildDeps(); 37 | const processAutoRounds = _processAutoRounds(deps)({}); 38 | const rounds = [{ category: 'immature' }]; 39 | const args = { 40 | rounds, times: [{}], solo: [], shared: [], workers: {} 41 | }; 42 | 43 | it('delegates to immatureBlock', () => { 44 | processAutoRounds(args); 45 | expect(spies.immature).to.have.been.calledOnce; 46 | 
expect(spies.generate).not.to.have.been.called; 47 | }); 48 | }); 49 | }); 50 | -------------------------------------------------------------------------------- /test/payments/process_share_blocks/separate_rounds_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const { expect } = require('../../chai-local'); 3 | 4 | const { separateRounds } = require( 5 | '../../../src/payments/process_share_blocks/separate_rounds' 6 | ); 7 | 8 | describe('separateRounds() - processShareBlocks pipeline function', () => { 9 | describe('with 1 auto and 1 manual rounds', () => { 10 | const rounds = [{ height: 111 }, { height: 112 }]; 11 | const solo = [{}, {}]; 12 | const shared = [{ 111: 0 }, {}]; 13 | const args = { rounds, solo, shared }; 14 | 15 | it('retuns 1 autoRounds and 1 manualRounds entries', () => { 16 | const { autoRounds, manualRounds } = separateRounds(args); 17 | expect(autoRounds.length).to.eql(1); 18 | expect(autoRounds[0]).to.include({ height: 111 }); 19 | expect(manualRounds.length).to.eql(1); 20 | expect(manualRounds[0]).to.include({ height: 112 }); 21 | }); 22 | }); 23 | }); 24 | -------------------------------------------------------------------------------- /test/payments/start_payments_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it, after } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../chai-local'); 5 | 6 | const { _startPayments } = require('../../src/payments/start_payments'); 7 | 8 | // Sanity check test, as there's no branching login in startPayments 9 | describe('startPayments() - start the async payment processing timers', () => { 10 | const clock = sinon.useFakeTimers(); 11 | const _processPayments = sinon.stub().resolves(true); 12 | const processPayments = () => _processPayments; 13 | const startPayments = _startPayments({ processPayments }); 14 | 
15 | const logger = sinon.stub(); 16 | const poolOptions = { paymentProcessing: { checkInterval: 0.8, paymentInterval: 0.9 } }; 17 | 18 | after(() => clock.restore()); 19 | 20 | it('correctly delegates to paymentProcessing', () => { 21 | const { checkTimerId, paymentTimerId } = startPayments({ logger, poolOptions }); 22 | clock.tick(1000); 23 | clearInterval(checkTimerId); 24 | clearInterval(paymentTimerId); 25 | expect(_processPayments).to.have.callCount(3); 26 | expect(_processPayments.getCall(0)).to.have.been.calledWith('start', 100); 27 | expect(_processPayments.getCall(1)).to.have.been.calledWith('check', 800); 28 | expect(_processPayments.getCall(2)).to.have.been.calledWith('payment', 900); 29 | }); 30 | }); 31 | -------------------------------------------------------------------------------- /test/payments/update_rounds/fetch_transactions_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../../chai-local'); 5 | const { fetchTransactions } = require( 6 | '../../../src/payments/update_rounds/fetch_transactions' 7 | ); 8 | 9 | // Without a viable daemon service stand-in, this test exists for coverage purposes. 
10 | describe('fetchTransactions() - wraps daemon.rpcBatch("gettransaction") call', () => { 11 | describe('with 1 round to retrieve', () => { 12 | const rounds = [{ txHash: '0000' }]; 13 | const data = [{ result: { confirmations: 0 } }]; 14 | const daemon = { rpcBatch: sinon.stub().resolves(data) }; 15 | const logger = { 16 | error: sinon.stub().returnsArg(0), 17 | special: sinon.stub().returnsArg(0) 18 | }; 19 | const env = { daemon, logger }; 20 | 21 | it('returns the batch response', async () => { 22 | const txs = await fetchTransactions(env)(rounds); 23 | expect(txs[0].result).to.eql({ confirmations: 0 }); 24 | }); 25 | }); 26 | }); 27 | -------------------------------------------------------------------------------- /test/payments/update_rounds/flag_deletable_rounds_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | 3 | const { expect } = require('../../chai-local'); 4 | 5 | const { flagDeletableRounds } = require( 6 | '../../../src/payments/update_rounds/flag_deletable_rounds' 7 | ); 8 | 9 | describe('flagDeletableRounds() - validateTransactions sub-function', () => { 10 | describe('orphan round without a duplicate height', () => { 11 | const rounds = [ 12 | { category: 'orphan', height: 0, serialized: 'foo' }, 13 | { category: 'generate', height: 1, serialized: 'bar' } 14 | ]; 15 | 16 | it('flags the round for deleting', () => { 17 | flagDeletableRounds(rounds); 18 | expect(rounds[0].canDeleteShares).to.eql(true); 19 | }); 20 | }); 21 | 22 | describe('orphan round with duplicate height, same serialized', () => { 23 | const rounds = [ 24 | { category: 'orphan', height: 0, serialized: 'foo' }, 25 | { category: 'generate', height: 0, serialized: 'foo' } 26 | ]; 27 | 28 | it('flags the round for deleting', () => { 29 | flagDeletableRounds(rounds); 30 | expect(rounds[0].canDeleteShares).to.eql(true); 31 | }); 32 | }); 33 | 34 | describe('orphan round with duplicate height, 
diff serialized', () => { 35 | const rounds = [ 36 | { category: 'orphan', height: 0, serialized: 'foo' }, 37 | { category: 'generate', height: 0, serialized: 'bar' } 38 | ]; 39 | 40 | it('does not flag the round for deleting', () => { 41 | flagDeletableRounds(rounds); 42 | expect(rounds[0].canDeleteShares).to.eql(false); 43 | }); 44 | }); 45 | }); 46 | -------------------------------------------------------------------------------- /test/payments/update_rounds/index_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../../chai-local'); 5 | const { updateRounds } = require( 6 | '../../../src/payments/update_rounds' 7 | ); 8 | 9 | // Using the coin Daemon, determine which round blocks are valid for payout. 10 | // Updates the reward amount for valid/complete blocks, flag invalid blocks 11 | // with 'kicked' category. Then flag invalid blocks for possible deletion. 
12 | describe('updateRounds() - processPayments sub-process function', () => { 13 | const poolOptions = { addresses: { address: 'AAAAAA' } }; 14 | const coinUtils = { coinsRound: (c) => c }; 15 | 16 | describe('with 1 complete round to update', () => { 17 | const round = { category: 'orphan', txHash: 'txHash' }; 18 | const detail = { category: 'generate', address: 'AAAAAA', amount: 5 }; 19 | const tx = { 20 | result: { 21 | confirmations: 10, 22 | details: [detail] 23 | } 24 | }; 25 | const daemon = { rpcBatch: sinon.stub().resolves([tx]) }; 26 | const logger = { 27 | error: sinon.stub().returnsArg(0), 28 | special: sinon.stub().returnsArg(0) 29 | }; 30 | const env = { 31 | daemon, logger, poolOptions, coinUtils 32 | }; 33 | 34 | it('updates the round reward', async () => { 35 | await updateRounds(env)([round]); 36 | expect(round.reward).to.eql(5); 37 | }); 38 | }); 39 | }); 40 | -------------------------------------------------------------------------------- /test/payments/utils_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../chai-local'); 5 | 6 | const { calculateTotalOwed } = require('../../src/payments/utils'); 7 | 8 | describe('calculateTotalOwed() - processShareBlocks pipeline function', () => { 9 | const feeSatoshi = 3; 10 | const coinsToSatoshies = sinon.stub().returnsArg(0); 11 | const coinUtils = { coinsToSatoshies }; 12 | 13 | describe('for 1 round and no workers', () => { 14 | const rounds = [{ category: 'generate', reward: 5 }]; 15 | const env = { 16 | feeSatoshi, coinUtils, rounds, workers: {} 17 | }; 18 | 19 | it('retuns owed == 2', () => { 20 | const { owed } = calculateTotalOwed(env)({}); 21 | expect(owed).to.eql(2); 22 | }); 23 | }); 24 | 25 | describe('for 1 round and 1 worker balance', () => { 26 | const rounds = [{ category: 'generate', reward: 5 }]; 27 | const env = { 28 | 
feeSatoshi, coinUtils, rounds, workers: { a: { balance: 1 } } 29 | }; 30 | 31 | it('retuns owed == 3', () => { 32 | const { owed } = calculateTotalOwed(env)({}); 33 | expect(owed).to.eql(3); 34 | }); 35 | }); 36 | }); 37 | -------------------------------------------------------------------------------- /test/redis/clients_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../chai-local'); 5 | const { _standard, _cluster } = require('../../src/redis/clients'); 6 | 7 | describe('standard() - Factory function for standard Redis clients', () => { 8 | const redis = { createClient: sinon.stub().returnsArg(0) }; 9 | 10 | describe('when supplied "host", "port" and "password"', () => { 11 | const standard = _standard({ redis }); 12 | const args = { host: 1, port: 2, password: '3' }; 13 | 14 | it('sends the correct client config to redis.createClient', () => { 15 | const config = standard(args); 16 | expect(Object.keys(config)).to.include('password'); 17 | }); 18 | }); 19 | }); 20 | 21 | describe('cluster() - Factory function for clustered Redis clients', () => { 22 | const redis = { createClient: sinon.stub().returnsArg(0) }; 23 | const RedisClustr = sinon.stub().returnsArg(0); 24 | 25 | describe('when supplied only "host" and "port"', () => { 26 | const cluster = _cluster({ redis, RedisClustr }); 27 | const args = { host: 1, port: 2 }; 28 | 29 | it('sends the correct cluster config to RedisClustr', () => { 30 | const config = cluster(args); 31 | expect(Object.keys(config)).to.not.include('redisOptions'); 32 | }); 33 | }); 34 | }); 35 | -------------------------------------------------------------------------------- /test/redis/fetch_version_num_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 
4 | const { expect } = require('../chai-local'); 5 | const { _fetchVersionNum } = require('../../src/redis/fetch_version_num'); 6 | 7 | describe('fetchVersionNum() - Redis sub-function', () => { 8 | const logger = { error: sinon.stub().returnsArg(0) }; 9 | const client = sinon.stub(); 10 | 11 | describe('when the server provides a version number', () => { 12 | const infoReturn = 'foo:bar\r\nredis_version:6.0.9\r\n'; 13 | const promiseInfo = () => sinon.stub().resolves(infoReturn); 14 | const promiseCmd = () => promiseInfo; 15 | const fetchVersionNum = _fetchVersionNum({ promiseCmd }); 16 | 17 | it('returns a resolved promise, containing a parseFloat of the version', () => { 18 | const promise = fetchVersionNum({ logger, client }); 19 | expect(promise).to.eventually.eql(6.0); 20 | }); 21 | }); 22 | 23 | describe('when the server fails to provide a version number', () => { 24 | const infoReturn = 'foo:bar\r\n'; 25 | const promiseInfo = () => sinon.stub().resolves(infoReturn); 26 | const promiseCmd = () => promiseInfo; 27 | const fetchVersionNum = _fetchVersionNum({ promiseCmd }); 28 | 29 | it('returns a resolved promise, containing false', () => { 30 | const promise = fetchVersionNum({ logger, client }); 31 | expect(promise).to.eventually.eql(false); 32 | }); 33 | }); 34 | 35 | describe('when fetching Redis info fails', () => { 36 | const infoError = new Error('infoError'); 37 | const promiseInfo = () => sinon.stub().rejects(infoError); 38 | const promiseCmd = () => promiseInfo; 39 | const fetchVersionNum = _fetchVersionNum({ promiseCmd }); 40 | 41 | it('returns a rejected promise', () => { 42 | const promise = fetchVersionNum({ logger, client }); 43 | expect(promise).to.be.rejectedWith(Error, 'infoError'); 44 | }); 45 | }); 46 | }); 47 | -------------------------------------------------------------------------------- /test/redis/index_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = 
require('mocha'); 2 | const sinon = require('sinon'); 3 | const { EventEmitter } = require('events'); 4 | 5 | const { expect } = require('../chai-local'); 6 | const { _redis } = require('../../src/redis'); 7 | 8 | describe('Redis() - Constuctor for client instances and event listeners', () => { 9 | const config = { host: 1, port: 2 }; 10 | 11 | describe('when supplied a non-cluster connection config', () => { 12 | const clients = { standard: sinon.stub().returnsArg(0) }; 13 | const Redis = _redis({ clients }); 14 | 15 | it('initializes the correct client instance', () => { 16 | new Redis(config); 17 | expect(clients.standard).to.have.callCount(1); 18 | }); 19 | }); 20 | 21 | describe('when attaching events to the client instance', () => { 22 | const logger = { error: sinon.stub().returnsArg(0) }; 23 | const isValidVersion = () => sinon.stub().resolves(true); 24 | const clients = { standard: sinon.stub().returns(new EventEmitter()) }; 25 | const Redis = _redis({ clients, isValidVersion }); 26 | 27 | it('passes emitted events to the appropriate logger severity', () => { 28 | const instance = new Redis(config); 29 | instance.attachEvents(logger); 30 | instance.client.emit('error', ''); 31 | expect(logger.error).to.have.callCount(1); 32 | }); 33 | }); 34 | }); 35 | -------------------------------------------------------------------------------- /test/redis/is_valid_version_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../chai-local'); 5 | const { _isValidVersion } = require('../../src/redis/is_valid_version'); 6 | 7 | describe('isValidVersion() - Redis sub-function', () => { 8 | const logger = { error: sinon.stub().returnsArg(0) }; 9 | const client = sinon.stub(); 10 | 11 | describe('when the fetched version is >= number', () => { 12 | const num = 6.0; 13 | const fetchVersionNum = sinon.stub().resolves(num); 
14 | const isValidVersion = _isValidVersion({ fetchVersionNum }); 15 | const env = { client, logger }; 16 | 17 | it('returns a resolved promise, containing true', () => { 18 | const promise = isValidVersion(env)(num); 19 | expect(promise).to.eventually.eql(true); 20 | }); 21 | }); 22 | 23 | describe('when the fetched version is < number', () => { 24 | const num = 6.0; 25 | const fetchVersionNum = sinon.stub().resolves(num - 0.1); 26 | const isValidVersion = _isValidVersion({ fetchVersionNum }); 27 | const env = { client, logger }; 28 | 29 | it('returns a rejected promise, with "Redis verison invalid"', () => { 30 | const promise = isValidVersion(env)(num); 31 | expect(promise).to.be.rejectedWith(Error, 'Redis version invalid'); 32 | }); 33 | }); 34 | }); 35 | -------------------------------------------------------------------------------- /test/shares/fetch_times_shares_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../chai-local'); 5 | const { _fetchTimesShares } = require('../../src/shares/fetch_times_shares'); 6 | 7 | describe('fetchTimesShares() - PoolShares.handleShare pipeline function', () => { 8 | const logger = { error: sinon.stub().returnsArg(0) }; 9 | const coin = 'coin'; 10 | 11 | describe('when the client retrieves data successfully', () => { 12 | const timesCurrent = { address: 111 }; 13 | const execReturn = [undefined, undefined, undefined, timesCurrent]; 14 | const promiseExec = () => sinon.stub().resolves(execReturn); 15 | const fetchTimesShares = _fetchTimesShares({ promiseExec }); 16 | 17 | it('returns the retrieved sharesData object', async () => { 18 | const sharesData = await fetchTimesShares({ logger, coin }); 19 | expect(Object.keys(sharesData)).to.eql( 20 | ['startTimes', 'shareTimes', 'currentShares', 'currentTimes'] 21 | ); 22 | 
expect(sharesData.currentTimes).to.deep.equal(timesCurrent); 23 | }); 24 | }); 25 | }); 26 | -------------------------------------------------------------------------------- /test/shares/handle_share_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it, beforeEach, afterEach } = require('mocha'); 2 | 3 | const { expect } = require('../chai-local'); 4 | const { createClient, logger } = require('../helpers'); 5 | 6 | const { promiseCmd } = require('../../src/utils/promised_redis'); 7 | const { handleShare } = require('../../src/shares/handle_share'); 8 | 9 | describe('handleShare() - handles the processing of shares sent by stratum', () => { 10 | const coin = 'coin'; 11 | const poolConfig = { ports: {} }; 12 | const isCluster = false; 13 | const client = createClient(); 14 | const env = { 15 | client, coin, poolConfig, logger, isCluster 16 | }; 17 | 18 | beforeEach(() => { client.flushall(); }); 19 | afterEach(() => { client.quit(); }); 20 | 21 | describe('when processing a valid share, for a valid block', () => { 22 | const key = ['coin:statistics:basic']; 23 | const isValidShare = true; 24 | const isValidBlock = true; 25 | const shareData = { 26 | worker: 'AAAAAA', difficulty: 0.5, height: 1, port: 0 27 | }; 28 | const args = { isValidBlock, isValidShare, shareData }; 29 | 30 | it('persists the correct share and block-related values', async () => { 31 | const result = await handleShare(env)(args); 32 | expect(result).to.eql(true); 33 | const hgetAll = promiseCmd('hgetall')({ client, logger }); 34 | const basicStats = await hgetAll({ args: key }); 35 | expect(basicStats.validShares).to.eql('1'); 36 | expect(basicStats.validBlocks).to.eql('1'); 37 | }); 38 | }); 39 | }); 40 | -------------------------------------------------------------------------------- /test/startup/index_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = 
require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../chai-local'); 5 | 6 | const { _poolStartup } = require('../../src/startup'); 7 | 8 | describe('poolStartup -- initializes pool services/sub-processes', () => { 9 | const startCnC = sinon.spy(); 10 | 11 | describe('when called from the primary process', () => { 12 | const spawnAPI = sinon.spy(); 13 | const spawnPayments = sinon.spy(); 14 | const spawnWorkers = sinon.spy(); 15 | const deps = { 16 | startCnC, spawnAPI, spawnPayments, spawnWorkers 17 | }; 18 | const poolStartup = _poolStartup(deps); 19 | const cluster = { isWorker: false }; 20 | 21 | it('starts all the services, with the correct args', () => { 22 | poolStartup({ cluster }); 23 | expect(startCnC).to.have.been.calledOnceWith({ cluster }); 24 | expect(spawnAPI).to.have.been.calledOnceWith({ cluster }); 25 | expect(spawnPayments).to.have.been.calledOnceWith({ cluster }); 26 | expect(spawnWorkers).to.have.been.calledOnceWith({ cluster }); 27 | }); 28 | }); 29 | 30 | describe('when called from the "api" spawned sub-process', () => { 31 | const spawnAPI = sinon.spy(); 32 | const spawnPayments = sinon.spy(); 33 | const spawnWorkers = sinon.spy(); 34 | const deps = { 35 | startCnC, spawnAPI, spawnPayments, spawnWorkers 36 | }; 37 | const poolStartup = _poolStartup(deps); 38 | const env = { workerType: 'api' }; 39 | const cluster = { isWorker: true, worker: { process: { env } } }; 40 | 41 | it('hands off to the spawnAPI service', () => { 42 | poolStartup({ cluster }); 43 | expect(spawnAPI).to.have.been.calledOnceWith({ cluster }); 44 | expect(spawnPayments).not.to.have.been.called; 45 | }); 46 | }); 47 | }); 48 | -------------------------------------------------------------------------------- /test/startup/spawn_api_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = 
require('../chai-local'); 5 | 6 | const { _spawnAPI } = require('../../src/startup/spawn_api'); 7 | 8 | describe('spawnAPI -- forks a worker process for API requests', () => { 9 | const baseLogger = { cached: () => {} }; 10 | const portalConfig = {}; 11 | const poolConfig = {}; 12 | 13 | describe('when called from the primary process', () => { 14 | const cluster = { isWorker: false }; 15 | const spawnSpy = sinon.stub().returns({}); 16 | const spawnProcess = () => spawnSpy; 17 | const spawnAPI = _spawnAPI({ spawnProcess }); 18 | const env = { 19 | cluster, baseLogger, poolConfig, portalConfig 20 | }; 21 | 22 | it('spawns a sub-process with the correct args', () => { 23 | spawnAPI(env); 24 | expect(spawnSpy.args[0][0]).to.include({ type: 'api' }); 25 | }); 26 | }); 27 | 28 | describe('when called from the "api" spawned sub-process', () => { 29 | const cluster = { isWorker: true }; 30 | const server = { listen: sinon.stub() }; 31 | const PoolApi = function () { return server; }; 32 | const spawnAPI = _spawnAPI({ PoolApi }); 33 | const env = { 34 | cluster, baseLogger, poolConfig, portalConfig 35 | }; 36 | 37 | it('constructs a PoolApi instance and starts the listener', () => { 38 | const result = spawnAPI(env); 39 | expect(result).to.eql(false); 40 | expect(server.listen).to.have.callCount(1); 41 | }); 42 | }); 43 | }); 44 | -------------------------------------------------------------------------------- /test/startup/spawn_process_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../chai-local'); 5 | 6 | const { spawnProcess } = require('../../src/startup/spawn_process'); 7 | 8 | describe('spawnProcess -- forks a sub-process for service listeners', () => { 9 | const logger = { error: sinon.stub() }; 10 | 11 | describe('when no additional events are supplied', () => { 12 | const worker = { on: sinon.stub() }; 13 
| // We stub the cluster dependency so we don't fork multiple CI runs. 14 | const cluster = { fork: sinon.stub().returns(worker) }; 15 | const env = { cluster, logger }; 16 | 17 | it('assigns the type and adds the "exit" event listener to the worker', () => { 18 | const result = spawnProcess(env)({ type: 'test' }); 19 | expect(result.type).to.eql('test'); 20 | expect(worker.on).to.have.been.calledOnceWith('exit'); 21 | }); 22 | }); 23 | 24 | describe('when 1 additional event is supplied', () => { 25 | const events = { foo: () => {} }; 26 | const worker = { on: sinon.stub() }; 27 | const cluster = { fork: sinon.stub().returns(worker) }; 28 | const env = { cluster, logger }; 29 | 30 | it('adds the 2nd event listeners to the worker', () => { 31 | spawnProcess(env)({ type: 'test', events }); 32 | expect(worker.on).to.have.callCount(2); 33 | expect(worker.on).to.have.been.calledWith('foo'); 34 | }); 35 | }); 36 | }); 37 | -------------------------------------------------------------------------------- /test/startup/start_cnc_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | const { EventEmitter } = require('events'); 4 | 5 | const { expect } = require('../chai-local'); 6 | 7 | const { _startCnC } = require('../../src/startup/start_cnc'); 8 | 9 | const initEmitters = (cluster) => { 10 | const cnc = new EventEmitter(); 11 | cnc.listen = () => {}; 12 | const PoolCnC = function () { return cnc; }; 13 | const startCnC = _startCnC({ cluster, PoolCnC }); 14 | return [cnc, startCnC]; 15 | }; 16 | 17 | describe('startCnC -- launch CnC server and processes emitted events', () => { 18 | const portalConfig = { cliPort: 0 }; 19 | 20 | describe('when receiving a "command" event', () => { 21 | const baseLogger = { cached: () => {} }; 22 | 23 | describe('for a "reloadpool" command', () => { 24 | const reply = sinon.spy(); 25 | const args = ['reloadpool', 
['test'], null, reply]; 26 | const worker = { send: sinon.spy() }; 27 | const workers = { 1: worker }; 28 | const [cnc, startCnC] = initEmitters({ workers }); 29 | 30 | it('replies back with the coin name, after signaling all workers', () => { 31 | startCnC({ portalConfig, baseLogger }); 32 | cnc.emit('command', ...args); 33 | expect(worker.send).to.have.been.calledOnce; 34 | expect(reply).to.have.been.calledOnceWith('reloaded pool test'); 35 | }); 36 | }); 37 | 38 | describe('for an unknown command', () => { 39 | const reply = sinon.spy(); 40 | const args = ['test', null, null, reply]; 41 | const [cnc, startCnC] = initEmitters(); 42 | 43 | it('replies with "unknown command"', () => { 44 | startCnC({ portalConfig, baseLogger }); 45 | cnc.emit('command', ...args); 46 | expect(reply).to.have.been.calledOnceWith('unknown command: test'); 47 | }); 48 | }); 49 | }); 50 | 51 | describe('when receiving a debug-severity "log" event', () => { 52 | const [cnc, startCnC] = initEmitters(); 53 | const logger = { debug: sinon.spy() }; 54 | const baseLogger = { cached: () => logger }; 55 | const msg = 'Debug severity log'; 56 | 57 | it('replies with "unknown command"', () => { 58 | startCnC({ portalConfig, baseLogger }); 59 | cnc.emit('log', 'debug', msg); 60 | expect(logger.debug).to.have.been.calledOnceWith(msg); 61 | }); 62 | }); 63 | }); 64 | -------------------------------------------------------------------------------- /test/stats/coin_balances_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it, beforeEach, after } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../chai-local'); 5 | const { createClient } = require('../helpers'); 6 | 7 | const { coinBalances } = require('../../src/stats/coin_balances'); 8 | 9 | describe('coinBalances() -- calculate recorded balances for an address', () => { 10 | const client = createClient(); 11 | const coin = 'carrot'; 12 | const 
logger = { error: sinon.stub().returnsArg(0) }; 13 | const address = 'AABBCCDD'; 14 | 15 | after(() => { client.quit(); }); 16 | 17 | describe('with multiple entries for "balances" key', () => { 18 | beforeEach(() => { 19 | client.flushall(() => {}); 20 | client.hincrbyfloat(`${coin}:payments:balances`, address, 0.111); 21 | client.hincrbyfloat(`${coin}:payments:immature`, address, 0); 22 | client.hincrbyfloat(`${coin}:payments:payouts`, address, 0); 23 | client.hincrbyfloat(`${coin}:payments:unpaid`, address, 0); 24 | client.hincrbyfloat(`${coin}:payments:balances`, `${address}.foo`, 0.222); 25 | }); 26 | 27 | it('retuns all balances for the address', async () => { 28 | const result = await coinBalances({ client, logger, coin })(address); 29 | expect(result.totalBalance).to.eql(0.333); 30 | }); 31 | }); 32 | }); 33 | -------------------------------------------------------------------------------- /test/stats/coin_stats/compute_hashrates_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../../chai-local'); 5 | 6 | const { _computeHashrates } = require('../../../src/stats/coin_stats/compute_hashrates'); 7 | 8 | describe('computeHashrates() - processStats sub-function', () => { 9 | const shareMultiplier = sinon.stub().returns(0.1); 10 | const statsConfig = { hashrateWindow: 1 }; 11 | const computeHashrates = _computeHashrates({ shareMultiplier }); 12 | 13 | describe('for a matching shared-mining worker entry', () => { 14 | const addr = 'AAAAAA'; 15 | const worker = { validShares: 10, roundShares: 0, soloMining: false }; 16 | const stats = { 17 | algorithm: 'scrypt', 18 | hashrate: { hashrate: 0, hashrateShared: 0, hashrateSolo: 0 }, 19 | shares: { roundShares: { [addr]: 5 } }, 20 | workers: { workers: { [addr]: worker }, workersShared: {}, workersSolo: {} }, 21 | }; 22 | 23 | it('updates the worker-related 
stats', () => { 24 | computeHashrates({ statsConfig, stats }); 25 | 26 | expect(worker.roundShares).to.eql(5); 27 | expect(stats.hashrate.hashrateShared).to.eql(1); 28 | expect(Object.keys(stats.workers.workersShared)).to.eql([addr]); 29 | expect(Object.keys(stats.workers.workersSolo)).to.eql([]); 30 | }); 31 | }); 32 | }); 33 | -------------------------------------------------------------------------------- /test/stats/coin_stats/fetch_raw_stats_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it, after, beforeEach } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../../chai-local'); 5 | const { createClient } = require('../../helpers'); 6 | 7 | const { fetchRawStats } = require('../../../src/stats/coin_stats/fetch_raw_stats'); 8 | 9 | describe('fetchRawStats()', () => { 10 | const coin = 'coin'; 11 | const client = createClient(); 12 | const statsConfig = { hashrateWindow: 60000 }; 13 | const env = { coin, client, statsConfig }; 14 | 15 | after(() => { client.quit(); }); 16 | 17 | describe('with 2 entries in the "blocks:pending" table', () => { 18 | const logger = { error: sinon.stub().returnsArg(0) }; 19 | 20 | beforeEach(() => { 21 | client.flushall(() => {}); 22 | client.sadd(`${coin}:blocks:pending`, [111, 222], () => {}); 23 | }); 24 | 25 | it('retuns a results object with no data', async () => { 26 | const result = await fetchRawStats({ ...env, logger }); 27 | 28 | expect(result.pendingCount).to.eql(2); 29 | expect(result.pendingBlocks[0]).to.eql('111'); 30 | }); 31 | }); 32 | }); 33 | -------------------------------------------------------------------------------- /test/stats/coin_stats/initialize_workers_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | 3 | const { expect } = require('../../chai-local'); 4 | 5 | const { initializeWorkers } = 
require('../../../src/stats/coin_stats/initialize_workers'); 6 | 7 | describe('initializeWorkers() - initialize stats workers from JSON hashrates', () => { 8 | const poolConfig = { coin: { hashrateType: 'sols' } }; 9 | const worker = 'AAAAAA'; 10 | 11 | describe('for a hashrate entry, with > 0 difficulty', () => { 12 | const hashrates = [JSON.stringify( 13 | { worker, difficulty: 10.2, soloMined: false } 14 | )]; 15 | const stats = { 16 | hashrate: { hashrates }, 17 | shares: { shares: 0 }, 18 | workers: { workers: {} }, 19 | }; 20 | 21 | it('populates shares.shares and workers.workers', () => { 22 | initializeWorkers({ poolConfig, stats }); 23 | 24 | expect(stats.shares.shares).to.eql(10.2); 25 | expect(stats.workers.workers[worker]).to.include({ validShares: 10.2 }); 26 | }); 27 | }); 28 | 29 | describe('for 2 hashrates, for the same worker', () => { 30 | const hashrates = [ 31 | { worker, difficulty: 10.2, soloMined: false }, 32 | { worker, difficulty: 4.8, soloMined: false } 33 | ].map(JSON.stringify); 34 | const stats = { 35 | hashrate: { hashrates }, 36 | shares: { shares: 0 }, 37 | workers: { workers: {} }, 38 | }; 39 | 40 | it('combines diffs for shares and selects last diff for worker', () => { 41 | initializeWorkers({ poolConfig, stats }); 42 | 43 | expect(stats.shares.shares).to.eql(15); 44 | expect(stats.workers.workers[worker]).to.include({ difficulty: 5 }); 45 | }); 46 | }); 47 | }); 48 | -------------------------------------------------------------------------------- /test/stats/coin_stats/parse_stats_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | const sinon = require('sinon'); 3 | 4 | const { expect } = require('../../chai-local'); 5 | 6 | const { _parseStats } = require('../../../src/stats/coin_stats/parse_stats'); 7 | 8 | describe('parseStats() - converts the raw stats object into a "full" stats object ', () => { 9 | const sortBlocks = 
sinon.stub().returns(0); 10 | const poolConfig = { 11 | coin: { name: 'coin', symbol: 'coin', algorithm: 'scrypt' }, 12 | fees: 0, 13 | enabled: true, 14 | featured: true, 15 | paymentProcessing: { paymentInterval: 0, minimumPayment: 0 }, 16 | ports: {}, 17 | }; 18 | const parseStats = _parseStats({ sortBlocks })({ poolConfig }); 19 | 20 | describe('with a "normal" rawStats', () => { 21 | const rawStats = { 22 | hashrates: [], 23 | roundCurrent: {}, 24 | timesCurrent: {}, 25 | pendingCount: 2, 26 | pendingBlocks: ['111', '222'], 27 | confirmedCount: 0, 28 | confirmedBlocks: [], 29 | pendingConfirms: {}, 30 | orphanedCount: 0, 31 | basicStats: { validBlocks: 1 }, 32 | history: { history: JSON.stringify([{ foo: 'bar' }]) }, 33 | payments: [JSON.stringify({ foo: 'bar' })] 34 | }; 35 | 36 | it('retuns the correct "full" stats object', () => { 37 | const result = parseStats(rawStats); 38 | 39 | expect(result.statistics.validBlocks).to.eql(1); 40 | expect(result.history[0].foo).to.eql('bar'); 41 | }); 42 | }); 43 | }); 44 | -------------------------------------------------------------------------------- /test/stats/total_shares_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it, beforeEach, after } = require('mocha'); 2 | 3 | const { expect } = require('../chai-local'); 4 | const { createClient, logger } = require('../helpers'); 5 | 6 | const { totalShares } = require('../../src/stats/total_shares'); 7 | 8 | describe('totalShares() -- calculate total shares for an address', () => { 9 | const client = createClient(); 10 | const coin = 'carrot'; 11 | const address = 'AABBCCDD'; 12 | const redisKey = `${coin}:shares:roundCurrent`; 13 | 14 | after(() => { client.quit(); }); 15 | 16 | describe('with multiple entries under "roundCurrent" key', () => { 17 | beforeEach(() => { 18 | client.flushall(() => {}); 19 | client.hincrbyfloat(redisKey, address, 0.111); 20 | client.hincrbyfloat(redisKey, `${address}.foo`, 
0.222); 21 | }); 22 | 23 | it('retuns share total associated with address', async () => { 24 | const total = await totalShares({ client, logger, coin })(address); 25 | expect(total).to.eql(0.333); 26 | }); 27 | }); 28 | }); 29 | -------------------------------------------------------------------------------- /test/utils/key_dep.js: -------------------------------------------------------------------------------- 1 | const keyDep = () => 'keyDep'; 2 | 3 | module.exports = { keyDep }; 4 | -------------------------------------------------------------------------------- /test/utils/require_deps_test.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('mocha'); 2 | 3 | const { expect } = require('../chai-local'); 4 | const { requireDeps } = require('../../src/utils/require_deps'); 5 | 6 | describe('requireDeps() - Utility for specifying overridable dependencies', () => { 7 | describe('when the dependency is a key in an object', () => { 8 | const deps = [['keyDep', `${__dirname}/key_dep`, true]]; 9 | 10 | it('the deps entry directly references the inner function', () => { 11 | const result = requireDeps(deps); 12 | expect(Object.keys(result)).to.eql(['keyDep']); 13 | expect(result.keyDep()).to.eql('keyDep'); 14 | }); 15 | }); 16 | 17 | describe('when the dependency is a module object', () => { 18 | const deps = [['keyDep', `${__dirname}/key_dep`, false]]; 19 | 20 | it('deps entry is an object that references the dep function', () => { 21 | const result = requireDeps(deps); 22 | expect(Object.keys(result.keyDep)).to.eql(['keyDep']); 23 | }); 24 | }); 25 | 26 | describe('when the dependency reference is invalid', () => { 27 | describe('and the "throw" flag is set to false', () => { 28 | const deps = [['keyDep', `${__dirname}/invalid`, true, false]]; 29 | 30 | it('sets the named dep entry to undefined', () => { 31 | const result = requireDeps(deps); 32 | expect(Object.keys(result)).to.eql(['keyDep']); 33 | 
expect(result.keyDep).to.eql(undefined); 34 | }); 35 | }); 36 | }); 37 | }); 38 | --------------------------------------------------------------------------------