├── .babelrc
├── .editorconfig
├── .gitignore
├── .vscode
└── launch.json
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── api
├── 0df01ae7dd51cec4
│ └── index.js
├── 0eoUqXNE715mBVqV
│ └── index.js
├── 2wYLWQfWBa6GLPYs
│ └── index.js
├── 3K2xvNRLMpiEqLo8
│ └── index.js
├── 3XqkzNFzaTk1JPRf
│ └── index.js
├── 4wR3kjTwwC67R94Z
│ └── index.js
├── 5sgpSNaqnHIcfocl
│ └── index.js
├── 72dlrjquBORj0rhx
│ └── index.js
├── AgsSNTSA0RHmWUkp
│ └── index.js
├── BPZZ0l2nTfMSmmpl
│ └── index.js
├── Bz73Dm7u3dKlXDS8
│ └── index.js
├── C9rEOewwhQDijCnN
│ └── index.js
├── CZOTAF5LfusB1Ht5
│ └── index.js
├── DjwvDZ4bGUzKxOHW
│ └── index.js
├── DuOnf1Wqi29oJUaA
│ └── index.js
├── F7APEv5JfCY1siyz
│ └── index.js
├── FAow0eot8ZejZUTJ
│ └── index.js
├── FC8ylC617zzn1Gaa
│ └── index.js
├── FDLyQ5uo5t7jltiQ
│ └── index.js
├── Fv9lheUpVYq5caRe
│ └── index.js
├── GMDazQSouYWzLTCv
│ └── index.js
├── GktuwZlVP39gty6v
│ └── index.js
├── Gox4NzTLDnpEr10v
│ └── index.js
├── I3EoELuQCmqwvp8C
│ └── index.js
├── IBHgAmydZbmTUAe8
│ └── index.js
├── K9wxNE4Ved38xhJm
│ └── index.js
├── KN2ZttYDEKzCulEZ
│ └── index.js
├── LLpSTquu4tZL8Nu5
│ └── index.js
├── LOLb7q23p8rYSLwv
│ └── index.js
├── Lb2fuhVMDQm1DrLL
│ └── index.js
├── Mh9QifTIESw5t1fa
│ └── index.js
├── Mu7VpxzfYyQimf3V
│ └── index.js
├── NPCpnfUyPHRH4j29
│ └── index.js
├── NwYcrWsmpE1p8ylI
│ └── index.js
├── OBexeX0f0MsnL1S3
│ └── index.js
├── OMAlxmPq4rZs71K8
│ └── index.js
├── P2LMh8NsUTkpWAH3
│ └── index.js
├── PKUv2JfV87KpEZwE
│ └── index.js
├── PMW9LXqUv7vXLpbA
│ └── index.js
├── PZ7x3HVHVstLNYf0
│ └── index.js
├── PwwdU9lZbgMqS2DA
│ └── index.js
├── QISzUVake29059bi
│ └── index.js
├── QgbDSAuy1SYgocs6
│ └── index.js
├── RIlwZyfnizp2i8wh
│ └── index.js
├── RLOk0Wji0lQVjynT
│ └── index.js
├── RVBqKlGdk9aEhi5J
│ └── index.js
├── SLzLU50givH77Rns
│ └── index.js
├── VnJIBrrM0KY3uQ9X
│ └── index.js
├── WGem8x5aycBqFXWQ
│ └── index.js
├── XPzc85T3reYmGro1
│ └── index.js
├── XQmpDjEVF691r2gX
│ └── index.js
├── XVygz9vHv8cHTFf4
│ └── index.js
├── XgxHmjINTEqANwtS
│ └── index.js
├── Xim7SaikcsHICvfQ
│ └── index.js
├── ZFAYRM8LRtmfYp4Y
│ └── index.js
├── ZXJ3DqyLslyQETkX
│ └── index.js
├── aSiBLKkEsHI9lDr3
│ └── index.js
├── endpoint.js
├── epOW5kdvb12QdgsV
│ └── index.js
├── hMrav9QMiMyLQosB
│ └── index.js
├── hXwPQrVhLEALFsIJ
│ └── index.js
├── hbBmFhIpJS87W1Fy
│ └── index.js
├── kC5N9Tz06b2rA4Pg
│ └── index.js
├── kIoe20LWh2aw3CAu
│ └── index.js
├── l4kaEhMnhjB5yseq
│ └── index.js
├── pIQZEKY4T9vttBUk
│ └── index.js
├── qRHogKQ1Bb7OT4N9
│ └── index.js
├── quIoaHsl8h6IwyEI
│ └── index.js
├── rKclyiLtHx0dx55M
│ └── index.js
├── server.js
├── vM27tlHkqbRej6tP
│ └── index.js
├── w9UTTA7NXnEDUXhe
│ └── index.js
├── wDyC195wgjPjM2Ut
│ └── index.js
├── wWo8DCcoXVlpczoP
│ └── index.js
├── xGaf7vbfY15TGsSd
│ └── index.js
├── yefPsK2TvkZmC6M4
│ └── index.js
├── ywTmt3C0nwk5k4c7
│ └── index.js
├── yyCtgjuFu9mx0edg
│ └── index.js
└── zLsiAkocn90e3K6R
│ └── index.js
├── bootstrap.json
├── core
├── bootstrap.js
├── cache.js
├── config
│ ├── api.json
│ ├── config-loader.js
│ ├── config.js
│ ├── environment.js.example
│ └── job.json
├── console.js
├── crypto
│ ├── chash.js
│ ├── object-hash.js
│ └── signature.js
├── event-bus.js
├── genesis
│ └── genesis-config.js
├── log-manager.js
├── logger.js
├── mutex.js
├── ntp.js
├── services
│ └── services.js
├── statistics.js
├── storage
│ ├── chunk-utils.js
│ ├── file-exchange.js
│ ├── file-manager.js
│ ├── file-sync.js
│ ├── receiver.js
│ ├── sender.js
│ └── storage-acl.js
├── task.js
├── utils
│ ├── object-utils.js
│ ├── transaction-data-utils.js
│ └── utils.js
└── wallet
│ ├── wallet-sync.js
│ ├── wallet-transaction-consensus.js
│ ├── wallet-utils.js
│ └── wallet.js
├── database
├── database.js
├── pool
│ ├── pool.js
│ └── worker.mjs
├── queue-sqlite.js
├── repositories
│ ├── address.js
│ ├── api.js
│ ├── config.js
│ ├── job.js
│ ├── keychain.js
│ ├── node.js
│ ├── normalization.js
│ ├── repositories.js
│ ├── schema.js
│ ├── shard.js
│ ├── transaction.js
│ └── wallet.js
├── shard.js
└── sqlite3
│ └── sqlite3-binding.js
├── docker
├── Dockerfile
├── build.sh
├── compose_down.sh
├── compose_up.sh
├── docker-compose.yml
└── run_node.sh
├── index.js
├── job
└── job-engine.js
├── net
├── network.js
├── peer-rotation.js
└── peer.js
├── package.json
├── run-millix-node.sh
├── scripts
├── initialize-millix-job-engine-sqlite3.sql
├── initialize-millix-shard-sqlite3.sql
├── initialize-millix-sqlite3.sql
└── migration
│ ├── migration.js
│ ├── schema-update-1.sql
│ ├── schema-update-10.sql
│ ├── schema-update-11.sql
│ ├── schema-update-12.sql
│ ├── schema-update-13.sql
│ ├── schema-update-14.sql
│ ├── schema-update-15.sql
│ ├── schema-update-16.sql
│ ├── schema-update-17.sql
│ ├── schema-update-18.sql
│ ├── schema-update-19.sql
│ ├── schema-update-2.sql
│ ├── schema-update-20.sql
│ ├── schema-update-21.sql
│ ├── schema-update-22.sql
│ ├── schema-update-23.sql
│ ├── schema-update-3.sql
│ ├── schema-update-4.js
│ ├── schema-update-4.sql
│ ├── schema-update-5.sql
│ ├── schema-update-6.sql
│ ├── schema-update-7.js
│ ├── schema-update-7.sql
│ ├── schema-update-8.sql
│ ├── schema-update-9.sql
│ └── shard
│ ├── schema-update-10.sql
│ ├── schema-update-11.sql
│ ├── schema-update-12.sql
│ ├── schema-update-13.sql
│ ├── schema-update-14.sql
│ ├── schema-update-15.sql
│ ├── schema-update-16.sql
│ ├── schema-update-17.sql
│ ├── schema-update-18.sql
│ ├── schema-update-19.sql
│ ├── schema-update-20.sql
│ ├── schema-update-21.sql
│ ├── schema-update-22.sql
│ ├── schema-update-23.sql
│ ├── schema-update-4.sql
│ ├── schema-update-5.sql
│ ├── schema-update-6.sql
│ ├── schema-update-7.sql
│ ├── schema-update-8.sql
│ └── schema-update-9.sql
└── webpack.prod.config.js
/.babelrc:
--------------------------------------------------------------------------------
1 | {
2 | "presets": [
3 | "@babel/preset-env"
4 | ],
5 | "env": {
6 | "development": {
7 | "plugins": [
8 | "@babel/plugin-proposal-class-properties",
9 | "babel-plugin-transform-import-meta"
10 | ]
11 | }
12 | },
13 | "sourceMaps": "inline",
14 | "retainLines": true
15 | }
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | package-lock.json
3 | data/keys.json
4 | /core/config/environment.js
5 |
--------------------------------------------------------------------------------
/.vscode/launch.json:
--------------------------------------------------------------------------------
1 | {
2 | // Use IntelliSense to learn about possible attributes.
3 | // Hover to view descriptions of existing attributes.
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
5 | "version": "0.2.0",
6 | "configurations": [
7 | {
8 | "type": "node",
9 | "request": "launch",
10 | "name": "Debug",
11 | "program": "${workspaceFolder}/index.js",
12 | "args": ["--port", "8800", "--api-port", "5000", "--debug", "--folder", "./millix-testnet/"],
13 | "runtimeExecutable": "${workspaceFolder}/node_modules/.bin/babel-node",
14 | "runtimeArgs": ["--nolazy"]
15 | },
16 | {
17 | "type": "node",
18 | "request": "launch",
19 | "name": "Debug 2",
20 | "program": "${workspaceFolder}/index.js",
21 | "args": ["--port", "8801", "--api-port", "5001", "--debug", "--folder", "./millix-testnetx/"],
22 | "runtimeExecutable": "${workspaceFolder}/node_modules/.bin/babel-node",
23 | "runtimeArgs": ["--nolazy"]
24 | }
25 | ]
26 | }
27 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 millix
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | millix node v1.24.0
6 |
7 |
8 |
9 | ## Main Features
10 |
11 | - DAG-backed cryptocurrency
12 | - Multiple wallet creation and management in-app
13 | - Easy to send and receive transactions
14 | - [BIP32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki) Hierarchical deterministic (HD) address generation and wallet backups
15 | - Device-based security: all private keys are stored locally, not in the cloud
16 | - Support for testnet
17 | - Mnemonic ([BIP39](https://github.com/bitcoin/bips/blob/master/bip-0039.mediawiki)) support for wallet backups
18 | - Supports [macOS](?#), [Linux](?#), and [Windows](?#) devices
19 |
20 | ## About millix
21 |
22 | ### Principles
23 |
24 | - Currencies should not be created with debt.
25 | - Currencies should operate at infinite scale.
26 | - Currencies should work the same throughout the entire spectrum of transaction values.
27 | - Currencies should be exchanged with no fee.
28 | - Currencies should be functional without carrying the weight of every previous transaction.
29 | - Modern currencies should be at least as simple to use as primitive currencies.
30 | - Implementing a digital currency into a process should be nearly the same effort as implementing paper cash into a process, where any additional difficulty implementing a digital currency is indisputably offset by benefits.
31 | - Simplicity at the edge is only possible with equal simplicity in the foundation.
32 | - Currencies are a product chosen by customers and supported by professionals. Customers and professionals require services and support.
33 | - The cost of securing value can't exceed the value it secures.
34 | - Decreasing a currency's value with inflation should not dilute the value of savers.
35 | - Increasing a currency's market value should be proportionate to increases in its fundamental value.
36 | - Participants that increase fundamental value should be algorithmically incentivized.
37 |
38 |
39 | ## Installation
40 |
41 |
42 | ## Install nodejs 16
43 | ```
44 | sudo apt update
45 | sudo apt -y install curl dirmngr apt-transport-https lsb-release ca-certificates build-essential
46 | curl -sL https://deb.nodesource.com/setup_16.x | sudo -E bash -
47 | sudo apt-get -y install nodejs
48 | node --version   # check the version: should be 16.x.x
49 | ```
50 |
51 | ## Clone millix-node code
52 | ```
53 | git clone https://github.com/millix/millix-node.git
54 | ```
55 |
56 | ## Run millix-node
57 | ```
58 | npm install
59 | sudo chmod +x run-millix-node.sh
60 | sh run-millix-node.sh
61 | ```
62 |
63 | ## How to Contribute
64 |
65 | Anyone and everyone is welcome to contribute. Please take a moment to review the [guidelines for contributing](CONTRIBUTING.md).
66 |
67 | - [bug reports](CONTRIBUTING.md#bugs)
68 | - [feature requests](CONTRIBUTING.md#features)
69 | - [pull requests](CONTRIBUTING.md#pull-requests)
70 |
71 | ### getting started
72 |
73 | 1. Clone the repo and create a new branch: `$ git clone git@github.com:millix/millix-node.git && git checkout -b <branch-name>`.
74 | 2. Make your changes and test them.
75 | 3. Submit a pull request with a comprehensive description of the changes.
76 |
77 | ## run in testnet
78 | Update `core/config/environment.js` and set the test network mode to true:
79 | ```
80 | environment['MODE_TEST_NETWORK'] = true;
81 | ```
82 |
83 | ## Release Schedules
84 |
85 | millix-node uses the `MAJOR.MINOR.PATCH` convention for versioning. Any release that adds features should modify the MINOR or MAJOR number.
86 |
87 | ### Bug Fixing Releases
88 |
89 | We release bug fixes as soon as possible for all platforms. No coordination is required, so all platforms are updated at the same time.
90 |
91 | ## Support
92 |
93 | Please see [Support requests](CONTRIBUTING.md#support)
94 |
95 | ## License
96 |
97 | millix-node is released under the MIT License. Please refer to the [LICENSE](LICENSE) file that accompanies this project for more information including complete terms and conditions.
98 |
--------------------------------------------------------------------------------
/api/0df01ae7dd51cec4/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import mutex from '../../core/mutex';
3 |
4 |
5 | /**
6 | * api get_backlog_list
7 | */
8 | class _0df01ae7dd51cec4 extends Endpoint {
9 | constructor() {
10 | super('0df01ae7dd51cec4');
11 | }
12 |
13 | /**
14 | * returns a backlog list.
15 | * @param app
16 | * @param req
17 | * @param res
18 | */
19 | handler(app, req, res) {
20 | mutex.getBacklogList().then(backlogList => {
21 | res.send({
22 | api_status : 'success',
23 | backlog_list: backlogList
24 | });
25 | });
26 | }
27 | }
28 |
29 |
30 | export default new _0df01ae7dd51cec4();
31 |
--------------------------------------------------------------------------------
/api/0eoUqXNE715mBVqV/index.js:
--------------------------------------------------------------------------------
1 | import database from '../../database/database';
2 | import Endpoint from '../endpoint';
3 |
4 |
5 | /**
6 | * api list_node
7 | */
8 | class _0eoUqXNE715mBVqV extends Endpoint {
9 | constructor() {
10 | super('0eoUqXNE715mBVqV');
11 | }
12 |
13 | /**
14 | * returns a list of peer nodes known by the host. it returns the newest
15 | * records by default
16 | * @param app
17 | * @param req (p0: status, p1: order_by="create_date desc", p2:
18 | * record_limit=1000)
19 | * @param res
20 | */
21 | handler(app, req, res) {
22 | const status = req.query.p0;
23 | const orderBy = req.query.p1 || 'create_date desc';
24 | const limit = parseInt(req.query.p2) || 1000;
25 | const nodeRepository = database.getRepository('node');
26 | nodeRepository.listNodes({status}, orderBy, limit)
27 | .then(nodes => {
28 | res.send(nodes);
29 | })
30 | .catch(e => res.send({
31 | api_status : 'fail',
32 | api_message: `unexpected generic api error: (${e})`
33 | }));
34 | }
35 | }
36 |
37 |
38 | export default new _0eoUqXNE715mBVqV();
39 |
--------------------------------------------------------------------------------
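
A note on usage: every handler in this directory reads positional query parameters `p0…pN` whose meaning is given only by the JSDoc above it. Below is a minimal client-side sketch for `0eoUqXNE715mBVqV` (list_node), using the global fetch API (any HTTP client would do); the base URL and the filter values are placeholders, not anything defined by this repository.

```
// build the positional p0..pN query string for 0eoUqXNE715mBVqV (list_node).
// API_BASE is a placeholder: host, port, and any path prefix depend on
// api/server.js and the node configuration.
const API_BASE = '<node-api-base-url>';

function buildQuery(params) {
    return Object.entries(params)
                 .filter(([, value]) => value !== undefined)
                 .map(([key, value]) => `${key}=${encodeURIComponent(value)}`)
                 .join('&');
}

// the 100 newest known peer nodes (p0: status, p1: order_by, p2: record_limit)
const query = buildQuery({
    p1: 'create_date desc',
    p2: 100
});

fetch(`${API_BASE}/0eoUqXNE715mBVqV?${query}`)
    .then(response => response.json())
    .then(nodes => console.log(nodes));
```
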
/api/2wYLWQfWBa6GLPYs/index.js:
--------------------------------------------------------------------------------
1 | import database from '../../database/database';
2 | import Endpoint from '../endpoint';
3 |
4 | /**
5 | * api get_config_by_name
6 | */
7 | class _2wYLWQfWBa6GLPYs extends Endpoint {
8 | constructor() {
9 | super('2wYLWQfWBa6GLPYs');
10 | }
11 |
12 | /**
13 | * get config value by name
14 | * @param app
15 | * @param req
16 | * @param res
17 | * @returns {*}
18 | */
19 | handler(app, req, res) {
20 |
21 | const {p0: configName} = req.query;
22 | if (!configName) {
23 | return res.status(400).send({
24 | api_status: 'fail',
25 | api_message: 'p0 is required'
26 | });
27 | }
28 | const configurationRepository = database.getRepository('config');
29 | configurationRepository.getConfig(configName.toLowerCase())
30 | .then(configuration => res.send(configuration))
31 | .catch(e => res.send({
32 | api_status: 'fail',
33 | api_message: `unexpected generic api error: (${e})`
34 | }));
35 | }
36 | }
37 |
38 |
39 | export default new _2wYLWQfWBa6GLPYs();
--------------------------------------------------------------------------------
/api/3K2xvNRLMpiEqLo8/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import database from '../../database/database';
3 | import async from 'async';
4 | import _ from 'lodash';
5 | import fileManager from '../../core/storage/file-manager';
6 | import wallet from '../../core/wallet/wallet';
7 |
8 |
9 | /**
10 | * api get_transaction_output_key
11 | */
12 | class _3K2xvNRLMpiEqLo8 extends Endpoint {
13 | constructor() {
14 | super('3K2xvNRLMpiEqLo8');
15 | }
16 |
17 | /**
18 | *
19 | * @param app
20 | * @param req (p0: transaction_id, p1:attribute_type_id, p2: file_hash)
21 | * @param res
22 | */
23 | handler(app, req, res) {
24 | fileManager.getKeyByTransactionAndFileHash(req.query.p0, req.query.p1, req.query.p2)
25 | .then(key => {
26 |
27 | if (!key) {
28 | return Promise.reject(`cannot get file key`);
29 | }
30 |
31 | res.send({key});
32 | })
33 | .catch(e => res.send({
34 | api_status : 'fail',
35 | api_message: `unexpected generic api error: (${e})`
36 | }));
37 | }
38 | }
39 |
40 |
41 | export default new _3K2xvNRLMpiEqLo8();
42 |
--------------------------------------------------------------------------------
/api/3XqkzNFzaTk1JPRf/index.js:
--------------------------------------------------------------------------------
1 | import database from '../../database/database';
2 | import Endpoint from '../endpoint';
3 |
4 |
5 | /**
6 | * api list_address_version
7 | */
8 | class _3XqkzNFzaTk1JPRf extends Endpoint {
9 | constructor() {
10 | super('3XqkzNFzaTk1JPRf');
11 | }
12 |
13 | /**
14 | * return records from table address_version
15 | * @param app
16 | * @param req
17 | * @param res
18 | */
19 | handler(app, req, res) {
20 | const addressRepository = database.getRepository('address');
21 | addressRepository.listAddressVersion()
22 | .then(addressVersionList => res.send(addressVersionList))
23 | .catch(e => res.send({
24 | api_status : 'fail',
25 | api_message: `unexpected generic api error: (${e})`
26 | }));
27 | }
28 | };
29 |
30 | export default new _3XqkzNFzaTk1JPRf();
31 |
--------------------------------------------------------------------------------
/api/4wR3kjTwwC67R94Z/index.js:
--------------------------------------------------------------------------------
1 | import database from '../../database/database';
2 | import wallet from '../../core/wallet/wallet';
3 | import network from '../../net/network';
4 | import peer from '../../net/peer';
5 | import Endpoint from '../endpoint';
6 |
7 |
8 | /**
9 | * api optimize_database
10 | */
11 | class _4wR3kjTwwC67R94Z extends Endpoint {
12 | constructor() {
13 | super('4wR3kjTwwC67R94Z');
14 | }
15 |
16 | /**
17 | * executes the sqlite optimize database function on the indicated
18 | * shard_id, or on all shards if shard_id is not provided. this API pauses
19 | * the node service, tasks and network until it is finished
20 | * @param app
21 | * @param req (p0: shard_id)
22 | * @param res
23 | */
24 | handler(app, req, res) {
25 | wallet.stop();
26 | network.stop();
27 | peer.stop();
28 | database.runVacuumAll()
29 | .then(() => database.runWallCheckpointAll())
30 | .then(() => {
31 | return wallet.initialize(true)
32 | .then(() => network.initialize())
33 | .then(() => peer.initialize())
34 | .then(() => {
35 | res.send({api_status: 'success'});
36 | });
37 | })
38 | .catch(e => res.send({
39 | api_status : 'fail',
40 | api_message: `unexpected generic api error: (${e})`
41 | }));
42 | }
43 | }
44 |
45 |
46 | export default new _4wR3kjTwwC67R94Z();
47 |
--------------------------------------------------------------------------------
/api/5sgpSNaqnHIcfocl/index.js:
--------------------------------------------------------------------------------
1 | import network from '../../net/network';
2 | import Endpoint from '../endpoint';
3 |
4 |
5 | /**
6 | * api toggle_service_network
7 | */
8 | class _5sgpSNaqnHIcfocl extends Endpoint {
9 | constructor() {
10 | super('5sgpSNaqnHIcfocl');
11 | }
12 |
13 | /**
14 | * toggles the network service for all networks (main or test networks)
15 | * between running (true) and not running (false)
16 | * @param app
17 | * @param req (p0: is_running)
18 | * @param res
19 | * @returns {*}
20 | */
21 | handler(app, req, res) {
22 | if (!req.query.p0) {
23 | return res.status(400).send({
24 | api_status : 'fail',
25 | api_message: 'p0 is required'
26 | });
27 | }
28 | const isOnline = req.query.p0 === 'true';
29 | if (isOnline && !network.initialized) {
30 | network.initialize();
31 | res.send({api_status: 'success'});
32 | }
33 | else if (!isOnline && network.initialized === true) {
34 | network.stop();
35 | res.send({api_status: 'success'});
36 | }
37 | else {
38 | res.send({
39 | api_status : 'fail',
40 | api_message: 'not updated'
41 | });
42 | }
43 | }
44 | }
45 |
46 |
47 | export default new _5sgpSNaqnHIcfocl();
48 |
--------------------------------------------------------------------------------
/api/72dlrjquBORj0rhx/index.js:
--------------------------------------------------------------------------------
1 | import database from '../../database/database';
2 | import Endpoint from '../endpoint';
3 |
4 |
5 | /**
6 | * api list_address
7 | */
8 | class _72dlrjquBORj0rhx extends Endpoint {
9 | constructor() {
10 | super('72dlrjquBORj0rhx');
11 | }
12 |
13 | /**
14 | * returns records from table address. it returns the newest records by
15 | * default
16 | * @param app
17 | * @param req (p0: address_base, p1: address_version, p2:
18 | * address_key_identifier, p3: address, p4: status, p5:
19 | * order_by="create_date desc", p6: record_limit: 1000)
20 | * @param res
21 | */
22 | handler(app, req, res) {
23 | const orderBy = req.query.p5 || 'create_date desc';
24 | const limit = parseInt(req.query.p6) || 1000;
25 | const addressRepository = database.getRepository('address');
26 | addressRepository.listAddress({
27 | 'address.address_base': req.query.p0,
28 | address_version : req.query.p1,
29 | address_key_identifier: req.query.p2,
30 | address : req.query.p3,
31 | 'address.status' : req.query.p4
32 | }, orderBy, limit)
33 | .then(addresses => res.send(addresses))
34 | .catch(e => res.send({
35 | api_status : 'fail',
36 | api_message: `unexpected generic api error: (${e})`
37 | }));
38 | }
39 | }
40 |
41 |
42 | export default new _72dlrjquBORj0rhx();
43 |
--------------------------------------------------------------------------------
/api/AgsSNTSA0RHmWUkp/index.js:
--------------------------------------------------------------------------------
1 | import database from '../../database/database';
2 | import Endpoint from '../endpoint';
3 |
4 |
5 | /**
6 | * api list_node_attribute
7 | */
8 | class _AgsSNTSA0RHmWUkp extends Endpoint {
9 | constructor() {
10 | super('AgsSNTSA0RHmWUkp');
11 | }
12 |
13 | /**
14 | * returns records from table node_attributes. it returns the newest
15 | * records by default
16 | * @param app
17 | * @param req (p0: node_id, p1: attribute_type_id, p2: status, p3:
18 | * order_by="create_date desc", p4: record_limit=1000)
19 | * @param res
20 | */
21 | handler(app, req, res) {
22 | const orderBy = req.query.p3 || 'create_date desc';
23 | const limit = parseInt(req.query.p4) || 1000;
24 | const nodeRepository = database.getRepository('node');
25 | nodeRepository.listNodeAttribute({
26 | node_id : req.query.p0,
27 | 'node_attribute.attribute_type_id': req.query.p1,
28 | 'node_attribute.status' : req.query.p2
29 | }, orderBy, limit).then(attributeList => {
30 | res.send(attributeList);
31 | }).catch(e => res.send({
32 | api_status : 'fail',
33 | api_message: `unexpected generic api error: (${e})`
34 | }));
35 | }
36 | }
37 |
38 |
39 | export default new _AgsSNTSA0RHmWUkp();
40 |
--------------------------------------------------------------------------------
/api/BPZZ0l2nTfMSmmpl/index.js:
--------------------------------------------------------------------------------
1 | import walletUtils from '../../core/wallet/wallet-utils';
2 | import Endpoint from '../endpoint';
3 |
4 |
5 | /**
6 | * api get_mnemonic_phrase
7 | */
8 | class _BPZZ0l2nTfMSmmpl extends Endpoint {
9 | constructor() {
10 | super('BPZZ0l2nTfMSmmpl');
11 | }
12 |
13 | /**
14 | * returns the 24 word mnemonic phrase for the active session which is
15 | * stored by default in the millix directory
16 | * @param app
17 | * @param req
18 | * @param res
19 | */
20 | handler(app, req, res) {
21 | walletUtils.loadMnemonic()
22 | .then(([mnemonic, _]) => {
23 | res.send({mnemonic_phrase: mnemonic});
24 | })
25 | .catch(e => res.send({
26 | api_status : 'fail',
27 | api_message: `unexpected generic api error: (${e})`
28 | }));
29 | }
30 | }
31 |
32 |
33 | export default new _BPZZ0l2nTfMSmmpl();
34 |
--------------------------------------------------------------------------------
/api/Bz73Dm7u3dKlXDS8/index.js:
--------------------------------------------------------------------------------
1 | import database from '../../database/database';
2 | import Endpoint from '../endpoint';
3 |
4 |
5 | /**
6 | * api get_node_address_stat_summary
7 | */
8 | class _Bz73Dm7u3dKlXDS8 extends Endpoint {
9 | constructor() {
10 | super('Bz73Dm7u3dKlXDS8');
11 | }
12 |
13 | /**
14 | * returns a summary of address statistics from the host
15 | * @param app
16 | * @param req
17 | * @param res
18 | */
19 | handler(app, req, res) {
20 | const addressRepository = database.getRepository('address');
21 | addressRepository.getAddressesCount()
22 | .then(data => res.send({
23 | address_count : data.address_count,
24 | key_identifier_count: data.address_key_identifier_count
25 | }))
26 | .catch(e => res.send({
27 | api_status : 'fail',
28 | api_message: `unexpected generic api error: (${e})`
29 | }));
30 | }
31 | };
32 |
33 | export default new _Bz73Dm7u3dKlXDS8();
34 |
--------------------------------------------------------------------------------
/api/C9rEOewwhQDijCnN/index.js:
--------------------------------------------------------------------------------
1 | import database from '../../database/database';
2 | import Endpoint from '../endpoint';
3 | import async from 'async';
4 | import _ from 'lodash';
5 |
6 |
7 | /**
8 | * api get_node_transaction_stat_summary
9 | */
10 | class _C9rEOewwhQDijCnN extends Endpoint {
11 | constructor() {
12 | super('C9rEOewwhQDijCnN');
13 | }
14 |
15 | /**
16 | * returns a summary of transaction statistics from the host
17 | * @param app
18 | * @param req
19 | * @param res
20 | */
21 | handler(app, req, res) {
22 | async.mapSeries(
23 | [
24 | () => database.applyShards(shardID => {
25 | return database.getRepository('transaction', shardID).getFreeTransactionsCount();
26 | }).then(_.sum),
27 | () => database.applyShards(shardID => {
28 | return database.getRepository('transaction', shardID).getIncludedTransactionsCount();
29 | }).then(_.sum),
30 | () => database.applyShards(shardID => {
31 | return database.getRepository('transaction', shardID).getInputsCount();
32 | }).then(_.sum),
33 | () => database.applyShards(shardID => {
34 | return database.getRepository('transaction', shardID).getOutputsCount();
35 | }).then(_.sum),
36 | () => database.applyShards(shardID => {
37 | return database.getRepository('transaction', shardID).getStableTransactionsCount();
38 | }).then(_.sum),
39 | () => database.applyShards(shardID => {
40 | return database.getRepository('transaction', shardID).getPendingTransactionsCount();
41 | }).then(_.sum)
42 | ],
43 | (fn, callback) => fn().then(result => callback(null, result)),
44 | (err, [transactionFreeCount, transactionIncludedCount, transactionInputCount, transactionOutputCount, transactionStableCount, transactionPendingCount]) => {
45 | res.send({
46 | transaction_free_count : transactionFreeCount,
47 | transaction_included_count: transactionIncludedCount,
48 | transaction_input_count : transactionInputCount,
49 | transaction_output_count : transactionOutputCount,
50 | transaction_stable_count : transactionStableCount,
51 | transaction_pending_count : transactionPendingCount
52 | });
53 | });
54 | }
55 | };
56 |
57 | export default new _C9rEOewwhQDijCnN();
58 |
--------------------------------------------------------------------------------
/api/CZOTAF5LfusB1Ht5/index.js:
--------------------------------------------------------------------------------
1 | import database from '../../database/database';
2 | import Endpoint from '../endpoint';
3 |
4 |
5 | /**
6 | * api list_config_private
7 | */
8 | class _CZOTAF5LfusB1Ht5 extends Endpoint {
9 | constructor() {
10 | super('CZOTAF5LfusB1Ht5');
11 | }
12 |
13 | /**
14 | * returns private config values that are only available to the node
15 | * operator
16 | * @param app
17 | * @param req (p0: type, p1: status, p2: order_by="create_date desc", p3:
18 | * record_limit=1000)
19 | * @param res
20 | */
21 | handler(app, req, res) {
22 | const orderBy = req.query.p2 || 'create_date desc';
23 | const limit = parseInt(req.query.p3) || 1000;
24 | const configurationRepository = database.getRepository('config');
25 | configurationRepository.list({
26 | type : req.query.p0,
27 | status: req.query.p1
28 | }, orderBy, limit)
29 | .then(configurations => res.send(configurations))
30 | .catch(e => res.send({
31 | api_status : 'fail',
32 | api_message: `unexpected generic api error: (${e})`
33 | }));
34 | }
35 | }
36 |
37 |
38 | export default new _CZOTAF5LfusB1Ht5();
39 |
--------------------------------------------------------------------------------
/api/DjwvDZ4bGUzKxOHW/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import dns from 'dns';
3 | import config from '../../core/config/config';
4 | import {promisify} from 'util';
5 | import _ from 'lodash';
6 |
7 |
8 | /**
9 | * api verify_dns_has_address_key_identifier
10 | */
11 | class _DjwvDZ4bGUzKxOHW extends Endpoint {
12 | constructor() {
13 | super('DjwvDZ4bGUzKxOHW');
14 | dns.setServers(config.NODE_DNS_SERVER);
15 | this.resolveTxt = promisify(dns.resolveTxt);
16 | }
17 |
18 | /**
19 | * checks if the address key identifier is configured in the DNS TXT record
20 | * @param app
21 | * @param req (p0: dns, p1:address_key_identifier)
22 | * @param res
23 | * @returns {*}
24 | */
25 | handler(app, req, res) {
26 | if (!req.query.p0 || !req.query.p1) {
27 | return res.status(400).send({
28 | api_status : 'fail',
29 | api_message: 'p0 and p1 are required'
30 | });
31 | }
32 | const dns = req.query.p0;
33 | const addressKeyIdentifier = req.query.p1;
34 |
35 | this.resolveTxt(dns)
36 | .then(txtRecords => {
37 | const addressKeyIdentifierSet = new Set();
38 | _.flatten(txtRecords).forEach(txtRecord => {
39 | if (txtRecord.startsWith('tangled=')) {
40 | txtRecord.substring(8).split(',').forEach(address => addressKeyIdentifierSet.add(address));
41 | }
42 | });
43 | res.send({
44 | is_address_verified: addressKeyIdentifierSet.has(addressKeyIdentifier)
45 | });
46 | })
47 | .catch(e => {
48 | return res.status(200).send({
49 | api_status : 'fail',
50 | api_message: `unexpected generic api error: (${e})`
51 | });
52 | });
53 | }
54 | }
55 |
56 |
57 | export default new _DjwvDZ4bGUzKxOHW();
58 |
--------------------------------------------------------------------------------
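
For reference, the endpoint above expects a TXT record of the form `tangled=<address_key_identifier>[,<address_key_identifier>,...]` on the queried domain. A self-contained sketch of the same parsing logic, run on made-up sample records:

```
// the TXT-record parsing used by verify_dns_has_address_key_identifier,
// applied to made-up sample data (dns.resolveTxt returns string[][]).
const txtRecords = [
    ['v=spf1 include:example.com ~all'],  // unrelated record, ignored
    ['tangled=1ExampleKeyIdentifierAAA,1ExampleKeyIdentifierBBB']
];

const addressKeyIdentifierSet = new Set();
txtRecords.flat().forEach(txtRecord => {
    if (txtRecord.startsWith('tangled=')) {
        txtRecord.substring(8).split(',').forEach(address => addressKeyIdentifierSet.add(address));
    }
});

console.log(addressKeyIdentifierSet.has('1ExampleKeyIdentifierAAA')); // true
```
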
/api/DuOnf1Wqi29oJUaA/index.js:
--------------------------------------------------------------------------------
1 | import network from '../../net/network';
2 | import Endpoint from '../endpoint';
3 | import database from '../../database/database';
4 |
5 |
6 | /**
7 | * api new_node
8 | */
9 | class _DuOnf1Wqi29oJUaA extends Endpoint {
10 | constructor() {
11 | super('DuOnf1Wqi29oJUaA');
12 | }
13 |
14 | /**
15 | * inserts a new record to table node.
16 | * @param app
17 | * @param req (p0: node_prefix, p1: node_address,
18 | * p2: node_port, p3: node_port_api)
19 | * @param res
20 | * @returns {*}
21 | */
22 | handler(app, req, res) {
23 | const {p0: nodePrefix, p1: nodeIpAddress, p2: nodePort, p3: nodePortApi} = req.query;
24 | if (!nodePrefix || !nodeIpAddress || !nodePort || !nodePortApi) {
25 | return res.status(400).send({
26 | api_status : 'fail',
27 | api_message: 'p0, p1, p2 and p3 are required'
28 | });
29 | }
30 |
31 | const nodeRepository = database.getRepository('node');
32 | nodeRepository.addNode({
33 | node_prefix : nodePrefix,
34 | node_address : nodeIpAddress,
35 | node_port : nodePort,
36 | node_port_api: nodePortApi
37 | }).then(() => {
38 | network.addNode(nodePrefix, nodeIpAddress, nodePort, nodePortApi);
39 | res.send({api_status: 'success'});
40 | }).catch(e => res.send({
41 | api_status : 'fail',
42 | api_message: `unexpected generic api error: (${e})`
43 | }));
44 | }
45 | }
46 |
47 |
48 | export default new _DuOnf1Wqi29oJUaA();
49 |
--------------------------------------------------------------------------------
/api/FC8ylC617zzn1Gaa/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import wallet from '../../core/wallet/wallet';
3 | import database from '../../database/database';
4 | import _ from 'lodash';
5 | import config from '../../core/config/config';
6 |
7 |
8 | /**
9 | * api get_unspent_output_summary
10 | */
11 | class _FC8ylC617zzn1Gaa extends Endpoint {
12 | constructor() {
13 | super('FC8ylC617zzn1Gaa');
14 | }
15 |
16 | /**
17 | * returns the unspent output stat summary
18 | * @param app
19 | * @param req
20 | * @param res
21 | */
22 | handler(app, req, res) {
23 | database.applyShards((shardID) => {
24 | const transactionRepository = database.getRepository('transaction', shardID);
25 | return transactionRepository.countWalletFreeOutput(wallet.defaultKeyIdentifier);
26 | }).then(unstableTransactionCounts => database.applyShards((shardID) => {
27 | const transactionRepository = database.getRepository('transaction', shardID);
28 | return transactionRepository.listTransactionOutput({
29 | address_key_identifier : wallet.defaultKeyIdentifier,
30 | is_spent : 0,
31 | is_double_spend : 0,
32 | 'transaction_output.is_stable' : 1,
33 | 'transaction_output.status!' : 3,
34 | 'transaction_output.address_not-like': [`%${config.ADDRESS_VERSION_NFT}%`, `%${config.ADDRESS_VERSION_BRIDGE}%`]
35 | }, 'amount', 128);
36 | }, 'amount', 128).then(unspentOutputs => {
37 | res.send({
38 | transaction_output_count: _.sum(unstableTransactionCounts),
39 | transaction_max_amount : _.sum(_.map(unspentOutputs, output => output.amount))
40 | });
41 | })).catch(e => {
42 | res.send({
43 | api_status : 'fail',
44 | api_message: `unexpected generic api error: (${e})`
45 | });
46 | });
47 | }
48 | }
49 |
50 |
51 | export default new _FC8ylC617zzn1Gaa();
52 |
--------------------------------------------------------------------------------
/api/FDLyQ5uo5t7jltiQ/index.js:
--------------------------------------------------------------------------------
1 | import database from '../../database/database';
2 | import Endpoint from '../endpoint';
3 | import _ from 'lodash';
4 |
5 |
6 | /**
7 | * api list_transaction_output
8 | */
9 | class _FDLyQ5uo5t7jltiQ extends Endpoint {
10 | constructor() {
11 | super('FDLyQ5uo5t7jltiQ');
12 | }
13 |
14 | /**
15 | * returns records from table transaction_output that match the provided
16 | * parameters. it returns the newest records by default
17 | * @param app
18 | * @param req (p0: transaction_id, p1: date_begin, p2: date_end, p3:
19 | * address_key_identifier, p4: is_double_spend, p5:
20 | * double_spend_date_begin, p6: double_spend_date_end, p7: is_stable,
21 | * p8: stable_date_begin, p9: stable_date_end, p10: is_spent, p11:
22 | * spent_date_begin, p12: spent_date_end, p13: order_by="create_date
23 | * desc", p14: record_limit=1000, p15: shard_id)
24 | * @param res
25 | */
26 | handler(app, req, res) {
27 | const orderBy = req.query.p13 || 'create_date desc';
28 | const limit = parseInt(req.query.p14) || 1000;
29 | const shardID = req.query.p15 || undefined;
30 | database.applyShards((dbShardID) => {
31 | const transactionRepository = database.getRepository('transaction', dbShardID);
32 | if (!transactionRepository) {
33 | return Promise.resolve([]);
34 | }
35 | return transactionRepository.listTransactionOutput({
36 | 'transaction_output.transaction_id' : req.query.p0,
37 | transaction_date_begin : req.query.p1,
38 | transaction_date_end : req.query.p2,
39 | address_key_identifier : req.query.p3,
40 | is_double_spend : req.query.p4,
41 | double_spend_date_begin : req.query.p5,
42 | double_spend_date_end : req.query.p6,
43 | 'transaction_output.is_stable' : req.query.p7,
44 | 'transaction_output.stable_date_begin': req.query.p8,
45 | 'transaction_output.stable_date_end' : req.query.p9,
46 | is_spent : req.query.p10,
47 | spent_date_begin : req.query.p11,
48 | spent_date_end : req.query.p12,
49 | 'transaction_output.shard_id' : shardID,
50 | '`transaction`.status!' : 3,
51 | }, orderBy, limit);
52 | }, orderBy, limit, shardID)
53 | .then(data => res.send(_.uniqBy(data, i => `${i.transaction_id}_${i.output_position}`)))
54 | .catch(e => res.send({
55 | api_status : 'fail',
56 | api_message: `unexpected generic api error: (${e})`
57 | }));
58 | }
59 | }
60 |
61 |
62 | export default new _FDLyQ5uo5t7jltiQ();
63 |
--------------------------------------------------------------------------------
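
With sixteen positional parameters, calls to this endpoint are easy to get wrong. The helper below is hypothetical (not part of the repository): it only maps readable names to the `p0…p15` positions documented in the JSDoc above, covering a subset of the filters.

```
// hypothetical mapping of named filters to the positional parameters of
// FDLyQ5uo5t7jltiQ (list_transaction_output); positions come from the JSDoc above.
function listTransactionOutputParams({
    transactionId, dateBegin, dateEnd, addressKeyIdentifier,
    isDoubleSpend, isStable, isSpent, orderBy, recordLimit, shardId
} = {}) {
    return {
        p0 : transactionId,
        p1 : dateBegin,
        p2 : dateEnd,
        p3 : addressKeyIdentifier,
        p4 : isDoubleSpend,
        p7 : isStable,
        p10: isSpent,
        p13: orderBy,
        p14: recordLimit,
        p15: shardId
    };
}

// e.g. the 50 newest stable, unspent outputs for a given key identifier
const params = listTransactionOutputParams({
    addressKeyIdentifier: '<address_key_identifier>',
    isStable            : 1,
    isSpent             : 0,
    recordLimit         : 50
});
```
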
/api/Fv9lheUpVYq5caRe/index.js:
--------------------------------------------------------------------------------
1 | import wallet from '../../core/wallet/wallet';
2 | import Endpoint from '../endpoint';
3 |
4 |
5 | /**
6 | * api reset_transaction_verification_timeout
7 | */
8 | class _Fv9lheUpVYq5caRe extends Endpoint {
9 | constructor() {
10 | super('Fv9lheUpVYq5caRe');
11 | }
12 |
13 | /**
14 | * resets is_timeout field in table transaction records belonging to the
15 | * provided key_identifier from true to false to allow the node to retry
16 | * transaction validation
17 | * @param app
18 | * @param req (p0: key_identifier)
19 | * @param res
20 | */
21 | handler(app, req, res) {
22 | //TODO: update is_timeout in the database
23 | if (!req.query.p0) {
24 | return res.status(400).send({
25 | api_status : 'fail',
26 | api_message: 'p0 is required'
27 | });
28 | }
29 | wallet.getConsensus().resetTransactionValidationRejected();
30 | res.send({api_status: 'success'});
31 | }
32 | }
33 |
34 |
35 | export default new _Fv9lheUpVYq5caRe();
36 |
--------------------------------------------------------------------------------
/api/GMDazQSouYWzLTCv/index.js:
--------------------------------------------------------------------------------
1 | import logManager from '../../core/log-manager';
2 | import Endpoint from '../endpoint';
3 | import _ from 'lodash';
4 |
5 |
6 | /**
7 | * api list_log
8 | */
9 | class _GMDazQSouYWzLTCv extends Endpoint {
10 | constructor() {
11 | super('GMDazQSouYWzLTCv');
12 | }
13 |
14 | /**
15 | * returns a list of log items from the node. it returns the newest records
16 | * by default
17 | * @param app
18 | * @param req (p0: type, p1: content, p2: date_begin, p3: date_end, p4:
19 | * order_by="create_date desc", p5: record_limit=1000)
20 | * @param res
21 | */
22 | handler(app, req, res) {
23 | //TODO: filter the logs, make max log size a config, write logs to file
24 | // as config too
25 | const orderBy = req.query.p4 || 'create_date desc';
26 | const limit = parseInt(req.query.p5) || 1000;
27 | let log = logManager.log.slice(0, limit);
28 | _.each(log, entry => {
29 | try {
30 | entry.content = JSON.parse(entry.content);
31 | }
32 | catch (e) {
33 | }
34 | });
35 | res.send({log});
36 | }
37 | }
38 |
39 |
40 | export default new _GMDazQSouYWzLTCv();
41 |
--------------------------------------------------------------------------------
/api/Gox4NzTLDnpEr10v/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import walletUtils from '../../core/wallet/wallet-utils';
3 |
4 |
5 | /**
6 | * api get_random_mnemonic
7 | */
8 | class _Gox4NzTLDnpEr10v extends Endpoint {
9 | constructor() {
10 | super('Gox4NzTLDnpEr10v');
11 | }
12 |
13 | /**
14 | * returns a random mnemonic phrase
15 | * @param app
16 | * @param req
17 | * @param res
18 | * @returns {*}
19 | */
20 | handler(app, req, res) {
21 | res.send({mnemonic: walletUtils.newMnemonic().phrase});
22 | }
23 | }
24 |
25 |
26 | export default new _Gox4NzTLDnpEr10v();
27 |
--------------------------------------------------------------------------------
/api/I3EoELuQCmqwvp8C/index.js:
--------------------------------------------------------------------------------
1 | import database from '../../database/database';
2 | import Endpoint from '../endpoint';
3 | import _ from 'lodash';
4 |
5 |
6 | /**
7 | * api list_transaction_input
8 | */
9 | class _I3EoELuQCmqwvp8C extends Endpoint {
10 | constructor() {
11 | super('I3EoELuQCmqwvp8C');
12 | }
13 |
14 | /**
15 | * returns records from table transaction_input that match the provided
16 | * parameters. it returns the newest records by default
17 | * @param app
18 | * @param req (p0: transaction_id, p1: date_begin, p2: date_end, p3:
19 | * address_key_identifier, p4: is_double_spend, p5:
20 | * double_spend_date_begin, p6: double_spend_date_end, p7:
21 | * output_transaction_id, p8: output_position, p9:
22 | * order_by="create_date desc", p10: record_limit=1000, p11: shard_id)
23 | * @param res
24 | */
25 | handler(app, req, res) {
26 | const orderBy = req.query.p9 || 'create_date desc';
27 | const limit = parseInt(req.query.p10) || 1000;
28 | const shardID = req.query.p11 || undefined;
29 |
30 | database.applyShards((dbShardID) => {
31 | const transactionRepository = database.getRepository('transaction', dbShardID);
32 | if (!transactionRepository) {
33 | return Promise.resolve([]);
34 | }
35 | return transactionRepository.listTransactionInput({
36 | 'transaction_input.transaction_id': req.query.p0,
37 | transaction_date_begin : req.query.p1,
38 | transaction_date_end : req.query.p2,
39 | address_key_identifier : req.query.p3,
40 | is_double_spend : req.query.p4,
41 | double_spend_date_begin : req.query.p5,
42 | double_spend_date_end : req.query.p6,
43 | output_transaction_id : req.query.p7,
44 | output_position : req.query.p8,
45 | 'transaction_input.shard_id' : shardID
46 | }, orderBy, limit);
47 | }, orderBy, limit, shardID)
48 | .then(data => res.send(_.uniqBy(data, i => `${i.transaction_id}_${i.input_position}`)))
49 | .catch(e => res.send({
50 | api_status : 'fail',
51 | api_message: `unexpected generic api error: (${e})`
52 | }));
53 | }
54 | }
55 |
56 |
57 | export default new _I3EoELuQCmqwvp8C();
58 |
--------------------------------------------------------------------------------
/api/K9wxNE4Ved38xhJm/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import database from '../../database/database';
3 | import peer from '../../net/peer';
4 | import walletUtils from '../../core/wallet/wallet-utils';
5 |
6 |
7 | /**
8 | * api is_transaction_data_valid
9 | */
10 | class _K9wxNE4Ved38xhJm extends Endpoint {
11 | constructor() {
12 | super('K9wxNE4Ved38xhJm');
13 | }
14 |
15 | /**
16 | * returns HTTP 200 if data is correct, HTTP 201 if data is invalid and
17 | * needs to be fetched again, HTTP 404 if transaction data is not
18 | * available.
19 | * @param app
20 | * @param req (p0: transaction_id, p1: shard_id)
21 | * @param res
22 | */
23 | handler(app, req, res) {
24 | if (!req.query.p0 || !req.query.p1) {
25 | return res.status(400).send({
26 | api_status : 'fail',
27 | api_message: 'p0 and p1 are required'
28 | });
29 | }
30 | const transactionId = req.query.p0;
31 | database.firstShardORShardZeroRepository('transaction', req.query.p1, transactionRepository => {
32 | return transactionRepository.getTransactionObjectFromDB(transactionId)
33 | .then(transaction => ([
34 | transactionRepository.normalizeTransactionObject(transaction),
35 | transactionRepository
36 | ]));
37 | }).then(([transaction, transactionRepository]) => {
38 | if (!transaction) {
39 | peer.transactionSyncRequest(transactionId).catch(_ => _);
40 | return res.status(404).send();
41 | }
42 |
43 | if (walletUtils.isValidTransactionObject(transaction)) {
44 | return res.status(200).send();
45 | }
46 |
47 | return transactionRepository.deleteTransaction(transactionId)
48 | .then(() => {
49 | peer.transactionSyncRequest(transactionId).catch(_ => _);
50 | return res.status(201).send();
51 | });
52 | }).catch(e => res.send({
53 | api_status : 'fail',
54 | api_message: `unexpected generic api error: (${e.message})`
55 | }));
56 | }
57 |
58 | }
59 |
60 |
61 | export default new _K9wxNE4Ved38xhJm();
62 |
--------------------------------------------------------------------------------
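
Unlike most endpoints here, this one reports its result through the HTTP status code rather than a JSON body. A minimal caller-side sketch, assuming only the 200/201/404 semantics documented above; the base URL and HTTP client are up to the caller.

```
// interpret the status codes documented for K9wxNE4Ved38xhJm
// (is_transaction_data_valid); apiBase is a placeholder argument.
async function checkTransactionData(apiBase, transactionId, shardId) {
    const response = await fetch(`${apiBase}/K9wxNE4Ved38xhJm?p0=${transactionId}&p1=${shardId}`);
    switch (response.status) {
        case 200:
            return 'valid';              // transaction data is correct
        case 201:
            return 'invalid-refetching'; // data was invalid; the node deleted it and requested a re-sync
        case 404:
            return 'not-available';      // data not on this node; a sync request was issued
        default:
            return 'error';
    }
}
```
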
/api/KN2ZttYDEKzCulEZ/index.js:
--------------------------------------------------------------------------------
1 | import database, {Database} from '../../database/database';
2 | import Endpoint from '../endpoint';
3 |
4 |
5 | // api get_transaction_output
6 | class _KN2ZttYDEKzCulEZ extends Endpoint {
7 | constructor() {
8 | super('KN2ZttYDEKzCulEZ');
9 | }
10 |
11 | /**
12 | * returns a single record from table transaction_output as indicated by
13 | * transaction_id and output_position in the indicated shard
14 | * @param app
15 | * @param req (p0: transaction_id, p1: output_position,
16 | * p2: shard_id)
17 | * @param res
18 | * @returns {*}
19 | */
20 | handler(app, req, res) {
21 | if (!req.query.p0 || !req.query.p1 || !req.query.p2) {
22 | return res.status(400).send({
23 | api_status : 'fail',
24 | api_message: 'p0, p1 and p2 are required'
25 | });
26 | }
27 |
28 | database.firstShardORShardZeroRepository('transaction', req.query.p2, transactionRepository => {
29 | return transactionRepository.getTransactionOutput({
30 | '`transaction`.transaction_id': req.query.p0,
31 | 'output_position' : req.query.p1,
32 | '`transaction`.shard_id' : req.query.p2
33 | });
34 | }).then(transactionOutput => {
35 | res.send(transactionOutput || {
36 | api_status : 'fail',
37 | api_message: `the transaction output ${req.query.p1} of transaction id ${req.query.p0} was not found at shard with id ${req.query.p2}`
38 | });
39 | }).catch(e => res.send({
40 | api_status : 'fail',
41 | api_message: `unexpected generic api error: (${e})`
42 | }));
43 | }
44 | }
45 |
46 |
47 | export default new _KN2ZttYDEKzCulEZ();
48 |
--------------------------------------------------------------------------------
/api/LLpSTquu4tZL8Nu5/index.js:
--------------------------------------------------------------------------------
1 | import database from '../../database/database';
2 | import Endpoint from '../endpoint';
3 |
4 |
5 | /**
6 | * api update_config_value
7 | */
8 | class _LLpSTquu4tZL8Nu5 extends Endpoint {
9 | constructor() {
10 | super('LLpSTquu4tZL8Nu5');
11 | }
12 |
13 | /**
14 | * updates table config value field for the indicated config_id record
15 | * @param app
16 | * @param req (p0: config_id, p1:value)
17 | * @param res
18 | * @returns {*}
19 | */
20 | handler(app, req, res) {
21 | if (req.method === 'POST') {
22 | const resultBodyKey = Object.keys(req.body);
23 | if (!resultBodyKey.includes('p0') || !req.body.p0 || !resultBodyKey.includes('p1')) {
24 | return res.status(400)
25 | .send({
26 | api_status : 'fail',
27 | api_message: `p0 and p1 are required`
28 | });
29 | }
30 |
31 | let configID = req.body.p0;
32 | let value = req.body.p1;
33 | const configurationRepository = database.getRepository('config');
34 | if (typeof value === 'object') {
35 | value = JSON.stringify(value);
36 | }
37 |
38 | configurationRepository.updateConfigByID(configID, value)
39 | .then((row) => res.send({
40 | api_status: 'success',
41 | row : row
42 | }))
43 | .catch(e => res.send({
44 | api_status : 'fail',
45 | api_message: `unexpected generic api error: (${e})`
46 | }));
47 |
48 | }
49 | else {
50 | return res.status(400)
51 | .send({
52 | api_status : 'fail',
53 | api_message: 'POST only'
54 | });
55 | }
56 | }
57 | }
58 |
59 |
60 | export default new _LLpSTquu4tZL8Nu5();
61 |
--------------------------------------------------------------------------------
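
This endpoint accepts POST only and reads `p0` (config_id) and `p1` (value) from the request body. A minimal sketch of a call, assuming api/server.js parses JSON request bodies (which the handler's use of `req.body` implies); the base URL and config_id are placeholders.

```
// update a config value through LLpSTquu4tZL8Nu5 (update_config_value);
// API_BASE and the config_id are placeholders.
const API_BASE = '<node-api-base-url>';

fetch(`${API_BASE}/LLpSTquu4tZL8Nu5`, {
    method : 'POST',
    headers: {'content-type': 'application/json'},
    body   : JSON.stringify({
        p0: '<config_id>',   // which config row to update
        p1: 'new-value'      // object values are stringified by the handler
    })
})
    .then(response => response.json())
    .then(result => console.log(result.api_status, result.row));
```
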
/api/LOLb7q23p8rYSLwv/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import WalletUtils from '../../core/wallet/wallet-utils';
3 |
4 |
5 | /**
6 | * api get_is_key_present
7 | */
8 | class _LOLb7q23p8rYSLwv extends Endpoint {
9 | constructor() {
10 | super('LOLb7q23p8rYSLwv');
11 | }
12 |
13 | handler(app, req, res) {
14 | WalletUtils.loadMnemonic().then(() => {
15 | return res.send({
16 | private_key_exists: true
17 | });
18 | })
19 | .catch(e => res.send({
20 | api_status : 'fail',
21 | api_message: `unexpected generic api error: (${e})`
22 | }));
23 | }
24 | }
25 |
26 |
27 | export default new _LOLb7q23p8rYSLwv();
28 |
--------------------------------------------------------------------------------
/api/Lb2fuhVMDQm1DrLL/index.js:
--------------------------------------------------------------------------------
1 | import wallet from '../../core/wallet/wallet';
2 | import Endpoint from '../endpoint';
3 |
4 |
5 | /**
6 | * api new_keychain_address
7 | */
8 | class _Lb2fuhVMDQm1DrLL extends Endpoint {
9 | constructor() {
10 | super('Lb2fuhVMDQm1DrLL');
11 | }
12 |
13 | /**
14 | * generates a new address and returns the new address record from table
15 | * address
16 | * @param app
17 | * @param req
18 | * @param res
19 | */
20 | handler(app, req, res) {
21 | wallet.addNewAddress(wallet.getDefaultActiveWallet())
22 | .then(address => {
23 | res.send(address);
24 | })
25 | .catch(e => res.send({
26 | api_status : 'fail',
27 | api_message: `unexpected generic api error: (${e})`
28 | }));
29 | }
30 | }
31 |
32 |
33 | export default new _Lb2fuhVMDQm1DrLL();
34 |
--------------------------------------------------------------------------------
/api/Mh9QifTIESw5t1fa/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import database from '../../database/database';
3 | import async from 'async';
4 | import _ from 'lodash';
5 | import fileManager from '../../core/storage/file-manager';
6 | import wallet from '../../core/wallet/wallet';
7 |
8 |
9 | /**
10 | * api get_transaction_output_data
11 | */
12 | class _Mh9QifTIESw5t1fa extends Endpoint {
13 | constructor() {
14 | super('Mh9QifTIESw5t1fa');
15 | this.normalizationRepository = database.getRepository('normalization');
16 | }
17 |
18 | /**
19 | *
20 | * @param app
21 | * @param req (p0: transaction_id, p1: address_key_identifier, p2:
22 | * attribute_type_id, p3: file_hash, p4: file_key)
23 | * @param res
24 | */
25 | handler(app, req, res) {
26 | fileManager.getBufferByTransactionAndFileHash(req.query.p0, req.query.p1, req.query.p2, req.query.p3, req.query.p4)
27 | .then(({
28 | file_data: fileData,
29 | data_type: dataType,
30 | mime_type: mimeType
31 | }) => {
32 |
33 | if (!fileData) {
34 | return Promise.reject(`cannot get file data`);
35 | }
36 |
37 | if (dataType === 'json') {
38 | res.setHeader('content-type', 'application/json');
39 | return res.send(JSON.parse(fileData.toString()));
40 | }
41 |
42 | if (mimeType) {
43 | res.setHeader('content-type', mimeType);
44 | }
45 | res.send(fileData);
46 | })
47 | .catch(e => res.send({
48 | api_status : 'fail',
49 | api_message: `unexpected generic api error: (${e})`
50 | }));
51 | }
52 | }
53 |
54 |
55 | export default new _Mh9QifTIESw5t1fa();
56 |
--------------------------------------------------------------------------------
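
Because the handler above returns either parsed JSON or raw file bytes depending on the stored data type, a caller should branch on the content-type header it sets. A sketch; the base URL argument and parameter values are placeholders.

```
// fetch transaction output data via Mh9QifTIESw5t1fa (get_transaction_output_data)
// and branch on the content-type header set by the handler.
async function getTransactionOutputData(apiBase, query) {
    const search   = new URLSearchParams(query);  // {p0..p4} as documented above
    const response = await fetch(`${apiBase}/Mh9QifTIESw5t1fa?${search}`);
    const mimeType = response.headers.get('content-type') || '';
    if (mimeType.includes('application/json')) {
        return response.json();       // structured data, or an api_status error object
    }
    return response.arrayBuffer();    // raw file bytes (e.g. an image)
}
```
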
/api/Mu7VpxzfYyQimf3V/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import database from '../../database/database';
3 | import transactionDataUtils from '../../core/utils/transaction-data-utils';
4 |
5 |
6 | /**
7 | * api list_transaction_output_attribute_received
8 | */
9 | class _Mu7VpxzfYyQimf3V extends Endpoint {
10 | constructor() {
11 | super('Mu7VpxzfYyQimf3V');
12 | this.normalizationRepository = database.getRepository('normalization');
13 | }
14 |
15 | /**
16 | *
17 | * @param app
18 | * @param req (p0: date_begin, p1: date_end, p2: node_id_origin, p3:
19 | * is_stable, p4: is_parent, p5: is_timeout, p6: create_date_begin, p7:
20 | * create_date_end, p8: status, p9: version,
21 | * p10:address_key_identifier, p11: attribute_type_id, p12: data_type,
22 | * p13:order_by="create_date desc", p14: record_limit=1000, p15:
23 | * shard_id)
24 | * @param res
25 | */
26 | handler(app, req, res) {
27 | const orderBy = req.query.p13 || '`transaction`.create_date desc';
28 | const limit = parseInt(req.query.p14) || 1000;
29 | const shardID = req.query.p15 || undefined;
30 | const dataType = req.query.p12 || undefined;
31 | const attributeTypeId = req.query.p11 || undefined;
32 |
33 | database.applyShards((dbShardID) => {
34 | const transactionRepository = database.getRepository('transaction', dbShardID);
35 | if (!transactionRepository) {
36 | return Promise.resolve([]);
37 | }
38 | return transactionRepository.listTransactionOutput({
39 | '`transaction`.transaction_date_begin': req.query.p0,
40 | '`transaction`.transaction_date_end' : req.query.p1,
41 | '`transaction`.node_id_origin' : req.query.p2,
42 | '`transaction`.is_stable' : req.query.p3,
43 | '`transaction`.is_parent' : req.query.p4,
44 | '`transaction`.is_timeout' : req.query.p5,
45 | '`transaction`.create_date_begin' : req.query.p6,
46 | '`transaction`.create_date_end' : req.query.p7,
47 | '`transaction`.status' : req.query.p8,
48 | '`transaction`.version' : req.query.p9,
49 | 'address_key_identifier' : req.query.p10,
50 | 'is_spent' : dataType === 'tangled_nft' ? 0 : undefined,
51 | 'output_position!' : -1, //discard fee output
52 | '`transaction`.shard_id' : shardID
53 | }, orderBy, limit);
54 | }, orderBy, limit, shardID)
55 | .then(outputList => {
56 | transactionDataUtils.processOutputList(outputList, attributeTypeId, orderBy, limit, shardID, dataType).then(data => {
57 | res.send(data);
58 | });
59 | })
60 | .catch(e => res.send({
61 | api_status : 'fail',
62 | api_message: `unexpected generic api error: (${e})`
63 | }));
64 | }
65 | }
66 |
67 |
68 | export default new _Mu7VpxzfYyQimf3V();
69 |
--------------------------------------------------------------------------------
/api/NPCpnfUyPHRH4j29/index.js:
--------------------------------------------------------------------------------
1 | import database from '../../database/database';
2 | import Endpoint from '../endpoint';
3 | import genesisConfig from '../../core/genesis/genesis-config';
4 | import _ from 'lodash';
5 |
6 |
7 | /**
8 | * api get_known_wallet_balance
9 | */
10 | class _NPCpnfUyPHRH4j29 extends Endpoint {
11 | constructor() {
12 | super('NPCpnfUyPHRH4j29');
13 | }
14 |
15 | /**
16 | * returns the available (stable) balance and pending (unstable) balance of
17 | * all known wallet
18 | * @param app
19 | * @param req
20 | * @param res
21 | * @returns {*}
22 | */
23 | handler(app, req, res) {
24 | const transactionRepository = database.getRepository('transaction', genesisConfig.genesis_shard_id);
25 | transactionRepository.getAllWalletBalance(true)
26 | .then(stableBalance => {
27 | const balance = {};
28 | return transactionRepository.getAllWalletBalance(false)
29 | .then(pendingBalance => {
30 | _.each(stableBalance, item => balance[item.address_key_identifier] = {
31 | ...item,
32 | balance_pending: _.find(pendingBalance, {address_key_identifier: item.address_key_identifier})?.balance_pending || 0
33 | });
34 | _.each(pendingBalance, item => {
35 | if (!balance[item.address_key_identifier]) {
36 | balance[item.address_key_identifier] = {
37 | address_key_identifier: item.address_key_identifier,
38 | balance_stable : 0,
39 | balance_pending : item.balance_pending
40 | };
41 | }
42 | });
43 | res.send(_.values(balance));
44 | });
45 | })
46 | .catch(e => res.send({
47 | api_status : 'fail',
48 | api_message: `unexpected generic api error: (${e})`
49 | }));
50 | }
51 | }
52 |
53 |
54 | export default new _NPCpnfUyPHRH4j29();
55 |
--------------------------------------------------------------------------------
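
To make the merge above concrete, here is the same lodash pattern run on made-up sample balances; the field names mirror the ones the handler itself emits for missing entries.

```
// the stable/pending balance merge from get_known_wallet_balance,
// run on made-up sample data.
import _ from 'lodash';

const stableBalance  = [{address_key_identifier: '1abc', balance_stable: 1000}];
const pendingBalance = [
    {address_key_identifier: '1abc', balance_pending: 50},
    {address_key_identifier: '1def', balance_pending: 200}
];

const balance = {};
_.each(stableBalance, item => balance[item.address_key_identifier] = {
    ...item,
    balance_pending: _.find(pendingBalance, {address_key_identifier: item.address_key_identifier})?.balance_pending || 0
});
_.each(pendingBalance, item => {
    if (!balance[item.address_key_identifier]) {
        balance[item.address_key_identifier] = {
            address_key_identifier: item.address_key_identifier,
            balance_stable        : 0,
            balance_pending       : item.balance_pending
        };
    }
});

console.log(_.values(balance));
// [ { address_key_identifier: '1abc', balance_stable: 1000, balance_pending: 50 },
//   { address_key_identifier: '1def', balance_stable: 0, balance_pending: 200 } ]
```
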
/api/OBexeX0f0MsnL1S3/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import wallet from '../../core/wallet/wallet';
3 | import database from '../../database/database';
4 | import _ from 'lodash';
5 | import WalletUtils from '../../core/wallet/wallet-utils';
6 |
7 |
8 | /**
9 | * api get_session
10 | */
11 | class _OBexeX0f0MsnL1S3 extends Endpoint {
12 | constructor() {
13 | super('OBexeX0f0MsnL1S3');
14 | }
15 |
16 | /**
17 | * gets the active wallet in the node
18 | * @param app
19 | * @param req
20 | * @param res
21 | * @returns {*}
22 | */
23 | handler(app, req, res) {
24 | if (!wallet.initialized || _.isEmpty(wallet.getActiveWallets())) {
25 | let resultError = {
26 | api_status : 'fail',
27 | api_message : 'wallet not loaded',
28 | private_key_exists: false
29 | };
30 |
31 | WalletUtils.loadMnemonic().then(() => {
32 | resultError.private_key_exists = true;
33 |
34 | res.send(resultError);
35 | }).catch(e => {
36 | res.send(resultError);
37 | });
38 | }
39 | else {
40 | const walletID = wallet.getDefaultActiveWallet();
41 | const keyIdentifier = wallet.defaultKeyIdentifier;
42 | database.getRepository('address').getAddressBaseAttribute(keyIdentifier, 'key_public')
43 | .then(publicKey => {
44 | const addressVersion = database.getRepository('address').getDefaultAddressVersion().version;
45 | res.send({
46 | api_status: 'success',
47 | wallet : {
48 | id : walletID,
49 | address : `${keyIdentifier}${addressVersion}${keyIdentifier}`,
50 | address_key_identifier: keyIdentifier,
51 | address_public_key : publicKey
52 | }
53 | });
54 | });
55 | }
56 | }
57 | }
58 |
59 |
60 | export default new _OBexeX0f0MsnL1S3();
61 |
--------------------------------------------------------------------------------
/api/OMAlxmPq4rZs71K8/index.js:
--------------------------------------------------------------------------------
1 | import database from '../../database/database';
2 | import Endpoint from '../endpoint';
3 |
4 |
5 | /**
6 | * api list_shard
7 | */
8 | class _OMAlxmPq4rZs71K8 extends Endpoint {
9 | constructor() {
10 | super('OMAlxmPq4rZs71K8');
11 | }
12 |
13 | /**
14 | * returns records from table shard. it returns the newest records by
15 | * default
16 | * @param app
17 | * @param req (p0: shard_type, p1: is_required, p2: node_id_origin, p3:
18 | * status, p4: order_by="create_date desc", p5: record_limit=1000)
19 | * @param res
20 | */
21 | handler(app, req, res) {
22 | const orderBy = req.query.p4 || 'create_date desc';
23 | const limit = parseInt(req.query.p5) || 1000;
24 | const shardRepository = database.getRepository('shard');
25 | shardRepository.listShard({
26 | shard_type : req.query.p0,
27 | is_required : req.query.p1,
28 | node_id_origin: req.query.p2,
29 | status : req.query.p3
30 | }, orderBy, limit).then(attributeList => {
31 | attributeList.forEach(attribute => attribute.schema_path = '${private_field}');
32 | res.send(attributeList);
33 | }).catch(e => res.send({
34 | api_status : 'fail',
35 | api_message: `unexpected generic api error: (${e})`
36 | }));
37 | }
38 | }
39 |
40 |
41 | export default new _OMAlxmPq4rZs71K8();
42 |
--------------------------------------------------------------------------------
/api/P2LMh8NsUTkpWAH3/index.js:
--------------------------------------------------------------------------------
1 | import wallet from '../../core/wallet/wallet';
2 | import Endpoint from '../endpoint';
3 | import _ from 'lodash';
4 | import async from 'async';
5 |
6 |
7 | /**
8 | * api reset_validation_transaction_by_guid
9 | */
10 | class _P2LMh8NsUTkpWAH3 extends Endpoint {
11 | constructor(props) {
12 | super('P2LMh8NsUTkpWAH3');
13 | }
14 |
15 | /**
16 | * this API reset transaction validation by transaction guid
17 | * @param app
18 | * @param req
19 | * @param res
20 | */
21 | handler(app, req, res) {
22 | let transactionIdList;
23 |
24 | if (req.method === 'POST') {
25 | transactionIdList = req.body.p0;
26 | }
27 | else if (req.method === 'GET') {
28 | transactionIdList = req.query.p0;
29 | }
30 |
31 | if (!transactionIdList) {
32 | return res
33 | .status(400)
34 | .send({
35 | api_status : 'fail',
36 |                        api_message: 'p0 is required'
37 | });
38 | }
39 |
40 | if (_.isArray(transactionIdList)) {
41 | async.eachSeries(transactionIdList, (transaction, callback) => {
42 | wallet.resetTransactionValidationByTransactionId(transaction)
43 | .then(() => callback())
44 | .catch((e) => callback(true, e));
45 | }, (error, exception) => {
46 | if (error) {
47 | res.send({
48 | api_status : 'fail',
49 | api_message: `unexpected generic api error: (${exception})`
50 | });
51 | }
52 | else {
53 | res.send({
54 | api_status: 'success'
55 | });
56 | }
57 | });
58 |
59 | }
60 | else {
61 | wallet.resetTransactionValidationByTransactionId(transactionIdList)
62 | .then(() => res.send({
63 | api_status: 'success'
64 | }))
65 | .catch(e => res.send({
66 | api_status : 'fail',
67 | api_message: `unexpected generic api error: (${e})`
68 | }));
69 | }
70 | }
71 | }
72 |
73 |
74 | export default new _P2LMh8NsUTkpWAH3();
75 |
--------------------------------------------------------------------------------
/api/PKUv2JfV87KpEZwE/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import database from '../../database/database';
3 | import walletUtils from '../../core/wallet/wallet-utils';
4 | import wallet from '../../core/wallet/wallet';
5 |
6 |
7 | /**
8 | * api get_address_private_key
9 | */
10 | class _PKUv2JfV87KpEZwE extends Endpoint {
11 | constructor() {
12 | super('PKUv2JfV87KpEZwE');
13 | }
14 |
15 | /**
16 | * returns the private key for the indicated address that is derived from
17 | * the master key that is temporarily stored in node memory for the active
18 | * wallet
19 | * @param app
20 | * @param req (p0: address)
21 | * @param res
22 | * @returns {*}
23 | */
24 | handler(app, req, res) {
25 | if (!req.query.p0) {
26 | return res.status(400).send({
27 | api_status : 'fail',
28 | api_message: 'p0 is required'
29 | });
30 | }
31 | else if (!wallet.initialized) {
32 | return res.status(401).send({
33 | api_status : 'fail',
34 | api_message: 'the wallet is not initialized'
35 | });
36 | }
37 |
38 | const keychainRepository = database.getRepository('keychain');
39 | keychainRepository.getAddress(req.query.p0)
40 | .then(address => {
41 | const walletID = address.wallet_id;
42 | const extendedPrivateKey = wallet.getActiveWallets()[walletID];
43 | if (!extendedPrivateKey) {
44 | return res.status(401).send({
45 | api_status : 'fail',
46 | api_message: 'the wallet is not active'
47 | });
48 | }
49 |
50 | const privateKey = walletUtils.derivePrivateKey(extendedPrivateKey, address.is_change, address.address_position);
51 | res.send({private_key_hex: privateKey.toString('hex')});
52 | })
53 | .catch(e => res.send({
54 | api_status : 'fail',
55 | api_message: `unexpected generic api error: (${e})`
56 | }));
57 | }
58 | }
59 |
60 |
61 | export default new _PKUv2JfV87KpEZwE();
62 |
--------------------------------------------------------------------------------
/api/PMW9LXqUv7vXLpbA/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import walletUtils from '../../core/wallet/wallet-utils';
3 | import services from '../../core/services/services';
4 | import eventBus from '../../core/event-bus';
5 | import wallet from '../../core/wallet/wallet';
6 | import database from '../../database/database';
7 |
8 |
9 | /**
10 | * api new_session
11 | */
12 | class _PMW9LXqUv7vXLpbA extends Endpoint {
13 | constructor() {
14 | super('PMW9LXqUv7vXLpbA');
15 | }
16 |
17 | /**
18 | * uses the passphrase to activate the wallet used in the previous session
19 | * in the node
20 | * @param app
21 | * @param req (p0: passphrase)
22 | * @param res
23 | * @returns {*}
24 | */
25 | handler(app, req, res) {
26 | let {p0: passPhrase} = req.query;
27 |
28 | if (!passPhrase) {
29 | return res.status(400).send({
30 | api_status : 'fail',
31 |                 api_message: 'p0 is required'
32 | });
33 | }
34 |
35 | let authenticationErrorHandler, authenticationSuccessHandler;
36 | eventBus.once('wallet_ready', () => {
37 | eventBus.emit('wallet_key', passPhrase);
38 | });
39 |
40 | authenticationErrorHandler = () => {
41 | res.status(401).send({
42 | api_status : 'fail',
43 | api_message: 'wallet authentication error'
44 | });
45 | eventBus.removeListener('wallet_unlock', authenticationSuccessHandler);
46 | };
47 | eventBus.once('wallet_authentication_error', authenticationErrorHandler);
48 |
49 | authenticationSuccessHandler = () => {
50 | const walletID = wallet.getDefaultActiveWallet();
51 | database.getRepository('keychain').getWalletDefaultKeyIdentifier(walletID)
52 | .then(keyIdentifier => {
53 | database.getRepository('address').getAddressBaseAttribute(keyIdentifier, 'key_public')
54 | .then(publicKey => {
55 | const addressVersion = database.getRepository('address').getDefaultAddressVersion().version;
56 | res.send({
57 | api_status: 'success',
58 | wallet : {
59 | id : walletID,
60 | address : `${keyIdentifier}${addressVersion}${keyIdentifier}`,
61 | address_key_identifier: keyIdentifier,
62 | address_public_key : publicKey
63 | }
64 | });
65 | });
66 | });
67 | eventBus.removeListener('wallet_authentication_error', authenticationErrorHandler);
68 | };
69 | eventBus.once('wallet_unlock', authenticationSuccessHandler);
70 | services.stop();
71 | services.initialize({
72 | initialize_wallet_event: false,
73 | auto_create_wallet : false
74 | })
75 | .catch(e => {
76 | eventBus.removeListener('wallet_authentication_error', authenticationErrorHandler);
77 | eventBus.removeListener('wallet_unlock', authenticationSuccessHandler);
78 | res.send({
79 | api_status : 'fail',
80 | api_message: `unexpected generic api error: (${e})`
81 | });
82 | });
83 |
84 | }
85 | }
86 |
87 |
88 | export default new _PMW9LXqUv7vXLpbA();
89 |
--------------------------------------------------------------------------------
/api/PZ7x3HVHVstLNYf0/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import logManager from '../../core/log-manager';
3 |
4 |
5 | /**
6 | * api event_log_list
7 | */
8 | class _PZ7x3HVHVstLNYf0 extends Endpoint {
9 | constructor() {
10 | super('PZ7x3HVHVstLNYf0');
11 | }
12 |
13 | /**
14 | * returns event log list
15 | * @param app
16 | * @param req (p0: log_limit=1000)
17 | * @param res
18 | */
19 | handler(app, req, res) {
20 | const limit = parseInt(req.query.p0) || 1000;
21 | res.send({
22 | api_status : 'success',
23 | event_log_list: logManager.getLog(limit)
24 | });
25 | }
26 | }
27 |
28 |
29 | export default new _PZ7x3HVHVstLNYf0();
30 |
--------------------------------------------------------------------------------
/api/PwwdU9lZbgMqS2DA/index.js:
--------------------------------------------------------------------------------
1 | import walletUtils from '../../core/wallet/wallet-utils';
2 | import Endpoint from '../endpoint';
3 | import database from '../../database/database';
4 | import server from '../server';
5 |
6 |
7 | /**
8 | * api register_node_identity
9 | */
10 | class _PwwdU9lZbgMqS2DA extends Endpoint {
11 | constructor() {
12 | super('PwwdU9lZbgMqS2DA');
13 | }
14 |
15 | /**
16 | * this API pushes a value from the client node to the host node for it to
17 | * apply to its database. it upserts a record in the host node's table
18 | * node_attribute with attribute_type_id = node_public_key. if a
19 |      * corresponding node_id does not exist in table node, it is inserted as
20 | * well
21 | * @param app
22 | * @param req (p0: public_key)
23 | * @param res
24 | * @returns {*}
25 | */
26 | handler(app, req, res) {
27 | if (!req.query.p0) {
28 | return res.status(400).send({
29 | api_status : 'fail',
30 | api_message: 'p0 is required'
31 | });
32 | }
33 | try {
34 | if (!walletUtils.isValidNodeIdentity(req.params.nodeID, req.query.p0, server.nodeID, req.params.nodeSignature)) {
35 | return res.send({
36 | api_status : 'fail',
37 | api_message: 'node registration error: invalid node identity'
38 | });
39 | }
40 | const nodeRepository = database.getRepository('node');
41 | nodeRepository.addNodeAttribute(req.params.nodeID, 'node_public_key', req.query.p0)
42 | .then(() => {
43 | res.send({api_status: 'success'});
44 | })
45 | .catch(e => res.send({
46 | api_status : 'fail',
47 | api_message: `unexpected generic api error: (${e})`
48 | }));
49 | }
50 | catch (e) {
51 | res.send({
52 | api_status : 'fail',
53 | api_message: `unexpected generic api error: (${e})`
54 | });
55 | }
56 |
57 | }
58 | }
59 |
60 |
61 | export default new _PwwdU9lZbgMqS2DA();
62 |
--------------------------------------------------------------------------------
/api/QISzUVake29059bi/index.js:
--------------------------------------------------------------------------------
1 | import walletUtils from '../../core/wallet/wallet-utils';
2 | import Endpoint from '../endpoint';
3 | import database from '../../database/database';
4 | import server from '../server';
5 | import wallet from '../../core/wallet/wallet';
6 |
7 |
8 | /**
9 | * api reset_transaction_validation
10 | */
11 | class _QISzUVake29059bi extends Endpoint {
12 | constructor() {
13 | super('QISzUVake29059bi');
14 | }
15 |
16 | /**
17 | * this API resets transaction validation status
18 | * @param app
19 | * @param req
20 | * @param res
21 | * @returns {*}
22 | */
23 | handler(app, req, res) {
24 | wallet.resetValidationOnLeafTransactions();
25 | res.send({success: true});
26 | }
27 | }
28 |
29 |
30 | export default new _QISzUVake29059bi();
31 |
--------------------------------------------------------------------------------
/api/QgbDSAuy1SYgocs6/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import configLoader from '../../core/config/config-loader';
3 |
4 |
5 | /**
6 | * api reload_configs_from_database
7 | */
8 | class _QgbDSAuy1SYgocs6 extends Endpoint {
9 | constructor() {
10 | super('QgbDSAuy1SYgocs6');
11 | }
12 |
13 | /**
14 | * reloads configurations from the database
15 | * @param app
16 | * @param req
17 | * @param res
18 | * @returns {*}
19 | */
20 | handler(app, req, res) {
21 | configLoader.load(true)
22 | .then(() => res.send({api_status: 'success'}))
23 | .catch(e => res.send({
24 | api_status : 'fail',
25 | api_message: `unexpected generic api error: (${e?.message || e.cause || e})`
26 | }));
27 | }
28 | }
29 |
30 |
31 | export default new _QgbDSAuy1SYgocs6();
32 |
--------------------------------------------------------------------------------
/api/RIlwZyfnizp2i8wh/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import wallet from '../../core/wallet/wallet';
3 |
4 |
5 | /**
6 | * api wallet_stop_ongoing_transaction
7 | */
8 | class _RIlwZyfnizp2i8wh extends Endpoint {
9 | constructor() {
10 | super('RIlwZyfnizp2i8wh');
11 | }
12 |
13 | /**
14 |      * stops the current ongoing transaction, if any.
15 | * @param app
16 | * @param req
17 | * @param res
18 | */
19 | handler(app, req, res) {
20 | wallet.interruptTransactionSendInProgress();
21 | res.send({success: true});
22 | }
23 | }
24 |
25 |
26 | export default new _RIlwZyfnizp2i8wh();
27 |
--------------------------------------------------------------------------------
/api/RLOk0Wji0lQVjynT/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import os from 'os';
3 | import {NODE_MILLIX_BUILD_DATE, NODE_MILLIX_VERSION} from '../../core/config/config';
4 |
5 |
6 | /**
7 | * api get_os_info
8 | */
9 | class _RLOk0Wji0lQVjynT extends Endpoint {
10 | constructor() {
11 | super('RLOk0Wji0lQVjynT');
12 | }
13 |
14 | /**
15 | * returns the node os info
16 | * @param app
17 | * @param req
18 | * @param res
19 | */
20 | handler(app, req, res) {
21 | try {
22 | const osCpus = {
23 | model: {},
24 | speed: {}
25 | };
26 | for (let c = 0; c < os.cpus().length; c++) {
27 | osCpus.model[os.cpus()[c].model] = (osCpus.model[os.cpus()[c].model] || 0) + 1;
28 | osCpus.speed[os.cpus()[c].speed] = (osCpus.speed[os.cpus()[c].speed] || 0) + 1;
29 | }
30 |
31 | let cpu = {
32 | model : [],
33 | speed : [],
34 | loadavg: []
35 | };
36 | for (const p in osCpus.model) {
37 | cpu.model.push(osCpus.model[p] + ' × ' + p);
38 | }
39 | for (const p in osCpus.speed) {
40 | cpu.speed.push(osCpus.speed[p] + ' × ' + p);
41 | }
42 | cpu.model = cpu.model.join(', ');
43 | cpu.speed = cpu.speed.join(', ') + ' MHz';
44 |
45 | const loadavg = os.loadavg();
46 | loadavg.forEach(item => {
47 | cpu.loadavg.push(item.toFixed(2));
48 | });
49 |
50 | let memory = {
51 | total : Math.round((os.totalmem() / 1024 / 1024 / 1024) * 100) / 100 + 'GB',
52 | free : Math.round((os.freemem() / 1024 / 1024 / 1024) * 100) / 100 + 'GB',
53 | freePercent: Math.round(os.freemem() / os.totalmem() * 100).toString() + '%'
54 | };
55 |
56 | res.send({
57 | type : os.type(),
58 | platform : os.platform(),
59 | release : os.release(),
60 | arch : os.arch(),
61 | cpu : cpu,
62 | memory : memory,
63 | node_millix_version : NODE_MILLIX_VERSION,
64 | node_millix_build_date: NODE_MILLIX_BUILD_DATE
65 | });
66 | }
67 | catch (e) {
68 | res.send({
69 | api_status : 'fail',
70 | api_message: `unexpected generic api error: (${e})`
71 | });
72 | }
73 | }
74 | }
75 |
76 |
77 | export default new _RLOk0Wji0lQVjynT();
78 |
--------------------------------------------------------------------------------
/api/WGem8x5aycBqFXWQ/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import config from '../../core/config/config';
3 | import wallet from '../../core/wallet/wallet';
4 | import os from 'os';
5 | import _ from 'lodash';
6 | import server from '../server';
7 |
8 | const https = require('https');
9 |
10 |
11 | /**
12 | * api get_available_version
13 | */
14 | class _WGem8x5aycBqFXWQ extends Endpoint {
15 | constructor() {
16 | super('WGem8x5aycBqFXWQ');
17 | }
18 |
19 | /**
20 |      * returns the available version
21 | * @param app
22 | * @param req
23 | * @param res
24 | */
25 | handler(app, req, res) {
26 | const nodeMillixVersion = config.NODE_MILLIX_VERSION;
27 |
28 | let hostname = 'millix.org';
29 | let application = 'client';
30 | if (nodeMillixVersion.includes('tangled')) {
31 | hostname = 'tangled.com';
32 | application = 'browser';
33 | }
34 |
35 | wallet.getCurrentWalletInfo().then(walletInfo => {
36 | const nodeID = server.nodeID;
37 | let payload = {
38 | version : nodeMillixVersion,
39 | network_initialized: walletInfo.network_initialized,
40 | node_id : nodeID
41 | };
42 |
43 | if (wallet.initialized && !_.isEmpty(wallet.getActiveWallets())) {
44 | payload.address_key_identifier = walletInfo.address_key_identifier;
45 | payload.address_version = walletInfo.address_version;
46 | payload.address_public_key = walletInfo.address_public_key;
47 | }
48 |
49 | const options = {
50 | hostname: hostname,
51 | port : 443,
52 | path : '/latest.php?referrer=' + JSON.stringify(payload),
53 | method : 'GET'
54 | };
55 |
56 | const request = https.request(options, result => {
57 | let body = [];
58 | result.on('data', (chunk) => {
59 | body.push(chunk);
60 | }).on('end', () => {
61 | let versionAvailable = Buffer.concat(body).toString().replace(/(\n)/gm, '');
62 |                     versionAvailable = versionAvailable.replace(/[^\d\.\d\.\d\.]/gm, '');
63 |
64 | const versionRegex = new RegExp(/^\d+\.\d+\.\d+$/, 'gm');
65 | if (!versionRegex.test(versionAvailable)) {
66 | versionAvailable = 0;
67 | }
68 |
69 | if (application === 'browser') {
70 | versionAvailable += '-tangled';
71 | }
72 |
73 | res.send({
74 | api_status : 'success',
75 | version_available : versionAvailable,
76 | application : application,
77 | node_millix_version: nodeMillixVersion,
78 | os_platform : os.platform(),
79 | os_arch : os.arch()
80 | });
81 | });
82 | });
83 |
84 | request.on('error', error => {
85 | res.send({
86 | api_status : 'fail',
87 | api_message: error
88 | });
89 | });
90 |
91 | request.end();
92 | }).catch(() => {
93 | res.send({
94 | api_status: 'fail'
95 | });
96 | });
97 | }
98 | }
99 |
100 |
101 | export default new _WGem8x5aycBqFXWQ();
102 |
--------------------------------------------------------------------------------
/api/XgxHmjINTEqANwtS/index.js:
--------------------------------------------------------------------------------
1 | import database from '../../database/database';
2 | import Endpoint from "../endpoint";
3 |
4 | /**
5 | * api remove_address_version
6 | */
7 |
8 | class _XgxHmjINTEqANwtS extends Endpoint {
9 | constructor() {
10 | super('XgxHmjINTEqANwtS');
11 | }
12 |
13 | /**
14 |      * removes the indicated address version from table address_version
15 | * @param app
16 | * @param req
17 | * @param res
18 | * @returns {*}
19 | */
20 | handler(app, req, res) {
21 | const {p0: version} = req.query;
22 | if (!version) {
23 | return res.status(400).send({
24 | api_status: 'fail',
25 | api_message: 'p0 is required'
26 | });
27 | }
28 | database.getRepository('address')
29 | .removeAddressVersion(version)
30 | .then(() => res.send({
31 |                 api_status: 'success'
32 | }))
33 | .catch(e => res.send({
34 | api_status: 'fail',
35 | api_message: `unexpected generic api error: (${e})`
36 | }));
37 | }
38 | }
39 |
40 | export default new _XgxHmjINTEqANwtS();
--------------------------------------------------------------------------------
/api/Xim7SaikcsHICvfQ/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import walletUtils from '../../core/wallet/wallet-utils';
3 | import database from '../../database/database';
4 |
5 |
6 | /**
7 | * api verify_address
8 | */
9 | class _Xim7SaikcsHICvfQ extends Endpoint {
10 | constructor() {
11 | super('Xim7SaikcsHICvfQ');
12 | this.addressRepository = database.getRepository('address');
13 | }
14 |
15 | _isValid(addressBase, addressVersion, addressKeyIdentifier) {
16 | if (!addressBase || !addressVersion || !addressKeyIdentifier) {
17 | return false;
18 | }
19 | else if (addressVersion.charAt(1) === 'b') { //using public key as address base
20 | return walletUtils.isValidAddress(addressKeyIdentifier);
21 | }
22 | return walletUtils.isValidAddress(addressBase) && walletUtils.isValidAddress(addressKeyIdentifier);
23 | }
24 |
25 | /**
26 |      * verifies if an address is valid and returns its components
27 | * @param app
28 | * @param req (p0: address)
29 | * @param res
30 | */
31 | handler(app, req, res) {
32 | const address = req.query.p0;
33 | if (!address) {
34 | return res.status(400).send({
35 | api_status : 'fail',
36 | api_message: 'p0 is required'
37 | });
38 | }
39 | try {
40 | const {
41 | address : addressBase,
42 | identifier: addressKeyIdentifier,
43 | version : addressVersion
44 | } = this.addressRepository.getAddressComponent(address);
45 | res.send({
46 | is_valid : this._isValid(addressBase, addressVersion, addressKeyIdentifier),
47 | address_base : addressBase,
48 | address_version : addressVersion,
49 | address_key_identifier: addressKeyIdentifier
50 | });
51 | }
52 | catch (e) {
53 | res.send({
54 | api_status : 'fail',
55 | api_message: `unexpected generic api error: (${e})`
56 | });
57 | }
58 | }
59 | }
60 |
61 |
62 | export default new _Xim7SaikcsHICvfQ();
63 |
--------------------------------------------------------------------------------
/api/ZFAYRM8LRtmfYp4Y/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import server from '../server';
3 |
4 |
5 | /**
6 | * api get_node_id
7 | */
8 | class _ZFAYRM8LRtmfYp4Y extends Endpoint {
9 | constructor() {
10 | super('ZFAYRM8LRtmfYp4Y');
11 | }
12 |
13 | /**
14 |      * identifies the node_id responding to the request at the provided IP
15 |      * address and api port
16 | * @param app
17 | * @param req
18 | * @param res
19 | * @returns {node_id}
20 | */
21 | handler(app, req, res) {
22 | res.send({node_id: server.nodeID});
23 | }
24 | }
25 |
26 |
27 | export default new _ZFAYRM8LRtmfYp4Y();
28 |
--------------------------------------------------------------------------------
/api/ZXJ3DqyLslyQETkX/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import crypto from 'crypto';
3 | import wallet from '../../core/wallet/wallet';
4 | import walletUtils from '../../core/wallet/wallet-utils';
5 |
6 |
7 | /**
8 | * api cryptography
9 | */
10 | class _ZXJ3DqyLslyQETkX extends Endpoint {
11 | constructor() {
12 | super('ZXJ3DqyLslyQETkX');
13 | }
14 |
15 | /**
16 | * depending on direction encrypt or decrypt provided string
17 | * @param app
18 | * @param req (p0: direction, p1: string)
19 | * @param res
20 | * @returns {*}
21 | */
22 | handler(app, req, res) {
23 | if (!req.query.p0 || !req.query.p1) {
24 | return res.status(400).send({
25 | api_status : 'fail',
26 | api_message: 'p0 and p1 are required'
27 | });
28 | }
29 |
30 | const direction = req.query.p0;
31 | const subject_string = req.query.p1;
32 | const algorithm = 'aes-256-cbc'; //Using AES encryption
33 |
34 | const extendedPrivateKey = wallet.getActiveWalletKey(wallet.getDefaultActiveWallet());
35 | const key = walletUtils.derivePrivateKey(extendedPrivateKey, 0, 0);
36 |
37 | let result_string = '';
38 | if (direction === 'encrypt') {
39 | const iv = crypto.randomBytes(16);
40 | let cipher = crypto.createCipheriv(algorithm, Buffer.from(key), iv);
41 | let encrypted = cipher.update(subject_string);
42 | encrypted = Buffer.concat([
43 | encrypted,
44 | cipher.final()
45 | ]);
46 | result_string = encrypted.toString('hex') + `[${iv.toString('hex')}]`;
47 | }
48 | else if (direction === 'decrypt') {
49 | const result_subject_string = subject_string.split('[');
50 | const iv_string = result_subject_string.pop().replace(']', '');
51 |
52 | const iv = Buffer.from(iv_string, 'hex');
53 | let encrypted_text = Buffer.from(result_subject_string.pop(), 'hex');
54 | let decipher = crypto.createDecipheriv(algorithm, Buffer.from(key), iv);
55 | let decrypted = decipher.update(encrypted_text);
56 | decrypted = Buffer.concat([
57 | decrypted,
58 | decipher.final()
59 | ]);
60 | result_string = decrypted.toString();
61 | }
62 |
63 | res.send({
64 | result: result_string
65 | });
66 | }
67 | }
68 |
69 |
70 | export default new _ZXJ3DqyLslyQETkX();
71 |
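Note: the encrypt direction above returns the ciphertext followed by the IV in square brackets, both hex encoded. A minimal client-side sketch of splitting that string back into its parts; the helper name is illustrative and is not part of this repository.

// split '<ciphertext hex>[<iv hex>]' as produced by the encrypt direction above
// (illustrative helper, not part of this repository)
function parseEncryptedResult(resultString) {
    const ivStart = resultString.lastIndexOf('[');
    return {
        ciphertext: Buffer.from(resultString.slice(0, ivStart), 'hex'),
        iv        : Buffer.from(resultString.slice(ivStart + 1, -1), 'hex') // drop the trailing ']'
    };
}

// e.g. parseEncryptedResult('9f2a...[00112233445566778899aabbccddeeff]')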
--------------------------------------------------------------------------------
/api/aSiBLKkEsHI9lDr3/index.js:
--------------------------------------------------------------------------------
1 | import database from '../../database/database';
2 | import Endpoint from '../endpoint';
3 | import path from 'path';
4 | import os from 'os';
5 | import config from '../../core/config/config';
6 | import server from '../server';
7 | import walletUtils from '../../core/wallet/wallet-utils';
8 | import network from '../../net/network';
9 | import ntp from '../../core/ntp';
10 |
11 |
12 | /**
13 | * api add_shard
14 | */
15 | class _aSiBLKkEsHI9lDr3 extends Endpoint {
16 | constructor() {
17 | super('aSiBLKkEsHI9lDr3');
18 | }
19 |
20 | /**
21 | * adds a new shard and returns the record
22 | * @param app
23 | * @param req (p0: shard_name, p1: shard_type, p2:
24 | * is_required, p3: schema_name, p4: schema_path, p5:
25 | * shard_attribute={})
26 | * @param res
27 | */
28 | handler(app, req, res) {
29 | if (!req.query.p0 || !req.query.p1 || req.query.p2 === undefined) {
30 | return res.status(400).send({
31 | api_status : 'fail',
32 | api_message: 'p0, p1 and p2 are required'
33 | });
34 | }
35 |
36 | const shardDate = Math.floor(Date.now() / 1000);
37 | const nodeID = server.nodeID;
38 | const schemaPath = req.query.p4 || path.join(config.DATABASE_CONNECTION.FOLDER, 'shard/');
39 | const shardName = req.query.p0;
40 | const shardType = req.query.p1;
41 | const isRequired = !!req.query.p2;
42 | const shardAttribute = req.query.p5 ? JSON.parse(req.query.p5) : {};
43 | const shardInfo = walletUtils.getShardInfo(nodeID, shardDate, shardType, server.nodePrivateKey);
44 | const schemaName = req.query.p3 || shardInfo.shard_id + '.sqlite';
45 |
46 | shardInfo['schema_path'] = schemaPath;
47 | shardInfo['schema_name'] = schemaName;
48 |
49 | const shardRepository = database.getRepository('shard');
50 | const nodeRepository = database.getRepository('node');
51 | shardRepository.addShard(shardInfo.shard_id, shardName, shardType, schemaName, schemaPath, isRequired, nodeID, shardDate, shardInfo.node_signature)
52 | .then(() => database.addShard(shardInfo, true))
53 | .then(() => nodeRepository.getNodeAttribute(nodeID, 'shard_' + shardType))
54 | .then(shardAttributeList => new Promise(resolve => {
55 | if (shardAttributeList) {
56 | shardAttributeList = JSON.parse(shardAttributeList);
57 | }
58 | else {
59 | shardAttributeList = [];
60 | }
61 | nodeRepository.addNodeAttribute(network.nodeID, 'shard_' + shardType, JSON.stringify([
62 | ...shardAttributeList,
63 | {
64 | 'shard_id' : shardInfo.shard_id,
65 | 'transaction_count': 0,
66 | 'update_date' : Math.floor(ntp.now().getTime() / 1000),
67 | 'is_required' : true,
68 | ...shardAttribute
69 | }
70 | ])).then(() => resolve()).catch(() => resolve());
71 | }))
72 | .then(() => shardRepository.getShard({shard_id: shardInfo.shard_id}))
73 | .then(shardData => res.send(shardData))
74 | .catch(e => res.send({
75 | api_status : 'fail',
76 | api_message: `unexpected generic api error: (${e})`
77 | }));
78 | }
79 | }
80 |
81 |
82 | export default new _aSiBLKkEsHI9lDr3();
83 |
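For reference, the value the handler above stores under the 'shard_<shard_type>' node attribute is a JSON array whose entries are shaped roughly as follows; the field values are placeholders, not data from this repository.

// illustrative shape of one entry in the 'shard_<shard_type>' node attribute
// written by the handler above; values are placeholders
const exampleShardAttributeEntry = {
    shard_id         : '<new shard id>',
    transaction_count: 0,
    update_date      : 1700000000, // unix timestamp in seconds
    is_required      : true
    // ...plus any fields passed through p5 (shard_attribute)
};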
--------------------------------------------------------------------------------
/api/endpoint.js:
--------------------------------------------------------------------------------
1 | import database from '../database/database';
2 | import walletUtils from '../core/wallet/wallet-utils';
3 | import server from './server';
4 |
5 | export default class Endpoint {
6 | constructor(endpoint) {
7 | this.endpoint = endpoint;
8 | this.baseURL = '/api/:nodeID/:nodeSignature/';
9 | }
10 |
11 | handler() {
12 |         throw new Error('You must implement the handler method!');
13 | }
14 |
15 | onRequest(app, permission, req, res) {
16 | if (permission.require_identity) {
17 | const {nodeID, nodeSignature} = req.params;
18 | database.getRepository('node')
19 | .getNodeAttribute(nodeID, 'node_public_key')
20 | .then(publicKey => {
21 | if (!publicKey) {
22 | return res.status(401).send({
23 | api_status : 'fail',
24 | api_message: 'unknown node identity'
25 | });
26 | }
27 | else if (!walletUtils.verify(publicKey, nodeSignature, server.nodeID)) {
28 | return res.status(401).send({
29 | api_status : 'fail',
30 | api_message: 'invalid node identity'
31 | });
32 | }
33 |
34 | if (permission.private && server.nodeID !== nodeID) {
35 | return res.status(401).send({
36 | api_status : 'fail',
37 | api_message: 'permission denied'
38 | });
39 | }
40 |
41 | this.handler(app, req, res);
42 | })
43 | .catch(e => res.send({
44 | api_status : 'fail',
45 | api_message: `unexpected generic api error: (${e})`
46 | }));
47 | }
48 | else {
49 | this.handler(app, req, res);
50 | }
51 | }
52 |
53 | register(app, permission) {
54 | app.post(this.baseURL + this.endpoint, this.onRequest.bind(this, app, permission));
55 | app.get(this.baseURL + this.endpoint, this.onRequest.bind(this, app, permission));
56 | }
57 | }
58 |
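Every endpoint is therefore mounted under /api/:nodeID/:nodeSignature/<endpoint> for both GET and POST. A minimal sketch of how a client might call one of these routes, assuming the API is served over https; the host, port, node ID and signature are placeholders, and ZFAYRM8LRtmfYp4Y (get_node_id) is used only because it takes no parameters.

// minimal client sketch; host, port, node ID and signature are placeholders
const https = require('https');

const nodeID        = '<caller node id>';
const nodeSignature = '<signature presented by the caller>';
const url           = `https://localhost:15555/api/${nodeID}/${nodeSignature}/ZFAYRM8LRtmfYp4Y`;

// rejectUnauthorized is disabled in this sketch only, to accept a self-signed certificate
https.get(url, {rejectUnauthorized: false}, response => {
    const body = [];
    response.on('data', chunk => body.push(chunk));
    response.on('end', () => console.log(JSON.parse(Buffer.concat(body).toString())));
}).on('error', error => console.error(error));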
--------------------------------------------------------------------------------
/api/epOW5kdvb12QdgsV/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import mutex from '../../core/mutex';
3 |
4 |
5 | /**
6 | * api reset_backlog
7 | */
8 | class _epOW5kdvb12QdgsV extends Endpoint {
9 | constructor() {
10 | super('epOW5kdvb12QdgsV');
11 | }
12 |
13 | /**
14 | * reset backlog
15 | * @param app
16 | * @param req
17 | * @param res
18 | */
19 | handler(app, req, res) {
20 | mutex.resetBacklog();
21 |
22 | res.send({
23 | api_status: 'success'
24 | });
25 | }
26 | }
27 |
28 |
29 | export default new _epOW5kdvb12QdgsV();
30 |
--------------------------------------------------------------------------------
/api/hMrav9QMiMyLQosB/index.js:
--------------------------------------------------------------------------------
1 | import database from '../../database/database';
2 | import Endpoint from '../endpoint';
3 |
4 |
5 | /**
6 | * api new_address_version
7 | */
8 | class _hMrav9QMiMyLQosB extends Endpoint {
9 | constructor() {
10 | super('hMrav9QMiMyLQosB');
11 | }
12 |
13 | /**
14 |      * inserts a new record into table address_version. is_main_network
15 | * indicates whether the new address version is on the main network or on a
16 | * test network. if the new record indicates is_default = true the
17 | * previous address_version record set to is_default = true is updated to
18 | * is_default = false. there can only be a single record for each network
19 | * that is set to is_default = true
20 | * @param app
21 | * @param req (p0: version, p1: regex_pattern, p2:
22 | * is_main_network, p3: is_default)
23 | * @param res
24 | * @returns {*}
25 | */
26 | handler(app, req, res) {
27 | const {p0: version, p1: isMainNetwork, p2: regexPattern, p3: isDefault} = req.query;
28 | if (!version || isMainNetwork === undefined || !regexPattern || isDefault === undefined) {
29 | return res.status(400).send({
30 | api_status : 'fail',
31 | api_message: 'p0, p1, p2 and p3 are required'
32 | });
33 | }
34 |
35 | const addressRepository = database.getRepository('address');
36 | addressRepository.addAddressVersion(version, !!parseInt(isMainNetwork), regexPattern, !!parseInt(isDefault))
37 | .then(() => res.send({api_status: 'success'}))
38 | .catch(e => res.send({
39 | api_status : 'fail',
40 | api_message: `unexpected generic api error: (${e})`
41 | }));
42 | }
43 | }
44 |
45 |
46 | export default new _hMrav9QMiMyLQosB();
47 |
--------------------------------------------------------------------------------
/api/hXwPQrVhLEALFsIJ/index.js:
--------------------------------------------------------------------------------
1 | import database from '../../database/database';
2 | import Endpoint from '../endpoint';
3 |
4 |
5 | /**
6 | * api list_config_public
7 | */
8 | class _hXwPQrVhLEALFsIJ extends Endpoint {
9 | constructor() {
10 | super('hXwPQrVhLEALFsIJ');
11 | this.publicConfigNameList = new Set([
12 | 'NODE_PORT',
13 | 'NODE_PORT_API',
14 | 'NODE_INITIAL_LIST',
15 | 'WALLET_TRANSACTION_DEFAULT_VERSION',
16 | 'WALLET_TRANSACTION_REFRESH_VERSION',
17 | 'WALLET_TRANSACTION_SUPPORTED_VERSION',
18 | 'MILLIX_CIRCULATION',
19 | 'NODE_MILLIX_VERSION',
20 | 'PEER_ROTATION_CONFIG'
21 | ]);
22 | }
23 |
24 | /**
25 |      * returns public config values
26 | * @param app
27 | * @param req (p0: type, p1: status, p2: order_by="create_date desc", p3:
28 | * record_limit=1000)
29 | * @param res
30 | */
31 | handler(app, req, res) {
32 | const orderBy = req.query.p2 || 'create_date desc';
33 | const limit = parseInt(req.query.p3) || 1000;
34 | const configurationRepository = database.getRepository('config');
35 | configurationRepository.list({
36 | type : req.query.p0,
37 | status: req.query.p1
38 | }, orderBy, limit)
39 | .then(configurations => {
40 | const publicConfigs = [];
41 | configurations.forEach(configuration => {
42 | this.publicConfigNameList.has(configuration.config_name) && publicConfigs.push(configuration);
43 | });
44 | res.send(publicConfigs);
45 | })
46 | .catch(e => res.send({
47 | api_status : 'fail',
48 | api_message: `unexpected generic api error: (${e})`
49 | }));
50 | }
51 | }
52 |
53 |
54 | export default new _hXwPQrVhLEALFsIJ();
55 |
--------------------------------------------------------------------------------
/api/hbBmFhIpJS87W1Fy/index.js:
--------------------------------------------------------------------------------
1 | import database from '../../database/database';
2 | import Endpoint from '../endpoint';
3 |
4 |
5 | /**
6 | * api get_transaction_input
7 | */
8 | class _hbBmFhIpJS87W1Fy extends Endpoint {
9 | constructor() {
10 | super('hbBmFhIpJS87W1Fy');
11 | }
12 |
13 | /**
14 | * returns a single record from table transaction_input as indicated by
15 | * transaction_id and input_position in the indicated shard
16 | * @param app
17 | * @param req (p0: transaction_id, p1: input_position,
18 | * p2: shard_id)
19 | * @param res
20 | * @returns {*}
21 | */
22 | handler(app, req, res) {
23 | if (!req.query.p0 || !req.query.p1 || !req.query.p2) {
24 | return res.status(400).send({
25 | api_status : 'fail',
26 | api_message: 'p0, p1, p2 are required'
27 | });
28 | }
29 |
30 | database.firstShardORShardZeroRepository('transaction', req.query.p2, transactionRepository => {
31 | return transactionRepository.getTransactionInput({
32 | transaction_id: req.query.p0,
33 | input_position: req.query.p1,
34 | shard_id : req.query.p2
35 | });
36 | }).then(transactionInput => {
37 | res.send(transactionInput || {
38 | api_status : 'fail',
39 | api_message: `the transaction input ${req.query.p1} of transaction id ${req.query.p0} was not found at shard with id ${req.query.p2}`
40 | });
41 | }).catch(e => res.send({
42 | api_status : 'fail',
43 | api_message: `unexpected generic api error: (${e})`
44 | }));
45 | }
46 | }
47 |
48 |
49 | export default new _hbBmFhIpJS87W1Fy();
50 |
--------------------------------------------------------------------------------
/api/kC5N9Tz06b2rA4Pg/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import mutex from '../../core/mutex';
3 | import wallet from '../../core/wallet/wallet';
4 |
5 |
6 | /**
7 | * api send_aggregation_transaction_from_wallet
8 | */
9 | class _kC5N9Tz06b2rA4Pg extends Endpoint {
10 | constructor() {
11 | super('kC5N9Tz06b2rA4Pg');
12 | }
13 |
14 | /**
15 | * submits a new aggregation transaction from the active wallet which
16 |      * consolidates unspent outputs and allows spending more funds in fewer
17 | * transactions. this API builds the tx payload and submits it
18 | * @param app
19 | * @param req
20 | * @param res
21 | * @returns {*}
22 | */
23 | handler(app, req, res) {
24 | mutex.lock(['submit_transaction'], (unlock) => {
25 | wallet.aggregateOutputs()
26 | .then(transaction => {
27 | unlock();
28 | res.send({
29 | api_status: 'success',
30 | transaction
31 | });
32 | })
33 | .catch(e => {
34 | console.log(`[api ${this.endpoint}] error: ${e}`);
35 | unlock();
36 | res.send({
37 | api_status : 'fail',
38 | api_message: `unexpected generic api error: (${e.message})`
39 | });
40 | });
41 | });
42 | }
43 | }
44 |
45 |
46 | export default new _kC5N9Tz06b2rA4Pg();
47 |
--------------------------------------------------------------------------------
/api/kIoe20LWh2aw3CAu/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import config, {DATABASE_CONNECTION} from '../../core/config/config';
3 |
4 |
5 | /**
6 | * api get_storage_config
7 | */
8 | class _kIoe20LWh2aw3CAu extends Endpoint {
9 | constructor() {
10 | super('kIoe20LWh2aw3CAu');
11 | }
12 |
13 | /**
14 | * returns storage related config
15 | * @param app
16 | * @param req
17 | * @param res
18 | */
19 | handler(app, req, res) {
20 | res.send({
21 | api_status : 'success',
22 | file_dir : config.STORAGE_CONNECTION.FOLDER,
23 | database_dir: DATABASE_CONNECTION.FOLDER
24 | });
25 | }
26 | }
27 |
28 |
29 | export default new _kIoe20LWh2aw3CAu();
30 |
--------------------------------------------------------------------------------
/api/l4kaEhMnhjB5yseq/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import database from '../../database/database';
3 |
4 |
5 | /**
6 | * api list_transaction
7 | */
8 | class _l4kaEhMnhjB5yseq extends Endpoint {
9 | constructor() {
10 | super('l4kaEhMnhjB5yseq');
11 | }
12 |
13 | /**
14 | *
15 | * @param app
16 | * @param req (p0: date_begin, p1: date_end, p2: node_id_origin, p3:
17 | * is_stable, p4: is_parent, p5: is_timeout, p6: create_date_begin, p7:
18 | * create_date_end, p8: status, p9: order_by="create_date desc", p10:
19 | * record_limit=1000, p11: shard_id)
20 | * @param res
21 | */
22 | handler(app, req, res) {
23 | const orderBy = req.query.p9 || 'create_date desc';
24 | const limit = parseInt(req.query.p10) || 1000;
25 | const shardID = req.query.p11 || undefined;
26 |
27 | database.applyShards((dbShardID) => {
28 | const transactionRepository = database.getRepository('transaction', dbShardID);
29 | if (!transactionRepository) {
30 | return Promise.resolve([]);
31 | }
32 | return transactionRepository.listTransactions({
33 | transaction_date_begin: req.query.p0,
34 | transaction_date_end : req.query.p1,
35 | node_id_origin : req.query.p2,
36 | is_stable : req.query.p3,
37 | is_parent : req.query.p4,
38 | is_timeout : req.query.p5,
39 | create_date_begin : req.query.p6,
40 | create_date_end : req.query.p7,
41 | status : req.query.p8,
42 | shard_id : shardID
43 | }, orderBy, limit);
44 | }, orderBy, limit, shardID).then(data => {
45 | data.forEach(row => row['transaction_date'] = Math.floor(row.transaction_date.getTime() / 1000));
46 | res.send(data);
47 | }).catch(e => res.send({
48 | api_status : 'fail',
49 | api_message: `unexpected generic api error: (${e})`
50 | }));
51 | }
52 | }
53 |
54 |
55 | export default new _l4kaEhMnhjB5yseq();
56 |
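The handler relies on database.applyShards to run the same repository query against every shard and merge the results. Conceptually the aggregation amounts to something like the sketch below; this is not the actual database.applyShards implementation, and the shard IDs and comparator are placeholders.

// conceptual sketch only; NOT the implementation of database.applyShards
function applyToShards(shardIDList, getListForShard, compare, limit) {
    return Promise.all(shardIDList.map(shardID => getListForShard(shardID)))
                  .then(resultLists => resultLists.flat()
                                                  .sort(compare)
                                                  .slice(0, limit));
}

// usage sketch with placeholder shards and a 'create_date desc' comparator
applyToShards(['shard_zero', 'shard_a'],
    shardID => Promise.resolve([{shard_id: shardID, create_date: Date.now()}]),
    (a, b) => b.create_date - a.create_date,
    1000).then(rows => console.log(rows));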
--------------------------------------------------------------------------------
/api/pIQZEKY4T9vttBUk/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import wallet from '../../core/wallet/wallet';
3 | import _ from 'lodash';
4 | import services from '../../core/services/services';
5 |
6 |
7 | /**
8 | * api end_session
9 | */
10 | class _pIQZEKY4T9vttBUk extends Endpoint {
11 | constructor() {
12 | super('pIQZEKY4T9vttBUk');
13 | }
14 |
15 | /**
16 | * ends the active wallet session in the node
17 | * @param app
18 | * @param req
19 | * @param res
20 | * @returns {*}
21 | */
22 | handler(app, req, res) {
23 | if (!wallet.initialized || _.isEmpty(wallet.getActiveWallets())) {
24 | return res.send({
25 | api_status : 'fail',
26 | api_message: 'wallet not loaded'
27 | });
28 | }
29 |
30 | services.stop();
31 | return res.send({
32 | address_key_identifier: wallet.defaultKeyIdentifier,
33 | locked : true
34 | });
35 | }
36 | }
37 |
38 |
39 | export default new _pIQZEKY4T9vttBUk();
40 |
--------------------------------------------------------------------------------
/api/qRHogKQ1Bb7OT4N9/index.js:
--------------------------------------------------------------------------------
1 | import network from "../../net/network";
2 | import Endpoint from "../endpoint";
3 |
4 | /**
5 | * api get_node_public_ip
6 | */
7 | class _qRHogKQ1Bb7OT4N9 extends Endpoint {
8 | constructor() {
9 | super('qRHogKQ1Bb7OT4N9');
10 | }
11 |
12 | handler(app, req, res) {
13 | res.send({
14 | node_public_ip: network.nodePublicIp
15 |         });
16 | }
17 | }
18 |
19 | export default new _qRHogKQ1Bb7OT4N9();
--------------------------------------------------------------------------------
/api/quIoaHsl8h6IwyEI/index.js:
--------------------------------------------------------------------------------
1 | import database from '../../database/database';
2 | import Endpoint from '../endpoint';
3 |
4 |
5 | /**
6 | * api list_keychain_address
7 | */
8 | class _quIoaHsl8h6IwyEI extends Endpoint {
9 | constructor() {
10 | super('quIoaHsl8h6IwyEI');
11 | }
12 |
13 | /**
14 | * returns records from table keychain_address. it returns the newest
15 | * records by default.
16 | * @param app
17 | * @param req (p0: address_key_identifier, p1: wallet_id, p2: is_change,
18 | * p3: order_by="create_date desc", p4: record_limit:1000)
19 | * @param res
20 | */
21 | handler(app, req, res) {
22 | const orderBy = req.query.p3 || 'create_date desc';
23 | const limit = parseInt(req.query.p4) || 1000;
24 | const keychainRepository = database.getRepository('keychain');
25 | keychainRepository.listWalletAddresses({
26 | address_key_identifier : req.query.p0,
27 | wallet_id : req.query.p1,
28 | is_change : req.query.p2,
29 | 'ka.status': 1
30 | }, orderBy, limit)
31 | .then((addresses) => {
32 | res.send(addresses);
33 | })
34 | .catch(e => res.send({
35 | api_status : 'fail',
36 | api_message: `unexpected generic api error: (${e})`
37 | }));
38 | }
39 | }
40 |
41 |
42 | export default new _quIoaHsl8h6IwyEI();
43 |
--------------------------------------------------------------------------------
/api/w9UTTA7NXnEDUXhe/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import database from '../../database/database';
3 |
4 |
5 | /**
6 | * api list_transaction_history
7 | */
8 | class _w9UTTA7NXnEDUXhe extends Endpoint {
9 | constructor() {
10 | super('w9UTTA7NXnEDUXhe');
11 | }
12 |
13 | /**
14 | * list transaction history for a given wallet
15 | * @param app
16 | * @param req (p0: address_key_identifier)
17 | * @param res
18 | * @returns {*}
19 | */
20 | handler(app, req, res) {
21 | let addressKeyIdentifier;
22 | if (req.method === 'GET') {
23 | if (!req.query.p0) {
24 | return res.status(400).send({
25 | api_status : 'fail',
26 | api_message: 'p0 is required'
27 | });
28 | }
29 | else {
30 | addressKeyIdentifier = req.query.p0;
31 | }
32 | }
33 | else {
34 | if (!req.body.p0) {
35 | return res.status(400).send({
36 | api_status : 'fail',
37 | api_message: 'p0 is required'
38 | });
39 | }
40 | else {
41 | addressKeyIdentifier = req.body.p0;
42 | }
43 | }
44 |
45 | return database.applyShards((shardID) => {
46 | return database.getRepository('transaction', shardID)
47 | .getTransactionsByAddressKeyIdentifier(addressKeyIdentifier);
48 | }, 'transaction_date desc').then(transactions => {
49 | res.send(transactions);
50 | }).catch(e => res.send({
51 | api_status : 'fail',
52 | api_message: `unexpected generic api error: (${e})`
53 | }));
54 | }
55 | }
56 |
57 |
58 | export default new _w9UTTA7NXnEDUXhe();
59 |
--------------------------------------------------------------------------------
/api/wDyC195wgjPjM2Ut/index.js:
--------------------------------------------------------------------------------
1 | import database from '../../database/database';
2 | import Endpoint from '../endpoint';
3 | import peer from '../../net/peer';
4 | import walletSync from '../../core/wallet/wallet-sync';
5 |
6 |
7 | /**
8 | * api get_transaction
9 | */
10 | class _wDyC195wgjPjM2Ut extends Endpoint {
11 | constructor() {
12 | super('wDyC195wgjPjM2Ut');
13 | }
14 |
15 | /**
16 | * returns a single record from table transaction as indicated by
17 | * transaction_id in the indicated shard
18 | * @param app
19 | * @param req (p0: transaction_id, p1: shard_id)
20 | * @param res
21 | * @returns {*}
22 | */
23 | handler(app, req, res) {
24 | if (!req.query.p0 || !req.query.p1) {
25 | return res.status(400).send({
26 | api_status : 'fail',
27 | api_message: 'p0 and p1 are required'
28 | });
29 | }
30 | database.firstShardORShardZeroRepository('transaction', req.query.p1, transactionRepository => {
31 | return transactionRepository.getTransaction(req.query.p0, req.query.p1);
32 | }).then(transaction => {
33 | if (!transaction) {
34 | peer.transactionSyncRequest(req.query.p0).then(_ => _).catch(_ => _);
35 | return this._getErrorStatus(req.query.p0)
36 | .then(errorStatus => res.send({
37 | api_status : errorStatus,
38 | api_message: `the transaction with id ${req.query.p0} was not found at shard ${req.query.p1}`
39 | }));
40 | }
41 |
42 | if (!!transaction.transaction_date) {
43 | transaction['transaction_date'] = Math.floor(transaction.transaction_date.getTime() / 1000);
44 | }
45 | res.send(transaction);
46 | }).catch(e => res.send({
47 | api_status : 'fail',
48 | api_message: `unexpected generic api error: (${e})`
49 | }));
50 | }
51 |
52 | _getErrorStatus(transactionID) {
53 | return walletSync.getTransactionData(transactionID)
54 | .then((data) => {
55 | if (!data) {
56 | return 'fail:not_found';
57 | }
58 | else if (data.type === 'sync') {
59 | return 'fail:not_found:pending';
60 | }
61 | else if (data.type === 'unresolved') {
62 | return 'fail:not_found:timeout';
63 | }
64 | else {
65 | throw new Error('unexpected error');
66 | }
67 | });
68 | }
69 | }
70 |
71 |
72 | export default new _wDyC195wgjPjM2Ut();
73 |
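When the transaction is not yet known locally, the handler triggers a sync request and reports one of three fail:not_found statuses derived from the pending sync data. A small client-side sketch of branching on them; the helper is illustrative and not part of this repository.

// illustrative client-side branching on the statuses returned above
function describeMissingTransaction(apiStatus) {
    switch (apiStatus) {
        case 'fail:not_found':         // nothing known about the transaction yet
            return 'unknown transaction';
        case 'fail:not_found:pending': // a sync request is queued
            return 'transaction sync pending';
        case 'fail:not_found:timeout': // previous sync attempts did not resolve
            return 'transaction sync timed out';
        default:
            return 'transaction available or other failure';
    }
}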
--------------------------------------------------------------------------------
/api/xGaf7vbfY15TGsSd/index.js:
--------------------------------------------------------------------------------
1 | import database from '../../database/database';
2 | import Endpoint from '../endpoint';
3 | import genesisConfig from '../../core/genesis/genesis-config';
4 | import _ from 'lodash';
5 |
6 |
7 | /**
8 | * api get_known_address_balance
9 | */
10 | class _xGaf7vbfY15TGsSd extends Endpoint {
11 | constructor() {
12 | super('xGaf7vbfY15TGsSd');
13 | }
14 |
15 | /**
16 | * returns the available (stable) balance and pending (unstable) balance of
17 | * all known addresses
18 | * @param app
19 | * @param req
20 | * @param res
21 | * @returns {*}
22 | */
23 | handler(app, req, res) {
24 | const transactionRepository = database.getRepository('transaction', genesisConfig.genesis_shard_id);
25 | transactionRepository.getAllAddressBalance(true)
26 | .then(stableBalance => {
27 | const balance = {};
28 | return transactionRepository.getAllAddressBalance(false)
29 | .then(pendingBalance => {
30 | _.each(stableBalance, item => balance[item.address] = {
31 | ...item,
32 | balance_pending: _.find(pendingBalance, {address: item.address})?.balance_pending || 0
33 | });
34 | _.each(pendingBalance, item => {
35 | if (!balance[item.address]) {
36 | balance[item.address] = {
37 | address : item.address,
38 | balance_stable : 0,
39 | balance_pending: item.balance_pending
40 | };
41 | }
42 | else {
43 | balance[item.address]['balance_pending'] = item.balance_pending;
44 | }
45 | });
46 | res.send(_.values(balance));
47 | });
48 | })
49 | .catch(e => res.send({
50 | api_status : 'fail',
51 | api_message: `unexpected generic api error: (${e})`
52 | }));
53 | }
54 | }
55 |
56 |
57 | export default new _xGaf7vbfY15TGsSd();
58 |
--------------------------------------------------------------------------------
/api/yefPsK2TvkZmC6M4/index.js:
--------------------------------------------------------------------------------
1 | import services from '../../core/services/services';
2 | import Endpoint from '../endpoint';
3 |
4 |
5 | /**
6 | * api toggle_service_node
7 | */
8 | class _yefPsK2TvkZmC6M4 extends Endpoint {
9 | constructor() {
10 | super('yefPsK2TvkZmC6M4');
11 | }
12 |
13 | /**
14 | * toggles the node service between running (true) and not running (false)
15 | * @param app
16 | * @param req (p0: is_running)
17 | * @param res
18 | * @returns {*}
19 | */
20 | handler(app, req, res) {
21 | if (!req.query.p0) {
22 | return res.status(400).send({
23 | api_status : 'fail',
24 | api_message: 'p0 is required'
25 | });
26 | }
27 | const isRun = req.query.p0 === 'true';
28 | if (isRun && !services.initialized) {
29 | services.initialize({initialize_wallet_event: true});
30 | res.send({api_status: 'success'});
31 | }
32 | else if (!isRun && services.initialized) {
33 | services.stop();
34 | res.send({api_status: 'success'});
35 | }
36 | else {
37 | res.send({
38 | api_status : 'fail',
39 | api_message: 'not updated'
40 | });
41 | }
42 | }
43 | }
44 |
45 |
46 | export default new _yefPsK2TvkZmC6M4();
47 |
--------------------------------------------------------------------------------
/api/ywTmt3C0nwk5k4c7/index.js:
--------------------------------------------------------------------------------
1 | import database from '../../database/database';
2 | import Endpoint from '../endpoint';
3 |
4 |
5 | /**
6 | * api get_keychain_address
7 | */
8 | class _ywTmt3C0nwk5k4c7 extends Endpoint {
9 | constructor() {
10 | super('ywTmt3C0nwk5k4c7');
11 | }
12 |
13 | /**
14 | * returns the record for the identified address from table keychain and
15 | * keychain_address
16 | * @param app
17 | * @param req (p0: address)
18 | * @param res
19 | * @returns {*}
20 | */
21 | handler(app, req, res) {
22 | if (!req.query.p0) {
23 | return res.status(400).send({
24 | api_status : 'fail',
25 | api_message: 'p0 is required'
26 | });
27 | }
28 | const keychainRepository = database.getRepository('keychain');
29 | keychainRepository.getAddress(req.query.p0)
30 | .then(address => {
31 | res.send(address || {
32 | api_status : 'fail',
33 | api_message: `the keychain address ${req.query.p0} was not found`
34 | });
35 | })
36 | .catch(e => res.send({
37 | api_status : 'fail',
38 | api_message: `unexpected generic api error: (${e})`
39 | }));
40 | }
41 | }
42 |
43 |
44 | export default new _ywTmt3C0nwk5k4c7();
45 |
--------------------------------------------------------------------------------
/api/yyCtgjuFu9mx0edg/index.js:
--------------------------------------------------------------------------------
1 | import Endpoint from '../endpoint';
2 | import database from '../../database/database';
3 | import transactionDataUtils from '../../core/utils/transaction-data-utils';
4 |
5 |
6 | /**
7 | * api get_transaction_output_attribute_received
8 | */
9 | class _yyCtgjuFu9mx0edg extends Endpoint {
10 | constructor() {
11 | super('yyCtgjuFu9mx0edg');
12 | this.normalizationRepository = database.getRepository('normalization');
13 | }
14 |
15 | /**
16 | *
17 | * @param app
18 | * @param req (p0: transaction_id, p1: shard_id, p2:
19 | * attribute_type_id, p3: data_type)
20 | * @param res
21 | */
22 | handler(app, req, res) {
23 | const transactionID = req.query.p0 || undefined;
24 | const shardID = req.query.p1 || undefined;
25 | const dataType = req.query.p3 || undefined;
26 | const attributeTypeId = req.query.p2 || undefined;
27 |
28 | const limit = undefined;
29 | const orderBy = undefined;
30 |
31 |
32 | database.applyShards((dbShardID) => {
33 | const transactionRepository = database.getRepository('transaction', dbShardID);
34 | if (!transactionRepository) {
35 | return Promise.resolve([]);
36 | }
37 |
38 | return transactionRepository.listTransactionOutput({
39 | 'transaction_output.transaction_id' : transactionID,
40 | 'output_position!' : -1, //discard fee output
41 | '`transaction`.shard_id' : shardID
42 | }, orderBy, limit);
43 | }, orderBy, limit, shardID)
44 | .then(outputList => {
45 | transactionDataUtils.processOutputList(outputList, attributeTypeId, orderBy, limit, shardID, dataType).then(data => {
46 | res.send(data.pop());
47 | });
48 | })
49 | .catch(e => res.send({
50 | api_status : 'fail',
51 | api_message: `unexpected generic api error: (${e})`
52 | }));
53 | }
54 | }
55 |
56 |
57 | export default new _yyCtgjuFu9mx0edg();
58 |
--------------------------------------------------------------------------------
/api/zLsiAkocn90e3K6R/index.js:
--------------------------------------------------------------------------------
1 | import database from '../../database/database';
2 | import Endpoint from '../endpoint';
3 | import _ from 'lodash';
4 |
5 |
6 | /**
7 | * api get_address_balance
8 | */
9 | class _zLsiAkocn90e3K6R extends Endpoint {
10 | constructor() {
11 | super('zLsiAkocn90e3K6R');
12 | }
13 |
14 | /**
15 | * returns the available (stable) balance and pending (unstable) balance of
16 | * an address
17 | * @param app
18 | * @param req (p0: address)
19 | * @param res
20 | * @returns {*}
21 | */
22 | handler(app, req, res) {
23 | if (!req.query.p0) {
24 | return res.status(400).send({
25 | api_status : 'fail',
26 | api_message: 'p0 is required'
27 | });
28 | }
29 |
30 | database.applyShards((shardID) => {
31 | const transactionRepository = database.getRepository('transaction', shardID);
32 | return transactionRepository.getAddressBalance(req.query.p0, true);
33 | }).then(balances => _.sum(balances)).then(stable => {
34 | return database.applyShards((shardID) => {
35 | const transactionRepository = database.getRepository('transaction', shardID);
36 | return transactionRepository.getAddressBalance(req.query.p0, false);
37 | }).then(balances => _.sum(balances)).then(unstable => res.send({
38 | stable,
39 | unstable
40 | }));
41 | }).catch(e => res.send({
42 | api_status : 'fail',
43 | api_message: `unexpected generic api error: (${e})`
44 | }));
45 | }
46 | }
47 |
48 |
49 | export default new _zLsiAkocn90e3K6R();
50 |
--------------------------------------------------------------------------------
/bootstrap.json:
--------------------------------------------------------------------------------
1 | {
2 | "MODE_TEST_NETWORK": false
3 | }
4 |
--------------------------------------------------------------------------------
/core/cache.js:
--------------------------------------------------------------------------------
1 | import _ from 'lodash';
2 | import task from './task';
3 |
4 |
5 | class Cache {
6 | constructor() {
7 | this.initialized = false;
8 | this.jobRunning = false;
9 | this.cache = {};
10 | }
11 |
12 | _purgeCache() {
13 | const now = Date.now();
14 | _.each(_.keys(this.cache), store => {
15 | _.each(_.keys(this.cache[store]), key => {
16 | if (now > this.cache[store][key].purge_time) {
17 | delete this.cache[store][key];
18 | }
19 | });
20 | });
21 | }
22 |
23 | removeCacheItem(store, key) {
24 | if (this.cache[store] && this.cache[store][key]) {
25 | delete this.cache[store][key];
26 | }
27 | }
28 |
29 | refreshCacheTime(store, key, cacheTime = 30000) {
30 | if (this.cache[store] && this.cache[store][key]) {
31 | this.cache[store][key].purge_time = Date.now() + cacheTime;
32 | }
33 | }
34 |
35 | setCacheItem(store, key, value, cacheTime = 30000) {
36 | if (!this.cache[store]) {
37 | this.cache[store] = {};
38 | }
39 |
40 | this.cache[store][key] = {
41 | value,
42 | purge_time: Date.now() + cacheTime
43 | };
44 | }
45 |
46 | getCachedIfPresent(store, key, getter, cacheTime = 30000) {
47 | const cachedItem = this.getCacheItem(store, key);
48 | return cachedItem ? Promise.resolve(cachedItem) : getter().then(value => {
49 | if(!(_.isNil(value) || _.isArray(value) && _.isEmpty(value))) {
50 | this.setCacheItem(store, key, value, cacheTime);
51 | }
52 | return value;
53 | });
54 | }
55 |
56 | getCacheItem(store, key) {
57 | if (this.cache[store] && this.cache[store][key]) {
58 | return this.cache[store][key].value;
59 | }
60 | return null;
61 | }
62 |
63 | initialize() {
64 | if (this.initialized && this.jobRunning) {
65 | // purge task already scheduled
66 | return Promise.resolve();
67 | }
68 |
69 | this.initialized = true;
70 | // track the purge job so initialize() can reschedule it after stop()
71 | this.jobRunning = true;
72 | task.scheduleTask('cache_purge', this._purgeCache.bind(this), 30000);
73 | return Promise.resolve();
74 | }
75 |
76 | stop() {
77 | task.removeTask('cache_purge');
78 | this.jobRunning = false;
79 | }
80 | }
81 |
82 |
83 | export default new Cache();
84 |
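
Usage note (not part of the repository): a short sketch of the read-through pattern getCachedIfPresent implements; the store/key names and fetchPeerCount are made up for illustration.

    import cache from './cache';

    cache.initialize().then(() => {
        // the getter only runs when the value is missing or expired;
        // nil and empty-array results are deliberately not cached
        return cache.getCachedIfPresent('node-info', 'peer-count', () => fetchPeerCount(), 60000);
    }).then(peerCount => console.log('peer count', peerCount));

    // drop or extend a single entry explicitly
    cache.removeCacheItem('node-info', 'peer-count');
    cache.refreshCacheTime('node-info', 'peer-count', 120000);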
--------------------------------------------------------------------------------
/core/config/environment.js.example:
--------------------------------------------------------------------------------
1 | const environment = {};
2 |
3 | environment['MODE_TEST_NETWORK'] = false;
4 |
5 | export default environment;
6 |
--------------------------------------------------------------------------------
/core/console.js:
--------------------------------------------------------------------------------
1 | import config from './config/config';
2 |
3 | let enabled = true;
4 | console.disable = () => enabled = false;
5 | console.enable = () => enabled = true;
6 | const _consoleLog = console.log;
7 | const filters = [];
8 |
9 | console.addFilter = function(filter) {
10 | filters.push(filter);
11 | };
12 |
13 | console.log = function() {
14 | if (!enabled || !config.MODE_DEBUG) {
15 | return;
16 | }
17 | let showLog = true;
18 | if (filters.length > 0) {
19 | const regex = new RegExp(`^\\[(${filters.join('|')})[^\\]]*\\]`, 'm');
20 | showLog = !!regex.exec(arguments[0]);
21 | }
22 | showLog && _consoleLog.apply(console, arguments);
23 | };
24 |
25 | config.DEBUG_LOG_FILTER.forEach(filter => console.addFilter(filter));
26 |
27 | export default console;
28 |
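
Usage note (not part of the repository): filters only let through log lines whose message starts with a matching [tag] prefix, and nothing is printed unless MODE_DEBUG is enabled. The tags below are made up.

    import console from './console';

    console.addFilter('network');             // only show lines tagged [network...]
    console.log('[network] peer connected');  // printed while MODE_DEBUG is true
    console.log('[wallet] balance updated');  // suppressed by the filter
    console.disable();                        // silence all output until console.enable()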
--------------------------------------------------------------------------------
/core/crypto/object-hash.js:
--------------------------------------------------------------------------------
1 | import crypto from 'crypto';
2 | import cHash from './chash';
3 | import objectUtils from '../utils/object-utils';
4 |
5 | function getCHash160(obj) {
6 | return cHash.getCHash160(objectUtils.asString(obj));
7 | }
8 |
9 | function getCHash288(obj) {
10 | return cHash.getCHash288(objectUtils.asString(obj));
11 | }
12 |
13 | function getHashBuffer(obj, fromBuffer) {
14 | if (!fromBuffer) {
15 | return crypto.createHash('sha256').update(objectUtils.asString(obj), 'utf8').digest();
16 | }
17 | else {
18 | return crypto.createHash('sha256').update(obj, 'utf8').digest();
19 | }
20 | }
21 |
22 | function getSHA1Buffer(obj, fromBuffer) {
23 | if (!fromBuffer) {
24 | return crypto.createHash('sha1').update(objectUtils.asString(obj), 'utf8').digest();
25 | }
26 | else {
27 | return crypto.createHash('sha1').update(obj, 'utf8').digest();
28 | }
29 | }
30 |
31 | export default {
32 | getCHash160,
33 | getCHash288,
34 | getHashBuffer,
35 | getSHA1Buffer
36 | };
37 |
38 |
39 |
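
Usage note (not part of the repository): a sketch of the helpers above; the payload is arbitrary and the hashes are whatever the checksummed/sha256 functions return for it.

    import objectHash from './object-hash';

    const payload = {type: 'transaction', version: '0a0'};

    // 160-bit and 288-bit checksummed hashes of the normalized object
    const shortHash = objectHash.getCHash160(payload);
    const longHash  = objectHash.getCHash288(payload);

    // raw sha256 digest as a Buffer; pass fromBuffer=true to hash a Buffer directly
    const digest       = objectHash.getHashBuffer(payload);
    const bufferDigest = objectHash.getHashBuffer(Buffer.from('raw bytes'), true);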
--------------------------------------------------------------------------------
/core/crypto/signature.js:
--------------------------------------------------------------------------------
1 | import ecdsa from 'secp256k1';
2 | import base58 from 'bs58';
3 | import objectHash from './object-hash';
4 |
5 |
6 | export function signWithPrivateKeyObject(messageBuffer, exPrivKey) {
7 | const privateKey = exPrivKey.privateKey;
8 | const privKeyBuffer = privateKey.toBuffer({size: 32});
9 | const result = ecdsa.sign(objectHash.getHashBuffer(messageBuffer, true), privKeyBuffer);
10 | return base58.encode(result.signature);
11 | }
12 |
13 | export function sign(hash, privKey, format) {
14 | const result = ecdsa.sign(hash, privKey);
15 | if(format === 'hex'){
16 | return result.signature.toString('hex');
17 | } else if(format === 'buffer'){
18 | return result.signature;
19 | }
20 | return base58.encode(result.signature);
21 | }
22 |
23 |
24 | export function verifyBuffer(hash, signature, publicKey) {
25 | try {
26 | return ecdsa.verify(hash, signature, publicKey);
27 | }
28 | catch (e) {
29 | console.log('signature verification exception: ' + e.toString());
30 | return false;
31 | }
32 | }
33 |
34 | export function verify(hash, b58Signature, b58PublicKey) {
35 | return verifyBuffer(hash, base58.decode(b58Signature), base58.decode(b58PublicKey));
36 | }
37 |
38 | export default {
39 | sign,
40 | signWithPrivateKeyObject,
41 | verify,
42 | verifyBuffer
43 | };
44 |
45 |
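
Usage note (not part of the repository): a sign/verify round trip using the functions above. The key pair is generated on the fly purely for illustration and the import paths are indicative only; a production private key would normally be validated with ecdsa.privateKeyVerify first.

    import crypto from 'crypto';
    import ecdsa from 'secp256k1';
    import base58 from 'bs58';
    import objectHash from './object-hash';
    import signature from './signature';

    // illustrative key pair: a random 32-byte private key and its derived public key
    const privateKey = crypto.randomBytes(32);
    const publicKey  = ecdsa.publicKeyCreate(privateKey);

    // hash the message, sign the hash, then verify using base58-encoded values
    const hash         = objectHash.getHashBuffer({message: 'hello millix'});
    const b58Signature = signature.sign(hash, privateKey);
    const isValid      = signature.verify(hash, b58Signature, base58.encode(publicKey));
    console.log('signature valid?', isValid);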
--------------------------------------------------------------------------------
/core/event-bus.js:
--------------------------------------------------------------------------------
1 | import EventEmitter from 'events';
2 |
3 | export default new EventEmitter();
4 |
--------------------------------------------------------------------------------
/core/genesis/genesis-config.js:
--------------------------------------------------------------------------------
1 | import config from '../config/config';
2 |
3 | export const GENESIS_TRANSACTION_MAIN_NETWORK = '2VngVznbdiQ5tqfWqn2NMP8DijqCbLX79Gygo9yYRVFU6iN35h';
4 | export const GENESIS_TRANSACTION_TEST_NETWORK = 'BbYAZLcxbx6adN3KwHZSTGjE6VpeDhiJ3ZPrXs6EMAGqDPfi5';
5 | export const GENESIS_SHARD_ID_MAIN_NETWORK = 'qGuUgMMVmaCvqrvoWG6zARjkrujGMpzJmpNhBgz1y3RjBG7ZR';
6 | export const GENESIS_SHARD_ID_TEST_NETWORK = 'AyAC3kjLtjM4vktAJ5Xq6mbXKjzEqXoSsmGhhgjnkXUvjtF2M';
7 | export default {
8 | genesis_transaction: config.MODE_TEST_NETWORK ? GENESIS_TRANSACTION_TEST_NETWORK : GENESIS_TRANSACTION_MAIN_NETWORK,
9 | genesis_shard_id : config.MODE_TEST_NETWORK ? GENESIS_SHARD_ID_TEST_NETWORK : GENESIS_SHARD_ID_MAIN_NETWORK
10 | };
11 |
--------------------------------------------------------------------------------
/core/log-manager.js:
--------------------------------------------------------------------------------
1 | import task from './task';
2 | import eventBus from './event-bus';
3 | import mutex from './mutex';
4 | import ntp from './ntp';
5 | import moment from 'moment';
6 | import genesisConfig from './genesis/genesis-config';
7 |
8 |
9 | class LogManager {
10 | constructor(updateFrequency, logSize) {
11 | this.logsCache = [];
12 | this.backLogSize = 0;
13 | this.started = false;
14 | this.updateFrequency = updateFrequency;
15 | this.logSize = logSize;
16 | this.log = [];
17 | this.lastIdx = 0;
18 | this._onUpdate = null;
19 | }
20 |
21 | setOnUpdate(callback) {
22 | this._onUpdate = callback;
23 | }
24 |
25 | _update() {
26 | this._onUpdate && this._onUpdate();
27 | if (this.logSize > 0) {
28 | let excess = (this.log.length + this.logsCache.length) - this.logSize;
29 | if (excess > 0) {
30 | this.log.splice(this.log.length - excess);
31 | }
32 | this.log = [
33 | ...this.logsCache,
34 | ...this.log
35 | ];
36 | }
37 | this.logsCache = [];
38 | }
39 |
40 | getTime() {
41 | if (!ntp.initialized) {
42 | return 'undefined';
43 | }
44 |
45 | let clock = new Date();
46 | clock.setUTCMilliseconds(clock.getUTCMilliseconds() + ntp.offset);
47 | return moment.utc(clock).format('YYYY-MM-DD HH:mm:ss');
48 | }
49 |
50 | initialize() {
51 | if (this.started) {
52 | return Promise.resolve();
53 | }
54 | task.scheduleTask('update log', this._update.bind(this), this.updateFrequency);
55 | this.started = true;
56 |
57 | eventBus.on('node_event_log', data => {
58 | this.addLog(data, this.getTime());
59 | this.setBacklogSize(mutex.getKeyQueuedSize([`transaction_${genesisConfig.genesis_shard_id}`]));
60 | });
61 | eventBus.on('wallet_event_log', data => {
62 | this.addLog(data, this.getTime());
63 | this.setBacklogSize(mutex.getKeyQueuedSize([`transaction_${genesisConfig.genesis_shard_id}`]));
64 | });
65 |
66 | return Promise.resolve();
67 | }
68 |
69 | stop() {
70 | task.removeTask('update log');
71 | this.started = false;
72 | this.logsCache = [];
73 | this.backLogSize = 0;
74 | }
75 |
76 | setBacklogSize(size) {
77 | this.backLogSize = size;
78 | }
79 |
80 | addLog(data, timestamp) {
81 | if (!this.started) {
82 | return;
83 | }
84 | this.logsCache.push({
85 | ...data,
86 | idx : this.lastIdx++,
87 | content: JSON.stringify(data.content || '', null, '\t'),
88 | type : data.type.split(':')[0],
89 | timestamp
90 | });
91 | }
92 |
93 | getLog(limit) {
94 | let log;
95 | if(this.log.length <= limit) {
96 | log = this.log;
97 | } else {
98 | log = this.log.slice(Math.max(this.log.length - limit, 0));
99 | }
100 |
101 | return log;
102 | }
103 | }
104 |
105 |
106 | export default new LogManager(250, 1000);
107 |
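
Usage note (not part of the repository): events emitted on the node_event_log / wallet_event_log bus are timestamped, buffered, and merged into the rolling log on every update tick (250 ms for the exported instance). The event payload below is made up.

    import eventBus from './event-bus';
    import logManager from './log-manager';

    logManager.initialize().then(() => {
        eventBus.emit('node_event_log', {
            type   : 'transaction_new:0.0.1',
            content: {transaction_id: '...'}
        });

        // the callback fires on every update tick; getLog(limit) returns at most `limit` entries
        logManager.setOnUpdate(() => console.log(logManager.getLog(10)));
    });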
--------------------------------------------------------------------------------
/core/ntp.js:
--------------------------------------------------------------------------------
1 | import NtpTimeSync from 'ntp-time-sync';
2 | import console from './console';
3 |
4 | let ntp = NtpTimeSync.getInstance();
5 | ntp.offset = 0;
6 | ntp.initialized = false;
7 | // request the NTP offset once at startup; retry until the first sync succeeds
8 | let initialize = () => {
9 | ntp.getTime().then(function(result) {
10 | console.log('[millix-node] current system time', new Date());
11 | console.log('[millix-node] real time', result.now);
12 | console.log('[millix-node] offset in milliseconds', result.offset);
13 | ntp.offset = result.offset;
14 | ntp.initialized = true;
15 | })
16 | .catch(() => initialize());
17 | };
18 |
19 | initialize();
20 |
21 | ntp.now = function() {
22 | let timeNow = new Date();
23 | timeNow.setUTCMilliseconds(timeNow.getUTCMilliseconds() + ntp.offset);
24 | return timeNow;
25 | };
26 |
27 | export default ntp;
28 |
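
Usage note (not part of the repository): a small sketch of reading offset-corrected time; until the first NTP response arrives the offset is 0, so ntp.now() simply mirrors the system clock.

    import ntp from './ntp';

    if (ntp.initialized) {
        console.log('corrected time', ntp.now(), 'offset (ms)', ntp.offset);
    }
    else {
        console.log('ntp not synced yet, using system clock', new Date());
    }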
--------------------------------------------------------------------------------
/core/services/services.js:
--------------------------------------------------------------------------------
1 | import wallet, {WALLET_MODE} from '../wallet/wallet';
2 | import network from '../../net/network';
3 | import server from '../../api/server';
4 | import peer from '../../net/peer';
5 | import peerRotation from '../../net/peer-rotation';
6 | import jobEngine from '../../job/job-engine';
7 | import console from '../console';
8 | import logManager from '../log-manager';
9 | import database from '../../database/database';
10 | import fileManager from '../storage/file-manager';
11 | import fileExchange from '../storage/file-exchange';
12 | import sender from '../storage/sender';
13 | import cache from '../cache';
14 |
15 |
16 | class Service {
17 | constructor() {
18 | this.mode = WALLET_MODE.CONSOLE;
19 | this.initialized = false;
20 | }
21 |
22 | initialize(options = {}) {
23 | let {
24 | mode,
25 | initialize_wallet_event: initializeWalletEvent,
26 | auto_create_wallet : createWalletIfNotExists
27 | } = options;
28 | if (this.initialized) {
29 | return Promise.resolve();
30 | }
31 | this.initialized = true;
32 | if (mode) {
33 | this.mode = mode;
34 | }
35 |
36 | if (createWalletIfNotExists === undefined) {
37 | createWalletIfNotExists = true;
38 | }
39 | return logManager.initialize()
40 | .then(() => server.initialize())
41 | .then(() => wallet.setMode(this.mode).initialize(initializeWalletEvent, createWalletIfNotExists))
42 | .then(() => cache.initialize())
43 | .then(() => network.initialize())
44 | .then(() => peer.initialize())
45 | .then(() => peerRotation.initialize())
46 | .then(() => jobEngine.initialize())
47 | .then(() => wallet._doUpdateNodeAttribute())
48 | .then(() => database.checkup())
49 | .then(() => fileManager.initialize())
50 | .then(() => fileExchange.initialize())
51 | .catch(e => {
52 | console.log(`[services] ${e.message}`);
53 | this.initialized = false;
54 | if (e.cause === 'wallet_not_found') {
55 | return;
56 | }
57 | return this.initialize(options);
58 | });
59 | }
60 |
61 | stop() {
62 | if (!this.initialized) {
63 | return;
64 | }
65 | this.initialized = false;
66 | wallet.stop();
67 | cache.stop();
68 | network.stop();
69 | peer.stop();
70 | peerRotation.stop();
71 | logManager.stop();
72 | jobEngine.stop();
73 | fileExchange.close();
74 | }
75 | }
76 |
77 |
78 | export default new Service();
79 |
--------------------------------------------------------------------------------
/core/statistics.js:
--------------------------------------------------------------------------------
1 | class Statistics {
2 |
3 | constructor() {
4 | this.messageCounter = {};
5 | this.messageCounter['last_update'] = Date.now();
6 | }
7 |
8 |
9 | newEvent(type) {
10 | if (this.messageCounter[type]) {
11 | this.messageCounter[type]++;
12 | }
13 | else {
14 | this.messageCounter[type] = 1;
15 | }
16 | if (Date.now() - this.messageCounter['last_update'] > 10000) {
17 | console.log('[statistics] ', this.messageCounter);
18 | this.messageCounter['last_update'] = Date.now();
19 | }
20 | }
21 | }
22 |
23 |
24 | export default new Statistics();
25 |
--------------------------------------------------------------------------------
/core/storage/chunk-utils.js:
--------------------------------------------------------------------------------
1 | import fs from 'fs';
2 | import {CHUNK_SIZE} from '../config/config';
3 | import fileManager from './file-manager';
4 |
5 |
6 | class ChunkUtils {
7 | constructor() {
8 | }
9 |
10 | writeFileChunk(addressKeyIdentifier, transactionDate, transactionId, fileHash, chunk, chunkNumber) {
11 | return new Promise((resolve, reject) => {
12 | let fileLocation = fileManager.createAndGetFileLocation(addressKeyIdentifier, transactionDate, transactionId, fileHash);
13 | fs.open(fileLocation, 'w', (err, fd) => {
14 | if (err) {
15 | console.log("[chunk-utils] error: ", err);
16 | return reject(err);
17 | }
18 |
19 | fs.write(fd, chunk, 0, chunk.length, chunkNumber * CHUNK_SIZE, (err) => {
20 | if (err) {
21 | console.log('[chunk-utils] error: ', err);
22 | return fs.close(fd, () => reject(err));
23 | }
24 | fs.close(fd, () => resolve()); // release the descriptor once the chunk is persisted
25 | });
26 | });
27 | });
28 | }
29 |
30 | getChunk(addressKeyIdentifier, transactionDate, transactionId, fileHash, position) {
31 | return new Promise((resolve, reject) => {
32 | let offset = position * CHUNK_SIZE;
33 | let buffer = Buffer.alloc(CHUNK_SIZE);
34 | let fileLocation = fileManager.getFileLocation(addressKeyIdentifier, transactionDate, transactionId, fileHash);
35 | fs.open(fileLocation, 'r', (err, fd) => {
36 | if (err) {
37 | return reject(err);
38 | }
39 | fs.read(fd, buffer, 0, CHUNK_SIZE, offset, (err, bytes) => {
40 | if (err) {
41 | return reject(err);
42 | }
43 |
44 | fs.close(fd, (err) => {
45 | if (err) {
46 | return reject(err);
47 | }
48 |
49 | resolve(buffer.slice(0, bytes));
50 | });
51 | });
52 | });
53 | });
54 | }
55 |
56 | getNumberOfChunks(addressKeyIdentifier, transactionDate, transactionId, fileHash) {
57 | return new Promise((resolve, reject) => {
58 | let fileLocation = fileManager.getFileLocation(addressKeyIdentifier, transactionDate, transactionId, fileHash);
59 | fs.stat(fileLocation, (err, stats) => {
60 | if (err) {
61 | return reject(err);
62 | }
63 | resolve(Math.ceil(stats.size / CHUNK_SIZE));
64 | });
65 | });
66 | }
67 | }
68 |
69 |
70 | export default new ChunkUtils();
71 |
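
Usage note (not part of the repository): a sketch of reading a stored file back chunk by chunk with the helpers above; the four identifiers are placeholders for values that normally come from the transaction record.

    import chunkUtils from './chunk-utils';

    const addressKeyIdentifier = '...';
    const transactionDate      = '...';
    const transactionId        = '...';
    const fileHash             = '...';

    // read every chunk sequentially and concatenate them into a single Buffer
    async function readWholeFile() {
        const chunkCount = await chunkUtils.getNumberOfChunks(addressKeyIdentifier, transactionDate, transactionId, fileHash);
        const chunks     = [];
        for (let position = 0; position < chunkCount; position++) {
            chunks.push(await chunkUtils.getChunk(addressKeyIdentifier, transactionDate, transactionId, fileHash, position));
        }
        return Buffer.concat(chunks);
    }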
--------------------------------------------------------------------------------
/core/storage/storage-acl.js:
--------------------------------------------------------------------------------
1 | import cache from '../cache';
2 | import _ from 'lodash';
3 |
4 |
5 | class StorageAcl {
6 |
7 | constructor() {
8 |
9 | }
10 |
11 | /***********************
12 | * Sender methods
13 | ***********************/
14 |
15 | addNewFileToSender(nodeId, transactionId, fileHash) {
16 | let cachedData = cache.getCacheItem('storage-acl-sender', nodeId);
17 | if (!cachedData) {
18 | cachedData = {};
19 | cache.setCacheItem('storage-acl-sender', nodeId, cachedData, 1800000); //30min cache
20 | }
21 |
22 | if (!cachedData[transactionId]) {
23 | cachedData[transactionId] = {};
24 | }
25 | cachedData[transactionId][fileHash] = true;
26 | }
27 |
28 | removeEntryFromSender(nodeId, transactionId) {
29 | const cachedData = cache.getCacheItem('storage-acl-sender', nodeId);
30 | if (cachedData) {
31 | delete cachedData[transactionId];
32 | }
33 | if (_.isEmpty(cachedData)) {
34 | cache.removeCacheItem('storage-acl-sender', nodeId);
35 | }
36 | }
37 |
38 | hasFileToSend(nodeId, transactionId, fileHash) {
39 | const cachedData = cache.getCacheItem('storage-acl-sender', nodeId);
40 | if (!cachedData) {
41 | return false;
42 | }
43 | return cachedData[transactionId] && cachedData[transactionId][fileHash];
44 | }
45 |
46 | hasTransactionRequest(nodeId, transactionId) {
47 | const cachedData = cache.getCacheItem('storage-acl-sender', nodeId);
48 | if (!cachedData) {
49 | return false;
50 | }
51 | return cachedData[transactionId];
52 | }
53 |
54 |
55 | /***********************
56 | * Receiver methods
57 | ***********************/
58 |
59 | addChunkToReceiver(nodeId, transactionId, fileHash, requestedChunk) {
60 | let cachedData = cache.getCacheItem('storage-acl-receiver', nodeId);
61 | if (!cachedData) {
62 | cachedData = {};
63 | cache.setCacheItem('storage-acl-receiver', nodeId, cachedData, 1800000); //30min cache
64 | }
65 | if (!cachedData[transactionId]) {
66 | cachedData[transactionId] = {};
67 | }
68 |
69 | if (!cachedData[transactionId][fileHash]) {
70 | cachedData[transactionId][fileHash] = {};
71 | }
72 |
73 | cachedData[transactionId][fileHash][requestedChunk] = true;
74 | }
75 |
76 | removeFileFromReceiver(nodeId, transactionId) {
77 | const cachedData = cache.getCacheItem('storage-acl-receiver', nodeId);
78 | if (cachedData) {
79 | delete cachedData[transactionId];
80 | }
81 | if (_.isEmpty(cachedData)) {
82 | cache.removeCacheItem('storage-acl-receiver', nodeId);
83 | }
84 | }
85 |
86 | removeChunkFromReceiver(nodeId, transactionId, fileHash, requestedChunk) {
87 | const cachedData = cache.getCacheItem('storage-acl-receiver', nodeId);
88 | if (cachedData && cachedData[transactionId] && cachedData[transactionId][fileHash] && cachedData[transactionId][fileHash][requestedChunk]) {
89 | delete cachedData[transactionId][fileHash][requestedChunk];
90 | }
91 | }
92 |
93 | hasChunkToReceive(nodeId, transactionId, fileHash, requestedChunk) {
94 | const cachedData = cache.getCacheItem('storage-acl-receiver', nodeId);
95 | if (!cachedData) {
96 | return false;
97 | }
98 | return cachedData[transactionId] && cachedData[transactionId][fileHash] && cachedData[transactionId][fileHash][requestedChunk];
99 | }
100 |
101 | }
102 |
103 |
104 | export default new StorageAcl();
105 |
--------------------------------------------------------------------------------
/core/task.js:
--------------------------------------------------------------------------------
1 | class Task {
2 | constructor() {
3 | this.debug = false;
4 | this.runningTask = {};
5 | }
6 |
7 | scheduleTask(taskName, task, waitTime, asyncTask, once) {
8 | let self = this;
9 | let taskID = this.runningTask[taskName];
10 |
11 | if (taskID) {
12 | clearTimeout(taskID);
13 | }
14 |
15 | this.runningTask[taskName] = setTimeout(function run() {
16 | if (!self.runningTask[taskName]) {
17 | return;
18 | }
19 |
20 | self.debug && console.log(`[task] running ${taskName}`);
21 | if (asyncTask) {
22 | task().then(() => {
23 | if (!self.runningTask[taskName]) {
24 | return;
25 | }
26 |
27 | if (!once) {
28 | self.runningTask[taskName] = setTimeout(run, waitTime);
29 | }
30 | else {
31 | delete self.runningTask[taskName];
32 | }
33 | });
34 | }
35 | else {
36 | try {
37 | task();
38 | }
39 | catch (e) {
40 | self.debug && console.log(`[task] error running task ${taskName}: ${e}`);
41 | }
42 |
43 | if (!once) {
44 | self.runningTask[taskName] = setTimeout(run, waitTime);
45 | }
46 | else {
47 | delete self.runningTask[taskName];
48 | }
49 |
50 | }
51 | }, waitTime);
52 | }
53 |
54 | removeTask(taskName) {
55 | clearTimeout(this.runningTask[taskName]);
56 | delete this.runningTask[taskName];
57 | }
58 | }
59 |
60 |
61 | export default new Task();
62 |
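
Usage note (not part of the repository): scheduleTask(taskName, task, waitTime, asyncTask, once) reschedules itself after each run unless once is set, and an async task is only rescheduled once its promise resolves. The task names and fetchPeerList below are made up.

    import task from './task';

    // synchronous task: runs every 5 seconds until removed
    task.scheduleTask('print_heartbeat', () => console.log('[example] heartbeat'), 5000);

    // asynchronous task: the next run is scheduled only after the promise resolves
    task.scheduleTask('poll_peers', () => fetchPeerList().then(peers => console.log('[example] peers', peers.length)), 10000, true);

    // one-shot task: runs a single time after 30 seconds
    task.scheduleTask('delayed_cleanup', () => console.log('[example] cleanup'), 30000, false, true);

    // stop a recurring task
    task.removeTask('print_heartbeat');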
--------------------------------------------------------------------------------
/core/utils/object-utils.js:
--------------------------------------------------------------------------------
1 | const STRING_JOIN_CHAR = '\x00';
2 |
3 | /**
4 | * Converts the argument into a string by mapping data types to a prefixed
5 | * string and concatenating all fields together.
6 | * @param obj the value to be converted into a string
7 | * @returns {string} the string version of the value
8 | */
9 | function asString(obj) {
10 | let arrComponents = [];
11 |
12 | function extractComponents(variable) {
13 | if (variable === null) {
14 | throw Error('null value in ' + JSON.stringify(obj));
15 | }
16 | switch (typeof variable) {
17 | case 'string':
18 | arrComponents.push('s', variable);
19 | break;
20 | case 'number':
21 | arrComponents.push('n', variable.toString());
22 | break;
23 | case 'boolean':
24 | arrComponents.push('b', variable.toString());
25 | break;
26 | case 'object':
27 | if (Array.isArray(variable)) {
28 | if (variable.length === 0) {
29 | throw Error('empty array in ' + JSON.stringify(obj));
30 | }
31 | arrComponents.push('[');
32 | for (let i = 0; i < variable.length; i++) {
33 | extractComponents(variable[i]);
34 | }
35 | arrComponents.push(']');
36 | }
37 | else {
38 | const keys = Object.keys(variable).sort();
39 | if (keys.length === 0) {
40 | throw Error('empty object in ' + JSON.stringify(obj));
41 | }
42 | keys.forEach(function(key) {
43 | if (typeof variable[key] === 'undefined') {
44 | throw Error('undefined at ' + key + ' of ' + JSON.stringify(obj));
45 | }
46 | arrComponents.push(key);
47 | extractComponents(variable[key]);
48 | });
49 | }
50 | break;
51 | default:
52 | throw Error('hash: unknown type=' + (typeof variable) + ' of ' + variable + ', object: ' + JSON.stringify(obj));
53 | }
54 | }
55 |
56 | extractComponents(obj);
57 | return arrComponents.join(STRING_JOIN_CHAR);
58 | }
59 |
60 | export default {
61 | STRING_JOIN_CHAR,
62 | asString
63 | };
64 |
65 |
66 |
67 |
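
Usage note (not part of the repository): asString produces a canonical, type-prefixed representation joined by the NUL character, so objects with the same keys and values normalize identically regardless of key order; null values, empty arrays and empty objects are rejected with an Error.

    import objectUtils from './object-utils';

    // keys are visited in sorted order, so both calls return the same string:
    // 'amount\x00n\x0010\x00status\x00s\x00pending'
    objectUtils.asString({status: 'pending', amount: 10});
    objectUtils.asString({amount: 10, status: 'pending'});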
--------------------------------------------------------------------------------
/core/utils/utils.js:
--------------------------------------------------------------------------------
1 | export function orElsePromise(object, fn) {
2 | return object ? Promise.resolve(object) : fn();
3 | }
4 |
5 |
6 | export class NodeVersion {
7 | constructor(major, minor, patch) {
8 | this.major = major;
9 | this.minor = minor;
10 | this.patch = patch;
11 | }
12 |
13 | static fromString(version) {
14 | const re = new RegExp('(?<major>\\d+)\\.(?<minor>\\d+)\\.(?<patch>\\d+)');
15 | let major, minor, patch;
16 | if (version) {
17 | const match = re.exec(version);
18 | if (match && match.groups &&
19 | match.groups.major && match.groups.minor && match.groups.patch) {
20 | try {
21 | major = parseInt(match.groups.major);
22 | minor = parseInt(match.groups.minor);
23 | patch = parseInt(match.groups.patch);
24 | return new NodeVersion(major, minor, patch);
25 | }
26 | catch (ignore) {
27 | }
28 | }
29 | }
30 | return null;
31 | }
32 |
33 | static ofNullable(version) {
34 | if (!version) {
35 | return new NodeVersion(0, 0, 0);
36 | }
37 | return version;
38 | }
39 |
40 | compareTo(other) {
41 | if (this.major === other.major && this.minor === other.minor && this.patch === other.patch) {
42 | return 0;
43 | }
44 |
45 | if (this.major > other.major || this.major === other.major && this.minor > other.minor || this.major === other.major && this.minor === other.minor && this.patch > other.patch) {
46 | return 1;
47 | }
48 |
49 | return -1;
50 | }
51 | }
52 |
53 |
54 | export default {
55 | orElsePromise
56 | };
57 |
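
Usage note (not part of the repository): NodeVersion parses dotted version strings into comparable components; the versions below are arbitrary.

    import {NodeVersion} from './utils';

    const local  = NodeVersion.fromString('1.11.2');
    const remote = NodeVersion.fromString('1.9.7');

    console.log(local.compareTo(remote));                       // 1  (local is newer)
    console.log(NodeVersion.ofNullable(null).compareTo(local)); // -1 (missing versions count as 0.0.0)
    console.log(NodeVersion.fromString('not-a-version'));       // null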
--------------------------------------------------------------------------------
/database/pool/worker.mjs:
--------------------------------------------------------------------------------
1 | import {parentPort} from 'worker_threads';
2 | import sqlite3 from 'sqlite3';
3 | import path from 'path';
4 | import fs from 'fs';
5 |
6 | let database;
7 |
8 | function initializeDB(databaseRootFolder, databaseName, initializeScriptFile) {
9 | return new Promise((resolve, reject) => {
10 | if (!fs.existsSync(databaseRootFolder)) {
11 | fs.mkdirSync(path.join(databaseRootFolder));
12 | }
13 |
14 | let dbFile = path.join(databaseRootFolder, databaseName);
15 |
16 | let doInitialize = false;
17 | if (!fs.existsSync(dbFile)) {
18 | doInitialize = true;
19 | }
20 |
21 | database = new sqlite3.Database(dbFile, (err) => {
22 | if (err) {
23 | return reject(`${err.message} - ${dbFile}`);
24 | }
25 |
26 | if (doInitialize) {
27 | fs.readFile(initializeScriptFile, 'utf8', (err, data) => {
28 | if (err) {
29 | return reject(`${err.message} - ${dbFile}`);
30 | }
31 |
32 | database.exec(data, (err) => {
33 | if (err) {
34 | return reject(`${err.message} - ${dbFile}`);
35 | }
36 | database.run('PRAGMA journal_mode = WAL', () => database.run('PRAGMA synchronous = NORMAL', () => resolve()));
37 | });
38 | });
39 | } else {
40 | database.run('PRAGMA journal_mode = WAL', () => database.run('PRAGMA synchronous = NORMAL', () => resolve()));
41 | }
42 | });
43 | });
44 | }
45 |
46 | parentPort.on('message', ({
47 | type,
48 | data
49 | }) => {
50 |
51 | if (type === 'init') {
52 | initializeDB(data.database_folder, data.database_name, data.init_script_file)
53 | .then(() => parentPort.postMessage({
54 | type: 'init_response',
55 | initialized: true
56 | }))
57 | .catch(e => {
58 | throw Error(e);
59 | });
60 | } else if (type === 'close') {
61 | database.close(() => {
62 | parentPort.postMessage({type: 'close_response', closed: true});
63 | setImmediate(() => process.exit(0));
64 | });
65 | } else if (type === 'all' || type === 'get' || type === 'run') {
66 | const {
67 | sql,
68 | parameters
69 | } = data;
70 | database[type](sql, parameters, (err, data) => {
71 | parentPort.postMessage({err, data});
72 | });
73 | } else if (type === 'exec') {
74 | const {
75 | sql
76 | } = data;
77 | database.exec(sql, (err, data) => {
78 | parentPort.postMessage({err, data});
79 | });
80 | } else {
81 | throw Error('execution type not supported');
82 | }
83 | });
84 |
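
Usage note (not part of the repository): a sketch of driving the worker from the main thread over its message protocol (init, all/get/run, exec, close); the database paths and file names are placeholders.

    import {Worker} from 'worker_threads';

    const worker = new Worker('./database/pool/worker.mjs');

    worker.once('message', (initResponse) => {
        console.log('worker initialized?', initResponse.initialized);
        // query responses arrive as plain {err, data} messages
        worker.once('message', ({err, data}) => {
            console.log(err || data);
            worker.postMessage({type: 'close', data: {}});
        });
        worker.postMessage({
            type: 'all',
            data: {sql: 'SELECT * FROM config LIMIT ?', parameters: [5]}
        });
    });

    worker.postMessage({
        type: 'init',
        data: {
            database_folder : './data',                           // placeholder
            database_name   : 'millix.sqlite',                    // placeholder
            init_script_file: './scripts/initialize-database.sql' // placeholder
        }
    });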
--------------------------------------------------------------------------------
/database/repositories/api.js:
--------------------------------------------------------------------------------
1 | import console from '../../core/console';
2 |
3 | export default class API {
4 | constructor(database) {
5 | this.database = database;
6 | }
7 |
8 | list() {
9 | return new Promise(resolve => {
10 | this.database.all('select * from api', (err, rows) => {
11 | resolve(rows);
12 | });
13 | });
14 | }
15 |
16 | addAPI(api) {
17 | return new Promise((resolve) => {
18 | this.database.run('INSERT INTO api (api_id, name, description, method, version_released, permission, status) VALUES (?,?,?,?,?,?,?)', [
19 | api.id,
20 | api.name,
21 | api.description,
22 | api.method,
23 | api.version_released,
24 | api.permission,
25 | api.enable
26 | ], (err) => {
27 | if (err) {
28 | err.message.startsWith('SQLITE_CONSTRAINT') ? console.log(`[database] api ${api.id} already exists`) : console.error(err.message);
29 | }
30 | resolve();
31 | });
32 | });
33 | }
34 |
35 | removeAPI(apiID) {
36 | return new Promise((resolve, reject) => {
37 | let sql = 'delete from api where api_id = ?';
38 | this.database.run(sql, [apiID], (err) => {
39 | if (err) {
40 | return reject(err.message);
41 | }
42 | resolve();
43 | });
44 | });
45 | }
46 |
47 | }
48 |
--------------------------------------------------------------------------------
/database/repositories/config.js:
--------------------------------------------------------------------------------
1 | import database, {Database} from '../database';
2 |
3 | export default class Config {
4 | constructor(database) {
5 | this.database = database;
6 | }
7 |
8 | deleteAll() {
9 | return new Promise((resolve) => {
10 | this.database.get('DELETE FROM config', () => {
11 | resolve();
12 | });
13 | });
14 | }
15 |
16 | getConfig(name) {
17 | return new Promise(resolve => {
18 | this.database.get('SELECT * FROM config WHERE config_name=?', [name.toLowerCase()], (err, row) => {
19 | if (row) {
20 | row['config_name'] = row['config_name'].toUpperCase();
21 | }
22 | resolve(row);
23 | });
24 | });
25 | }
26 |
27 | addConfig(name, value, type) {
28 | let id = database.getRepository('normalization').get(name);
29 | if (!id) {
30 | id = Database.generateID(20);
31 | }
32 |
33 | return new Promise((resolve, reject) => {
34 | this.database.run('INSERT INTO config (config_id, config_name, value, type) VALUES (?,?,?,?)', [
35 | id,
36 | name.toLowerCase(),
37 | value,
38 | type
39 | ], (err, row) => {
40 | if (err) {
41 | reject(err);
42 | } else {
43 | resolve(row);
44 | }
45 | });
46 | });
47 | }
48 |
49 | updateConfig(name, value, type) {
50 | name = name.toLowerCase();
51 | return new Promise(resolve => {
52 | this.database.run('UPDATE config SET value=?' + (type !== undefined ? ', type=?' : '') + ' WHERE config_name=?', [value].concat(type !== undefined ? [
53 | type,
54 | name
55 | ] : [name]), (err, row) => {
56 | resolve(row);
57 | });
58 | });
59 | }
60 |
61 | updateConfigByID(configID, value) {
62 | let where = {config_id: configID};
63 | let set = {};
64 | return new Promise((resolve, reject) => {
65 | set['value'] = value;
66 | const {
67 | sql,
68 | parameters
69 | } = Database.buildUpdate('UPDATE config', set, where);
70 | this.database.run(sql, parameters, (err, row) => {
71 | if (err) {
72 | return reject(err.message);
73 | }
74 | resolve(row);
75 | });
76 | });
77 | }
78 |
79 | list(where, orderBy, limit) {
80 | return new Promise(resolve => {
81 | const {sql, parameters} = Database.buildQuery('SELECT * FROM config', where, orderBy, limit);
82 | this.database.all(sql, parameters, (err, rows) => {
83 | if (rows) {
84 | rows.forEach(row => {
85 | row['config_name'] = row['config_name'].toUpperCase();
86 | });
87 | }
88 | resolve(rows);
89 | });
90 | });
91 | }
92 | }
93 |
--------------------------------------------------------------------------------
/database/repositories/normalization.js:
--------------------------------------------------------------------------------
1 | export default class Normalization {
2 | constructor(database) {
3 | this.database = database;
4 | this.entries = {};
5 | this.types = {};
6 | }
7 |
8 | load() {
9 | return new Promise((resolve, reject) => {
10 | this.database.all('SELECT * FROM normalization', (err, rows) => {
11 | if (err) {
12 | return reject(err.message);
13 | }
14 |
15 | rows.forEach(row => {
16 | this.entries[row.normalization_name] = row.normalization_id;
17 | this.types[row.normalization_id] = row.normalization_name;
18 | });
19 | resolve();
20 | });
21 | });
22 | }
23 |
24 | get(name) {
25 | return this.entries[name];
26 | }
27 |
28 | getType(id) {
29 | return this.types[id];
30 | }
31 |
32 | }
33 |
--------------------------------------------------------------------------------
/database/repositories/repositories.js:
--------------------------------------------------------------------------------
1 | import Node from './node';
2 | import Keychain from './keychain';
3 | import Config from './config';
4 | import Wallet from './wallet';
5 | import Address from './address';
6 | import Transaction from './transaction';
7 | import Schema from './schema';
8 | import Job from './job';
9 | import API from './api';
10 | import Shard from './shard';
11 | import Normalization from './normalization';
12 |
13 | export {
14 | Node, Keychain, Config, Wallet, Address,
15 | Transaction, Schema, Job, API,
16 | Shard, Normalization
17 | };
18 |
--------------------------------------------------------------------------------
/database/repositories/schema.js:
--------------------------------------------------------------------------------
1 | import {DATABASE_CONNECTION} from '../../core/config/config';
2 | import Migration from '../../scripts/migration/migration';
3 | import {Database} from '../database';
4 |
5 | export default class Schema {
6 | constructor(database) {
7 | this.database = database;
8 | this.baseMigrate = new Migration();
9 | }
10 |
11 | getVersion() {
12 | return new Promise((resolve, reject) => {
13 | this.database.get('SELECT value FROM schema_information WHERE key="version"', (err, row) => {
14 | if (err) {
15 | return reject(err);
16 | }
17 | resolve(row.value);
18 | });
19 | });
20 | }
21 |
22 | get(where) {
23 | const {sql, parameters} = Database.buildQuery('SELECT * FROM schema_information', where);
24 | return new Promise((resolve, reject) => {
25 | this.database.get(sql, parameters, (err, rows) => {
26 | if (err) {
27 | return reject(err);
28 | }
29 | resolve(rows);
30 | });
31 | });
32 | }
33 |
34 | migrate(version, migrationDir) {
35 |
36 | let migrationSQLFile = `${migrationDir}/schema-update-${version}.sql`;
37 | try {
38 | let module;
39 | if (migrationDir.endsWith('shard')) {
40 | module = require('../../scripts/migration/shard/schema-update-' + version + '.js');
41 | }
42 | else {
43 | module = require('../../scripts/migration/schema-update-' + version + '.js');
44 | }
45 | return module.default.migrate(this.database, migrationSQLFile);
46 | }
47 | catch (e) {
48 | return this.baseMigrate.runMigrateScript(this.database, migrationSQLFile);
49 | }
50 |
51 | }
52 | }
53 |
--------------------------------------------------------------------------------
/database/repositories/shard.js:
--------------------------------------------------------------------------------
1 | import {Database} from '../database';
2 | import console from '../../core/console';
3 |
4 | export default class Shard {
5 | constructor(database) {
6 | this.database = database;
7 | }
8 |
9 | listShard(where, orderBy, limit) {
10 | return new Promise((resolve, reject) => {
11 | const {sql, parameters} = Database.buildQuery('SELECT * FROM shard', where, orderBy, limit);
12 | this.database.all(
13 | sql, parameters,
14 | (err, rows) => {
15 | if (err) {
16 | console.log(err);
17 | return reject(err);
18 | }
19 |
20 | resolve(rows);
21 | }
22 | );
23 | });
24 | }
25 |
26 | addShard(shardID, shardName, shardType, schemaName, schemaPath, isRequired, nodeID, shardDate, nodeSignature) {
27 | return new Promise((resolve, reject) => {
28 | this.database.run(
29 | 'INSERT INTO shard (shard_id, shard_name, shard_type, schema_name, schema_path, is_required, node_id_origin, shard_date, node_signature) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)', [
30 | shardID,
31 | shardName,
32 | shardType,
33 | schemaName,
34 | schemaPath,
35 | isRequired,
36 | nodeID,
37 | shardDate,
38 | nodeSignature
39 | ],
40 | (err) => {
41 | if (err) {
42 | return reject(err);
43 | }
44 | resolve();
45 | }
46 | );
47 | });
48 | }
49 |
50 | getShard(where) {
51 | return new Promise((resolve, reject) => {
52 | const {sql, parameters} = Database.buildQuery('SELECT * FROM shard', where);
53 | this.database.get(
54 | sql, parameters,
55 | (err, row) => {
56 | if (err) {
57 | return reject(err);
58 | }
59 |
60 | resolve(row);
61 | }
62 | );
63 | });
64 | }
65 |
66 | updateShardRequired(shardID, isRequired) {
67 | return new Promise((resolve, reject) => {
68 | this.database.run('UPDATE shard SET is_required = ? WHERE shard_id = ?', [
69 | isRequired,
70 | shardID
71 | ], (err) => {
72 | if (err) {
73 | return reject(err);
74 | }
75 | return resolve();
76 | });
77 | });
78 | }
79 |
80 | }
81 |
--------------------------------------------------------------------------------
/database/repositories/wallet.js:
--------------------------------------------------------------------------------
1 | export default class Wallet {
2 | constructor(database) {
3 | this.database = database;
4 | }
5 |
6 | addWallet(walletID, account) {
7 | return new Promise((resolve) => {
8 | this.database.run('INSERT INTO wallet (wallet_id, account) VALUES (?,?)', [
9 | walletID,
10 | account
11 | ], () => {
12 | console.log('addWallet done ' + walletID);
13 | resolve();
14 | });
15 | });
16 | }
17 |
18 | walletExists(walletID) {
19 | return new Promise((resolve) => {
20 | this.database.get('SELECT * FROM wallet WHERE wallet_id=?', [walletID], (err, rows) => {
21 | resolve(rows !== undefined);
22 | });
23 | });
24 | }
25 |
26 | hasWallets() {
27 | return new Promise((resolve) => {
28 | this.database.get('SELECT * FROM wallet', [], (err, rows) => {
29 | resolve(rows !== undefined);
30 | });
31 | });
32 | }
33 |
34 | getWallet(walletID) {
35 | return new Promise((resolve, reject) => {
36 | this.database.get(
37 | 'SELECT * FROM wallet where wallet_id = ?', [walletID],
38 | (err, row) => {
39 | if (err) {
40 | console.log(err);
41 | return reject(err);
42 | }
43 | resolve(row);
44 | }
45 | );
46 | });
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/database/sqlite3/sqlite3-binding.js:
--------------------------------------------------------------------------------
1 | const binding = require('bindings')('node_sqlite3');
2 | module.exports = exports = binding;
3 |
--------------------------------------------------------------------------------
/docker/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:12-alpine
2 |
3 | RUN apk update && \
4 | apk add --no-cache --virtual .build-deps-full \
5 | build-base \
6 | python2 \
7 | curl \
8 | wget \
9 | gcc \
10 | git
11 | RUN git clone https://github.com/millix/millix-node.git -b develop
12 | WORKDIR /millix-node
13 | RUN npm install -g @babel/cli@7.8.4 @babel/core@7.8.4 @babel/node@7.8.4 && \
14 | npm install
15 | ENV MILLIX_NODE_PASSWORD="millixpwd"
16 | ENV MILLIX_NODE_PORT=30000
17 | ENV MILLIX_NODE_PORT_API=5500
18 | ENV MILLIX_NODE_DATA_FOLDER="./data/"
19 | COPY run_node.sh run_node.sh
20 | RUN chmod +x run_node.sh
21 | EXPOSE $MILLIX_NODE_PORT
22 | EXPOSE $MILLIX_NODE_PORT_API
23 | ENTRYPOINT [ "/bin/sh" ]
24 | CMD [ "run_node.sh" ]
25 |
26 |
27 |
--------------------------------------------------------------------------------
/docker/build.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | docker build -t millix/millix-node . && docker run --name millix-node millix/millix-node
3 |
--------------------------------------------------------------------------------
/docker/compose_down.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | COMPOSE_PROJECT_NAME=millix MILLIX_NODE_PORT=30000 MILLIX_NODE_PORT_API=5500 docker-compose down
3 |
--------------------------------------------------------------------------------
/docker/compose_up.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | NODE_SCALE="${1:-1}"
3 | for i in $(seq 1 $NODE_SCALE); do \
4 | COMPOSE_PROJECT_NAME=millix MILLIX_NODE_PORT=$((30000 + $i - 1)) \
5 | MILLIX_NODE_PORT_API=$((5500 + $i - 1)) docker-compose up --no-recreate --scale millix-node=$i -d; \
6 | done
7 |
--------------------------------------------------------------------------------
/docker/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3'
2 |
3 | services:
4 | millix-node:
5 | image: millix/millix-node
6 | build: .
7 | environment:
8 | - MILLIX_NODE_PASSWORD=millixpwd
9 | - MILLIX_NODE_DATA_FOLDER=./data/
10 | - MILLIX_NODE_PORT=${MILLIX_NODE_PORT}
11 | - MILLIX_NODE_PORT_API=${MILLIX_NODE_PORT_API}
12 | ports:
13 | - ${MILLIX_NODE_PORT}
14 | - ${MILLIX_NODE_PORT_API}
15 |
--------------------------------------------------------------------------------
/docker/run_node.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | echo "updating source code..."
3 | git pull
4 | echo "running on localhost:$MILLIX_NODE_PORT"
5 | echo $MILLIX_NODE_PASSWORD|babel-node --inspect=0.0.0.0:30009 --max-old-space-size=8192 index.js --port $MILLIX_NODE_PORT --api-port $MILLIX_NODE_PORT_API --debug --folder $MILLIX_NODE_DATA_FOLDER
6 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "millix-core",
3 | "version": "1.0.0",
4 | "description": "",
5 | "main": "index.js",
6 | "scripts": {
7 | "start": "babel-node index.js",
8 | "debug": "babel-node debug index.js",
9 | "dist": "BABEL_ENV=production webpack --config webpack.prod.config.js",
10 | "test": "echo \"Error: no test specified\" && exit 1"
11 | },
12 | "author": "developer@millix.org",
13 | "license": "MIT",
14 | "devDependencies": {
15 | "@babel/cli": "^7.19.3",
16 | "@babel/compat-data": "^7.18.6",
17 | "@babel/core": "^7.19.6",
18 | "@babel/node": "^7.19.1",
19 | "@babel/plugin-proposal-class-properties": "^7.18.6",
20 | "@babel/plugin-transform-runtime": "^7.18.6",
21 | "@babel/preset-env": "^7.18.6",
22 | "@babel/preset-react": "^7.18.6",
23 | "@babel/runtime": "^7.18.6",
24 | "babel-loader": "^8.2.5",
25 | "babel-plugin-transform-import-meta": "^2.2.0",
26 | "better-queue": "^3.8.10",
27 | "copy-webpack-plugin": "^9.1.0",
28 | "node-uuid": "^1.4.8",
29 | "webpack": "^5.73.0",
30 | "webpack-cli": "^4.10.0"
31 | },
32 | "dependencies": {
33 | "@babel/register": "7.8.3",
34 | "async": "^2.6.2",
35 | "bindings": "^1.5.0",
36 | "bitcore-lib": "^8.1.1",
37 | "bitcore-mnemonic": "^8.1.1",
38 | "bittorrent-dht": "^10.0.0",
39 | "body-parser": "^1.19.0",
40 | "bs58": "^4.0.1",
41 | "bufferutil": "^4.0.1",
42 | "busboy": "^1.6.0",
43 | "cors": "^2.8.5",
44 | "crypto-random-string": "^3.0.1",
45 | "eciesjs": "^0.3.14",
46 | "express": "^4.17.1",
47 | "helmet": "^3.18.0",
48 | "https": "^1.0.0",
49 | "jsonwebtoken": "^8.5.1",
50 | "jsrsasign": "^8.0.15",
51 | "jwks-rsa": "^1.5.1",
52 | "lodash": "^4.17.11",
53 | "moment": "^2.24.0",
54 | "nat-api": "^0.3.1",
55 | "ntp-time-sync": "^0.1.0",
56 | "public-ip": "^4.0.1",
57 | "request": "^2.88.0",
58 | "secp256k1": "^3.6.2",
59 | "socks": "^2.3.2",
60 | "sqlite3": "^5.1.2",
61 | "thirty-two": "^1.0.1",
62 | "utf-8-validate": "^5.0.2",
63 | "winston": "^3.7.2",
64 | "ws": "^6.2.1",
65 | "yargs": "^13.2.2"
66 | }
67 | }
68 |
--------------------------------------------------------------------------------
/run-millix-node.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | echo "running millix node"
3 | npx babel-node --max-old-space-size=2048 index.js
4 |
--------------------------------------------------------------------------------
/scripts/migration/migration.js:
--------------------------------------------------------------------------------
1 | import fs from 'fs';
2 | import console from '../../core/console';
3 |
4 | export default class Migration {
5 | constructor() {
6 | }
7 |
8 | runMigrateScript(db, migrationFile, parameters = {}, checkDatabase = false) {
9 | return new Promise((resolve, reject) => {
10 | fs.readFile(migrationFile, 'utf8', (err, data) => {
11 | if (err) {
12 | return reject(err);
13 | }
14 | data = data.replace(/\?\w+/g, (m) => {
15 | let key = m.substring(1);
16 | return parameters.hasOwnProperty(key) ? parameters[key] : '';
17 | });
18 |
19 | db.exec(data, function(err) {
20 | if (err) {
21 | return reject(err);
22 | }
23 |
24 | if (!checkDatabase) {
25 | return resolve();
26 | }
27 |
28 | db.serialize(() => {
29 | db.run('VACUUM', err => {
30 | if (err) {
31 | console.log('[database] vacuum error', err);
32 | }
33 | else {
34 | console.log('[database] vacuum success');
35 | }
36 | });
37 | db.run('PRAGMA wal_checkpoint(TRUNCATE)', err => {
38 | if (err) {
39 | console.log('[database] wal_checkpoint error', err);
40 | }
41 | else {
42 | console.log('[database] wal_checkpoint success');
43 | }
44 | });
45 | db.run('PRAGMA optimize', err => {
46 | if (err) {
47 | console.log('[database] optimize error', err);
48 | }
49 | else {
50 | console.log('[database] optimize success');
51 | }
52 | resolve();
53 | });
54 | });
55 | });
56 | });
57 | });
58 | }
59 | }
60 |
--------------------------------------------------------------------------------
/scripts/migration/schema-update-1.sql:
--------------------------------------------------------------------------------
1 | CREATE TABLE schema_information
2 | (
3 | key TEXT NOT NULL UNIQUE,
4 | value TEXT NOT NULL,
5 | status TINYINT NOT NULL DEFAULT 1 CHECK (length(status) <= 3 AND TYPEOF(status) = 'integer'),
6 | create_date INT NOT NULL DEFAULT (CAST(strftime('%s', 'now') AS INTEGER)) CHECK(length(create_date) <= 10 AND TYPEOF(create_date) = 'integer')
7 | );
8 | CREATE INDEX idx_schema_information_create_date ON schema_information (create_date);
9 |
10 | CREATE TABLE address_version
11 | (
12 | version CHAR(4) NOT NULL UNIQUE CHECK (length(version) <= 4),
13 | is_main_network TINYINT NOT NULL DEFAULT 1 CHECK (is_main_network = 0 OR is_main_network = 1),
14 | regex_pattern TEXT NOT NULL,
15 | is_default TINYINT NOT NULL DEFAULT 0 CHECK (is_default = 0 OR is_default = 1),
16 | status TINYINT NOT NULL DEFAULT 1 CHECK (length(status) <= 3 AND TYPEOF(status) = 'integer'),
17 | create_date INT NOT NULL DEFAULT (CAST(strftime('%s', 'now') AS INTEGER)) CHECK(length(create_date) <= 10 AND TYPEOF(create_date) = 'integer')
18 | );
19 | CREATE INDEX idx_address_version_create_date ON address_version (create_date);
20 |
21 | INSERT INTO schema_information (key, value)
22 | VALUES ("version", "1");
23 |
24 | INSERT INTO address_version(version, is_main_network, is_default, regex_pattern)
25 | VALUES ("0a0", 1, 1, "(?.*)(?0a0)(?.*)"),
26 | ("0b0", 1, 0, "(?.*)(?0b0)(?.*)"),
27 | ("lal", 0, 1, "(?.*)(?lal)(?.*)"),
28 | ("la0l", 0, 1, "(?.*)(?la0l)(?.*)"),
29 | ("lb0l", 0, 0, "(?.*)(?lb0l)(?.*)");
30 |
--------------------------------------------------------------------------------
/scripts/migration/schema-update-10.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | DELETE FROM node_attribute;
6 |
7 | DROP TABLE IF EXISTS node;
8 |
9 | CREATE TABLE node
10 | (
11 | node_id CHAR(34) NOT NULL PRIMARY KEY CHECK (length(node_id) <= 34),
12 | node_prefix CHAR(10) NOT NULL CHECK (length(node_prefix) <= 10),
13 | node_address CHAR(45) NOT NULL CHECK (length(node_address) <= 45),
14 | node_port INT NOT NULL CHECK (length(node_port) <= 10 AND TYPEOF(node_port) = 'integer'),
15 | node_port_api INT NOT NULL CHECK (length(node_port_api) <= 10 AND TYPEOF(node_port_api) = 'integer'),
16 | status TINYINT NOT NULL DEFAULT 1 CHECK (length(status) <= 3 AND TYPEOF(status) = 'integer'),
17 | update_date INT NOT NULL DEFAULT (CAST(strftime('%s', 'now') AS INTEGER)) CHECK(length(update_date) <= 10 AND TYPEOF(update_date) = 'integer'),
18 | create_date INT NOT NULL DEFAULT (CAST(strftime('%s', 'now') AS INTEGER)) CHECK(length(create_date) <= 10 AND TYPEOF(create_date) = 'integer')
19 | );
20 | CREATE INDEX idx_node_create_date ON node (create_date);
21 |
22 | INSERT OR IGNORE INTO normalization (normalization_name, normalization_id)
23 | VALUES ('node_key_public', 'GKj5UNJmpx5qCGQnaJjA'),
24 | ('node_bind_ip', 'Apw9ovpclfW6LvSVYqYD');
25 |
26 | UPDATE config SET config_id = (SELECT normalization_id FROM normalization WHERE normalization_name = config_name) WHERE config_name IN (SELECT normalization_name FROM normalization);
27 | UPDATE node_attribute SET attribute_type_id = coalesce((SELECT normalization_id FROM normalization AS N WHERE normalization_name = (SELECT attribute_type FROM node_attribute_type WHERE attribute_type_id = node_attribute.attribute_type_id)), attribute_type_id);
28 | UPDATE node_attribute_type SET attribute_type_id = (SELECT normalization_id FROM normalization WHERE normalization_name = attribute_type) WHERE attribute_type IN (SELECT normalization_name FROM normalization);
29 | UPDATE address_attribute SET address_attribute_type_id = coalesce((SELECT normalization_id FROM normalization WHERE normalization_name = (SELECT attribute_type FROM address_attribute_type WHERE address_attribute_type_id = address_attribute.address_attribute_type_id)), address_attribute_type_id);
30 | UPDATE address_attribute_type SET address_attribute_type_id = (SELECT normalization_id FROM normalization WHERE normalization_name = attribute_type) WHERE attribute_type IN (SELECT normalization_name FROM normalization);
31 |
32 | UPDATE schema_information SET value = "10" WHERE key = "version";
33 |
34 | COMMIT;
35 |
--------------------------------------------------------------------------------
/scripts/migration/schema-update-11.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | INSERT OR IGNORE INTO normalization (normalization_name, normalization_id)
6 | VALUES ('address_default', 'T4CefCfUyoc4CWv7cZ5V'),
7 | ('node_about', 'ijDj2VlTyJBl5R4iTCmG'),
8 | ('peer_connection', '8FPirjQYaFIEIF2y7OEA');
9 |
10 | DELETE FROM node_attribute;
11 | DELETE FROM node_attribute_type;
12 |
13 | CREATE TABLE new_transaction_input
14 | (
15 | transaction_id CHAR(50) NOT NULL CHECK (length(transaction_id) <= 50),
16 | shard_id CHAR(50) NOT NULL CHECK (length(shard_id) <= 50),
17 | input_position TINYINT NOT NULL CHECK (length(input_position) <= 3 AND TYPEOF(input_position) = 'integer'),
18 | output_transaction_id CHAR(50) NULL CHECK (length(output_transaction_id) <= 50),
19 | output_position TINYINT NULL CHECK(length(output_position) <= 3 AND TYPEOF(output_position) IN ('integer', 'null')),
20 | output_shard_id CHAR(50) NULL CHECK (length(output_shard_id) <= 50),
21 | output_transaction_date INT NULL CHECK(length(output_transaction_date) <= 10 AND TYPEOF(output_transaction_date) IN ('integer', 'null')),
22 | double_spend_date INT NULL CHECK(length(double_spend_date) <= 10 AND TYPEOF(double_spend_date) IN ('integer', 'null')),
23 | is_double_spend TINYINT NULL DEFAULT 0 CHECK (is_double_spend = 0 OR is_double_spend = 1 OR is_double_spend IS NULL),
24 | address CHAR(72) NULL CHECK (length(address) <= 72),
25 | address_key_identifier CHAR(34) NULL CHECK (length(address_key_identifier) <= 34),
26 | status TINYINT NOT NULL DEFAULT 1 CHECK (length(status) <= 3 AND TYPEOF(status) = 'integer'),
27 | create_date INT NOT NULL DEFAULT (CAST(strftime('%s', 'now') AS INTEGER)) CHECK(length(create_date) <= 10 AND TYPEOF(create_date) = 'integer'),
28 | PRIMARY KEY (transaction_id, input_position),
29 | FOREIGN KEY (transaction_id) REFERENCES `transaction` (transaction_id),
30 | FOREIGN KEY (address, address_key_identifier) REFERENCES address (address, address_key_identifier)
31 | );
32 | INSERT INTO new_transaction_input SELECT * FROM transaction_input;
33 | DROP TABLE transaction_input;
34 | ALTER TABLE new_transaction_input RENAME TO transaction_input;
35 |
36 | CREATE INDEX idx_transaction_input_address_key_identifier ON transaction_input (address_key_identifier);
37 | CREATE INDEX idx_transaction_input_address_is_double_spend ON transaction_input (address, is_double_spend);
38 | CREATE INDEX idx_transaction_input_transaction_id ON transaction_input (transaction_id);
39 | CREATE INDEX idx_transaction_input_output_transaction_id_output_position ON transaction_input (output_transaction_id, output_position);
40 | CREATE INDEX idx_transaction_input_create_date ON transaction_input (create_date);
41 |
42 | UPDATE schema_information SET value = "11" WHERE key = "version";
43 |
44 | COMMIT;
45 |
--------------------------------------------------------------------------------
/scripts/migration/schema-update-13.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | INSERT OR IGNORE INTO normalization (normalization_name, normalization_id)
6 | VALUES ('node_public_key', 'KkwWuh5VaHBYlk8lsduD'),
7 | ('peer_count', 'OfhGqiGJID8WTOZHzl2b'),
8 | ('shard_protocol', 'kbkMAkuyqOlSNKv7udFz'),
9 | ('transaction_count', 'qhTfPzLhZENklxNbTQYW');
10 |
11 | UPDATE node_attribute SET attribute_type_id = coalesce((SELECT normalization_id FROM normalization WHERE normalization_name = (SELECT attribute_type FROM node_attribute_type WHERE attribute_type_id = node_attribute.attribute_type_id)), attribute_type_id);
12 | UPDATE node_attribute_type SET attribute_type_id = (SELECT normalization_id FROM normalization WHERE normalization_name = attribute_type) WHERE attribute_type IN (SELECT normalization_name FROM normalization);
13 |
14 | UPDATE schema_information SET value = "13" WHERE key = "version";
15 |
16 | COMMIT;
17 |
--------------------------------------------------------------------------------
/scripts/migration/schema-update-14.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | INSERT OR REPLACE INTO normalization (normalization_name, normalization_id)
6 | VALUES ('transaction_fee_proxy', 'qTCYsDQzIoVbaX8iIjry'),
7 | ('transaction_fee_network', '9hJcCunmEibhDgoLHzC8'),
8 | ('transaction_fee_default', 'eoSDGGFKD3dYfcKF1nFO');
9 |
10 | UPDATE config SET config_name='transaction_fee_proxy', config_id='qTCYsDQzIoVbaX8iIjry', value='1000' WHERE config_name='transaction_fee_proxy';
11 | UPDATE config SET config_name='transaction_fee_network', config_id='9hJcCunmEibhDgoLHzC8', value='0' WHERE config_name='transaction_fee_network';
12 | UPDATE config SET config_name='transaction_fee_default', config_id='eoSDGGFKD3dYfcKF1nFO', value='1000' WHERE config_name='transaction_fee_default';
13 |
14 | UPDATE schema_information SET value = "14" WHERE key = "version";
15 |
16 | COMMIT;
17 |
--------------------------------------------------------------------------------
/scripts/migration/schema-update-15.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | DROP INDEX IF EXISTS idx_transaction_input_status_output_transaction_id;
6 | DROP INDEX IF EXISTS idx_transaction_output_address_key_identifier_is_stable_is_spent_status;
7 | DROP INDEX IF EXISTS idx_transaction_output_address_key_identifier_spent_double_spend_status;
8 | CREATE INDEX idx_transaction_input_status_output_transaction_id ON transaction_input (status, output_transaction_id);
9 | CREATE INDEX idx_transaction_output_address_key_identifier_is_stable_is_spent_status ON transaction_output (address_key_identifier, is_stable, is_spent, status);
10 | CREATE INDEX idx_transaction_output_address_key_identifier_spent_double_spend_status ON transaction_output (address_key_identifier, is_spent, is_double_spend, status);
11 |
12 | UPDATE schema_information SET value = "15" WHERE key = "version";
13 |
14 | COMMIT;
15 |
--------------------------------------------------------------------------------
/scripts/migration/schema-update-16.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | DROP TABLE IF EXISTS audit_verification;
6 | DROP TABLE IF EXISTS audit_point;
7 |
8 | DELETE FROM normalization WHERE normalization_name IN ('audit_point_node_count', 'audit_point_validation_required',
9 | 'audit_point_attempt_max', 'audit_point_candidate_max',
10 | 'audit_point_validation_wait_time_max', 'audit_point_prune_age_min' ,
11 | 'audit_point_prune_count', 'audit_point_transaction_prune_age_min',
12 | 'audit_point_transaction_prune_count', 'wallet_spent_transaction_prune');
13 | DELETE FROM api WHERE api_id IN ('DBkGHZX6rugdLon9', 'VrLU9f2XYiMWfop4');
14 |
15 | UPDATE schema_information SET value = "16" WHERE key = "version";
16 |
17 | COMMIT;
18 |
--------------------------------------------------------------------------------
/scripts/migration/schema-update-17.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | CREATE INDEX IF NOT EXISTS idx_transaction_output_transaction_id_address_key_identifier ON transaction_output (transaction_id, address_key_identifier);
6 |
7 | UPDATE schema_information SET value = "17" WHERE key = "version";
8 |
9 | COMMIT;
10 |
--------------------------------------------------------------------------------
/scripts/migration/schema-update-18.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | UPDATE schema_information SET value = "18" WHERE key = "version";
6 |
7 | DELETE FROM config;
8 |
9 | COMMIT;
10 |
--------------------------------------------------------------------------------
/scripts/migration/schema-update-19.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | INSERT OR REPLACE INTO normalization (normalization_name, normalization_id)
6 | VALUES ('transaction_output_metadata', 'Adl87cz8kC190Nqc');
7 |
8 | UPDATE schema_information SET value = "19" WHERE key = "version";
9 |
10 | COMMIT;
11 |
--------------------------------------------------------------------------------
/scripts/migration/schema-update-2.sql:
--------------------------------------------------------------------------------
1 | UPDATE schema_information SET value = "2" WHERE key = "version";
2 |
3 | CREATE TABLE api
4 | (
5 | api_id CHAR(16) NOT NULL UNIQUE CHECK (length(api_id) <= 16),
6 | name CHAR(255) NOT NULL CHECK (length(name) <= 255),
7 | description CHAR(255) NOT NULL CHECK (length(description) <= 255),
8 | method CHAR(10) NOT NULL CHECK (length(method) <= 10),
9 | version_released CHAR(10) NOT NULL CHECK (length(version_released) <= 10),
10 | version_deprecated CHAR(10) NULL CHECK (length(version_deprecated) <= 10),
11 | version_removed CHAR(10) NULL CHECK (length(version_removed) <= 10),
12 | permission TEXT NOT NULL DEFAULT "true",
13 | status TINYINT NOT NULL DEFAULT 1 CHECK (length(status) <= 3 AND TYPEOF(status) = 'integer'),
14 | create_date INT NOT NULL DEFAULT (CAST(strftime('%s', 'now') AS INTEGER)) CHECK (length(create_date) <= 10 AND TYPEOF(create_date) = 'integer')
15 | );
16 | CREATE INDEX idx_api_create_date ON api (create_date);
17 |
--------------------------------------------------------------------------------
/scripts/migration/schema-update-20.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | INSERT OR REPLACE INTO address_version ('version', 'is_main_network', 'regex_pattern', 'is_default')
6 | VALUES ('0c0', 1, '(?<address>.*)(?<version>0c0)(?<identifier>.*)', 0);
7 |
8 | INSERT OR REPLACE INTO address_version ('version', 'is_main_network', 'regex_pattern', 'is_default')
9 | VALUES ('lcl', 0, '(?<address>.*)(?<version>lcl)(?<identifier>.*)', 0);
10 |
11 | UPDATE schema_information SET value = "20" WHERE key = "version";
12 |
13 | COMMIT;
14 |
--------------------------------------------------------------------------------
/scripts/migration/schema-update-21.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | CREATE INDEX IF NOT EXISTS idx_transaction_output_address_key_identifier_create_date ON transaction_output (address_key_identifier, create_date);
6 | UPDATE schema_information SET value = "21" WHERE key = "version";
7 |
8 | COMMIT;
9 |
--------------------------------------------------------------------------------
/scripts/migration/schema-update-22.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | INSERT OR IGNORE INTO normalization (normalization_name, normalization_id)
6 | VALUES ('wallet_aggregation_auto_enabled', 'n2aXBpCWhSVHx8kl8lwj'),
7 | ('wallet_aggregation_auto_output_min', 'Q1Ok1vhMqDsKNrADxbhh');
8 |
9 | UPDATE schema_information SET value = "22" WHERE key = "version";
10 |
11 | COMMIT;
12 |
--------------------------------------------------------------------------------
/scripts/migration/schema-update-23.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | INSERT OR REPLACE INTO address_version ('version', 'is_main_network', 'regex_pattern', 'is_default')
6 | VALUES ('0d0', 1, '(?<address>.*)(?<version>0d0)(?<identifier>.*)', 0);
7 |
8 | INSERT OR REPLACE INTO address_version ('version', 'is_main_network', 'regex_pattern', 'is_default')
9 | VALUES ('ldl', 0, '(?<address>.*)(?<version>ldl)(?<identifier>.*)', 0);
10 |
11 | UPDATE schema_information SET value = "23" WHERE key = "version";
12 |
13 | COMMIT;
14 |
--------------------------------------------------------------------------------
/scripts/migration/schema-update-3.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys=off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | UPDATE schema_information SET value = "3" WHERE key = "version";
6 |
7 | ALTER TABLE node RENAME TO _node_old;
8 |
9 | CREATE TABLE node
10 | (
11 | node_id CHAR(34) NULL CHECK (length(node_id) <= 34),
12 | node_prefix CHAR(10) NOT NULL CHECK (length(node_prefix) <= 10),
13 | node_ip_address CHAR(45) NOT NULL CHECK (length(node_ip_address) <= 45),
14 | node_port INT NOT NULL CHECK (length(node_port) <= 10 AND TYPEOF(node_port) = 'integer'),
15 | node_port_api INT NOT NULL CHECK (length(node_port_api) <= 10 AND TYPEOF(node_port_api) = 'integer'),
16 | status TINYINT NOT NULL DEFAULT 1 CHECK (length(status) <= 3 AND TYPEOF(status) = 'integer'),
17 | update_date INT NOT NULL DEFAULT (CAST(strftime('%s', 'now') AS INTEGER)) CHECK(length(update_date) <= 10 AND TYPEOF(update_date) = 'integer'),
18 | create_date INT NOT NULL DEFAULT (CAST(strftime('%s', 'now') AS INTEGER)) CHECK(length(create_date) <= 10 AND TYPEOF(create_date) = 'integer'),
19 | PRIMARY KEY (node_prefix, node_ip_address, node_port)
20 | );
21 | CREATE INDEX idx_node_create_date ON node (create_date);
22 |
23 | INSERT INTO node (node_id, node_prefix, node_ip_address, node_port, node_port_api, status, update_date, create_date)
24 | SELECT node_id, "wss://", node_ip_address, node_port, 0, status, update_date, create_date
25 | FROM _node_old;
26 |
27 | DROP TABLE _node_old;
28 |
29 | ALTER TABLE node_attribute RENAME TO _node_attribute_old;
30 |
31 | CREATE TABLE node_attribute
32 | (
33 | node_id CHAR(34) NOT NULL CHECK (length(node_id) <= 34),
34 | attribute_type_id CHAR(20) NOT NULL CHECK (length(attribute_type_id) <= 20),
35 | value TEXT NOT NULL,
36 | status TINYINT NOT NULL DEFAULT 1 CHECK (length(status) <= 3 AND TYPEOF(status) = 'integer'),
37 | create_date INT NOT NULL DEFAULT (CAST(strftime('%s', 'now') AS INTEGER)) CHECK(length(create_date) <= 10 AND TYPEOF(create_date) = 'integer'),
38 | PRIMARY KEY (node_id, attribute_type_id),
39 | FOREIGN KEY (node_id) REFERENCES node (node_id),
40 | FOREIGN KEY (attribute_type_id) REFERENCES node_attribute_type (attribute_type_id)
41 | );
42 | CREATE INDEX idx_node_attribute_create_date ON node_attribute (create_date);
43 |
44 | INSERT INTO node_attribute (node_id, attribute_type_id, value, status, create_date)
45 | SELECT node_id, attribute_type_id, value, status, create_date
46 | FROM _node_attribute_old;
47 |
48 | DROP TABLE _node_attribute_old;
49 |
50 | DELETE FROM api;
51 | DELETE FROM config where config_name = 'node_initial_list';
52 | UPDATE config SET value = 'wss://' where config_name = 'websocket_protocol';
53 | COMMIT;
54 |
--------------------------------------------------------------------------------
/scripts/migration/schema-update-4.js:
--------------------------------------------------------------------------------
1 | import fs from 'fs';
2 | import path from 'path';
3 | import os from 'os';
4 | import config from '../../core/config/config';
5 | import Migration from './migration';
6 | import genesisConfig from '../../core/genesis/genesis-config';
7 |
8 | export default new (class Migrate extends Migration {
9 |
10 | migrate(db, migrationFile) {
11 | return new Promise((resolve, reject) => {
12 | let databaseFile = path.join(config.DATABASE_CONNECTION.FOLDER, config.DATABASE_CONNECTION.FILENAME_MILLIX);
13 | let shardFolder = path.join(config.DATABASE_CONNECTION.FOLDER, 'shard/');
14 |
15 | if (!fs.existsSync(shardFolder)) {
16 | fs.mkdirSync(shardFolder);
17 | }
18 |
19 | fs.copyFile(databaseFile, shardFolder + genesisConfig.genesis_shard_id + '.sqlite', (err) => {
20 | if (err) {
21 | return reject(err); // reject so the wrapping promise settles instead of throwing from the fs callback
22 | }
23 | this.runMigrateScript(db, migrationFile, {
24 | shard_id : genesisConfig.genesis_shard_id,
25 | shard_name : 'genesis',
26 | shard_type : 'protocol',
27 | schema_name : genesisConfig.genesis_shard_id + '.sqlite',
28 | schema_path : shardFolder,
29 | node_id_origin: 'mzPPDwP9BJvHXyvdoBSJJsCQViRTtPbcqA',
30 | shard_date : 1579648257,
31 | node_signature: '66n8CxBweCDRZWdvrg9caX7ckCh3Bgz5eDsJQtKYDbgVSAnRZMHCp41dnD4P1gvc6fjocFRhxDDWwtNh8JtpDpbE'
32 | }).then(() => resolve())
33 | .catch((e) => reject(e));
34 | });
35 | });
36 | }
37 | });
38 |
--------------------------------------------------------------------------------
/scripts/migration/schema-update-4.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | UPDATE schema_information SET value = "4" WHERE key = "version";
6 |
7 | CREATE TABLE shard
8 | (
9 | shard_id CHAR(50) NOT NULL PRIMARY KEY CHECK (length(shard_id) <= 50),
10 | shard_name CHAR(255) NOT NULL CHECK (length(shard_name) <= 255),
11 | shard_type CHAR(255) NOT NULL CHECK (length(shard_type) <= 255),
12 | schema_name CHAR(255) NOT NULL CHECK (length(schema_name) <= 255),
13 | schema_path CHAR(1024) NOT NULL CHECK (length(schema_path) <= 1024),
14 | is_required TINYINT NOT NULL DEFAULT 1 CHECK (is_required = 0 OR is_required = 1),
15 | record_count INT NOT NULL DEFAULT 0 CHECK (length(record_count) <= 3 AND TYPEOF(record_count) = 'integer'),
16 | disk_size INT NOT NULL DEFAULT 0 CHECK (length(disk_size) <= 3 AND TYPEOF(disk_size) = 'integer'),
17 | node_id_origin CHAR(34) NOT NULL CHECK (length(node_id_origin) <= 34),
18 | shard_date INT NOT NULL CHECK(length(shard_date) <= 10 AND TYPEOF(shard_date) = 'integer'),
19 | node_signature CHAR(88) NOT NULL CHECK (length(node_signature) <= 88),
20 | update_date INT NOT NULL DEFAULT (CAST(strftime('%s', 'now') AS INTEGER)) CHECK(length(update_date) <= 10 AND TYPEOF(update_date) = 'integer'),
21 | status TINYINT NOT NULL DEFAULT 1 CHECK (length(status) <= 3 AND TYPEOF(status) = 'integer'),
22 | create_date INT NOT NULL DEFAULT (CAST(strftime('%s', 'now') AS INTEGER)) CHECK(length(create_date) <= 10 AND TYPEOF(create_date) = 'integer')
23 | );
24 | CREATE INDEX idx_shard_create_date ON shard (create_date);
25 |
26 | INSERT INTO shard (shard_id, shard_name, shard_type, schema_name, schema_path, node_id_origin, shard_date, node_signature) VALUES ("?shard_id", "?shard_name", "?shard_type", "?schema_name", "?schema_path", "?node_id_origin", ?shard_date, "?node_signature");
27 |
28 | CREATE TABLE shard_attribute_type
29 | (
30 | attribute_type_id CHAR(20) NOT NULL PRIMARY KEY CHECK (length(attribute_type_id) <= 20),
31 | attribute_type CHAR(255) NOT NULL CHECK (length(attribute_type) <= 255),
32 | status TINYINT NOT NULL DEFAULT 1 CHECK (length(status) <= 3 AND TYPEOF(status) = 'integer'),
33 | create_date INT NOT NULL DEFAULT (CAST(strftime('%s', 'now') AS INTEGER)) CHECK(length(create_date) <= 10 AND TYPEOF(create_date) = 'integer')
34 | );
35 | CREATE INDEX idx_shard_attribute_type_create_date ON shard_attribute_type (create_date);
36 |
37 | CREATE TABLE shard_attribute
38 | (
39 | shard_id CHAR(50) NOT NULL CHECK (length(shard_id) <= 50),
40 | attribute_type_id CHAR(20) NOT NULL CHECK (length(attribute_type_id) <= 20),
41 | value TEXT NOT NULL,
42 | status SMALLINT NOT NULL DEFAULT 1 CHECK (length(status) <= 3 AND TYPEOF(status) = 'integer'),
43 | create_date INT NOT NULL DEFAULT (CAST(strftime('%s', 'now') AS INTEGER)) CHECK(length(create_date) <= 10 AND TYPEOF(create_date) = 'integer'),
44 | PRIMARY KEY (shard_id, attribute_type_id),
45 | FOREIGN KEY (shard_id) REFERENCES shard (shard_id),
46 | FOREIGN KEY (attribute_type_id) REFERENCES shard_attribute_type (attribute_type_id)
47 | );
48 | CREATE INDEX idx_shard_attribute_create_date ON shard_attribute (create_date);
49 |
50 | DROP TABLE IF EXISTS `transaction`;
51 | DROP TABLE IF EXISTS transaction_parent;
52 | DROP TABLE IF EXISTS transaction_signature;
53 | DROP TABLE IF EXISTS transaction_input;
54 | DROP TABLE IF EXISTS transaction_output;
55 | DROP TABLE IF EXISTS transaction_output_attribute;
56 | DROP TABLE IF EXISTS audit_verification;
57 | DROP TABLE IF EXISTS audit_point;
58 |
59 | COMMIT;
60 |
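The "?shard_id"-style tokens in the INSERT above are not SQLite bind parameters: they are plain-text placeholders that runMigrateScript fills in with the parameter object passed from schema-update-4.js before the script runs. A minimal sketch of that substitution step, assuming the Migration base class performs a simple token replacement (applyMigrationParameters is a hypothetical name, not the project's actual helper):

// Hypothetical helper: replace "?key" tokens in a migration script with the
// values supplied by the .js driver (e.g. {shard_id: ..., shard_name: 'genesis', ...}).
function applyMigrationParameters(sql, parameters = {}) {
    return Object.keys(parameters)
                 .reduce((script, key) => script.split('?' + key).join(String(parameters[key])), sql);
}

// usage sketch: read the .sql file, substitute, then hand the result to sqlite3's exec()
// const script = applyMigrationParameters(fs.readFileSync(migrationFile, 'utf8'),
//                                         {shard_id: genesisConfig.genesis_shard_id /* ... */});
// db.exec(script, (err) => err ? reject(err) : resolve());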
--------------------------------------------------------------------------------
/scripts/migration/schema-update-7.js:
--------------------------------------------------------------------------------
1 | import Migration from './migration';
2 | import walletUtils from '../../core/wallet/wallet-utils';
3 | import base58 from 'bs58';
4 |
5 | export default new (class Migrate extends Migration {
6 |
7 | migrate(db, migrationFile) {
8 | return new Promise((resolve, reject) => {
9 | walletUtils.loadNodeKeyAndCertificate()
10 | .then(({node_public_key: publicKey}) => walletUtils.getNodeIdFromPublicKey(base58.encode(publicKey.toBuffer())))
11 | .then((nodeID) => this.runMigrateScript(db, migrationFile, {node_id: nodeID}))
12 | .then(() => resolve())
13 | .catch((e) => reject(e));
14 |
15 | });
16 | }
17 | });
18 |
--------------------------------------------------------------------------------
/scripts/migration/schema-update-7.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | UPDATE schema_information SET value = "7" WHERE key = "version";
6 | INSERT INTO node_attribute_type (attribute_type_id, attribute_type) VALUES ('useBqrZ9F8Gv6aVH85pB', 'peer_rotation_settings');
7 | INSERT INTO node_attribute (node_id, attribute_type_id, value) VALUES ('?node_id', 'useBqrZ9F8Gv6aVH85pB', '{"PROACTIVE": { "frequency": 0.7, "DATA_QUANTITY": { "frequency": 0.25, "random_set_length": "PEER_ROTATION_MORE_THAN_AVERAGE" }, "POPULARITY": { "frequency": 0.25, "random_set_length": "PEER_ROTATION_MORE_THAN_AVERAGE" }, "RANDOM": { "frequency": 0.5 } }, "REACTIVE": { "frequency": 0.3 } }');
8 | COMMIT;
9 |
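schema-update-7.js above derives the local node ID at runtime and feeds it into the companion SQL file as the ?node_id placeholder, just as schema-update-4.js supplies the shard parameters. Versions that need no runtime values ship only a .sql file. A minimal sketch of how a runner could dispatch between the two forms (assumed behaviour; runPendingMigrations and runSqlFile are illustrative names, not the project's actual runner):

const fs = require('fs');
const path = require('path');

// Hypothetical helper: execute a plain migration script against a sqlite3 Database instance.
function runSqlFile(db, file) {
    return new Promise((resolve, reject) =>
        db.exec(fs.readFileSync(file, 'utf8'), (err) => err ? reject(err) : resolve()));
}

// Hypothetical runner: prefer the .js driver when one exists for a version, otherwise run the .sql directly.
async function runPendingMigrations(db, migrationDir, fromVersion, toVersion) {
    for (let version = fromVersion + 1; version <= toVersion; version++) {
        const sqlFile = path.join(migrationDir, `schema-update-${version}.sql`);
        const jsFile  = path.join(migrationDir, `schema-update-${version}.js`);
        if (fs.existsSync(jsFile)) {
            const driver = require(jsFile).default || require(jsFile);
            await driver.migrate(db, sqlFile);
        }
        else {
            await runSqlFile(db, sqlFile);
        }
    }
}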
--------------------------------------------------------------------------------
/scripts/migration/schema-update-8.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | UPDATE schema_information SET value = "8" WHERE key = "version";
6 |
7 | INSERT OR IGNORE INTO normalization (normalization_name, normalization_id)
8 | VALUES ('peer_rotation_more_than_average', 'Z2z0wVCm6Ai1p7OG4MfN'),
9 | ('peer_rotation_more_than_most', 'hVEmlU6bL4l3DNeOhdM3'),
10 | ('peer_rotation_more_than_all', 'wpwt2V5vrT28ngz9u3J3'),
11 | ('peer_rotation_config', 'H2ODFHCxOl1FErIqCDqG'),
12 | ('shard_zero_name', 'rMSuKEh42OZaeVEgzG62');
13 |
14 | UPDATE config SET config_id = (SELECT normalization_id FROM normalization WHERE normalization_name = config_name) WHERE config_name IN (SELECT normalization_name FROM normalization);
15 | UPDATE node_attribute SET attribute_type_id = coalesce((SELECT normalization_id FROM normalization AS N WHERE normalization_name = (SELECT attribute_type FROM node_attribute_type WHERE attribute_type_id = node_attribute.attribute_type_id)), attribute_type_id);
16 | UPDATE node_attribute_type SET attribute_type_id = (SELECT normalization_id FROM normalization WHERE normalization_name = attribute_type) WHERE attribute_type IN (SELECT normalization_name FROM normalization);
17 |
18 | COMMIT;
19 |
--------------------------------------------------------------------------------
/scripts/migration/shard/schema-update-10.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | UPDATE schema_information SET value = "10" WHERE key = "version";
6 |
7 | COMMIT;
8 |
--------------------------------------------------------------------------------
/scripts/migration/shard/schema-update-11.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | CREATE TABLE new_transaction_input
6 | (
7 | transaction_id CHAR(50) NOT NULL CHECK (length(transaction_id) <= 50),
8 | shard_id CHAR(50) NOT NULL CHECK (length(shard_id) <= 50),
9 | input_position TINYINT NOT NULL CHECK (length(input_position) <= 3 AND TYPEOF(input_position) = 'integer'),
10 | output_transaction_id CHAR(50) NULL CHECK (length(output_transaction_id) <= 50),
11 | output_position TINYINT NULL CHECK(length(output_position) <= 3 AND TYPEOF(output_position) IN ('integer', 'null')),
12 | output_shard_id CHAR(50) NULL CHECK (length(output_shard_id) <= 50),
13 | output_transaction_date INT NULL CHECK(length(output_transaction_date) <= 10 AND TYPEOF(output_transaction_date) IN ('integer', 'null')),
14 | double_spend_date INT NULL CHECK(length(double_spend_date) <= 10 AND TYPEOF(double_spend_date) IN ('integer', 'null')),
15 | is_double_spend TINYINT NULL DEFAULT 0 CHECK (is_double_spend = 0 OR is_double_spend = 1 OR is_double_spend IS NULL),
16 | address CHAR(72) NULL CHECK (length(address) <= 72),
17 | address_key_identifier CHAR(34) NULL CHECK (length(address_key_identifier) <= 34),
18 | status TINYINT NOT NULL DEFAULT 1 CHECK (length(status) <= 3 AND TYPEOF(status) = 'integer'),
19 | create_date INT NOT NULL DEFAULT (CAST(strftime('%s', 'now') AS INTEGER)) CHECK(length(create_date) <= 10 AND TYPEOF(create_date) = 'integer'),
20 | PRIMARY KEY (transaction_id, input_position),
21 | FOREIGN KEY (transaction_id) REFERENCES `transaction` (transaction_id),
22 | FOREIGN KEY (address, address_key_identifier) REFERENCES address (address, address_key_identifier)
23 | );
24 | INSERT INTO new_transaction_input SELECT * FROM transaction_input;
25 | DROP TABLE transaction_input;
26 | ALTER TABLE new_transaction_input RENAME TO transaction_input;
27 |
28 | CREATE INDEX idx_transaction_input_address_key_identifier ON transaction_input (address_key_identifier);
29 | CREATE INDEX idx_transaction_input_address_is_double_spend ON transaction_input (address, is_double_spend);
30 | CREATE INDEX idx_transaction_input_transaction_id ON transaction_input (transaction_id);
31 | CREATE INDEX idx_transaction_input_output_transaction_id_output_position ON transaction_input (output_transaction_id, output_position);
32 | CREATE INDEX idx_transaction_input_create_date ON transaction_input (create_date);
33 |
34 | UPDATE schema_information SET value = "11" WHERE key = "version";
35 |
36 | COMMIT;
37 |
--------------------------------------------------------------------------------
/scripts/migration/shard/schema-update-12.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | CREATE TABLE new_transaction
6 | (
7 | transaction_id CHAR(50) NOT NULL PRIMARY KEY CHECK (length(transaction_id) <= 50),
8 | shard_id CHAR(50) NOT NULL CHECK (length(shard_id) <= 50),
9 | transaction_date INT NOT NULL CHECK (length(transaction_date) <= 10 AND TYPEOF(transaction_date) = 'integer'),
10 | node_id_origin CHAR(34) NOT NULL CHECK (length(node_id_origin) <= 34),
11 | node_id_proxy CHAR(34) NULL CHECK (length(node_id_proxy) <= 34),
12 | version CHAR(4) NOT NULL DEFAULT '0a0' CHECK (length(version) <= 4),
13 | payload_hash CHAR(50) NOT NULL CHECK (length(payload_hash) <= 50),
14 | stable_date INT NULL CHECK (length(stable_date) <= 10 AND (TYPEOF(stable_date) IN ('integer', 'null'))),
15 | is_stable TINYINT NOT NULL DEFAULT 0 CHECK (is_stable = 0 OR is_stable = 1),
16 | parent_date INT NULL CHECK(length(parent_date) <= 10 AND TYPEOF(parent_date) IN ('integer', 'null')),
17 | is_parent TINYINT NOT NULL DEFAULT 0 CHECK (is_parent = 0 OR is_parent = 1),
18 | timeout_date INT NULL CHECK(length(timeout_date) <= 10 AND TYPEOF(timeout_date) IN ('integer', 'null')),
19 | is_timeout TINYINT NOT NULL DEFAULT 0 CHECK (is_timeout = 0 OR is_timeout = 1),
20 | status TINYINT NOT NULL DEFAULT 1 CHECK (length(status) <= 3 AND TYPEOF(status) = 'integer'),
21 | create_date INT NOT NULL DEFAULT (CAST(strftime('%s', 'now') AS INTEGER)) CHECK(length(create_date) <= 10 AND TYPEOF(create_date) = 'integer')
22 | );
23 | INSERT INTO new_transaction (transaction_id, shard_id, transaction_date, node_id_origin, node_id_proxy, version, payload_hash, stable_date, is_stable, parent_date, is_parent, timeout_date, is_timeout, status, create_date) SELECT transaction_id, shard_id, transaction_date, node_id_origin, NULL, version, payload_hash, stable_date, is_stable, parent_date, is_parent, timeout_date, is_timeout, status, create_date FROM `transaction`;
24 | DROP TABLE `transaction`;
25 | ALTER TABLE new_transaction RENAME TO `transaction`;
26 |
27 | CREATE INDEX idx_transaction_status_is_stable_transaction_date ON `transaction` (status, is_stable, transaction_date);
28 | CREATE INDEX idx_transaction_id_is_stable_is_parent ON `transaction` (transaction_id, is_stable, is_parent);
29 | CREATE INDEX idx_transaction_date ON `transaction` (transaction_date);
30 | CREATE INDEX idx_transaction_id_transaction_date ON `transaction` (transaction_id, transaction_date);
31 | CREATE INDEX idx_transaction_is_parent ON `transaction` (is_parent);
32 | CREATE INDEX idx_transaction_is_stable_transaction_date ON `transaction` (is_stable, transaction_date);
33 | CREATE INDEX idx_transaction_create_date ON `transaction` (create_date);
34 |
35 | DROP TABLE IF EXISTS transaction_output_attribute;
36 | CREATE TABLE transaction_output_attribute
37 | (
38 | transaction_id CHAR(50) NOT NULL CHECK (length(transaction_id) <= 50),
39 | attribute_type_id CHAR(20) NOT NULL CHECK (length(attribute_type_id) <= 20),
40 | shard_id CHAR(50) NOT NULL CHECK (length(shard_id) <= 50),
41 | value TEXT NOT NULL,
42 | status TINYINT NOT NULL DEFAULT 1 CHECK (length(status) <= 3 AND TYPEOF(status) = 'integer'),
43 | create_date INT NOT NULL DEFAULT (CAST(strftime('%s', 'now') AS INTEGER)) CHECK(length(create_date) <= 10 AND TYPEOF(create_date) = 'integer'),
44 | PRIMARY KEY (transaction_id, attribute_type_id),
45 | FOREIGN KEY (transaction_id) REFERENCES `transaction` (transaction_id)
46 | );
47 | CREATE INDEX idx_transaction_output_attribute_create_date ON transaction_output_attribute (create_date);
48 |
49 | CREATE INDEX idx_transaction_output_output_position ON transaction_output (output_position);
50 |
51 | UPDATE schema_information SET value = "12" WHERE key = "version";
52 |
53 | COMMIT;
54 |
--------------------------------------------------------------------------------
/scripts/migration/shard/schema-update-13.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | UPDATE schema_information SET value = "13" WHERE key = "version";
6 |
7 | COMMIT;
8 |
--------------------------------------------------------------------------------
/scripts/migration/shard/schema-update-14.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | UPDATE schema_information SET value = "14" WHERE key = "version";
6 |
7 | COMMIT;
8 |
--------------------------------------------------------------------------------
/scripts/migration/shard/schema-update-15.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | DROP INDEX IF EXISTS idx_transaction_input_status_output_transaction_id;
6 | DROP INDEX IF EXISTS idx_transaction_output_address_key_identifier_is_stable_is_spent_status;
7 | DROP INDEX IF EXISTS idx_transaction_output_address_key_identifier_spent_double_spend_status;
8 | CREATE INDEX idx_transaction_input_status_output_transaction_id ON transaction_input (status, output_transaction_id);
9 | CREATE INDEX idx_transaction_output_address_key_identifier_is_stable_is_spent_status ON transaction_output (address_key_identifier, is_stable, is_spent, status);
10 | CREATE INDEX idx_transaction_output_address_key_identifier_spent_double_spend_status ON transaction_output (address_key_identifier, is_spent, is_double_spend, status);
11 |
12 | UPDATE schema_information SET value = "15" WHERE key = "version";
13 |
14 | COMMIT;
15 |
--------------------------------------------------------------------------------
/scripts/migration/shard/schema-update-16.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | DROP TABLE IF EXISTS audit_verification;
6 | DROP TABLE IF EXISTS audit_point;
7 |
8 | UPDATE schema_information SET value = "16" WHERE key = "version";
9 |
10 | COMMIT;
11 |
--------------------------------------------------------------------------------
/scripts/migration/shard/schema-update-17.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | CREATE INDEX IF NOT EXISTS idx_transaction_output_transaction_id_address_key_identifier ON transaction_output (transaction_id, address_key_identifier);
6 |
7 | UPDATE schema_information SET value = "17" WHERE key = "version";
8 |
9 | COMMIT;
10 |
--------------------------------------------------------------------------------
/scripts/migration/shard/schema-update-18.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | UPDATE schema_information SET value = "18" WHERE key = "version";
6 |
7 | COMMIT;
8 |
--------------------------------------------------------------------------------
/scripts/migration/shard/schema-update-19.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | UPDATE schema_information SET value = "19" WHERE key = "version";
6 |
7 | COMMIT;
8 |
--------------------------------------------------------------------------------
/scripts/migration/shard/schema-update-20.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | UPDATE schema_information SET value = "20" WHERE key = "version";
6 |
7 | COMMIT;
8 |
--------------------------------------------------------------------------------
/scripts/migration/shard/schema-update-21.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | CREATE INDEX IF NOT EXISTS idx_transaction_output_address_key_identifier_create_date ON transaction_output (address_key_identifier, create_date);
6 | UPDATE schema_information SET value = "21" WHERE key = "version";
7 |
8 | COMMIT;
9 |
--------------------------------------------------------------------------------
/scripts/migration/shard/schema-update-22.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | UPDATE schema_information SET value = "22" WHERE key = "version";
6 |
7 | COMMIT;
8 |
--------------------------------------------------------------------------------
/scripts/migration/shard/schema-update-23.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | UPDATE schema_information SET value = "23" WHERE key = "version";
6 |
7 | COMMIT;
8 |
--------------------------------------------------------------------------------
/scripts/migration/shard/schema-update-4.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | UPDATE schema_information SET value = "4" WHERE key = "version";
6 |
7 | DROP TABLE IF EXISTS address;
8 | DROP TABLE IF EXISTS address_version;
9 | DROP TABLE IF EXISTS api;
10 | DROP TABLE IF EXISTS config;
11 | DROP TABLE IF EXISTS keychain;
12 | DROP TABLE IF EXISTS keychain_address;
13 | DROP TABLE IF EXISTS node;
14 | DROP TABLE IF EXISTS node_attribute;
15 | DROP TABLE IF EXISTS node_attribute_type;
16 | DROP TABLE IF EXISTS transaction_output_type;
17 | DROP TABLE IF EXISTS wallet;
18 |
19 | COMMIT;
20 |
--------------------------------------------------------------------------------
/scripts/migration/shard/schema-update-5.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | UPDATE schema_information SET value = "5" WHERE key = "version";
6 |
7 | CREATE INDEX IF NOT EXISTS idx_transaction_create_date ON `transaction` (create_date);
8 | CREATE INDEX IF NOT EXISTS idx_transaction_parent_create_date ON transaction_parent (create_date);
9 | CREATE INDEX IF NOT EXISTS idx_transaction_signature_create_date ON transaction_signature (create_date);
10 | CREATE INDEX IF NOT EXISTS idx_transaction_input_create_date ON transaction_input (create_date);
11 | CREATE INDEX IF NOT EXISTS idx_transaction_output_create_date ON transaction_output (create_date);
12 | CREATE INDEX IF NOT EXISTS idx_transaction_output_attribute_create_date ON transaction_output_attribute (create_date);
13 | CREATE INDEX IF NOT EXISTS idx_audit_verification_create_date ON audit_verification (create_date);
14 | CREATE INDEX IF NOT EXISTS idx_audit_point_create_date ON audit_point (create_date);
15 |
16 | COMMIT;
17 |
--------------------------------------------------------------------------------
/scripts/migration/shard/schema-update-6.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | UPDATE schema_information SET value = "6" WHERE key = "version";
6 |
7 | COMMIT;
8 |
--------------------------------------------------------------------------------
/scripts/migration/shard/schema-update-7.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | UPDATE schema_information SET value = "7" WHERE key = "version";
6 |
7 | COMMIT;
8 |
--------------------------------------------------------------------------------
/scripts/migration/shard/schema-update-8.sql:
--------------------------------------------------------------------------------
1 | PRAGMA foreign_keys= off;
2 |
3 | BEGIN TRANSACTION;
4 |
5 | UPDATE schema_information SET value = "8" WHERE key = "version";
6 |
7 | COMMIT;
8 |
--------------------------------------------------------------------------------
/webpack.prod.config.js:
--------------------------------------------------------------------------------
1 | const path = require('path');
2 | const CopyPlugin = require('copy-webpack-plugin');
3 |
4 | module.exports = {
5 | target : 'node',
6 | mode : 'development',
7 | entry : './index.js',
8 | output : {
9 | filename: 'index.dist.js',
10 | path : path.resolve(__dirname, './dist')
11 | },
12 | devtool: 'inline-source-map',
13 | watch : false,
14 | resolve: {
15 | alias: {
16 | [path.join(__dirname, 'node_modules/sqlite3/lib/sqlite3-binding.js')]: path.join(__dirname, 'database/sqlite3/sqlite3-binding.js')
17 | }
18 | },
19 | module : {
20 | rules: [
21 | {
22 | test : /\.m?js$/,
23 | exclude: /node_modules/,
24 | use : {
25 | loader : 'babel-loader',
26 | options: {
27 | presets : [
28 | '@babel/preset-env'
29 | ],
30 | plugins : [
31 | [
32 | '@babel/plugin-transform-runtime',
33 | {
34 | 'regenerator': true
35 | }
36 | ]
37 | ],
38 | sourceMaps : 'inline',
39 | retainLines: true
40 | }
41 | },
42 | resolve: {
43 | extensions: [
44 | '.js',
45 | '.mjs'
46 | ]
47 | }
48 | }
49 | ]
50 | },
51 | plugins: [
52 | new CopyPlugin({
53 | patterns: [
54 | {
55 | from: 'node_modules/sqlite3/build/**/node_sqlite3.node',
56 | to : 'build/node_sqlite3.node'
57 | }
58 | ]
59 | })
60 | ]
61 | };
62 |
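This configuration is normally driven through the webpack CLI (for example npx webpack --config webpack.prod.config.js), producing dist/index.dist.js together with the copied sqlite3 native binary. A minimal sketch of invoking the same config through webpack's Node API instead (build.js is a hypothetical file name):

// build.js - programmatic equivalent of running the CLI against webpack.prod.config.js
const webpack = require('webpack');
const config  = require('./webpack.prod.config');

webpack(config, (err, stats) => {
    if (err || stats.hasErrors()) {
        console.error(err || stats.toString({errors: true, colors: false}));
        process.exit(1);
    }
    console.log(stats.toString({colors: true, modules: false}));
});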
--------------------------------------------------------------------------------