├── .env.example ├── .gitignore ├── config.example.js ├── enum ├── abi.js ├── address.common.js ├── bulk.records.type.js ├── chain.id.js ├── contracts.js └── mainTokens.js ├── log └── reserves-to-update.log ├── package-lock.json ├── package.json ├── readme.md ├── server ├── app.js ├── config │ └── database.js ├── models │ ├── history_prices.js │ ├── history_transactions.js │ ├── pair.js │ ├── routers.js │ ├── token_basic.js │ └── token_history.js ├── routes │ └── tokens │ │ ├── history.pair.js │ │ ├── history.token.js │ │ ├── index.js │ │ ├── price.js │ │ └── token.js ├── server.js ├── service │ ├── db.history.js │ ├── db.history.price.js │ ├── db.history.transaction.js │ ├── db.routers.js │ ├── db.token.js │ └── index.js └── websocket │ ├── index.js │ ├── sockets │ └── Tokens.js │ └── utils.js ├── utils ├── addresses.js └── sleep.js └── workers ├── analizer ├── analize.transaction.js └── sync.in.block.js ├── blockchain.scraper ├── master.js └── slave.js ├── lib ├── block.listner.js ├── bloomfilter.js ├── logs.js ├── scrape.block.past.js ├── scrape.block.stream.js └── web3.js └── updater ├── lib ├── Cache.js ├── Queue.js ├── Scraper.js ├── bulk │ ├── Bulk.js │ ├── BulkNormal.js │ └── BulkTime.js ├── check.missing.blocks.js ├── entity │ ├── HistoryPirce.js │ ├── Routers.js │ ├── Token.js │ ├── TokenFees.js │ ├── TokenHistory.js │ └── Transaction.js ├── not-delete.scraped-block.checkpoint.txt ├── not-delete.scraped-blocks.txt └── test.token.fees.js ├── master.js ├── restarter.js ├── scripts ├── scrape.block.js └── syncIn.block.js └── slave.js /.env.example: -------------------------------------------------------------------------------- 1 | 2 | WRITE_TO_DB_SECONDS=20 3 | FRONTEND_URL=["http://localhost:5007"] 4 | PORT=5006 5 | CHAIN_ID=56 -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | .env 3 | scraper.log 4 | 
missing.json 5 | config.js -------------------------------------------------------------------------------- /config.example.js: -------------------------------------------------------------------------------- 1 | const EnumChainId = require("./enum/chain.id"); 2 | const { toCheckSum } = require("./utils/addresses"); 3 | let scraperConfig = { 4 | [EnumChainId.BSC]: { 5 | "save_price": true, // do you want the scraper to save the price records of the tokens ? 6 | "save_transactions": true, // do you want the scraper to save every swap transaction ? 7 | "calculate_pair_fees": true, // do you want the scraper to save the fees of all the pairs ? 8 | "whitelist_enabled": false,// do you want the scraper to scrape only specific given tokens ? 9 | "whitelist": [ // pass here the tokens to whitelist as the example one 10 | toCheckSum("0xc748673057861a797275CD8A068AbB95A902e8de") // example whitelisted token 11 | ], 12 | "use_checkpoint_when_restart": false, // if the scraper crashes, it has to scrape all the block since the latest one scraped? 
13 | "http_provider": "", // to fill 14 | "ws_provider": "", // to fill 15 | } 16 | } 17 | module.exports = scraperConfig; -------------------------------------------------------------------------------- /enum/address.common.js: -------------------------------------------------------------------------------- 1 | 2 | const EnumCommonAddresses = { 3 | ZERO: "0x0000000000000000000000000000000000000000" 4 | }; 5 | 6 | module.exports = EnumCommonAddresses; -------------------------------------------------------------------------------- /enum/bulk.records.type.js: -------------------------------------------------------------------------------- 1 | const EnumBulkTypes = { 2 | TOKEN_HISTORY: 'tokenHistory', 3 | TOKEN_BASIC: 'tokenBasic', 4 | HISTORY_PRICE: 'historyPrice', 5 | HISOTRY_TRANSACTION: 'historyTransacton', 6 | ROUTERS: 'routers' 7 | } 8 | module.exports = EnumBulkTypes; -------------------------------------------------------------------------------- /enum/chain.id.js: -------------------------------------------------------------------------------- 1 | const EnumChainId = { 2 | BSC: 56, 3 | BSC_TESTNET: 97, 4 | ETH: 1, 5 | POLYGON: 137, 6 | RINKEBY: 4 7 | } 8 | module.exports = EnumChainId; -------------------------------------------------------------------------------- /enum/contracts.js: -------------------------------------------------------------------------------- 1 | const UtilsAddresses = require("../utils/addresses"); 2 | const EnumChainId = require("./chain.id"); 3 | 4 | const EnumContracts = { 5 | [EnumChainId.BSC]: { 6 | MAIN_ROUTER: UtilsAddresses.toCheckSum("0x10ED43C718714eb63d5aA57B78B54704E256024E"), // Pancake 7 | MAIN_FACTORY: UtilsAddresses.toCheckSum("0xca143ce32fe78f1f7019d7d551a6402fc5350c73") // Pancake 8 | } 9 | }; 10 | 11 | module.exports = EnumContracts -------------------------------------------------------------------------------- /enum/mainTokens.js: -------------------------------------------------------------------------------- 1 | 
var EnumChainId = require('./chain.id'); 2 | var UtilsAddresses =require('../utils/addresses'); 3 | 4 | const EnumMainTokens = { 5 | [EnumChainId.BSC_TESTNET]: { 6 | ETH: { 7 | address: UtilsAddresses.toCheckSum("0xd66c6b4f0be8ce5b39d52e0fd1344c389929b378"), 8 | decimals: 18 9 | },// 10 | BUSD: { 11 | address: UtilsAddresses.toCheckSum("0xed24fc36d5ee211ea25a80239fb8c4cfd80f12ee"), 12 | decimals: 18 13 | },// 14 | DAI: { 15 | address: UtilsAddresses.toCheckSum("0xec5dcb5dbf4b114c9d0f65bccab49ec54f6a0867"), 16 | decimals: 18 17 | },// 18 | BTC: { 19 | address: UtilsAddresses.toCheckSum("0x6ce8da28e2f864420840cf74474eff5fd80e65b8"), 20 | decimals: 18 21 | },// 22 | XRP: { 23 | address: UtilsAddresses.toCheckSum("0xa83575490d7df4e2f47b7d38ef351a2722ca45b9"), 24 | decimals: 18 25 | },// 26 | USDC: { 27 | address: UtilsAddresses.toCheckSum("0x64544969ed7ebf5f083679233325356ebe738930"), 28 | decimals: 18 29 | },// 30 | USDT: { 31 | address: UtilsAddresses.toCheckSum("0x337610d27c682e347c9cd60bd4b3b107c9d34ddd"), 32 | decimals: 18 33 | },// 34 | CST: { 35 | address: UtilsAddresses.toCheckSum("0x0730eCd23F920d00C2D7AC5b245675B8423b0Ef1"), 36 | decimals: 9 37 | } 38 | }, 39 | [EnumChainId.BSC]: { 40 | BTC: { 41 | address: UtilsAddresses.toCheckSum("0x7130d2a12b9bcbfae4f2634d864a1ee1ce3ead9c"), 42 | decimals: 18 43 | },// 44 | DOT: { 45 | address: UtilsAddresses.toCheckSum("0x7083609fce4d1d8dc0c979aab8c869ea2c873402"), 46 | decimals: 18 47 | },// 48 | 49 | WBNB: { 50 | address: UtilsAddresses.toCheckSum("0xbb4cdb9cbd36b01bd1cbaebf2de08d9173bc095c"), 51 | decimals: 18 52 | }, 53 | BUSD: { 54 | address: UtilsAddresses.toCheckSum("0xe9e7cea3dedca5984780bafc599bd69add087d56"), 55 | decimals: 18 56 | },// 57 | USDC: { 58 | address: UtilsAddresses.toCheckSum("0x8ac76a51cc950d9822d68b83fe1ad97b32cd580d"), 59 | decimals: 18 60 | },// 61 | USDT: { 62 | address: UtilsAddresses.toCheckSum("0x55d398326f99059ff775485246999027b3197955"), 63 | decimals: 18 64 | },// 65 | DAI: { 66 | 
address: UtilsAddresses.toCheckSum("0x1af3f329e8be154074d8769d1ffa4ee058b1dbc3"), 67 | decimals: 18 68 | } 69 | } 70 | } 71 | 72 | EnumMainTokens[EnumChainId.BSC].STABLECOINS = [ "BUSD", "USDC", "USDT", "DAI" ].map( e => EnumMainTokens[EnumChainId.BSC][e].address ); 73 | EnumMainTokens[EnumChainId.BSC].MAIN = EnumMainTokens[EnumChainId.BSC]["WBNB"]; 74 | 75 | // [0x10ED43C718714eb63d5aA57B78B54704E256024E, 0x10ED43C718714eb63d5aA57B78B54704E256024E,0x10ED43C718714eb63d5aA57B78B54704E256024E,0x10ED43C718714eb63d5aA57B78B54704E256024E,0x0000000000000000000000000000000000000000] 76 | // [0xe9e7cea3dedca5984780bafc599bd69add087d56, 0xbb4cdb9cbd36b01bd1cbaebf2de08d9173bc095c, 0x55d398326f99059ff775485246999027b3197955, 0x8ac76a51cc950d9822d68b83fe1ad97b32cd580d] 77 | // 1000000000000000000 78 | module.exports = EnumMainTokens; 79 | 80 | // usdc -> usdt -> ice -> ftm -------------------------------------------------------------------------------- /log/reserves-to-update.log: -------------------------------------------------------------------------------- 1 | [20848928] 2 | [UPDATE 00:40:24:43] 0x7592054649b998213496570b52217069a16a9EA4 0xfec0a9cece0416db09fa162b9123f5b61c244fe20fad69143861afca440fc1f6 3 | 1342032435049788,249647387167593840918 4 | [20848928] 5 | [UPDATE 00:40:24:44] 0xa746dF1469fd1197a844F5fEA179Aedd2FECA2D0 0x1a2c640160782dbd410013f320d4fb719bbda6a6f21bd145194e86f288f4d026 6 | 1307045276421503989,38529813106905540569425989 7 | [20848928] 8 | [UPDATE 00:40:24:44] 0xa746dF1469fd1197a844F5fEA179Aedd2FECA2D0 0xc6d516e489efa7c75aa961e96d2d65c9a8b37ad576bfa4be75479c5b40219848 9 | 1312045276421503989,38383347678355182753360698 10 | [20848928] 11 | [UPDATE 00:40:24:44] 0x41138B6466E1d92ae05495735d46586b74efad75 0x0a3405d1148c6346737525cad692013c1165a15083bcfe0b0dfd59be0ae76a68 12 | 6115485521931140725538322,111927647086671789037 13 | [20848928] 14 | [UPDATE 00:40:24:45] 0x5a13Be0Fc060f8042bDc5E4A3771D4f78aF24141 
0xaa466f114c07bcd058b2a18b10779983e63ea60f8ea4fb255b40cffadade06a9 15 | 1887607760821352680873360,912160401998073912201328 16 | [20848928] 17 | [UPDATE 00:40:24:45] 0x58F876857a02D6762E0101bb5C46A8c1ED44Dc16 0xa67576f4de3a97eec23e024e02779161aa567e001c2bd7387a420c88d3405a26 18 | 333462396353786621993025,93055515830530816413454448 19 | [20848928] 20 | [UPDATE 00:40:24:46] 0x16DcE2d4172d895Ea042f29df38F812a9b065661 0x8f125bc53c47bacbc660dd4713038e4f2a11eefcf29fb718359c339f6538a74f 21 | 42125513548186116747,829203829676946210440279 22 | [20848928] 23 | [UPDATE 00:40:24:46] 0x74E4716E431f45807DCF19f284c7aA99F18a4fbc 0x2cc02286b9d3b7aee4a508f4f5dc4b24286b68e449299541cd36e5eb1c38157b 24 | 16279534540366323038758,85470885562089547449781 25 | [20848928] 26 | [UPDATE 00:40:24:46] 0x74E4716E431f45807DCF19f284c7aA99F18a4fbc 0x2cc02286b9d3b7aee4a508f4f5dc4b24286b68e449299541cd36e5eb1c38157b 27 | 16279525040741906287218,85470935562089547449781 28 | [20848928] 29 | [UPDATE 00:40:24:46] 0x84821bb588f049913Dc579Dc511E5e31EB22d5E4 0x2cc02286b9d3b7aee4a508f4f5dc4b24286b68e449299541cd36e5eb1c38157b 30 | 4399283178729451059343,20517257164297707682477839 31 | [20848928] 32 | [UPDATE 00:40:24:46] 0x84821bb588f049913Dc579Dc511E5e31EB22d5E4 0x2cc02286b9d3b7aee4a508f4f5dc4b24286b68e449299541cd36e5eb1c38157b 33 | 4399292654625268614177,20517301357722739033956215 34 | [20848928] 35 | [UPDATE 00:40:24:46] 0x58F876857a02D6762E0101bb5C46A8c1ED44Dc16 0x1d898c8126df79fd4789e3c725d1063353815b7625dacc96114919f54476149d 36 | 333462257281863786258095,93055554737015265641891490 37 | [20848928] 38 | [UPDATE 00:40:24:46] 0x9191B027C26d44b86177E64f992cf4DCd1F4E0eF 0x1d898c8126df79fd4789e3c725d1063353815b7625dacc96114919f54476149d 39 | 8775671340431265912654,121878908299786431064 40 | [20848928] 41 | [UPDATE 00:40:24:46] 0xa746dF1469fd1197a844F5fEA179Aedd2FECA2D0 0x8cb23da92190fa2dacad8d1c01ff0ffed67948f869f1bda8b5b2496004e3241c 42 | 1342045276421503989,37527425830117461897470544 43 | [20848928] 44 | 
[UPDATE 00:40:24:46] 0xDe86464A0EBBEb88829929E53F2acf9D125ED68F 0xe51eb947933bf9bb2e0ff460decf9ea853e0d81e4c5349e45617184553833c37 45 | 48625095215799397160735984502,127363978532078237578344 46 | [20848928] 47 | [UPDATE 00:40:24:46] 0x0eD7e52944161450477ee417DE9Cd3a859b14fD0 0x865ba5c106f701bc987e0b504307f5be895677eef87695d7d3394616a75b36d8 48 | 24107355215761519710690693,331580241816312617974501 49 | [20848928] 50 | [UPDATE 00:40:24:47] 0x16b9a82891338f9bA80E2D6970FddA79D1eb0daE 0xedae1c19e1a76dfebca4c2c6b8bedd2ca010624d7bfa1974eed32b45757e3cff 51 | 82358126228371654930556119,294650299009470475032402 52 | [20848928] 53 | [UPDATE 00:40:24:47] 0xDd8124318D3DbfB486B4D9fbaBa8A97409EEBD71 0xedae1c19e1a76dfebca4c2c6b8bedd2ca010624d7bfa1974eed32b45757e3cff 54 | 401666795407381509357015,858469857416943552316 55 | [20848928] 56 | [UPDATE 00:40:24:47] 0xAC1F5e57d53e9Ac3c092EB876e46C235df95672A 0x8900e0a45a81112976f50006571104860e38c52ed899159b2872f6a125f83530 57 | 1742263336767412829467375,25920866485123320852130 58 | [20848928] 59 | [UPDATE 00:40:24:48] 0xa746dF1469fd1197a844F5fEA179Aedd2FECA2D0 0xfd38c258ba36806fd870f1d8241004b674fec59bfb01b259419867db2c17687b 60 | 1357045276421503989,37113644775403209354745087 61 | [SCRAPING BLOCK][NEW] 20848928 00:40:24:48 [20848929] 62 | [UPDATE 00:40:24:48] 0xa746dF1469fd1197a844F5fEA179Aedd2FECA2D0 0xe316959957d84e8e1d3e20ce48209c5b3c9f88677ebf5c60bbdc3161b34ed706 63 | 1367045276421503989,36842830499258195017269152 64 | [20848929] 65 | [UPDATE 00:40:24:48] 0xF1A12EC907B3d87b6De7a9A5C3820566c621f68B 0x21734c80d438b9265e295a15ec222411e6213f44b1655c1bafd1b6a706701fd5 66 | 735224339799196479904163,2483118961549922619582 67 | [20848929] 68 | [UPDATE 00:40:24:48] 0xbc82B318Ba60439a8fD1e03f05F23bf6A64CcE77 0x2043c1d52ea667ef2f0ed3f08df133ba872361b19c6eec6b6edbbeb423b0b003 69 | 97000126568643041165705475,1454019277011529035318 70 | [20848929] 71 | [UPDATE 00:40:24:48] 0x7D9a46aC9CB22A6c4E6A94253c309C0551F717fe 
0xbb83e8ca59dd5e22bc27fcbf3c1a7d5d3350f36f30411715ff2e780a632ea362 72 | 8168482507703682212446971,515283224580725863166058 73 | [20848929] 74 | [UPDATE 00:40:24:49] 0x15DfE422b03B2059AAA370434CD3fC13FD264163 0x36693be8c4eced9fd5551c74e178076830c198dccf86eddbe4c58606c4ace893 75 | 34689615001502711840456,26977096176052238973 76 | [20848929] 77 | [UPDATE 00:40:24:49] 0x15DfE422b03B2059AAA370434CD3fC13FD264163 0x36693be8c4eced9fd5551c74e178076830c198dccf86eddbe4c58606c4ace893 78 | 34693255405014342694655,26979927211093955337 79 | [20848929] 80 | [UPDATE 00:40:24:49] 0x15DfE422b03B2059AAA370434CD3fC13FD264163 0x36693be8c4eced9fd5551c74e178076830c198dccf86eddbe4c58606c4ace893 81 | 34707107775989981813300,26969185838026739800 82 | [20848929] 83 | [UPDATE 00:40:24:49] 0xC1Dd3FEBaE66f04F8cB43B5E62892A396D8b6cC8 0x39a50faf2003aa04bb6f948ef8071cc0631f5550650c5886576a93a59b377a12 84 | 10225077584971472751,953835905965347773267 85 | [20848929] 86 | [UPDATE 00:40:25:50] 0x53a63ac301D6410915385294527f947aFf616599 0x12765697ee9414a77a3db8305dd38b9bc3a25d46ae113f37becf43eca3dea138 87 | 9149574362987778429915465,4920090055262332859073 88 | [20848929] 89 | [UPDATE 00:40:25:50] 0x50808249bC0d5af4D387dE4c5683B619788188e3 0xeaeebce8f5b813a01c5ec64d5c793411b41739dedeccfab01d6ee8a0696276f6 90 | 50136458848236843316606788,487366516192006155822 91 | [20848929] 92 | [UPDATE 00:40:25:50] 0xc2D00fa9F923e9Ee486984Ff341CC3CBC3875C88 0xf569bf9e1efb1ec3dc6a77bd978e278e42c343ef537ed90c3520b9ae94ca294d 93 | 853433339839770798302897,45061154224139683018 94 | [20848929] 95 | [UPDATE 00:40:25:50] 0x8a723A8A9B030c56b518A70Fbaf959c430d5335f 0xa58acdc21b0fe9b01db9fe69bf516dce0324e1d4a6462ad2ca894c9ea684e251 96 | 3112946835098224668256,416746199980440177571133 97 | [20848929] 98 | [UPDATE 00:40:25:50] 0x2fa22acDb65ce9763d927656909F14e4E66b5F08 0xd941c7ef1113a65d18e32ee1b9c937005c2135fd12ca050a37844c0edbe0dd4d 99 | 31922738255950018530409080,459223434509116231880 100 | [20848929] 101 | [UPDATE 00:40:25:50] 
0x0eD7e52944161450477ee417DE9Cd3a859b14fD0 0xe16c5595ace0b19b2a79d27b6353c18d88064828da7375be627fadbae2e52f7d 102 | 24107354911166279514559133,331580246016312617974501 103 | [20848929] 104 | [UPDATE 00:40:25:50] 0xA9fDF4cBA9A215315a5e0b8ef1Ed758d11F66695 0xe16c5595ace0b19b2a79d27b6353c18d88064828da7375be627fadbae2e52f7d 105 | 6670001458458326252,4530938446343402823180 106 | [20848929] 107 | [UPDATE 00:40:25:50] 0x82f504D655ec6dbA8d5eBbd0036a908A719A295f 0x200da390628f9674c29e2c1773b5fe659c05a3d6086895d9ccc774257d481ed8 108 | 2949422914557029671020503,45013448223388305676 109 | [20848929] 110 | [UPDATE 00:40:25:51] 0xA9fDF4cBA9A215315a5e0b8ef1Ed758d11F66695 0x200da390628f9674c29e2c1773b5fe659c05a3d6086895d9ccc774257d481ed8 111 | 5811471408615685365,5201972643382979959514 112 | [20848929] 113 | [UPDATE 00:40:25:51] 0x0eD7e52944161450477ee417DE9Cd3a859b14fD0 0x200da390628f9674c29e2c1773b5fe659c05a3d6086895d9ccc774257d481ed8 114 | 24107355769696329357200020,331580234237338230887237 115 | [SCRAPING BLOCK][NEW] 20848929 00:40:25:51 -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pancakeswap-token-charting", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "server": "node ./server/app.js", 8 | "test": "echo \"Error: no test specified\" && exit 1", 9 | "restarter": "node ./workers/updater/restarter.js", 10 | "scraper": "node ./workers/updater/master.js" 11 | }, 12 | "author": "", 13 | "license": "ISC", 14 | "dependencies": { 15 | "abi-decoder": "^2.4.0", 16 | "axios": "^0.26.1", 17 | "bignumber.js": "^9.0.2", 18 | "compression": "^1.7.4", 19 | "cookie-parser": "^1.4.6", 20 | "cors": "^2.8.5", 21 | "dotenv": "^10.0.0", 22 | "ethers": "^5.5.4", 23 | "express": "^4.17.2", 24 | "fs": "*", 25 | "helmet": "^4.6.0", 26 | "mongoose": "^6.1.3", 27 | "morgan": "^1.10.0", 28 | "passport": 
"^0.5.2", 29 | "path": "^0.12.7", 30 | "redis": "^4.0.6", 31 | "web3": "^1.6.0", 32 | "ws": "^8.5.0" 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /readme.md: -------------------------------------------------------------------------------- 1 | 2 |

WeChart

3 |

We-Chart-Logo

4 | 5 | 6 |

7 | WeChart is a tool aiming to provide software capable of detecting whenever a token changes its prices on the most common live dexes ( pancakeswap/biswap/uniswap/etc.. ), based on the chain where the scraper is running, in the most efficient way possible, and tracking it as a record on a database (mongodb) 8 |

9 | 10 | > :warning: To run this tool you will need a commercial full-node provider. 11 | 12 | #### How does this work? 13 | The `price scraper` listens for the latest swap made to blockchain, based on the token's liquidity pair reserves you can calculate the tokens prices, the router of the swap and the pair of the tokens. 14 | 15 | ### Setup 16 | 17 | - Install mongodb `sudo apt install -y mongodb` 18 | - Install nodejs `sudo apt install nodejs` 19 | - Install npm `sudo apt install npm` 20 | - Download the repo 21 | - Navigate to the repo from the termian `cd /path/to/tokenChartingRepo` 22 | - Install the repo dependencies `npm i` 23 | - Create a `.env` and a `config.js` file based on the provided examples 24 | - Start the scraper 25 | 26 | - If you want it to **restart on crash** follow this commands: 27 | - Install pm2 `sudo npm install pm2 -g` 28 | - Make pm2 restart the scraper if the server crash `pm2 startup` 29 | - Run the scraper `pm2 start npm --name "charting-bot-restarter" -- run restarter` 30 | - Save the pm2 running processes `pm2 save` 31 | - For see the current active pm2 processes `pm2 status` 32 | - For see the scraper logs `pm2 logs charting-bot` 33 | 34 | - If you **don't** want it to **restart on crash** 35 | - `npm run scraper` 36 | 37 | 38 | 39 | ### Emprove This 40 | If you know a more efficent way, or any other better than this please let the community know this by opening an issue with infos about your ideas! :heart: . This repo can be used not only with pancakeswap but also with any kind of other exchange platform on any chain that is similar to pancakeswap. 
41 | 42 | 43 | 44 | 45 | ### To Do 46 | - multi chain scraping ( almost done ) [ currently bsc only ] 🟡 47 | - multi chain server querying [ currently bsc only ] 🔴 48 | 49 | 50 | ### Query the prices 51 | 52 | - Inside the repo you can find a rest api built with `expressjs` that you can use to query the scraped prices 53 | - Run the server with `npm run server` 54 | 55 | 56 | ### Endpoints 57 | 58 | - GET. `/token/price/:contract` : Retrun the token price 59 | - GET. `/token/history/:contract?from=<>&to=<>`: Return the history of the token in the specified time range ( use **unix** timestaps to specify the time ranges ) 60 | - there are more endpoints not documented yet 61 | 62 | 63 | ### ISSUES 64 | For anyu bug or issue please report it, i'll try to reply as soon as i can :thumbsup: 65 | -------------------------------------------------------------------------------- /server/app.js: -------------------------------------------------------------------------------- 1 | // RUN APP ON MULTIPLE CORES IF NEEDED 2 | require('dotenv').config(); 3 | const os = require("os"); 4 | const cluster = require("cluster"); 5 | const http = require('http'); 6 | const { setupWebSocket } = require('./websocket'); 7 | const PORT = process.env.PORT || 4060; 8 | const clusterWorkerSize = os.cpus().length ; 9 | console.log("PORT: " + PORT) 10 | console.log("WORKERS: " + clusterWorkerSize) 11 | 12 | if (cluster.isMaster) { 13 | 14 | // Create a worker for each CPU 15 | for (var i = 0; i < clusterWorkerSize; i += 1) { 16 | cluster.fork(); 17 | } 18 | 19 | // Listen for dying workers 20 | cluster.on('exit', function (worker) { 21 | // Replace the dead worker, we're not sentimental 22 | console.log('Worker ' + worker.id + ' died :('); 23 | cluster.fork(); 24 | }); 25 | 26 | } else { 27 | const app = require('./server'); 28 | 29 | 30 | // app should be your express app 31 | const server = http.createServer(app); 32 | 33 | // pass the same server to our websocket setup function 34 | // the 
websocket server will the run on the same port 35 | // accepting ws:// connections 36 | // setupWebSocket(server); 37 | 38 | server.listen(PORT, () => { 39 | console.log("Express server listening on port " + PORT); 40 | console.log('Worker ' + cluster.worker.id + ' running!'); 41 | }); 42 | 43 | } 44 | 45 | 46 | /* 47 | const app = require('./server'); 48 | app.listen(PORT, () => { 49 | console.log("Express server listening on port " + PORT); 50 | }); 51 | */ -------------------------------------------------------------------------------- /server/config/database.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | url: 'mongodb://localhost:27017/charting_56', 3 | }; 4 | -------------------------------------------------------------------------------- /server/models/history_prices.js: -------------------------------------------------------------------------------- 1 | 2 | var mongoose = require('mongoose'); 3 | 4 | var historyPriceSchema = mongoose.Schema({ 5 | time: Number, // unix timestamp 6 | 7 | open: Number, 8 | close: Number, 9 | high: Number, 10 | low: Number, 11 | value: Number, 12 | dependantValue: Number, // the value based on the token with which it is in pair 13 | 14 | index: Number, 15 | pair: String, 16 | router: String, 17 | mainToken: String, 18 | dependantToken: String, 19 | }); 20 | historyPriceSchema.index({ pair: 1 }); 21 | historyPriceSchema.index({ dependantToken: 1 }); 22 | historyPriceSchema.index({ mainToken: 1 }); 23 | historyPriceSchema.index({ router: 1 }); 24 | 25 | /* 26 | db.historyprices.ensureIndex({ pair: 1, time: 1 }); 27 | db.historyprices.ensureIndex({ dependantToken: 1 }); 28 | db.historyprices.ensureIndex({ mainToken: 1 }); 29 | db.historyprices.ensureIndex({ router: 1 }); 30 | db.historyprices.ensureIndex({ time: 1 }); 31 | db.historyprices.ensureIndex({ pair: 1 }); 32 | */ 33 | module.exports = mongoose.model('historyPrice', historyPriceSchema); 34 | 35 | 
-------------------------------------------------------------------------------- /server/models/history_transactions.js: -------------------------------------------------------------------------------- 1 | 2 | var mongoose = require('mongoose'); 3 | 4 | var historyTransactionSchema = mongoose.Schema({ 5 | time: Number, // unix timestamp 6 | hash: String, 7 | 8 | from: String, 9 | 10 | amountIn: Number, 11 | amountOut: Number, 12 | 13 | tokenIn: String, 14 | tokenOut: String, 15 | 16 | value: Number, // estimated buy or sell price 17 | 18 | index: Number, 19 | pair: String, 20 | mainToken: String, 21 | dependantToken: String, 22 | router: String 23 | }); 24 | historyTransactionSchema.index({ pair: 1 }); 25 | historyTransactionSchema.index({ dependantToken: 1 }); 26 | historyTransactionSchema.index({ mainToken: 1 }); 27 | historyTransactionSchema.index({ router: 1 }); 28 | historyTransactionSchema.index({ time: 1 }); 29 | /* 30 | db.historytransactions.ensureIndex({ pair: 1 }); 31 | db.historytransactions.ensureIndex({ dependantToken: 1 }); 32 | db.historytransactions.ensureIndex({ mainToken: 1 }); 33 | db.historytransactions.ensureIndex({ router: 1 }); 34 | db.historytransactions.ensureIndex({ time: 1 }); 35 | */ 36 | 37 | module.exports = mongoose.model('historyTransaction', historyTransactionSchema); 38 | 39 | -------------------------------------------------------------------------------- /server/models/pair.js: -------------------------------------------------------------------------------- 1 | var mongoose = require('mongoose'); 2 | 3 | var pairSchema = mongoose.Schema({ 4 | token0: { 5 | contract: String, 6 | token: { 7 | type: mongoose.Schema.Types.ObjectId, 8 | ref: 'TokenBasic', 9 | index: true 10 | }, 11 | }, 12 | token1: { 13 | contract: String, 14 | token: { 15 | type: mongoose.Schema.Types.ObjectId, 16 | ref: 'TokenBasic', 17 | index: true 18 | }, 19 | }, 20 | contract: String, 21 | index: Number 22 | }, { timestamps: { createdAt: 'created_at' } }); 23 
| 24 | 25 | pairSchema.index({'token0.contract': 1}); 26 | pairSchema.index({'token1.contract': 1}); 27 | pairSchema.index({contract: 1}); 28 | pairSchema.index({router: 1}); 29 | pairSchema.index({chain: 1}); 30 | /* 31 | db.pairs.ensureIndex({'token0.contract': 1}); 32 | db.pairs.ensureIndex({'token1.contract': 1}); 33 | db.pairs.ensureIndex({contract: 1}); 34 | db.pairs.ensureIndex({router: 1}); 35 | db.pairs.ensureIndex({chain: 1}); 36 | */ 37 | 38 | 39 | module.exports = mongoose.model('Pair', pairSchema); 40 | 41 | -------------------------------------------------------------------------------- /server/models/routers.js: -------------------------------------------------------------------------------- 1 | var mongoose = require('mongoose'); 2 | 3 | var pairSchema = mongoose.Schema({ 4 | contract: String, 5 | valid: Boolean, 6 | fee: Number 7 | }, { timestamps: { createdAt: 'created_at' } }); 8 | 9 | pairSchema.index({'contract': 1}); 10 | pairSchema.index({'valid': 1}); 11 | /* 12 | db.pairs.ensureIndex({'token0.contract': 1}); 13 | db.pairs.ensureIndex({'token1.contract': 1}); 14 | db.pairs.ensureIndex({contract: 1}); 15 | db.pairs.ensureIndex({router: 1}); 16 | db.pairs.ensureIndex({chain: 1}); 17 | */ 18 | module.exports = mongoose.model('Router', pairSchema); 19 | 20 | -------------------------------------------------------------------------------- /server/models/token_basic.js: -------------------------------------------------------------------------------- 1 | var mongoose = require('mongoose'); 2 | 3 | var tokenBasicSchema = mongoose.Schema({ 4 | chain: String, 5 | contract: String, 6 | name: String, 7 | symbol: String, 8 | decimals: Number, 9 | total_supply: Number, 10 | pairs_count: Number, 11 | score: Object, // { [day]: { tx_count: 0 } } 12 | score_points: Number, 13 | 14 | }, { timestamps: { createdAt: 'created_at' } }); 15 | 16 | tokenBasicSchema.index({chain: 1}); 17 | tokenBasicSchema.index({contract: 1}, {unique: true}); 18 | 
tokenBasicSchema.index({name: 1}); 19 | tokenBasicSchema.index({pairs_count: 1}); 20 | tokenBasicSchema.index({score_point: 1}); 21 | /* 22 | db.tokenbasics.ensureIndex({chain: 1}); 23 | db.tokenbasics.ensureIndex({contract: 1}); 24 | db.tokenbasics.ensureIndex({name: "hashed"}); 25 | db.tokenbasics.ensureIndex({pairs_count: 1}); 26 | db.tokenbasics.ensureIndex({score_points: 1}); 27 | */ 28 | 29 | module.exports = mongoose.model('TokenBasic', tokenBasicSchema); 30 | -------------------------------------------------------------------------------- /server/models/token_history.js: -------------------------------------------------------------------------------- 1 | var mongoose = require('mongoose'); 2 | 3 | var tokenHistorySchema = mongoose.Schema({ 4 | 5 | updated_at_block: Number, 6 | 7 | records_date: Number, // used to reset recrods field on day change 8 | records_transactions: Number, // daily transactions 9 | records_price: Number, // daily price records 10 | 11 | chain: Number, 12 | 13 | router: String, 14 | router_fee: Number, 15 | 16 | pair: String, 17 | 18 | token0: { 19 | contract: String, 20 | name: String, 21 | symbol: String, 22 | decimals: Number, 23 | }, 24 | token1: { 25 | contract: String, 26 | name: String, 27 | symbol: String, 28 | decimals: Number, 29 | }, 30 | 31 | mainToken: String, 32 | dependantToken: String, 33 | 34 | burned: Number, 35 | mcap: Number, 36 | value: Number, 37 | price: Number, 38 | reserve0: Number, 39 | reserve1: Number, 40 | mainReserveValue: Number, 41 | 42 | volume: Number, 43 | 44 | variation: { 45 | hour: Number, 46 | day: Number, 47 | week: Number, 48 | month: Number 49 | }, 50 | 51 | hasFees: Boolean, // default null 52 | fees: { 53 | token0: { 54 | buy: Number, 55 | sell: Number, 56 | checked: { 57 | type: Boolean, 58 | default: false 59 | } 60 | }, 61 | token1: { 62 | buy: Number, 63 | sell: Number, 64 | checked: { 65 | type: Boolean, 66 | default: false 67 | } 68 | } 69 | } 70 | 71 | }, { timestamps: { createdAt: 
'created_at' } }); 72 | 73 | tokenHistorySchema.index({'token0.contract': 1}); 74 | tokenHistorySchema.index({'token1.contract': 1}); 75 | tokenHistorySchema.index({'variation.daily': 1}); 76 | tokenHistorySchema.index({pair: 1}); 77 | tokenHistorySchema.index({router: 1}); 78 | tokenHistorySchema.index({chain: 1}); 79 | /* 80 | db.tokenhistories.ensureIndex({'token0.contract': 1}); 81 | db.tokenhistories.ensureIndex({'token1.contract': 1}); 82 | db.tokenhistories.ensureIndex({variation.daily': 1}); 83 | db.tokenhistories.ensureIndex({pair: 1}); 84 | db.tokenhistories.ensureIndex({router: 1}); 85 | db.tokenhistories.ensureIndex({chain: 1}); 86 | */ 87 | 88 | module.exports = mongoose.model('TokenHistory', tokenHistorySchema); 89 | -------------------------------------------------------------------------------- /server/routes/tokens/history.pair.js: -------------------------------------------------------------------------------- 1 | require("dotenv").config(); 2 | var express = require('express'); 3 | var router = express.Router(); 4 | let Services = require('../../service'); 5 | 6 | router.get('/:pair', 7 | async function ( req, res ) { 8 | let from = req.query.from; 9 | let to = req.query.to; 10 | 11 | let resolution = req.query.resolution; 12 | let records = req.query.countBack; 13 | let pair = req.params.pair; 14 | 15 | if( !from || !to || !records || !pair ) return res.status(400).send({ error: { msg: "Invalid params", data: [] }}); 16 | 17 | console.log('\nFinding prices: ', resolution, from, to, '\n'); 18 | let priceRecords = await Services.price.findPrices( pair, from, to, records ); 19 | console.log('\nFound prices:', resolution, from, to, '\n'); 20 | 21 | if( !priceRecords || !priceRecords.length ) { 22 | let last_history = await Services.price.findLastPrice( pair, from, to ); 23 | if(!last_history) return res.status(200).send({ success: { msg: "success", data: [] }}); 24 | return res.status(200).send({ success: { msg: "success", data: [], nextTime: 
last_history.time }}); 25 | } 26 | 27 | return res.status(200).send({ success: { msg: "success", data: priceRecords }}); 28 | } 29 | ) 30 | router.get('/last/:pair', 31 | async function ( req, res ) { 32 | let from = req.query.from; 33 | let to = req.query.to; 34 | 35 | let records = req.query.countBack; 36 | let pair = req.params.pair; 37 | if( !from || !to || !records || !contract ) return res.status(400).send({ error: { msg: "Invalid params", data: [] }}); 38 | 39 | let lastPrice = await Services.price.findLastPrice( pair, from ); 40 | if(!lastPrice) return res.status(200).send({ success: { msg: "success", data: [], nextTime: 0 }}); 41 | return res.status(200).send({ success: { msg: "success", data: [], nextTime: lastPrice.time }}); 42 | } 43 | ) 44 | router.get('/transactions/:pair/:page', 45 | async function ( req, res ) { 46 | let pair = req.params.pair; 47 | let page = parseInt(req.params.page); 48 | 49 | if( !pair ) return res.status(400).send({ error: { msg: "Invalid params" }}); 50 | 51 | let transactions = await Services.transactions.findTransactions( pair, page ); 52 | if(!transactions) return res.status(400).send({ error: { msg: "cannot find transactions of the requested pair" }}); 53 | return res.status(200).send({ success: { msg: "success", data: transactions }}); 54 | } 55 | ) 56 | module.exports = router; -------------------------------------------------------------------------------- /server/routes/tokens/history.token.js: -------------------------------------------------------------------------------- 1 | require("dotenv").config(); 2 | var express = require('express'); 3 | var router = express.Router(); 4 | let Services = require('../../service'); 5 | 6 | router.get('/:token', 7 | async function ( req, res ) { 8 | let token = req.params.token; 9 | let from = req.query.from; 10 | let to = req.query.to; 11 | 12 | let resolution = req.query.resolution; 13 | let records = req.query.countBack; 14 | let pair = await Services.token.getMainPair(token); 
15 | 16 | if( !from || !to || !records || !pair ) return res.status(400).send({ error: { msg: "Invalid params", data: [] }}); 17 | 18 | let priceRecords = await Services.price.findPrices( pair.mainPair, from, to, records, resolution ); 19 | 20 | if( !priceRecords || !priceRecords.length ) { 21 | let last_history = await Services.price.findLastPrice( pair.mainPair, from, to ); 22 | if(!last_history) return res.status(200).send({ success: { msg: "success", reason: "No older records", data: [] }}); 23 | return res.status(200).send({ success: { msg: "success", data: [], nextTime: last_history.time }}); 24 | } 25 | 26 | return res.status(200).send({ success: { msg: "success", data: priceRecords }}); 27 | } 28 | ) 29 | router.get('/last/:token', 30 | async function ( req, res ) { 31 | let from = req.query.from; 32 | let to = req.query.to; 33 | 34 | let records = req.query.countBack; 35 | let pair = req.params.pair; 36 | if( !from || !to || !records || !contract ) return res.status(400).send({ error: { msg: "Invalid params", data: [] }}); 37 | 38 | let lastPrice = await Services.price.findLastPrice( pair, from ); 39 | if(!lastPrice) return res.status(200).send({ success: { msg: "success", data: [], nextTime: 0 }}); 40 | return res.status(200).send({ success: { msg: "success", data: [], nextTime: lastPrice.time }}); 41 | } 42 | ) 43 | module.exports = router; -------------------------------------------------------------------------------- /server/routes/tokens/index.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | price: require('./price'), 3 | historyPair: require('./history.pair'), 4 | historyToken: require('./history.token'), 5 | token: require('./token'), 6 | } -------------------------------------------------------------------------------- /server/routes/tokens/price.js: -------------------------------------------------------------------------------- 1 | require("dotenv").config(); 2 | var express = 
require('express'); 3 | var router = express.Router(); 4 | let Services = require('../../service'); 5 | 6 | router.get('/single/:contract', 7 | async function ( req, res ) { 8 | let contract = req.params.contract; 9 | let pair = await Services.token.getMainPair( contract ); 10 | let price = await Services.price.findPrice( pair.mainPair ); 11 | if( !price ) return res.status(400).send({ error: { msg: "Cannot retrive the price", data: 0 }}); 12 | return res.status(200).send({ success: { msg: "success", data: price }}); 13 | } 14 | ) 15 | router.get('/multiple/', 16 | async function ( req, res ) { 17 | 18 | let contracts = req.query.contracts; 19 | if( !contracts ) return res.status(400).send({ error: { msg: "Invalid Parameters" }}); 20 | try { contracts = JSON.parse(contracts); } 21 | catch (error) { console.log(error); return res.status(400).send({ error: { msg: "Invalid Parameters" }}); } 22 | 23 | let mainPairs = await Services.token.getMainPairMultiple( contracts ); 24 | let pairs = {}; 25 | for( let token in mainPairs ){ 26 | let tokenMainPair = mainPairs[token].mainPair; 27 | if( !tokenMainPair ) continue; 28 | pairs[tokenMainPair] = token 29 | } 30 | 31 | let prices = {} 32 | let retrivedPrices = await Services.price.findPriceMultiple( Object.keys(pairs) ); 33 | 34 | for( let price of retrivedPrices ){ 35 | let tokenContract = pairs[price.record.pair]; 36 | prices[ tokenContract ] = price.record.value; 37 | } 38 | 39 | return res.status(200).send({ success: { msg: "success", data: prices }}); 40 | } 41 | ) 42 | module.exports = router; -------------------------------------------------------------------------------- /server/routes/tokens/token.js: -------------------------------------------------------------------------------- 1 | require("dotenv").config(); 2 | var express = require('express'); 3 | var router = express.Router(); 4 | let Services = require('../../service'); 5 | 6 | function firstSignificant(n) { 7 | return Math.ceil(-Math.log10(n)); 8 | } 9 | 
async function getTokenBasicInfos( contract ){ 10 | let tokenInfo = {}; 11 | 12 | let tokenRetrived = await Services.token.findByContract( contract ); 13 | if( tokenRetrived ) tokenInfo = tokenRetrived; 14 | 15 | let pair = await mainPairHandler( contract ); 16 | let tokenPrice = pair.pairInfos.value; 17 | tokenInfo.pair = pair; 18 | if( tokenPrice ) { 19 | tokenInfo.pricescale = 10**(firstSignificant(tokenPrice) + 3 ) ; 20 | tokenInfo.minmov = 1; 21 | } 22 | return tokenInfo; 23 | } 24 | async function mainPairHandler( contract ) { 25 | let pair = await Services.token.getMainPair( contract ); 26 | let transactions = await Services.transactions.findTransactions( pair.mainPair, 1 ); 27 | pair.transactions = transactions; 28 | return pair; 29 | } 30 | router.get('/search/:url', 31 | async function(req, res) { 32 | let url = req.params.url; 33 | let tokens = await Services.token.searchByUrlOrContract(url); 34 | // await ApiCharting.getTokensPair( tokens ); 35 | return res.status(200).send({ success: { msg: "success", data: tokens }}) 36 | } 37 | ); 38 | router.get('/info/:contract', 39 | async function ( req, res ) { 40 | let contract = req.params.contract; 41 | let tokenInfo = await getTokenBasicInfos( contract ); 42 | 43 | // implemented custom endpoint to retrive informations 44 | let allPairs = await Services.token.getPairs( contract ); 45 | tokenInfo.pairs = allPairs; 46 | 47 | return res.status(200).send({ success: { msg: "success", data: tokenInfo }}); 48 | } 49 | ) 50 | router.get('/basic/:contract', 51 | async function ( req, res ) { 52 | let contract = req.params.contract; 53 | let tokenInfo = await getTokenBasicInfos( contract ); 54 | return res.status(200).send({ success: { msg: "success", data: tokenInfo }}); 55 | } 56 | ) 57 | router.get('/pairs/:contract', 58 | async function ( req, res ) { 59 | let contract = req.params.contract; 60 | let tokenPairs = await Services.history.findPairs( contract ); 61 | if( !tokenPairs ) return res.status(400).send({ 
error: { msg: "Cannot retrive the token pairs", data: {} }}); 62 | return res.status(200).send({ success: { msg: "success", data: tokenPairs }}); 63 | } 64 | ) 65 | router.get('/mainPairMultiple/', 66 | async function ( req, res ) { 67 | 68 | let contracts = req.query.contracts; 69 | if( !contracts ) return res.status(400).send({ error: { msg: "Invalid Parameters" }}); 70 | 71 | try { contracts = JSON.parse(contracts); } 72 | catch (error) { return res.status(400).send({ error: { msg: "Invalid Parameters" }}); } 73 | 74 | let pairsInfo = await Services.token.getMainPairMultiple( contracts ); 75 | 76 | return res.status(200).send({ success: { msg: "success", data: pairsInfo }}); 77 | } 78 | ) 79 | router.get('/mainPair/:contract', 80 | async function ( req, res ) { 81 | let contract = req.params.contract; 82 | if( !contract ) return res.status(400).send({ error: { msg: "Invalid Parameters" }}); 83 | 84 | let pair = await mainPairHandler(contract) 85 | 86 | if(!pair) res.status(400).send({ error: { msg: "Cannot retrive the token pairs", data: {} }}) 87 | else return res.status(200).send({ success: { msg: "success", data: pair }}); 88 | } 89 | ) 90 | router.get('/mainPairs/:contract', 91 | async function ( req, res ) { 92 | let contract = req.params.contract; 93 | if( !contract ) return res.status(400).send({ error: { msg: "Invalid Parameters" }}); 94 | let pairs = await Services.token.getPairs( contract ); 95 | return res.status(200).send({ success: { msg: "success", data: pairs }}); 96 | } 97 | ) 98 | 99 | 100 | 101 | 102 | 103 | 104 | module.exports = router; -------------------------------------------------------------------------------- /server/server.js: -------------------------------------------------------------------------------- 1 | require('dotenv').config(); 2 | var express = require('express'); 3 | var path = require('path'); 4 | var logger = require('morgan'); 5 | var helmet = require('helmet'); 6 | var compression = require('compression'); // speed 
emprovement 7 | 8 | var cookieParser = require('cookie-parser'); 9 | var cors = require('cors'); 10 | 11 | const cluster = require("cluster"); 12 | 13 | // import routes handlers 14 | const routesTokens = require('./routes/tokens'); 15 | 16 | var configDB = require('./config/database.js'); 17 | const mongoose = require('mongoose'); 18 | mongoose.connect(configDB.url, { 19 | autoIndex: false, 20 | useNewUrlParser: true, 21 | useUnifiedTopology: true 22 | }).then(() => { console.log('1MongoDB is connected') }) 23 | .catch(err => { 24 | console.log('MongoDB connection unsuccessful'); 25 | console.log(err) 26 | }); 27 | 28 | var app = express(); 29 | 30 | 31 | 32 | //API RATE LIMIT 33 | app.set('trust proxy', 1); // if you are under reverse proxy 34 | 35 | // app.use(logger('combined')); 36 | app.use(logger(`CLUSTER ${cluster.worker.id} - :remote-addr - :remote-user [:date[clf]] ":method :url HTTP/:http-version" :status :res[content-length] ":referrer" ":user-agent"`)); 37 | //app.use(logger('dev')); 38 | 39 | 40 | app.use(express.json({limit: '50mb'})); 41 | app.use(express.urlencoded({ extended: false, limit: '50mb' })); 42 | app.use(compression({ level: 6, threshold: 0 })); 43 | app.use(cors({ 44 | credentials: true, 45 | origin: [...JSON.parse(process.env.FRONTEND_URL)] 46 | })); 47 | 48 | app.use(helmet()); 49 | app.use(cookieParser()); 50 | 51 | // Mount REST on /api 52 | 53 | // TOKENS ROUTES 54 | app.use( '/token', routesTokens.token ); 55 | app.use( '/token/price', routesTokens.price ); 56 | app.use( '/token/history', routesTokens.historyToken ); 57 | app.use( '/pair/history', routesTokens.historyPair ); 58 | 59 | app.use('*', (req, res) => { res.status(500).send({status: '2. 
ok'}) }); 60 | 61 | app.use(function(err, req, res, next) { 62 | res.status(500).send({ 63 | message: err.message, 64 | error: {}, 65 | meta_tags: {} 66 | }); 67 | }); 68 | 69 | app.get('*', function(req, res){ res.send({ status: 'Not Found'}); }); 70 | 71 | module.exports = app; -------------------------------------------------------------------------------- /server/service/db.history.js: -------------------------------------------------------------------------------- 1 | var TokenHistory = require('../models/token_history'); 2 | const UtilsAddresses = require('../../utils/addresses'); 3 | 4 | async function findPairs( contract ){ 5 | let documents = await TokenHistory.find( 6 | { dependantToken: UtilsAddresses.toCheckSum(contract) } 7 | ).lean().exec(); 8 | if(!documents.length) return null; 9 | return documents; 10 | } 11 | async function findPairsMultiple( contracts ){ 12 | for( let i = 0; i < contracts.length; i++ ){ 13 | contracts[i] = UtilsAddresses.toCheckSum(contracts[i]); 14 | } 15 | let documents = await TokenHistory.find( 16 | { dependantToken: { $in: contracts } } 17 | ).lean().exec(); 18 | if(!documents.length) return null; 19 | return documents; 20 | } 21 | async function findPairsWithFilters( filter ){ 22 | return await TokenHistory.find(filter).lean().exec(); 23 | } 24 | async function findTokensWithMultipleRouters( allowedRouters ){ 25 | 26 | let routers = await TokenHistory.aggregate([ 27 | { 28 | $match: { router: { $in: allowedRouters } } 29 | }, 30 | { 31 | $group: { 32 | _id: { router: "$router"}, 33 | count: { "$sum": 1 }, 34 | docs: { $push: "$$ROOT" } 35 | } 36 | } 37 | ], 38 | {allowDiskUse: true} // For faster processing if set is larger 39 | ).exec() 40 | return routers; 41 | } 42 | async function findAllowedRoutersPairs( allowedRouters, tokens ){ 43 | let match = { router: { $in: allowedRouters } }; 44 | if( tokens ){ 45 | match['token0.contract'] = { $in: tokens }; 46 | match['token1.contract'] = { $in: tokens }; 47 | } 48 | return 
await TokenHistory.find(match) 49 | .select({ 'token0.contract': 1, 'token1.contract': 1, pair: 1, router: 1, reserve0: 1, reserve1: 1 }) 50 | .lean() 51 | .exec(); 52 | } 53 | async function findAllPairs(){ 54 | return await TokenHistory.find() 55 | .select({ 'token0.contract': 1, 'token0.decimals': 1, 'token1.contract': 1, 'token1.decimals': 1, pair: 1, router: 1, reserve0: 1, reserve1: 1 }) 56 | .lean() 57 | .exec(); 58 | } 59 | 60 | 61 | module.exports = { 62 | findPairsWithFilters, 63 | findPairs, 64 | findPairsMultiple, 65 | findTokensWithMultipleRouters, 66 | findAllPairs, 67 | findAllowedRoutersPairs 68 | } -------------------------------------------------------------------------------- /server/service/db.history.price.js: -------------------------------------------------------------------------------- 1 | const HistoryPrice = require('../models/history_prices'); 2 | const UtilsAddresses = require('../../utils/addresses'); 3 | 4 | async function findPrices( pair, from, to, recordsCount, resolution ){ 5 | 6 | if( recordsCount > 350 ) recordsCount = 350; 7 | if( !pair || !from || !to ) return []; 8 | 9 | let records; 10 | if( !resolution || resolution == 1 ){ 11 | records = await HistoryPrice.find( 12 | { 13 | pair: UtilsAddresses.toCheckSum(pair), 14 | time: { $lt: parseInt(to) } 15 | } 16 | ).sort({time: -1}).limit(parseInt(recordsCount)).lean().select({ value: 1, low: 1, high: 1, open: 1, close: 1, time: 1 }).exec(); 17 | } 18 | else { 19 | let multiplier = 1; 20 | if( resolution == "5" ) multiplier = 5; 21 | else if( resolution == "60" ) multiplier = 60; 22 | else if( resolution == "720" ) multiplier = 720; 23 | else if( resolution == "1D" ) multiplier = 1440; 24 | 25 | records = await HistoryPrice.aggregate([ 26 | { 27 | "$match": { 28 | "pair" : UtilsAddresses.toCheckSum(pair), 29 | time: { $lt: parseInt(to) } 30 | }, 31 | }, 32 | { 33 | "$group": { 34 | "_id": { 35 | "interval": { 36 | "$subtract": [ 37 | "$time", 38 | { "$mod": ["$time", multiplier * 
60] } 39 | ] 40 | }, 41 | 42 | }, 43 | "high": { "$max": "$value" }, 44 | "low": { "$min": "$value" }, 45 | "open": { "$first": "$value" }, 46 | "close": { "$last": "$value" }, 47 | }, 48 | }, 49 | { 50 | "$sort": { 51 | "_id.interval": -1 52 | } 53 | }, 54 | { 55 | "$limit": parseInt(recordsCount) 56 | } 57 | ]); 58 | for( let i = 0; i < records.length; i++ ){ 59 | records[i].time = records[i]._id.interval 60 | } 61 | console.log('\nFound prices with resolution: ', resolution, records.length, '\n'); 62 | } 63 | 64 | return records; 65 | } 66 | async function findLastPrice( pair, from, to ){ 67 | if( !pair || !from ) return null; 68 | 69 | let record = await HistoryPrice.findOne( 70 | { 71 | pair: UtilsAddresses.toCheckSum(pair), 72 | time: { $lt: parseInt(from) } 73 | } 74 | ).lean().select({ value: 1, low: 1, high: 1, open: 1, close: 1, time: 1 }).sort({ time: -1 }).exec(); 75 | 76 | return record; 77 | } 78 | 79 | async function findPrice( pair ){ 80 | if( !pair ) return null; 81 | let record = await HistoryPrice.findOne( 82 | { 83 | pair: UtilsAddresses.toCheckSum(pair), 84 | time: { $lte: parseInt(Date.now()/1000) } 85 | } 86 | ).lean().select({ value: 1 }).exec(); 87 | if( !record ) { return null } 88 | return record.value; 89 | } 90 | 91 | async function findPriceMultiple( pairs ){ 92 | if( !pairs || !pairs.length ) return null; 93 | for( let i in pairs ){ pairs[i] = UtilsAddresses.toCheckSum(pairs[i]) } 94 | 95 | let records = HistoryPrice.aggregate([ 96 | { 97 | $match: { 98 | pair: { 99 | $in: pairs 100 | } 101 | } 102 | }, 103 | { 104 | $project: { 105 | time: 1, 106 | value: 1, 107 | pair: 1 108 | } 109 | }, 110 | { 111 | $sort: { 112 | time: -1 113 | } 114 | }, 115 | { 116 | $group: { 117 | _id: "$pair", 118 | record: { 119 | $first: "$$ROOT" 120 | } 121 | } 122 | } 123 | ]).exec(); 124 | 125 | if( !records ) { return null } 126 | return records; 127 | } 128 | 129 | module.exports = { 130 | findPrice, 131 | findPrices, 132 | findLastPrice, 133 | 
findPriceMultiple 134 | } -------------------------------------------------------------------------------- /server/service/db.history.transaction.js: -------------------------------------------------------------------------------- 1 | var HistoryTransactions = require('../models/history_transactions'); 2 | let TRANSACTIONS_PER_PAGE_LIMIT = 100; 3 | const UtilsAddresses = require('../../utils/addresses'); 4 | 5 | async function findTransactions( pair, page ){ 6 | if(!pair) return null; 7 | if(!page) page = 1; 8 | 9 | let documents = await HistoryTransactions.find( 10 | { pair: UtilsAddresses.toCheckSum(pair) } 11 | ) 12 | .sort({ time: -1 }).limit(page * TRANSACTIONS_PER_PAGE_LIMIT).lean().exec(); 13 | return documents; 14 | } 15 | async function findAllTransactionsBySimpleFilter(filters){ 16 | let documents = await HistoryTransactions.find(filters) 17 | .sort({ time: -1 }).lean().exec(); 18 | return documents; 19 | } 20 | async function findTransactionsGteTime( time ){ 21 | let documents = await HistoryTransactions.aggregate([ 22 | { $match: { time: { $gte: time }} }, 23 | { $sort: { time: -1 } }, 24 | { $group: { _id: "$dependantToken" , docs: { $push: "$$ROOT" } }}, 25 | { 26 | $project: { 27 | token: "$_id", 28 | _id: 0, 29 | docs: 1 30 | } 31 | } 32 | ]).exec(); 33 | return documents; 34 | } 35 | module.exports = { 36 | findTransactions, 37 | findAllTransactionsBySimpleFilter, 38 | findTransactionsGteTime 39 | } -------------------------------------------------------------------------------- /server/service/db.routers.js: -------------------------------------------------------------------------------- 1 | 2 | var Routers = require('../models/routers'); 3 | async function findRouters( filter ){ 4 | let documents = await Routers.find(filter).lean().exec(); 5 | if(!documents.length) return []; 6 | return documents; 7 | } 8 | async function findRoutersValid(){ 9 | return await findRouters({valid: true}); 10 | } 11 | module.exports = { 12 | findRouters, 13 | 
findRoutersValid 14 | } -------------------------------------------------------------------------------- /server/service/db.token.js: -------------------------------------------------------------------------------- 1 | require('dotenv').config(); 2 | const EnumMainTokens = require('../../enum/mainTokens'); 3 | const UtilsAddresses = require('../../utils/addresses'); 4 | var TokenBasic = require('../models/token_basic'); 5 | var ServiceHistory = require('./db.history'); 6 | let TOKENS_PER_PAGE = 25; 7 | 8 | 9 | 10 | async function findByContract( contract ){ 11 | let tokenInfos = await TokenBasic.findOne({ contract: UtilsAddresses.toCheckSum(contract) }).lean().exec(); 12 | if( !tokenInfos || tokenInfos.name == "$NULL" ) { return null } 13 | return tokenInfos; 14 | } 15 | async function getSymbolFromContract( contract ){ 16 | let tokenInfos = await TokenBasic.findOne({ contract: UtilsAddresses.toCheckSum(contract) }).select({ symbol: 1 }).lean().exec(); 17 | if( !tokenInfos || tokenInfos.name == "$NULL" ) { return null } 18 | return tokenInfos.symbol; 19 | } 20 | 21 | async function findWithMostScoreAndNoFee( minimumPoints, returnRecordsAmount ){ 22 | let bnb = await TokenBasic.findOne({ contract: EnumMainTokens[process.env.CHAIN_ID].MAIN.address }).lean().exec(); // sepcify bnb becouse it has no 'fee' object 23 | let queryRes = await TokenBasic.find({ score_points: { $gt: minimumPoints }, 'fees.buy': 0, 'fees.sell': 0 }).sort({ score_points: -1 }).limit(returnRecordsAmount).lean().exec(); 24 | 25 | if( bnb ) queryRes.unshift(bnb); 26 | 27 | return queryRes 28 | } 29 | async function findDecimals( tokens ){ 30 | return await TokenBasic.find({ contract: { $in: tokens } }).select({ decimals: 1, contract: 1 }).lean().exec(); 31 | } 32 | 33 | async function searchByUrlOrContract( urlOrContract ){ 34 | let query = urlOrContract.trim().replace(/[^\w\s]/gi, '').split(" ").join(""); 35 | let tokens = await TokenBasic.find({ 36 | $or: [ 37 | {name: { $regex: '.*' + query + 
'.*', $options: 'i' }}, 38 | {contract: { $regex: '.*' + query + '.*', $options: 'i' }} 39 | ] 40 | }) 41 | .select({ name: 1, symbol: 1, contract: 1, pairs_count: 1 }) 42 | .sort({ score_points: -1 }) 43 | .limit(TOKENS_PER_PAGE) 44 | .exec(); 45 | 46 | return tokens; 47 | } 48 | 49 | async function getPairs( contract ){ 50 | let tokenPairs = await ServiceHistory.findPairs( contract ); 51 | let pairs = {} // tokenAddress => { reserve: num, name: name } 52 | 53 | console.log( tokenPairs ); 54 | 55 | for( let i in tokenPairs ){ 56 | let pairInfos = tokenPairs[i]; 57 | 58 | if( pairInfos.mainToken === EnumMainTokens[pairInfos.chain].MAIN.address ) pairs[pairInfos.pair] = {}; 59 | else if( EnumMainTokens[pairInfos.chain].STABLECOINS.includes( pairInfos.mainToken ) ) pairs[pairInfos.pair] = {}; 60 | 61 | if( i == tokenPairs.length - 1 && !Object.keys(pairs).length ){} 62 | else if( !pairs[pairInfos.pair] ) continue; 63 | 64 | pairs[pairInfos.pair] = { 65 | mainToken: pairInfos.mainToken, 66 | mainReserveValue: pairInfos.mainReserveValue, 67 | router: pairInfos.router, 68 | chain: pairInfos.chain, 69 | value: pairInfos.value, 70 | mcap: pairInfos.mcap, 71 | variation: pairInfos.variation, 72 | tokens: { 73 | 0: pairInfos.token0.symbol, 74 | 1: pairInfos.token1.symbol 75 | } 76 | }; 77 | 78 | if( pairInfos.mainToken == pairInfos.token0.contract ) pairs[pairInfos.pair].reserve = pairInfos.reserve0; 79 | else pairs[pairInfos.pair].reserve = pairInfos.reserve1; 80 | 81 | } 82 | return pairs; 83 | } 84 | async function getPairsMultiple( contracts ){ 85 | for( let i in contracts ){ 86 | contracts[i] = UtilsAddresses.toCheckSum(contracts[i]); 87 | } 88 | 89 | let tokensPairs = await ServiceHistory.findPairsMultiple( contracts ); 90 | if( !tokensPairs ) return {} 91 | 92 | let pairsRetrived = {} // tokenAddress: { pairAddress => { reserve: num, name: name } } 93 | for( let pairRetrived of tokensPairs ){ 94 | if(!pairsRetrived[pairRetrived.dependantToken]) 
pairsRetrived[pairRetrived.dependantToken] = []; 95 | pairsRetrived[pairRetrived.dependantToken].push(pairRetrived); 96 | } 97 | let organizedPairs = {}; 98 | for( let token in pairsRetrived ){ 99 | let pairsToCheck = pairsRetrived[token]; 100 | organizedPairs[token] = {}; 101 | for( let i in pairsToCheck ){ 102 | let pairInfos = pairsToCheck[i]; 103 | 104 | if( pairInfos.mainToken === EnumMainTokens[pairInfos.chain].MAIN.address ) organizedPairs[token][pairInfos.pair] = {}; 105 | else if( EnumMainTokens[pairInfos.chain].STABLECOINS.includes( pairInfos.mainToken ) ) organizedPairs[token][pairInfos.pair] = {}; 106 | 107 | if( i == pairsToCheck.length - 1 && !Object.keys(organizedPairs[token]).length ){} 108 | else if( !organizedPairs[token][pairInfos.pair] ) continue; 109 | 110 | organizedPairs[token][pairInfos.pair] = { 111 | mainToken: pairInfos.mainToken, 112 | mainReserveValue: pairInfos.mainReserveValue, 113 | name: pairInfos.token0.name, 114 | router: pairInfos.router, 115 | chain: pairInfos.chain, 116 | value: pairInfos.value, 117 | mcap: pairInfos.mcap, 118 | variation: pairInfos.variation 119 | }; 120 | 121 | if( pairInfos.mainToken == pairInfos.token0.contract ) organizedPairs[token][pairInfos.pair].reserve = pairInfos.reserve0; 122 | else organizedPairs[token][pairInfos.pair].reserve = pairInfos.reserve1; 123 | } 124 | } 125 | 126 | return organizedPairs; 127 | } 128 | 129 | async function getSupplyMultiple( contracts ){ 130 | for( let i in contracts ) contracts[i] = contracts[i]; 131 | 132 | let retrivedSupplies = await findSupplyMultiple( contracts ); 133 | let retrived = {}; 134 | if(retrivedSupplies && retrivedSupplies.length ) { 135 | for( let info of retrivedSupplies ){ 136 | retrived[ info.contract ] = info.total_supply; 137 | } 138 | } 139 | return retrived; 140 | } 141 | 142 | async function findSupplyMultiple( contracts ){ 143 | let documents = await TokenBasic.find( 144 | { contract: { $in: contracts } } 145 | ).select({ contract: 1, 
total_supply: 1 }).lean().exec(); 146 | if(!documents.length) return null; 147 | return documents; 148 | } 149 | 150 | async function getMainPair( contract ){ 151 | 152 | let pairs = await getPairs( contract ) // tokenAddress => pair informations 153 | let token = await TokenBasic.findOne({contract : contract}).select({total_supply: 1}).lean().exec(); 154 | let totalSupply = token ? token.total_supply : 0; 155 | 156 | let mainPair = null; // each token probably has a pair with bnb or main stable coins, and we prefer that ones 157 | let mainPairVal = 0; 158 | let pairInfos = {}; 159 | 160 | for( let pair in pairs ){ 161 | let pairDetails = pairs[pair]; 162 | 163 | if( pairDetails.mainToken === EnumMainTokens[pairDetails.chain].MAIN.address ) { 164 | if( pairDetails.mainReserveValue > mainPairVal ){ 165 | mainPair = pair; 166 | mainPairVal = pairDetails.mainReserveValue; 167 | pairInfos = pairDetails; 168 | } 169 | } 170 | else if( EnumMainTokens[pairDetails.chain].STABLECOINS.includes( pairDetails.mainToken ) ) { 171 | if( pairDetails.mainReserveValue > mainPairVal ) { 172 | mainPair = pair; 173 | mainPairVal = pairDetails.mainReserveValue; 174 | pairInfos = pairDetails; 175 | } 176 | } 177 | } 178 | 179 | if( !mainPair ) { 180 | mainPair = Object.keys( pairs )[0]; 181 | let pairDetails = pairs[mainPair]; 182 | 183 | 184 | if( !pairDetails ) { 185 | return { 186 | mainPair: null, 187 | mainPairVal: 0, 188 | pairInfos: {}, 189 | totalSupply: totalSupply 190 | } 191 | } 192 | 193 | mainPairVal = pairDetails.mainReserveValue; 194 | return { 195 | mainPair: mainPair, 196 | mainPairVal: mainPairVal, 197 | pairInfos: pairDetails, 198 | totalSupply: totalSupply 199 | } 200 | } else { 201 | return { 202 | mainPair: mainPair, 203 | mainPairVal: mainPairVal, 204 | pairInfos: pairInfos, 205 | totalSupply: totalSupply 206 | }; 207 | } ; // if no mainPair was found, return the first pair inside the object 208 | 209 | } 210 | async function getMainPairMultiple( contracts ){ 211 | 
212 | for( let i in contracts ){ 213 | contracts[i] = UtilsAddresses.toCheckSum(contracts[i]); 214 | } 215 | 216 | let tokensPairs = await getPairsMultiple( contracts ) // tokenAddress => { pair address: pair informations } 217 | let tokensSupplies = await getSupplyMultiple( contracts ) 218 | 219 | 220 | 221 | let mainPairs = {}; 222 | for( let token in tokensPairs ){ 223 | let pairs = tokensPairs[token]; 224 | mainPairs[token] = { mainPair: null, mainPairVal: 0, pairInfos: {}, totalSupply: tokensSupplies[token] } 225 | 226 | for( let pair in pairs ){ 227 | 228 | 229 | 230 | let pairInfos = pairs[pair]; 231 | 232 | 233 | if( pairInfos.mainToken === EnumMainTokens[pairInfos.chain].MAIN.address ) { 234 | 235 | if( pairInfos.mainReserveValue > mainPairs[token].mainPairVal ){ 236 | 237 | mainPairs[token].mainPair = pair; 238 | mainPairs[token].mainPairVal = pairInfos.mainReserveValue; 239 | mainPairs[token].pairInfos = pairInfos; 240 | } 241 | } 242 | else if( EnumMainTokens[pairInfos.chain].STABLECOINS.includes( pairInfos.mainToken ) ) { 243 | 244 | if( pairInfos.mainReserveValue > mainPairs[token].mainPairVal ) { 245 | 246 | mainPairs[token].mainPair = pair; 247 | mainPairs[token].mainPairVal = pairInfos.mainReserveValue; 248 | mainPairs[token].pairInfos = pairInfos; 249 | } 250 | } 251 | } 252 | if( !mainPairs[token] ) { // if no mainPair was found, return the first pair inside the token pairs 253 | 254 | mainPairs[token].mainPair = Object.keys( pairs )[0]; 255 | } 256 | } 257 | return mainPairs; 258 | } 259 | module.exports = { 260 | findByContract, 261 | getSymbolFromContract, 262 | getMainPair, 263 | getMainPairMultiple, 264 | getPairs, 265 | searchByUrlOrContract, 266 | findWithMostScoreAndNoFee, 267 | findDecimals 268 | } -------------------------------------------------------------------------------- /server/service/index.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | history: 
require('./db.history'), 3 | token: require('./db.token'), 4 | price: require('./db.history.price'), 5 | transactions: require('./db.history.transaction') 6 | } -------------------------------------------------------------------------------- /server/websocket/index.js: -------------------------------------------------------------------------------- 1 | const { getParams } = require("./utils"); 2 | const TokensWss = require("./sockets/Tokens"); 3 | 4 | // define all socket types and handling functions here 5 | function setupSocketHandlers() { 6 | return { 7 | token: TokensWss(), 8 | }; 9 | } 10 | 11 | // setup websocket server 12 | function setupWebSocket(server) { 13 | // setup socket handlers 14 | const wssHandler = setupSocketHandlers(); 15 | 16 | // upgrade will check if we have a way to handle this type of socket 17 | // authenticate user using the same jwt 18 | server.on("upgrade", async function upgrade(request, socket, head) { 19 | try { 20 | let { path } = getParams(request); 21 | path = path.trim(); 22 | if (!(path in wssHandler)) { 23 | throw `Unknow conneciton path ${path}`; 24 | } 25 | 26 | // authenticate client 27 | // allow upgrade 28 | const wss = wssHandler[path]; 29 | wss.handleUpgrade(request, socket, head, function done(ws) { 30 | wss.emit("connection", ws, request); 31 | }); 32 | } catch (err) { 33 | console.log("upgrade exception", err); 34 | socket.write("HTTP/1.1 401 Unauthorized\r\n\r\n"); 35 | socket.destroy(); 36 | return; 37 | } 38 | }); 39 | } 40 | 41 | module.exports = { 42 | setupSocketHandlers, 43 | setupWebSocket, 44 | }; 45 | -------------------------------------------------------------------------------- /server/websocket/sockets/Tokens.js: -------------------------------------------------------------------------------- 1 | const WebSocket = require("ws"); 2 | const { findTransactionsGteTime } = require("../../service/db.history.transaction"); 3 | 4 | const { 5 | individualPipeline, 6 | broadcastPipeline, 7 | authHandler, 8 | 
setupPing, 9 | getParams, 10 | getUniqueID 11 | } = require("../utils"); 12 | 13 | let clients = {}; 14 | let tokens = {}; // ctx.id: [token, index]; 15 | let retriveTime = Date.now()/1000; 16 | let retrivedTx = {}; 17 | 18 | function sleep(ms) { 19 | return new Promise(resolve => setTimeout(resolve, ms)); 20 | } 21 | function broadcastTransactions(){ 22 | let tokensTx = Object.keys(retrivedTx); 23 | for( let token of tokensTx ){ 24 | // console.log(` ${token} with ${clients[token] ? clients[token].length : 0} CLIENTS `); 25 | if(!clients[token]) continue; 26 | if( clients[token].length ){ 27 | for( let c of clients[token]){ 28 | if (c.is_authenticated) { 29 | c.send( 30 | JSON.stringify({ 31 | transactions: retrivedTx[token] 32 | }) 33 | ); 34 | } 35 | } 36 | } 37 | } 38 | } 39 | 40 | // websocket handling functions 41 | function TokensWss() { 42 | const wss = new WebSocket.Server({ noServer: true }); 43 | 44 | async function retriveTransactions() { 45 | let retriveTime = Date.now()/1000; 46 | while( true ){ 47 | 48 | let transactions = await findTransactionsGteTime(retriveTime); 49 | retrivedTx = {}; // reset the object 50 | for( let tokenTx of transactions ) retrivedTx[tokenTx.token] = tokenTx.docs; // populate it with new transactions 51 | console.log(`Broadcasting to ${wss.clients.size} clients ${Object.keys(retrivedTx).length}`) 52 | broadcastTransactions(); // send all the new transactions in broadcast 53 | 54 | if(Object.keys(retrivedTx).length) // increase the time ONLY if some transaction si found 55 | retriveTime = Date.now()/1000; 56 | 57 | await sleep( 1000 * process.env.WRITE_TO_DB_SECONDS ); 58 | } 59 | } 60 | retriveTransactions(); 61 | 62 | // establish connection 63 | wss.on("connection", (ctx, request) => { 64 | console.log("connected", wss.clients.size); 65 | 66 | ctx.id = getUniqueID(); 67 | 68 | // setup authentication 69 | authHandler(ctx, () => { 70 | let { token } = getParams(request); 71 | token = token; 72 | if( !clients[token] ) 
clients[token] = []; 73 | clients[token].push(ctx); 74 | tokens[ctx.id] = [token, clients[token].length - 1]; 75 | registerActions(ctx) 76 | }); 77 | 78 | ctx.send("connection established"); 79 | 80 | ctx.on("pong", () => { 81 | ctx.isAlive = true; 82 | }); 83 | 84 | }); 85 | 86 | // handle stalled connections 87 | setupPing(wss.clients); 88 | 89 | return wss; 90 | } 91 | 92 | // this function is invoked after successfull auth 93 | function registerActions(ctx) { 94 | 95 | // setup individual pipeline 96 | // const interval = individualPipeline(ctx); 97 | 98 | ctx.on("close", () => { 99 | let [token, index] = tokens[ctx.id]; 100 | 101 | clients[token].splice(index, 1); 102 | delete tokens[ctx.id]; 103 | 104 | console.log('Killing connection'); 105 | // clearInterval(interval); // individual pipeline interval 106 | }); 107 | 108 | // register new message handler 109 | ctx.on("message", (message) => { 110 | console.log('Recived message: ', message) 111 | ctx.send(`echo: ${message}`); 112 | }); 113 | } 114 | 115 | 116 | 117 | 118 | module.exports = TokensWss; -------------------------------------------------------------------------------- /server/websocket/utils.js: -------------------------------------------------------------------------------- 1 | const url = require("url"); 2 | 3 | // extract connection type from url 4 | // we'll only consider one param for type of socket 5 | const getParams = (request) => { 6 | try { 7 | const parsed = url.parse(request.url); 8 | const res = { path: parsed.pathname }; 9 | parsed.query.split("&").forEach((param) => { 10 | const [k, v] = param.split("="); 11 | res[k] = v; 12 | }); 13 | return res; 14 | } catch (err) { 15 | return "na"; 16 | } 17 | }; 18 | 19 | const getUniqueID = function () { 20 | function s4() { 21 | return Math.floor((1 + Math.random()) * 0x10000).toString(16).substring(1); 22 | } 23 | return s4() + s4() + '-' + s4(); 24 | }; 25 | 26 | 27 | // initiate a ping with client 28 | // stalled or unauthenticated clients 
// Periodically ping every connected client; sockets that did not answer the
// previous ping (isAlive === false) or never authenticated are terminated.
// Returns the interval handle so the caller can cancel the loop.
function setupPing(clients) {
    const sweep = () => {
        for (const client of clients.values()) {
            const stalled = client.isAlive === false;
            const unauthenticated = client.is_authenticated === false;
            if (stalled || unauthenticated) {
                client.terminate();
            }
            // mark as pending; the client's pong handler flips it back to true
            client.isAlive = false;
            client.ping(() => {});
        }
    };
    return setInterval(sweep, 15000);
}
-------------------------------------------------------------------------------- /utils/sleep.js: -------------------------------------------------------------------------------- 1 | module.exports = function sleep(ms) { 2 | return new Promise(resolve => setTimeout(resolve, ms)); 3 | } -------------------------------------------------------------------------------- /workers/analizer/analize.transaction.js: -------------------------------------------------------------------------------- 1 | require('dotenv').config(); 2 | const configDB = require('../../server/config/database'); 3 | const Services = require('../../server/service'); 4 | const mongoose = require('mongoose'); 5 | const getDataFromLog = require('../lib/logs'); 6 | const {getBlockSyncEvents, getReceiptsBatch} = require('../lib/scrape.block.past'); 7 | const { web3 } = require('../web3'); 8 | 9 | const fs = require('fs'); 10 | const stream = require('stream'); 11 | const readline = require('readline'); 12 | const { getSyncInBlockForPairs } = require('./sync.in.block'); 13 | 14 | function toSum(add){ 15 | return web3.utils.toChecksumAddress(add) 16 | } 17 | 18 | /* 19 | 0xe5c672ebf81cee90849fca88d3ecb2b75141aa847345250e6697aa5d2bc70452 -> orionPool tx arbitrage 20 | 0xe74f4a187571430afa2d432c4ff4f985d8f678ad4112d80ddde3a1369b1bf0f2 -> MDEX LP tx arbitrage 21 | 0x8de6c6997a55a9cc0223c708bf6facb852ac5f00328b39dce9b520c803829a85 -> orionPool tx arbitrage 22 | */ 23 | 24 | 25 | let arbitrageOutputLog = '/root/.pm2/logs/arbitrage.master-out.log'; 26 | const searchStream = (filename, text) => { 27 | return new Promise((resolve) => { 28 | const inStream = fs.createReadStream(filename); 29 | const outStream = new stream; 30 | const rl = readline.createInterface(inStream, outStream); 31 | const result = []; 32 | const regEx = new RegExp(text, "i") 33 | rl.on('line', function (line) { 34 | if (line && line.search(regEx) >= 0) { 35 | result.push(line) 36 | } 37 | }); 38 | rl.on('close', function () { 39 | 
resolve(result) 40 | }); 41 | }) 42 | } 43 | 44 | /* 45 | 46 | */ 47 | 48 | ( async () => { 49 | 50 | //let arbitrageLogs = fs.readFileSync("/root/.pm2/logs/arbitrage.master-out.log", "utf-8"); 51 | 52 | mongoose.connect(configDB.url, { 53 | autoIndex: false, 54 | useNewUrlParser: true, 55 | useUnifiedTopology: true 56 | }).then(async () => { console.log('MongoDB connected') }) 57 | .catch(err => { console.log('MongoDB connection unsuccessful', err); process.exit() }); 58 | 59 | let blockToggler = null; // the block where the trasaction had to be find; 60 | 61 | let hash = process.argv[2]; 62 | let txReceipt = await web3.eth.getTransactionReceipt(hash); 63 | let logs = txReceipt.logs; 64 | 65 | let pairs = []; 66 | for( let log of logs ){ 67 | if( !pairs.includes( toSum(log.address) ) && getDataFromLog(log) ){ 68 | pairs.push( toSum(log.address) ); 69 | } 70 | } 71 | 72 | for( let pair of pairs ){ 73 | let savedOnDb = await Services.history.findPairByContract( pair ); 74 | console.log(`[FOUND ON DB] ${pair} | Fees: ${savedOnDb.router_fee} | Router: ${savedOnDb.router}`); 75 | } 76 | 77 | console.log(`[PAIRS PATH] `, pairs.join(" ")); 78 | 79 | let block = parseInt(txReceipt.blockNumber); 80 | let target = block - 5; 81 | 82 | console.log('[TX BLOCK]', block); 83 | 84 | await getSyncInBlockForPairs(pairs, block, txReceipt.from); 85 | 86 | let latestSyncForPairs = {}; 87 | let prevBlock = block; 88 | for( let i = 1; prevBlock > target; i ++){ 89 | 90 | prevBlock = block-i; 91 | console.log('[PREV BLOCK]', prevBlock, i); 92 | 93 | let pairsInfo = await getBlockSyncEvents( prevBlock ); 94 | 95 | let status = false; 96 | for( let pair in pairsInfo ){ 97 | if( pairs.includes( toSum(pair) ) ) { 98 | if(!blockToggler) blockToggler = prevBlock; 99 | console.log('[PREVIOUS SYNC FOUND FOR]', toSum(pair) ); 100 | if(!latestSyncForPairs[toSum(pair)]){ 101 | latestSyncForPairs[toSum(pair)] = pairsInfo[pair].events.sync; 102 | console.log(pairsInfo[pair].events.sync) 103 | } 104 
// Print every `sync` event emitted in block `blockNum` by any pair address in
// `pairs` (checksummed). When `from` is given, transactions whose sender
// matches it are flagged with [SAME FROM] as well. Purely diagnostic: all
// output goes to the console, nothing is returned.
async function getSyncInBlockForPairs( pairs, blockNum, from ){
    const blockHeader = await web3.eth.getBlock(blockNum);
    const rpcResponsesReceipts = await getReceiptsBatch(blockHeader.transactions);

    for (const rpcRes of rpcResponsesReceipts) {
        const receipt = rpcRes.result;
        const hash = receipt.transactionHash;

        if (from && receipt.from == from) {
            console.log(`[SAME FROM] ${hash} ${from}`);
        }

        for (const log of receipt.logs) {
            if (!pairs.includes(toSum(log.address))) continue;
            const data = getDataFromLog(log);
            if (data?.name == 'sync') {
                console.log(`[SYNC IN BLOCK ${blockNum}][${Number(log.transactionIndex)}]`, hash, toSum(log.address), data.reserve0, data.reserve1);
            }
        }
    }
}
// Spawn `slaveCount` worker threads from `slavePath` and wire their handlers.
// Messages shaped { type, data } are dispatched to the matching entry in
// `callbacks` (spreading `data` as arguments); anything else is logged as-is.
function intializeWorkers(){
    for (let id = 0; id < slaveCount; id++) {
        const worker = new Worker(slavePath, { workerData: { ID: id } });

        worker.on('message', (msg) => {
            const handler = msg.type ? callbacks[msg.type] : undefined;
            if (!handler) {
                console.log(`[SLAVE ${id}]`, msg);
            } else if (msg.data) {
                handler(...msg.data);
            } else {
                handler();
            }
        });
        worker.on('exit', () => { console.log('Worker dead', id); });

        slaves.push(worker);
    }
}
// Scrape one block: fetch its per-pair sync events, forward each pair's fresh
// reserves to the master thread, then signal that the whole block is done.
async function analizeBlock( num ){
    const start = Date.now();
    const pairsInfo = await getBlockSyncEvents(num);

    for (const pair in pairsInfo) {
        const sync = pairsInfo[pair].events.sync;
        if (!sync) {
            console.log('[HASH WITHOUT SYNC] ', pairsInfo[pair].hash);
            continue;
        }
        parentPort.postMessage({
            type: 'ON_NEW_RESERVE_FOUND',
            data: [pair, [sync.reserve0, sync.reserve1], pairsInfo[pair].hash, num]
        });
    }

    console.log(`[SLAVE ${ID}] scraped ${num} ${(Date.now()-start)/1000}. Pairs updated: ${Object.keys(pairsInfo).length}`);
    parentPort.postMessage({
        type: 'ON_BLOCK_RESERVES_UPDATE',
        data: [num, pairsInfo]
    });
}
class BlockInfo {

    /**
     * Accumulates the streamed logs of one block until the bloom filter
     * rebuilt from those logs matches the block header's logsBloom, which
     * proves every log of the block was received.
     * @param {number} blockNumber block height this accumulator tracks
     */
    constructor(blockNumber){
        this._number = blockNumber;
        this._completeOnceChecks = 0; // reads of isCompleteOnce while complete
        this._transactions = {};
        this._logsBloom = '0x';
        this._pairs = {};
        // start from an empty 512-hex-char (2048 bit) bloom filter
        for (let i=0; i<512; i++) {
            this._logsBloom = this._logsBloom + '0';
        }
    }

    // attach the block header once it arrives (may come before or after logs)
    setBlock(block) {
        this._block = block;
    }

    /**
     * Fold a log into the bloom accumulator and, for swap/sync events,
     * record the pair state. `cb` fires with (pair, [r0, r1], txHash,
     * blockNumber) whenever a newer sync event is accepted for a pair.
     */
    addLog(log, cb) {
        this._logsBloom = setInBloom(this._logsBloom, log.address);
        for (let topic of log.topics) {
            this._logsBloom = setTopicInBloom(this._logsBloom, topic);
        }
        if (log.name) {
            if (!this._pairs[log.address]) this._pairs[log.address] = { events: {}, hash: log.transactionHash };
            if (log.name == 'swap') {
                this._pairs[log.address].events.swap = { ...log.returnValues };
            } else if (
                log.name == 'sync' &&
                (
                    //set this sync as latest if no other sync was set
                    !this._pairs[log.address].events.sync ||
                    //or it is in a more recent transaction with respect to the current sync
                    log.transactionIndex > this._pairs[log.address].events.sync.transactionIndex ||
                    //or it is in the same transaction as the current sync and it has a higher logIndex
                    (log.transactionIndex == this._pairs[log.address].events.sync.transactionIndex && log.logIndex > this._pairs[log.address].events.sync.logIndex)
                )
            ) {
                // BUG FIX: the guard used to test `this._pairs[log.address].sync`,
                // a property that is never written (state lives under
                // `.events.sync`), so the sync slot was re-created on every
                // event; test the real slot, matching organizeEvents() in
                // scrape.block.past.js.
                if (!this._pairs[log.address].events.sync) this._pairs[log.address].events.sync = {};
                this._pairs[log.address].events.sync.reserve0 = log.returnValues.reserve0;
                this._pairs[log.address].events.sync.reserve1 = log.returnValues.reserve1;
                this._pairs[log.address].events.sync.transactionIndex = log.transactionIndex;
                this._pairs[log.address].events.sync.logIndex = log.logIndex;
                this._pairs[log.address].hash = log.transactionHash;
                cb(log.address, [log.returnValues.reserve0, log.returnValues.reserve1], log.transactionHash, log.blockNumber); // ON_NEW_RESERVE_FOUND
            }
        }
    }

    // true exactly once: the first read after the block became complete
    get isCompleteOnce(){ return this.isComplete && this._completeOnceChecks++ == 0 }
    // complete = header received and rebuilt bloom equals the header's bloom
    get isComplete(){ return this._block && (this._logsBloom == this._block.logsBloom) }
    get pairs() { return this._pairs;}
    get block() { return this._block;}
    get number() { return this._number;}
}
blockInfo = this._blockInfoBuffer.find((bi)=> bi.number == blockNumber); 82 | if (!blockInfo) { 83 | //add it... 84 | blockInfo = new BlockInfo(blockNumber); 85 | this._blockInfoBuffer.push(blockInfo); 86 | if (this._blockInfoBuffer.length > BLOCKS_BUFFER_SIZE) { 87 | this._blockInfoBuffer.splice(0, this._blockInfoBuffer.length - BLOCKS_BUFFER_SIZE); 88 | } 89 | } 90 | return blockInfo; 91 | } 92 | 93 | _checkBlockCompletion(blockInfo){ 94 | if (blockInfo.isCompleteOnce){ 95 | this.ON_BLOCK_RESERVES_UPDATE( blockInfo.number, blockInfo.pairs ); 96 | } 97 | } 98 | 99 | async _processLog(error, log) { 100 | 101 | let blockInfo = this._getBlockInfo(log.blockNumber); 102 | let logInfo = getDataFromLog(log); 103 | if (logInfo) { 104 | log.returnValues = logInfo; 105 | log.name = logInfo.name; 106 | } 107 | 108 | blockInfo.addLog(log, this.ON_NEW_RESERVE_FOUND); 109 | 110 | //console.log(JSON.stringify(log)) 111 | this._checkBlockCompletion(blockInfo); 112 | } 113 | 114 | async _processBlock(error, data) { 115 | let blockInfo = this._getBlockInfo(data.number); 116 | if (!blockInfo.block) { 117 | blockInfo.setBlock(await web3.eth.getBlock(data.number)); 118 | console.log("GOT NEW BLOCK " + data.number); 119 | this._checkBlockCompletion(blockInfo); 120 | } 121 | } 122 | } 123 | 124 | module.exports = BlockListener; 125 | 126 | -------------------------------------------------------------------------------- /workers/lib/bloomfilter.js: -------------------------------------------------------------------------------- 1 | const sha3 = require("js-sha3"); 2 | 3 | //let exports = module.exports; 4 | 5 | /** 6 | * Keccak256 hash 7 | * @param data The data 8 | */ 9 | function keccak256(data) { 10 | return '0x' + sha3.keccak_256(toByteArray(data)); 11 | } 12 | /** 13 | * Adding padding to string on the left 14 | * @param value The value 15 | * @param chars The chars 16 | */ 17 | padLeft = (value, chars) => { 18 | const hasPrefix = /^0x/i.test(value) || typeof value === 'number'; 19 
/**
 * Convert bytes to hex. Leading zero nibbles are stripped, so the result is
 * not fixed-width (e.g. [0x01, 0x23] -> '0x123').
 * @param bytes The bytes
 */
function bytesToHex(bytes) {
    const hex = [];
    for (let i = 0; i < bytes.length; i++) {
        hex.push((bytes[i] >>> 4).toString(16));
        hex.push((bytes[i] & 0xf).toString(16));
    }
    return `0x${hex.join('').replace(/^0+/, '')}`;
}
/**
 * Convert a 0x-prefixed hex string or byte-array-like into a Uint8Array.
 * @param value The value
 * @throws for null, non-0x-prefixed / non-hex strings, and invalid arrays
 */
function toByteArray(value) {
    if (value == null) {
        throw new Error('cannot convert null value to array');
    }
    if (typeof value === 'string') {
        const match = value.match(/^(0x)?[0-9a-fA-F]*$/);
        if (!match) {
            throw new Error('invalid hexidecimal string');
        }
        if (match[1] !== '0x') {
            throw new Error('hex string must have 0x prefix');
        }
        value = value.substring(2);
        if (value.length % 2) {
            // odd nibble count: left-pad so each byte gets two hex chars
            value = '0' + value;
        }
        const result = [];
        for (let i = 0; i < value.length; i += 2) {
            result.push(parseInt(value.substr(i, 2), 16));
        }
        return addSlice(new Uint8Array(result));
    }
    if (isByteArray(value)) {
        return addSlice(new Uint8Array(value));
    }
    throw new Error('invalid arrayify value');
}

/**
 * Is byte array: array-like whose entries are all integers in [0, 255].
 * @param value The value
 */
function isByteArray(value) {
    if (!value ||
        // tslint:disable-next-line: radix
        parseInt(String(value.length)) != value.length ||
        typeof value === 'string') {
        return false;
    }
    for (let i = 0; i < value.length; i++) {
        const v = value[i];
        // tslint:disable-next-line: radix
        if (v < 0 || v >= 256 || parseInt(String(v)) != v) {
            return false;
        }
    }
    return true;
}
/**
 * Ensure `array` exposes slice(); slices are wrapped the same way.
 * @param array The array
 */
function addSlice(array) {
    if (array.slice !== undefined) {
        return array;
    }
    // BUG FIX: this shim was an arrow function reading `arguments`; arrow
    // functions have no own `arguments` binding (it resolved to the CommonJS
    // module wrapper's arguments, or threw in ESM), so the installed slice()
    // never worked. Use rest parameters instead.
    array.slice = function (...args) {
        return addSlice(new Uint8Array(Array.prototype.slice.apply(array, args)));
    };
    return array;
}

/**
 * Returns true if the bloom is a valid bloom: 512 hex chars (optionally
 * 0x-prefixed) with consistent casing.
 * @param bloom The bloom
 */
function isBloom(bloom) {
    if (typeof bloom !== 'string') {
        return false;
    }
    if (!/^(0x)?[0-9a-f]{512}$/i.test(bloom)) {
        return false;
    }
    if (/^(0x)?[0-9a-f]{512}$/.test(bloom) ||
        /^(0x)?[0-9A-F]{512}$/.test(bloom)) {
        return true;
    }
    return false;
}
/**
 * Set the 3 bloom bits derived from `value` (hex string or Uint8Array) and
 * return the updated bloom string.
 */
function setInBloom(bloom, value) {
    if (typeof value === 'object' && value.constructor === Uint8Array) {
        value = bytesToHex(value);
    }
    const hash = keccak256(value).replace('0x', '');
    for (let i = 0; i < 12; i += 4) {
        // calculate bit position in bloom filter that must be active
        const bitpos = ((parseInt(hash.substr(i, 2), 16) << 8) +
            parseInt(hash.substr(i + 2, 2), 16)) &
            2047;
        // locate the hex char that stores this bit and OR the bit in
        let charPos = bloom.length - 1 - Math.floor(bitpos / 4);
        let codePoint = bloom.charCodeAt(charPos);
        let code = codePointToInt(codePoint);
        const offset = 1 << bitpos % 4;
        code |= offset;

        let a = bloom.split('')

        let newCodePoint = intToCodePoint(code);
        // NOTE(review): this +32 only triggers when the bloom holds
        // upper-case hex; intToCodePoint already emits lower-case, so the
        // adjustment looks suspect — confirm blooms are always lower-case.
        if (codePoint >= 65 && codePoint <= 70) {
            newCodePoint += 32;
        }
        a[charPos] = String.fromCharCode(newCodePoint);

        bloom = a.join('');

    }
    return bloom;
}
/**
 * Returns true if the value is part of the given bloom.
 * note: false positives are possible.
 * @param bloom encoded bloom
 * @param value the value (hex string or Uint8Array) to test
 */
function isInBloom(bloom, value) {
    if (typeof value === 'object' && value.constructor === Uint8Array) {
        value = bytesToHex(value);
    }
    const hash = keccak256(value).replace('0x', '');
    // 3 bit positions, derived from the first 6 bytes of the keccak hash
    for (let i = 0; i < 12; i += 4) {
        // calculate bit position in bloom filter that must be active
        const bitpos = ((parseInt(hash.substr(i, 2), 16) << 8) +
            parseInt(hash.substr(i + 2, 2), 16)) &
            2047;
        // test if bitpos in bloom is active
        const code = codePointToInt(bloom.charCodeAt(bloom.length - 1 - Math.floor(bitpos / 4)));
        const offset = 1 << bitpos % 4;
        if ((code & offset) !== offset) {
            return false;
        }
    }
    return true;
}
/**
 * Map a hex digit's char code to its numeric value (0-15).
 * @param codePoint The code point
 * @throws {Error} for any non-hex-digit code point
 */
function codePointToInt(codePoint) {
    if (codePoint >= 48 && codePoint <= 57) {
        /* ['0'..'9'] -> [0..9] */
        return codePoint - 48;
    }
    if (codePoint >= 65 && codePoint <= 70) {
        /* ['A'..'F'] -> [10..15] */
        return codePoint - 55;
    }
    if (codePoint >= 97 && codePoint <= 102) {
        /* ['a'..'f'] -> [10..15] */
        return codePoint - 87;
    }
    throw new Error('invalid bloom');
}

/**
 * Map a nibble value (0-15) back to a lower-case hex digit's char code.
 * @param int The nibble value
 * @throws {Error} for out-of-range values
 */
function intToCodePoint(int) {
    if (int >= 10 && int <= 15) {
        return 87 + int;
    }
    if (int >= 0 && int < 10) {
        return 48 + int;
    }
    // BUG FIX (robustness): previously fell through and returned undefined,
    // which would silently corrupt the bloom string via String.fromCharCode;
    // mirror codePointToInt and fail loudly instead.
    throw new Error('invalid bloom');
}
/**
 * Returns true if the ethereum users address is part of the given bloom.
 * note: false positives are possible.
 * @param bloom encoded bloom
 * @param ethereumAddress the address to test
 */
function isUserEthereumAddressInBloom(bloom, ethereumAddress) {
    if (!isBloom(bloom)) {
        throw new Error('Invalid bloom given');
    }
    if (!isAddress(ethereumAddress)) {
        throw new Error(`Invalid ethereum address given: "${ethereumAddress}"`);
    }
    // USER addresses are hashed into the bloom as 32-byte words, so the
    // 20-byte address must be left-padded to 64 hex chars before lookup.
    // Contract addresses are hashed raw — hence the separate helper below.
    // (0x is not in the 2nd parameter of padLeft so 64 chars is fine)
    const paddedAddress = padLeft(ethereumAddress, 64);
    return isInBloom(bloom, paddedAddress);
}

/**
 * Returns true if the contract address is part of the given bloom.
 * note: false positives are possible.
 * @param bloom encoded bloom
 * @param contractAddress the contract address to test
 */
function isContractAddressInBloom(bloom, contractAddress) {
    if (!isBloom(bloom)) {
        throw new Error('Invalid bloom given');
    }
    if (!isAddress(contractAddress)) {
        throw new Error(`Invalid contract address given: "${contractAddress}"`);
    }
    return isInBloom(bloom, contractAddress);
}
/**
 * Returns true if the topic is part of the given bloom.
 * note: false positives are possible.
 * @param bloom encoded bloom
 * @param topic the topic encoded hex
 */
function isTopicInBloom(bloom, topic) {
    if (!isBloom(bloom)) {
        throw new Error('Invalid bloom given');
    }
    if (!isTopic(topic)) {
        throw new Error('Invalid topic');
    }
    return isInBloom(bloom, topic);
}

/**
 * Sets the topic's bits in the bloom and returns the updated bloom string.
 * @param bloom encoded bloom
 * @param topic the topic encoded hex
 */
function setTopicInBloom(bloom, topic) {
    if (!isBloom(bloom)) {
        throw new Error('Invalid bloom given');
    }
    if (!isTopic(topic)) {
        throw new Error('Invalid topic');
    }
    return setInBloom(bloom, topic);
}

/**
 * Checks if its a valid topic: 64 hex chars (optionally 0x-prefixed) with
 * consistent casing.
 * @param topic encoded hex topic
 */
function isTopic(topic) {
    if (typeof topic !== 'string') {
        return false;
    }
    if (!/^(0x)?[0-9a-f]{64}$/i.test(topic)) {
        return false;
    }
    // must be entirely lower-case or entirely upper-case hex
    return /^(0x)?[0-9a-f]{64}$/.test(topic) ||
        /^(0x)?[0-9A-F]{64}$/.test(topic);
}


/**
 * Is valid address: plain (optionally 0x-prefixed) hex address, or an
 * ICAP "XE" address.
 * @param address The address
 */
function isAddress(address) {
    if (typeof address !== 'string') {
        return false;
    }
    if (address.match(/^(0x)?[0-9a-fA-F]{40}$/)) {
        return true;
    }
    return /^XE[0-9]{2}[0-9A-Za-z]{30,31}$/.test(address);
}
"0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1", 9 | uint112TotalSupply: "0x17be3acebd510daa18778e1ee1fbaf88237b124dc0803c3be2fd4f99f3e69d33", 10 | uint256: "0xcf2aa50876cdfbb541206f89af0ee78d44a2abf8d328e37fa4917f982149848a" 11 | } 12 | } 13 | let syncSignatures = Object.values(signatures.sync); 14 | let swapSignatures = Object.values(signatures.swap); 15 | 16 | 17 | // extract swap informations from the log 18 | function getSwapDatas( log ){ 19 | let router = '0x' + log.topics[1].substr(26); 20 | let sender = '0x' + log.topics[2].substr(26); 21 | let decodedParams = ethers.utils.defaultAbiCoder.decode(['uint256','uint256','uint256','uint256'], log.data); 22 | let params = []; 23 | for( let param of decodedParams ) params.push(param.toString()); 24 | let pair = log.address; 25 | return { 26 | router: ethers.utils.getAddress(router), 27 | sender: ethers.utils.getAddress(sender), 28 | pair: ethers.utils.getAddress(pair), 29 | params: { 30 | amount0In: params[0], 31 | amount1In: params[1], 32 | amount0Out: params[2], 33 | amount1Out: params[3] 34 | } 35 | } 36 | } 37 | function decodedParams112(data){ 38 | return ethers.utils.defaultAbiCoder.decode(['uint112','uint112'], data); 39 | } 40 | function decodedParams112TotSupply(data){ 41 | return ethers.utils.defaultAbiCoder.decode(['uint112','uint112','uint256'], data); 42 | } 43 | function decodedParams256(data){ 44 | return ethers.utils.defaultAbiCoder.decode(['uint256','uint256'], data); 45 | } 46 | // extract sync informations from the log 47 | function getSyncDatas( log, topic ){ 48 | let pair = log.address; 49 | let decodedParams; 50 | if( topic == signatures.sync.uint112 ){ 51 | decodedParams = decodedParams112(log.data) 52 | } 53 | if( topic == signatures.sync.uint112TotalSupply ) { 54 | //console.log(`[NON TOPIC tot supply] ${pair}`) 55 | decodedParams = decodedParams112TotSupply(log.data) 56 | } 57 | if( topic == signatures.sync.uint256 ){ 58 | //console.log(`[NON TOPIC 256] ${pair}`) 59 | 
decodedParams = decodedParams256(log.data) 60 | } 61 | let params = []; 62 | for( let param of decodedParams ) params.push(param.toString()); 63 | return { 64 | pair: ethers.utils.getAddress(pair), 65 | reserve0: params[0], 66 | reserve1: params[1] 67 | } 68 | } 69 | function getDataFromLog( log ){ 70 | if(!log.topics){ 71 | console.log( '[NO TOPICS]', log ); 72 | return null; 73 | } 74 | let containSync = log.topics.filter( e => syncSignatures.includes(e) ); 75 | let containSwap = log.topics.filter( e => swapSignatures.includes(e) ); 76 | if( containSync.length ){ 77 | return { 78 | name: 'sync', 79 | ...getSyncDatas(log, containSync[0]) 80 | } 81 | } else if ( containSwap.length ){ 82 | return { 83 | name: 'swap', 84 | ...getSwapDatas(log, containSwap[0]) 85 | } 86 | } 87 | } 88 | 89 | module.exports = getDataFromLog; -------------------------------------------------------------------------------- /workers/lib/scrape.block.past.js: -------------------------------------------------------------------------------- 1 | require('dotenv').config(); 2 | const axios = require('axios'); 3 | 4 | // Initialize Ethereum Web3 client 5 | const {web3, web3ws, account} = require('./web3'); 6 | 7 | //imports 8 | const getDataFromLog = require('./logs'); 9 | const scraperConfig = require('../../config'); 10 | 11 | 12 | function organizeEvents( hash, receipt, blockNumber ){ 13 | for ( let i = receipt.logs.length - 1; i >= 0; i -- ) { 14 | let log = receipt.logs[i]; 15 | 16 | let data = getDataFromLog(log); 17 | if( data && !pairs_informations[blockNumber][log.address] ) 18 | pairs_informations[blockNumber][log.address] = { events: {}, hash: log.transactionHash }; 19 | 20 | if( data && data.name == 'swap' ){ 21 | pairs_informations[blockNumber][log.address].events.swap = { ...data }; 22 | } else if ( 23 | data && data.name == 'sync' && 24 | ( 25 | //set this sync as latest if not other sync was set 26 | !pairs_informations[blockNumber][log.address].events.sync || 27 | //or it is in 
a more recent transaction with respect to the current sync 28 | log.transactionIndex > pairs_informations[blockNumber][log.address].events.sync.transactionIndex || 29 | //or it is in the same transaction as the current sync and it has a higher logIndex 30 | ( 31 | log.transactionIndex == pairs_informations[blockNumber][log.address].events.sync.transactionIndex && 32 | log.logIndex > pairs_informations[blockNumber][log.address].events.sync.logIndex 33 | ) 34 | ) 35 | ){ 36 | if(!pairs_informations[blockNumber][log.address].events.sync) pairs_informations[blockNumber][log.address].events.sync = {}; 37 | pairs_informations[blockNumber][log.address].events.sync.reserve0 = data.reserve0; 38 | pairs_informations[blockNumber][log.address].events.sync.reserve1 = data.reserve1; 39 | pairs_informations[blockNumber][log.address].events.sync.transactionIndex = Number(log.transactionIndex); 40 | pairs_informations[blockNumber][log.address].events.sync.logIndex = Number(log.logIndex); 41 | } 42 | } 43 | } 44 | 45 | 46 | let pairs_informations = {/* [blockNum]: { pairAdd: { hash, latestReserves } } */}; 47 | 48 | 49 | async function getReceiptsBatch( hashes ){ 50 | if(!hashes.length) return []; 51 | 52 | let start = Date.now(); 53 | let body = []; 54 | for(let i = 0; i < hashes.length; i++ ){ 55 | let hash = hashes[i]; 56 | body.push({"jsonrpc":"2.0","method":"eth_getTransactionReceipt","params":[hash], "id": i }) 57 | }; 58 | const receipts = await axios.post( 59 | scraperConfig[process.env.CHAIN_ID].http_provider, 60 | body, 61 | { headers: { 'Content-Type': 'application/json' } } 62 | ); 63 | return receipts.data; 64 | } 65 | async function getTransactionsBatch( hashes ){ 66 | if(!hashes.length) return []; 67 | 68 | let start = Date.now(); 69 | let body = []; 70 | for(let i = 0; i < hashes.length; i++ ){ 71 | let hash = hashes[i]; 72 | body.push({"jsonrpc":"2.0","method":"eth_getTransactionByHash","params":[hash], "id": i }) 73 | }; 74 | const receipts = await axios.post( 75 | 
process.env.PROVIDER_HTTPS, 76 | body, 77 | { headers: { 'Content-Type': 'application/json' } } 78 | ); 79 | return receipts.data; 80 | } 81 | async function getBlockSyncEvents( blockNumber ){ 82 | let blockInfos = await web3.eth.getBlock(blockNumber); 83 | pairs_informations[blockNumber] = {}; 84 | let rpcResponsesReceipts = await getReceiptsBatch( blockInfos.transactions ); 85 | for( let rpcRes of rpcResponsesReceipts ){ 86 | let receipt = rpcRes.result; 87 | let hash = receipt.transactionHash; 88 | organizeEvents(hash, receipt, blockNumber); // add the infos inside pairs_informations; 89 | } 90 | return pairs_informations[blockNumber]; 91 | } 92 | async function getBlockReceipts( blockNumber ){ 93 | let blockInfos = await web3.eth.getBlock(blockNumber); 94 | pairs_informations[blockNumber] = {}; 95 | let rpcResponsesReceipts = await getReceiptsBatch( blockInfos.transactions ); 96 | let receipts = []; 97 | for( let rpcRes of rpcResponsesReceipts ){ 98 | let receipt = rpcRes.result; 99 | receipts.push(receipt) 100 | } 101 | return receipts; 102 | } 103 | async function getBlockTransactions( blockNumber ){ 104 | let blockInfos = await web3.eth.getBlock(blockNumber); 105 | pairs_informations[blockNumber] = {}; 106 | let rpcResponsesTransactions = await getTransactionsBatch( blockInfos.transactions ); 107 | let transactions = []; 108 | for( let rpcRes of rpcResponsesTransactions ){ 109 | let tx = rpcRes.result; 110 | transactions.push(tx) 111 | } 112 | return transactions; 113 | } 114 | // if( process.argv[2] ) { 115 | // ( async () => { 116 | // if( process.argv[2].startsWith('0x') || isNaN( parseInt(process.argv[2]) ) ) return console.log('Not a valide number') 117 | // let start = Date.now(); 118 | // let res = await getBlockSyncEvents( process.argv[2] ) 119 | // for( let pair in res ){ 120 | // console.log( pair, res[pair].events ); 121 | // } 122 | // console.log((Date.now()- start)/1000, 's'); 123 | // })(); 124 | // } 125 | 126 | module.exports = { 127 | 
getBlockTransactions, 128 | getBlockSyncEvents, 129 | getReceiptsBatch, 130 | getBlockReceipts 131 | }; -------------------------------------------------------------------------------- /workers/lib/scrape.block.stream.js: -------------------------------------------------------------------------------- 1 | require('dotenv').config(); 2 | const ethers = require('ethers') 3 | 4 | const fs = require('fs'); 5 | let logPath = 'log/reserves-to-update.log'; 6 | fs.writeFileSync(logPath, '', 'utf-8'); 7 | let fileContent = fs.readFileSync(logPath, 'utf-8'); 8 | function logInFile(txt){ 9 | fileContent += txt; 10 | fs.writeFileSync(logPath, fileContent, 'utf-8'); 11 | } 12 | 13 | //imports 14 | 15 | 16 | 17 | const BlockListener = require('./block.listner'); 18 | 19 | async function listenReserves( 20 | onSingleReservesUpdate, 21 | onBlockReservesScraped 22 | ){ 23 | 24 | let listener = new BlockListener( 25 | (pair, reserves, hash, blockNumber) => { 26 | logInFile(`\t[${blockNumber}]\n\t[UPDATE ${new Date().toLocaleTimeString()}:${Math.floor(Date.now()/100% 100)}] ${pair} ${hash}\n\t${reserves}\n`) 27 | onSingleReservesUpdate(pair, reserves, hash, blockNumber) 28 | }, 29 | async (number, pairsInfos) => { 30 | let start = Date.now(); 31 | console.log('[SCRAPING CB][NEW]', number); 32 | logInFile(`[SCRAPING BLOCK][NEW] ${number} ${new Date().toLocaleTimeString()}:${Math.floor(Date.now()/100% 100)}`) 33 | await onBlockReservesScraped(number, pairsInfos); 34 | console.log('[SCRAPED BLOCK][NEW][CB]', number, ( Date.now() - start )/1000 ); 35 | } 36 | ); 37 | listener.start(); 38 | 39 | } 40 | 41 | module.exports = listenReserves; -------------------------------------------------------------------------------- /workers/lib/web3.js: -------------------------------------------------------------------------------- 1 | require('dotenv').config(); 2 | const Web3 = require('web3'); 3 | const scraperConfig = require('../../config'); 4 | 5 | let web3_wss = new Web3(); 6 | 
web3_wss.setProvider(new Web3.providers.WebsocketProvider( scraperConfig[process.env.CHAIN_ID].ws_provider )); 7 | 8 | let web3_https = new Web3(); 9 | web3_https.setProvider(new Web3.providers.HttpProvider( scraperConfig[process.env.CHAIN_ID].http_provider )); 10 | 11 | 12 | 13 | module.exports = { 14 | web3: web3_https, 15 | web3ws: web3_wss, 16 | } -------------------------------------------------------------------------------- /workers/updater/lib/Cache.js: -------------------------------------------------------------------------------- 1 | class Cache { 2 | constructor () { 3 | this.TOKENS_CACHE_MAX_SIZE = 5000; 4 | this.TOKENS_CACHE_SIZE = 0; 5 | this.HISTORIES_CACHE_SIZE = 0; 6 | this.PRICES_CACHE_SIZE = 0; 7 | 8 | this.TOKENS_CACHE_ORDER = []; 9 | this.HISTORIES_CACHE_ORDER = []; 10 | this.PRICES_CACHE_ORDER = []; 11 | 12 | this.CACHE = { 13 | token: {}, // [tokenAddress] => TokenBasic object 14 | tokenHistory: {}, // [pairAddress] => TokenHistory object of this pair 15 | historyPrice: {}, // [pairAddress] => { latest: Latest History Price , hour: One Hour Ago History Price, day: One Day Ago History Price } 16 | router: {}, // [routerContract] => { valid: if the router is valid } 17 | pair: {}, // [pairAdd] => { tokens: [ token0, token1], reserves: [reserve0, reserve1] } 18 | }; 19 | 20 | } 21 | 22 | getRouter( routerAdd ){ 23 | return this.CACHE.router[routerAdd]; 24 | } 25 | setRouter( routerAdd, fields ){ 26 | this.CACHE.router[routerAdd] = fields; 27 | } 28 | 29 | getPair( pairAdd ){ 30 | return this.CACHE.pair[pairAdd]; 31 | } 32 | setPair( pairAdd, fields ){ 33 | this.CACHE.pair[pairAdd] = fields; 34 | } 35 | 36 | getTokenHistory( pair ){ 37 | return this.CACHE.tokenHistory[pair]; 38 | } 39 | getHistoryPrice( pair ){ 40 | return this.CACHE.historyPrice[pair]; 41 | } 42 | getToken( contract ){ 43 | return this.CACHE.token[contract]; 44 | } 45 | getSizeTokens(){ 46 | return this.TOKENS_CACHE_SIZE; 47 | } 48 | getSizeHistories(){ 49 | return 
this.HISTORIES_CACHE_SIZE; 50 | } 51 | getSizePirceHistory(){ 52 | return this.PRICES_CACHE_SIZE; 53 | } 54 | setToken( tokenAddress, tokenInfos, overwrite ){ 55 | let cacheSize = this.getSizeTokens(); 56 | 57 | if( this.TOKENS_CACHE_ORDER.includes(tokenAddress) ){ 58 | if( overwrite ) this.CACHE.token[tokenAddress] = tokenInfos; 59 | else return; 60 | } 61 | 62 | //console.log(`\t\t[CACHE SIZE TOKEN] ${cacheSize}`); 63 | if( cacheSize > this.TOKENS_CACHE_MAX_SIZE ){ // keeps the tokens cache with a fixed size 64 | let toRemove = this.TOKENS_CACHE_ORDER.shift(); 65 | delete this.CACHE.token[toRemove]; 66 | this.TOKENS_CACHE_SIZE --; 67 | } 68 | if(tokenInfos) { 69 | this.TOKENS_CACHE_ORDER.push( tokenAddress ); 70 | this.CACHE.token[tokenAddress] = tokenInfos; 71 | } 72 | else { 73 | this.CACHE.token[tokenAddress] = { notFound: true, date: Date.now() }; 74 | } 75 | this.TOKENS_CACHE_SIZE ++; 76 | } 77 | 78 | // HANDLE TOKEN HISTORY CACHE 79 | setHistory( pair, history ){ 80 | let cacheSize = this.getSizeHistories(); 81 | if( cacheSize > this.TOKENS_CACHE_MAX_SIZE ){ // keeps the tokens cache with a fixed size 82 | let toRemove = this.HISTORIES_CACHE_ORDER.shift(); 83 | delete this.CACHE.tokenHistory[toRemove]; 84 | } 85 | this.HISTORIES_CACHE_ORDER.push( pair ); 86 | this.CACHE.tokenHistory[pair] = history ; 87 | } 88 | 89 | setHistoryPrice( pair, history ){ 90 | if( this.CACHE.historyPrice[pair] ) { // if already present then just update the one in cache and quit 91 | this.CACHE.historyPrice[pair] = history; 92 | return; 93 | } 94 | let cacheSize = this.getSizePirceHistory(); 95 | if( cacheSize > this.TOKENS_CACHE_MAX_SIZE ){ // keeps the tokens cache with a fixed size 96 | let toRemove = this.PRICES_CACHE_ORDER.shift(); 97 | delete this.CACHE.historyPrice[toRemove]; 98 | } 99 | this.PRICES_CACHE_ORDER.push( pair ); 100 | this.CACHE.historyPrice[pair] = history ; 101 | } 102 | } 103 | 104 | module.exports = Cache; 
-------------------------------------------------------------------------------- /workers/updater/lib/Queue.js: -------------------------------------------------------------------------------- 1 | function sleep(ms) { 2 | return new Promise(resolve => setTimeout(resolve, ms)); 3 | } 4 | /** 5 | * Instead of running an async function fo each scan trasactions, that's cause a system overload, 6 | * this class help managin the transactions scan between a fixed amount of workers ( childs ). 7 | */ 8 | class Queue { 9 | constructor( childs, callback ){ 10 | this.childs = childs; 11 | this.callback = callback; 12 | this.queue = {}; 13 | for( let i = 0; i < childs; i ++){ 14 | this.queue[i] = {}; 15 | this.queue[i].length = 0; 16 | this.queue[i].array = []; 17 | }; 18 | this.child_index = 0; 19 | this.start(); 20 | } 21 | add(){ 22 | if( this.child_index >= this.childs ) { 23 | this.child_index = 0; // reset index when overflow the childs amount 24 | } 25 | this.queue[this.child_index].array.push( [...arguments] ); 26 | this.queue[this.child_index].length ++; 27 | this.child_index ++; 28 | } 29 | async process( child ){ 30 | while( true ){ 31 | let toScan = this.queue[child].array; 32 | let count = 0; 33 | while( count <= toScan.length - 1){ 34 | let start = Date.now(); 35 | let scan = toScan[0]; 36 | 37 | await this.callback(...scan); 38 | this.queue[child].array.splice(0, 1); 39 | this.queue[child].length --; 40 | 41 | let end = (Date.now()-start)/1000; 42 | //if( !(end <= 0.01) ) console.log('[QUEUE] Processed tx in: ', end, this.callback.name ); 43 | count ++; 44 | } 45 | await sleep(10); 46 | } 47 | } 48 | getPending(){ 49 | let pending = 0; 50 | for( let child = 0; child < this.childs; child++ ){ 51 | pending += this.queue[child].length 52 | } 53 | return pending; 54 | } 55 | start(){ 56 | for( let child = 0; child < this.childs; child++ ){ 57 | this.process( child ); 58 | } 59 | setInterval(() => { 60 | let pending = this.getPending(); 61 | if( pending > 0 ) 
console.log('[AWAITING TX] ', this.getPending(), this.callback.name); 62 | }, 500); 63 | } 64 | } 65 | module.exports = Queue; -------------------------------------------------------------------------------- /workers/updater/lib/Scraper.js: -------------------------------------------------------------------------------- 1 | require('dotenv').config(); 2 | 3 | const EnumChainId = require("../../../enum/chain.id"); 4 | const EnumContracts = require("../../../enum/contracts"); 5 | const Bulk = require("./bulk/Bulk"); 6 | const Cache = require("./Cache"); 7 | const Token = require("./entity/Token"); 8 | 9 | const fs = require('fs'); 10 | 11 | const EnumAbi = require("../../../enum/abi"); 12 | const EnumMainTokens = require("../../../enum/mainTokens"); 13 | const EnumBulkTypes = require("../../../enum/bulk.records.type"); 14 | const TokenHistory = require("./entity/TokenHistory"); 15 | const HistoryPirce = require("./entity/HistoryPirce"); 16 | 17 | const abiDecoder = require('abi-decoder'); 18 | const Router = require("./entity/Routers"); 19 | 20 | const UtilsAddresses = require("../../../utils/addresses"); 21 | const TokenFees = require("./entity/TokenFees"); 22 | abiDecoder.addABI(EnumAbi[process.env.CHAIN_ID].TOKEN); 23 | abiDecoder.addABI(EnumAbi[process.env.CHAIN_ID].ROUTERS.PANCAKE); 24 | 25 | const scraperConfig = require("../../../config.js"); 26 | const sleep = require('../../../utils/sleep'); 27 | 28 | 29 | 30 | function todayDateUnix(){ 31 | return Math.ceil(Date.now()/1000 - ((Date.now()/1000)%(60*60*24))); 32 | } 33 | 34 | class Scraper { 35 | 36 | static lastScrapedBlockPath = __dirname + '/not-delete.scraped-block.checkpoint.txt'; 37 | static allScrapedBlocksPath = __dirname + '/not-delete.scraped-blocks.txt'; 38 | static logPath = __dirname + '/log.txt'; 39 | 40 | static logger = ( text ) => { 41 | fs.appendFileSync(Scraper.allScrapedBlocksPath, text , 'utf-8'); 42 | } 43 | 44 | constructor ( web3 ) { 45 | this.web3 = web3; 46 | 47 | this.cache = new 
Cache(); 48 | this.bulk = new Bulk( this.cache ); 49 | 50 | this.routers = new Router( this.cache, this.web3, this.bulk ); 51 | this.tokens = new Token( this.cache, this.web3, this.bulk ); 52 | this.tokensFees = new TokenFees( this.web3 ); 53 | this.tokenHistories = new TokenHistory( this.cache ); 54 | this.historyPrices = new HistoryPirce( this.cache ); 55 | 56 | this.allScrapedBlocksTemp = ''; 57 | 58 | this.bulkUpdateGap = process.env.WRITE_TO_DB_SECONDS * 1000; 59 | this.lastUpdate = 0; 60 | 61 | this.CHAIN_MAIN_TOKEN_PRICE = 0; 62 | this.loopUpdateMainTokenPrice(); 63 | } 64 | 65 | areEqualAdd( add1, add2 ){ 66 | return add1.toLowerCase() == add2.toLowerCase(); 67 | } 68 | isMainToken( contract ){ 69 | return this.areEqualAdd(contract, EnumMainTokens[process.env.CHAIN_ID].MAIN.address); 70 | } 71 | isWhitelisted( token ){ 72 | if( !scraperConfig[process.env.CHAIN_ID].whitelist_enabled ) return true; 73 | return scraperConfig[process.env.CHAIN_ID].whitelist.includes(token); 74 | } 75 | 76 | // returns an array [ mainToken, dependantToken ]; 77 | async tokenHierarchy( first_token, latest_token, history ){ 78 | // to have consistency in data collection keep using the same mainToken and dependantToken if present in history 79 | if( history ){ 80 | let mainTokenContract = history.mainToken; 81 | if( first_token.contract == mainTokenContract ) return [ first_token, latest_token ] 82 | else return [ latest_token, first_token ]; 83 | } 84 | // compare wich of the tokens is used more frequently to create pairs. 
This means that the one with more pairs is the more common used 85 | 86 | let pairs_comparison; // true if first token is the main one, else false 87 | // cross chain 88 | if( 89 | this.areEqualAdd(EnumMainTokens[process.env.CHAIN_ID].MAIN.address, first_token.contract) && 90 | EnumMainTokens[process.env.CHAIN_ID].STABLECOINS.includes(latest_token.contract) 91 | ) { 92 | pairs_comparison = false; 93 | } else if( 94 | this.areEqualAdd(EnumMainTokens[process.env.CHAIN_ID].MAIN.address, latest_token.contract) && 95 | EnumMainTokens[process.env.CHAIN_ID].STABLECOINS.includes(first_token.contract) 96 | ){ 97 | pairs_comparison = true; 98 | } 99 | else if( first_token.pairs_count == latest_token.pairs_count ){ 100 | if( EnumMainTokens[process.env.CHAIN_ID].STABLECOINS.includes(first_token.contract) ) pairs_comparison = true; 101 | else if( EnumMainTokens[process.env.CHAIN_ID].STABLECOINS.includes(latest_token.contract) ) pairs_comparison = false; 102 | else if ( this.areEqualAdd(EnumMainTokens[process.env.CHAIN_ID].MAIN.address, first_token.contract) ) pairs_comparison = true; 103 | else if ( this.areEqualAdd(EnumMainTokens[process.env.CHAIN_ID].MAIN.address, latest_token.contract) ) pairs_comparison = false; 104 | } else { 105 | pairs_comparison = first_token.pairs_count > latest_token.pairs_count; // here pairs_count 106 | } 107 | 108 | let main_token = pairs_comparison ? first_token : latest_token; 109 | let dependant_token = pairs_comparison ? 
latest_token : first_token; 110 | 111 | return [ main_token, dependant_token ]; 112 | } 113 | async getTokens(pairAddress, cachedPair){ 114 | let token0 = 0; 115 | let token1 = 0; 116 | try { 117 | let pairWeb3Contract = await new this.web3.eth.Contract( EnumAbi[process.env.CHAIN_ID].PAIR.PANCAKE, pairAddress ); 118 | if( !cachedPair ){ 119 | console.log('\t\t[PAIR] Not cached') 120 | token0 = await pairWeb3Contract.methods.token0().call(); 121 | token1 = await pairWeb3Contract.methods.token1().call(); 122 | // some lines below the pair is setted in the cache 123 | } else { 124 | token0 = cachedPair.tokens[0]; 125 | token1 = cachedPair.tokens[1]; 126 | } 127 | } catch (error) { 128 | console.log( '\t[\tERROR] CANNOT RETRIVE RESERVES', error ); 129 | return [null, null] ; 130 | } 131 | return [token0, token1]; 132 | } 133 | async calculateTokenFees(token, pairInfos, routerInfos){ 134 | 135 | let feesKey = pairInfos.tokens[0] == token ? 'fees.token0': 'fees.token1'; 136 | let otherToken = pairInfos.tokens[0] == token ? 
pairInfos.tokens[1]: pairInfos.tokens[0]; 137 | let fees = { buy: null, sell: null, checked: false } 138 | 139 | let customCalc = false; 140 | 141 | if( // checked token is not bnb, and the other paired token is bnb 142 | !this.areEqualAdd(token, EnumMainTokens[process.env.CHAIN_ID].MAIN.address) && 143 | this.areEqualAdd(otherToken, EnumMainTokens[process.env.CHAIN_ID].MAIN.address) 144 | ){ 145 | console.log('[CALCULATING FEES MAIN]', token); 146 | let fee = 10000 - (routerInfos.fee * 10000); 147 | let buyFee = await this.tokensFees.buyFees( token, pairInfos.pair, fee, this.web3.utils.toWei('0.01', 'ether') ); 148 | let sellFee = await this.tokensFees.sellFees( token, pairInfos.pair, fee, this.web3.utils.toWei('0.01', 'ether') ); 149 | fees.buy = buyFee; 150 | fees.sell = sellFee; 151 | fees.checked = true; 152 | } else { 153 | customCalc = true; 154 | console.log('[CALCULATING FEES CUSTOM]', token); 155 | let pairForBuyOtherToken = await this.tokenHistories.getPairWithMainToken(otherToken); 156 | if( !pairForBuyOtherToken ){ 157 | console.log(`\t\t[TOKEN FEES ERR]\n\t\t\t> TOKENS: ${token} - ${otherToken}\n\t\t\t> PAIR: ${pairInfos.pair}\n\t\t\t> FEES: ${JSON.stringify(fees)}`, ) 158 | return fees; 159 | } 160 | let fee1 = 10000 - (pairForBuyOtherToken.router_fee * 10000); 161 | let fee2 = 10000 - (routerInfos.fee * 10000); 162 | 163 | fees.buy = await this.tokensFees.buyFeesCustom( otherToken, pairForBuyOtherToken.pair, fee1, token, pairInfos.pair, fee2, this.web3.utils.toWei('0.1', 'ether') ); 164 | fees.sell = await this.tokensFees.sellFeesCustom( otherToken, pairForBuyOtherToken.pair, fee1, token, pairInfos.pair, fee2, this.web3.utils.toWei('0.1', 'ether') ); 165 | fees.checked = true; 166 | } 167 | 168 | if( fees.buy != null && fees.sell == null ){ 169 | console.log('\t\tHONEYPOT', token, fees); 170 | fees.buy = 0; 171 | fees.sell = 100; 172 | fees.checked = true; 173 | this.bulk.bulk_normal.setTokenBulkSet( pairInfos.pair, EnumBulkTypes.TOKEN_HISTORY , 
feesKey, fees ); 174 | this.bulk.bulk_normal.setTokenBulkSet( pairInfos.pair, EnumBulkTypes.TOKEN_HISTORY , 'hasFees', true ); 175 | return fees; 176 | } 177 | 178 | console.log('\t\t[TOKEN FEES] ', token, JSON.stringify(fees), fees.buy == 0 && fees.sell == 0, 'CUSTOM: ', customCalc); 179 | if( fees.buy != null && fees.sell != null ){ 180 | this.bulk.bulk_normal.setTokenBulkSet( pairInfos.pair, EnumBulkTypes.TOKEN_HISTORY , feesKey, fees ); 181 | if( fees.buy == 0 && fees.sell == 0 ){ 182 | this.bulk.bulk_normal.setTokenBulkSet( pairInfos.pair, EnumBulkTypes.TOKEN_HISTORY , 'hasFees', false ); 183 | } 184 | } 185 | return fees; 186 | } 187 | 188 | /** 189 | * @description Add the token price inside the Bulk history 190 | * @param {*} token0 address 191 | * @param {*} token1 address 192 | */ 193 | async updatePairPriceWithReserves( 194 | /* txSender, router, txHash, pairAddress, params */ 195 | hash, pairAdd, swapInfo, syncInfo, blockNumber 196 | ){ 197 | 198 | pairAdd = UtilsAddresses.toCheckSum(pairAdd); 199 | 200 | console.log('\t[STARTED]', pairAdd, hash, blockNumber); 201 | console.log('\t[SYNC]', JSON.stringify(syncInfo)); 202 | console.log('\t[SWAP]', JSON.stringify(swapInfo)); 203 | 204 | let time = Date.now(); 205 | 206 | let reserve0 = syncInfo.reserve0; 207 | let reserve1 = syncInfo.reserve1; 208 | 209 | let cachedPair = this.cache.getPair(pairAdd); 210 | 211 | let [token0, token1] = await this.getTokens(pairAdd, cachedPair); 212 | 213 | if( !this.isWhitelisted(token0) && !this.isWhitelisted(token1) ) return; 214 | 215 | console.log(`[TIME][${blockNumber}][RETRIVED TOKENS]`, token0, token1, (Date.now()-time)/1000 ); 216 | time = Date.now(); 217 | 218 | if(!token0 || !token1 ) return console.log('\t[MISSING TOKENS]', pairAdd, hash); 219 | 220 | let tokenHistory = await this.tokenHistories.getTokenHistory( pairAdd ); 221 | 222 | console.log(`[TIME][${blockNumber}][RETRIVED TOKENS HISTORIES]`, pairAdd, (Date.now()-time)/1000 ); 223 | time = Date.now(); 224 | 
225 | let token0Infos = await this.tokens.getToken( UtilsAddresses.toCheckSum(token0) ); 226 | let token1Infos = await this.tokens.getToken( UtilsAddresses.toCheckSum(token1) ); 227 | 228 | 229 | if( !token0Infos || !token0Infos.contract || !token1Infos || !token1Infos.contract ) return; // skip if some token infos is missing; 230 | 231 | reserve0 = reserve0/10**token0Infos.decimals; 232 | reserve1 = reserve1/10**token1Infos.decimals; 233 | 234 | let router = swapInfo?.router; 235 | if( router ) router = UtilsAddresses.toCheckSum(router); 236 | console.log(hash, '[ROUTER]', router); 237 | 238 | let pairInfos = { 239 | pair: pairAdd, 240 | router: tokenHistory ? tokenHistory.router : router, 241 | tokens: [token0, token1], 242 | reserves: [reserve0, reserve1], 243 | decimals: [token0Infos.decimals, token1Infos.decimals], 244 | fees: { 245 | token0: tokenHistory ? tokenHistory.fees?.token0 : null, 246 | token1: tokenHistory ? tokenHistory.fees?.token1 : null, 247 | } 248 | }; 249 | 250 | let oldRouterInfos = tokenHistory ? await this.routers.getRouter( tokenHistory.router, pairAdd ) : {}; 251 | let detectedRouterInfos = router ? 
await this.routers.getRouter( router, pairAdd ) : {}; 252 | console.log(`[TIME][${blockNumber}][RETRIVED ROUTERS]`, (Date.now()-time)/1000 ); 253 | time = Date.now(); 254 | 255 | let [ mainToken, dependantToken ] = await this.tokenHierarchy(token0Infos, token1Infos, tokenHistory); // get who is the main token in the pair 256 | console.log(`[TIME][${blockNumber}][RETRIVED HIERARCHY]`, (Date.now()-time)/1000 ); 257 | time = Date.now(); 258 | // cross chain 259 | 260 | if( scraperConfig[process.env.CHAIN_ID].calculate_pair_fees ){ 261 | console.log(`[GOING TO CHECK FEES] ${token0} ${JSON.stringify(pairInfos.fees.token0)}`) 262 | console.log(`[GOING TO CHECK FEES] ${token1} ${JSON.stringify(pairInfos.fees.token1)}`) 263 | if( !pairInfos.fees.token0 || !pairInfos.fees.token0.checked ) { 264 | let start = Date.now(); 265 | let tokenFees = await this.calculateTokenFees(token0, pairInfos, detectedRouterInfos); 266 | pairInfos.fees.token0 = tokenFees; 267 | console.log(`[TIME][${blockNumber}][CALCULATE FEES] 0:`, token0, pairAdd, (Date.now()-start)/1000 ); 268 | } 269 | if( !pairInfos.fees.token1 || !pairInfos.fees.token1.checked ) { 270 | let start = Date.now(); 271 | let tokenFees = await this.calculateTokenFees(token1, pairInfos, detectedRouterInfos); 272 | pairInfos.fees.token1 = tokenFees; 273 | console.log(`[TIME][${blockNumber}][CALCULATE FEES] 1:`, token1, pairAdd, (Date.now()-start)/1000 ); 274 | } 275 | } 276 | 277 | this.cache.setPair(pairAdd, pairInfos); 278 | 279 | 280 | 281 | time = Date.now(); 282 | 283 | if( !tokenHistory ){ 284 | if(!router) return ; 285 | tokenHistory = { 286 | records_date: todayDateUnix(), 287 | chain: process.env.CHAIN_ID, // cross chain 288 | token0: { 289 | contract: token0Infos.contract, 290 | name: token0Infos.name, 291 | symbol: token0Infos.symbol, 292 | decimals: token0Infos.decimals, 293 | }, 294 | token1: { 295 | contract: token1Infos.contract, 296 | name: token1Infos.name, 297 | symbol: token1Infos.symbol, 298 | decimals: 
token1Infos.decimals, 299 | }, 300 | 301 | dependantToken: dependantToken.contract, 302 | mainToken: mainToken.contract, 303 | 304 | router: router, 305 | router_fee: detectedRouterInfos.fee, 306 | pair: pairAdd, 307 | }; 308 | console.log(`\t\t[BULK ADD CREATE] ${Object.keys(this.bulk.bulk_normal.getHistories(EnumBulkTypes.TOKEN_HISTORY)).length} ${dependantToken.contract}`); 309 | this.bulk.bulk_normal.setNewDocument( pairAdd, EnumBulkTypes.TOKEN_HISTORY, tokenHistory ); 310 | this.cache.setHistory(pairAdd, tokenHistory); 311 | console.log(`[TIME][${blockNumber}][CREATED NEW DOCUMENT]`, (Date.now()-time)/1000 ); 312 | time = Date.now(); 313 | } else { 314 | console.log(`[ROUTER] ${blockNumber} ${pairAdd}`, tokenHistory?.router, router, oldRouterInfos.valid, detectedRouterInfos.valid ); 315 | if( ( !oldRouterInfos.valid || oldRouterInfos.fee == undefined) && detectedRouterInfos.valid ){ 316 | console.log('\t\t[UPDATING ROUTER] ', tokenHistory?.router, router, JSON.stringify(oldRouterInfos)); 317 | // if the pair was previously detected from an invalid router, update it with the current one if it is valid 318 | this.bulk.bulk_normal.setTokenBulkSet( pairAdd, EnumBulkTypes.TOKEN_HISTORY ,`router`, router ); 319 | this.bulk.bulk_normal.setTokenBulkSet( pairAdd, EnumBulkTypes.TOKEN_HISTORY ,`router_fee`, detectedRouterInfos.fee ); 320 | } 321 | else if ( (!tokenHistory.router_fee || tokenHistory.router_fee == -1) && oldRouterInfos.valid ){ // just for update the old toke histories that do not have this parameter 322 | this.bulk.bulk_normal.setTokenBulkSet( pairAdd, EnumBulkTypes.TOKEN_HISTORY ,`router_fee`, oldRouterInfos.fee ); 323 | } 324 | console.log(`[TIME][${blockNumber}][UPDATED ROUTER]`, (Date.now()-time)/1000 ); 325 | time = Date.now(); 326 | } 327 | 328 | // increase the token score 329 | let todayUnix = todayDateUnix(); 330 | this.bulk.bulk_normal.setTokenBulkInc( token0, EnumBulkTypes.TOKEN_BASIC ,`score.${todayUnix}`, 1 ); 331 | 
this.bulk.bulk_normal.setTokenBulkInc( token0, EnumBulkTypes.TOKEN_BASIC ,`score_points`, 1 ); 332 | if(!token0Infos.score) token0Infos.score = {}; 333 | token0Infos.score[todayUnix] = token0Infos.score[todayUnix] ? token0Infos.score[todayUnix] + 1 : 1; 334 | this.cache.setToken(token0, {...token0Infos}); 335 | 336 | if(!token1Infos.score) token1Infos.score = {}; 337 | this.bulk.bulk_normal.setTokenBulkInc( token1, EnumBulkTypes.TOKEN_BASIC ,`score.${todayUnix}`, 1 ); 338 | this.bulk.bulk_normal.setTokenBulkInc( token1, EnumBulkTypes.TOKEN_BASIC ,`score_points`, 1 ); 339 | token1Infos.score[todayUnix] = token1Infos.score[todayUnix] ? token1Infos.score[todayUnix] + 1 : 1; 340 | this.cache.setToken(token1, {...token1Infos}); 341 | 342 | // update the pair records 343 | this.bulk.bulk_normal.setTokenBulkSet( pairAdd, EnumBulkTypes.TOKEN_HISTORY ,'reserve0', reserve0); 344 | this.bulk.bulk_normal.setTokenBulkSet( pairAdd, EnumBulkTypes.TOKEN_HISTORY ,'reserve1', reserve1); 345 | this.bulk.bulk_normal.setTokenBulkSet( pairAdd, EnumBulkTypes.TOKEN_HISTORY ,'updated_at_block', blockNumber); 346 | console.log(`[TIME][${blockNumber}][SET BULK OP]`, (Date.now()-time)/1000 ); 347 | 348 | if( !swapInfo || !swapInfo.router || !swapInfo.params || !Object.keys(swapInfo.params).length ) return; 349 | 350 | if( scraperConfig[process.env.CHAIN_ID].save_transactions ){ 351 | let swapInfoIn = { 352 | amount: parseInt(swapInfo.params.amount0In) ? 353 | swapInfo.params.amount0In/(10**token0Infos.decimals) 354 | : swapInfo.params.amount1In/(10**token1Infos.decimals), 355 | token: parseInt(swapInfo.params.amount0In) ? token0: token1 356 | } 357 | 358 | let swapInfoOut = { 359 | amount: parseInt(swapInfo.params.amount0Out) ? 360 | swapInfo.params.amount0Out/(10**token0Infos.decimals) 361 | : swapInfo.params.amount1Out/(10**token1Infos.decimals), 362 | token: parseInt(swapInfo.params.amount0Out) ? 
token0: token1 363 | } 364 | 365 | let time_unix = time/1000; 366 | console.log('[SETTING TRANSACTION] ', pairAdd, time_unix); 367 | this.bulk.bulk_time.setNewDocument( pairAdd, EnumBulkTypes.HISOTRY_TRANSACTION, time_unix, { 368 | time: time_unix, // unix timestamp 369 | hash: hash, 370 | from: swapInfo.sender, 371 | 372 | amountIn: swapInfoIn.amount, 373 | amountOut: swapInfoOut.amount, 374 | 375 | tokenIn: swapInfoIn.token, 376 | tokenOut: swapInfoOut.token, 377 | 378 | pair: pairAdd, 379 | router: router, 380 | 381 | dependantToken: dependantToken.contract, 382 | mainToken: mainToken.contract 383 | }, false, 0.0001, true); 384 | } 385 | 386 | 387 | // update price record 388 | // cross chain 389 | let mainTokenIsBNB = this.isMainToken( mainToken.contract ); 390 | let mainReserve = mainToken.contract == token0 ? reserve0 : reserve1; 391 | let mainReserveValue = mainReserve; 392 | if( mainTokenIsBNB ) mainReserveValue = mainReserve * this.CHAIN_MAIN_TOKEN_PRICE; // if the main token of the pair is BNB then multiply the tokens in the pair reserver * bnb price 393 | this.bulk.bulk_normal.setTokenBulkSet( pairAdd, EnumBulkTypes.TOKEN_HISTORY, 'mainReserveValue', mainReserveValue); 394 | 395 | if( scraperConfig[process.env.CHAIN_ID].save_price ){ 396 | let dependantTokenPrice = null; // calculate the dependant token price 397 | if( mainToken.contract == token0 ) dependantTokenPrice = reserve0/reserve1; // here decimals 398 | else dependantTokenPrice = reserve1/reserve0; 399 | if( mainTokenIsBNB ){ // if the main token was BNB then multiply for get the token usd value 400 | if(this.CHAIN_MAIN_TOKEN_PRICE){ 401 | dependantTokenPrice = dependantTokenPrice * this.CHAIN_MAIN_TOKEN_PRICE; 402 | } 403 | } 404 | 405 | let pairHistory = await this.historyPrices.getHistory(pairAdd); 406 | await this.updatePrice( 407 | router, pairAdd, dependantToken.contract, mainToken.contract, 408 | pairHistory.latest, dependantTokenPrice, 409 | reserve0, reserve1 410 | ); 411 | } 412 | 413 
| } 414 | 415 | async executeBulk(){ 416 | if( Date.now() < this.bulkUpdateGap + this.lastUpdate ) return; 417 | this.lastUpdate = Date.now(); 418 | 419 | console.log('[BULK] ', this.allScrapedBlocksTemp); 420 | let start = Date.now(); 421 | await this.bulk.execute(); 422 | console.log('[BULK UPDATED]', (Date.now()-start)/1000 ); 423 | } 424 | 425 | getTime() { return (Date.now()/ (1000)) - (Date.now()/ (1000)) % 60 } // get current minete as unix timestamp 426 | 427 | async updatePrice( router, pair, tokenAddress, mainTokenAddress, latestHistoryPirce, newPrice, reserve0, reserve1 ) { 428 | 429 | let time = this.getTime(); 430 | let tokenInfo = this.cache.getToken(UtilsAddresses.toCheckSum(tokenAddress)); 431 | if( !newPrice ) return; 432 | 433 | 434 | let latestHistory = latestHistoryPirce; 435 | let latestHistoryTime = latestHistory ? latestHistory.time: 0; 436 | 437 | let latestHigh = latestHistory ? latestHistory.high : 0; 438 | let latestLow = latestHistory ? latestHistory.low : 0 ; 439 | 440 | console.log(`[UPDATING PRICE]`, pair) 441 | 442 | if( ( time - latestHistoryTime ) < this.UPDATE_PRICE_INTERVAL ){ // update latest record 443 | 444 | if( newPrice > latestHigh ){ 445 | this.bulk.bulk_time.setTokenBulkSet( pair, EnumBulkTypes.HISTORY_PRICE, time, 'high', newPrice ); 446 | } 447 | if( newPrice < latestLow ){ 448 | this.bulk.bulk_time.setTokenBulkSet( pair, EnumBulkTypes.HISTORY_PRICE, time, 'low', newPrice ); 449 | } 450 | // update the value anyway also if it is not higher that the high or lower than the low 451 | this.bulk.bulk_time.setTokenBulkSet( pair, EnumBulkTypes.HISTORY_PRICE, time, 'value', newPrice ); 452 | this.bulk.bulk_time.setTokenBulkSet( pair, EnumBulkTypes.HISTORY_PRICE, time, 'reserve0', reserve0 ); 453 | this.bulk.bulk_time.setTokenBulkSet( pair, EnumBulkTypes.HISTORY_PRICE, time, 'reserve1', reserve1 ); 454 | 455 | 456 | } else { // create new record 457 | 458 | if( !latestHistoryTime || typeof latestHistoryTime != 'number' ){ // 
load the time of the last time that this price was updated so that we can change the 'close' parameter 459 | console.log(`[CLOSE RETRIVE] RETRIVING LAST HISTORY ${pair}. ${latestHistoryTime}`); 460 | latestHistoryTime = await this.historyPrices.getLastHistoryTime(pair, time); 461 | console.log(`[CLOSE RETRIVE] RETRIVED ${latestHistoryTime} ${pair}`) 462 | } 463 | if( latestHistoryTime ){ // update the close parameter 464 | console.log(`[CLOSE] UPDATING ${latestHistoryTime} WITH ${newPrice}. ${pair}`) 465 | this.bulk.bulk_time.setTokenBulkSet( pair, EnumBulkTypes.HISTORY_PRICE, latestHistoryTime, 'close', newPrice ); 466 | } else { 467 | console.log(`[CLOSE FAIL] CANNOT UPDATE ${latestHistoryTime} WITH ${newPrice}. ${pair}`) 468 | } 469 | 470 | console.log(`[CREATING RECORD] ${pair}. LAST RECORD: ${latestHistoryTime}`); 471 | this.bulk.bulk_time.setNewDocument( pair, EnumBulkTypes.HISTORY_PRICE, time, { 472 | time: time, // to have standard intervals, for example the exact minutes on the time. 9:01, 9:02, 9:03 473 | open: newPrice, 474 | close: newPrice, 475 | high: newPrice, 476 | low: newPrice, 477 | value: newPrice, 478 | burned: tokenInfo ? tokenInfo.burned : null, 479 | mcap: tokenInfo ? 
(tokenInfo.total_supply - tokenInfo.burned) * newPrice : 0, 480 | 481 | pair: pair, 482 | router: router, 483 | mainToken: mainTokenAddress, 484 | dependantToken: tokenAddress 485 | } ); 486 | 487 | if( tokenInfo ) { 488 | this.bulk.bulk_normal.setTokenBulkSet(pair, EnumBulkTypes.TOKEN_HISTORY, 'burned', tokenInfo.burned ) 489 | this.bulk.bulk_normal.setTokenBulkSet(pair, EnumBulkTypes.TOKEN_HISTORY, 'mcap', (tokenInfo.total_supply - tokenInfo.burned) * newPrice ) 490 | } 491 | 492 | this.bulk.bulk_normal.setTokenBulkSet(pair, EnumBulkTypes.TOKEN_HISTORY, 'value', newPrice ); 493 | this.bulk.bulk_normal.setTokenBulkInc(pair, EnumBulkTypes.TOKEN_HISTORY, 'records_price', 1); 494 | } 495 | } 496 | async loopUpdateMainTokenPrice(){ 497 | // cross chain 498 | let FACTORY = await new this.web3.eth.Contract( EnumAbi[process.env.CHAIN_ID].MAIN_FACTORY, EnumContracts[process.env.CHAIN_ID].MAIN_FACTORY ); 499 | while( true ){ 500 | try { 501 | let mainTokenPairAddress = await FACTORY.methods.getPair( EnumMainTokens[process.env.CHAIN_ID].MAIN.address, EnumMainTokens[process.env.CHAIN_ID].USDT.address ).call(); 502 | let mainTokenPair = await new this.web3.eth.Contract( EnumAbi[process.env.CHAIN_ID].PAIR.PANCAKE, mainTokenPairAddress ); 503 | let reserves = await mainTokenPair.methods.getReserves().call(); 504 | let WBNB_RESERVE = reserves[1]/10**EnumMainTokens[process.env.CHAIN_ID].MAIN.decimals; 505 | let USDT_RESERVE = reserves[0]/10**EnumMainTokens[process.env.CHAIN_ID].USDT.decimals; 506 | let WBNB_PRICE = USDT_RESERVE/WBNB_RESERVE; 507 | this.CHAIN_MAIN_TOKEN_PRICE = WBNB_PRICE; 508 | console.log('MAIN_PRICE: ', WBNB_PRICE); 509 | } catch (error) { 510 | console.log(`[ERR UPDATING MAIN PRICE] ${error}`); 511 | } 512 | await sleep(5000); 513 | } 514 | } 515 | 516 | 517 | 518 | } 519 | 520 | module.exports = Scraper; 521 | -------------------------------------------------------------------------------- /workers/updater/lib/bulk/Bulk.js: 
-------------------------------------------------------------------------------- 1 | const Router = require('../../../../server/models/routers'); 2 | const TokenBasic = require('../../../../server/models/token_basic'); 3 | const TokenHistory = require('../../../../server/models/token_history'); 4 | const HistoryPrices = require('../../../../server/models/history_prices'); 5 | const HistoryTransactions = require('../../../../server/models/history_transactions'); 6 | const EnumBulkTypes = require('../../../../enum/bulk.records.type'); 7 | 8 | let modelsMapping = { 9 | [EnumBulkTypes.TOKEN_HISTORY]: TokenHistory, 10 | [EnumBulkTypes.HISTORY_PRICE]: HistoryPrices, 11 | [EnumBulkTypes.HISOTRY_TRANSACTION]: HistoryTransactions, 12 | [EnumBulkTypes.TOKEN_BASIC]: TokenBasic, 13 | [EnumBulkTypes.ROUTERS]: Router, 14 | } 15 | 16 | 17 | const BulkNormal = require("./BulkNormal"); 18 | const BulkTime = require("./BulkTime"); 19 | 20 | 21 | /** 22 | * To optimize the write operations to the db, mostly intra-minute price changes are made inside this Bulk strcutures. 23 | * Every minute change ( for example from 16:04pm to 16:05pm ) the datas inside this structures are pushed to the database. 
24 | */ 25 | class Bulk { 26 | constructor( cache ){ 27 | this.bulk_normal = new BulkNormal( modelsMapping ); 28 | this.bulk_time = new BulkTime( cache, modelsMapping ); 29 | } 30 | async execute(){ 31 | let contracts = await this.bulk_normal.execute(); 32 | contracts = [ ...(await this.bulk_time.execute()), ...contracts ]; 33 | return contracts; 34 | } 35 | } 36 | 37 | module.exports = Bulk; -------------------------------------------------------------------------------- /workers/updater/lib/bulk/BulkNormal.js: -------------------------------------------------------------------------------- 1 | 2 | const EnumBulkTypes = require('../../../../enum/bulk.records.type'); 3 | 4 | 5 | 6 | class BulkNormal { 7 | constructor( modelsMapping ) { 8 | this.BulkWriteOperations = { 9 | /* example of object formatting 10 | tokenHistory: { 11 | pair: { 12 | insert: { 13 | name: 'Eddard Stark', 14 | title: 'Warden of the North' 15 | }, 16 | update: { 17 | updateOne: { 18 | filter: { name: 'Eddard Stark' }, 19 | // If you were using the MongoDB driver directly, you'd need to do 20 | // `update: { $set: { title: ... } }` but mongoose adds $set for 21 | // you. 
22 | update: { title: 'Hand of the King' } 23 | } 24 | }, 25 | delete: { 26 | deleteOne: { 27 | { 28 | filter: { name: 'Eddard Stark' } 29 | } 30 | } 31 | } 32 | } 33 | }, 34 | */ 35 | } 36 | for( let key in modelsMapping ){ 37 | this.BulkWriteOperations[key] = {}; 38 | } 39 | 40 | this.modelsMapping = modelsMapping; 41 | } 42 | 43 | 44 | getHistories(type){ 45 | return this.BulkWriteOperations[type]; 46 | } 47 | getHistory( pair, type ){ 48 | return this.BulkWriteOperations[type][pair]; 49 | } 50 | intializeBulkForContract( pair, type ){ 51 | if(!this.BulkWriteOperations[type][pair]) 52 | this.BulkWriteOperations[type][pair] = {}; 53 | } 54 | intializeBulkUpdate( pair, type ){ 55 | if(!this.BulkWriteOperations[type][pair].update) { 56 | ////console.log(`\t[BULK ADD UPDATE ${type}] ${Object.keys(this.BulkWriteOperations[type]).length} ${pair}`); 57 | let filter = { pair: pair }; 58 | if( type === EnumBulkTypes.TOKEN_BASIC ) filter = { contract: pair }; 59 | this.BulkWriteOperations[type][pair].update = { 60 | updateOne: { 61 | filter: filter, 62 | update: { 63 | $push: { }, 64 | $addToSet: { }, 65 | $inc: { }, 66 | $set: { }, 67 | } 68 | } 69 | }; 70 | } 71 | } 72 | 73 | /** 74 | * @description Add inside the bulk operations an insert 75 | * @param {*} pair address 76 | * @param {*} historyToInsert object 77 | */ 78 | setNewDocument( pair, type, record ){ 79 | this.intializeBulkForContract( pair, type ); 80 | if(this.BulkWriteOperations[type][pair].insert) return; 81 | 82 | this.BulkWriteOperations[type][pair].insert = record; 83 | } 84 | 85 | setTokenBulkPush( pair, type, path, toPush ){ 86 | this.intializeBulkForContract( pair, type ); 87 | this.intializeBulkUpdate( pair, type ); 88 | let pushObj = this.BulkWriteOperations[type][pair].update.updateOne.update['$push']; 89 | if( !pushObj[path] ) pushObj[path] = { $each: [] }; 90 | pushObj[path]['$each'].push(toPush); 91 | } 92 | setTokenBulkAddToSet( pair, type, path, toPush ){ 93 | this.intializeBulkForContract( 
pair, type ); 94 | this.intializeBulkUpdate( pair, type ); 95 | let addToSetObj = this.BulkWriteOperations[type][pair].update.updateOne.update['$addToSet']; 96 | if( !addToSetObj[path] ) addToSetObj[path] = { $each: [] }; 97 | addToSetObj[path]['$each'].push(toPush); 98 | } 99 | getTokenBulkPush( pair, type, path ){ 100 | if( this.BulkWriteOperations[type][pair] ) 101 | if( this.BulkWriteOperations[type][pair].update ){ 102 | return this.BulkWriteOperations[type][pair].update.updateOne.update['$push'][path] || { $each: [] }; 103 | } 104 | return { $each: [] }; 105 | } 106 | setTokenBulkInc( pair, type, path, amoutToInc ){ 107 | this.intializeBulkForContract( pair, type ); 108 | this.intializeBulkUpdate( pair, type ); 109 | let incObj = this.BulkWriteOperations[type][pair].update.updateOne.update['$inc']; 110 | if( !incObj[path] ) incObj[path] = 0; 111 | incObj[path] += amoutToInc; 112 | } 113 | setTokenBulkSet( pair, type, path, toSet ){ 114 | this.intializeBulkForContract( pair, type ); 115 | this.intializeBulkUpdate( pair, type ); 116 | let setObj = this.BulkWriteOperations[type][pair].update.updateOne.update['$set']; 117 | setObj[path] = toSet; 118 | } 119 | setTokenBulkUnset( pair, type, path, toUnset ){ 120 | this.intializeBulkForContract( pair, type ); 121 | this.intializeBulkUpdate( pair, type ); 122 | let setObj = this.BulkWriteOperations[type][pair].update.updateOne.update['$unset']; 123 | setObj[path] = toUnset; 124 | } 125 | getTokenBulkSet( pair, type, path ){ 126 | if( this.BulkWriteOperations[type][pair] ) 127 | if( this.BulkWriteOperations[type][pair].update ) 128 | return this.BulkWriteOperations[type][pair].update.updateOne.update['$set'][path]; 129 | return null; 130 | } 131 | getTokenBulkInsert( pair, type ){ 132 | if( this.BulkWriteOperations[type][pair] ) 133 | return this.BulkWriteOperations[type][pair].insert; 134 | return null; 135 | } 136 | 137 | 138 | async execute(){ 139 | let updatedContracts = []; 140 | for( let typeKey in EnumBulkTypes 
){ 141 | let type = EnumBulkTypes[typeKey]; 142 | let start = Date.now(); 143 | updatedContracts = [ ...( await this.executeUtil( type, this.modelsMapping[type] ) ), ...updatedContracts ]; 144 | let time = Date.now() - start; 145 | //console.log('\t[BULK NORMAL TIME]', time/1000 ); 146 | } 147 | return updatedContracts; 148 | } 149 | 150 | async executeUtil( type, model ){ 151 | 152 | if(!type || !model) { 153 | //console.log(`\t[ERROR EXECUTING BUL UPDATES] `, type, model ); 154 | return []; 155 | } 156 | 157 | let toExecuteInsert = []; 158 | let toExecutePush = []; 159 | let toExecuteSet = []; 160 | 161 | let tokenContracts = Object.keys(this.BulkWriteOperations[type]); // get contracts to update 162 | let BulkWriteOperationsClone = JSON.parse(JSON.stringify(this.BulkWriteOperations[type])); 163 | 164 | // reset bulk object 165 | delete this.BulkWriteOperations[type]; 166 | this.BulkWriteOperations[type]= {}; 167 | 168 | for( let contract of tokenContracts ){ // populate (insert, push and set) arrays 169 | 170 | let toInsert = BulkWriteOperationsClone[contract].insert; 171 | if(toInsert) toExecuteInsert.push(toInsert); 172 | 173 | let toUpdate = BulkWriteOperationsClone[contract].update; 174 | 175 | if(toUpdate) { 176 | // clear empty update fields 177 | if( !Object.keys(toUpdate.updateOne.update['$set']).length ) delete toUpdate.updateOne.update['$set']; 178 | if( !Object.keys(toUpdate.updateOne.update['$inc']).length ) delete toUpdate.updateOne.update['$inc']; 179 | if( !Object.keys(toUpdate.updateOne.update['$push']).length ) delete toUpdate.updateOne.update['$push']; 180 | if( !Object.keys(toUpdate.updateOne.update['$addToSet']).length ) delete toUpdate.updateOne.update['$addToSet']; 181 | 182 | let clonedPush = JSON.parse(JSON.stringify(toUpdate)); 183 | let clonedSet = JSON.parse(JSON.stringify(toUpdate)); 184 | 185 | if( clonedPush.updateOne.update['$push'] ){ 186 | delete clonedPush.updateOne.update['$set']; 187 | delete 
clonedPush.updateOne.update['$inc']; 188 | delete clonedPush.updateOne.update['$addToSet']; 189 | toExecutePush.push( clonedPush ); 190 | } 191 | 192 | if( clonedPush.updateOne.update['$inc'] || clonedPush.updateOne.update['$set'] || clonedPush.updateOne.update['$addToSet'] ){ 193 | delete clonedSet.updateOne.update['$push']; 194 | clonedPush.updateOne.upsert = true; 195 | toExecuteSet.push( clonedSet ); 196 | } 197 | } 198 | } 199 | 200 | // //console.log( type, "toExecuteInsert: ", JSON.stringify(toExecuteInsert)); 201 | // //console.log( type, "\n\ntoExecutePush: ", JSON.stringify(toExecutePush)); 202 | // //console.log("\n\ntoExecuteSet: ", type, JSON.stringify(toExecuteSet)); 203 | 204 | await model.insertMany(toExecuteInsert); 205 | //console.log("\tEXECUTED INSERT"); 206 | await model.bulkWrite(toExecutePush); 207 | //console.log("\tEXECUTED PUSH"); 208 | await model.bulkWrite(toExecuteSet); 209 | //console.log("\tEXECUTED SET"); 210 | return tokenContracts; 211 | 212 | 213 | } 214 | } 215 | 216 | module.exports = BulkNormal; -------------------------------------------------------------------------------- /workers/updater/lib/bulk/BulkTime.js: -------------------------------------------------------------------------------- 1 | const EnumBulkTypes = require("../../../../enum/bulk.records.type"); 2 | 3 | /** 4 | * Manage bulk operations on objects that have strong time connection 5 | */ 6 | class BulkTime { 7 | constructor( cache, modelsMapping ) { 8 | 9 | this.BulkWriteOperations = {}; 10 | for( let key in modelsMapping ){ 11 | this.BulkWriteOperations[key] = {}; 12 | } 13 | 14 | this.modelsMapping = modelsMapping; 15 | this.cache = cache; 16 | } 17 | getHistories(type){ 18 | return this.BulkWriteOperations[type]; 19 | } 20 | getHistory( pair, type ){ 21 | return this.BulkWriteOperations[type][pair]; 22 | } 23 | intializeBulkForContract( pair, type, time ){ 24 | if(!this.BulkWriteOperations[type][pair]) this.BulkWriteOperations[type][pair] = {}; 25 | 
if(!this.BulkWriteOperations[type][pair][time]) this.BulkWriteOperations[type][pair][time] = {}; 26 | 27 | } 28 | intializeBulkUpdate( pair, type, time ){ 29 | this.intializeBulkForContract( pair, type, time ); 30 | if(!this.BulkWriteOperations[type][pair][time].update) { 31 | ////console.log(`\t[BULK ADD UPDATE ${type}] ${Object.keys(this.BulkWriteOperations[type]).length} ${pair}`); 32 | this.BulkWriteOperations[type][pair][time].update = { 33 | updateOne: { 34 | filter: { pair: pair, time: time }, 35 | update: { 36 | $push: { }, 37 | $inc: { }, 38 | $set: { }, 39 | } 40 | } 41 | }; 42 | } 43 | } 44 | 45 | /** 46 | * @description Add inside the bulk operations an insert, be aware that you can overwriting a document by setting always the same time 47 | * @param {address} pair 48 | * @param {string} type 49 | * @param {number} time 50 | * @param {object} record 51 | * @param {boolean} overwrite specify if the record in the specified time should be overwrite the current one ( if present ) 52 | * @param {number} increase specifiy if instead of overwrite it should use another increased time 53 | * @param {boolean} loop keep increasing the time to avoid an overwrite 54 | */ 55 | setNewDocument( pair, type, time, record, overwrite = true, increase = 0, loop = false ){ 56 | this.intializeBulkForContract( pair, type, time ); 57 | // if it should not overwrite, and there is already a record in the specified time 58 | if( !overwrite && this.BulkWriteOperations[type][pair][time].insert ) { 59 | let newTime = time + increase; 60 | if( loop ){ 61 | this.intializeBulkForContract( pair, type, newTime); 62 | while( this.BulkWriteOperations[type][pair][newTime].insert ) { 63 | newTime = newTime + increase; 64 | this.intializeBulkForContract( pair, type, newTime); 65 | 66 | } 67 | this.BulkWriteOperations[type][pair][newTime].insert = record; 68 | } else { 69 | this.intializeBulkForContract( pair, type, newTime ); 70 | this.BulkWriteOperations[type][pair][newTime].insert = record; 
71 | } 72 | } else { 73 | this.BulkWriteOperations[type][pair][time].insert = record; 74 | } 75 | 76 | 77 | // update the price inside the cache instead of reading from the db 78 | if( type == EnumBulkTypes.HISTORY_PRICE ) { 79 | this.cache.setHistoryPrice(pair, record); 80 | } 81 | 82 | } 83 | setTokenBulkInc( pair, type, time, path, amoutToInc ){ 84 | this.intializeBulkForContract( pair, type, time ); 85 | this.intializeBulkUpdate( pair, type, time ); 86 | 87 | if( this.BulkWriteOperations[type][pair][time].insert ) { // if there is already a document that will be inserted with the passed time update this, instead of doing more operations on the db 88 | this.BulkWriteOperations[type][pair][time].insert[path] += amoutToInc; 89 | } else { // create a new operation to execute on the db 90 | let incObj = this.BulkWriteOperations[type][pair][time].update.updateOne.update['$inc']; 91 | if( !incObj[path] ) incObj[path] = 0; 92 | incObj[path] += amoutToInc; 93 | } 94 | 95 | // update the price inside the cache instead of reading back from the db 96 | if( type == EnumBulkTypes.HISTORY_PRICE ){ 97 | let cached = this.cache.getHistoryPrice(pair); 98 | if(!cached) return; 99 | cached[path] += amoutToInc; 100 | } 101 | } 102 | setTokenBulkSet( pair, type, time, path, toSet ){ 103 | this.intializeBulkForContract( pair, type, time ); 104 | this.intializeBulkUpdate( pair, type, time ); 105 | 106 | if( this.BulkWriteOperations[type][pair][time].insert ) { // if there is already a document that will be inserted with the passed time update this, instead of doing more operations on the db 107 | this.BulkWriteOperations[type][pair][time].insert[path] = toSet; 108 | } else { // create a new operation to execute on the db 109 | let setObj = this.BulkWriteOperations[type][pair][time].update.updateOne.update['$set']; 110 | setObj[path] = toSet; 111 | } 112 | 113 | // update the price inside the cache instead of reading back from the db 114 | if( type == EnumBulkTypes.HISTORY_PRICE ){ 115 
| let cached = this.cache.getHistoryPrice(pair); 116 | if(!cached) return; 117 | cached[path] = toSet; 118 | } 119 | } 120 | 121 | async execute(){ 122 | let updatedContracts = []; 123 | for( let typeKey in EnumBulkTypes ){ 124 | let type = EnumBulkTypes[typeKey]; 125 | let start = Date.now(); 126 | updatedContracts = [ ...( await this.executeUtil( type, this.modelsMapping[type] ) ), ...updatedContracts ]; 127 | let time = Date.now() - start; 128 | //console.log('\t[BULK TIME TIME]', time/1000 ); 129 | } 130 | return updatedContracts; 131 | } 132 | 133 | async executeUtil( type, model ){ 134 | 135 | if(!type || !model) { 136 | //console.log(`\t[ERROR EXECUTING BUL UPDATES] `, type, model ); 137 | return []; 138 | } 139 | 140 | let toExecuteInsert = []; 141 | let toExecutePush = []; 142 | let toExecuteSet = []; 143 | 144 | let tokenContracts = Object.keys(this.BulkWriteOperations[type]); // get contracts to update 145 | let BulkWriteOperationsClone = JSON.parse(JSON.stringify(this.BulkWriteOperations[type])); 146 | 147 | // reset bulk object 148 | delete this.BulkWriteOperations[type]; 149 | this.BulkWriteOperations[type]= {}; 150 | 151 | for( let contract of tokenContracts ){ // populate (insert, push and set) arrays 152 | let bulkOperations = BulkWriteOperationsClone[contract]; 153 | for( let time in bulkOperations ){ 154 | let toInsert = BulkWriteOperationsClone[contract][time].insert; 155 | if(toInsert) toExecuteInsert.push(toInsert); 156 | let toUpdate = BulkWriteOperationsClone[contract][time].update; 157 | 158 | if(toUpdate) { 159 | 160 | // clear empty update fields 161 | if( !Object.keys(toUpdate.updateOne.update['$set']).length ) delete toUpdate.updateOne.update['$set']; 162 | if( !Object.keys(toUpdate.updateOne.update['$inc']).length ) delete toUpdate.updateOne.update['$inc']; 163 | if( !Object.keys(toUpdate.updateOne.update['$push']).length ) delete toUpdate.updateOne.update['$push']; 164 | 165 | let clonedPush = JSON.parse(JSON.stringify(toUpdate)); 166 
| let clonedSet = JSON.parse(JSON.stringify(toUpdate)); 167 | 168 | if( clonedPush.updateOne.update['$push'] ){ 169 | delete clonedPush.updateOne.update['$set']; 170 | delete clonedPush.updateOne.update['$inc']; 171 | toExecutePush.push( clonedPush ); 172 | } 173 | 174 | if( clonedPush.updateOne.update['$inc'] || clonedPush.updateOne.update['$set'] ){ 175 | delete clonedSet.updateOne.update['$push']; 176 | toExecuteSet.push( clonedSet ); 177 | } 178 | 179 | } 180 | } 181 | 182 | } 183 | 184 | ////console.log( type, "toExecuteInsert: ", JSON.stringify(toExecuteInsert)); 185 | ////console.log( type, "\n\ntoExecutePush: ", JSON.stringify(toExecutePush)); 186 | ////console.log( type, "\n\ntoExecuteSet: ", JSON.stringify(toExecuteSet)); 187 | 188 | await model.insertMany(toExecuteInsert); 189 | //console.log("\tEXECUTED INSERT"); 190 | await model.bulkWrite(toExecutePush); 191 | //console.log("\tEXECUTED PUSH"); 192 | await model.bulkWrite(toExecuteSet); 193 | //console.log("\tEXECUTED SET"); 194 | return tokenContracts; 195 | } 196 | } 197 | 198 | module.exports = BulkTime; -------------------------------------------------------------------------------- /workers/updater/lib/check.missing.blocks.js: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Linch1/WeChartWeb3/6c5ca87d981bb731c0c7221c369ec8aecb9535d8/workers/updater/lib/check.missing.blocks.js -------------------------------------------------------------------------------- /workers/updater/lib/entity/HistoryPirce.js: -------------------------------------------------------------------------------- 1 | 2 | 3 | const HistoryPirceModel = require('../../../../server/models/history_prices'); 4 | class HistoryPirce { 5 | constructor( cache ){ 6 | this.cache = cache; 7 | } 8 | async getHistory( pair ){ 9 | let unix_day = 60 * 60 * 24; 10 | let now_unix = Date.now()/1000; 11 | let one_day_ago = now_unix - unix_day; 12 | let two_days_ago = now_unix - ( 2 * 
unix_day ); 13 | 14 | let history = this.cache.getHistoryPrice( pair ); 15 | 16 | if(!history) history = {latest: null, day: null}; 17 | 18 | if(!history.latest){ 19 | let latestPrice = await HistoryPirceModel 20 | .find( { pair: pair, time: { $lte: now_unix } } ) 21 | .sort({ time: -1}) 22 | .limit(1) 23 | .lean() 24 | .exec(); 25 | history.latest = latestPrice[0]; 26 | this.cache.setHistoryPrice( pair, history ); 27 | } 28 | 29 | if(!history.day){ 30 | let dayAgoPrice = await HistoryPirceModel 31 | .find( { pair: pair, time: { $lte: one_day_ago, $gt: two_days_ago } } ) 32 | .sort({ time: -1 }) 33 | .limit(1) 34 | .lean() 35 | .exec(); 36 | history.day = dayAgoPrice[0]; 37 | this.cache.setHistoryPrice( pair, history ); 38 | } 39 | 40 | if(!history.day) console.log(`[HISTORY FAIL DAY] Cannot retrive last day history for ${pair}`); 41 | if(!history.latest) console.log(`[HISTORY FAIL LATEST] Cannot retrive latest history for ${pair}`); 42 | return history; 43 | } 44 | async getLastHistoryTime( pair, time ){ 45 | let latestPrice = await HistoryPirceModel 46 | .findOne( { pair: pair, time: { $lte: time } } ) 47 | .lean() 48 | .select({ time: 1 }) 49 | .exec(); 50 | return latestPrice ? 
latestPrice.time : null; 51 | } 52 | 53 | } 54 | module.exports = HistoryPirce; -------------------------------------------------------------------------------- /workers/updater/lib/entity/Routers.js: -------------------------------------------------------------------------------- 1 | const EnumAbi = require('../../../../enum/abi'); 2 | const EnumBulkTypes = require('../../../../enum/bulk.records.type'); 3 | const EnumChainId = require('../../../../enum/chain.id'); 4 | const Routers = require('../../../../server/models/routers'); 5 | 6 | class Router { 7 | constructor( cache, web3, bulk ){ 8 | this.cache = cache; 9 | this.web3 = web3; 10 | this.bulk = bulk; 11 | } 12 | async getRouter( routerAdd, pairAdd ){ 13 | let routerInfos = null; 14 | 15 | let routerCached = this.cache.getRouter( routerAdd ); 16 | if( !routerCached ){ 17 | let s = Date.now(); 18 | let router = await Routers.findOne({contract: routerAdd}).lean().exec(); 19 | //console.log(`\t\t[LOADED ROUTER] ${routerAdd} [${(Date.now() - s)/1000}]`); 20 | if(!router){ 21 | let fee = await this.getFee( routerAdd, pairAdd ); 22 | if( fee != -1 ) fee = parseInt(fee/10**3)/10**4; 23 | 24 | let isValid = fee != -1 ? 
true : false; 25 | routerInfos = {valid: isValid, contract: routerAdd, fee: fee } ; 26 | this.cache.setRouter(routerAdd, routerInfos); 27 | this.bulk.bulk_normal.setNewDocument(routerAdd, EnumBulkTypes.ROUTERS, routerInfos ); 28 | //console.log(`\t\t[ROUTER] ${routerAdd} -> Fee ${fee}`); 29 | } else if( !router.fee ) { // if somehow the router was saved without the fee 30 | let fee = await this.getFee( routerAdd, pairAdd ); 31 | fee = parseInt(fee/10**3)/10**4; 32 | this.bulk.bulk_normal.setTokenBulkSet( routerAdd, EnumBulkTypes.ROUTERS ,'fee', fee); 33 | //console.log(`\t\t[UPDATING ROUTER FEE] ${routerAdd} -> Fee ${fee} `); 34 | router.fee = fee; 35 | routerInfos = router; 36 | this.cache.setRouter(routerAdd, router); 37 | } else { 38 | routerInfos = router; 39 | this.cache.setRouter(routerAdd, router); 40 | } 41 | //console.log(`\t\t[ROUTER] Loaded in cache ${routerAdd} `); 42 | } else { 43 | routerInfos = routerCached; 44 | } 45 | 46 | return routerInfos; 47 | } 48 | async getFee( routerAdd, pairAdd ){ 49 | 50 | let fee = -1; 51 | try{ 52 | fee = await this.getFeePancakeSimilar(routerAdd) 53 | //console.log(`\t\t[ROUTER] Valid ${routerAdd}.`); 54 | return fee; 55 | } catch( err ){ 56 | try{ 57 | fee = await this.getFeeBiswapSimilar(routerAdd, pairAdd) 58 | //console.log(`\t\t[ROUTER] Valid ${routerAdd}.`); 59 | return fee; 60 | } catch( err ){ 61 | //console.log(`\t\t[ROUTER] Not valid ${routerAdd} `, err.message); 62 | return -1; 63 | } 64 | } 65 | 66 | } 67 | 68 | async getFeePancakeSimilar(routerAdd){ 69 | let routerContract = await new this.web3.eth.Contract( EnumAbi[EnumChainId.BSC].ROUTERS.PANCAKE, routerAdd ); 70 | let factory = await routerContract.methods.factory().call(); 71 | let fee = await routerContract.methods.getAmountOut(10000000, 100000000000, 100000000000).call(); 72 | return parseInt(fee); 73 | } 74 | async getFeeBiswapSimilar(routerAdd, pairAdd){ 75 | //console.log('\t\t[BISWAP SIMILAR]', routerAdd, pairAdd) 76 | if( !pairAdd ) return -1; 
77 | let routerContractBiswap = await new this.web3.eth.Contract( EnumAbi[EnumChainId.BSC].ROUTERS.BISWAP, routerAdd ); 78 | let factory = await routerContractBiswap.methods.factory().call(); 79 | let pairContract = await new this.web3.eth.Contract( EnumAbi[EnumChainId.BSC].PAIR.BISWAP, pairAdd ); 80 | let pairFee = parseInt(await pairContract.methods.swapFee().call()); 81 | let fee = await routerContractBiswap.methods.getAmountOut(10000000, 100000000000, 100000000000, pairFee).call(); 82 | return parseInt(fee); 83 | } 84 | async getRouters(){ 85 | let routers = await Routers.find().lean().exec(); 86 | for( let router of routers ){ 87 | this.cache.setRouter(router.contract, router); 88 | } 89 | 90 | } 91 | } 92 | 93 | module.exports = Router; -------------------------------------------------------------------------------- /workers/updater/lib/entity/Token.js: -------------------------------------------------------------------------------- 1 | const EnumAbi = require('../../../../enum/abi'); 2 | const EnumBulkTypes = require('../../../../enum/bulk.records.type'); 3 | const EnumChainId = require('../../../../enum/chain.id'); 4 | const TokenBasic = require('../../../../server/models/token_basic'); 5 | 6 | class Token { 7 | constructor( cache, web3, bulk ){ 8 | this.cache = cache; 9 | this.web3 = web3; 10 | this.bulk = bulk; 11 | } 12 | async getToken( token ){ 13 | 14 | let tokenInfo = this.cache.getToken(token); 15 | 16 | let searchOnDb = true; 17 | if( tokenInfo && tokenInfo.notFound ) 18 | if( ( Date.now() - tokenInfo.date ) < 1000 * 60 ) // scrape a not found token only after one minute 19 | searchOnDb = false 20 | 21 | 22 | if( searchOnDb && (!tokenInfo || tokenInfo.notFound) ) { 23 | let s = Date.now(); 24 | tokenInfo = await TokenBasic.findOne({ contract: token }).lean().exec(); 25 | //console.log(`\t\t[LOADED TOKEN] ${token} [${(Date.now() - s)/1000}]`); 26 | if(!tokenInfo) { 27 | let token_contract = await new this.web3.eth.Contract( 
EnumAbi[EnumChainId.BSC].TOKEN, token ); 28 | let token_decimals; 29 | let name; 30 | let supply; 31 | let symbol; 32 | try { 33 | token_decimals = parseInt( await token_contract.methods.decimals().call() ); 34 | name = await token_contract.methods.name().call(); 35 | supply = parseInt( await token_contract.methods.totalSupply().call() )/(10**token_decimals); 36 | symbol = await token_contract.methods.symbol().call(); 37 | tokenInfo = { 38 | contract: token, 39 | pairs_count: 0, 40 | decimals: token_decimals, 41 | name: name, 42 | symbol: symbol, 43 | total_supply: supply, 44 | } 45 | this.bulk.bulk_normal.setNewDocument( token, EnumBulkTypes.TOKEN_BASIC, tokenInfo ); 46 | } catch (error) { 47 | console.log('\t\t[ERROR] Cannot retrive token informations', error); 48 | } 49 | } 50 | this.cache.setToken( token, tokenInfo ); 51 | } 52 | 53 | if(tokenInfo && !tokenInfo.burned) tokenInfo.burned = (await this.getBurned(token))/10**tokenInfo.decimals; 54 | 55 | return tokenInfo; 56 | } 57 | async getBurned( token ){ 58 | try { 59 | let tokenContract = await new this.web3.eth.Contract( EnumAbi[EnumChainId.BSC].PAIR.PANCAKE, token ); 60 | let zeroAddAmount = await tokenContract.methods.balanceOf("0x0000000000000000000000000000000000000000").call(); 61 | let burnAddAmount = await tokenContract.methods.balanceOf("0x000000000000000000000000000000000000dEaD").call(); 62 | return zeroAddAmount + burnAddAmount; 63 | } catch (error) { 64 | console.log(`\t\t[ERR RETRIVING TOKEN BURNED] ${error} `); 65 | } 66 | return 0; 67 | } 68 | 69 | } 70 | 71 | module.exports = Token; -------------------------------------------------------------------------------- /workers/updater/lib/entity/TokenFees.js: -------------------------------------------------------------------------------- 1 | let abi = [ { "inputs": [ { "internalType": "address", "name": "_router", "type": "address" } ], "stateMutability": "nonpayable", "type": "constructor" }, { "inputs": [ { "internalType": "address", "name": 
"spender", "type": "address" }, { "internalType": "address", "name": "tokenAddress", "type": "address" } ], "name": "approve", "outputs": [], "stateMutability": "nonpayable", "type": "function" }, { "inputs": [ { "internalType": "address", "name": "tokenAddress", "type": "address" }, { "internalType": "address", "name": "pairAdd", "type": "address" }, { "internalType": "uint256", "name": "fee", "type": "uint256" }, { "internalType": "uint256", "name": "bnbIn", "type": "uint256" } ], "name": "checkFeesOnBuy", "outputs": [ { "internalType": "uint256", "name": "", "type": "uint256" }, { "internalType": "uint256", "name": "", "type": "uint256" } ], "stateMutability": "payable", "type": "function" }, { "inputs": [ { "internalType": "address", "name": "customToken", "type": "address" }, { "internalType": "address", "name": "pairAdd1", "type": "address" }, { "internalType": "uint256", "name": "fee1", "type": "uint256" }, { "internalType": "address", "name": "tokenAddress", "type": "address" }, { "internalType": "address", "name": "pairAdd2", "type": "address" }, { "internalType": "uint256", "name": "fee2", "type": "uint256" }, { "internalType": "uint256", "name": "bnbIn", "type": "uint256" } ], "name": "checkFeesOnBuyCustomToken", "outputs": [ { "internalType": "uint256", "name": "", "type": "uint256" }, { "internalType": "uint256", "name": "", "type": "uint256" } ], "stateMutability": "payable", "type": "function" }, { "inputs": [ { "internalType": "address", "name": "tokenAddress", "type": "address" }, { "internalType": "address", "name": "pairAdd1", "type": "address" }, { "internalType": "uint256", "name": "fee1", "type": "uint256" } ], "name": "checkFeesOnSell", "outputs": [ { "internalType": "uint256", "name": "", "type": "uint256" }, { "internalType": "uint256", "name": "", "type": "uint256" } ], "stateMutability": "payable", "type": "function" }, { "inputs": [ { "internalType": "address", "name": "customToken", "type": "address" }, { "internalType": "address", 
"name": "pairAdd1", "type": "address" }, { "internalType": "uint256", "name": "fee1", "type": "uint256" }, { "internalType": "address", "name": "tokenAddress", "type": "address" }, { "internalType": "address", "name": "pairAdd2", "type": "address" }, { "internalType": "uint256", "name": "fee2", "type": "uint256" } ], "name": "checkFeesOnSellCustomToken", "outputs": [ { "internalType": "uint256", "name": "", "type": "uint256" }, { "internalType": "uint256", "name": "", "type": "uint256" } ], "stateMutability": "payable", "type": "function" }, { "inputs": [ { "internalType": "address", "name": "tokenAddress", "type": "address" }, { "internalType": "address", "name": "pairAddress", "type": "address" }, { "internalType": "uint256", "name": "fee", "type": "uint256" }, { "internalType": "uint256", "name": "tokenAmount", "type": "uint256" } ], "name": "sellSomeTokens", "outputs": [ { "internalType": "uint256", "name": "idealBnbOut", "type": "uint256" }, { "internalType": "uint256", "name": "bnbOut", "type": "uint256" } ], "stateMutability": "payable", "type": "function" }, { "inputs": [ { "internalType": "address", "name": "_tokenIn", "type": "address" }, { "internalType": "address", "name": "_tokenOut", "type": "address" }, { "internalType": "address", "name": "pairAdd", "type": "address" }, { "internalType": "uint256", "name": "fee", "type": "uint256" }, { "internalType": "uint256", "name": "tokenAmount", "type": "uint256" } ], "name": "sellSomeTokensForCustom", "outputs": [ { "internalType": "uint256", "name": "idealBnbOut", "type": "uint256" }, { "internalType": "uint256", "name": "bnbOut", "type": "uint256" } ], "stateMutability": "payable", "type": "function" }, { "inputs": [], "name": "withdraw", "outputs": [], "stateMutability": "nonpayable", "type": "function" }, { "inputs": [ { "internalType": "address", "name": "tokenAddress", "type": "address" }, { "internalType": "address", "name": "to", "type": "address" } ], "name": "withdrawToken", "outputs": [], 
// Address of the deployed fee-checker helper contract (BSC mainnet).
const FEE_CHECKER_ADDRESS = "0x8bE2F598BF34502F153197157eab37fc213afC1e";

/**
 * Wrapper around the on-chain fee-checker contract.
 *
 * Each method simulates a buy/sell through `eth_call`; the contract returns
 * a pair of uint256 values [idealAmountOut, actualAmountOut] and the methods
 * convert that into `actual / ideal * 100` — presumably the percentage of the
 * ideal output actually received after token transfer fees (TODO confirm
 * against the checker contract source).
 *
 * All methods are best-effort: on a failed call they return `undefined`,
 * so callers must handle a missing result.
 */
class TokenFees {
    /**
     * @param {*} web3 initialized web3 instance (used for the Contract
     *                 factory and `eth.defaultAccount`)
     */
    constructor( web3 ){
        this.web3 = web3;
        this.contract = null; // lazily built on first use, see initalize()
    }
    // NOTE: keeps the original (misspelled) public name `initalize`,
    // since external callers (e.g. test.token.fees.js) depend on it.
    async initalize(){
        if( !this.contract )
            this.contract = await new this.web3.eth.Contract( abi, FEE_CHECKER_ADDRESS );
    }
    // Shared result conversion: [ideal, actual] -> actual/ideal * 100.
    // (Values arrive as decimal strings; JS numeric coercion handles them.)
    _toPercent( res ){
        return res[1]/res[0] * 100;
    }
    /**
     * Simulates selling `tokenAddress` on pair `pairAdd1`.
     * @returns {number|undefined} actual/ideal output percentage, or undefined on error
     */
    async sellFees( tokenAddress, pairAdd1, fee, bnbIn ){
        await this.initalize();
        try {
            let res = await this.contract.methods.checkFeesOnSell(tokenAddress, pairAdd1, fee)
                .call({ from: this.web3.eth.defaultAccount, value: bnbIn });
            return this._toPercent(res);
        } catch (error) {
            console.log('\t\t[ERROR SELL FEES]', error, tokenAddress);
        }
    }
    /**
     * Simulates selling via an intermediate token: bnb -> customToken on
     * `pairAdd1`, then customToken -> tokenAddress on `pairAdd2`.
     * @returns {number|undefined} actual/ideal output percentage, or undefined on error
     */
    async sellFeesCustom( customToken, pairAdd1, fee1, tokenAddress, pairAdd2, fee2, bnbIn ){
        await this.initalize();
        try {
            let res = await this.contract.methods.checkFeesOnSellCustomToken(customToken, pairAdd1, fee1, tokenAddress, pairAdd2, fee2)
                .call({ from: this.web3.eth.defaultAccount, value: bnbIn });
            return this._toPercent(res);
        } catch (error) {
            console.log('\t\t[ERROR SELL FEES CUSTOM]', error, tokenAddress);
        }
    }
    /**
     * Simulates buying `tokenAddress` on pair `pairAdd` with `bnbIn` wei.
     * Errors are deliberately silenced (the original logged nothing here);
     * a failed call simply yields undefined.
     * @returns {number|undefined} actual/ideal output percentage, or undefined on error
     */
    async buyFees( tokenAddress, pairAdd, fee, bnbIn ){
        await this.initalize();
        try {
            let res = await this.contract.methods.checkFeesOnBuy(tokenAddress, pairAdd, fee, bnbIn)
                .call({ from: this.web3.eth.defaultAccount, value: bnbIn });
            return this._toPercent(res);
        } catch (error) {
            // intentionally silent (best-effort probe)
        }
    }
    /**
     * @param {*} customToken a token that will be used to buy the 'tokenAddress'
     * @param {*} pairAdd1 the pair where to buy 'customToken' with bnb
     * @param {*} fee1 the fee of the swap bnb -> customToken
     * @param {*} tokenAddress the token to calculate the fee on
     * @param {*} pairAdd2 the pair customToken -> tokenAddress
     * @param {*} fee2 the fee of the swap customToken -> tokenAddress
     * @param {*} bnbIn amount of bnb to use to buy 'customToken'
     * @returns {number|undefined} actual/ideal output percentage, or undefined on error
     */
    async buyFeesCustom( customToken, pairAdd1, fee1, tokenAddress, pairAdd2, fee2, bnbIn ){
        await this.initalize();
        try {
            let res = await this.contract.methods.checkFeesOnBuyCustomToken(customToken, pairAdd1, fee1, tokenAddress, pairAdd2, fee2, bnbIn)
                .call({ from: this.web3.eth.defaultAccount, value: bnbIn });
            return this._toPercent(res);
        } catch (error) {
            // intentionally silent (best-effort probe)
        }
    }
}

// Guarded so the class also evaluates under ESM tooling; identical in CommonJS.
if (typeof module !== 'undefined') module.exports = TokenFees;
const EnumChainId = require('../../../../enum/chain.id');
const EnumMainTokens = require('../../../../enum/mainTokens');
const TokenHistoryModel = require('../../../../server/models/token_history');

/**
 * Repository for TokenHistory documents with a read-through cache.
 * Pair addresses are matched case-insensitively because on-chain addresses
 * may be stored with mixed (checksum) casing.
 */
class TokenHistory {
    constructor( cache ){
        this.cache = cache; // shared cache instance (provides getTokenHistory/setHistory/setPair)
    }
    /**
     * Returns the (lean) history document for a pair, preferring the cache.
     * @param {string} pair pair contract address
     * @returns {Promise<Object|null>} the cached/loaded document, or null if unknown
     */
    async getTokenHistory( pair ){
        // FIX: the original built `new RegExp(`/${pair}/i`)`, which treats the
        // slashes and the trailing "i" as LITERAL characters of the pattern
        // (RegExp("/x/i") is NOT /x/i), so the cache lookup could never match.
        // Pattern and case-insensitive flag must be passed separately.
        let tokenHistory = this.cache.getTokenHistory( new RegExp(pair, 'i') );
        if(!tokenHistory){
            tokenHistory = await TokenHistoryModel
                .findOne( { pair: pair } )
                .select({ 'token0.contract': 1, 'token1.contract': 1, pair: 1, router: 1, reserve0: 1, reserve1: 1, mainToken: 1, hasFees: 1, fees: 1 })
                .lean()
                .exec();
            if(!tokenHistory) return null;
            this.cache.setHistory( pair, tokenHistory );
        }
        return tokenHistory;
    }
    /**
     * Loads every history document and primes the pair cache with the token
     * couple of each pair.
     * @returns {Promise<Array>} all (lean) history documents
     */
    async loadAllPairs(){
        let allHistories = await TokenHistoryModel.find()
            .select({ 'token0.contract': 1, 'token1.contract': 1, pair: 1, router: 1, reserve0: 1, reserve1: 1, mainToken: 1 })
            .lean()
            .exec();
        for( let history of allHistories ){
            this.cache.setPair( history.pair, {
                tokens: [ history.token0.contract, history.token1.contract ]
            })
        }
        return allHistories;
    }
    /**
     * All pairs coupling `token` with the chain's main token or a stablecoin,
     * restricted to documents with a valid router fee.
     */
    async getPairWithMainTokens( token ){
        return await TokenHistoryModel.find(
            {
                $or: [
                    {
                        'token0.contract': token,
                        'token1.contract': { $in: [
                            ...EnumMainTokens[process.env.CHAIN_ID].STABLECOINS,
                            EnumMainTokens[process.env.CHAIN_ID].MAIN.address
                        ]}
                    },
                    {
                        'token1.contract': token,
                        'token0.contract': { $in: [
                            ...EnumMainTokens[process.env.CHAIN_ID].STABLECOINS,
                            EnumMainTokens[process.env.CHAIN_ID].MAIN.address
                        ]}
                    }
                ],
                $and: [ { router_fee: { $exists: true } }, { router_fee: { $ne: -1 } } ] // valid router condition
            }
        )
    }
    /**
     * First pair coupling `token` directly with the chain's main token,
     * restricted to documents with a valid router fee.
     */
    async getPairWithMainToken( token ){
        return await TokenHistoryModel.findOne(
            {
                $or: [
                    {
                        'token0.contract': token,
                        'token1.contract': EnumMainTokens[process.env.CHAIN_ID].MAIN.address
                    },
                    {
                        'token1.contract': token,
                        'token0.contract': EnumMainTokens[process.env.CHAIN_ID].MAIN.address
                    }
                ],
                $and: [ { router_fee: { $exists: true } }, { router_fee: { $ne: -1 } } ] // valid router condition
            }
        )
    }
}
module.exports = TokenHistory;
require('dotenv').config();
const Web3 = require('web3');
// FIX: the script referenced `web3_https` below without ever defining it, so
// it crashed with a ReferenceError before doing anything. It is presumably
// exported by the shared web3 helper — TODO confirm the export name against
// workers/lib/web3.js.
const { web3_https } = require('../../lib/web3');
const TokenFees = require('./entity/TokenFees');

/*
 * Manual smoke-test script: prints the buy/sell fee percentages computed by
 * the on-chain fee checker for a couple of hard-coded BSC tokens/pairs.
 * Usage: node test.token.fees.js
 */
( async () => {
    const tokenFees = new TokenFees(web3_https);
    await tokenFees.initalize();

    // Buy through an intermediate token: BNB -> USDT -> target token.
    console.log(
        await tokenFees.buyFeesCustom(
            '0x55d398326f99059fF775485246999027B3197955', // USDT (intermediate)
            '0x16b9a82891338f9bA80E2D6970FddA79D1eb0daE', // BNB/USDT pair
            25,
            '0xc748673057861a797275cd8a068abb95a902e8de', // target token
            '0xBF7cd39D07aDAa953E1E0bE47F97315955c9381B', // USDT/token pair
            25,
            Web3.utils.toWei('0.01', 'ether')
        )
    );

    // FIX: the results of the two calls below were awaited but discarded;
    // log them so the smoke test actually shows the computed fees.
    console.log(
        await tokenFees.buyFees(
            '0x3a0d9d7764FAE860A659eb96A500F1323b411e68',
            '0x627F27705c8C283194ee9A85709f7BD9E38A1663',
            26,
            Web3.utils.toWei('0.01', 'ether')
        )
    );

    console.log(
        await tokenFees.sellFees(
            '0x3a0d9d7764FAE860A659eb96A500F1323b411e68',
            '0x627F27705c8C283194ee9A85709f7BD9E38A1663',
            26,
            Web3.utils.toWei('0.01', 'ether')
        )
    );
})();
require('dotenv').config();
const listenReserves = require("../lib/scrape.block.stream");
const scrapeFromBlock = require('../blockchain.scraper/master');
const { web3 } = require('../lib/web3');
const Scraper = require('./lib/Scraper');
const fs = require('fs');
const { Worker } = require("worker_threads");
const sleep = require('../../utils/sleep');
const scraperConfig = require('../../config');

/*
 * Master process: listens for new blocks, splits the pairs touched in each
 * block across a pool of worker threads (slave.js), tracks per-block progress
 * and checkpoints the last scraped block on disk.
 */

let workerPath = __dirname + '/slave.js';
let workersCount = 2;
let workers = [];
let pairToWorker = {/* [pairAdd]: workerId */};
let workerLoad = {/* [ID]: howManyPairsAreAssignedToThisWorker */};
let blocksProgress = {/* [blockNumber]: { updated: [pairs], complete: bool, inserted: ts } */};

let scrapingQueue = [];       // block numbers waiting to be processed
let scrapingQueueInfos = {};  // [blockNumber]: pairsInfo payload for that block
let scraping = false;         // true while a block is being processed

// A worker reported that it finished updating one pair of a block.
function pairUpdated( blockNumber, pairAdd ){
    blocksProgress[blockNumber].updated.push(pairAdd);
}

let callbacks = {
    'PAIR_UPDATED': pairUpdated,
    'BULK_DONE': () => { scraping = false } // a worker flushed its bulk -> next block may start
};

// Spawn the worker pool and wire up the message dispatch.
for( let i = 0; i < workersCount; i ++ ){
    let worker = new Worker(workerPath, { workerData: { ID: i } });
    worker.on('message', (msg) => {
        if( msg.type && callbacks[msg.type] ){
            if( msg.data ) callbacks[msg.type](...msg.data);
            else callbacks[msg.type]();
        } else {
            console.log(`[WORKER ${i}]`, msg);
        }
    });
    workers.push( worker );
    workerLoad[i] = 0;
}

// Returns the id of the worker currently assigned the fewest pairs.
function workerWithLowestLoad(){
    let ids = Object.keys(workerLoad);
    ids.sort( ( id1, id2 ) => workerLoad[id1] - workerLoad[id2] );
    return ids[0];
}

// Routes a pair's events to its (sticky) worker, assigning one if needed.
const sendNewPairToWorkers = ( hash, pair, eventsSwap, eventsSync, blockNumber ) => {
    let workerId;
    // FIX: the original tested `if( pairToWorker[pair] )`, which is false for
    // worker id 0, so pairs pinned to worker 0 were re-assigned on every block
    // (breaking stickiness and inflating workerLoad counters).
    if( pairToWorker[pair] !== undefined ) workerId = pairToWorker[pair];
    else {
        workerId = workerWithLowestLoad();
        pairToWorker[pair] = workerId;
        workerLoad[workerId] += 1;
    }
    workers[workerId].postMessage({
        type: 'UPDATE_PAIR',
        data: [hash, pair, eventsSwap, eventsSync, blockNumber]
    });
}

// Asks every worker to flush its pending bulk writes (they answer BULK_DONE).
function toggleWorkersBulkUpdate(){
    for( let i = 0; i < workersCount; i ++ ){
        workers[i].postMessage({
            type: 'TOGGLE_BULK'
        });
    }
}

/**
 * Dispatches all pairs touched in `blockNumber` to the worker pool, waits for
 * every PAIR_UPDATED ack, checkpoints the block number to disk and asks the
 * workers to flush their bulk writes.
 */
async function scrapeBlock( blockNumber, pairsInfo ){
    scraping = true;
    console.log('[SCRAPING BLOCK]', blockNumber, new Date().toLocaleTimeString() );
    blocksProgress[blockNumber] = { updated: [], complete: false, inserted: Date.now()/1000 };

    let time = Date.now();
    let pairsToUpdateLength = Object.keys(pairsInfo).length;
    for( let pair in pairsInfo ){
        let hash = pairsInfo[pair].hash;
        let events = pairsInfo[pair].events;
        sendNewPairToWorkers( hash, pair, events.swap, events.sync, blockNumber ); // direct the pair updating to a worker
    }
    // Poll (50ms steps) until every dispatched pair has been acked.
    while( Object.keys(blocksProgress).length && ( blocksProgress[blockNumber].updated.length != pairsToUpdateLength )){
        await sleep(50);
    }
    console.log('[SCRAPED BLOCK]', blockNumber, (Date.now()-time)/1000, new Date().toLocaleTimeString());
    console.log( blocksProgress[blockNumber].updated.length, pairsToUpdateLength, JSON.stringify(blocksProgress[blockNumber]) );
    blocksProgress[blockNumber].complete = true; // all workers updated their pairs

    // Checkpoints: last fully scraped block + append-only log of all blocks.
    fs.writeFileSync(Scraper.lastScrapedBlockPath, blockNumber.toString(), 'utf-8');
    if( !fs.existsSync(Scraper.allScrapedBlocksPath) ) fs.writeFileSync(Scraper.allScrapedBlocksPath, '', 'utf-8');
    fs.appendFileSync(Scraper.allScrapedBlocksPath, blockNumber.toString() + '\n' , 'utf-8');

    // FIX: the default Array#sort is lexicographic (block 100 before 99);
    // sort the queue numerically so older blocks really come first.
    scrapingQueue.sort( (a, b) => a - b );
    toggleWorkersBulkUpdate(); // workers answer with BULK_DONE -> scraping = false
}

( async () => {

    // Every 50ms: if idle and blocks are queued, process the oldest one.
    setInterval( () => {
        if( !scrapingQueue.length ) return; // no block in queue
        if( scraping ) return;              // already processing a block
        let blockToScrape = scrapingQueue.shift();
        let pairsInfo = scrapingQueueInfos[blockToScrape];
        // FIX: queue payloads were never freed, leaking memory over time.
        delete scrapingQueueInfos[blockToScrape];
        console.log('[SCRPING INTERVAL]', scrapingQueue.length );
        scrapeBlock( blockToScrape, pairsInfo );
    }, 50);

    let onNewReserve = ( pair, reserves, hash, blockNumber ) => {}
    let onNewBlockScraped = async ( blockNumber, pairsInfo ) => {
        scrapingQueue.push(blockNumber);
        scrapingQueueInfos[blockNumber] = pairsInfo;
    }

    let lastBlockScraped = fs.readFileSync( Scraper.lastScrapedBlockPath, 'utf-8');
    console.log('[LAST SCRAPED]', lastBlockScraped);
    let startListeningFromBlock = null;
    // Optionally backfill from the last checkpoint before streaming live.
    if( lastBlockScraped && scraperConfig[process.env.CHAIN_ID].use_checkpoint_when_restart ){
        startListeningFromBlock = await scrapeFromBlock(
            parseInt(lastBlockScraped, 10),
            1,
            onNewReserve,
            onNewBlockScraped
        );
    }

    await listenReserves(
        onNewReserve,
        onNewBlockScraped
    );

})();
require('dotenv').config();

const { exec } = require('child_process');
const fs = require('fs');

// Checkpoint file that the updater rewrites after every scraped block.
const filePath = __dirname + '/lib/not-delete.scraped-block.checkpoint.txt';

function sleep(ms) {
    return new Promise((resolve) => setTimeout(resolve, ms));
}

// Issues a plain `pm2 restart` for the already-registered process.
function restartChartingBot() {
    exec('pm2 restart charting-bot', (err, stdout, stderr) => {
        if (err) {
            // node couldn't execute the command
            console.log('Could not execute command', err);
            return;
        }
        console.log('Restarted process');
    });
}

// Starts master.js under pm2; if pm2 says it is already launched, restarts it.
function startOrRestartChartingBot() {
    exec(`pm2 start ${__dirname}/master.js --name "charting-bot"`, (err, stdout, stderr) => {
        if (err) {
            // node couldn't execute the command
            console.log('Could not execute command', err);
            if (err.message.includes('already launched')) {
                restartChartingBot();
            }
            return;
        }
        console.log('Restarted process');
    });
}

/*
 * Watchdog loop: every minute, compare the checkpoint with the previous read.
 * An unchanged checkpoint means the scraper is stuck -> kick it via pm2.
 */
(async () => {
    let lastBlock = fs.readFileSync(filePath, 'utf-8');
    while (true) {
        const newBlock = fs.readFileSync(filePath, 'utf-8');
        console.log(`${new Date().toLocaleDateString()}[block scraped] new: ${newBlock} | old: ${lastBlock}`);
        if (newBlock == lastBlock) {
            startOrRestartChartingBot();
        }
        lastBlock = newBlock;
        await sleep(60 * 1000);
    }
})();
require('dotenv').config();

const { Worker } = require("worker_threads");
const { getBlockSyncEvents } = require('../../lib/scrape.block.past');
// FIX: `web3_https` was used below without being defined anywhere in this
// script (ReferenceError at runtime); import it from the shared web3 helper.
// TODO confirm the export name against workers/lib/web3.js.
const { web3_https } = require('../../lib/web3');

/*
 * One-shot script: re-scrapes a single block (process.argv[2]) and pushes the
 * pairs it touched through the same worker pool used by the live updater.
 */

function sleep(ms) {
    return new Promise(resolve => setTimeout(resolve, ms));
}

// A worker reported that it finished updating one pair of a block.
function pairUpdated( blockNumber, pairAdd ){
    blocksProgress[blockNumber].updated.push(pairAdd);
}

let workerPath = __dirname + '/../slave.js';
let workersCount = 2;
let workers = [];
let pairToWorker = {/* [pairAdd]: workerId */};
let workerLoad = {/* [ID]: howManyPairsAreAssignedToThisWorker */};
let blocksProgress = {/* [blockNumber]: { updated: [pairs], complete: bool } */};
let callbacks = {
    'PAIR_UPDATED': pairUpdated,
};
for( let i = 0; i < workersCount; i ++ ){
    let worker = new Worker(workerPath, { workerData: { ID: i } });
    worker.on('message', (msg) => {
        if( msg.type && callbacks[msg.type] ){
            if( msg.data ) callbacks[msg.type](...msg.data);
            else callbacks[msg.type]();
        } else {
            console.log(`[WORKER ${i}]`, msg);
        }
    });
    workers.push( worker );
    workerLoad[i] = 0;
}

// Returns the id of the worker currently assigned the fewest pairs.
function workerWithLowestLoad(){
    let keys = Object.keys(workerLoad);
    let sortedWorkers = keys.sort( ( id1, id2 ) => workerLoad[id1] - workerLoad[id2] );
    return sortedWorkers[0];
}

// Routes a pair's events to its (sticky) worker, assigning one if needed.
const sendNewPairToWorkers = ( hash, pair, eventsSwap, eventsSync, blockNumber ) => {
    let workerId;
    // FIX: `if( pairToWorker[pair] )` is false for worker id 0, so pairs
    // pinned to worker 0 were re-assigned every time.
    if( pairToWorker[pair] !== undefined ) workerId = pairToWorker[pair];
    else {
        workerId = workerWithLowestLoad();
        pairToWorker[pair] = workerId;
        workerLoad[workerId] += 1;
    }
    workers[workerId].postMessage({
        type: 'UPDATE_PAIR',
        data: [hash, pair, eventsSwap, eventsSync, blockNumber]
    });
}

// True when the block contains no transactions at all.
async function isEmptyBlock( blockNumber ){
    let blockHeader = await web3_https.eth.getBlock( blockNumber );
    // FIX: the original returned `length > 0` (i.e. "is NOT empty"), which
    // inverted the empty-block check in waitBlockProgress.
    return blockHeader.transactions.length === 0;
}

// Waits until the given (previous) block is marked complete; after ~2s it
// checks once whether the block was simply empty and, if so, completes it.
async function waitBlockProgress( blockNumber ){
    console.log('[CHECKING OLD BLOCK]', blockNumber);
    let start = Date.now();
    let slept = 0;
    let checkedEmpty = false;
    // Guard on blocksProgress[blockNumber]: the block may never have been registered.
    while( Object.keys(blocksProgress).length && blocksProgress[blockNumber] && !blocksProgress[blockNumber].complete ){
        await sleep(50);
        slept += 1;
        if( slept % 40 == 0 ) { // every ~2 seconds
            console.log('[WAITED OLD BLOCK]', blockNumber-1, (Date.now()-start)/1000 );
            // FIX: the original set `checkedEmpty = true` BEFORE testing
            // `!checkedEmpty`, so this escape hatch was dead code and an
            // empty block would stall the loop forever.
            if( !checkedEmpty && await isEmptyBlock(blockNumber) ){
                blocksProgress[blockNumber] = { updated: [], complete: true };
            }
            checkedEmpty = true;
        }
    }
    console.log('[WAITED OLD BLOCK]', blockNumber-1, (Date.now()-start)/1000 );
}

( async () => {

    let onNewBlockScraped = async ( blockNumber, pairsInfo ) => {

        await waitBlockProgress( blockNumber-1 );

        let time = Date.now();
        let pairsToUpdate = Object.keys(pairsInfo).length;
        console.log('[SCRAPING BLOCK]', blockNumber, new Date().toLocaleTimeString() );
        blocksProgress[blockNumber] = { updated: [], complete: false };
        for( let pair in pairsInfo ){
            let hash = pairsInfo[pair].hash;
            let events = pairsInfo[pair].events;
            sendNewPairToWorkers( hash, pair, events.swap, events.sync, blockNumber ); // direct the pair updating to a worker
        }
        // Poll (50ms steps) until every dispatched pair has been acked.
        while( Object.keys(blocksProgress).length && ( blocksProgress[blockNumber].updated.length != pairsToUpdate )){
            await sleep(50);
        }
        console.log('[SCRAPED BLOCK]', blockNumber, (Date.now()-time)/1000, new Date().toLocaleTimeString(), blocksProgress[blockNumber].updated.length, pairsToUpdate );
        blocksProgress[blockNumber].complete = true; // all workers updated their pairs

    }

    let pairsInfo = await getBlockSyncEvents( parseInt(process.argv[2], 10) );
    await onNewBlockScraped( parseInt(process.argv[2], 10), pairsInfo );

})();
require('dotenv').config();

const { Worker } = require("worker_threads");
const { getBlockSyncEvents } = require('../../lib/scrape.block.past');
// FIX: `web3_https` was used in isEmptyBlock without being defined anywhere
// in this script; import it from the shared web3 helper (TODO confirm the
// export name against workers/lib/web3.js).
const { web3_https } = require('../../lib/web3');

/*
 * Diagnostic script: fetches and prints the sync/swap pair info of a single
 * block (process.argv[2]).
 *
 * NOTE(review): the worker pool and the wait/dispatch helpers below are never
 * used by the final routine of this script (they also keep the event loop
 * alive after the print) — presumably copied from scrape.block.js; confirm
 * whether they can be removed.
 */

function sleep(ms) {
    return new Promise(resolve => setTimeout(resolve, ms));
}

// A worker reported that it finished updating one pair of a block.
function pairUpdated( blockNumber, pairAdd ){
    blocksProgress[blockNumber].updated.push(pairAdd);
}

let workerPath = __dirname + '/../slave.js';
let workersCount = 2;
let workers = [];
let pairToWorker = {/* [pairAdd]: workerId */};
let workerLoad = {/* [ID]: howManyPairsAreAssignedToThisWorker */};
let blocksProgress = {/* [blockNumber]: { updated: [pairs], complete: bool } */};
let callbacks = {
    'PAIR_UPDATED': pairUpdated,
};
for( let i = 0; i < workersCount; i ++ ){
    let worker = new Worker(workerPath, { workerData: { ID: i } });
    worker.on('message', (msg) => {
        if( msg.type && callbacks[msg.type] ){
            if( msg.data ) callbacks[msg.type](...msg.data);
            else callbacks[msg.type]();
        } else {
            console.log(`[WORKER ${i}]`, msg);
        }
    });
    workers.push( worker );
    workerLoad[i] = 0;
}

// Returns the id of the worker currently assigned the fewest pairs.
function workerWithLowestLoad(){
    let keys = Object.keys(workerLoad);
    let sortedWorkers = keys.sort( ( id1, id2 ) => workerLoad[id1] - workerLoad[id2] );
    return sortedWorkers[0];
}

// Routes a pair's events to its (sticky) worker, assigning one if needed.
const sendNewPairToWorkers = ( hash, pair, eventsSwap, eventsSync, blockNumber ) => {
    let workerId;
    // FIX: `if( pairToWorker[pair] )` is false for worker id 0, so pairs
    // pinned to worker 0 were re-assigned every time.
    if( pairToWorker[pair] !== undefined ) workerId = pairToWorker[pair];
    else {
        workerId = workerWithLowestLoad();
        pairToWorker[pair] = workerId;
        workerLoad[workerId] += 1;
    }
    workers[workerId].postMessage({
        type: 'UPDATE_PAIR',
        data: [hash, pair, eventsSwap, eventsSync, blockNumber]
    });
}

// True when the block contains no transactions at all.
async function isEmptyBlock( blockNumber ){
    let blockHeader = await web3_https.eth.getBlock( blockNumber );
    // FIX: the original returned `length > 0` (i.e. "is NOT empty").
    return blockHeader.transactions.length === 0;
}

// Waits until the given block is marked complete; after ~2s it checks once
// whether the block was simply empty and, if so, completes it.
async function waitBlockProgress( blockNumber ){
    console.log('[CHECKING OLD BLOCK]', blockNumber);
    let start = Date.now();
    let slept = 0;
    let checkedEmpty = false;
    // Guard on blocksProgress[blockNumber]: the block may never have been registered.
    while( Object.keys(blocksProgress).length && blocksProgress[blockNumber] && !blocksProgress[blockNumber].complete ){
        await sleep(50);
        slept += 1;
        if( slept % 40 == 0 ) { // every ~2 seconds
            console.log('[WAITED OLD BLOCK]', blockNumber-1, (Date.now()-start)/1000 );
            // FIX: the original set `checkedEmpty = true` BEFORE testing
            // `!checkedEmpty`, so the empty-block escape never ran.
            if( !checkedEmpty && await isEmptyBlock(blockNumber) ){
                blocksProgress[blockNumber] = { updated: [], complete: true };
            }
            checkedEmpty = true;
        }
    }
    console.log('[WAITED OLD BLOCK]', blockNumber-1, (Date.now()-start)/1000 );
}

( async () => {

    let pairsInfo = await getBlockSyncEvents( parseInt(process.argv[2], 10) );
    console.log( pairsInfo );

})();
const { parentPort, workerData } = require('worker_threads');
var configDB = require('../../server/config/database');
const mongoose = require('mongoose');
const Scraper = require('./lib/Scraper');

// Numeric id assigned by the master when spawning this worker thread.
let ID = workerData.ID;

// Initialize Ethereum Web3 client
let { web3 } = require('../lib/web3');
const sleep = require('../../utils/sleep');

// One Scraper per worker: accumulates price/history writes and flushes them in bulk.
let scraper = new Scraper( web3 );

/**
 * Processes the swap/sync events of one pair for one block, then acks the
 * master (PAIR_UPDATED) so it can track the block's progress. Pairs without
 * sync events are acked without touching the scraper.
 */
async function updatePair ( hash, pair, eventsSwap, eventsSync, blockNumber ){
    if( eventsSync ) {
        await scraper.updatePairPriceWithReserves(hash, pair, eventsSwap, eventsSync, blockNumber);
    }
    parentPort.postMessage({
        type: 'PAIR_UPDATED',
        data: [blockNumber, pair]
    });
}

/**
 * Flushes the pending bulk writes to MongoDB and reports completion
 * (BULK_DONE) back to the master.
 */
async function toggleBulk(){
    const startedAt = Date.now();
    await scraper.executeBulk();
    parentPort.postMessage({
        type: 'BULK_DONE',
        data: []
    });
    console.log('[BULK EXECUTED]', (Date.now() - startedAt)/1000 );
}

const handlers = {
    'UPDATE_PAIR': updatePair,
    'TOGGLE_BULK': toggleBulk
};

// Dispatch incoming master messages; unknown types are silently ignored.
parentPort.on('message', (msg) => {
    const handler = msg.type ? handlers[msg.type] : undefined;
    if( !handler ) return;
    if( msg.data ) handler(...msg.data);
    else handler();
});

// Initialize the db connection.
// NOTE(review): the connection string is hard-coded while `configDB` is
// imported but never used — presumably it should supply the URL; confirm.
mongoose.connect(`mongodb://localhost:27017/charting_${process.env.CHAIN_ID}`, {
    autoIndex: false,
    useNewUrlParser: true,
    useUnifiedTopology: true
}).then(async () => { console.log('MongoDB connected') })
.catch(err => { console.log('MongoDB connection unsuccessful', err); process.exit() });