├── .eslintrc.json
├── .gitignore
├── Dockerfile
├── LICENSE
├── README.md
├── RELEASE.md
├── Untitled Diagram.drawio
├── config.js
├── dao.js
├── discord.js
├── docker-compose.yml
├── docker-start.js
├── docs
│   └── SETUP.md
├── edb.js
├── enforce.js
├── getPathObj.js
├── helpers.js
├── hive.js
├── index.js
├── ipfsSaveState.js
├── lil_ops.js
├── msa.js
├── package.json
├── pathwise.js
├── processing_routes
│   ├── cert.js
│   ├── cjv.js
│   ├── comment.js
│   ├── delegate_vesting_shares.js
│   ├── dex.js
│   ├── fork.js
│   ├── gov.js
│   ├── index.js
│   ├── nft.js
│   ├── nodes.js
│   ├── nomention.js
│   ├── onBlock.js
│   ├── onStreamingStart.js
│   ├── power.js
│   ├── prediction.js
│   ├── q4d.js
│   ├── report.js
│   ├── send.js
│   ├── sig.js
│   └── vote.js
├── processor.js
├── report.js
├── routes
│   ├── api.js
│   └── test.js
├── rtrades.js
├── state.js
├── tally.js
├── test
│   ├── indexTest.js
│   ├── test_blocks.js
│   └── test_state.js
└── voter.js
/.eslintrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "env": {
3 | "node": true,
4 | "commonjs": true,
5 | "es2021": true
6 | },
7 | "extends": [
8 | ],
9 | "parserOptions": {
10 | "ecmaVersion": 12
11 | },
12 | "plugins": [
13 | ],
14 | "rules": {
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | deploy.env
2 | event.json
3 | .lambda
4 | .db
5 | claudia.json
6 | context.json
7 | event_sources.json
8 | lambda.js
9 | dlux.zip
10 |
11 | # Created by https://www.gitignore.io/api/node,linux,macos,windows,webstorm+all
12 |
13 | ### Linux ###
14 | *~
15 |
16 | # temporary files which can be created if a process still has a handle open of a deleted file
17 | .fuse_hidden*
18 |
19 | # KDE directory preferences
20 | .directory
21 |
22 | # Linux trash folder which might appear on any partition or disk
23 | .Trash-*
24 |
25 | # .nfs files are created when an open file is removed but is still being accessed
26 | .nfs*
27 |
28 | ### macOS ###
29 | *.DS_Store
30 | .AppleDouble
31 | .LSOverride
32 |
33 | # Icon must end with two \r
34 | Icon
35 |
36 | # Thumbnails
37 | ._*
38 |
39 | # Files that might appear in the root of a volume
40 | .DocumentRevisions-V100
41 | .fseventsd
42 | .Spotlight-V100
43 | .TemporaryItems
44 | .Trashes
45 | .VolumeIcon.icns
46 | .com.apple.timemachine.donotpresent
47 |
48 | # Directories potentially created on remote AFP share
49 | .AppleDB
50 | .AppleDesktop
51 | Network Trash Folder
52 | Temporary Items
53 | .apdisk
54 |
55 | ### Node ###
56 | # Logs
57 | logs
58 | *.log
59 | npm-debug.log*
60 | yarn-debug.log*
61 | yarn-error.log*
62 |
63 | # Runtime data
64 | pids
65 | *.pid
66 | *.seed
67 | *.pid.lock
68 |
69 | # Directory for instrumented libs generated by jscoverage/JSCover
70 | lib-cov
71 |
72 | # Coverage directory used by tools like istanbul
73 | coverage
74 |
75 | # nyc test coverage
76 | .nyc_output
77 |
78 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
79 | .grunt
80 |
81 | # Bower dependency directory (https://bower.io/)
82 | bower_components
83 |
84 | # node-waf configuration
85 | .lock-wscript
86 |
87 | # Compiled binary addons (http://nodejs.org/api/addons.html)
88 | build/Release
89 |
90 | # Dependency directories
91 | node_modules/
92 | jspm_packages/
93 |
94 | # Typescript v1 declaration files
95 | typings/
96 |
97 | # Optional npm cache directory
98 | .npm
99 |
100 | # Optional eslint cache
101 | .eslintcache
102 |
103 | # Optional REPL history
104 | .node_repl_history
105 |
106 | # Output of 'npm pack'
107 | *.tgz
108 |
109 | # Yarn Integrity file
110 | .yarn-integrity
111 |
112 | # dotenv environment variables file
113 | .env
114 |
115 | ### WebStorm+all ###
116 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
117 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
118 |
119 | # User-specific stuff:
120 | .idea/**/workspace.xml
121 | .idea/**/tasks.xml
122 | .idea/dictionaries
123 |
124 | # Sensitive or high-churn files:
125 | .idea/**/dataSources/
126 | .idea/**/dataSources.ids
127 | .idea/**/dataSources.xml
128 | .idea/**/dataSources.local.xml
129 | .idea/**/sqlDataSources.xml
130 | .idea/**/dynamic.xml
131 | .idea/**/uiDesigner.xml
132 |
133 | # Gradle:
134 | .idea/**/gradle.xml
135 | .idea/**/libraries
136 |
137 | # CMake
138 | cmake-build-debug/
139 |
140 | # Mongo Explorer plugin:
141 | .idea/**/mongoSettings.xml
142 |
143 | ## File-based project format:
144 | *.iws
145 |
146 | ## Plugin-specific files:
147 |
148 | # IntelliJ
149 | /out/
150 |
151 | # mpeltonen/sbt-idea plugin
152 | .idea_modules/
153 |
154 | # JIRA plugin
155 | atlassian-ide-plugin.xml
156 |
157 | # Cursive Clojure plugin
158 | .idea/replstate.xml
159 |
160 | # Ruby plugin and RubyMine
161 | /.rakeTasks
162 |
163 | # Crashlytics plugin (for Android Studio and IntelliJ)
164 | com_crashlytics_export_strings.xml
165 | crashlytics.properties
166 | crashlytics-build.properties
167 | fabric.properties
168 |
169 | ### WebStorm+all Patch ###
170 | # Ignores the whole .idea folder and all .iml files
171 | # See https://github.com/joeblau/gitignore.io/issues/186 and https://github.com/joeblau/gitignore.io/issues/360
172 |
173 | .idea/
174 |
175 | # Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-249601023
176 |
177 | *.iml
178 | modules.xml
179 | .idea/misc.xml
180 | *.ipr
181 |
182 | ### Windows ###
183 | # Windows thumbnail cache files
184 | Thumbs.db
185 | ehthumbs.db
186 | ehthumbs_vista.db
187 |
188 | # Folder config file
189 | Desktop.ini
190 |
191 | # Recycle Bin used on file shares
192 | $RECYCLE.BIN/
193 |
194 | # Windows Installer files
195 | *.cab
196 | *.msi
197 | *.msm
198 | *.msp
199 |
200 | # Windows shortcuts
201 | *.lnk
202 |
203 |
204 | # End of https://www.gitignore.io/api/node,linux,macos,windows,webstorm+all
205 | .DS_Store
206 | package-lock.json
207 |
208 | # Elastic Beanstalk Files
209 | .elasticbeanstalk/*
210 | !.elasticbeanstalk/*.cfg.yml
211 | !.elasticbeanstalk/*.global.yml
212 | package-lock.json
213 | package-lock.json
214 | .DS_Store
215 |
216 | db/
217 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:14
2 |
3 | WORKDIR /honeycomb
4 |
5 | COPY package.json .
6 |
7 | RUN npm install
8 |
9 | COPY . .
10 |
11 | CMD ["node", "docker-start.js"]
12 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2022 Steven Ettinger | dlux
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/RELEASE.md:
--------------------------------------------------------------------------------
1 | # Release Notes
2 |
3 | ## 1.3
4 |
5 | ### 1.3.4
6 |
7 | * Switch back to hive-tx after updates to package.
8 | * Retooling transaction verification and broadcasting
9 | * Quiet logging unless env.mode == 'verbose'
10 | * Fix error on account update logic
11 | * Allow for API address updates via configuration
12 |
13 | ### 1.3.3
14 |
15 | * Fix Liquidity Rewards
16 |
17 | ### 1.3.2
18 |
19 | * Remove artifacts
20 |
21 | ### 1.3.1
22 |
23 | * Fix signature verification
24 | * Fix Auto-Healing Consensus
25 | * Backoff timer in processor retries
26 |
27 | ### 1.3.0
28 |
29 | * Can Add IPFS Node API
30 | * Ensure Consensus has a majority of owners (double majority)
31 | * Ensure reassembled State is equivalent to Consensus
32 | * Allow for more than 3 Multi-Signature holders
33 | * Verify signatures without relying on the Hive API
34 | * GetBlock Retry
35 | * Fix Downpower memory leak.
36 | * Fix NFT token check
37 | * Add API for Downpower
38 | * Docker Network Specified
39 | * IPFS image and network dependencies change
40 |
41 | ## 1.2
42 |
43 | * Updated restarts for scaling mechanism
44 | * Updated consensus mechanism for scaling
45 | * New witness promotion routine
46 | * Dockerized with own IPFS dependencies
47 | * Automated IPFS Swarming
48 | * Fast Replay
49 | * Autonomous Multi-sig Account Management
50 | * Accurate Tracking of collateralized safety margins
51 | * Enforcement of safety margins
52 | * Added a claim routine to ease tax reporting
53 | * Half of claims locked in gov or power
54 | * Opened NFT minting to all accounts
55 | * Fixed DEX history
56 | * Fixed DEX sell loop for expired trades
57 | * Implemented multiple hive-js node selection
58 | * Only Node operators can lock Gov
59 |
60 | ### 1.1.3
61 |
62 | * Remove stop callback for API failure. Rely on API Cycle only
63 | * Remove ecency from API List
64 | * Add release notes
65 |
--------------------------------------------------------------------------------
/Untitled Diagram.drawio:
--------------------------------------------------------------------------------
1 | 7VhZc9MwEP41Hp5gfBQnecRp0wKhU2ihw6NiK7aI7DWynINfzyqWYzsOOYD0yDQv0a43K2m/b4/YcPrx/FKQNPoEAeWGbQZzwzk3bNuybQe/lGZRaLq9bqEIBQu0UaW4Zb+oVppam7OAZg1DCcAlS5tKH5KE+rKhI0LArGk2Bt7cNSUhbSlufcLb2nsWyEjfwu5U+ivKwqjc2XJ7xZOYlMb6JllEApjVVM6F4fQFgCxW8bxPuQpeGZf794t7Ppy4lx8+Zz/JV+/j3fW314WzwSE/WV1B0ET+X9d24XpKeK7jpe8qF2UABeRJQJUT03C8SMYclxYuf1ApFxpwkktAFQgZQQgJ4UOAVNuNIZHazFIyTYJ3CliURxz8SaEaMM71Hihp+y5KmRQwWWGnHKyAUMacjCj3iD8JlwftAweBjxJIqHIVIBn0XarDXVRab8/YagwyyIVPt9jpbJFEhHSbP7ewU+er8VQjd0khplIs0EBQTiSbNslMdE6EK7sKd1xo6A+ggdOiwZDEqcp6oFnySuJqBmKyhRsKl1nEJL1NyTJAMywnTb7UeYBX9UJOskyjuAPkw0CaUiHpfGtYy6euzu2yuGlxVlUKq0z/qFYlzswjAXHWAuI7Fs+XlPyXlHT3TMmyY+3MSU2Wkhh7p6j2dAMskTUTGI8zPNg6dVYb/j2b3rbYdA0tMu3G41nT7ZHI1DmMS9aT55L7hxbhcgyDNxK4CtUq5XkYIlnQXWI4g3btiiAe5dnudtHAWDFoQGLGVbyuKJ9SyXyyoakQzkLc99xHuKnYTB7ckiUhSm4l3S3JivX3iM2m02w2K7nebcwN3aZ7rG7TaWF6g+gV0NkmX+J7Yh3fsddA6D12y+/uU6SfdQl+8I5fVtPdLd/as0xr9phvHPw0CPT0p4DeXjPlyxhwHIbZpzYIlCes8cnL+ag9CYxykSwHAcjlyySwbRKwNzWhB50EykJYA/ULTQlDMM3hKc4B6//8z5zHngPKStGEgKuY2maRYSeGwfos5ljHwwDF6k1tUQqr993OxW8=
--------------------------------------------------------------------------------
/config.js:
--------------------------------------------------------------------------------
1 | require('dotenv').config();
2 | const ENV = process.env;
3 |
4 | const username = ENV.account || 'disregardfiat';
5 | const active = ENV.active || '';
6 | const follow = ENV.follow || 'disregardfiat';
7 | const msowner = ENV.msowner || '';
8 | const mspublic = ENV.mspublic || '';
9 | const memoKey = ENV.memo || '';
10 | const hookurl = ENV.discordwebhook || '';
11 | const NODEDOMAIN = ENV.domain || 'http://dlux-token.herokuapp.com' //where your API lives
12 | const acm = ENV.account_creator || false //account creation market ... use your account's HP to claim account tokens
13 | const mirror = ENV.mirror || false //makes identical posts, votes and IPFS pins as the leader account
14 | const port = ENV.PORT || 3001;
15 | const pintoken = ENV.pintoken || ''
16 | const pinurl = ENV.pinurl || '';
17 | const status = ENV.status || true
18 | const dbcs = ENV.DATABASE_URL || ''; //connection string to a postgres database
19 | const dbmods = ENV.DATABASE_MODS || []; //list of moderators to hide posts in above db
20 | const typeDefs = ENV.APPTYPES || {
21 | ["360"]: ['QmNby3SMAAa9hBVHvdkKvvTqs7ssK4nYa2jBdZkxqmRc16'],
22 | }
23 | const history = ENV.history || 3600
24 | const stream = ENV.stream || 'irreversible'
25 | const mode = ENV.mode || "normal";
26 | const timeoutStart = ENV.timeoutStart || 180000;
27 | const timeoutContinuous = ENV.timeoutContinuous || 30000;
28 |
29 | // testing configs for replays
30 | const override = ENV.override || 0 //69116600 //will use standard restarts after this blocknumber
31 | const engineCrank = ENV.startingHash || 'QmconUD3faVGbgC2jAXRiueEuLarjfaUiDz5SA74kptuvu' //but this state will be inserted before
32 |
33 | // third party configs
34 | const rta = ENV.rta || '' //rtrades account : IPFS pinning interface
35 | const rtp = ENV.rtp || '' //rtrades password : IPFS pinning interface
36 |
37 | const ipfshost = ENV.ipfshost || 'ipfs.infura.io' //IPFS upload/download provider host
38 | const ipfsport = ENV.ipfsport || '5001' //IPFS upload/download provider port
39 |
40 | const ipfsLinks = ENV.ipfsLinks
41 | ? ENV.ipfsLinks.split(" ")
42 | : [
43 | "https://ipfs:8080/ipfs/",
44 | "http://localhost:8080/ipfs/",
45 | "https://ipfs.3speak.tv/ipfs/",
46 | "https://infura-ipfs.io/ipfs/",
47 | "https://ipfs.alloyxuast.co.uk/ipfs/",
48 | ];
49 |
50 | const ipfsprotocol = ENV.ipfsprotocol || 'https' //IPFS upload/download protocol
51 | //node market config > 2500 is 25% inflation to node operators, this is currently not used
52 | const bidRate = ENV.BIDRATE || 2500 //
53 |
54 | //HIVE CONFIGS
55 | var startURL = ENV.STARTURL || "https://hive-api.dlux.io/";
56 | var clientURL = ENV.APIURL || "https://hive-api.dlux.io/";
57 | const clients = ENV.clients
58 | ? ENV.clients.split(" ")
59 | : [
60 | "https://api.deathwing.me/",
61 | "https://hive-api.dlux.io/",
62 | "https://rpc.ecency.com/",
63 | "https://hived.emre.sh/",
64 | "https://rpc.ausbit.dev/",
65 | "https://api.hive.blog/",
66 | ];
67 |
68 | //!!!!!!! -- THESE ARE COMMUNITY CONSTANTS -- !!!!!!!!!//
69 | //TOKEN CONFIGS -- ALL COMMUNITY RUNNERS NEED THESE SAME VALUES
70 | const starting_block = 49988008; //from what block does your token start
71 | const prefix = 'dlux_' //Community token name for Custom Json IDs
72 | const TOKEN = 'DLUX' //Token name
73 | const precision = 3 //precision of token
74 | const tag = 'dlux' //the fe.com//@/
75 | const jsonTokenName = 'dlux' //what customJSON in Escrows and sends is looking for
76 | const leader = 'dlux-io' //Default account to pull state from, will post token
77 | const ben = 'dlux-io' //Account where comment benefits trigger token action
78 | const delegation = 'dlux-io' //account people can delegate to for rewards
79 | const delegationWeight = 1000 //when to trigger community rewards with bens
80 | const msaccount = 'dlux-cc' //account controlled by community leaders
81 | const msPubMemo = 'STM5GNM3jpjWh7Msts5Z37eM9UPfGwTMU7Ksats3RdKeRaP5SveR9' //memo key for msaccount
82 | const msPriMemo = '5KDZ9fzihXJbiLqUCMU2Z2xU8VKb9hCggyRPZP37aprD2kVKiuL'
83 | const msmeta = ''
84 | const mainAPI = 'token.dlux.io' //leaders API probably
85 | const mainRender = 'data.dlux.io' //data and render server
86 | const mainFE = 'dlux.io' //frontend for content
87 | const mainIPFS = 'a.ipfs.dlux.io' //IPFS service
88 | const mainICO = 'robotolux' //Account collecting ICO HIVE
89 | const footer = `\n[Find us on Discord](https://discord.gg/Beeb38j)`
90 | const hive_service_fee = 100 //HIVE service fee for transactions in Hive/HBD in centipercents (1% = 100)
91 | const features = {
92 | pob: true, //proof of brain
93 | delegate: true, //delegation
94 | daily: true, // daily post
95 | liquidity: true, //liquidity
96 | ico: true, //ico
97 | inflation: true,
98 | dex: true, //dex
99 | nft: true, //nfts
100 | state: true, //api dumps
101 | claimdrop: false //claim drops
102 | }
103 | const featuresModel = {
104 | claim_id: 'claim',
105 | claim_S: 'Airdrop',
106 | claim_B: false,
107 | claim_json: 'drop',
108 | rewards_id: 'claim',
109 | rewards_S: 'Rewards',
110 | rewards_B: true,
111 | rewards_json: 'claim',
112 | rewardSel: true,
113 | reward2Gov: true,
114 | send_id: 'send',
115 | send_S: 'Send',
116 | send_B: true,
117 | send_json: 'send',
118 | powup_id: 'power_up',
119 | powup_B: true,
120 | pow_val: '',
121 | powdn_id: 'power_down',
122 | powdn_B: true,
123 | powsel_up: true,
124 | govup_id: 'gov_up',
125 | govup_B: true,
126 | gov_val: '',
127 | govsel_up: true,
128 | govdn_id: 'gov_down',
129 | govdn_B: true,
130 | node: {
131 | id: 'node_add',
132 | opts: [{
133 | S: 'Domain',
134 | type: 'text',
135 | info: 'https://no-trailing-slash.com',
136 | json: 'domain',
137 | val: ''
138 | },
139 | {
140 | S: 'DEX Fee Vote',
141 | type: 'number',
142 | info: '500 = .5%',
143 | max: 1000,
144 | min: 0,
145 | json: 'bidRate',
146 | val: ''
147 | },
148 | {
149 | S: 'DEX Max Vote',
150 | type: 'number',
151 | info: '10000 = 100%',
152 | max: 10000,
153 | min: 0,
154 | json: 'dm',
155 | val: ''
156 | },
157 | {
158 | S: 'DEX Slope Vote',
159 | type: 'number',
160 | info: '10000 = 100%',
161 | max: 10000,
162 | min: 0,
163 | json: 'ds',
164 | val: ''
165 | }
166 | ],
167 | }
168 | }
169 | const adverts = [
170 | 'https://camo.githubusercontent.com/954558e3ca2d68e0034cae13663d9807dcce3fcf/68747470733a2f2f697066732e627573792e6f72672f697066732f516d64354b78395548366a666e5a6748724a583339744172474e6b514253376359465032357a3467467132576f50'
171 | ]
172 | const detail = {
173 | name: 'Decentralized Limitless User eXperiences',
174 | symbol: TOKEN,
175 | icon: 'https://www.dlux.io/img/dlux-hive-logo-alpha.svg',
176 | supply:'5% Fixed Inflation, No Cap.',
177 | wp:`https://docs.google.com/document/d/1_jHIJsX0BRa5ujX0s-CQg3UoQC2CBW4wooP2lSSh3n0/edit?usp=sharing`,
178 | ws:`https://www.dlux.io`,
179 | be:`https://hiveblockexplorer.com/`,
180 | text: `DLUX is a Web3.0 technology that is focused on providing distribution of eXtended (Virtual and Augmented) Reality. It supports any browser based applications that can be statically delivered through IPFS. The DLUX Token Architecture is Proof of Stake as a layer 2 technology on the HIVE blockchain to take advantage of free transactions. With the first WYSIWYG VR Builder of any blockchain environment and the first Decentralized Exchange on the Hive Blockchain, DLUX is committed to breaking any boundaries for adoption of world changing technologies.`
181 | }
182 |
183 | //Additionally, on your branch, look closely at dao.js; this is where tokenomics happen and custom status posts are made
184 |
185 | let config = {
186 | username,
187 | active,
188 | msowner,
189 | mspublic,
190 | memoKey,
191 | timeoutContinuous,
192 | timeoutStart,
193 | follow,
194 | NODEDOMAIN,
195 | hookurl,
196 | status,
197 | history,
198 | dbcs,
199 | dbmods,
200 | typeDefs,
201 | mirror,
202 | bidRate,
203 | engineCrank,
204 | port,
205 | pintoken,
206 | pinurl,
207 | clientURL,
208 | startURL,
209 | clients,
210 | acm,
211 | rta,
212 | rtp,
213 | override,
214 | ipfshost,
215 | ipfsprotocol,
216 | ipfsport,
217 | ipfsLinks,
218 | starting_block,
219 | prefix,
220 | leader,
221 | msaccount,
222 | msPubMemo,
223 | msPriMemo,
224 | msmeta,
225 | ben,
226 | adverts,
227 | delegation,
228 | delegationWeight,
229 | TOKEN,
230 | precision,
231 | tag,
232 | mainAPI,
233 | jsonTokenName,
234 | mainFE,
235 | mainRender,
236 | mainIPFS,
237 | mainICO,
238 | detail,
239 | footer,
240 | hive_service_fee,
241 | features,
242 | stream,
243 | mode,
244 | featuresModel
245 | };
246 |
247 | module.exports = config;
248 |
--------------------------------------------------------------------------------
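Everything above is gathered into a single `config` object and exported, so the rest of the code base reads settings through one `require`. A minimal usage sketch (the log lines are illustrative only; `discord.js` and `edb.js` below consume the module the same way):

```js
// Sketch: consuming the exported config object (illustrative, not part of the repo)
const config = require('./config');

console.log(`Token ${config.TOKEN} (${config.precision} decimals), custom_json prefix "${config.prefix}"`);
console.log(`Streaming ${config.stream} blocks from ${config.clientURL}, state starts at block ${config.starting_block}`);
```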
/discord.js:
--------------------------------------------------------------------------------
1 | const { Webhook, MessageBuilder } = require('discord-webhook-node');
2 | const config = require('./config');
3 | const { TXID } = require ('./index')
4 | const hook = new Webhook(config.hookurl);
5 | const fetch = require('node-fetch')
6 |
7 |
8 | exports.contentToDiscord = (author, permlink) => {
9 | let params = [author, permlink];
10 | let method = 'condenser_api.get_content'
11 | let body = {
12 | jsonrpc: "2.0",
13 | method,
14 | params,
15 | id: 1
16 | };
17 | fetch(config.clientURL, {
18 | body: JSON.stringify(body),
19 | headers: {
20 | "Content-Type": "application/x-www-form-urlencoded"
21 | },
22 | method: "POST"
23 | })
24 | .then(j => j.json())
25 | .then(result => {
26 | const r = result.result
27 | const embed = new MessageBuilder()
28 | .setTitle(`New ${config.TOKEN} content!`)
29 | .setAuthor(author, 'https://cdn.discordapp.com/embed/avatars/0.png', `https://${config.mainFE}/@${author}`)
30 | .setURL(`https://${config.mainFE}/${config.tag}/@${author}/${permlink}`)
31 | .addField(r.title, (JSON.parse(r.json_metadata).description || `View this on ${config.mainFE}`), true)
32 | //.addField('Second field', 'this is not inline')
33 | .setColor('#00b0f4')
34 | //.setThumbnail('https://cdn.discordapp.com/embed/avatars/0.png')
35 | //.setDescription('Oh look a description :)')
36 | //.setImage('https://cdn.discordapp.com/embed/avatars/0.png')
37 | //.setFooter('Hey its a footer', 'https://cdn.discordapp.com/embed/avatars/0.png')
38 | .setTimestamp();
39 |
40 | hook.send(embed)
41 | .catch(e => console.log(e))
42 | }).catch(e => { console.log(e) })
43 |
44 | }
45 |
46 | exports.renderNFTtoDiscord = (script, uid, owner, set) => {
47 | const embed = new MessageBuilder()
48 | .setTitle(`New ${set} NFT minted!`)
49 | .setAuthor(owner, 'https://cdn.discordapp.com/embed/avatars/0.png', `https://${config.mainFE}/@${owner}`)
50 | .setURL(`https://${config.mainFE}/@${owner}#inventory/`)
51 | .addField(`${set}:${uid}`, `View this on ${config.mainFE}`, true)
52 | //.addField('Second field', 'this is not inline')
53 | .setColor('#00b0f4')
54 | //.setThumbnail('https://cdn.discordapp.com/embed/avatars/0.png')
55 | //.setDescription('Oh look a description :)')
56 | .setImage(`https://${config.mainRender}/render/${script}/${uid}`)
57 | //.setFooter('Hey its a footer', 'https://cdn.discordapp.com/embed/avatars/0.png')
58 | .setTimestamp();
59 |
60 | hook.send(embed)
61 | .catch(e => console.log(e))
62 |
63 | }
64 |
65 | //exports.contentToDiscord('disregardfiat', 'dlux-development-update-jan-15')
66 |
67 | exports.postToDiscord = (msg, id) => {
68 | if(config.hookurl)hook.send(msg)
69 | if(config.status)TXID.store(msg, id)
70 | }
71 |
72 | //great place to build a feed function to edb
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3'
2 | services:
3 | ipfs:
4 | # docker run -d --name ipfs_host -v $ipfs_staging:/export -v $ipfs_data:/data/ipfs -p 4001:4001 -p 127.0.0.1:8080:8080 -p 127.0.0.1:5001:5001 ipfs/go-ipfs:latest
5 | image: ipfs/kubo:latest
6 | restart: unless-stopped
7 | ports:
8 | - 4001:4001
9 | - 8080:8080
10 | - 5001:5001
11 | volumes:
12 | # - type: bind
13 | # source: ./staging_dir
14 | # target: /export
15 | # - type: bind
16 | # source: ./data_dir
17 | # target: /data/ipfs
18 | - ./staging_dir:/export
19 | # - ./data_dir:/data/ipfs
20 | - ipfs:/data/ipfs
21 | networks:
22 | - honeycomb
23 | honeycomb:
24 | # depends_on:
25 | # - ipfs
26 | build: .
27 | restart: unless-stopped
28 | ports:
29 | - "3001:3001"
30 | environment:
31 | - ipfshost=ipfs
32 | - ipfsprotocol=http
33 | - ipfsport=5001
34 | logging:
35 | options:
36 | max-size: "10m"
37 | max-file: "3"
38 | # stdin_open: true
39 | # tty: true
40 | stdin_open: true
41 | tty: true
42 | networks:
43 | - honeycomb
44 | # node-2:
45 | # image: ipfs/go-ipfs:latest
46 | # build:
47 | # context: private-network
48 | # environment:
49 | # LIBP2P_FORCE_PNET: '1'
50 | # # SWARM_KEY: "/key/swarm/psk/1.0.0/\n/base16/\ne0e7b1394fb6e928eecf2f8db77eaa99d3657684dc939519f285cb902bd93e22"
51 | # volumes:
52 | # - node-2:/data/ipfs
53 | # - ./private-network/.ipfs/swarm.key:/data/ipfs/swarm.key
54 | # - ./private-network/init.sh:/usr/local/bin/start_ipfs
55 | # ports:
56 | # - 8080
57 | # node-3:
58 | # build:
59 | # context: private-network
60 | # environment:
61 | # LIBP2P_FORCE_PNET: '1'
62 | # # SWARM_KEY: "/key/swarm/psk/1.0.0/\n/base16/\ne0e7b1394fb6e928eecf2f8db77eaa99d3657684dc939519f285cb902bd93e22"
63 | # SWARM_PEER: /ip4/172.18.0.2/tcp/4001/ipfs/QmUrp1E4ArW1ZDdFHnQrKfdH8tgGxM38hmPyjXEEoTmfto
64 | # volumes:
65 | # - node-3:/data/ipfs
66 | # - ./private-network/.ipfs/swarm.key:/data/ipfs/swarm.key
67 | # - ./private-network/init.sh:/usr/local/bin/start_ipfs
68 | # ports:
69 | # - 8080
70 | volumes:
71 | ipfs:
72 | # node-2:
73 | # node-3:
74 |
75 | networks:
76 | honeycomb:
77 |
--------------------------------------------------------------------------------
/docker-start.js:
--------------------------------------------------------------------------------
1 | const fetch = require('node-fetch');
2 | const spawn = require('child_process').spawn
3 | ping()
4 | function ping () {
5 | fetch(`http://${process.env.ipfshost}:${process.env.ipfsport}/ping`)
6 | .then(res => res.text())
7 | .then(text => {console.log('Deploying:');spawn('node', ['index.js'], {stdio: 'inherit'})})
8 | .catch(err => {console.log('Waiting for IPFS...');setTimeout(ping, 2000)});
9 | }
--------------------------------------------------------------------------------
/docs/SETUP.md:
--------------------------------------------------------------------------------
1 | ## Set Up Node
2 |
3 | #### Prereqs
4 | * Hive Account with ~100 HP
5 | * Additional Hive Key Pair
6 |
7 | ### (Privex) Docker Deploy
8 | * This will run IPFS. Ensure you select a region-appropriate VPS in SWEDEN or the USA
9 | * A Dual-Core 1GB / 25GB should be fine (Check with specific community for additional requirements)
10 | * Instructions for Ubuntu follow:
11 | * `sudo apt install docker docker-compose` --install dependencies
12 | * `git clone https://github.com/disregardfiat/honeycomb.git` --download this repo
13 | * `cd honeycomb` --change working directory
14 | * Edit your node specifics via `touch .env && nano .env`
15 | * Contents:
16 | ```
17 | account="hiveaccount"
18 | active=5JactivePrivateKey
19 | msowner=5KadditionalPrivateKey
20 | mspublic=STMpublickey
21 | ```
22 | * `sudo docker-compose build` --Build Docker environment
23 | * `sudo docker-compose up` --Deploy Docker environment
24 |
25 | #### nginx setup
26 | * `sudo apt install nginx certbot python3-certbot-nginx`
27 | Select `nginx-full`
28 | `sudo nano /etc/nginx/sites-available/default`
29 | * Enter and save:
30 | ```
31 | server{
32 | server_name location.yourdomain.io;
33 |
34 | location / {
35 | proxy_pass http://127.0.0.1:3001;
36 | proxy_http_version 1.1;
37 | proxy_set_header Upgrade $http_upgrade;
38 | proxy_set_header Connection 'upgrade';
39 | proxy_set_header Host $host;
40 | }
41 | }
42 | ```
43 | * `sudo systemctl reload nginx`
44 | * Ensure your DNS information points to your server and run `sudo certbot`
45 |
46 | ## Build A Token
--------------------------------------------------------------------------------
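The `.env` in SETUP.md lists only the required keys. `config.js` above also reads a number of optional variables; a hedged example of a fuller file (every value below is a placeholder, and anything omitted falls back to the defaults in `config.js`):

```
account="hiveaccount"
active=5JactivePrivateKey
msowner=5KadditionalPrivateKey
mspublic=STMpublickey
# optional overrides read by config.js
domain=https://your-node.yourdomain.io
PORT=3001
mode=verbose
stream=irreversible
discordwebhook=https://discord.com/api/webhooks/yourhook
DATABASE_URL=postgres://user:pass@host:5432/dbname
```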
/edb.js:
--------------------------------------------------------------------------------
1 | const { Pool } = require('pg');
2 | const config = require('./config');
3 | const pool = new Pool({
4 | connectionString: config.dbcs,
5 | ssl: {
6 | rejectUnauthorized: false
7 | }
8 | });
9 |
10 | function getStats(table) {
11 | return new Promise((r, e) => {
12 | pool.query(`SELECT * FROM statssi;`, (err, res) => {
13 | if (err) {
14 | console.log(`Error - Failed to select all from ${table}`);
15 | e(err);
16 | }
17 | else {
18 | r(res.rows);
19 | }
20 | });
21 | })
22 | }
23 |
24 | exports.getPromotedPosts = getPromotedPosts
25 |
26 | function getPromotedPosts(amount, offset) {
27 | let off = offset,
28 | amt = amount
29 | if (!amount) amt = 50
30 | if (!off) off = 0
31 | return new Promise((r, e) => {
32 | pool.query(`SELECT
33 | author,
34 | permlink,
35 | block,
36 | votes,
37 | voteweight,
38 | promote,
39 | paid
40 | FROM
41 | posts
42 | WHERE
43 | promote > 0
44 | ORDER BY
45 | promote DESC
46 | OFFSET ${off} ROWS FETCH FIRST ${amt} ROWS ONLY;`, (err, res) => {
47 | if (err) {
48 | console.log(`Error - Failed to select promoted posts`);
49 | e(err);
50 | }
51 | else {
52 | r(res.rows);
53 | }
54 | });
55 | })
56 | }
57 |
58 | exports.getTrendingPosts = getTrendingPosts
59 |
60 | function getTrendingPosts(amount, offset) {
61 | let off = offset,
62 | amt = amount
63 | if (!amount) amt = 50
64 | if (!off) off = 0
65 | return new Promise((r, e) => {
66 | pool.query(`SELECT
67 | author,
68 | permlink,
69 | block,
70 | votes,
71 | voteweight,
72 | promote,
73 | paid
74 | FROM
75 | posts
76 | WHERE
77 | paid = false
78 | ORDER BY
79 | voteweight DESC
80 | OFFSET ${off} ROWS FETCH FIRST ${amt} ROWS ONLY;`, (err, res) => {
81 | if (err) {
82 | console.log(`Error - Failed to select trending posts`);
83 | e(err);
84 | }
85 | else {
86 | r(res.rows);
87 | }
88 | });
89 | })
90 | }
91 |
92 | exports.getPost = getPost
93 |
94 | function getPost(author, permlink) {
95 | return new Promise((r, e) => {
96 | pool.query(`SELECT * FROM posts WHERE author = '${author}' AND permlink = '${permlink}';`, (err, res) => {
97 | if (err) {
98 | console.log(`Error - Failed to get a post from posts`);
99 | e(err);
100 | }
101 | else {
102 | r(res.rows[0]);
103 | }
104 | });
105 | })
106 | }
107 |
108 | exports.getNewPosts = getNewPosts
109 |
110 | function getNewPosts(amount, offset) {
111 | let off = offset,
112 | amt = amount
113 | if (!amount) amt = 50
114 | if (!off) off = 0
115 | return new Promise((r, e) => {
116 | pool.query(`SELECT author, permlink, block, votes, voteweight, promote, paid FROM posts ORDER BY block DESC OFFSET ${off} ROWS FETCH FIRST ${amt} ROWS ONLY;`, (err, res) => {
117 | if (err) {
118 | console.log(`Error - Failed to select new posts`);
119 | e(err);
120 | }
121 | else {
122 | r(res.rows);
123 | }
124 | });
125 | })
126 | }
127 |
128 | exports.getAuthorPosts = getAuthorPosts
129 |
130 | function getAuthorPosts(author, amount, offset) {
131 | let off = offset,
132 | amt = amount
133 | if (!amount) amt = 50
134 | if (!off) off = 0
135 | return new Promise((r, e) => {
136 | pool.query(`SELECT
137 | author,
138 | permlink,
139 | block,
140 | votes,
141 | voteweight,
142 | promote,
143 | paid
144 | FROM
145 | posts
146 | WHERE
147 | author = '${author}'
148 | ORDER BY
149 | block DESC
150 | OFFSET ${off} ROWS FETCH FIRST ${amt} ROWS ONLY;`, (err, res) => {
151 | if (err) {
152 | console.log(`Error - Failed to select author posts`);
153 | e(err);
154 | }
155 | else {
156 | r(res.rows);
157 | }
158 | });
159 | })
160 | }
161 |
162 |
163 | exports.insertNewPost = insertNewPost
164 |
165 | function insertNewPost(post) { //is good
166 | let record = {
167 | author: post.author,
168 | permlink: post.permlink,
169 | block: post.block,
170 | votes: post.votes || 0,
171 | voteweight: post.voteweight || 0,
172 | promote: post.promote || 0,
173 | paid: post.paid || false,
174 | payout: post.payout || 0,
175 | payout_author: post.payout_author || 0,
176 | linear_weight: post.linear_weight || 0,
177 | voters: post.voters || '',
178 | voters_paid: post.voters_paid || '',
179 | type: post.type,
180 | }
181 | return new Promise((r, e) => {
182 | pool.query(`INSERT INTO posts(author,permlink,block,votes,voteweight,promote,paid,payout,payout_author,linear_weight,voters,voters_paid,type)VALUES($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13)`,
183 | [
184 | record.author,
185 | record.permlink,
186 | record.block,
187 | record.votes,
188 | record.voteweight,
189 | record.promote,
190 | record.paid,
191 | record.payout,
192 | record.payout_author,
193 | record.linear_weight,
194 | record.voters,
195 | record.voters_paid,
196 | record.type
197 | ], (err, res) => {
198 | if (err) {
199 | console.log(`Error - Failed to insert data into posts`);
200 | e(err);
201 | } else {
202 | r(res)
203 | }
204 | });
205 | })
206 | }
207 |
208 | exports.moderate = function(hide, why, author, permlink) {
209 | pool.query(
210 | `UPDATE posts
211 | SET hide = '${hide ? true : false}',
212 | why = '${parseInt(why)}'
213 | WHERE author = '${author}' AND
214 | permlink = '${permlink}';`,
215 | (err, res) => {
216 | console.log(`Updated rating:${author}/${permlink}:`, err ? err : res);
217 | }
218 | )
219 | }
220 |
221 | exports.updateRating = function (author, permlink, rater, rating) {
222 | console.log('update rating: ', {author, permlink, rating, rater})
223 | rating = parseInt(rating)
224 | if (rating >= 1 && rating <= 5) {
225 | getPost(author, permlink).then(post => {
226 | var record = post
227 | var raters = record.raters ? record.raters.split(",") : []
228 | // replace an existing rating from this rater, otherwise append a new one
229 | const idx = raters.findIndex(
230 | s => s.startsWith(`{${rater}:`)
231 | )
232 | if (idx == -1) {
233 | raters.push(`{${rater}:${rating}}`);
234 | } else {
235 | raters[idx] = `{${rater}:${rating}}`;
236 | }
237 | var a = 0, b = 0
238 | for (var i = 0; i < raters.length; i++) {
239 | a += parseInt(raters[i].split(':')[1])
240 | b++
241 | }
242 | var avg = parseFloat(a / b).toFixed(2)
243 | var ratings = raters.length
244 | var raters = raters.join(',')
245 | pool.query(
246 | `UPDATE posts
247 | SET rating = '${avg}',
248 | raters = '${raters}',
249 | ratings = '${ratings}'
250 | WHERE author = '${record.author}' AND
251 | permlink = '${record.permlink}';`,
252 | (err, res) => {
253 | console.log(`Updated rating:${author}/${permlink}:`, err ? err : res);
254 | }
255 | );
256 | })
257 | }
258 | }
259 |
260 | exports.updatePost = updatePost
261 |
262 | function updatePost(post) {
263 | let record = {
264 | author: post.author,
265 | permlink: post.permlink,
266 | block: post.block,
267 | votes: Object.keys(post.votes).length,
268 | voteweight: post.t.totalWeight,
269 | paid: true,
270 | payout: post.paid,
271 | payout_author: post.author_payout,
272 | linear_weight: post.t.linearWeight || 0,
273 | voters: post.voters || '',
274 | voters_paid: post.voters_paid || '',
275 | }
276 | for (v in post.votes) {
277 | record.voters += v + ','
278 | record.voters_paid += post.votes[v].p + ','
279 | }
280 | record.voters = record.voters.substring(0, record.voters.length - 1)
281 | record.voters_paid = record.voters_paid.substring(0, record.voters_paid.length - 1)
282 | return new Promise((r, e) => {
283 | getPost(post.author, post.permlink)
284 | .then(ret => {
285 | pool.query(`UPDATE posts
286 | SET votes = ${record.votes},
287 | voteweight = ${record.voteweight},
288 | paid = ${record.paid},
289 | payout = ${record.payout},
290 | payout_author = ${record.payout_author},
291 | linear_weight = ${record.linear_weight},
292 | voters = '${record.voters}',
293 | voters_paid = '${record.voters_paid}'
294 | WHERE author = '${record.author}' AND
295 | permlink = '${record.permlink}';`, (err, res) => {
296 | if (err) {
297 | console.log(`Error - Failed to insert data into posts`);
298 | e(err);
299 | } else {
300 | console.log(res)
301 | r(res)
302 | }
303 | });
304 | })
305 |
306 | })
307 | }
308 |
309 | exports.updatePostVotes = updatePostVotes
310 |
311 | function updatePostVotes(post) { //live votes
312 | return new Promise((r, e) => {
313 | let votes = Object.keys(post.votes).length,
314 | voteweight = 0,
315 | voters = ''
316 | for (v in post.votes) {
317 | voteweight += post.votes[v].v
318 | voters += v + ','
319 | }
320 | voters = voters.substring(0, voters.length - 1)
321 | pool.query(`UPDATE posts
322 | SET votes = ${votes},
323 | voteweight = ${voteweight},
324 | voters = '${voters}'
325 | WHERE author = '${post.author}' AND
326 | permlink = '${post.permlink}';`, (err, res) => {
327 | if (err) {
328 | console.log(`Error - Failed to insert data into posts`);
329 | e(err);
330 | } else {
331 | r(res)
332 | }
333 | });
334 | })
335 | }
336 |
337 | exports.updateStat = updateStat
338 |
339 | function insertStats(stat) { //is good
340 | let stats = {
341 | string: stat.string,
342 | int: stat.int
343 | }
344 | return new Promise((r, e) => {
345 | pool.query(`INSERT INTO statssi(string,int)VALUES($1,$2)`,
346 | [
347 | stats.string,
348 | stats.int
349 | ], (err, res) => {
350 | if (err) {
351 | console.log(`Error - Failed to insert data into statssi`);
352 | e(err);
353 | } else {
354 | r(res)
355 | }
356 | });
357 | })
358 | }
359 |
360 | function updateStat(stat) { //is good
361 | let record = {
362 | string: stat.string,
363 | int: stat.int
364 | }
365 | return new Promise((r, e) => {
366 | pool.query(`UPDATE statssi
367 | SET int = '${record.int}'
368 | WHERE string = '${record.string}';`, (err, res) => {
369 | if (err) {
370 | insertStats(stat)
371 | .then(ret => {
372 | r(ret)
373 | })
374 | .catch(errr => {
375 | console.log(err, errr)
376 | e(err, errr)
377 | })
378 | } else {
379 | r(res)
380 | }
381 | });
382 | })
383 | }
384 |
385 | exports.updatePromote = updatePromote
386 |
387 | function updatePromote(author, permlink, amt) { //is good
388 | return new Promise((r, e) => {
389 | getPost(author, permlink)
390 | .then(post => {
391 | const amount = post.promote + amt
392 | pool.query(`UPDATE posts
393 | SET promote = '${amount}'
394 | WHERE author = '${author}' AND
395 | permlink = '${permlink}';`, (err, res) => {
396 | if (err) {
397 | insertStats(stat)
398 | .then(ret => {
399 | r(ret)
400 | })
401 | .catch(errr => {
402 | console.log(err, errr)
403 | e(err, errr)
404 | })
405 | } else {
406 | r(res)
407 | }
408 | });
409 | })
410 | })
411 | }
412 |
--------------------------------------------------------------------------------
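Note that the read queries above interpolate `author`, `permlink`, and friends directly into the SQL text, whereas `insertNewPost` already passes values through pg's `$1 … $n` placeholders. A minimal sketch of `getPost` written in that same parameterized style (a hypothetical `getPostSafe`, not a function from the repo):

```js
// Hypothetical parameterized variant of getPost: same query, bound values instead of string interpolation.
function getPostSafe(author, permlink) {
  return new Promise((resolve, reject) => {
    pool.query(
      'SELECT * FROM posts WHERE author = $1 AND permlink = $2;',
      [author, permlink],
      (err, res) => {
        if (err) {
          console.log('Error - Failed to get a post from posts');
          reject(err);
        } else {
          resolve(res.rows[0]); // undefined when no row matches, as in getPost
        }
      }
    );
  });
}
```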
/enforce.js:
--------------------------------------------------------------------------------
1 | const { getPathObj, getPathNum } = require("./getPathObj");
2 | const { store } = require("./index");
3 | const { chronAssign, penalty, add, nodeUpdate, deletePointer, addCol, addGov } = require('./lil_ops')
4 |
5 | function enforce(agent, txid, pointer, block_num) {
6 | console.log('Enforce params:', agent, txid, pointer);
7 | return new Promise((resolve, reject) => {
8 | const Pop = getPathObj(['escrow', agent, txid]);
9 | const Ppointer = getPathObj(['escrow', pointer.id, pointer.acc]);
10 | const PtokenSupply = getPathNum(['stats', 'tokenSupply']);
11 | Promise.all([Pop, Ppointer, PtokenSupply])
12 | .then(r => {
13 | var enforced_op = r[0],
14 | point_to_contract = r[1], s = r[2]; // s = current token supply, used when reducing stats/tokenSupply below
15 | console.log('enforce:', { enforced_op }, 'pointer:', { point_to_contract });
16 | if (Object.keys(enforced_op).length) {
17 | let op = txid.split(":")[1],
18 | id = txid.split(":")[0],
19 | ops = [];
20 | getPathObj(['contracts', point_to_contract.for, point_to_contract.contract])
21 | .then(c => {
22 | var i = 0;
23 | for (item in c) {
24 | i++;
25 | }
26 | var lil_ops = [];
27 | let co = c.co;
28 | if (i) {
29 | switch (op) {
30 | case 'denyA':
31 | getPathObj(['escrow', '.' + c.to, `${c.from}/${c.escrow_id}:denyT`])
32 | .then(toOp => {
33 | chronAssign(block_num + 200, { op: 'denyT', agent: c.to, txid: `${c.from}/${c.escrow_id}:denyT`, acc: pointer.acc, id: pointer.id });
34 | penalty(c.agent, c.col)
35 | .then(col => {
36 | c.recovered = col;
37 | lil_ops = [
38 | add('rn', col),
39 | nodeUpdate(c.agent, 'strike', col)
40 | ];
41 | ops.push({ type: 'put', path: ['escrow', c.to, `${c.from}/${c.escrow_id}:denyT`], data: toOp });
42 | ops.push({ type: 'del', path: ['escrow', c.agent, `${c.from}/${c.escrow_id}:denyA`] });
43 | ops.push({ type: 'del', path: ['escrow', '.' + c.to, `${c.from}/${c.escrow_id}:denyT`] });
44 | ops.push({ type: 'put', path: ['contracts', point_to_contract.for, point_to_contract.contract], data: c });
45 | ops.push({ type: 'put', path: ['feed', `${block_num}:${txid}`], data: `@${c.agent} failed to make a timely transaction and has forfeited ${parseFloat(col / 1000).toFixed(3)} DLUX` });
46 | })
47 | .catch(e => { reject(e); });
48 | ops = [];
49 | })
50 | .catch(e => { reject(e); });
51 | break;
52 | case 'denyT':
53 | penalty(c.to, c.col)
54 | .then(col => {
55 | const returnable = col + c.recovered;
56 | console.log(returnable, col, c.recovered);
57 | ops.push({ type: 'put', path: ['feed', `${block_num}:${txid}`], data: `@${c.to} failed to make a timely transaction and has forfeited ${parseFloat(col / 1000).toFixed(3)} DLUX` });
58 | ops.push({ type: 'del', path: ['contracts', point_to_contract.for, point_to_contract.contract] }); //some more logic here to clean memory... or check if this was denied for collateral reasons
59 | ops.push({ type: 'del', path: ['escrow', c.to, `${c.from}/${c.escrow_id}:denyT`] });
60 | lil_ops = [
61 | deletePointer(pointer.id, pointer.acc),
62 | nodeUpdate(c.to, 'strike', col)
63 | ];
64 | if (col > parseInt(c.col / 4)) {
65 | lil_ops.push(add(c.from, parseInt(c.col / 4)));
66 | lil_ops.push(add('rn', parseInt(col - parseInt(c.col / 4))));
67 | } else if (c.recovered > parseInt(c.col / 4)) {
68 | lil_ops.push(add(c.from, parseInt(c.col / 4)));
69 | lil_ops.push(add('rn', parseInt(col - parseInt(c.col / 4))));
70 | } else if (returnable <= parseInt(c.col / 4)) {
71 | lil_ops.push(add(c.from, returnable));
72 | lil_ops.push(add('rn', parseInt(-c.recovered)));
73 | } else {
74 | lil_ops.push(add(c.from, parseInt(c.col / 4)));
75 | lil_ops.push(add('rn', parseInt(col - c.recovered)));
76 | }
77 | })
78 | .catch(e => { reject(e); });
79 | break;
80 | case 'dispute':
81 | ops.push({ type: 'put', path: ['feed', `${block_num}:${txid}`], data: `@${c.tagent} failed to make a timely transaction and has forfeited collateral` });
82 | ops.push({ type: 'del', path: ['escrow', agent, txid] });
83 | ops.push({ type: 'del', path: ['chrono', c.expire_path] });
84 | ops.push({ type: 'del', path: ['contracts', co, id] });
85 | ops.push({ type: 'put', path: ['stats', 'tokenSupply'], data: s - parseInt(c.escrow / 4) });
86 | lil_ops = [
87 | addGov(c.agent, parseInt(c.escrow / 2)),
88 | add(c.eo, parseInt(c.escrow / 4) - c.fee),
89 | add(c.agent, parseInt(c.fee / 3)),
90 | add('rn', c.fee - parseInt(c.fee / 3)),
91 | addCol(c.agent, -parseInt(c.escrow / 2)),
92 | addCol(c.tagent, -parseInt(c.escrow / 2)),
93 | deletePointer(pointer.id, pointer.acc),
94 | nodeUpdate(c.tagent, 'strike', parseInt(c.escrow / 4))
95 | ]; //strike recorded
96 | break;
97 | case 'buyApproveT':
98 | ops.push({ type: 'put', path: ['feed', `${block_num}:${txid}`], data: `@${c.tagent} failed to make a timely transaction` });
99 | ops.push({ type: 'del', path: ['escrow', agent, txid] });
100 | ops.push({ type: 'del', path: ['chrono', c.expire_path] });
101 | ops.push({ type: 'del', path: ['contracts', co, id] });
102 | ops.push({ type: 'put', path: ['stats', 'tokenSupply'], data: s - parseInt(c.escrow / 4) });
103 | lil_ops = [
104 | addGov(c.agent, parseInt(c.escrow / 2)),
105 | addCol(c.agent, -parseInt(c.escrow / 2)),
106 | addCol(c.tagent, -parseInt(c.escrow / 2)),
107 | add(c.agent, parseInt(c.fee / 3)),
108 | add('rn', c.fee - parseInt(c.fee / 3)),
109 | add(c.eo, parseInt(c.escrow / 4)),
110 | deletePointer(pointer.id, pointer.acc),
111 | nodeUpdate(c.tagent, 'strike', parseInt(c.escrow / 4))
112 | ];
113 | break;
114 | case 'buyApproveA':
115 | ops.push({ type: 'put', path: ['feed', `${block_num}:${txid}`], data: `@${c.agent} failed to make a timely transaction` });
116 | ops.push({ type: 'del', path: ['escrow', agent, txid] });
117 | ops.push({ type: 'del', path: ['chrono', c.expire_path] });
118 | ops.push({ type: 'del', path: ['contracts', co, id] });
119 | ops.push({ type: 'put', path: ['stats', 'tokenSupply'], data: s - parseInt(c.escrow / 4) });
120 | lil_ops = [
121 | addGov(c.tagent, parseInt(c.escrow / 2)),
122 | addCol(c.agent, -parseInt(c.escrow / 2)),
123 | addCol(c.tagent, -parseInt(c.escrow / 2)),
124 | add(c.tagent, parseInt(c.fee / 3)),
125 | add('rn', c.fee - parseInt(c.fee / 3)),
126 | add(c.eo, parseInt(c.escrow / 4)),
127 | deletePointer(pointer.id, pointer.acc),
128 | nodeUpdate(c.agent, 'strike', parseInt(c.escrow / 4))
129 | ];
130 | break;
131 | case 'listApproveT':
132 | ops.push({ type: 'put', path: ['feed', `${block_num}:${txid}`], data: `@${c.tagent} failed to make a timely transaction` });
133 | ops.push({ type: 'del', path: ['escrow', agent, txid] });
134 | ops.push({ type: 'del', path: ['chrono', c.expire_path] });
135 | ops.push({ type: 'del', path: ['contracts', co, id] });
136 | ops.push({ type: 'put', path: ['stats', 'tokenSupply'], data: s - parseInt(c.escrow / 4) });
137 | lil_ops = [
138 | addGov(c.agent, parseInt(c.escrow / 2)),
139 | add(c.eo, parseInt(c.escrow / 4)),
140 | add(c.agent, parseInt(c.fee / 3)),
141 | add('rn', c.fee - parseInt(c.fee / 3)),
142 | addCol(c.agent, -parseInt(c.escrow / 2)),
143 | addCol(c.tagent, -parseInt(c.escrow / 2)),
144 | deletePointer(pointer.id, pointer.acc),
145 | nodeUpdate(c.tagent, 'strike', parseInt(c.escrow / 4))
146 | ];
147 | break;
148 | case 'listApproveA':
149 | ops.push({ type: 'put', path: ['feed', `${block_num}:${txid}`], data: `@${c.agent} failed to make a timely transaction` });
150 | ops.push({ type: 'del', path: ['escrow', agent, txid] });
151 | ops.push({ type: 'del', path: ['chrono', c.expire_path] });
152 | ops.push({ type: 'del', path: ['contracts', co, id] });
153 | ops.push({ type: 'put', path: ['stats', 'tokenSupply'], data: s - parseInt(c.escrow / 4) });
154 | lil_ops = [
155 | addGov(c.tagent, parseInt(c.escrow / 2)),
156 | add(c.eo, parseInt(c.escrow / 4)),
157 | add(c.tagent, parseInt(c.fee / 3)),
158 | add('rn', c.fee - parseInt(c.fee / 3)),
159 | addCol(c.agent, -parseInt(c.escrow / 2)),
160 | addCol(c.tagent, -parseInt(c.escrow / 2)),
161 | deletePointer(pointer.id, pointer.acc),
162 | nodeUpdate(c.agent, 'strike', parseInt(c.escrow / 4))
163 | ];
164 | break;
165 | case 'release':
166 | ops.push({ type: 'put', path: ['feed', `${block_num}:${txid}`], data: `@${c.agent} failed to make a timely transaction and has forfeited collateral` });
167 | ops.push({ type: 'del', path: ['escrow', agent, txid] });
168 | ops.push({ type: 'del', path: ['chrono', c.expire_path] });
169 | ops.push({ type: 'del', path: ['contracts', co, id] });
170 | ops.push({ type: 'put', path: ['stats', 'tokenSupply'], data: s - parseInt(c.escrow / 4) });
171 | lil_ops = [
172 | addGov(c.tagent, parseInt(c.escrow / 2)),
173 | add(c.eo, parseInt(c.escrow / 4)),
174 | add(c.tagent, parseInt(c.fee / 3)),
175 | add('rn', c.fee - parseInt(c.fee / 3)),
176 | deletePointer(pointer.id, pointer.acc),
177 | nodeUpdate(c.agent, 'strike', parseInt(c.escrow / 4))
178 | ];
179 | break;
180 | case 'transfer':
181 | ops.push({ type: 'put', path: ['feed', `${block_num}:${txid}`], data: `@${c.tagent} failed to make a timely transaction and has forfeited collateral` });
182 | ops.push({ type: 'del', path: ['escrow', agent, txid] });
183 | ops.push({ type: 'del', path: ['chrono', c.expire_path] });
184 | ops.push({ type: 'del', path: ['contracts', co, id] });
185 | ops.push({ type: 'put', path: ['stats', 'tokenSupply'], data: s - parseInt(c.escrow / 2) });
186 | lil_ops = [
187 | add(c.eo, parseInt(c.escrow / 2)),
188 | add('rn', parseInt(c.fee / 3)),
189 | addCol(c.tagent, -parseInt(c.escrow)),
190 | deletePointer(pointer.id, pointer.acc),
191 | nodeUpdate(c.tagent, 'strike', parseInt(c.escrow / 4))
192 | ];
193 | break;
194 | case 'cancel':
195 | ops.push({ type: 'put', path: ['feed', `${block_num}:${txid}`], data: `@${c.tagent} failed to make a timely transaction and has forfeited collateral` });
196 | ops.push({ type: 'del', path: ['escrow', agent, txid] });
197 | ops.push({ type: 'del', path: ['chrono', c.expire_path] });
198 | ops.push({ type: 'del', path: ['contracts', co, id] });
199 | ops.push({ type: 'put', path: ['stats', 'tokenSupply'], data: s - parseInt(c.escrow / 4) });
200 | lil_ops = [
201 | addGov(c.agent, parseInt(c.escrow / 2)),
202 | add(c.eo, parseInt(c.escrow / 4)),
203 | addCol(c.agent, -parseInt(c.escrow / 2)),
204 | addCol(c.tagent, -parseInt(c.escrow / 2)),
205 | deletePointer(pointer.id, pointer.acc),
206 | nodeUpdate(c.tagent, 'strike', parseInt(c.escrow / 4))
207 | ];
208 | break;
209 | default:
210 | console.log(`Unknown Op: ${op}`);
211 | resolve();
212 | }
213 | }
214 | waitfor(lil_ops)
215 | .then(empty => {
216 | store.batch(ops, [resolve, reject]);
217 | })
218 | .catch(e => { reject(e); });
219 | })
220 | .catch(e => {
221 | reject(e);
222 | });
223 | } else {
224 | resolve();
225 | }
226 | })
227 | .catch(e => {
228 | reject(e);
229 | });
230 | });
231 | }
232 | exports.enforce = enforce;
233 |
234 | function waitfor(promises_array) {
235 | return new Promise((resolve, reject) => {
236 | Promise.all(promises_array)
237 | .then(r => {
238 | for (i = 0; i < r.length; i++) {
239 | console.log(r[i])
240 | if (r[i].consensus) {
241 | plasma.consensus = r[i].consensus
242 | }
243 | }
244 | resolve(1)
245 | })
246 | .catch(e => { reject(e) })
247 | })
248 | }
--------------------------------------------------------------------------------
/getPathObj.js:
--------------------------------------------------------------------------------
1 | const { store } = require("./index");
2 |
3 | function getPathObj(path) {
4 | return new Promise(function(resolve, reject) {
5 | store.get(path, function(err, obj) {
6 | if (err) {
7 | console.log(path)
8 | resolve({});
9 | } else {
10 | resolve(obj);
11 | }
12 | });
13 | });
14 | }
15 | exports.getPathObj = getPathObj;
16 |
17 | function getPathNum(path) {
18 | return new Promise(function(resolve, reject) {
19 | store.get(path, function(err, obj) {
20 | if (err) {
21 | reject(err);
22 | } else {
23 | if (typeof obj != 'number') {
24 | resolve(0);
25 | } else {
26 | resolve(obj);
27 | }
28 | }
29 | });
30 | });
31 | }
32 | exports.getPathNum = getPathNum;
33 |
34 | function getPathSome(path, arg) {
35 | return new Promise(function(resolve, reject) {
36 | store.someChildren(path, arg, function(err, obj) {
37 | if (err) {
38 | reject(err);
39 | resolve({})
40 | } else {
41 | resolve(obj);
42 | }
43 | });
44 | });
45 | }
46 | exports.getPathSome = getPathSome;
47 |
48 | exports.deleteObjs = (paths) => {
49 | return new Promise((resolve, reject) => {
50 | var ops = [];
51 | for (i = 0; i < paths.length; i++) {
52 | ops.push({ type: 'del', path: paths[i] });
53 | }
54 | store.batch(ops, [resolve, reject, paths.length]);
55 | })
56 | }
57 |
--------------------------------------------------------------------------------
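These helpers are thin promise wrappers around `store.get`, with `getPathObj` defaulting to `{}` and `getPathNum` to `0`; callers such as `enforce.js` above gather several paths with `Promise.all`. A small usage sketch (the `snapshot` helper and its paths are illustrative):

```js
const { getPathObj, getPathNum } = require('./getPathObj');

// Illustrative helper: read one account's balance and the token supply in parallel.
function snapshot(account) {
  const Pbal = getPathNum(['balances', account]);       // resolves 0 if the path is unset
  const Psupply = getPathNum(['stats', 'tokenSupply']); // resolves 0 if the path is unset
  return Promise.all([Pbal, Psupply])
    .then(([balance, supply]) => ({ account, balance, supply }));
}
```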
/hive.js:
--------------------------------------------------------------------------------
1 | const config = require('./config.js');
2 | const { hiveClient } = require('./index');
3 |
4 | const Hive = {
5 | getOwners: function (account) {
6 | return new Promise(function (resolve, reject) {
7 | hiveClient.api.setOptions({ url: config.startURL });
8 | hiveClient.api.getAccounts([account], function (err, result) {
9 | hiveClient.api.setOptions({ url: config.clientURL });
10 | if (err) reject(err);
11 | else resolve(result[0].active.account_auths);
12 | });
13 | });
14 | },
15 | getAccounts: function (accounts) {
16 | return new Promise(function (resolve, reject) {
17 | hiveClient.api.setOptions({ url: config.startURL });
18 | hiveClient.api.getAccounts(accounts, function (err, result) {
19 | hiveClient.api.setOptions({ url: config.clientURL });
20 | if (err) reject(err);
21 | else resolve(result);
22 | });
23 | });
24 | },
25 | getRecentReport: function (account, walletOperationsBitmask) {
26 | return new Promise(function (resolve, reject) {
27 | hiveClient.api.setOptions({ url: "https://api.deathwing.me/" });
28 | hiveClient.api.getAccountHistory(
29 | account,
30 | -1,
31 | 100,
32 | ...walletOperationsBitmask,
33 | function (err, result) {
34 | hiveClient.api.setOptions({ url: config.clientURL });
35 | if (err) { reject(err); return; } // stop here so a missing result is never read below
36 | // filter the account history for this community's report operations
37 | 
38 | let ebus = result.filter(
39 | (tx) => tx[1].op[1].id === `${config.prefix}report`
40 | ),
41 | recents = [];
42 | for (i = ebus.length - 1; i >= 0; i--) {
43 | if (
44 | JSON.parse(ebus[i][1].op[1].json).hash &&
45 | parseInt(JSON.parse(ebus[i][1].op[1].json).block) >
46 | parseInt(config.override)
47 | ) {
48 | recents.push([
49 | JSON.parse(ebus[i][1].op[1].json).hash,
50 | JSON.parse(ebus[i][1].op[1].json).block,
51 | ]);
52 | }
53 | }
54 | resolve(recents.shift());
55 | }
56 | );
57 | });
58 | },
59 | };
60 |
61 | exports.Hive = Hive
--------------------------------------------------------------------------------
/ipfsSaveState.js:
--------------------------------------------------------------------------------
1 | const { ipfs } = require("./index");
2 |
3 | exports.ipfsSaveState = (blocknum, buffer, ipfsc, tries) => {
4 | return new Promise((resolve, reject) => {
5 | if (tries) console.log("Retry IPFS Save:", tries);
6 | ipfs.add(buffer, (err, ipfs_return) => {
7 | if (!err) {
8 | var hash = "";
9 | try {
10 | hash = ipfs_return[0].hash;
11 | } catch (e) {
12 | console.log(e);
13 | }
14 | console.log(blocknum + `:Saved: ${hash}`);
15 | resolve({
16 | hashLastIBlock: hash,
17 | hashBlock: blocknum,
18 | });
19 | } else {
20 | reject(err);
21 | /*
22 | cycleipfs(cycle++)
23 | if (cycle >= 25) {
24 | cycle = 0;
25 | return;
26 | }
27 | */
28 | }
29 | });
30 | });
31 | };
32 |
33 | exports.ipfsPeerConnect = (peerid) => {
34 | return new Promise((resolve, reject) => {
35 | //ipfs.swarm.addrs().then((addrs) => {console.log(addrs)})
36 | ipfs.swarm.connect(`/p2p/${peerid}`, (err, res) => {
37 | if (res) resolve(res.Strings[0]);
38 | if (err) {
39 | resolve(`Failed to connect to ${peerid}`);
40 | }
41 | });
42 | });
43 | };
44 |
--------------------------------------------------------------------------------
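`ipfsSaveState` adds a serialized state buffer to IPFS and resolves with the hash and block number it was saved at. A hypothetical call site (the block number and state object are placeholders):

```js
const { ipfsSaveState } = require('./ipfsSaveState');

// Hypothetical usage: pin a serialized state snapshot and log the returned hash.
ipfsSaveState(50000000, Buffer.from(JSON.stringify({ balances: {}, stats: {} })))
  .then(({ hashLastIBlock, hashBlock }) => {
    console.log(`State for block ${hashBlock} saved as ${hashLastIBlock}`);
  })
  .catch(console.error);
```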
/lil_ops.js:
--------------------------------------------------------------------------------
1 | const { store } = require('./index')
2 | const { getPathObj, getPathNum } = require('./getPathObj')
3 | const crypto = require('crypto');
4 | const bs58 = require('bs58');
5 | const hashFunction = Buffer.from('12', 'hex');
6 | const stringify = require('json-stable-stringify');
7 | const { postToDiscord } = require('./discord');
8 | const config = require('./config');
9 |
10 | const burn = (amount) => {
11 | return new Promise((resolve, reject) => {
12 | getPathNum(['stats', 'tokenSupply'])
13 | .then(sup => {
14 | store.batch([{ type: 'put', path: ['stats', 'tokenSupply'], data: sup - amount }], [resolve, reject, 1])
15 | })
16 | })
17 | }
18 | exports.burn = burn
19 | const forceCancel = (rate, type, block_num) => {
20 | return new Promise((resolve, reject) => {
21 | const price = parseFloat(rate)
22 | let Ps = getPathObj(['dex', type, 'sellOrders'])
23 | let Pb = getPathObj(['dex', type, 'buyOrders'])
24 | Promise.all([Ps, Pb])
25 | .then(s => {
26 | let gone = 0
27 | for (o in s[0]) {
28 | if (parseFloat(o.split(":")[0]) < (price * .6)) {
29 | gone++
30 | release(o.from, o.split(":")[1], block_num)
31 | } else if (parseFloat(o.split(":")[0]) > (price * 1.4)) {
32 | gone++
33 | release(o.from, o.split(":")[1], block_num)
34 | }
35 | }
36 | for (o in s[1]) {
37 | if (parseFloat(o.split(":")[0]) < (price * .6)) {
38 | gone++
39 | release(o.from, o.split(":")[1], block_num)
40 | } else if (parseFloat(o.split(":")[0]) > (price * 1.4)) {
41 | gone++
42 | release(o.from, o.split(":")[1], block_num)
43 | }
44 | }
45 | resolve(gone)
46 | })
47 | .catch(e => { reject(e) })
48 | })
49 | }
50 | exports.forceCancel = forceCancel
51 |
52 | const add = (node, amount) => {
53 | return new Promise((resolve, reject) => {
54 | store.get(['balances', node], function(e, a) {
55 | if (!e) {
56 | console.log(amount + ' to ' + node)
57 | const a2 = typeof a != 'number' ? amount : a + amount
58 | console.log('final balance ' +a2)
59 | store.batch([{ type: 'put', path: ['balances', node], data: a2 }], [resolve, reject, 1])
60 | } else {
61 | console.log(e)
62 | }
63 | })
64 | })
65 | }
66 | exports.add = add
67 |
68 | const addc = (node, amount) => {
69 | return new Promise((resolve, reject) => {
70 | store.get(['cbalances', node], function(e, a) {
71 | if (!e) {
72 | console.log(amount + ' to ' + node)
73 | const a2 = typeof a != 'number' ? amount : a + amount
74 | console.log('final balance ' +a2)
75 | store.batch([{ type: 'put', path: ['cbalances', node], data: a2 }], [resolve, reject, 1])
76 | } else {
77 | console.log(e)
78 | }
79 | })
80 | })
81 | }
82 | exports.addc = addc
83 |
84 | const addMT = (path, amount) => {
85 | return new Promise((resolve, reject) => {
86 | store.get(path, function(e, a) {
87 | if (!e) {
88 | const a2 = typeof a != 'number' ? parseInt(amount) : parseInt(a) + parseInt(amount)
89 | console.log(`MTo:${a},add:${amount},final:${a2}`, )
90 | store.batch([{ type: 'put', path, data: a2 }], [resolve, reject, 1])
91 | } else {
92 | console.log(e)
93 | }
94 | })
95 | })
96 | }
97 | exports.addMT = addMT
98 |
99 | const addCol = (node, amount) => {
100 | return new Promise((resolve, reject) => {
101 | store.get(['col', node], function(e, a) {
102 | if (!e) {
103 | const a2 = typeof a != 'number' ? amount : a + amount
104 | console.log({ node, a })
105 | store.batch([{ type: 'put', path: ['col', node], data: a2 }], [resolve, reject, 1])
106 | } else {
107 | console.log(e)
108 | }
109 | })
110 | })
111 | }
112 | exports.addCol = addCol
113 |
114 | const addGov = (node, amount) => {
115 | return new Promise((resolve, reject) => {
116 | store.get(['gov', node], function(e, a) {
117 | if (!e) {
118 | const a2 = typeof a != 'number' ? amount : a + amount
119 | console.log({ node, a })
120 | store.batch([{ type: 'put', path: ['gov', node], data: a2 }], [resolve, reject, 1])
121 | } else {
122 | console.log(e)
123 | }
124 | })
125 | })
126 | }
127 | exports.addGov = addGov
128 |
129 | const deletePointer = (escrowID, user) => {
130 | return new Promise((resolve, reject) => {
131 | const escrow_id = typeof escrowID == 'string' ? escrowID : escrowID.toString()
132 | store.get(['escrow', escrow_id], function(e, a) {
133 | if (!e) {
134 | var found = false
135 | const users = Object.keys(a)
136 | for (i = 0; i < users.length; i++) {
137 | if (user == users[i]) {
138 | found = true
139 | break
140 | }
141 | }
142 | if (found && users.length == 1) {
143 | store.batch([{ type: 'del', path: ['escrow', escrow_id] }], [resolve, reject, users.length])
144 | } else if (found) {
145 | store.batch([{ type: 'del', path: ['escrow', escrow_id, user] }], [resolve, reject, users.length])
146 | }
147 | }
148 | })
149 | })
150 | }
151 | exports.deletePointer = deletePointer
152 |
153 | const credit = (node) => {
154 | return new Promise((resolve, reject) => {
155 | getPathNum(['markets', 'node', node, 'wins'])
156 | .then(a => {
157 | store.batch([{ type: 'put', path: ['markets', 'node', node, 'wins'], data: a + 1 }], [resolve, reject, 1])
158 | })
159 | .catch(e => {
160 | reject(e)
161 | })
162 | })
163 | }
164 | exports.credit = credit
165 |
166 |
167 | const nodeUpdate = (node, op, val) => {
168 | return new Promise((resolve, reject) => {
169 | store.get(['markets', 'node', node], function(e, a) {
170 | if (!e) {
171 | if (!a.strikes)
172 | a.strikes = 0
173 | if (!a.burned)
174 | a.burned = 0
175 | if (!a.moved)
176 | a.moved = 0
177 | switch (op) {
178 | case 'strike':
179 | a.strikes++
180 | a.burned += val
181 | break
182 | case 'ops':
183 | a.escrows++
184 | a.moved += val
185 | break
186 | default:
187 | }
188 | store.batch([{ type: 'put', path: ['markets', 'node', node], data: a }], [resolve, reject, 1])
189 | } else {
190 | console.log(e)
191 | resolve()
192 | }
193 | })
194 | })
195 | }
196 | exports.nodeUpdate = nodeUpdate
197 |
198 | const penalty = (node, amount) => {
199 | console.log('penalty: ', { node, amount })
200 | return new Promise((resolve, reject) => {
201 | const pts = getPathNum(['gov', node])
202 | Promise.all([pts]).then(r => {
203 | var a2 = r[0]
204 | var newBal = a2 - amount
205 | if (newBal < 0) { newBal = 0 }
206 | const forfeit = a2 - newBal
207 | var ops = [{ type: 'put', path: ['gov', node], data: newBal }]
208 | nodeUpdate(node, 'strike', amount)
209 | .then(empty => {
210 | store.batch(ops, [resolve, reject, forfeit])
211 | })
212 | .catch(e => { reject(e) })
213 | }).catch(e => {
214 | reject(e)
215 | })
216 | })
217 | }
218 | exports.penalty = penalty
219 |
220 | const chronAssign = (block, op) => {
221 | return new Promise((resolve, reject) => {
222 | const t = block + ':' + hashThis(stringify(op))
223 | store.batch([{ type: 'put', path: ['chrono', t], data: op }], [resolve, reject, t])
224 | })
225 | }
226 | exports.chronAssign = chronAssign
227 |
228 | function hashThis(data) {
229 | const digest = crypto.createHash('sha256').update(data).digest()
230 | const digestSize = Buffer.from(digest.byteLength.toString(16), 'hex')
231 | const combined = Buffer.concat([hashFunction, digestSize, digest])
232 | const multihash = bs58.encode(combined)
233 | return multihash.toString()
234 | }
235 | exports.hashThis = hashThis
236 |
237 | function isEmpty(obj) {
238 | for (var key in obj) {
239 | if (obj.hasOwnProperty(key)) return false;
240 | }
241 | return true
242 | }
243 | exports.isEmpty = isEmpty;
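244 |
245 | // Illustrative sketch (not part of the original module): hashThis produces a
246 | // base58 multihash (0x12 sha2-256 prefix + digest length + digest, i.e. a
247 | // "Qm..." style string), and chronAssign keys a scheduled op under
248 | // ['chrono', `<block>:<hashThis(op)>`], resolving with that key. The block
249 | // number and payload below are hypothetical examples.
250 | /*
251 | const { hashThis, chronAssign } = require('./lil_ops')
252 | const op = { op: 'power_down', amount: 1000, by: 'alice' }
253 | console.log(hashThis(JSON.stringify(op))) // multihash string, e.g. Qm...
254 | chronAssign(65432100, op).then(key => console.log('scheduled at', key))
255 | */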
--------------------------------------------------------------------------------
/msa.js:
--------------------------------------------------------------------------------
1 | const { store, hiveClient } = require('./index')
2 | const { getPathObj } = require('./getPathObj')
3 | const config = require('./config')
4 | const stringify = require('json-stable-stringify');
5 | //const privateKey = hiveClient.PrivateKey.fromString(config.msprivatekey);
6 |
7 |
8 | exports.consolidate = (num, plasma, bh, owner) => {
9 | return new Promise((resolve, reject) => {
10 | var query = 'msa'
11 | if(owner == 'owner')query = 'mso'
12 | const queryf = query == 'msa' ? 'mss' : 'msso'
13 | const sel_key = query == 'msa' ? config.active : config.msowner
14 | store.get([query], (err, result) => {
15 | if (err || Object.keys(result).length === 0) {
16 | resolve('NONE')
17 | } else {
18 | let join = {},
19 | ops = []
20 | for (var item in result) {
21 | result[item] = JSON.parse(result[item])
22 | if (join[result[item][1].to]){
23 | join[result[item][1].to] = join[result[item][1].to] + ',' + item
24 | } else {
25 | join[result[item][1].to] = item
26 | }
27 | }
28 | for (var account in join){
29 | if(join[account].split(',').length > 1){
30 | let memohive = '',
31 | memohbd = '',
32 | hive = 0,
33 | hbd = 0,
34 | items = join[account].split(',')
35 | for (var item in items){
36 | if(result[items[item]][1].amount.split(' ')[1] == "HIVE"){
37 | hive = hive + parseInt(parseFloat(result[items[item]][1].amount.split(' ')[0])*1000)
38 | memohive = memohive + result[items[item]][1].memo + ','
39 | } else {
40 | hbd = hbd + parseInt(parseFloat(result[items[item]][1].amount.split(' ')[0])*1000)
41 | memohbd = memohbd + result[items[item]][1].memo + ','
42 | }
43 | delete result[items[item]]
44 | //ops.push({type: 'del', path:['msa', items[item]]})
45 | }
46 | memohbd += `hbd:${num}`
47 | memohive += `hive:${num}`
48 | if(hive){
49 | const transfer = [
50 | "transfer",
51 | {
52 | "from": config.msaccount,
53 | "to": account,
54 | "amount": parseFloat(hive/1000).toFixed(3) + ' HIVE',
55 | "memo": memohive
56 | }
57 | ]
58 | result[`${account}:hive:${num}`] = transfer
59 | }
60 | if(hbd){
61 | const transfer = [
62 | "transfer",
63 | {
64 | "from": config.msaccount,
65 | "to": account,
66 | "amount": parseFloat(hbd/1000).toFixed(3) + ' HBD',
67 | "memo": memohbd
68 | }
69 | ]
70 | result[`${account}:hbd:${num}`] = transfer
71 | }
72 | }
73 | }
74 | ops.push({type: 'del', path: [query]})
75 | let txs = []
76 | for (var tx in result){
77 | txs.push(result[tx])
78 | }
79 | let sig = {
80 | block: num,
81 | sig: ''
82 | },
83 | now = Date.parse(bh.timestamp + '.000Z'),
84 | op = {
85 | ref_block_num: bh.block_number & 0xffff,
86 | ref_block_prefix: Buffer.from(bh.block_id, 'hex').readUInt32LE(4),
87 | expiration: new Date(now + 300000).toISOString().slice(0, -5),
88 | operations: txs,
89 | extensions: [],
90 | }
91 | ops.push({type: 'put', path: [queryf, `${num}`], data: stringify(op)})
92 | if(config.msowner && config.active && txs.length){
93 | const stx = hiveClient.auth.signTransaction(op, [sel_key])
94 | sig.sig = stx.signatures[0]
95 | }
96 | store.batch(ops, [resolve, reject, sig])
97 | }
98 | })
99 | })
100 | }
101 |
102 | exports.osign = (num, type, missed, bh) => {
103 | return new Promise((resolve, reject) => {
104 | if(bh) {
105 | let Pmissed = getPathObj([type, `${type == 'mso' ? missed[0] : missed[0].replace(':sigs', '')}`]),
106 | Pstats = getPathObj(['stats'])
107 | Promise.all([Pmissed, Pstats]).then(mem => {
108 | let sig = {
109 | block: num,
110 | sig: ''
111 | },
112 | obj = typeof mem[0] == 'string' ? JSON.parse(mem[0]) : mem[0],
113 | ops = [],
114 | now = Date.parse(bh.timestamp + '.000Z'),
115 | op = {
116 | ref_block_num: bh.block_number & 0xffff,
117 | ref_block_prefix: Buffer.from(bh.block_id, 'hex').readUInt32LE(4),
118 | expiration: new Date(now + 300000).toISOString().slice(0, -5),
119 | operations: obj.length ? [obj] : obj.operations,
120 | extensions: [],
121 | }
122 | for(var i = 0; i < missed.length; i++){
123 | ops.push({type:'del', path:[type, `${missed[i]}`]})
124 | }
125 | if(op.operations)ops.push({type: 'put', path: ['msso', `${num}`], data: stringify(op)})
126 | if(op.operations && mem[1].ms.active_account_auths[config.username] && config.msowner){
127 | const stx = hiveClient.auth.signTransaction(op, [config.msowner])
128 | sig.sig = stx.signatures[0]
129 | }
130 | store.batch(ops, [resolve, reject, sig])
131 |
132 | })
133 | } else {
134 | console.log('no BH')
135 | resolve('No Sig')
136 | }
137 |
138 | })
139 | }
140 |
141 | exports.sign = (num, plasma, missed, bh) => {
142 | return new Promise((resolve, reject) => {
143 | if(bh){
144 | let Pmissed = getPathObj(['mss', `${missed}`]),
145 | Pstats = getPathObj(['stats'])
146 | Promise.all([Pmissed, Pstats]).then(mem => {
147 | let sig = {
148 | block: num,
149 | sig: ''
150 | },
151 | obj = JSON.parse(mem[0]),
152 | ops = [],
153 | now = Date.parse(bh.timestamp + '.000Z'),
154 | op = {
155 | ref_block_num: bh.block_number & 0xffff,
156 | ref_block_prefix: Buffer.from(bh.block_id, 'hex').readUInt32LE(4),
157 | expiration: new Date(now + 300000).toISOString().slice(0, -5),
158 | operations: obj.operations,
159 | extensions: [],
160 | }
161 | ops.push({type:'del', path:['mss', `${missed}`]})
162 | ops.push({type:'del', path:['mss', `${missed}:sigs`]})
163 | ops.push({type: 'put', path: ['mss', `${num}`], data: stringify(op)})
164 | if(mem[1].ms.active_account_auths[config.username] && config.active){
165 | const stx = hiveClient.auth.signTransaction(op, [config.active])
166 | sig.sig = stx.signatures[0]
167 | }
168 | store.batch(ops, [resolve, reject, sig])
169 |
170 | })
171 | } else {
172 | console.log('no BH')
173 | resolve('No Sig')
174 | }
175 |
176 | })
177 | }
178 |
179 | /*
180 | exports.createAccount = (creator, account) => {
181 | return new Promise((resolve, reject) => {
182 | if (creator = config.username){
183 | var ops = []
184 | const op = [
185 | "create_claimed_account",
186 | {
187 | "creator": config.username,
188 | "new_account_name": "dlux-cc",
189 | "owner": {
190 | "weight_threshold": 2,
191 | "account_auths": [],
192 | "key_auths": [
193 | [
194 | "STM8TPTJXiCbGaEhAheXxQqbX4isq3UWiPuQBnHLmCKpmmNXhu31m",
195 | 1
196 | ],
197 | [
198 | "STM7Hgi4pjf5e7u6oKLdhWfgForEVikzvpkK5ejdaMzAzH6dWAtAD",
199 | 1
200 | ],
201 | [
202 | "STM5Rp1fWQMS7tAPVqatg8B22faeJGcKkfsez3mgUwGZPE9aqWd6X",
203 | 1
204 | ]
205 | ]
206 | },
207 | "active": {
208 | "weight_threshold": 2,
209 | "account_auths": [
210 | [
211 | "disregardfiat",
212 | 1
213 | ],
214 | [
215 | "dlux-io",
216 | 1
217 | ],
218 | [
219 | "markegiles",
220 | 1
221 | ]
222 | ],
223 | "key_auths": []
224 | },
225 | "posting": {
226 | "weight_threshold": 1,
227 | "account_auths": [
228 | [
229 | "disregardfiat",
230 | 1
231 | ],
232 | [
233 | "dlux-io",
234 | 1
235 | ],
236 | [
237 | "markegiles",
238 | 1
239 | ]
240 | ],
241 | "key_auths": []
242 | },
243 | "memo_key": "STM5se9o2oZwY7ztpo2scyvf12RR41zaYa6rozBtetwfr1DmH1J5k",
244 | "json_metadata": "{}"
245 | }
246 | ]
247 | ops.push(op)
248 | hiveClient.broadcast.send({
249 | extensions: [],
250 | operations: ops}, [config.active], (err, result) => {
251 | console.log(err, result);
252 | });
253 | } else {
254 | resolve('Not Me')
255 | }
256 | })
257 | }
258 |
259 | */
260 |
261 | exports.updateAccount = (accounts) => {
262 | return new Promise((resolve, reject) => {
263 | hiveClient.broadcast.accountCreate(wif, fee, creator, newAccountName, owner, active, posting, memoKey, jsonMetadata, function(err, result) {
264 | console.log(err, result);
265 | });
266 |
267 | })
268 | }
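269 |
270 | // Illustrative sketch (not part of the original module): how consolidate/sign/
271 | // osign derive the unsigned transaction header from a Hive block header. The
272 | // block values below are hypothetical; the arithmetic mirrors the code above.
273 | /*
274 | const bh = {
275 |   block_number: 65432100,
276 |   block_id: '03e66a24a1b2c3d4e5f60718293a4b5c6d7e8f90', // hypothetical 20-byte id
277 |   timestamp: '2023-01-01T00:00:00'
278 | }
279 | const now = Date.parse(bh.timestamp + '.000Z')
280 | const header = {
281 |   ref_block_num: bh.block_number & 0xffff, // low 16 bits -> 27172
282 |   ref_block_prefix: Buffer.from(bh.block_id, 'hex').readUInt32LE(4), // bytes 4-7, little-endian -> 3569595041
283 |   expiration: new Date(now + 300000).toISOString().slice(0, -5) // +5 minutes -> '2023-01-01T00:05:00'
284 | }
285 | */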
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "honeycomb",
3 | "version": "1.0.0",
4 | "description": "A Honeycomb for Hive Layer 2",
5 | "main": "index.js",
6 | "scripts": {
7 | "test": "mocha || true",
8 | "start": "node index.js",
9 | "lint": "eslint"
10 | },
11 | "keywords": [
12 | "dlux",
13 | "hive",
14 | "nft",
15 | "token",
16 | "dex"
17 | ],
18 | "author": "disregardfiat",
19 | "license": "MIT",
20 | "dependencies": {
21 | "@hiveio/dhive": "^0.14.12",
22 | "@hiveio/hive-js": "^2.0.4",
23 | "aes-js": "^3.1.2",
24 | "bs58": "^4.0.1",
25 | "cors": "^2.8.5",
26 | "decode-uri-component": "^0.2.0",
27 | "discord-webhook-node": "^1.1.8",
28 | "dotenv": "^8.2.0",
29 | "express": "^4.16.4",
30 | "form-data": "^2.3.3",
31 | "fs-extra": "^9.0.1",
32 | "hive-tx": "^4.1.2",
33 | "ipfs-http-client-lite": "^0.3.0",
34 | "ipfs-unixfs": "^0.1.16",
35 | "ipld-dag-pb": "^0.15.3",
36 | "json-stable-stringify": "^1.0.1",
37 | "level": "^5.0.1",
38 | "level-pathwise": "^4.0.1",
39 | "node-fetch": "^2.6.7",
40 | "pg": "^8.6.0",
41 | "request": "^2.88.2",
42 | "xmlhttprequest": "^1.8.0"
43 | },
44 | "devDependencies": {
45 | "chai": "^4.2.0",
46 | "eslint": "^7.17.0",
47 | "mocha": "^9.2.0"
48 | }
49 | }
50 |
--------------------------------------------------------------------------------
/pathwise.js:
--------------------------------------------------------------------------------
1 | var assert = require('assert');
2 | var defaults = require('levelup-defaults');
3 | var bytewise = require('bytewise');
4 | var type = require('component-type');
5 | var after = require('after');
6 | var streamToArray = require('stream-to-array');
7 | const stringify = require('json-stable-stringify');
8 | var { block } = require('./index')
9 | module.exports = Pathwise;
10 |
11 | function Pathwise(db) {
12 | assert(db, 'db required');
13 | this._db = defaults(db, {
14 | keyEncoding: bytewise,
15 | valueEncoding: 'json'
16 | });
17 | }
18 |
19 | Pathwise.prototype.put = function(path, obj, opts, fn) {
20 | if (typeof opts == 'function') {
21 | fn = opts;
22 | opts = {};
23 | }
24 | var batch = opts.batch || this._db.batch();
25 | this._write(batch, path, obj, fn);
26 | if (opts.batch) setImmediate(fn);
27 | else batch.write(fn);
28 | };
29 |
30 | Pathwise.prototype._write = function(batch, key, obj, fn) {
31 | var self = this;
32 | switch (type(obj)) {
33 | case 'object':
34 | var keys = Object.keys(obj);
35 | var next = after(keys.length, fn);
36 | keys.forEach(function(k) {
37 | self._write(batch, key.concat(k), obj[k], next);
38 | });
39 | break;
40 | case 'array':
41 | this._write(batch, key, arrToObj(obj), fn);
42 | break;
43 | default:
44 | batch.put(bytewise.encode(key), stringify(obj));
45 | break;
46 | }
47 | }
48 |
49 | Pathwise.prototype.batch = function(ops, pc) { // promise chain[resolve(), reject(), info]
50 | var self = this;
51 | var batch = this._db.batch();
52 | var next = after(ops.length, function(err) {
53 | if (err && pc[1]) {
54 | console.log('fail', err)
55 | pc[1](err)
56 | } else if (pc.length > 2) {
57 | block.ops.push('W')
58 | batch.write(()=>{pc[0](pc[2])})
59 | } else {
60 | block.ops.push('W')
61 | batch.write(()=>{pc[0]()})
62 | }
63 | });
64 | ops.forEach(function(op) {
65 | block.ops.push(stringify({type: op.type, path: op.path, data: op.data}))
66 | if (op.type == 'put') self.put(op.path, op.data, { batch: batch }, next)
67 | else if (op.type == 'del') self.del(op.path, { batch: batch }, next);
68 | });
69 | };
70 |
71 | Pathwise.prototype.get = function(path, fn) {
72 | var ret = {};
73 | var el = ret;
74 |
75 | streamToArray(this._db.createReadStream({
76 | start: path,
77 | end: path.concat(undefined)
78 | }), function(err, data) {
79 | if (err) return fn(err);
80 | let er = null
81 | try {
82 | data.forEach(function(kv) {
83 | var segs = kv.key.slice(path.length);
84 | if (segs.length) {
85 | segs.forEach(function(seg, idx) {
86 | if (!el[seg]) {
87 | if (idx == segs.length - 1) {
88 | el[seg] = kv.value;
89 | } else {
90 | el[seg] = {};
91 | }
92 | }
93 | el = el[seg];
94 | });
95 | el = ret;
96 | } else {
97 | ret = kv.value;
98 | }
99 | });
100 | } catch (err) { er = err }
101 | fn(er, ret);
102 | });
103 | };
104 |
105 | Pathwise.prototype.getWith = function(path, obj, fn) {
106 | var ret = {};
107 | var el = ret;
108 |
109 | streamToArray(this._db.createReadStream({
110 | start: path,
111 | end: path.concat(undefined)
112 | }), function(err, data) {
113 | if (err) return fn(err);
114 | let er = null
115 | try {
116 | data.forEach(function(kv) {
117 | var segs = kv.key.slice(path.length);
118 | if (segs.length) {
119 | segs.forEach(function(seg, idx) {
120 | if (!el[seg]) {
121 | if (idx == segs.length - 1) {
122 | el[seg] = kv.value;
123 | } else {
124 | el[seg] = {};
125 | }
126 | }
127 | el = el[seg];
128 | });
129 | el = ret;
130 | } else {
131 | ret = kv.value;
132 | }
133 | });
134 | } catch (err) { er = err }
135 | fn(er, ret, obj);
136 | });
137 | };
138 |
139 | Pathwise.prototype.del = function(path, opts, fn) {
140 | if (typeof opts == 'function') {
141 | fn = opts;
142 | opts = {};
143 | }
144 | var batch = opts.batch || this._db.batch();
145 |
146 | streamToArray(this._db.createKeyStream({
147 | start: path,
148 | end: path.concat(undefined)
149 | }), function(err, keys) {
150 | if (err) return fn(err);
151 | keys.forEach(function(key) { batch.del(bytewise.encode(key)) });
152 | if (opts.batch) fn();
153 | else batch.write(fn);
154 | });
155 | };
156 |
157 | Pathwise.prototype.children = function(path, fn) {
158 | streamToArray(this._db.createReadStream({
159 | start: path,
160 | end: path.concat(undefined)
161 | }), function(err, kv) {
162 | if (err) return fn(err);
163 | fn(null, kv.map(function(_kv) {
164 | return _kv.key[path.length] || _kv.value;
165 | }));
166 | });
167 | }
168 |
169 | Pathwise.prototype.someChildren = function(path, opts, fn) {
170 | streamToArray(this._db.createReadStream({
171 | start: [...path, opts.gte],
172 | end: [...path, opts.lte].concat(undefined)
173 | }), function(err, kv) {
174 | if (err) return fn(err);
175 | fn(null, kv.map(function(_kv) {
176 | return _kv.key[path.length] || _kv.value;
177 | }));
178 | });
179 | }
180 |
181 | function arrToObj(arr) {
182 | var obj = {};
183 | arr.forEach(function(el, idx) {
184 | obj[idx] = el;
185 | });
186 | return obj;
187 | }
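188 |
189 | // Illustrative sketch (not part of the original module): the `pc` argument to
190 | // batch() is the promise chain used throughout this repo, [resolve, reject, info].
191 | // On success the batch is written and resolve(info) fires; on error reject(err).
192 | // The db path and keys below are hypothetical.
193 | /*
194 | const level = require('level')
195 | const Pathwise = require('./pathwise')
196 | const store = new Pathwise(level('./db'))
197 | new Promise((resolve, reject) => {
198 |   store.batch([{ type: 'put', path: ['balances', 'alice'], data: 1000 }], [resolve, reject, 'ok'])
199 | }).then(info => store.get(['balances', 'alice'], (e, bal) => console.log(info, bal)))
200 | */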
--------------------------------------------------------------------------------
/processing_routes/cert.js:
--------------------------------------------------------------------------------
1 | const config = require('../config');
2 | const { store } = require('./../index')
3 | const { getPathObj } = require('./../getPathObj')
4 | const { postToDiscord } = require('./../discord')
5 | exports.cert = (json, from, active, pc) => {
6 | var postPromise = getPathObj(['posts', `${json.author}/${json.permlink}`]);
7 | Promise.all([postPromise])
8 | .then(function(v) {
9 | var post = v[0];
10 | ops = [];
11 | if (post) {
12 | post.cert[from] = json.cert;
13 | ops.push({ type: 'put', path: ['posts', `${json.author}/${json.permlink}`], data: post });
14 | let msg = `@${from}| Signed a certificate on ${json.author}/${json.permlink}`
15 | ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: msg });
16 | if (config.hookurl || config.status) postToDiscord(msg, `${json.block_num}:${json.transaction_id}`)
17 | store.batch(ops, pc);
18 | } else {
19 | pc[0](pc[2]);
20 | }
21 | })
22 | .catch(function(e) {
23 | console.log(e);
24 | });
25 | }
--------------------------------------------------------------------------------
/processing_routes/cjv.js:
--------------------------------------------------------------------------------
1 | const { store } = require("./../index");
2 | const { getPathObj } = require("./../getPathObj");
3 | const config = require("../config");
4 | //const { postToDiscord } = require('./../discord')
5 |
6 | exports.cjv = (json, from, active, pc) => {
7 | var postPromise = getPathObj(['posts', `${json.a}/${json.p}`]);
8 | Promise.all([postPromise])
9 | .then(function(v) {
10 | var post = v[0];
11 | ops = [],
12 | auth = false;
13 | if (Object.keys(post).length) {
14 | if (from == config.leader) { //centralized pinning report
15 | post.b = json.b
16 | ops.push({ type: 'put', path: ['posts', `${json.a}/${json.p}`], data: post });
17 | store.batch(ops, pc);
18 | } else {
19 | pc[0](pc[2]);
20 | }
21 | } else {
22 | pc[0](pc[2]);
23 | }
24 | })
25 | .catch(function(e) {
26 | console.log(e);
27 | });
28 | }
--------------------------------------------------------------------------------
/processing_routes/comment.js:
--------------------------------------------------------------------------------
1 | const config = require('./../config')
2 | const { store } = require('./../index')
3 | const { chronAssign } = require('./../lil_ops')
4 | const { getPathObj } = require('../getPathObj')
5 | const { contentToDiscord } = require('./../discord')
6 | const { insertNewPost, updateRating, moderate } = require('./../edb');
7 |
8 | exports.comment = (json, pc) => {
9 | let meta = {}
10 | try { meta = JSON.parse(json.json_metadata) } catch (e) {}
11 | let community_post = false
12 | if (json.author == config.leader && parseInt(json.permlink.split(config.tag)[1]) > json.block_num - 31000) {
13 | //console.log('leader post')
14 | store.get(['escrow', json.author], function(e, a) {
15 | if (!e) {
16 | var ops = []
17 | for (b in a) {
18 | if (a[b][1].permlink == json.permlink && b == 'comment') {
19 | ops.push({ type: 'del', path: ['escrow', json.author, b] })
20 | }
21 | }
22 | if (process.env.npm_lifecycle_event == 'test') pc[2] = ops
23 | store.batch(ops, pc)
24 | } else {
25 | console.log(e)
26 | }
27 | })
28 | } else if (config.features.pob && (meta.arHash || meta.vrHash || meta.appHash || meta.audHash)) {
29 | Ppost = getPathObj(['posts', `${json.author}/${json.permlink}`])
30 | Promise.all([Ppost])
31 | .then(postarray => {
32 | post = postarray[0]
33 | var ops = []
34 | if (!Object.keys(post).length) { //check if promoted/voted
35 | //store json until a vote or promote with comment options
36 | ops.push({
37 | type: 'put',
38 | path: ['pend', `${json.author}/${json.permlink}`],
39 | data: {
40 | author: json.author,
41 | permlink: json.permlink,
42 | block_num: json.block_num,
43 | meta
44 | }
45 | })
46 | ops.push({
47 | type: 'put',
48 | path: ['chrono', `${json.block_num + 28800}:pend:${json.author}/${json.permlink}`],
49 | data: {
50 | author: json.author,
51 | permlink: json.permlink,
52 | block_num: json.block_num,
53 | op: 'del_pend'
54 | }
55 | })
56 | } else {
57 | post.meta = meta
58 | ops.push({
59 | type: 'put',
60 | path: ['posts', `${json.author}/${json.permlink}`],
61 | data: post
62 | })
63 | }
64 | if (process.env.npm_lifecycle_event == 'test') pc[2] = ops
65 | store.batch(ops, pc)
66 | })
67 | .catch(e => { console.log(e) })
68 | /*
69 | }
70 |
71 |
72 | //tag search for -LEO Community
73 | for (tag in meta.tags) {
74 | if (community_post) { break; }
75 | for (i = 0; i < config.community_tags.length; i++) {
76 | if (tag == config.community_tags[i]) {
77 | community_post = true
78 | break;
79 | }
80 | }
81 | }
82 |
83 |
84 | if (community_post) {
85 |
86 | //tag picker only -LEO Community
87 | var exp_path = chronAssign(json.block_num + 201600, { op: 'post_reward', a: json.author, p: json.permlink })
88 | Promise.all([exp_path])
89 | .then(r => {
90 | const post = {
91 | author: json.author,
92 | permlink: json.permlink,
93 | expire_path: r[0],
94 | block: json.block_num
95 | }
96 | var ops = [{ type: 'put', path: ['posts', json.author, json.permlink], data: post }]
97 | store.batch(ops, pc)
98 | })
99 | .catch(e => console.log(e))
100 | */
101 | } else if (
102 | config.dbcs && json.parent_author &&
103 | json.parent_permlink &&
104 | meta?.review?.rating &&
105 | meta.review.rating >= 1 &&
106 | meta.review.rating <= 5
107 | ) {
108 | updateRating(
109 | json.parent_author,
110 | json.parent_permlink,
111 | json.author,
112 | meta.review.rating
113 | );
114 | pc[0](pc[2]);
115 | } else if (
116 | config.dbcs &&
117 | config?.dbmods.includes(json.author) &&
118 | json.parent_author &&
119 | json.parent_permlink &&
120 | meta?.review?.moderate
121 | ) {
122 | moderate(
123 | meta?.review?.moderate.hide,
124 | meta?.review?.moderate.reason,
125 | json.parent_author,
126 | json.parent_permlink
127 | );
128 | pc[0](pc[2]);
129 | } else {
130 | pc[0](pc[2]);
131 | }
132 | }
133 |
134 | exports.comment_options = (json, pc) => {
135 | //console.log(json)
136 | try {
137 | var filter = json.extensions[0][1].beneficiaries
138 | } catch (e) {
139 | pc[0](pc[2])
140 | return
141 | }
142 | var ops = []
143 | for (var i = 0; i < filter.length; i++) {
144 | if (filter[i].account == config.ben && filter[i].weight >= config.delegationWeight) {
145 | store.get(['pend', `${json.author}/${json.permlink}`], function(e, a) {
146 | if (e) { console.log(e) }
147 | if (Object.keys(a).length) {
148 | var assigns = []
149 | assigns.push(chronAssign(json.block_num + 201600, {
150 | block: parseInt(json.block_num + 201600),
151 | op: 'post_reward',
152 | author: json.author,
153 | permlink: json.permlink
154 | }))
155 | assigns.push(chronAssign(parseInt(json.block_num + 20000), {
156 | block: parseInt(json.block_num + 20000),
157 | op: 'post_vote',
158 | author: json.author,
159 | permlink: json.permlink
160 | }))
161 | ops.push({
162 | type: 'put',
163 | path: ['posts', `${json.author}/${json.permlink}`],
164 | data: {
165 | block: json.block_num,
166 | author: json.author,
167 | permlink: json.permlink,
168 | customJSON: a.meta
169 | }
170 | })
171 | if(config.dbcs){
172 | var type = "Blog";
173 | if (config.typeDefs['360'].includes(a.meta.vrHash))
174 | type = "360";
175 | else if (
176 | a.meta.vrHash
177 | )
178 | type = "VR";
179 | else if (
180 | a.meta.arHash
181 | )
182 | type = "AR";
183 | else if (
184 | a.meta.appHash
185 | )
186 | type = "APP";
187 | else if (
188 | a.meta.audHash
189 | )
190 | type = "Audio";
191 | else if (
192 | a.meta.vidHash
193 | )
194 | type = "Video";
195 | insertNewPost({
196 | block: json.block_num,
197 | author: json.author,
198 | permlink: json.permlink,
199 | type: type,
200 | })
201 | }
202 | var pins = {}
203 | for (i in a.meta.assets) {
204 | if (a.meta.assets[i].pin) {
205 | pins[a.meta.assets[i].hash] = {
206 | h: a.meta.assets[i].hash, //hash
207 | b: 0, //bytes
208 | v: 0 //verifies
209 | }
210 | }
211 | if (a.meta.assets[i].pin && a.meta.assets[i].thumbHash && a.meta.assets[i].thumbHash != a.meta.assets[i].hash){
212 | pins[a.meta.assets[i].thumbHash] = {
213 | h: a.meta.assets[i].thumbHash, //hash
214 | b: 0, //bytes
215 | v: 0 //verifies
216 | }
217 | }
218 | }
219 | if(Object.keys(pins).length)ops.push({ type: 'put', path: ['ipfs', 'unbundled', `${json.author}:${json.permlink}`], data: pins })
220 | if(config.pintoken){
221 | //ipfsVerify(`${json.author}:${json.permlink}`, pins)
222 | }
223 | /*
224 | if (config.pintoken) {
225 | var pins = []
226 | for (i in a.meta.assets) {
227 | if (a.meta.assets[i].pin) pins.push({ hash: a.meta.assets[i].hash })
228 | if (a.meta.assets[i].pin && a.meta.assets[i].thumbHash && a.meta.assets[i].thumbHash != a.meta.assets[i].hash) pins.push({ hash: a.meta.assets[i].hash })
229 | }
230 | if (pins.length) {
231 | var options = {
232 | 'method': 'POST',
233 | 'url': config.pinurl,
234 | 'headers': {
235 | 'Content-Type': 'application/json'
236 | },
237 | formData: {
238 | 'items': JSON.stringify(pins),
239 | 'secret': config.pintoken,
240 | 'by': json.author,
241 | 'block': json.block_num.toString()
242 | }
243 | };
244 | request(options, function(error, response) {
245 | if (error) throw new Error(error);
246 | console.log(response.body);
247 | });
248 | }
249 | }
250 | */
251 | /*
252 | if (config.username == config.leader) {
253 | var bytes = rtrades.checkNpin(a.meta.assets)
254 | bytes.then(function(value) {
255 | var op = ["custom_json", {
256 | required_auths: [config.username],
257 | required_posting_auths: [],
258 | id: `${config.prefix}cjv`,
259 | json: JSON.stringify({
260 | a: json.author,
261 | p: json.permlink,
262 | b: value //amount of bytes posted
263 | })
264 | }]
265 | unshiftOp([
266 | [0, 0], op
267 | ])
268 | })
269 | }
270 | */
271 | ops.push({ type: 'del', path: ['pend', `${json.author}/${json.permlink}`] })
272 | ops.push({ type: 'del', path: ['chrono', `${a.block_num + 28800}:pend:${json.author}/${json.permlink}`] })
273 | const msg = `@${json.author}|${json.permlink} added to ${config.TOKEN} rewardable content`
274 | if (config.hookurl) contentToDiscord(json.author, json.permlink)
275 | ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: msg })
276 | if (process.env.npm_lifecycle_event == 'test') pc[2] = ops
277 | Promise.all(assigns)
278 | .then(v=>{
279 | store.batch(ops, pc)
280 | })
281 | } else {
282 | ops.push({ type: 'del', path: ['pend', `${json.author}/${json.permlink}`] })
283 | ops.push({ type: 'del', path: ['chrono', `${a.block_num + 28800}:pend:${json.author}/${json.permlink}`] })
284 | if (process.env.npm_lifecycle_event == 'test') pc[2] = ops
285 | store.batch(ops, pc)
286 | }
287 | })
288 | } else {
289 | pc[0](pc[2])
290 | }
291 | }
292 | }
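293 |
294 | // Illustrative sketch (not part of the original module): the json_metadata shape
295 | // the proof-of-brain branch above looks for, and the beneficiary that
296 | // comment_options must carry (config.ben at >= config.delegationWeight) before a
297 | // post is scheduled for rewards. All values below are hypothetical examples.
298 | /*
299 | const json_metadata = JSON.stringify({
300 |   vrHash: 'QmSceneHash', // any of arHash / vrHash / appHash / audHash
301 |   assets: [{ hash: 'QmAssetHash', pin: true, thumbHash: 'QmThumbHash' }]
302 | })
303 | const extensions = [[0, { beneficiaries: [{ account: 'dlux-io', weight: 1000 }] }]]
304 | */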
--------------------------------------------------------------------------------
/processing_routes/delegate_vesting_shares.js:
--------------------------------------------------------------------------------
1 | const config = require('./../config')
2 | const { store } = require('./../index')
3 | const { postToDiscord } = require('./../discord')
4 |
5 | exports.delegate_vesting_shares = (json, pc) => {
6 | var ops = []
7 | json.vesting_shares = denaier(json.vesting_shares)
8 | const vests = parseInt(parseFloat(json.vesting_shares) * 1000000)
9 | if (json.delegatee == config.delegation && vests) {
10 | ops.push({ type: 'put', path: ['delegations', json.delegator], data: vests })
11 | const msg = `@${json.delegator}| has delegated ${vests} vests to @${config.delegation}`
12 | if (config.hookurl || config.status) postToDiscord(msg, `${json.block_num}:${json.transaction_id}`)
13 | ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: msg })
14 | } else if (json.delegatee == config.delegation && !vests) {
15 | ops.push({ type: 'del', path: ['delegations', json.delegator] })
16 | const msg = `@${json.delegator}| has removed delegation to @${config.delegation}`
17 | if (config.hookurl || config.status) postToDiscord(msg, `${json.block_num}:${json.transaction_id}`)
18 | ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: msg })
19 | }
20 | store.batch(ops, pc)
21 | }
22 |
23 | function denaier(obj){
24 | if (typeof obj == 'string')return obj
25 | else return parseFloat(obj.amount / 1000000).toFixed(6)
26 | }
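27 |
28 | // Illustrative sketch (not part of the original module): denaier accepts either a
29 | // legacy asset string or a NAI asset object and returns the decimal string; the
30 | // op then stores the amount in micro-vests. Values below are hypothetical.
31 | /*
32 | denaier('1000.000000 VESTS')                                        // -> '1000.000000 VESTS'
33 | denaier({ amount: '1000000000', precision: 6, nai: '@@000000037' }) // -> '1000.000000'
34 | parseInt(parseFloat('1000.000000') * 1000000)                       // -> 1000000000 vests stored
35 | */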
--------------------------------------------------------------------------------
/processing_routes/fork.js:
--------------------------------------------------------------------------------
1 | const config = require('./../config')
2 | const { store } = require('./../index')
3 | const { getPathObj } = require('../getPathObj')
4 | const { postToDiscord, contentToDiscord } = require('./../discord')
5 |
6 | exports.fork_propose = (json, from, active, pc) => {
7 | var postPromise = getPathObj(['posts', `${json.a}/${json.p}`]);
8 | Promise.all([postPromise])
9 | .then(function(v) {
10 | var post = v[0];
11 | var ops = [],
12 | auth = false;
13 | if (Object.keys(post).length) {
14 | if (from == config.leader) { //centralized pinning report
15 | post.b = json.b
16 | ops.push({ type: 'put', path: ['posts', `${json.a}/${json.p}`], data: post });
17 | store.batch(ops, pc);
18 | } else {
19 | pc[0](pc[2]);
20 | }
21 | } else {
22 | pc[0](pc[2]);
23 | }
24 | })
25 | .catch(function(e) {
26 | console.log(e);
27 | });
28 | }
--------------------------------------------------------------------------------
/processing_routes/gov.js:
--------------------------------------------------------------------------------
1 | const config = require('./../config')
2 | const { store } = require("../index");
3 | const { getPathObj, getPathNum } = require('../getPathObj')
4 | const { chronAssign } = require('../lil_ops')
5 | const { postToDiscord } = require('./../discord')
6 |
7 | exports.gov_up = (json, from, active, pc) => {
8 | var amount = parseInt(json.amount),
9 | Pliquid = getPathNum(['balances', from]),
10 | Pgovt = getPathNum(['gov', 't']),
11 | Pgov = getPathNum(['gov', from]),
12 | Pnode = getPathObj(['markets', 'node', from])
13 |
14 | Promise.all([Pliquid, Pgovt, Pgov, Pnode])
15 | .then(bals => {
16 | let lbal = bals[0],
17 | govt = bals[1],
18 | gbal = bals[2],
19 | ops = [];
20 | if (amount <= lbal && active && bals[3].self == from) {
21 | ops.push({ type: 'put', path: ['balances', from], data: lbal - amount });
22 | ops.push({ type: 'put', path: ['gov', from], data: gbal + amount });
23 | ops.push({ type: 'put', path: ['gov', 't'], data: govt + amount });
24 | const msg = `@${from}| Locked ${parseFloat(json.amount / 1000).toFixed(3)} ${config.TOKEN} for Governance`
25 | if (config.hookurl || config.status) postToDiscord(msg, `${json.block_num}:${json.transaction_id}`)
26 | ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: msg });
27 | } else {
28 | ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: `@${from}| Invalid gov up` });
29 | }
30 | if (process.env.npm_lifecycle_event == 'test') pc[2] = ops
31 | store.batch(ops, pc);
32 | })
33 | .catch(e => { console.log(e); });
34 |
35 | }
36 |
37 | exports.gov_down = (json, from, active, pc) => {
38 | var amount = parseInt(json.amount),
39 | Pgov = getPathNum(['gov', from]),
40 | Pgovd = getPathObj(['govd', from])
41 | Promise.all([Pgov, Pgovd, ])
42 | .then(o => {
43 | let gov = o[0],
44 | downs = o[1] || {},
45 | ops = [],
46 | assigns = [];
47 | if (typeof amount == 'number' && amount >= 0 && gov >= amount && active) {
48 | var odd = parseInt(amount % 4),
49 | weekly = parseInt(amount / 4);
50 | for (var i = 0; i < 4; i++) {
51 | if (i == 3) {
52 | weekly += odd;
53 | }
54 | assigns.push(chronAssign(parseInt(json.block_num + (201600 * (i + 1))), {
55 | block: parseInt(json.block_num + (201600 * (i + 1))),
56 | op: 'gov_down',
57 | amount: weekly,
58 | by: from
59 | }));
60 | }
61 | Promise.all(assigns)
62 | .then(a => {
63 | var newdowns = {};
64 | for (d in a) {
65 | newdowns[a[d]] = a[d];
66 | }
67 | ops.push({ type: 'put', path: ['govd', from], data: newdowns });
68 | for (i in downs) {
69 | ops.push({ type: 'del', path: ['chrono', downs[i]] });
70 | }
71 | const msg = `@${from}| Set withdrawal of ${parseFloat(amount / 1000).toFixed(3)} ${config.TOKEN} from Governance`
72 | if (config.hookurl || config.status) postToDiscord(msg, `${json.block_num}:${json.transaction_id}`)
73 | ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: msg });
74 | if (process.env.npm_lifecycle_event == 'test') pc[2] = ops
75 | store.batch(ops, pc);
76 | });
77 | } else if (typeof amount == 'number' && amount == 0 && active) {
78 | for (i in downs) {
79 | ops.push({ type: 'del', path: ['chrono', i] });
80 | }
81 | const msg = `@${from}| Canceled Governance withdrawal`
82 | ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: msg });
83 | if (config.hookurl || config.status) postToDiscord(msg, `${json.block_num}:${json.transaction_id}`)
84 | if (process.env.npm_lifecycle_event == 'test') pc[2] = ops
85 | store.batch(ops, pc);
86 | } else {
87 | ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: `@${from}| Invalid Governance withdrawal` });
88 | if (process.env.npm_lifecycle_event == 'test') pc[2] = ops
89 | store.batch(ops, pc);
90 | }
91 |
92 | })
93 | .catch(e => { console.log(e); });
94 |
95 | }
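96 |
97 | // Illustrative sketch (not part of the original module): how gov_down splits a
98 | // withdrawal into four weekly chrono ops (201600 blocks ≈ one week). The amount
99 | // below is a hypothetical example in milli-token units.
100 | /*
101 | const amount = 10003
102 | const odd = parseInt(amount % 4)  // 3
103 | let weekly = parseInt(amount / 4) // 2500
104 | // weeks 1-3 schedule 2500 each; week 4 schedules 2500 + 3 = 2503
105 | // at block_num + 201600, + 403200, + 604800, + 806400
106 | */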
--------------------------------------------------------------------------------
/processing_routes/index.js:
--------------------------------------------------------------------------------
1 | const { onStreamingStart } = require('./onStreamingStart')
2 | const { send, claim } = require('./send')
3 | const { gov_up, gov_down } = require('./gov')
4 | const { power_up, power_down, power_grant } = require('./power')
5 | const { delegate_vesting_shares } = require('./delegate_vesting_shares')
6 | const { vote } = require('./vote')
7 | const { cert } = require('./cert')
8 | const { sig_submit, osig_submit, account_update } = require('./sig')
9 | const { cjv } = require('./cjv')
10 | const { nomention } = require('./nomention')
11 | const { q4d } = require('./q4d')
12 | const { node_add, node_delete } = require('./nodes')
13 | const { dex_sell, dex_clear, transfer, margins } = require('./dex')
14 | const { comment, comment_options } = require('./comment')
15 | const { report } = require('./report')
16 | const {
17 | nft_pfp,
18 | ft_bid,
19 | ft_auction,
20 | ft_sell_cancel,
21 | ft_buy,
22 | nft_sell,
23 | nft_sell_cancel,
24 | nft_buy, ft_sell,
25 | ft_escrow_cancel,
26 | ft_escrow_complete,
27 | ft_escrow,
28 | ft_airdrop,
29 | ft_transfer,
30 | fts_sell_h,
31 | fts_sell_hcancel,
32 | nft_bid,
33 | nft_auction,
34 | nft_hauction,
35 | nft_mint,
36 | nft_define,
37 | nft_add_roy,
38 | nft_div,
39 | nft_delete,
40 | nft_transfer_cancel,
41 | nft_reserve_complete,
42 | nft_transfer,
43 | nft_reserve_transfer
44 | } = require('./nft')
45 |
46 | module.exports = {
47 | nft_pfp,
48 | ft_bid,
49 | ft_auction,
50 | ft_sell_cancel,
51 | nft_sell,
52 | nft_sell_cancel,
53 | nft_buy,
54 | ft_buy,
55 | ft_escrow_cancel,
56 | ft_sell,
57 | ft_escrow_complete,
58 | ft_escrow,
59 | ft_transfer,
60 | fts_sell_h,
61 | fts_sell_hcancel,
62 | ft_airdrop,
63 | nft_transfer,
64 | nft_auction,
65 | nft_hauction,
66 | nft_bid,
67 | nft_transfer_cancel,
68 | nft_reserve_transfer,
69 | nft_reserve_complete,
70 | nft_delete,
71 | nft_define,
72 | nft_add_roy,
73 | nft_div,
74 | nft_mint,
75 | cert,
76 | cjv,
77 | comment,
78 | comment_options,
79 | account_update,
80 | delegate_vesting_shares,
81 | dex_clear,
82 | dex_sell,
83 | margins,
84 | gov_down,
85 | gov_up,
86 | node_add,
87 | node_delete,
88 | nomention,
89 | onStreamingStart,
90 | power_down,
91 | power_grant,
92 | power_up,
93 | q4d,
94 | report,
95 | send,
96 | claim,
97 | sig_submit,
98 | osig_submit,
99 | transfer,
100 | vote
101 | }
--------------------------------------------------------------------------------
/processing_routes/nodes.js:
--------------------------------------------------------------------------------
1 | const config = require("./../config");
2 | const { store } = require("./../index");
3 | const { getPathObj, deleteObjs } = require("./../getPathObj");
4 | const { isEmpty } = require("./../lil_ops");
5 | const { postToDiscord } = require("./../discord");
6 | const { decode, encode } = require("@hiveio/hive-js").memo;
7 |
8 | exports.node_add = function (json, from, active, pc) {
9 | if (json.domain && typeof json.domain === "string") {
10 | var escrow = true;
11 | if (json.escrow == "false") {
12 | escrow = false;
13 | }
14 | var mirror = false;
15 | if (json.mirror == "true") {
16 | mirror = true;
17 | }
18 | var mskey;
19 | if (json.mskey && json.mschallenge) {
20 | try {
21 | const verifyKey = decode(config.msPriMemo, json.mschallenge);
22 | const nowhammies = encode(
23 | config.msPriMemo,
24 | config.msPubMemo,
25 | verifyKey
26 | );
27 | const isValid = encode(config.msPriMemo, json.mskey, "#try");
28 | if (
29 | typeof isValid == "string" &&
30 | verifyKey == `#${json.mskey}` &&
31 | nowhammies != json.mschallenge
32 | )
33 | mskey = json.mskey;
34 | } catch (e) {}
35 | }
36 | var bid = parseInt(json.bidRate) || 0;
37 | if (bid < 1) {
38 | bid = 500;
39 | }
40 | if (bid > 1000) {
41 | bid = 1000;
42 | }
43 | var dm = parseInt(json.dm) || 10000; //dex max 10000 = 100.00% / 1 = 0.01%
44 | //the max size a dex buy order can be ON the buy book in relation to the safety limit determined by collateral amounts
45 | if (dm < 1) {
46 | dm = 10000;
47 | }
48 | if (dm > 10000) {
49 | dm = 10000;
50 | }
51 | var ds = parseInt(json.ds) || 0; //dex slope 10000 = 100.00% / 1 = 0.01%
52 | //the max size a dex buy order can be ON the buy book in relation to the current price. 0 = no slope, only max HIVE, 100% means a buy order at 50% of the current tick can be 50% of the dex max HIVE value.
53 | if (ds < 0) {
54 | ds = 0;
55 | }
56 | if (ds > 10000) {
57 | ds = 10000;
58 | }
59 | var dv = parseInt(json.dv) || 0; //dao vote 10000 = 100.00% / 1 = 0.01%
60 | //the portion of the claim that will be put into the chains DAO. Recommend 10-15%
61 | if (dv < 0) {
62 | dv = 1500;
63 | }
64 | if (dv > 10000) {
65 | dv = 1500;
66 | }
67 | var daoRate = parseInt(json.marketingRate) || 0;
68 | if (daoRate < 1) {
69 | daoRate = 0;
70 | }
71 | if (daoRate > 2000) {
72 | daoRate = 2000;
73 | }
74 | var liquidity = parseInt(json.liquidity) || 0;
75 | if (liquidity < 0) {
76 | liquidity = 100;
77 | }
78 | if (liquidity > 100) {
79 | liquidity = 100;
80 | }
81 | store.get(["markets", "node", from], function (e, a) {
82 | let ops = [];
83 | if (!e) {
84 | if (isEmpty(a)) {
85 | data = {
86 | domain: json.domain || "localhost",
87 | self: from,
88 | bidRate: bid,
89 | attempts: 0,
90 | yays: 0,
91 | wins: 0,
92 | strikes: 0,
93 | burned: 0,
94 | moved: 0,
95 | contracts: 0,
96 | escrows: 0,
97 | lastGood: 0,
98 | report: {},
99 | dm,
100 | ds,
101 | dv,
102 | };
103 | if (mskey) data.mskey = mskey;
104 | ops = [
105 | {
106 | type: "put",
107 | path: ["markets", "node", from],
108 | data,
109 | },
110 | ];
111 | } else {
112 | var b = a;
113 | b.domain = json.domain ? json.domain : b.domain;
114 | b.bidRate = bid ? bid : b.bidRate;
115 | b.dm = dm ? dm : b.dm || 10000;
116 | b.ds = ds ? ds : b.ds || 0;
117 | b.dv = dv ? dv : b.dv || 1500;
118 | b.liquidity = liquidity ? liquidity : b.liquidity || 100;
119 | if (mskey) b.mskey = mskey;
120 | ops = [{ type: "put", path: ["markets", "node", from], data: b }];
121 | }
122 | const msg = `@${from}| has bid the hive-state node ${json.domain} at ${json.bidRate}`;
123 | if (config.hookurl || config.status)
124 | postToDiscord(msg, `${json.block_num}:${json.transaction_id}`);
125 | ops.push({
126 | type: "put",
127 | path: ["feed", `${json.block_num}:${json.transaction_id}`],
128 | data: msg,
129 | });
130 | } else {
131 | console.log(e);
132 | }
133 | if (process.env.npm_lifecycle_event == "test") pc[2] = ops;
134 | store.batch(ops, pc);
135 | });
136 | } else {
137 | ops = [
138 | {
139 | type: "put",
140 | path: ["feed", `${json.block_num}:${json.transaction_id}`],
141 | data: `@${from}| sent an invalid node add operation`,
142 | },
143 | ];
144 | if (process.env.npm_lifecycle_event == "test") pc[2] = ops;
145 | store.batch(ops, pc);
146 | }
147 | };
148 |
149 | exports.node_delete = function (json, from, active, pc) {
150 | if (active) {
151 | var ops = [];
152 | var Pqueue = getPathObj(["queue"]),
153 | Pnode = getPathObj(["markets", "node", from]);
154 | Promise.all([Pqueue, Pnode])
155 | .then(function (v) {
156 | deleteObjs([["queue"]])
157 | .then((empty) => {
158 | var q = v[0],
159 | n = v[1];
160 | if (typeof n.bidRate == "number") {
161 | for (var i = 0; i < q.length; i++) {
162 | if (q[i] == from) {
163 | found = i;
164 | break;
165 | }
166 | }
167 | delete q[from];
168 | ops.push({ type: "put", path: ["queue"], data: q });
169 | delete n.domain;
170 | delete n.bidRate;
171 | delete n.escrow;
172 | delete n.marketingRate;
173 | ops.push({ type: "del", path: ["runners", from] });
174 | ops.push({
175 | type: "put",
176 | path: ["markets", "node", from],
177 | data: n,
178 | });
179 | const msg = `@${from}| has signed off their ${config.TOKEN} node`;
180 | if (config.hookurl || config.status)
181 | postToDiscord(msg, `${json.block_num}:${json.transaction_id}`);
182 | ops.push({
183 | type: "put",
184 | path: ["feed", `${json.block_num}:${json.transaction_id}`],
185 | data: msg,
186 | });
187 | store.batch(ops, pc);
188 | } else {
189 | pc[0](pc[2]);
190 | }
191 | })
192 | .catch((e) => {
193 | console.log(e);
194 | });
195 | })
196 | .catch(function (e) {
197 | console.log(e);
198 | });
199 | } else {
200 | pc[0](pc[2]);
201 | }
202 | };
203 |
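204 | // Illustrative sketch (not part of the original module): a node_add custom_json
205 | // payload as validated above. All values are hypothetical; out-of-range numbers
206 | // fall back to the defaults enforced in node_add.
207 | /*
208 | const json = {
209 |   domain: 'https://token.example.com', // this node's public API domain
210 |   bidRate: 500,                        // fee bid (1-1000)
211 |   dm: 10000,                           // dex max order size, basis points
212 |   ds: 0,                               // dex slope, basis points
213 |   dv: 1500,                            // share of claims routed to the DAO, basis points
214 |   mskey: 'STM...',                     // optional multisig pubkey, requires mschallenge
215 |   escrow: true
216 | }
217 | */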
--------------------------------------------------------------------------------
/processing_routes/nomention.js:
--------------------------------------------------------------------------------
1 | const { store } = require('./../index')
2 |
3 | exports.nomention = (json, from, active, pc) => {
4 | if (typeof json.nomention == 'boolean') {
5 | store.get(['delegations', from], function(e, a) {
6 | var ops = []
7 | if (!e && json.nomention) {
8 | ops.push({ type: 'put', path: ['nomention', from], data: true })
9 | } else if (!e && !json.nomention) {
10 | ops.push({ type: 'del', path: ['nomention', from] })
11 | }
12 | store.batch(ops, pc)
13 | })
14 | } else {
15 | pc[0](pc[2])
16 | }
17 | }
--------------------------------------------------------------------------------
/processing_routes/onBlock.js:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dluxio/dlux_open_token/bc2a8ce95e94ad203e8d596c26df878466d87616/processing_routes/onBlock.js
--------------------------------------------------------------------------------
/processing_routes/onStreamingStart.js:
--------------------------------------------------------------------------------
1 | const config = require('./../config')
2 | const { store, unshiftOp, TXID } = require("./../index");
3 | const { encode } = require('@hiveio/hive-js').memo
4 |
5 | exports.onStreamingStart = () => {
6 | console.log("At real time.");
7 | TXID.current()
8 | store.get(['markets', 'node', config.username], function(e, a) {
9 | if ((!a.domain && config.NODEDOMAIN) || (a.domain != config.NODEDOMAIN)) {
10 | var mskey, mschallenge
11 | if(config.msowner && config.mspublic){
12 | mskey = config.mspublic
13 | mschallenge = encode(config.msowner, config.msPubMemo, `#${config.mspublic}`)
14 | }
15 | var op = ["custom_json", {
16 | required_auths: [config.username],
17 | required_posting_auths: [],
18 | id: `${config.prefix}node_add`,
19 | json: JSON.stringify({
20 | domain: config.NODEDOMAIN,
21 | bidRate: config.bidRate,
22 | mskey,
23 | mschallenge,
24 | escrow: true
25 | })
26 | }];
27 | unshiftOp([
28 | [0, 0], op
29 | ]);
30 | return op
31 | }
32 | });
33 | }
34 |
--------------------------------------------------------------------------------
/processing_routes/power.js:
--------------------------------------------------------------------------------
1 | const config = require('./../config')
2 | const { store } = require("../index");
3 | const { getPathObj, getPathNum } = require('../getPathObj')
4 | const { chronAssign } = require('../lil_ops')
5 | const { postToDiscord } = require('./../discord');
6 |
7 | exports.power_up = (json, from, active, pc) => {
8 | var amount = parseInt(json.amount),
9 | lpp = getPathNum(['balances', from]),
10 | tpowp = getPathNum(['pow', 't']),
11 | powp = getPathNum(['pow', from]);
12 |
13 | Promise.all([lpp, tpowp, powp])
14 | .then(bals => {
15 | let lb = bals[0],
16 | tpow = bals[1],
17 | pow = bals[2],
18 | lbal = typeof lb != 'number' ? 0 : lb,
19 | pbal = typeof pow != 'number' ? 0 : pow,
20 | ops = [];
21 | if (amount <= lbal && active) {
22 | ops.push({ type: 'put', path: ['balances', from], data: lbal - amount });
23 | ops.push({ type: 'put', path: ['pow', from], data: pbal + amount });
24 | ops.push({ type: 'put', path: ['pow', 't'], data: tpow + amount });
25 | const msg = `@${from}| Powered up ${parseFloat(json.amount / 1000).toFixed(3)} ${config.TOKEN}`
26 | if (config.hookurl || config.status) postToDiscord(msg, `${json.block_num}:${json.transaction_id}`)
27 | ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: msg });
28 | } else {
29 | ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: `@${from}| Invalid power up` });
30 | }
31 | store.batch(ops, pc);
32 | })
33 | .catch(e => { console.log(e); });
34 |
35 | }
36 |
37 | exports.power_grant = (json, from, active, pc) => {
38 | var amount = parseInt(json.amount),
39 | to = json.to,
40 | Pgranting_from_total = getPathNum(['granting', from, 't']),
41 | Pgranting_to_from = getPathNum(['granting', from, to]),
42 | Pgranted_to_from = getPathNum(['granted', to, from]),
43 | Pgranted_to_total = getPathNum(['granted', to, 't']),
44 | Ppower = getPathNum(['pow', from]),
45 | Pup_from = getPathObj(['up', from]),
46 | Pdown_from = getPathObj(['down', from]),
47 | Pup_to = getPathObj(['up', to]),
48 | Pdown_to = getPathObj(['down', to])
49 | Promise.all([
50 | Ppower,
51 | Pgranted_to_from,
52 | Pgranted_to_total,
53 | Pgranting_to_from,
54 | Pgranting_from_total,
55 | Pup_from,
56 | Pup_to,
57 | Pdown_from,
58 | Pdown_to
59 | ])
60 | .then(mem => {
61 | let from_power = mem[0],
62 | granted_to_from = mem[1],
63 | granted_to_total = mem[2],
64 | granting_to_from = mem[3],
65 | granting_from_total = mem[4],
66 | up_from = mem[5],
67 | up_to = mem[6],
68 | down_from = mem[7],
69 | down_to = mem[8],
70 | ops = [];
71 | if (amount < from_power && amount >= 0 && active) {
72 | if (amount > granted_to_from) {
73 | let more = amount - granted_to_from
74 | if (up_from.max) {
75 | up_from.max -= more
76 | }
77 | if (down_from.max) {
78 | down_from.max -= more
79 | }
80 | if (up_to.max) {
81 | up_to.max += more
82 | }
83 | if (down_to.max) {
84 | down_to.max += more
85 | }
86 | ops.push({ type: 'put', path: ['granting', from, 't'], data: granting_from_total + more });
87 | ops.push({ type: 'put', path: ['granting', from, to], data: granting_to_from + more });
88 | ops.push({ type: 'put', path: ['granted', to, from], data: granted_to_from + more });
89 | ops.push({ type: 'put', path: ['granted', to, 't'], data: granted_to_total + more });
90 | ops.push({ type: 'put', path: ['pow', from], data: from_power - more }); //weeks wait? chron ops? no because of the power growth at vote
91 | if (Object.keys(up_from).length)
92 | ops.push({
93 | type: "put",
94 | path: ["up", from],
95 | data: up_from,
96 | });
97 | if (Object.keys(down_from).length)
98 | ops.push({
99 | type: "put",
100 | path: ["down", from],
101 | data: down_from,
102 | });
103 | if (Object.keys(up_to).length)
104 | ops.push({ type: "put", path: ["up", to], data: up_to });
105 | if (Object.keys(down_to).length)
106 | ops.push({
107 | type: "put",
108 | path: ["down", to],
109 | data: down_to,
110 | });
111 | const msg = `@${from}| Has granted ${parseFloat(amount/1000).toFixed(3)} to ${to}`
112 | if (config.hookurl || config.status) postToDiscord(msg, `${json.block_num}:${json.transaction_id}`)
113 | ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: msg });
114 | } else if (amount < granted_to_from) {
115 | let less = granted_to_from - amount
116 | if (up_from.max) {
117 | up_from.max += less
118 | }
119 | if (down_from.max) {
120 | down_from.max += less
121 | }
122 | if (up_to.max) {
123 | up_to.max -= less
124 | }
125 | if (down_to.max) {
126 | down_to.max -= less
127 | }
128 | ops.push({ type: 'put', path: ['granting', from, 't'], data: granting_from_total - less });
129 | ops.push({ type: 'put', path: ['granting', from, to], data: granting_to_from - less });
130 | ops.push({ type: 'put', path: ['granted', to, from], data: granted_to_from - less });
131 | ops.push({ type: 'put', path: ['granted', to, 't'], data: granted_to_total - less });
132 | ops.push({ type: 'put', path: ['pow', from], data: from_power + less });
133 | if (Object.keys(up_from).length)
134 | ops.push({
135 | type: "put",
136 | path: ["up", from],
137 | data: up_from,
138 | });
139 | if (Object.keys(down_from).length)
140 | ops.push({
141 | type: "put",
142 | path: ["down", from],
143 | data: down_from,
144 | });
145 | if (Object.keys(up_to).length)
146 | ops.push({ type: "put", path: ["up", to], data: up_to });
147 | if (Object.keys(down_to).length)
148 | ops.push({
149 | type: "put",
150 | path: ["down", to],
151 | data: down_to,
152 | });
153 | const msg = `@${from}| Has granted ${parseFloat(amount/1000).toFixed(3)} to ${to}`
154 | if (config.hookurl || config.status) postToDiscord(msg, `${json.block_num}:${json.transaction_id}`)
155 | ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: msg });
156 | } else {
157 | const msg = `@${from}| Has already granted ${parseFloat(amount/1000).toFixed(3)} to ${to}`
158 | if (config.hookurl || config.status) postToDiscord(msg, `${json.block_num}:${json.transaction_id}`)
159 | ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: msg });
160 | }
161 | } else {
162 | const msg = `@${from}| Invalid delegation`
163 | if (config.hookurl || config.status) postToDiscord(msg, `${json.block_num}:${json.transaction_id}`)
164 | ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: msg });
165 | }
166 | store.batch(ops, pc);
167 | })
168 | .catch(e => { console.log(e); });
169 |
170 | }
171 |
172 | exports.power_down = (json, from, active, pc) => {
173 | var powp = getPathNum(['pow', from]),
174 | powd = getPathObj(['powd', from]);
175 | Promise.all([powp, powd])
176 | .then(o => {
177 | let p = typeof o[0] != 'number' ? 0 : o[0],
178 | downs = o[1] || {},
179 | ops = [],
180 | assigns = [],
181 | amount = parseInt(json.amount)
182 | if (typeof amount == 'number' && amount >= 0 && p >= amount && active) {
183 | var odd = parseInt(amount % 4),
184 | weekly = parseInt(amount / 4);
185 | for (var i = 0; i < 4; i++) {
186 | if (i == 3) {
187 | weekly += odd;
188 | }
189 | assigns.push(chronAssign(parseInt(json.block_num + (200000 * (i + 1))), {
190 | block: parseInt(json.block_num + (200000 * (i + 1))),
191 | op: 'power_down',
192 | amount: weekly,
193 | by: from
194 | }));
195 | }
196 | Promise.all(assigns)
197 | .then(a => {
198 | var newdowns = {};
199 | for (d in a) {
200 | newdowns[a[d]] = a[d];
201 | }
202 | ops.push({
203 | type: "del",
204 | path: ["powd", from],
205 | });
206 | ops.push({ type: 'put', path: ['powd', from], data: newdowns });
207 | for (i in downs) {
208 | ops.push({ type: 'del', path: ['chrono', i] });
209 | }
210 | const msg = `@${from}| Powered down ${parseFloat(amount / 1000).toFixed(3)} ${config.TOKEN}`
211 | if (config.hookurl || config.status) postToDiscord(msg, `${json.block_num}:${json.transaction_id}`)
212 | ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: msg });
213 | store.batch(ops, pc);
214 | });
215 | } else if (typeof amount == 'number' && amount == 0 && active) {
216 | for (i in downs) {
217 | ops.push({ type: 'del', path: ['chrono', downs[i]] });
218 | }
219 | const msg = `@${from}| Canceled Power Down`
220 | if (config.hookurl || config.status) postToDiscord(msg, `${json.block_num}:${json.transaction_id}`)
221 | ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: msg });
222 | store.batch(ops, pc);
223 | } else {
224 | const msg = `@${from}| Invalid Power Down`
225 | if (config.hookurl || config.status) postToDiscord(msg, `${json.block_num}:${json.transaction_id}`)
226 | ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: msg });
227 | store.batch(ops, pc);
228 | }
229 |
230 | })
231 | .catch(e => { console.log(e); });
232 |
233 | }
--------------------------------------------------------------------------------
/processing_routes/prediction.js:
--------------------------------------------------------------------------------
1 | const config = require('./../config')
2 | const { store } = require("./../index");
3 | const { getPathObj, getPathNum } = require("./../getPathObj");
4 | const { postToDiscord } = require('./../discord')
5 |
6 | // the oracle problem is notoriously difficult; several options exist, but this is an
7 | // attempt to acquire the wisdom of the crowd in a dlux fashion:
8 | // a prediction market built on sports scores
9 |
10 | /*
11 | exports.pm_make = (json, from, active, pc) => {
12 | if (active) {
13 | let promise_from_bal = getPathNum(['balances', from])
14 | let promise_stats = getPathObj(['stats'])
15 | Promise.all([promise_from_bal, promise_stats])
16 | .then(ret => {
17 | let from_bal = ret[0],
18 | stats = ret[1],
19 | ops = [];
20 | if (from_bal > stats.pm.fee) {
21 | if (typeof json.a.n == "string" && typeof json.b.n == "string" && typeof json.a.h == "number" && typeof json.b.h == "number" && )
22 | store.batch(ops, pc);
23 | } else {
24 | pc[0](pc[2])
25 | }
26 | })
27 | .catch(e => { console.log(e); });
28 | } else {
29 | pc[0](pc[2])
30 | }
31 | }
32 |
33 | exports.pm_place = (json, from, active, pc) => {
34 | if (active) {
35 | let promise_from_bal = getPathNum(['balances', from])
36 | let promise_stats = getPathObj(['stats'])
37 | Promise.all([promise_from_bal, promise_stats])
38 | .then(ret => {
39 | let from_bal = ret[0],
40 | stats = ret[1],
41 | ops = [];
42 | if (from_bal > stats.pm.fee) {
43 | if (typeof json.a.n == "string" && typeof json.b.n == "string" && typeof json.a.h == "number" && typeof json.b.h == "number" && )
44 | store.batch(ops, pc);
45 | } else {
46 | pc[0](pc[2])
47 | }
48 | })
49 | .catch(e => { console.log(e); });
50 | } else {
51 | pc[0](pc[2])
52 | }
53 | }
54 |
55 | exports.pm_settle = (json, from, active, pc) => {
56 | if (active) {
57 | let promise_from_bal = getPathNum(['balances', from])
58 | let promise_stats = getPathObj(['stats'])
59 | Promise.all([promise_from_bal, promise_stats])
60 | .then(ret => {
61 | let from_bal = ret[0],
62 | stats = ret[1],
63 | ops = [];
64 | if (from_bal > stats.pm.fee) {
65 | if (typeof json.a.n == "string" && typeof json.b.n == "string" && typeof json.a.h == "number" && typeof json.b.h == "number" && )
66 | store.batch(ops, pc);
67 | } else {
68 | pc[0](pc[2])
69 | }
70 | })
71 | .catch(e => { console.log(e); });
72 | } else {
73 | pc[0](pc[2])
74 | }
75 | }
76 |
77 | exports.pm_ = (json, from, active, pc) => {
78 | if (active) {
79 | let promise_from_bal = getPathNum(['balances', from])
80 | let promise_stats = getPathObj(['stats'])
81 | Promise.all([promise_from_bal, promise_stats])
82 | .then(ret => {
83 | let from_bal = ret[0],
84 | stats = ret[1],
85 | ops = [];
86 | if (from_bal > stats.pm.fee) {
87 | if (typeof json.a.n == "string" && typeof json.b.n == "string" && typeof json.a.h == "number" && typeof json.b.h == "number" && )
88 | store.batch(ops, pc);
89 | } else {
90 | pc[0](pc[2])
91 | }
92 | })
93 | .catch(e => { console.log(e); });
94 | } else {
95 | pc[0](pc[2])
96 | }
97 | }
98 | */
99 | /*
100 | {
101 | a:{
102 | n: "Name of Team : string",
103 | h: "handicap : int"
104 | },
105 | b:{
106 | n: "Name of Team : string",
107 | h: "handicap : int"
108 | },
109 | c:{
110 | s: "sport/league",
111 | t: "Scheduled match time", //block number ?
112 | l: "last bet", //block number ?
113 | r: "Ratification Time" //blocknum -> a week after?
114 | f: int -> "failure path" // 0-release no fault, 1-lottery of witnesses, 2-repoll, 3-repoll+ext
115 | c: "category"
116 | }
117 | }
118 | */
--------------------------------------------------------------------------------
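The comment block above documents the intended prediction-market payload. A hedged validation sketch for that shape, treating the time fields (`c.t`, `c.l`, `c.r`) as block numbers as the comment suggests; the helper is illustrative only:

// Hedged validation sketch for the documented payload shape.
function isValidPmPayload(json) {
  if (!json || typeof json !== "object") return false;
  const teamOk = (t) => t && typeof t.n === "string" && typeof t.h === "number";
  const c = json.c || {};
  return (
    teamOk(json.a) &&
    teamOk(json.b) &&
    typeof c.s === "string" && // sport/league
    typeof c.t === "number" && // scheduled match time (block number)
    typeof c.l === "number" && // last bet block
    typeof c.r === "number" && // ratification block
    typeof c.f === "number"    // failure path, 0-3
  );
}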
/processing_routes/q4d.js:
--------------------------------------------------------------------------------
1 | const config = require('./../config')
2 | const { store } = require('./../index')
3 |
4 | exports.q4d = (json, from, active, pc) => {
5 |     if (from == config.leader && json.text && json.title) {
6 | store.batch([{
7 | type: 'put',
8 | path: ['postQueue', json.title],
9 | data: {
10 | text: json.text,
11 | title: json.title
12 | }
13 | }], pc)
14 | } else {
15 | pc[0](pc[2])
16 | }
17 | }
--------------------------------------------------------------------------------
/processing_routes/report.js:
--------------------------------------------------------------------------------
1 | const config = require('./../config')
2 | const { store } = require("./../index");
3 | const { ipfsPeerConnect } = require("./../ipfsSaveState");
4 |
5 | exports.report = (json, from, active, pc) => {
6 | store.get(['markets', 'node', from], function(e, a) {
7 | if (!e) {
8 | var b = a
9 | if (from == b.self && active) {
10 | b.report = json
11 | delete b.report.timestamp
12 | var ops = [
13 | { type: 'put', path: ['markets', 'node', from], data: b }
14 | ]
15 | if(json.ipfs_id && config.ipfshost == 'ipfs')ipfsPeerConnect(json.ipfs_id)
16 | if (process.env.npm_lifecycle_event == 'test') pc[2] = ops
17 | store.batch(ops, pc)
18 | } else {
19 | pc[0](pc[2])
20 | }
21 | } else {
22 | pc[0](pc[2])
23 | console.log(e)
24 | }
25 | })
26 | }
27 |
28 |
--------------------------------------------------------------------------------
/processing_routes/send.js:
--------------------------------------------------------------------------------
1 | const config = require('./../config')
2 | const { store } = require("./../index");
3 | const { getPathNum } = require("./../getPathObj");
4 | const { postToDiscord } = require('./../discord');
5 | const { updatePromote } = require('./../edb');
6 |
7 | exports.send = (json, from, active, pc) => {
8 | let fbalp = getPathNum(['balances', from]),
9 | tbp = getPathNum(['balances', json.to]); //to balance promise
10 | Promise.all([fbalp, tbp])
11 | .then(bals => {
12 | let fbal = bals[0],
13 | tbal = bals[1],
14 |         ops = [],
15 |         send = parseInt(json.amount);
16 | if (json.to && typeof json.to == 'string' && send > 0 && fbal >= send && active && json.to != from) { //balance checks
17 | ops.push({ type: 'put', path: ['balances', from], data: parseInt(fbal - send) });
18 | ops.push({ type: 'put', path: ['balances', json.to], data: parseInt(tbal + send) });
19 | let msg = `@${from}| Sent @${json.to} ${parseFloat(parseInt(json.amount) / 1000).toFixed(3)} ${config.TOKEN}`
20 | if(json.to === 'null' && json.memo.split('/')[1]){
21 | msg = `@${from}| Promoted @${json.memo} with ${parseFloat(parseInt(json.amount) / 1000).toFixed(3)} ${config.TOKEN}`
22 | if(config.dbcs){
23 | let author = json.memo.split('/')[0],
24 | permlink = json.memo.split('/')[1]
25 | if(author.split('@')[1]){
26 | author = author.split('@')[1]
27 | }
28 | updatePromote(author,permlink, send)
29 | }
30 | }
31 | if (config.hookurl || config.status) postToDiscord(msg, `${json.block_num}:${json.transaction_id}`)
32 | ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: msg });
33 | } else {
34 | ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: `@${from}| Invalid send operation` });
35 | }
36 | if (process.env.npm_lifecycle_event == 'test') pc[2] = ops
37 | store.batch(ops, pc);
38 | })
39 | .catch(e => { console.log(e); });
40 | }
41 |
42 | exports.claim = (json, from, active, pc) => {
43 | let fbalp = getPathNum(['cbalances', from]),
44 | tbp = getPathNum(['balances', from]),
45 | splitp = getPathNum([json.gov ? 'gov': 'pow', from]),
46 |     totp = getPathNum([json.gov ? 'gov': 'pow', 't']),
47 | claimp = getPathNum(['claim', from]);
48 | Promise.all([fbalp, tbp, splitp, totp, claimp])
49 | .then(bals => {
50 | let fbal = bals[0],
51 | tbal = bals[1],
52 | split = bals[2],
53 | tot = bals[3],
54 | claims = bals[4],
55 | ops = [],
56 | claim = parseInt(fbal);
57 | if (claim > 0) {
58 | const half = parseInt(claim / 2),
59 | other = claim - half,
60 | msg = `@${from}| Claimed ${parseFloat(parseInt(claim) / 1000).toFixed(3)} ${config.TOKEN} - Half ${json.gov ? 'locked in gov': 'powered up.'}`
61 | ops.push({ type: 'del', path: ['cbalances', from] });
62 | ops.push({ type: 'put', path: ['balances', from], data: parseInt(tbal + half) });
63 | ops.push({ type: 'put', path: [json.gov ? 'gov': 'pow', from], data: parseInt(split + other) });
64 | ops.push({ type: 'put', path: [json.gov ? 'gov': 'pow', 't'], data: parseInt(tot + other) });
65 | if (config.hookurl || config.status) postToDiscord(msg, `${json.block_num}:${json.transaction_id}`)
66 | ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: msg });
67 | } else {
68 | ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: `@${from}| Invalid claim operation` });
69 | }
70 | if (process.env.npm_lifecycle_event == 'test') pc[2] = ops
71 | store.batch(ops, pc);
72 | })
73 | .catch(e => { console.log(e); });
74 | }
--------------------------------------------------------------------------------
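In `exports.send` above, a transfer to the `null` account whose memo looks like `author/permlink` (optionally with a leading `@` on the author) is treated as a post promotion. A minimal sketch of that memo parsing, with an illustrative helper name:

// Illustrative helper mirroring the promotion branch of exports.send above.
function parsePromotionMemo(memo) {
  const parts = (memo || "").split("/");
  if (!parts[1]) return null; // plain send, not a promotion memo
  let author = parts[0];
  if (author.split("@")[1]) author = author.split("@")[1]; // tolerate a leading @
  return { author, permlink: parts[1] };
}

// parsePromotionMemo("@alice/my-post") -> { author: "alice", permlink: "my-post" }
// parsePromotionMemo("hello")          -> null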
/processing_routes/sig.js:
--------------------------------------------------------------------------------
1 | const config = require('./../config')
2 | const { store, Owners } = require("./../index");
3 | const { getPathObj } = require("./../getPathObj");
4 | const { verify_broadcast, verify_tx_sig } = require("./../tally");
5 |
6 | exports.account_update = (json, pc) => {
7 | if(json.account == config.msaccount){
8 | store.batch([{type:'del', path:['stats', 'ms']}], [after, pc[1], 'del'])
9 | function after() {
10 | var ops = []
11 | if(json.active) {
12 | let account_auths = {}
13 | for (var i = 0; i < json.active.account_auths.length; i++){
14 | account_auths[json.active.account_auths[i][0]] = json.active.account_auths[i][1]
15 | }
16 | ops.push({type:'put', path:['stats', 'ms', 'active_account_auths'], data: account_auths})
17 | if(json.active.weight_threshold) ops.push({type:'put', path:['stats', 'ms', 'active_threshold'], data: json.active.weight_threshold})
18 | }
19 | if(json.owner) {
20 | let owner_key_auths = {}
21 | for (var i = 0; i < json.owner.key_auths.length;i++){
22 | owner_key_auths[json.owner.key_auths[i][0]] = json.owner.key_auths[i][1]
23 | }
24 | ops.push({type:'put', path:['stats', 'ms', 'owner_key_auths'], data: owner_key_auths})
25 | if(json.owner.weight_threshold) ops.push({type:'put', path:['stats', 'ms', 'owner_threshold'], data: json.owner.weight_threshold})
26 | }
27 | if(json.posting) {
28 | let paccount_auths = {}
29 | for (var i = 0; i < json.posting.account_auths.length;i++){
30 | paccount_auths[json.posting.account_auths[i][0]] = json.posting.account_auths[i][1]
31 | }
32 |         ops.push({type:'put', path:['stats', 'ms', 'posting_account_auths'], data: paccount_auths})
33 | if(json.posting.weight_threshold) ops.push({type:'put', path:['stats', 'ms', 'posting_threshold'], data: json.posting.weight_threshold})
34 | }
35 | if(json.memo_key) ops.push({type:'put', path:['stats', 'ms', 'memo_key'], data: json.memo_key})
36 | ops.push({type:'del', path:['msso']})
37 | store.batch(ops, pc)
38 | }
39 | } else if (json.active && Owners.is(json.account)) {
40 | Owners.activeUpdate(json.account, json.active.account_auths[0][0]);
41 | pc[0](pc[2])
42 | } else {
43 | pc[0](pc[2])
44 | }
45 | }
46 |
47 | exports.sig_submit = (json, from, active, pc) => {
48 | var Pop = getPathObj(['mss', `${json.sig_block}`]),
49 | Psigs = getPathObj(['mss', `${json.sig_block}:sigs`]),
50 | Pstats = getPathObj(['stats'])
51 | Promise.all([Pop, Pstats, Psigs])
52 | .then(got => {
53 | let msop = got[0],
54 | stats = got[1],
55 |         sigs = got[2],
56 | ops = []
57 | try{
58 | msop = JSON.parse(msop)
59 | } catch (e){}
60 | if (active && stats.ms.active_account_auths[from] && msop.expiration) {
61 | if(config.mode == 'verbose')console.log({sigs, from}, msop, json.sig, Owners.getKey(from))
62 | if (verify_tx_sig(msop, json.sig, Owners.getKey(from))){
63 | if (config.mode == "verbose") console.log("VERIFIED");
64 |                     sigs[from] = json.sig;
65 |                     ops.push({
66 |                       type: "put",
67 |                       path: ["mss", `${json.sig_block}:sigs`],
68 |                       data: sigs,
69 |                     });
70 | if (Object.keys(sigs).length >= stats.ms.active_threshold) {
71 | let sigarr = [];
72 | for (var i in sigs) {
73 | sigarr.push(sigs[i]);
74 | }
75 | verify_broadcast(msop, sigarr, stats.ms.active_threshold);
76 | }
77 | }
78 | store.batch(ops, pc);
79 | //try to sign
80 | } else {
81 | pc[0](pc[2])
82 | }
83 | })
84 | .catch(e => { console.log(e); });
85 | }
86 |
87 | exports.osig_submit = (json, from, active, pc) => {
88 | var Pop = getPathObj(['msso', `${json.sig_block}`]),
89 | Psigs = getPathObj(['msso', `${json.sig_block}:sigs`]),
90 | Pstats = getPathObj(['stats'])
91 | Promise.all([Pop, Pstats, Psigs])
92 | .then(got => {
93 | let msop = got[0],
94 | stats = got[1],
95 |         sigs = got[2],
96 | ops = []
97 | try{
98 | msop = JSON.parse(msop)
99 | } catch (e){}
100 | if (active && stats.ms.active_account_auths[from] && msop.expiration) {
101 | sigs[from] = json.sig
102 | if(Object.keys(sigs).length >= stats.ms.active_threshold){
103 | let sigarr = []
104 | for(var i in sigs){
105 | sigarr.push(sigs[i])
106 | }
107 | verify_broadcast(msop, sigarr, stats.ms.owner_threshold)
108 | }
109 | ops.push({ type: 'put', path: ['msso', `${json.sig_block}:sigs`], data: sigs })
110 | store.batch(ops, pc);
111 | //try to sign
112 | } else {
113 | pc[0](pc[2])
114 | }
115 | })
116 | .catch(e => { console.log(e); });
117 | }
118 |
--------------------------------------------------------------------------------
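`sig_submit` above records one signature per authorized account and hands the collected signatures to `verify_broadcast` once the stored active threshold is met. A hedged sketch of that threshold bookkeeping (the helper is illustrative; `verify_broadcast` remains the real tally.js export):

// Illustrative helper only; the real broadcast goes through verify_broadcast.
function collectAndCheck(sigs, from, sig, threshold) {
  sigs[from] = sig; // one signature per authorized account
  if (Object.keys(sigs).length >= threshold) {
    const sigarr = [];
    for (let i in sigs) sigarr.push(sigs[i]);
    return { ready: true, sigarr };
  }
  return { ready: false, sigarr: [] };
}

// collectAndCheck({}, "alice", "SIG_A", 2)               -> { ready: false, ... }
// collectAndCheck({ alice: "SIG_A" }, "bob", "SIG_B", 2) -> { ready: true, sigarr: ["SIG_A", "SIG_B"] }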
/processing_routes/vote.js:
--------------------------------------------------------------------------------
1 | const config = require('./../config')
2 | const { store } = require("./../index");
3 | const { getPathObj, getPathNum, deleteObjs } = require('./../getPathObj')
4 | const { updatePostVotes } = require('./../edb');
5 |
6 | exports.vote = (json, pc) => {
7 | if (json.voter == config.leader) {
8 | deleteObjs([
9 | ['escrow', config.leader, `vote:${json.author}/${json.permlink}`]
10 | ])
11 | .then(empty => pc[0](pc[2]))
12 | .catch(e => console.log(e))
13 | } else {
14 | getPathObj(['posts', `${json.author}/${json.permlink}`]).then(p => {
15 | if (Object.keys(p).length) {
16 | const oldVotes = p.votes || {}
17 | p.votes = oldVotes
18 | var PvotePow = getPathObj(['up', json.voter]),
19 | PdVotePow = getPathObj(['down', json.voter]),
20 | PPow = getPathNum(['pow', json.voter]),
21 | PGrant = getPathNum(['granted', json.voter, 't'])
22 | Promise.all([PvotePow, PdVotePow, PPow, PGrant]).then(function(v) {
23 | var up = v[0],
24 | down = v[1],
25 | pow = v[2] + v[3],
26 | ops = [],
27 | weights
28 | if (!pow) {
29 | pc[0](pc[2])
30 | } else {
31 | if (!Object.keys(up).length) {
32 | up = {
33 | max: pow * 50,
34 | last: 0,
35 | power: pow * 50
36 | }
37 | down = {
38 | max: pow * 50,
39 | last: 0,
40 | power: pow * 50
41 | }
42 | } else {
43 | up.max = pow * 50
44 | down.max = pow * 50
45 | }
46 | if (json.weight >= 0) {
47 | weights = upPowerMagic(up, json)
48 | } else {
49 | weights = downPowerMagic(up, down, json)
50 | ops.push({ type: 'put', path: ['down', json.voter], data: weights.down })
51 | }
52 | p.votes[json.voter] = {
53 | b: json.block_num,
54 | v: weights.vote
55 | }
56 | if(config.dbcs){
57 | updatePostVotes(p)
58 | }
59 | ops.push({ type: 'put', path: ['up', json.voter], data: weights.up })
60 | ops.push({ type: 'put', path: ['posts', `${json.author}/${json.permlink}`], data: p })
61 | store.batch(ops, pc)
62 | }
63 | })
64 | .catch(e => pc[0](pc[2]))
65 | } else {
66 | pc[0](pc[2])
67 | }
68 | })
69 | }
70 | }
71 |
72 | /*
73 | exports.vote_content = (json, from, active, pc) => {
74 | var powPromise = getPathNum(['pow', from]),
75 | postPromise = getPathObj(['posts', `${json.author}/${json.permlink}`]),
76 | rollingPromise = getPathNum(['rolling', from]),
77 | nftPromise = getPathNum(['pow', 'n', from]); //an approach to token delegation by wrapping power in nft contracts - untested
78 | Promise.all([powPromise, postPromise, rollingPromise, nftPromise])
79 | .then(function(v) {
80 | var pow = v[0],
81 | post = v[1],
82 | rolling = v[2],
83 | nft = v[3],
84 | ops = [];
85 | if (pow >= 1) {
86 | if (Object.keys(post).length) {
87 | console.log(post);
88 | if (!post.voters) { post.voters = {}; }
89 | if (!rolling) {
90 | rolling = parseInt((nft + pow) * 10);
91 | }
92 | const w = json.weight > 0 && json.weight < 10001 ? parseInt(json.weight * rolling / 100000) : parseInt(rolling / 10);
93 | post.totalWeight += parseInt(json.weight * rolling / 100000);
94 | post.voters[from] = {
95 | block: json.block_num,
96 | weight: w
97 | };
98 | ops.push({ type: 'put', path: ['posts', `${json.author}/${json.permlink}`], data: post });
99 | ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: `@${from}| voted for @${json.author}/${json.permlink}` });
100 | rolling -= w;
101 | ops.push({ type: 'put', path: ['rolling', from], data: rolling });
102 | } else {
103 | ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: `@${from}| tried to vote for an unknown post` });
104 | }
105 | } else {
106 | ops.push({ type: 'put', path: ['feed', `${json.block_num}:${json.transaction_id}`], data: `@${from}| doesn't have the ${config.TOKEN} POWER to vote` });
107 | }
108 | store.batch(ops, pc);
109 | })
110 | .catch(function(e) {
111 | console.log(e);
112 | });
113 | }
114 | */
115 | function upPowerMagic(up, json) {
116 | const healTime = json.block_num - up.last //144000 blocks in 5 days
117 | const heal = parseInt(up.max * healTime / 144000)
118 | var newPower = up.power + heal
119 | if (newPower > up.max) {
120 | newPower = up.max
121 | }
122 | var vote = parseInt(newPower * json.weight / 500000) //50 from max AND 10000 from full weight
123 | newPower -= vote
124 | const newUp = {
125 | max: up.max,
126 | last: json.block_num,
127 | power: newPower
128 | }
129 | return { up: newUp, vote: vote }
130 | }
131 |
132 | function downPowerMagic(up, down, json) {
133 | const downHealTime = json.block_num - down.last //144000 blocks in 5 days
134 | const downHeal = parseInt(down.max * downHealTime / 144000)
135 | var newDownPower = down.power + downHeal
136 | if (newDownPower > down.max) {
137 | newDownPower = down.max
138 | }
139 | const healTime = json.block_num - up.last //144000 blocks in 5 days
140 | const heal = parseInt(up.max * healTime / 144000)
141 | var newPower = up.power + heal
142 | if (newPower > up.max) {
143 | newPower = up.max
144 | }
145 | var bigSpender = false
146 | var vote
147 |   var downvote = parseInt(newDownPower * json.weight / 500000) //50 from max AND 10000 from full weight
148 | newDownPower -= downvote
149 | if (newDownPower < down.max * 0.9) { //further down power vote effect up and down power meters
150 | bigSpender = true
151 | }
152 | if (bigSpender) {
153 | vote = parseInt(newPower * json.weight / 500000) //50 from max AND 10000 from full weight
154 |     if (vote > downvote) {
155 |       newPower -= vote
156 |       newDownPower -= vote
157 |     } else {
158 |       newPower -= downvote
159 |       newDownPower -= downvote
160 | }
161 | }
162 | const newUp = {
163 | max: up.max,
164 | last: json.block_num,
165 | power: newPower
166 | }
167 | const newDown = {
168 | max: down.max,
169 | last: json.block_num,
170 | power: newDownPower
171 | }
172 | return { up: newUp, down: newDown, vote: downvote }
173 | }
--------------------------------------------------------------------------------
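`upPowerMagic` above regenerates vote power linearly at `max / 144000` per block (a full recharge in roughly five days), caps it at `max`, then spends `power * weight / 500000` per vote, where `max` is 50x the voter's POW and `weight` runs 0-10000. A worked sketch with illustrative numbers:

// Worked sketch of the up-vote math; all numbers below are illustrative.
function simulateUpVote(up, blockNum, weight) {
  const heal = parseInt((up.max * (blockNum - up.last)) / 144000); // linear recharge
  let power = Math.min(up.power + heal, up.max);                   // capped at max
  const vote = parseInt((power * weight) / 500000);                // 50x max, 10000 weight
  power -= vote;
  return { up: { max: up.max, last: blockNum, power }, vote };
}

// With pow = 1,000 the meter max is 50,000; a fully charged 100% vote (weight 10000)
// spends 50000 * 10000 / 500000 = 1000, i.e. roughly the underlying POW.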
/processor.js:
--------------------------------------------------------------------------------
1 | const fetch = require("node-fetch");
2 | const { TXID } = require("./index");
3 | module.exports = function (
4 | client,
5 | nextBlock = 1,
6 | prefix = "dlux_",
7 | account = "null",
8 | vOpsRequired = false
9 | ) {
10 | var onCustomJsonOperation = {}; // Stores the function to be run for each operation id.
11 | var onOperation = {};
12 |
13 | var onNewBlock = function () {};
14 | var onStreamingStart = function () {};
15 | var behind = 0;
16 | var head_block;
17 | var isStreaming;
18 | var vOps = false;
19 | var stream;
20 | var blocks = {
21 | processing: 0,
22 | completed: nextBlock,
23 | stop: function () {
24 | blocks.clean(1);
25 | },
26 | ensure: function (last) {
27 | setTimeout(() => {
28 | if (!blocks.processing && blocks.completed == last) {
29 | getBlockNumber(nextBlock);
30 | if (!(last % 3))
31 | getHeadOrIrreversibleBlockNumber(function (result) {
32 | if (nextBlock < result - 5) {
33 | behind = result - nextBlock;
34 | beginBlockComputing();
35 | } else if (!isStreaming) {
36 | beginBlockStreaming();
37 | }
38 | });
39 | }
40 | }, 1000);
41 | },
42 | clean: function (stop = false) {
43 | var blockNums = Object.keys(blocks);
44 | for (var i = 0; i < blockNums.length; i++) {
45 | if (
46 | (parseInt(blockNums[i]) && parseInt(blockNums[i]) < nextBlock - 1) ||
47 | (stop && parseInt(blockNums[i]))
48 | ) {
49 | delete blocks[blockNums[i]];
50 |           if (vOps) delete blocks.v[blockNums[i]];
51 | }
52 | }
53 | var blockNums = Object.keys(blocks.v);
54 | for (var i = 0; i < blockNums.length; i++) {
55 | if (
56 | (parseInt(blockNums[i]) && parseInt(blockNums[i]) < nextBlock - 1) ||
57 | (stop && parseInt(blockNums[i]))
58 | ) {
59 | delete blocks.v[blockNums[i]];
60 | }
61 | }
62 | },
63 | v: {},
64 | requests: {
65 | last_range: 0,
66 | last_block: 0,
67 | },
68 | manage: function (block_num, vOp = false) {
69 | if (!head_block || block_num > head_block || !(block_num % 100))
70 | getHeadOrIrreversibleBlockNumber(function (result) {
71 | head_block = result;
72 | behind = result - nextBlock;
73 | });
74 | if (
75 | !(block_num % 100) &&
76 | head_block > blocks.requests.last_range + 200 &&
77 | Object.keys(blocks).length < 1000
78 | ) {
79 | gbr(blocks.requests.last_range + 1, 100, 0);
80 | }
81 | if (
82 | !(block_num % 100) &&
83 | head_block - blocks.requests.last_range + 1 > 100
84 | ) {
85 | gbr(blocks.requests.last_range + 1, 100, 0);
86 | }
87 | if (!(block_num % 100)) blocks.clean();
88 | if (blocks.processing) {
89 | setTimeout(() => {
90 | blocks.manage(block_num);
91 | }, 100);
92 | blocks.clean();
93 | } else if (vOps && !blocks.v[block_num]) return;
94 | else if (vOp && !blocks[block_num]) return;
95 | else if (blocks[block_num] && block_num == nextBlock) {
96 | blocks.processing = nextBlock;
97 | processBlock(blocks[block_num]).then(() => {
98 | nextBlock = block_num + 1;
99 | blocks.completed = blocks.processing;
100 | blocks.processing = 0;
101 | delete blocks[block_num];
102 | if (blocks[nextBlock]) blocks.manage(nextBlock);
103 | });
104 | } else if (block_num > nextBlock) {
105 | if (blocks[nextBlock]) {
106 | processBlock(blocks[nextBlock]).then(() => {
107 | delete blocks[nextBlock];
108 | nextBlock++;
109 | blocks.completed = blocks.processing;
110 | blocks.processing = 0;
111 | if (blocks[nextBlock]) blocks.manage(nextBlock);
112 | });
113 | } else if (!blocks[nextBlock]) {
114 | getBlock(nextBlock);
115 | }
116 | if (!isStreaming || behind < 5) {
117 | getHeadOrIrreversibleBlockNumber(function (result) {
118 | head_block = result;
119 | if (nextBlock < result - 3) {
120 | behind = result - nextBlock;
121 | beginBlockComputing();
122 | } else if (!isStreaming) {
123 | beginBlockStreaming();
124 | }
125 | });
126 | }
127 | }
128 | blocks.ensure(block_num);
129 | },
130 | };
131 | var stopping = false;
132 |
133 | // Returns the block number of the last block on the chain or the last irreversible block depending on mode.
134 | function getHeadOrIrreversibleBlockNumber(callback) {
135 | client.database.getDynamicGlobalProperties().then(function (result) {
136 | callback(result.last_irreversible_block_num);
137 | });
138 | }
139 |
140 | function getVops(bn) {
141 | return new Promise((resolve, reject) => {
142 | fetch(client.currentAddress, {
143 | body: `{"jsonrpc":"2.0", "method":"condenser_api.get_ops_in_block", "params":[${bn},true], "id":1}`,
144 | headers: {
145 | "Content-Type": "application/x-www-form-urlencoded",
146 | "User-Agent": `${prefix}HoneyComb/${account}`,
147 | },
148 | method: "POST",
149 | })
150 | .then((res) => res.json())
151 | .then((json) => {
152 | if (!json.result) {
153 | blocks.v[bn] = [];
154 | blocks.manage(bn, true);
155 | } else {
156 | blocks.v[bn] = json.result;
157 | blocks.manage(bn, true);
158 | }
159 | })
160 | .catch((err) => {
161 | console.log("Failed to get Vops for block: ", bn, err);
162 | });
163 | });
164 | }
165 |
166 | function isAtRealTime(computeBlock) {
167 | getHeadOrIrreversibleBlockNumber(function (result) {
168 | head_block = result;
169 | if (nextBlock >= result) {
170 | beginBlockStreaming();
171 | } else {
172 | behind = result - nextBlock;
173 | computeBlock();
174 | }
175 | });
176 | }
177 |
178 | function getBlockNumber(bln) {
179 | client.database
180 | .getBlock(bln)
181 | .then((result) => {
182 | if (result) {
183 | blocks[parseInt(result.block_id.slice(0, 8), 16)] = result;
184 | blocks.manage(bln);
185 | }
186 | })
187 | .catch((e) => {
188 | console.log("getBlockNumber Error: ", e);
189 | });
190 | }
191 |
192 | function getBlock(bn) {
193 | if (behind && !stopping) gbr(bn, behind > 100 ? 100 : behind, 0);
194 | if (stopping) stream = undefined;
195 | else if (!stopping) gb(bn, 0);
196 | }
197 |
198 | function gb(bln, at) {
199 | if (blocks[bln]) {
200 | blocks.manage(bln);
201 | return;
202 | } else if (blocks.requests.last_block == bln) return;
203 | if (bln < TXID.saveNumber + 50) {
204 | blocks.requests.last_block = bln;
205 | client.database
206 | .getBlock(bln)
207 | .then((result) => {
208 | blocks[parseInt(result.block_id.slice(0, 8), 16)] = result;
209 | blocks.manage(bln);
210 | })
211 | .catch((err) => {
212 | if (at < 3) {
213 | setTimeout(() => {
214 |               gb(bln, at + 1);
215 | }, Math.pow(10, at + 1));
216 | } else {
217 | console.log("Get block attempt:", at, client.currentAddress);
218 | }
219 | });
220 | } else {
221 | setTimeout(() => {
222 | gb(bln, at + 1);
223 | }, Math.pow(10, at + 1));
224 | }
225 | }
226 | function gbr(bln, count, at) {
227 | if (!at && blocks.requests.last_range > bln) return;
228 | console.log({ bln, count, at });
229 | if (!at) blocks.requests.last_range = bln + count - 1;
230 | fetch(client.currentAddress, {
231 | body: `{"jsonrpc":"2.0", "method":"block_api.get_block_range", "params":{"starting_block_num": ${bln}, "count": ${count}}, "id":1}`,
232 | headers: {
233 | "Content-Type": "application/x-www-form-urlencoded",
234 | "User-Agent": `${prefix}HoneyComb/${account}`,
235 | },
236 | method: "POST",
237 | })
238 | .then((res) => res.json())
239 | .then((result) => {
240 | try {
241 | var Blocks = result.result.blocks;
242 | for (var i = 0; i < Blocks.length; i++) {
243 | const bkn = parseInt(Blocks[i].block_id.slice(0, 8), 16);
244 | for (var j = 0; j < Blocks[i].transactions.length; j++) {
245 | Blocks[i].transactions[j].block_num = bkn;
246 | Blocks[i].transactions[j].transaction_id =
247 | Blocks[i].transaction_ids[j];
248 | Blocks[i].transactions[j].transaction_num = j;
249 | var ops = [];
250 | for (
251 | var k = 0;
252 | k < Blocks[i].transactions[j].operations.length;
253 | k++
254 | ) {
255 | ops.push([
256 | Blocks[i].transactions[j].operations[k].type.replace(
257 | "_operation",
258 | ""
259 | ),
260 | Blocks[i].transactions[j].operations[k].value,
261 | ]);
262 | }
263 | Blocks[i].transactions[j].operations = ops;
264 | blocks[bkn] = Blocks[i];
265 | }
266 | }
267 | blocks.manage(bln);
268 | } catch (e) {
269 | console.log(e);
270 | if (at < 3) {
271 | setTimeout(() => {
272 | gbr(bln, count, at + 1);
273 | }, Math.pow(10, at + 1));
274 | } else {
275 | console.log("Get block range error", e);
276 | }
277 | }
278 | })
279 | .catch((err) => {
280 | console.log(err);
281 | if (at < 3) {
282 | setTimeout(() => {
283 | gbr(bln, count, at + 1);
284 | }, Math.pow(10, at + 1));
285 | } else {
286 | console.log("Get block range error", err);
287 | }
288 | });
289 | }
290 |
291 | function beginBlockComputing() {
292 | var blockNum = nextBlock; // Helper variable to prevent race condition
293 | // in getBlock()
294 | blocks.ensure(nextBlock);
295 | //var vops = getVops(blockNum);
296 | getBlock(blockNum);
297 | }
298 |
299 | function beginBlockStreaming() {
300 | isStreaming = true;
301 | onStreamingStart();
302 | stream = client.blockchain.getBlockStream();
303 | stream.on("data", function (Block) {
304 | var blockNum = parseInt(Block.block_id.slice(0, 8), 16);
305 | blocks[blockNum] = Block;
306 | blocks.requests.last_block = blockNum;
307 | blocks.requests.last_range = blockNum;
308 | blocks.manage(blockNum);
309 | });
310 | stream.on("end", function () {
311 | console.error(
312 | "Block stream ended unexpectedly. Restarting block computing."
313 | );
314 | beginBlockComputing();
315 | stream = undefined;
316 | });
317 | stream.on("error", function (err) {
318 | beginBlockComputing();
319 | stream = undefined;
320 | console.log("This place:", err);
321 | //throw err;
322 | });
323 | }
324 |
325 | function transactional(ops, i, pc, num, block, vops) {
326 | if (ops.length) {
327 | doOp(ops[i], [ops, i, pc, num, block, vops])
328 | .then((v) => {
329 | if (ops.length > i + 1) {
330 | transactional(v[0], v[1] + 1, v[2], v[3], v[4], v[5]);
331 | } else {
332 | // if (vops) {
333 | // var Vops = [];
334 | // vops
335 | // .then((vo) => {
336 | // for (var j = 0; j < vo.length; j++) {
337 | // if (onOperation[vo[j].op[0]] !== undefined) {
338 | // var json = vo[j].op[1];
339 | // json.block_num = vo[j].block;
340 | // //json.timestamp = vo[j].timestamp
341 | // json.txid = vo[j].trx_id;
342 | // Vops.push([vo[j].op[0], json]);
343 | // }
344 | // }
345 | // if (Vops.length) {
346 | // transactional(Vops, 0, v[2], v[3], v[4]);
347 | // } else {
348 | // onNewBlock(num, v, v[4].witness_signature, {
349 | // timestamp: v[4].timestamp,
350 | // block_id: v[4].block_id,
351 | // block_number: num,
352 | // })
353 | // .then((r) => {
354 | // pc[0](pc[2]);
355 | // })
356 | // .catch((e) => {
357 | // console.log(e);
358 | // });
359 | // }
360 | // })
361 | // .catch((e) => {
362 | // console.log(e);
363 | // });
364 | // } else {
365 | onNewBlock(num, v, v[4].witness_signature, {
366 | timestamp: v[4].timestamp,
367 | block_id: v[4].block_id,
368 | block_number: num,
369 | })
370 | .then((r) => {
371 | pc[0](pc[2]);
372 | })
373 | .catch((e) => {
374 | console.log(e);
375 | });
376 | // }
377 | }
378 | })
379 | .catch((e) => {
380 | console.log(e);
381 | pc[1](e);
382 | });
383 | } else if (parseInt(block.block_id.slice(0, 8), 16) != num) {
384 | pc[0]();
385 | console.log("double");
386 | } else {
387 | onNewBlock(num, pc, block.witness_signature, {
388 | timestamp: block.timestamp,
389 | block_id: block.block_id,
390 | block_number: num,
391 | })
392 | .then((r) => {
393 | r[0]();
394 | })
395 | .catch((e) => {
396 | pc[1](e);
397 | });
398 | }
399 |
400 | function doOp(op, pc) {
401 | return new Promise((resolve, reject) => {
402 | if (op.length == 4) {
403 | onCustomJsonOperation[op[0]](op[1], op[2], op[3], [
404 | resolve,
405 | reject,
406 | pc,
407 | ]);
408 | //console.log(op[0])
409 | } else if (op.length == 2) {
410 | onOperation[op[0]](op[1], [resolve, reject, pc]);
411 | //console.log(op[0])
412 | }
413 | });
414 | }
415 |
416 | function doVop(op, pc) {
417 | return new Promise((resolve, reject) => {
418 | console.log(op, pc);
419 | onVOperation[op[0]](op[1], [resolve, reject, pc]);
420 | });
421 | }
422 | }
423 |
424 | function processBlock(Block, Pvops) {
425 | return new Promise((resolve, reject) => {
426 | var transactions = Block.transactions;
427 | let ops = [];
428 | if (parseInt(Block.block_id.slice(0, 8), 16) === nextBlock) {
429 | for (var i = 0; i < transactions.length; i++) {
430 | for (var j = 0; j < transactions[i].operations.length; j++) {
431 | var op = transactions[i].operations[j];
432 | if (op[0] === "custom_json") {
433 | //console.log('check')
434 | if (typeof onCustomJsonOperation[op[1].id] === "function") {
435 | var ip = JSON.parse(op[1].json),
436 | from = op[1].required_posting_auths[0],
437 | active = false;
438 | if (
439 | typeof ip === "string" ||
440 | typeof ip === "number" ||
441 | Array.isArray(ip)
442 | )
443 | ip = {};
444 | ip.transaction_id = transactions[i].transaction_id;
445 | ip.block_num = transactions[i].block_num;
446 | ip.timestamp = Block.timestamp;
447 | ip.prand = Block.witness_signature;
448 | if (!from) {
449 | from = op[1].required_auths[0];
450 | active = true;
451 | }
452 | ops.push([op[1].id, ip, from, active]); //onCustomJsonOperation[op[1].id](ip, from, active);
453 | }
454 | } else if (onOperation[op[0]] !== undefined) {
455 | op[1].transaction_id = transactions[i].transaction_id;
456 | op[1].block_num = transactions[i].block_num;
457 | op[1].timestamp = Block.timestamp;
458 | op[1].prand = Block.witness_signature;
459 | ops.push([op[0], op[1]]); //onOperation[op[0]](op[1]);
460 | }
461 | }
462 | }
463 | transactional(ops, 0, [resolve, reject], nextBlock, Block, Pvops);
464 | }
465 | });
466 | }
467 |
468 | return {
469 | /*
470 | Determines a state update to be called when a new operation of the id
471 | operationId (with added prefix) is computed.
472 | */
473 | on: function (operationId, callback) {
474 | onCustomJsonOperation[prefix + operationId] = callback;
475 | },
476 |
477 | onOperation: function (type, callback) {
478 | onOperation[type] = callback;
479 | },
480 |
481 | onNoPrefix: function (operationId, callback) {
482 | onCustomJsonOperation[operationId] = callback;
483 | },
484 |
485 | /*
486 | Determines a state update to be called when a new block is computed.
487 | */
488 | onBlock: function (callback) {
489 | onNewBlock = callback;
490 | },
491 |
492 | start: function () {
493 | beginBlockComputing();
494 | isStreaming = false;
495 | },
496 |
497 | getCurrentBlockNumber: function () {
498 | return nextBlock;
499 | },
500 |
501 | isStreaming: function () {
502 | return isStreaming;
503 | },
504 | onStreamingStart: function (callback) {
505 | onStreamingStart = callback;
506 | },
507 |
508 | stop: function (callback) {
509 | if (isStreaming) {
510 | isStreaming = false;
511 | stopping = true;
512 | stream = undefined;
513 | blocks.stop();
514 | setTimeout(callback, 1000);
515 | } else {
516 | blocks.stop();
517 | stopping = true;
518 | stopCallback = callback;
519 | }
520 | },
521 | };
522 | };
523 |
--------------------------------------------------------------------------------
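A hedged sketch of how the processor factory above can be wired from a host script. The dhive client construction and the handler bodies are illustrative assumptions; only the `on` / `onOperation` / `onBlock` / `start` surface comes from the object returned at the bottom of processor.js:

// Hedged wiring sketch; the client and handler bodies are illustrative.
const dhive = require("@hiveio/dhive"); // assumed Hive RPC client
const processor = require("./processor");

const client = new dhive.Client(["https://api.hive.blog"]);
const stream = processor(client, 50000000, "dlux_", "dlux-io");

// custom_json handlers are keyed by prefix + operationId, e.g. "dlux_send"
stream.on("send", (json, from, active, pc) => {
  console.log(`send from @${from}`, json);
  pc[0](pc[2]); // resolve this op so the next one in the block is processed
});

// raw chain operations, e.g. account_update used for multisig key tracking
stream.onOperation("account_update", (json, pc) => pc[0](pc[2]));

// called once per fully processed block; must return a promise
stream.onBlock((num, pc) => Promise.resolve(pc));

stream.start();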
/report.js:
--------------------------------------------------------------------------------
1 | const config = require('./config');
2 | const { plasma, VERSION } = require('./index');
3 |
4 | // tell the Hive chain your state; this is asynchronous with the IPFS return...
5 | function report(plas, con) {
6 | return new Promise((resolve, reject) => {
7 | con.then(r =>{
8 | let report = {
9 | hash: plas.hashLastIBlock,
10 | block: plas.hashBlock,
11 | stash: plas.privHash,
12 | ipfs_id: plas.id,
13 | version: VERSION
14 | }
15 | try {if(r.block > report.block){
16 | report.sig = r.sig,
17 | report.sig_block = r.block
18 | }
19 | } catch (e){}
20 | try {if(plasma.oracle){
21 | report.oracle = plasma.oracle
22 | }
23 | } catch (e){}
24 |
25 | var op = ["custom_json", {
26 | required_auths: [config.username],
27 | required_posting_auths: [],
28 | id: `${config.prefix}report`,
29 | json: JSON.stringify(report)
30 | }];
31 | delete plasma.oracle
32 | resolve([
33 | [0, 0], op
34 | ])
35 | })
36 | })
37 | }
38 | exports.report = report;
39 |
40 | function sig_submit(sign) {
41 | return new Promise((resolve, reject) => {
42 | sign.then(r =>{
43 | let report = {
44 | sig: r.sig,
45 | sig_block: r.block
46 | }
47 | var op = ["custom_json", {
48 | required_auths: [config.username],
49 | required_posting_auths: [],
50 | id: `${config.prefix}sig_submit`,
51 | json: JSON.stringify(report)
52 | }];
53 | resolve([
54 | [0, 0], op
55 | ])
56 | })
57 | })
58 | }
59 | exports.sig_submit = sig_submit;
60 |
61 | function osig_submit(sign) {
62 | return new Promise((resolve, reject) => {
63 | sign.then(r =>{
64 | let report = {
65 | sig: r.sig,
66 | sig_block: r.block
67 | }
68 | var op = ["custom_json", {
69 | required_auths: [config.username],
70 | required_posting_auths: [],
71 | id: `${config.prefix}osig_submit`,
72 | json: JSON.stringify(report)
73 | }];
74 | resolve([
75 | [0, 0], op
76 | ])
77 | })
78 | })
79 | }
80 | exports.osig_submit = osig_submit;
--------------------------------------------------------------------------------
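For reference, a hedged example of the `custom_json` operation shape that `report()` above resolves with (the second element of the `[[0, 0], op]` tuple); every field value below is a placeholder:

// Placeholder values only; the real values come from plasma and config.
const exampleOp = ["custom_json", {
  required_auths: ["node-account"],   // config.username
  required_posting_auths: [],
  id: "dlux_report",                  // `${config.prefix}report`
  json: JSON.stringify({
    hash: "Qm...",                    // plasma.hashLastIBlock
    block: 50089901,                  // plasma.hashBlock
    stash: "Qm...",                   // plasma.privHash
    ipfs_id: "12D3Koo...",            // plasma.id
    version: "1.2.3"                  // VERSION
  })
}];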
/routes/test.js:
--------------------------------------------------------------------------------
1 |
2 | //
46 | /*
47 | //
71 | */
72 | //
73 | //Append ?NFT_UID to the address bar to see that NFT. "...html?A6"
--------------------------------------------------------------------------------
/rtrades.js:
--------------------------------------------------------------------------------
1 | const config = require('./config');
2 | const fetch = require('node-fetch');
3 | //const { store } = require("./index");
4 | //const { getPathObj } = require("./getPathObj");
5 | module.exports = {
6 | ipfsVerify: function (str, pinobj) {
7 | return new Promise ((resolve, reject) => {
8 | const pins = Object.keys(pinobj)
9 | fetch(config.pinurl, {
10 |       body: `{"jsonrpc":"2.0", "method":"ipfs.stats", "params":${JSON.stringify(pins)}, "id":1}`,
11 | headers: {
12 | "Content-Type": "application/x-www-form-urlencoded"
13 | },
14 | method: "POST"
15 | })
16 | .then(got=>{
17 | // put in plasma memory and report? this verification may not scale very well
18 | // maybe load by ${str} key and return [bytes]
19 | })
20 | })
21 | }
22 | }
--------------------------------------------------------------------------------
/state.js:
--------------------------------------------------------------------------------
1 | const config = require('./config');
2 | module.exports = {
3 | "balances": {
4 | [config.leader]: 0,
5 | "spk-cc": 0, //additional distributions
6 | "ra": 0,
7 | "rb": 0,
8 | "rc": 0,
9 | "rd": 0,
10 | "re": 0,
11 | "ri": 0, //in ICO account for fixed price
12 | "rm": 0,
13 | "rn": 0,
14 | "rr": 0
15 | },
16 | "delegations": {}, //these need to be preloaded if already on account before starting block
17 | "dex": {
18 | "hbd": {
19 | "tick": "0.012500", //ICO price
20 | "buyBook": ""
21 | },
22 | "hive": {
23 | "tick": "0.100000", //ICO Price
24 | "buyBook": ""
25 | }
26 | },
27 | "gov": {
28 | [config.leader]: 1,
29 | "t": 1 //total in other accounts
30 | },
31 | "markets": {
32 | "node": {
33 | [config.leader]: {
34 | "attempts": 0,
35 | "bidRate": 2000,
36 | "contracts": 0,
37 | "domain": config.mainAPI,
38 | "escrow": true,
39 | "escrows": 0,
40 | "lastGood": 49994100, //genesisblock
41 | "marketingRate": 0,
42 |         "self": config.leader,
43 | "wins": 0,
44 | "yays": 0
45 | }
46 | }
47 | },
48 | "pow": {
49 | [config.leader]: 0,
50 | "t": 0 //total in other accounts
51 | },
52 | "queue": {
53 | "0": [config.leader]
54 | },
55 | "runners": {
56 | [config.leader]: { //config.leader
57 | "g": 1, //config.mainAPI
58 | }
59 | },
60 | "stats": {
61 | "IPFSRate": 2000,
62 | "budgetRate": 2000,
63 | "currationRate": 2000,
64 | "delegationRate": 2000,
65 | "hashLastIBlock": "Genesis",
66 | "icoPrice": 0, //in millihive
67 | "interestRate": 999999999999, //mints 1 millitoken per this many millitokens in your DAO period
68 | "lastBlock": "",
69 | "marketingRate": 2500,
70 | "maxBudget": 1000000000,
71 | "MSHeld":{
72 | "HIVE": 0,
73 | "HBD": 0
74 | },
75 | "nodeRate": 2000,
76 | "outOnBlock": 0, //amm ICO pricing
77 | "savingsRate": 1000,
78 | "tokenSupply": 1 //your starting token supply
79 | }
80 | }
--------------------------------------------------------------------------------
/test/test_blocks.js:
--------------------------------------------------------------------------------
1 | [{ // modify to create a test block stream to process; include good and bad transactions. Blocks can be processed much faster than real time, and block numbers can jump to exercise chrono operations.
2 | previous: '02fc4fac0f2417eed8f14ba83f8b830fb89416e5',
3 | timestamp: '2021-01-02T14:29:57',
4 | witness: 'roelandp',
5 | transaction_merkle_root: '32b9197fc9f89db829a1211f4e105ac2aa174b6e',
6 | extensions: [],
7 | witness_signature: '1f04b353830a55067d7f8b93bbcb5393a99dfb7318fe29f344eda779e3555fa5b37823e8ce597322f187b38d7520d58c3c0edab8d7cf3884f427831eeb4cf5c381',
8 | transactions: [{
9 | ref_block_num: 20376,
10 | ref_block_prefix: 613751574,
11 | expiration: '2021-01-02T14:30:21',
12 | operations: [Array],
13 | extensions: [],
14 | signatures: [Array],
15 | transaction_id: '6926ccb0dac507fcf92be1c77a22cb3d993c9041',
16 | block_num: 50089901,
17 | transaction_num: 0
18 | },
19 | {
20 | ref_block_num: 20392,
21 | ref_block_prefix: 2334995049,
22 | expiration: '2021-01-02T14:30:52',
23 | operations: [Array],
24 | extensions: [],
25 | signatures: [Array],
26 | transaction_id: '0f1452684b12987c53903e7638c3bea24180c86b',
27 | block_num: 50089901,
28 | transaction_num: 1
29 | },
30 | {
31 | ref_block_num: 20375,
32 | ref_block_prefix: 2116988684,
33 | expiration: '2021-01-02T14:39:48',
34 | operations: [Array],
35 | extensions: [],
36 | signatures: [Array],
37 | transaction_id: '50a82370e3a31123f08d25268595202842b094de',
38 | block_num: 50089901,
39 | transaction_num: 2
40 | },
41 | {
42 | ref_block_num: 20395,
43 | ref_block_prefix: 1250065605,
44 | expiration: '2021-01-02T14:30:51',
45 | operations: [Array],
46 | extensions: [],
47 | signatures: [Array],
48 | transaction_id: 'ef7c8caed4576d42314ba994d3745f428c06d929',
49 | block_num: 50089901,
50 | transaction_num: 3
51 | },
52 | {
53 | ref_block_num: 20396,
54 | ref_block_prefix: 3994493967,
55 | expiration: '2021-01-02T14:30:54',
56 | operations: [Array],
57 | extensions: [],
58 | signatures: [Array],
59 | transaction_id: '1793ae5be40b8e073b83c3dbe50b278c9b66f5b1',
60 | block_num: 50089901,
61 | transaction_num: 4
62 | },
63 | {
64 | ref_block_num: 20396,
65 | ref_block_prefix: 3994493967,
66 | expiration: '2021-01-02T14:30:54',
67 | operations: [Array],
68 | extensions: [],
69 | signatures: [Array],
70 | transaction_id: '1d22f36437587edef4c95b199addd76076420da1',
71 | block_num: 50089901,
72 | transaction_num: 5
73 | },
74 | {
75 | ref_block_num: 20375,
76 | ref_block_prefix: 2116988684,
77 | expiration: '2021-01-02T14:39:48',
78 | operations: [Array],
79 | extensions: [],
80 | signatures: [Array],
81 | transaction_id: 'c67ce26d12f4a67c31ad22222ea1e79bf8268d51',
82 | block_num: 50089901,
83 | transaction_num: 6
84 | },
85 | {
86 | ref_block_num: 20396,
87 | ref_block_prefix: 3994493967,
88 | expiration: '2021-01-02T14:30:54',
89 | operations: [Array],
90 | extensions: [],
91 | signatures: [Array],
92 | transaction_id: '0d6514a88a4a9a0ab6f6bd5278e006863758070e',
93 | block_num: 50089901,
94 | transaction_num: 7
95 | },
96 | {
97 | ref_block_num: 20395,
98 | ref_block_prefix: 1250065605,
99 | expiration: '2021-01-02T14:31:21',
100 | operations: [Array],
101 | extensions: [],
102 | signatures: [Array],
103 | transaction_id: 'ff801f718fef2571d9535ca233d41a87dd765333',
104 | block_num: 50089901,
105 | transaction_num: 8
106 | },
107 | {
108 | ref_block_num: 20396,
109 | ref_block_prefix: 3994493967,
110 | expiration: '2021-01-02T14:30:54',
111 | operations: [Array],
112 | extensions: [],
113 | signatures: [Array],
114 | transaction_id: 'd9167bdad9c40f43a0de89b6f74e95469d9893ef',
115 | block_num: 50089901,
116 | transaction_num: 9
117 | },
118 | {
119 | ref_block_num: 20396,
120 | ref_block_prefix: 3994493967,
121 | expiration: '2021-01-02T14:30:54',
122 | operations: [Array],
123 | extensions: [],
124 | signatures: [Array],
125 | transaction_id: '06fcf7ab6660ce7c5e3f3d5242f7e0eb40bd8dcf',
126 | block_num: 50089901,
127 | transaction_num: 10
128 | },
129 | {
130 | ref_block_num: 20396,
131 | ref_block_prefix: 3994493967,
132 | expiration: '2021-01-02T14:30:54',
133 | operations: [Array],
134 | extensions: [],
135 | signatures: [Array],
136 | transaction_id: '03ef8cea495fe774678fc04e185a96bc77ec5868',
137 | block_num: 50089901,
138 | transaction_num: 11
139 | },
140 | {
141 | ref_block_num: 20396,
142 | ref_block_prefix: 3994493967,
143 | expiration: '2021-01-02T14:30:54',
144 | operations: [Array],
145 | extensions: [],
146 | signatures: [Array],
147 | transaction_id: '56a52e7920a803f1be72223870d3399352a3816b',
148 | block_num: 50089901,
149 | transaction_num: 12
150 | },
151 | {
152 | ref_block_num: 20375,
153 | ref_block_prefix: 2116988684,
154 | expiration: '2021-01-02T14:39:51',
155 | operations: [Array],
156 | extensions: [],
157 | signatures: [Array],
158 | transaction_id: '119976dab260f2c1a70bb606b3dc020a6da9412d',
159 | block_num: 50089901,
160 | transaction_num: 13
161 | },
162 | {
163 | ref_block_num: 20375,
164 | ref_block_prefix: 2116988684,
165 | expiration: '2021-01-02T14:39:51',
166 | operations: [Array],
167 | extensions: [],
168 | signatures: [Array],
169 | transaction_id: '332097c99ef4c306b4c1bfa8ea15c146331faa2d',
170 | block_num: 50089901,
171 | transaction_num: 14
172 | },
173 | {
174 | ref_block_num: 20375,
175 | ref_block_prefix: 2116988684,
176 | expiration: '2021-01-02T14:39:51',
177 | operations: [Array],
178 | extensions: [],
179 | signatures: [Array],
180 | transaction_id: 'c3d4fca1a07d90d248b1a1fdecd396ce6c87badb',
181 | block_num: 50089901,
182 | transaction_num: 15
183 | },
184 | {
185 | ref_block_num: 20374,
186 | ref_block_prefix: 2782030706,
187 | expiration: '2021-01-02T14:39:45',
188 | operations: [Array],
189 | extensions: [],
190 | signatures: [Array],
191 | transaction_id: '13050e5047d6e8c1707490441fda46c0399f3e0d',
192 | block_num: 50089901,
193 | transaction_num: 16
194 | },
195 | {
196 | ref_block_num: 20375,
197 | ref_block_prefix: 2116988684,
198 | expiration: '2021-01-02T14:39:51',
199 | operations: [Array],
200 | extensions: [],
201 | signatures: [Array],
202 | transaction_id: 'b647a840f6ad61947f8e32bba93171f8a3e7a1d9',
203 | block_num: 50089901,
204 | transaction_num: 17
205 | },
206 | {
207 | ref_block_num: 20375,
208 | ref_block_prefix: 2116988684,
209 | expiration: '2021-01-02T14:39:51',
210 | operations: [Array],
211 | extensions: [],
212 | signatures: [Array],
213 | transaction_id: 'ea89740ddefe6b959655bbb0844eaa59dcd4617b',
214 | block_num: 50089901,
215 | transaction_num: 18
216 | },
217 | {
218 | ref_block_num: 20375,
219 | ref_block_prefix: 2116988684,
220 | expiration: '2021-01-02T14:39:51',
221 | operations: [Array],
222 | extensions: [],
223 | signatures: [Array],
224 | transaction_id: '5e58ee85659bf7c0ed55e6cf46ed131acd52a9c5',
225 | block_num: 50089901,
226 | transaction_num: 19
227 | }
228 | ],
229 | block_id: '02fc4fadc3bbeca0e100c8ea60b12f51722ad50f',
230 | signing_key: 'STM5W57KPGhKDMsstLAz1Xr7CiCqrM9YnKi4tH7GgZHaVn1WYsMnE',
231 | transaction_ids: ['6926ccb0dac507fcf92be1c77a22cb3d993c9041',
232 | '0f1452684b12987c53903e7638c3bea24180c86b',
233 | '50a82370e3a31123f08d25268595202842b094de',
234 | 'ef7c8caed4576d42314ba994d3745f428c06d929',
235 | '1793ae5be40b8e073b83c3dbe50b278c9b66f5b1',
236 | '1d22f36437587edef4c95b199addd76076420da1',
237 | 'c67ce26d12f4a67c31ad22222ea1e79bf8268d51',
238 | '0d6514a88a4a9a0ab6f6bd5278e006863758070e',
239 | 'ff801f718fef2571d9535ca233d41a87dd765333',
240 | 'd9167bdad9c40f43a0de89b6f74e95469d9893ef',
241 | '06fcf7ab6660ce7c5e3f3d5242f7e0eb40bd8dcf',
242 | '03ef8cea495fe774678fc04e185a96bc77ec5868',
243 | '56a52e7920a803f1be72223870d3399352a3816b',
244 | '119976dab260f2c1a70bb606b3dc020a6da9412d',
245 | '332097c99ef4c306b4c1bfa8ea15c146331faa2d',
246 | 'c3d4fca1a07d90d248b1a1fdecd396ce6c87badb',
247 | '13050e5047d6e8c1707490441fda46c0399f3e0d',
248 | 'b647a840f6ad61947f8e32bba93171f8a3e7a1d9',
249 | 'ea89740ddefe6b959655bbb0844eaa59dcd4617b',
250 | '5e58ee85659bf7c0ed55e6cf46ed131acd52a9c5'
251 | ]
252 | }]
--------------------------------------------------------------------------------
/test/test_state.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | "balances": {
3 | "leader": 1000000,
4 | "test-from": 1000000, //additional distributions
5 | "test-to": 1000000,
6 | "ra": 0,
7 | "rb": 0,
8 | "rc": 0,
9 | "rd": 0,
10 | "re": 0,
11 | "ri": 100000000, //in ICO account for fixed price
12 | "rm": 0,
13 | "rn": 0,
14 | "rr": 0
15 | },
16 | "delegations": {}, //these need to be preloaded if already on account before starting block
17 | "dex": {
18 | "hbd": {
19 | "tick": "0.012500" //ICO price
20 | },
21 | "hive": {
22 | "tick": "0.100000" //ICO Price
23 | }
24 | },
25 | "markets": {
26 | "node": {
27 | "leader": {
28 | "attempts": 0,
29 | "bidRate": 2000,
30 | "contracts": 0,
31 | "domain": "localhost",
32 | "escrow": true,
33 | "escrows": 0,
34 | "lastGood": 1, //genesisblock
35 | "marketingRate": 0,
36 | "self": "leader",
37 | "wins": 0,
38 | "yays": 0
39 | }
40 | }
41 | },
42 | "pow": {
43 | "leader": 100000000,
44 | "t": 100000000 //total in other accounts
45 | },
46 | "queue": {
47 | "0": "leader"
48 | },
49 | "runners": {
50 | "leader": { //config.leader
51 | "domain": "localhost", //config.mainAPI
52 | "self": "leader" //config.leader
53 | }
54 | },
55 | "stats": {
56 | "IPFSRate": 2000,
57 | "budgetRate": 2000,
58 | "currationRate": 2000,
59 | "delegationRate": 2000,
60 | "hashLastIBlock": "Genesis",
61 | "icoPrice": 100, //in millihive
62 | "interestRate": 2100000, //mints 1 millitoken per this many millitokens in your DAO period
63 | "lastBlock": "",
64 | "marketingRate": 2500,
65 | "maxBudget": 1000000000,
66 | "nodeRate": 2000,
67 | "outOnBlock": 0, //amm ICO pricing
68 | "reblogReward": 10000, //unused
69 | "savingsRate": 1000,
70 | "tokenSupply": 203000000 //your starting token supply
71 | }
72 | }
--------------------------------------------------------------------------------
/voter.js:
--------------------------------------------------------------------------------
1 | const { getPathObj, deleteObjs } = require("./getPathObj");
2 | const { store } = require("./index");
3 | const config = require('./config');
4 |
5 | //determine consensus... needs some work with memory management
6 | exports.voter = () => {
7 | return new Promise((resolve, reject) => {
8 | var Ppending = getPathObj(['pendingvote'])
9 | Promise.all([Ppending]).then(function(v) {
10 | deleteObjs([
11 | ['pendingvote']
12 | ])
13 | .then(empty => {
14 | let posts = v[0],
15 | totalWeight = 0,
16 | ops = []
17 |                     for (let post in posts) {
18 | totalWeight += posts[post].v
19 | }
20 |                     for (let post in posts) {
21 | let b = {
22 | author: post.split('/')[0],
23 | permlink: post.split('/')[1]
24 | }
25 | ops.push({
26 | type: 'put',
27 | path: ['escrow', config.leader, `vote:${b.author}/${b.permlink}`],
28 | data: ["vote",
29 | {
30 | "voter": config.leader,
31 | "author": b.author,
32 | "permlink": b.permlink,
33 | "weight": parseInt((posts[post].v / totalWeight) * 10000)
34 | }
35 | ]
36 | })
37 | }
38 | if (ops.length) {
39 | store.batch(ops, [resolve, reject, 1])
40 | } else {
41 | resolve(1)
42 | }
43 | })
44 | .catch(e => { console.log(e) })
45 | })
46 | .catch(e => console.log(e))
47 | })
48 | }
--------------------------------------------------------------------------------
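`voter()` above normalizes the pending vote weights so the leader's outgoing Hive votes are proportional to each post's accumulated value and sum to roughly 10000 (one full-power vote) per cycle. A worked sketch of that normalization with illustrative inputs:

// Worked sketch; inputs are illustrative.
function normalizeVoteWeights(posts) {
  let totalWeight = 0;
  for (let post in posts) totalWeight += posts[post].v;
  const weights = {};
  for (let post in posts) {
    weights[post] = parseInt((posts[post].v / totalWeight) * 10000);
  }
  return weights;
}

// normalizeVoteWeights({ "alice/p1": { v: 300 }, "bob/p2": { v: 100 } })
// -> { "alice/p1": 7500, "bob/p2": 2500 }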