├── .babelrc ├── .env.sample ├── .gitignore ├── .prettierrc.json ├── .sequelizerc ├── Dockerfile ├── LICENSE ├── Procfile ├── README.md ├── bin └── pg_backup_to_s3 ├── dump.rdb ├── id.json ├── package.json ├── src ├── app.js ├── config │ ├── config.js │ └── migrations │ │ ├── 20210108180313-create-maker.js │ │ ├── 20210111010553-create-hotspot.js │ │ ├── 20210111171614-add-hotspot-indexes.js │ │ ├── 20210113164305-create-token.js │ │ ├── 20210113173036-remove-api-key-from-makers.js │ │ ├── 20210114001809-add-name-to-tokens.js │ │ ├── 20230714184637-widen-keys.js │ │ └── 20230922213538-add-device-type-to-hotspots.js ├── controllers │ ├── hotspotsController.js │ ├── makersController.js │ ├── transactionsController.js │ └── v3TransactionsController.js ├── helpers │ ├── index.js │ └── solana.js ├── models │ ├── hotspot.js │ ├── index.js │ ├── maker.js │ └── token.js ├── routes │ ├── apiRouter.js │ ├── appRouter.js │ └── indexRouter.js ├── server.js └── tasks │ ├── burn_tokens.js │ ├── create_maker.js │ ├── create_token.js │ ├── export_maker.js │ └── report_onboards.js └── yarn.lock /.babelrc: -------------------------------------------------------------------------------- 1 | { 2 | "presets": [ 3 | "@babel/preset-env" 4 | ], 5 | "plugins": [ 6 | ["@babel/transform-runtime", { 7 | "regenerator": true 8 | }] 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /.env.sample: -------------------------------------------------------------------------------- 1 | KEYRING={"1":"uDiMcWVNTuz//naQ88sOcN+E40CyBRGzGTT7OkoBS6M="} 2 | KEYRING_SALT=development_salt 3 | APP_USERNAME=app_user 4 | APP_PASSWORD=app_password 5 | ANCHOR_WALLET=/Users//.config/solana/id.json 6 | PGDATABASE= 7 | PGHOST= 8 | PGPORT= 9 | PGUSER= 10 | PGPASSWORD= 11 | AWS_REGION= 12 | ENABLE_SOLANA=true 13 | IOT_MINT=iotEVVZLEywoTn1QdwNPddxPWszn3zFhEot3MfL9fns 14 | MOBILE_MINT=mb1eu7TzEc71KxDpsmsKoucSSuuoGLv1drys1oP2jh6 15 | HNT_MINT=hntyVP6YFm1Hg25TN9WGLqM12b8TQmcknKrdu1oxWux 16 | ECC_VERIFIER=eccSAJM3tq7nQSpQTm8roxv4FPoipCkMsGizW2KBhqZ 17 | ASSET_API_URL= 18 | ECC_VERIFY_ENDPOINT=https://ecc-verifier.web.test-helium.com/verify 19 | ECC_VERIFIER=eccSAJM3tq7nQSpQTm8roxv4FPoipCkMsGizW2KBhqZ 20 | SOLANA_URL=https://solana-rpc.web.test-helium.com?session-key=Pluto 21 | RATE_LIMIT_MAX=1000 22 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | build 3 | .env 4 | -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "singleQuote": true, 3 | "trailingComma": "all", 4 | "semi": false 5 | } 6 | -------------------------------------------------------------------------------- /.sequelizerc: -------------------------------------------------------------------------------- 1 | var path = require('path') 2 | 3 | module.exports = { 4 | 'config': path.resolve('src', 'config/config.js'), 5 | 'migrations-path': path.resolve('src', 'config', 'migrations'), 6 | 'models-path': path.resolve('src', 'models'), 7 | 'seeders-path': path.resolve('config', 'seeders'), 8 | } 9 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:22-alpine AS BUILD_IMAGE 2 | 3 | WORKDIR /usr/src/app 4 | 5 | COPY package.json 
./ 6 | COPY yarn.lock ./ 7 | 8 | COPY .babelrc . 9 | RUN yarn install 10 | 11 | COPY src src 12 | 13 | RUN yarn run build 14 | RUN npm prune --production 15 | 16 | FROM node:22-alpine 17 | 18 | WORKDIR /usr/src/app 19 | 20 | COPY --from=BUILD_IMAGE /usr/src/app/build ./build 21 | COPY --from=BUILD_IMAGE /usr/src/app/node_modules ./node_modules 22 | 23 | # This isn't actually used, service is read only. But anchor wants a wallet. 24 | RUN echo "[124,96,181,146,132,165,175,182,60,194,167,230,29,91,110,109,226,38,41,155,207,186,24,33,205,120,108,98,218,67,77,95,13,60,79,204,253,10,183,101,60,94,220,177,117,97,16,29,31,124,35,65,121,147,161,114,159,23,207,202,122,164,170,201]" > id.json 25 | 26 | env ANCHOR_WALLET=/usr/src/app/id.json 27 | 28 | CMD ["node", "build/server.js"] 29 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 
48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright 2021 Helium Systems, Inc. 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /Procfile: -------------------------------------------------------------------------------- 1 | web: npm start 2 | release: yarn db:migrate 3 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Onboarding Server 2 | The onboarding server allows makers to manage their hotspot production and permit hotspot customers to onboard them to the blockchain. 
3 | 4 | ## Development 5 | 6 | Before running the following commands, make sure to have a running postgres db instance. 7 | 8 | - clone the repo 9 | - run `yarn install` 10 | - initialize the database with `yarn db:create` 11 | - run the migrations with `yarn db:migrate` 12 | - run `yarn task src/tasks/create_maker.js` and follow the prompts to create or import a Maker record 13 | - run the dev server with `yarn dev` 14 | - the dev server will be accessible at `localhost:3002` 15 | 16 | ## Production 17 | 18 | When running in production, make sure to set the following env vars to secure random values: 19 | 20 | | Environment Variable | Description | Example | 21 | |----------------------|-------------------------------------------|------------------------------------------------------| 22 | | KEYRING | JSON keyring object | {"1":"2xNJEZvMlr99yPqGfh0sa7pO7j1tH73RTU9qJwwi4bs="} | 23 | | KEYRING_SALT | Additional entropy for keyring encryption | WmcKZ46ciIZqTvXm9TMd5V63b8k6iw/tVkcv/qEI0KU= | 24 | 25 | (add more important vars here) 26 | 27 | 28 | The recommended way of generating secure secrets is: 29 | 30 | ``` 31 | dd if=/dev/urandom bs=32 count=1 2>/dev/null | openssl base64 -A 32 | ``` 33 | 34 | Refer to [https://github.com/fnando/keyring-node](https://github.com/fnando/keyring-node) for additional details about how the at-rest encryption with keyring works. 35 | 36 | ### Rate Limiting 37 | 38 | In production it is recommended to enable rate limiting to prevent nefarious users from scraping or brute forcing the APIs. Rate limiting relies on Redis to store IP addresses and request counts. Redis connection info is supplied through a `REDIS_URL` env var. 39 | 40 | ### Heroku 41 | 42 | The onboarding server is configured to run on Heroku out of the box. It will use the nodejs buildpack, and comes with a Procfile that defines the web resource command as `npm start`. Additionally, the free Heroku Redis add-on can be installed and will automatically be picked up by the rate limiter. 43 | 44 | Admin tasks can be run on Heroku like so: `heroku run yarn task src/tasks/create_maker.js` 45 | 46 | ## Admin Tasks 47 | 48 | The following tasks are included for managing admin functionality of the onboarding server: 49 | 50 | ### Create Maker 51 | 52 | `yarn task src/tasks/create_maker.js` 53 | 54 | Creates a Maker account taking name, location nonce limit and optionally an exported wallet entropy string. You can also choose to create a Maker API token for the newly created Maker. 55 | ### Create Token 56 | 57 | `yarn task src/tasks/create_token.js` 58 | 59 | Creates a Maker API token for the selected Maker. 60 | 61 | ### Export Maker 62 | 63 | `yarn task src/tasks/export_maker.js` 64 | 65 | Exports the **unencrypted** wallet entropy seed to be imported into another onboarding server instance. 66 | 67 | ## Onboarding Server Maker API 68 | 69 | - Base route: `/api/v2` 70 | 71 | ### Authentication 72 | With all requests, include an `authorization` header where the value is your public token and private token joined with a colon (`:`). For example: 73 | 74 | ``` 75 | curl --location --request GET 'https://onboarding.example.com/api/v2/hotspots' \ 76 | --header 'authorization: pk_INSERT_REST_OF_PUBLIC_TOKEN:sk_INSERT_REST_OF_SECRET_TOKEN' 77 | ``` 78 | 79 | ### Other Headers 80 | With all POST and PUT requests that contain a body, include the `Content-Type: application/json` header. 
For example: 81 | 82 | ``` 83 | curl --location --request POST 'https://onboarding.example.com/api/v2/hotspots' \ 84 | --header 'Authorization: auth_tokens_here' \ 85 | --header 'Content-Type: application/json' \ 86 | --data-raw '{ 87 | "macWlan0": "example mac" 88 | }' 89 | ``` 90 | 91 | ### Hotspots 92 | This API allows a Maker to create and manage their Hotspots. 93 | 94 | #### Index 95 | Returns all Hotspots that a Maker has created in a paginated fashion. Page number and size can be controlled by optional url params. 96 | 97 | ##### Route: 98 | `GET /hotspots` 99 | 100 | #### Params: 101 | | Param | Required | Default Value | Description | 102 | |----------|----------|---------------|------------------------------------------------| 103 | | page | no | 0 | Page number used to paginate index of Hotspots | 104 | | pageSize | no | 100 | Number of Hotspots returned per page | 105 | 106 | ##### Example Request: 107 | `GET /hotspots?page=5&pageSize=100` 108 | 109 | #### Show 110 | Returns an individual Hotspot identified by its ID. 111 | 112 | ##### Route: 113 | `GET /hotspots/:id` 114 | 115 | ##### Example Request: 116 | `GET /hotspots/123` 117 | 118 | #### Search 119 | Searches for a Hotspot based on any of its attributes. 120 | 121 | ##### Route: 122 | `GET /hotspots/search` 123 | 124 | ##### Params: 125 | | Param | Required | Default Value | Description | 126 | |---------------|----------|---------------|----------------------------------------------------------------| 127 | | onboardingKey | no | NULL | A unique key that is used to identify a Hotspot for onboarding | 128 | | macWlan0 | no | NULL | Mac address of the wifi component | 129 | | macEth0 | no | NULL | Mac address of the ethernet component | 130 | | rpiSerial | no | NULL | Serial number of the Raspberry Pi unit | 131 | | heliumSerial | no | NULL | Serial of the Helium unit | 132 | | batch | no | NULL | A string used to identify manufacturing batches | 133 | | publicAddress | no | NULL | The b58 public address of the Hotspot that was onboarded | 134 | 135 | ##### Example Request 136 | `GET /hotspots/search?macWlan0=examplemac` 137 | 138 | #### Create 139 | Creates a new Hotspot entry. Hotspot details should be provided as a json object in the body of the POST request. 140 | 141 | ##### Route: 142 | `POST /hotspots` 143 | 144 | ##### Params: 145 | 146 | | Param | Required | Default Value | Description | 147 | |---------------|----------|---------------|----------------------------------------------------------------| 148 | | onboardingKey | no | NULL | A unique key that is used to identify a Hotspot for onboarding | 149 | | macWlan0 | no | NULL | Mac address of the wifi component | 150 | | macEth0 | no | NULL | Mac address of the ethernet component | 151 | | rpiSerial | no | NULL | Serial number of the Raspberry Pi unit | 152 | | heliumSerial | no | NULL | Serial of the Helium unit | 153 | | batch | no | NULL | A string used to identify manufacturing batches | 154 | 155 | ##### Example Request 156 | 157 | `POST /hotspots` 158 | 159 | request body: 160 | ```json 161 | { 162 | "onboardingKey": "example-onboarding-key", 163 | "macWlan0": "22:98:17:a3:03:90", 164 | "macEth0": "e4:0b:59:88:27:5f", 165 | "rpiSerial": "example-rpi-serial", 166 | "heliumSerial": "example-helium-serial", 167 | "batch": "example-batch" 168 | } 169 | ``` 170 | 171 | #### Update 172 | Updates attributes of a Hotspot. 
**NOTE:** once a Hotspot has been onboarded and the `publicAddress` field set, it becomes immutable and cannot be changed or deleted by the Maker. 173 | 174 | ##### Route: 175 | `PUT /hotspots/:id` 176 | 177 | ##### Params: 178 | 179 | | Param | Required | Default Value | Description | 180 | |---------------|----------|---------------|----------------------------------------------------------------| 181 | | onboardingKey | no | NULL | A unique key that is used to identify a Hotspot for onboarding | 182 | | macWlan0 | no | NULL | Mac address of the wifi component | 183 | | macEth0 | no | NULL | Mac address of the ethernet component | 184 | | rpiSerial | no | NULL | Serial number of the Raspberry Pi unit | 185 | | heliumSerial | no | NULL | Serial of the Helium unit | 186 | | batch | no | NULL | A string used to identify manufacturing batches | 187 | 188 | ##### Example Request: 189 | `PUT /hotspots/123` 190 | 191 | request body: 192 | ```json 193 | { 194 | "rpiSerial": "updated-rpi-serial", 195 | "heliumSerial": "updated-helium-serial" 196 | } 197 | ``` 198 | 199 | #### Destroy 200 | Deletes a Hotspot record identified by ID. **NOTE:** once a Hotspot has been onboarded and the `publicAddress` field set, it becomes immutable and cannot be changed or deleted by the Maker. 201 | 202 | ##### Route: 203 | `DELETE /hotspots/:id` 204 | 205 | ##### Params: 206 | 207 | | Param | Required | Default Value | Description | 208 | |---------------|----------|---------------|----------------------------------------------------------------| 209 | | onboardingKey | no | NULL | A unique key that is used to identify a Hotspot for onboarding | 210 | | macWlan0 | no | NULL | Mac address of the wifi component | 211 | | macEth0 | no | NULL | Mac address of the ethernet component | 212 | | rpiSerial | no | NULL | Serial number of the Raspberry Pi unit | 213 | | heliumSerial | no | NULL | Serial of the Helium unit | 214 | | batch | no | NULL | A string used to identify manufacturing batches | 215 | 216 | ##### Example Request: 217 | `DELETE /hotspots/123` 218 | -------------------------------------------------------------------------------- /bin/pg_backup_to_s3: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # Set the script to fail fast if there 4 | # is an error or a missing variable 5 | 6 | set -eu 7 | set -o pipefail 8 | 9 | # Download the latest backup from 10 | # Heroku and gzip it 11 | 12 | heroku pg:backups:download --output=/tmp/pg_backup.dump --app $APP_NAME 13 | gzip /tmp/pg_backup.dump 14 | 15 | # Encrypt the gzipped backup file 16 | # using GPG passphrase 17 | 18 | gpg --yes --batch --passphrase=$PG_BACKUP_PASSWORD -c /tmp/pg_backup.dump.gz 19 | 20 | # Remove the plaintext backup file 21 | 22 | rm /tmp/pg_backup.dump.gz 23 | 24 | # Generate backup filename based 25 | # on the current date 26 | 27 | BACKUP_FILE_NAME="heroku-backup-$(date '+%Y-%m-%d_%H.%M').gpg" 28 | 29 | # Upload the file to S3 using 30 | # AWS CLI 31 | 32 | aws s3 cp /tmp/pg_backup.dump.gz.gpg "s3://${S3_BUCKET_NAME}/${BACKUP_FILE_NAME}" 33 | 34 | # Remove the encrypted backup file 35 | 36 | rm /tmp/pg_backup.dump.gz.gpg 37 | -------------------------------------------------------------------------------- /dump.rdb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/helium/onboarding-server/23c9d57a4a76c0d7f591906503a7a284954a1177/dump.rdb --------------------------------------------------------------------------------
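The backup script above only covers the upload path. As a reference, here is a minimal sketch of the reverse direction: pulling one of these encrypted dumps back down and restoring it locally. It assumes the same `S3_BUCKET_NAME` and `PG_BACKUP_PASSWORD` values used by `bin/pg_backup_to_s3`, a target database named by `PGDATABASE`, and a placeholder object name for the file you want to restore; depending on your GnuPG version, symmetric decryption may additionally require `--pinentry-mode loopback`.

```
# Sketch only: restore a backup produced by bin/pg_backup_to_s3.
# BACKUP_FILE_NAME is a placeholder for an object listed in the bucket.
aws s3 cp "s3://${S3_BUCKET_NAME}/${BACKUP_FILE_NAME}" /tmp/restore.dump.gz.gpg
gpg --yes --batch --passphrase="$PG_BACKUP_PASSWORD" \
    -o /tmp/restore.dump.gz -d /tmp/restore.dump.gz.gpg
gunzip /tmp/restore.dump.gz
pg_restore --no-owner --clean -d "$PGDATABASE" /tmp/restore.dump
```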
/id.json: -------------------------------------------------------------------------------- 1 | [124,96,181,146,132,165,175,182,60,194,167,230,29,91,110,109,226,38,41,155,207,186,24,33,205,120,108,98,218,67,77,95,13,60,79,204,253,10,183,101,60,94,220,177,117,97,16,29,31,124,35,65,121,147,161,114,159,23,207,202,122,164,170,201] 2 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "onboarding-server", 3 | "version": "1.0.1", 4 | "private": true, 5 | "scripts": { 6 | "dev": "nodemon -r dotenv/config --exec babel-node ./src/server.js", 7 | "build": "babel src --out-dir build --copy-files", 8 | "start": "node build/server.js", 9 | "clean": "rimraf build", 10 | "db:create": "npx sequelize-cli db:create", 11 | "db:migrate": "npx sequelize-cli db:migrate", 12 | "db:migrate:undo": "npx sequelize-cli db:migrate:undo", 13 | "task": "DISABLE_DB_LOGGING=true node -r dotenv/config" 14 | }, 15 | "dependencies": { 16 | "@babel/cli": "^7.12.10", 17 | "@babel/core": "^7.12.10", 18 | "@babel/node": "^7.12.10", 19 | "@babel/plugin-transform-runtime": "^7.12.10", 20 | "@babel/preset-env": "^7.12.11", 21 | "@fnando/keyring": "^0.4.0", 22 | "@helium/account-fetch-cache": "^0.10.0", 23 | "@helium/address": "^4.12.0", 24 | "@helium/crypto": "^4.12.0", 25 | "@helium/data-credits-sdk": "^0.10.0", 26 | "@helium/helium-entity-manager-sdk": "^0.10.0", 27 | "@helium/helium-sub-daos-sdk": "^0.10.0", 28 | "@helium/http": "^4.12.0", 29 | "@helium/spl-utils": "^0.10.0", 30 | "@helium/transactions": "^4.12.0", 31 | "@metaplex-foundation/mpl-bubblegum": "^0.7.0", 32 | "@noble/ed25519": "^2.1.0", 33 | "@project-serum/anchor": "^0.25.0", 34 | "@solana/spl-account-compression": "^0.1.5", 35 | "@solana/web3.js": "^1.87.6", 36 | "aws-sdk": "^2.1318.0", 37 | "axios": "^1.3.4", 38 | "bcryptjs": "^2.4.3", 39 | "camelcase-keys": "^6.2.2", 40 | "compression": "^1.7.4", 41 | "cookie-parser": "~1.4.4", 42 | "cors": "^2.8.5", 43 | "debug": "~2.6.9", 44 | "dotenv": "^8.2.0", 45 | "express": "~4.16.1", 46 | "express-basic-auth": "^1.2.0", 47 | "express-rate-limit": "^5.2.3", 48 | "ioredis": "^4.19.4", 49 | "morgan": "~1.9.1", 50 | "pg": "^8.5.1", 51 | "pg-hstore": "^2.3.3", 52 | "prompts": "^2.4.2", 53 | "rate-limit-redis": "^2.0.0", 54 | "sequelize": "^6.3.5", 55 | "sequelize-cli": "^6.2.0", 56 | "snakecase-keys": "^3.2.1", 57 | "typescript-collections": "^1.3.3" 58 | }, 59 | "resolutions": { 60 | "@helium/address": "^4.12.0", 61 | "@solana/web3.js": "^1.87.6" 62 | }, 63 | "devDependencies": { 64 | "nodemon": "^2.0.7", 65 | "prettier": "^2.2.1", 66 | "rimraf": "^3.0.2" 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /src/app.js: -------------------------------------------------------------------------------- 1 | const express = require('express') 2 | const cookieParser = require('cookie-parser') 3 | const logger = require('morgan') 4 | const compression = require('compression') 5 | const env = process.env.NODE_ENV || 'development' 6 | 7 | import indexRouter from './routes/indexRouter' 8 | import apiRouter from './routes/apiRouter' 9 | import appRouter from './routes/appRouter' 10 | 11 | var app = express() 12 | 13 | if (env === 'production') { 14 | app.enable('trust proxy') 15 | app.use(compression()) 16 | } 17 | 18 | app.use(logger('dev')) 19 | app.use(express.json()) 20 | app.use(express.urlencoded({ extended: false })) 21 | 
app.use(cookieParser()) 22 | 23 | app.use('/', indexRouter) 24 | app.use('/api', apiRouter) 25 | app.use('/app', appRouter) 26 | 27 | module.exports = app 28 | -------------------------------------------------------------------------------- /src/config/config.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | require('dotenv').config(); 3 | 4 | module.exports = { 5 | development: { 6 | username: process.env.PGUSER, 7 | password: process.env.PGPASSWORD, 8 | database: process.env.PGDATABASE, 9 | port: process.env.PGPORT, 10 | host: '127.0.0.1', 11 | dialect: 'postgres', 12 | }, 13 | production: { 14 | username: process.env.PGUSER, 15 | password: process.env.PGPASSWORD, 16 | database: process.env.PGDATABASE, 17 | port: process.env.PGPORT, 18 | host: process.env.PGHOST, 19 | dialect: 'postgres', 20 | dialectOptions: { 21 | "ssl": { 22 | "require": true, 23 | "rejectUnauthorized": false 24 | }, 25 | } 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /src/config/migrations/20210108180313-create-maker.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | up: async (queryInterface, Sequelize) => { 3 | await queryInterface.createTable('makers', { 4 | id: { 5 | allowNull: false, 6 | autoIncrement: true, 7 | primaryKey: true, 8 | type: Sequelize.INTEGER, 9 | }, 10 | name: { 11 | allowNull: false, 12 | type: Sequelize.STRING, 13 | }, 14 | address: { 15 | type: Sequelize.STRING, 16 | }, 17 | api_key: { 18 | type: Sequelize.STRING, 19 | }, 20 | location_nonce_limit: { 21 | allowNull: false, 22 | defaultValue: 1, 23 | type: Sequelize.INTEGER, 24 | }, 25 | encrypted_keypair_entropy: { 26 | type: Sequelize.TEXT, 27 | }, 28 | keyring_id: { 29 | allowNull: false, 30 | type: Sequelize.INTEGER, 31 | }, 32 | created_at: { 33 | allowNull: false, 34 | type: Sequelize.DATE, 35 | }, 36 | updated_at: { 37 | allowNull: false, 38 | type: Sequelize.DATE, 39 | }, 40 | }) 41 | }, 42 | down: async (queryInterface, Sequelize) => { 43 | await queryInterface.dropTable('makers') 44 | }, 45 | } 46 | -------------------------------------------------------------------------------- /src/config/migrations/20210111010553-create-hotspot.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | module.exports = { 3 | up: async (queryInterface, Sequelize) => { 4 | await queryInterface.createTable('hotspots', { 5 | id: { 6 | allowNull: false, 7 | autoIncrement: true, 8 | primaryKey: true, 9 | type: Sequelize.INTEGER 10 | }, 11 | maker_id: { 12 | allowNull: false, 13 | type: Sequelize.INTEGER, 14 | references: { 15 | model: { 16 | tableName: 'makers', 17 | }, 18 | key: 'id', 19 | }, 20 | }, 21 | onboarding_key: { 22 | type: Sequelize.STRING 23 | }, 24 | mac_wlan0: { 25 | type: Sequelize.STRING 26 | }, 27 | rpi_serial: { 28 | type: Sequelize.STRING 29 | }, 30 | batch: { 31 | type: Sequelize.STRING 32 | }, 33 | public_address: { 34 | type: Sequelize.STRING 35 | }, 36 | helium_serial: { 37 | type: Sequelize.STRING 38 | }, 39 | mac_eth0: { 40 | type: Sequelize.STRING 41 | }, 42 | created_at: { 43 | allowNull: false, 44 | type: Sequelize.DATE 45 | }, 46 | updated_at: { 47 | allowNull: false, 48 | type: Sequelize.DATE 49 | } 50 | }); 51 | }, 52 | down: async (queryInterface, Sequelize) => { 53 | await queryInterface.dropTable('hotspots'); 54 | } 55 | }; 56 | 
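The migration files in this directory are discovered through the paths declared in `.sequelizerc` and run against whichever environment block in `src/config/config.js` matches `NODE_ENV`. For quick reference, a sketch of the local workflow using the wrapper scripts already defined in `package.json` (the status subcommand is standard `sequelize-cli`, not a script from this repo):

```
yarn db:migrate                      # apply pending migrations via sequelize-cli
yarn db:migrate:undo                 # roll back the most recent migration
npx sequelize-cli db:migrate:status  # list applied vs pending migrations
```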
-------------------------------------------------------------------------------- /src/config/migrations/20210111171614-add-hotspot-indexes.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = { 4 | up: async (queryInterface, Sequelize) => { 5 | const transaction = await queryInterface.sequelize.transaction(); 6 | try { 7 | await queryInterface.addIndex( 8 | 'hotspots', 9 | ['onboarding_key'], 10 | { 11 | unique: true, 12 | transaction, 13 | } 14 | ); 15 | await queryInterface.addIndex( 16 | 'hotspots', 17 | ['public_address'], 18 | { 19 | unique: true, 20 | transaction, 21 | } 22 | ); 23 | await queryInterface.addIndex( 24 | 'hotspots', 25 | ['mac_wlan0'], 26 | { 27 | unique: true, 28 | transaction, 29 | } 30 | ); 31 | await queryInterface.addIndex( 32 | 'hotspots', 33 | ['mac_eth0'], 34 | { 35 | unique: true, 36 | transaction, 37 | } 38 | ); 39 | await queryInterface.addIndex( 40 | 'hotspots', 41 | ['helium_serial'], 42 | { 43 | unique: true, 44 | transaction, 45 | } 46 | ); 47 | await queryInterface.addIndex( 48 | 'hotspots', 49 | ['rpi_serial'], 50 | { 51 | unique: true, 52 | transaction, 53 | } 54 | ); 55 | await transaction.commit(); 56 | } catch (err) { 57 | await transaction.rollback(); 58 | throw err; 59 | } 60 | }, 61 | 62 | down: async (queryInterface, Sequelize) => { 63 | const transaction = await queryInterface.sequelize.transaction(); 64 | try { 65 | await queryInterface.removeIndex('hotspots', 'hotspots_onboarding_key', { transaction }) 66 | await queryInterface.removeIndex('hotspots', 'hotspots_public_address', { transaction }) 67 | await queryInterface.removeIndex('hotspots', 'hotspots_mac_wlan0', { transaction }) 68 | await queryInterface.removeIndex('hotspots', 'hotspots_mac_eth0', { transaction }) 69 | await queryInterface.removeIndex('hotspots', 'hotspots_helium_serial', { transaction }) 70 | await queryInterface.removeIndex('hotspots', 'hotspots_rpi_serial', { transaction }) 71 | await transaction.commit(); 72 | } catch (err) { 73 | await transaction.rollback(); 74 | throw err; 75 | } 76 | } 77 | }; 78 | -------------------------------------------------------------------------------- /src/config/migrations/20210113164305-create-token.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | module.exports = { 3 | up: async (queryInterface, Sequelize) => { 4 | const transaction = await queryInterface.sequelize.transaction() 5 | try { 6 | await queryInterface.createTable( 7 | 'tokens', 8 | { 9 | id: { 10 | allowNull: false, 11 | autoIncrement: true, 12 | primaryKey: true, 13 | type: Sequelize.INTEGER, 14 | }, 15 | public_token: { 16 | allowNull: false, 17 | type: Sequelize.STRING, 18 | }, 19 | secret_token: { 20 | allowNull: false, 21 | type: Sequelize.STRING, 22 | }, 23 | maker_id: { 24 | allowNull: false, 25 | type: Sequelize.INTEGER, 26 | references: { 27 | model: { 28 | tableName: 'makers', 29 | }, 30 | key: 'id', 31 | }, 32 | }, 33 | last_used_at: { 34 | type: Sequelize.DATE, 35 | }, 36 | created_at: { 37 | allowNull: false, 38 | type: Sequelize.DATE, 39 | }, 40 | updated_at: { 41 | allowNull: false, 42 | type: Sequelize.DATE, 43 | }, 44 | }, 45 | { 46 | transaction, 47 | }, 48 | ) 49 | await queryInterface.addIndex('tokens', ['public_token'], { 50 | unique: true, 51 | transaction, 52 | }) 53 | await queryInterface.addIndex('tokens', ['secret_token'], { 54 | unique: true, 55 | transaction, 56 | }) 57 | await transaction.commit(); 58 | } catch 
(error) { 59 | await transaction.rollback() 60 | throw error 61 | } 62 | }, 63 | down: async (queryInterface, Sequelize) => { 64 | const transaction = await queryInterface.sequelize.transaction() 65 | try { 66 | await queryInterface.dropTable('tokens', { transaction }) 67 | await queryInterface.removeIndex('tokens', 'tokens_public_token', { 68 | transaction, 69 | }) 70 | await queryInterface.removeIndex('tokens', 'tokens_secret_token', { 71 | transaction, 72 | }) 73 | await transaction.commit() 74 | } catch (err) { 75 | await transaction.rollback() 76 | throw err 77 | } 78 | }, 79 | } 80 | -------------------------------------------------------------------------------- /src/config/migrations/20210113173036-remove-api-key-from-makers.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = { 4 | up: async (queryInterface, Sequelize) => { 5 | queryInterface.removeColumn('makers', 'api_key') 6 | }, 7 | 8 | down: async (queryInterface, Sequelize) => { 9 | queryInterface.addColumn('makers', 'api_key', Sequelize.STRING) 10 | } 11 | }; 12 | -------------------------------------------------------------------------------- /src/config/migrations/20210114001809-add-name-to-tokens.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = { 4 | up: async (queryInterface, Sequelize) => { 5 | queryInterface.addColumn('tokens', 'name', Sequelize.STRING) 6 | }, 7 | 8 | down: async (queryInterface, Sequelize) => { 9 | queryInterface.removeColumn('tokens', 'name') 10 | } 11 | }; 12 | -------------------------------------------------------------------------------- /src/config/migrations/20230714184637-widen-keys.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | /* 4 | * Widen public key fields. New key types for RSA-2048 require 5 | * more characters. 
6 | */ 7 | module.exports = { 8 | up: async (queryInterface, Sequelize) => { 9 | const transaction = await queryInterface.sequelize.transaction() 10 | try { 11 | await queryInterface.changeColumn('hotspots', 'onboarding_key', { 12 | type: Sequelize.TEXT, 13 | allowNull: true, 14 | }, { 15 | transaction, 16 | }) 17 | await queryInterface.changeColumn('hotspots', 'public_address', { 18 | type: Sequelize.TEXT, 19 | allowNull: true, 20 | }, { 21 | transaction, 22 | }) 23 | await transaction.commit() 24 | } catch (error) { 25 | await transaction.rollback() 26 | throw error 27 | } 28 | }, 29 | 30 | down: async (queryInterface, Sequelize) => { 31 | const transaction = await queryInterface.sequelize.transaction() 32 | try { 33 | await queryInterface.changeColumn('hotspots', 'onboarding_key', { 34 | type: Sequelize.STRING, 35 | allowNull: true, 36 | }, { 37 | transaction, 38 | }) 39 | await queryInterface.changeColumn('hotspots', 'public_address', { 40 | type: Sequelize.STRING, 41 | allowNull: true, 42 | }, { 43 | transaction, 44 | }) 45 | await transaction.commit() 46 | } catch (error) { 47 | await transaction.rollback() 48 | throw error 49 | } 50 | } 51 | }; 52 | -------------------------------------------------------------------------------- /src/config/migrations/20230922213538-add-device-type-to-hotspots.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = { 4 | up: async (queryInterface, Sequelize) => { 5 | queryInterface.addColumn('hotspots', 'device_type', Sequelize.TEXT) 6 | }, 7 | 8 | down: async (queryInterface, Sequelize) => { 9 | queryInterface.removeColumn('hotspots', 'device_type', Sequelize.TEXT) 10 | } 11 | }; 12 | -------------------------------------------------------------------------------- /src/controllers/hotspotsController.js: -------------------------------------------------------------------------------- 1 | import snakeCaseKeys from 'snakecase-keys' 2 | import camelcaseKeys from 'camelcase-keys' 3 | import { Op } from 'sequelize' 4 | import { Hotspot, Maker } from '../models' 5 | import { errorResponse, paginate, successResponse } from '../helpers' 6 | 7 | export const index = async (req, res) => { 8 | try { 9 | const { maker } = req 10 | const page = req.query.page ? parseInt(req.query.page) : 0 11 | const pageSize = req.query.pageSize ? parseInt(req.query.pageSize) : 100 12 | 13 | const hotspots = await Hotspot.findAll({ 14 | where: { makerId: maker.id }, 15 | ...paginate({ page, pageSize }), 16 | }) 17 | 18 | return successResponse(req, res, hotspots, 200, { 19 | page, 20 | pageSize, 21 | }) 22 | } catch (error) { 23 | errorResponse(req, res, error.message, 500, error.errors) 24 | } 25 | } 26 | 27 | export const showLegacy = async (req, res) => { 28 | try { 29 | const { onboardingKey } = req.params 30 | const hotspot = await Hotspot.findOne({ 31 | where: { onboardingKey }, 32 | include: [{ model: Maker }], 33 | }) 34 | hotspot.Maker.locationNonceLimit = hotspot.Maker.locationNonceLimit + 1 35 | const hotspotJSON = hotspot.toJSON() 36 | return successResponse(req, res, snakeCaseKeys(hotspotJSON)) 37 | } catch (error) { 38 | errorResponse(req, res, error.message, 500, error.errors) 39 | } 40 | } 41 | 42 | export const show = async (req, res) => { 43 | try { 44 | const { maker } = req 45 | const { onboardingKeyOrId } = req.params 46 | const where = maker 47 | ? 
{ [Op.and]: [{ id: onboardingKeyOrId }, { makerId: maker.id }] } 48 | : { 49 | [Op.or]: [ 50 | { onboardingKey: onboardingKeyOrId }, 51 | { publicAddress: onboardingKeyOrId }, 52 | ], 53 | } 54 | const hotspot = await Hotspot.findOne({ 55 | where, 56 | include: [{ model: Maker }], 57 | }) 58 | if (!hotspot) { 59 | return errorResponse(req, res, 'Unable to find hotspot', 404) 60 | } 61 | const hotspotJSON = hotspot.toJSON() 62 | return successResponse(req, res, camelcaseKeys(hotspotJSON)) 63 | } catch (error) { 64 | errorResponse(req, res, error.message, 500, error.errors) 65 | } 66 | } 67 | 68 | export const search = async (req, res) => { 69 | try { 70 | const { maker } = req 71 | 72 | const searchQuery = [] 73 | for (const [key, value] of Object.entries(req.query)) { 74 | searchQuery.push({ [key]: value }) 75 | } 76 | 77 | const hotspot = await Hotspot.findAll({ 78 | where: { 79 | [Op.or]: searchQuery, 80 | [Op.and]: { makerId: maker.id }, 81 | }, 82 | }) 83 | 84 | return successResponse(req, res, hotspot) 85 | } catch (error) { 86 | errorResponse(req, res, error.message, 500, error.errors) 87 | } 88 | } 89 | 90 | const VALID_DEVICE_TYPES = new Set(['Cbrs', 'WifiIndoor', 'WifiOutdoor']) 91 | export const create = async (req, res) => { 92 | try { 93 | const { maker } = req 94 | 95 | const { 96 | onboardingKey, 97 | macWlan0, 98 | rpiSerial, 99 | batch, 100 | heliumSerial, 101 | macEth0, 102 | deviceType 103 | } = req.body 104 | 105 | if (deviceType && !VALID_DEVICE_TYPES.has(deviceType)) { 106 | return errorResponse(req, res, 'Invalid device type', 422) 107 | } 108 | 109 | const hotspot = await Hotspot.create({ 110 | onboardingKey, 111 | macWlan0, 112 | rpiSerial, 113 | batch, 114 | heliumSerial, 115 | macEth0, 116 | makerId: maker.id, 117 | deviceType 118 | }) 119 | 120 | return successResponse(req, res, hotspot, 201) 121 | } catch (error) { 122 | errorResponse(req, res, error.message, 500, error.errors) 123 | } 124 | } 125 | 126 | export const update = async (req, res) => { 127 | try { 128 | const { maker } = req 129 | const { id } = req.params 130 | 131 | const { 132 | onboardingKey, 133 | macWlan0, 134 | rpiSerial, 135 | batch, 136 | heliumSerial, 137 | macEth0, 138 | } = req.body 139 | 140 | const hotspot = await Hotspot.findOne({ 141 | where: { 142 | [Op.and]: [{ id }, { makerId: maker.id }], 143 | }, 144 | }) 145 | 146 | if (!hotspot) { 147 | return errorResponse(req, res, 'Hotspot not found', 404) 148 | } 149 | 150 | if (hotspot.publicAddress) { 151 | return errorResponse(req, res, 'Hotspot is immutable', 422) 152 | } 153 | 154 | if (onboardingKey) hotspot.onboardingKey = onboardingKey 155 | if (macWlan0) hotspot.macWlan0 = macWlan0 156 | if (rpiSerial) hotspot.rpiSerial = rpiSerial 157 | if (batch) hotspot.batch = batch 158 | if (heliumSerial) hotspot.heliumSerial = heliumSerial 159 | if (macEth0) hotspot.macEth0 = macEth0 160 | 161 | const updatedHotspot = await hotspot.save() 162 | return successResponse(req, res, updatedHotspot) 163 | } catch (error) { 164 | errorResponse(req, res, error.message, 500, error.errors) 165 | } 166 | } 167 | 168 | export const destroy = async (req, res) => { 169 | try { 170 | const { maker } = req 171 | const { id } = req.params 172 | 173 | const hotspot = await Hotspot.findOne({ 174 | where: { 175 | [Op.and]: [{ id }, { makerId: maker.id }], 176 | }, 177 | }) 178 | 179 | if (!hotspot) { 180 | return errorResponse(req, res, 'Hotspot not found', 404) 181 | } 182 | 183 | if (hotspot.publicAddress) { 184 | return errorResponse(req, res, 'Hotspot is 
immutable', 422) 185 | } 186 | 187 | await hotspot.destroy() 188 | return successResponse(req, res, {}, 200) 189 | } catch (error) { 190 | errorResponse(req, res, error.message, 500, error.errors) 191 | } 192 | } 193 | -------------------------------------------------------------------------------- /src/controllers/makersController.js: -------------------------------------------------------------------------------- 1 | import { Maker } from '../models' 2 | import { errorResponse, successResponse } from '../helpers' 3 | 4 | export const index = async (req, res) => { 5 | try { 6 | const makers = await Maker.findAll() 7 | return successResponse(req, res, makers) 8 | } catch (error) { 9 | errorResponse(req, res, error.message, 500, error.errors) 10 | } 11 | } 12 | 13 | export const show = async (req, res) => { 14 | try { 15 | const { makerId } = req.params 16 | const maker = await Maker.findByPk(makerId) 17 | return successResponse(req, res, maker) 18 | } catch (error) { 19 | errorResponse(req, res, error.message, 500, error.errors) 20 | } 21 | } 22 | 23 | export const legacyAddress = async (req, res) => { 24 | try { 25 | const maker = await Maker.findByPk(1) 26 | return successResponse(req, res, { address: maker.address }) 27 | } catch (error) { 28 | errorResponse(req, res, error.message, 500, error.errors) 29 | } 30 | } 31 | 32 | export const legacyLimits = async (req, res) => { 33 | try { 34 | const maker = await Maker.findByPk(1) 35 | return res.json({ location_nonce: maker.locationNonceLimit + 1 }) 36 | } catch (error) { 37 | errorResponse(req, res, error.message, 500, error.errors) 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /src/controllers/transactionsController.js: -------------------------------------------------------------------------------- 1 | import { Keypair } from '@helium/crypto' 2 | import { 3 | Transaction, 4 | AddGatewayV1, 5 | AssertLocationV1, 6 | AssertLocationV2, 7 | } from '@helium/transactions' 8 | import { Maker, Hotspot } from '../models' 9 | import { errorResponse, successResponse } from '../helpers' 10 | import { Op } from 'sequelize' 11 | 12 | const env = process.env.NODE_ENV || 'development' 13 | 14 | export const pay = async (req, res) => { 15 | try { 16 | const { onboardingKey } = req.params 17 | const { transaction } = req.body 18 | 19 | if (!transaction) { 20 | return errorResponse(req, res, 'Missing transaction param', 422) 21 | } 22 | 23 | const hotspot = await Hotspot.findOne({ 24 | where: { [Op.or]: [{ onboardingKey }, { publicAddress: onboardingKey }] }, 25 | }) 26 | 27 | if (!hotspot) { 28 | return errorResponse(req, res, 'Hotspot not found', 404) 29 | } 30 | 31 | const maker = await Maker.scope('withKeypair').findByPk(hotspot.makerId) 32 | const keypairEntropy = Buffer.from(maker.keypairEntropy, 'hex') 33 | const keypair = await Keypair.fromEntropy(keypairEntropy) 34 | 35 | let txn 36 | switch (Transaction.stringType(transaction)) { 37 | case 'addGateway': 38 | txn = AddGatewayV1.fromString(transaction) 39 | break 40 | 41 | case 'assertLocation': 42 | txn = AssertLocationV1.fromString(transaction) 43 | 44 | // transactions are only signed up until the maker's nonce limit 45 | if (txn.nonce > maker.locationNonceLimit) { 46 | return errorResponse(req, res, 'Nonce limit exceeded', 422) 47 | } 48 | break 49 | 50 | case 'assertLocationV2': 51 | txn = AssertLocationV2.fromString(transaction) 52 | 53 | // transactions are only signed up until the maker's nonce limit 54 | if (txn.nonce > 
maker.locationNonceLimit) { 55 | return errorResponse(req, res, 'Nonce limit exceeded', 422) 56 | } 57 | break 58 | 59 | default: 60 | throw new Error('Unsupported transaction type') 61 | } 62 | 63 | // The transaction must include the onboarding server as the payer 64 | if (txn?.payer?.b58 !== maker.address) { 65 | return errorResponse(req, res, 'Invalid payer address', 422) 66 | } 67 | 68 | // Starting after hotspot 32951, it's required that the onboarding key 69 | // match the txn gateway address 70 | if (hotspot.id > 32951 && txn?.gateway?.b58 !== onboardingKey) { 71 | return errorResponse(req, res, 'Invalid hotspot address', 422) 72 | } 73 | 74 | // Once an onboarding key has been associated with a hotspot's public 75 | // address, it cannot be used for a hotspot with a different public address 76 | if (hotspot.publicAddress && hotspot.publicAddress !== txn?.gateway?.b58) { 77 | return errorResponse(req, res, 'Onboarding key already used', 422) 78 | } 79 | 80 | hotspot.publicAddress = txn?.gateway?.b58 81 | await hotspot.save() 82 | 83 | const signedTxn = await txn.sign({ payer: keypair }) 84 | return successResponse(req, res, { transaction: signedTxn.toString() }) 85 | } catch (error) { 86 | errorResponse( 87 | req, 88 | res, 89 | env === 'development' ? error.message : 'Internal error', 90 | 500, 91 | env === 'development' ? error.errors : [], 92 | ) 93 | } 94 | } 95 | 96 | export const sample = async (req, res) => { 97 | if (env === 'production') { 98 | return errorResponse(req, res, 'Not available', 422) 99 | } 100 | 101 | const maker = await Maker.scope('withKeypair').findByPk(1) 102 | const keypairEntropy = Buffer.from(maker.keypairEntropy, 'hex') 103 | const keypair = await Keypair.fromEntropy(keypairEntropy) 104 | 105 | const owner = await Keypair.makeRandom() 106 | const gateway = await Keypair.makeRandom() 107 | 108 | const txn = new AddGatewayV1({ 109 | owner: owner.address, 110 | gateway: gateway.address, 111 | payer: keypair.address, 112 | stakingFee: 40000, 113 | }) 114 | 115 | const signedTxn1 = await txn.sign({ owner }) 116 | const signedTxn2 = await signedTxn1.sign({ gateway }) 117 | 118 | return successResponse(req, res, { txn: signedTxn2.toString() }) 119 | } 120 | -------------------------------------------------------------------------------- /src/controllers/v3TransactionsController.js: -------------------------------------------------------------------------------- 1 | import { 2 | init, 3 | makerKey, 4 | onboardIotHotspot, 5 | onboardMobileHotspot, 6 | updateIotMetadata as updateIotMetadataFn, 7 | updateMobileMetadata as updateMobileMetadataFn, 8 | rewardableEntityConfigKey, 9 | keyToAssetKey, 10 | mobileInfoKey, 11 | iotInfoKey, 12 | } from '@helium/helium-entity-manager-sdk' 13 | import { daoKey, subDaoKey } from '@helium/helium-sub-daos-sdk' 14 | import { 15 | getConcurrentMerkleTreeAccountSize, 16 | ConcurrentMerkleTreeAccount, 17 | PROGRAM_ID as SPL_ACCOUNT_COMPRESSION_PROGRAM_ID, 18 | } from '@solana/spl-account-compression' 19 | import { dataCreditsKey } from '@helium/data-credits-sdk' 20 | import { helium } from '@helium/proto' 21 | import { 22 | PROGRAM_ID as BUBBLEGUM_PROGRAM_ID, 23 | TreeConfig, 24 | } from '@metaplex-foundation/mpl-bubblegum' 25 | import { AddGatewayV1, Transaction } from '@helium/transactions' 26 | import { 27 | Keypair as SolanaKeypair, 28 | PublicKey, 29 | SystemProgram, 30 | Transaction as SolanaTransaction, 31 | LAMPORTS_PER_SOL, 32 | } from '@solana/web3.js' 33 | import { Op } from 'sequelize' 34 | import { errorResponse, 
successResponse } from '../helpers' 35 | import { ASSET_API_URL, provider } from '../helpers/solana' 36 | import { Hotspot, Maker } from '../models' 37 | import BN from 'bn.js' 38 | import bs58 from 'bs58' 39 | import { DC_MINT, sendInstructions, withPriorityFees } from '@helium/spl-utils' 40 | import axios from 'axios' 41 | 42 | const ECC_VERIFY_ENDPOINT = process.env.ECC_VERIFY_ENDPOINT 43 | const IOT_MINT = new PublicKey(process.env.IOT_MINT) 44 | const HNT_MINT = new PublicKey(process.env.HNT_MINT) 45 | const MOBILE_MINT = new PublicKey(process.env.MOBILE_MINT) 46 | const ECC_VERIFIER = new PublicKey(process.env.ECC_VERIFIER) 47 | const DAO_KEY = daoKey(HNT_MINT)[0] 48 | const IOT_SUB_DAO_KEY = subDaoKey(IOT_MINT)[0] 49 | const MOBILE_SUB_DAO_KEY = subDaoKey(MOBILE_MINT)[0] 50 | const INITIAL_SOL = process.env.INITIAL_SOL 51 | const DATA_CREDITS_KEY = dataCreditsKey(DC_MINT)[0] 52 | 53 | const BASE_PRIORITY_FEE_MICROLAMPORTS = Number( 54 | process.env.BASE_PRIORITY_FEE_MICROLAMPORTS || '1', 55 | ) 56 | 57 | export const createHotspot = async (req, res) => { 58 | const { transaction, payer: inPayer } = req.body 59 | console.log(req.body) 60 | const sdk = await init(provider) 61 | 62 | try { 63 | if (!transaction) { 64 | return errorResponse(req, res, 'Missing transaction param', 422) 65 | } 66 | 67 | if (Transaction.stringType(transaction) !== 'addGateway') { 68 | throw new Error('Unsupported transaction type') 69 | } 70 | 71 | const txn = AddGatewayV1.fromString(transaction) 72 | 73 | const onboardingKey = txn.gateway.b58 74 | const hotspot = await Hotspot.findOne({ 75 | where: { 76 | [Op.or]: [{ onboardingKey }, { publicAddress: onboardingKey }], 77 | }, 78 | }) 79 | 80 | if (!hotspot) { 81 | return errorResponse(req, res, 'Hotspot not found', 404) 82 | } 83 | 84 | const makerDbEntry = await Maker.scope('withKeypair').findByPk( 85 | hotspot.makerId, 86 | ) 87 | const keypairEntropy = Buffer.from(makerDbEntry.keypairEntropy, 'hex') 88 | const makerSolanaKeypair = SolanaKeypair.fromSeed(keypairEntropy) 89 | let payer 90 | if (!inPayer) { 91 | payer = makerSolanaKeypair.publicKey 92 | } else { 93 | payer = new PublicKey(inPayer) 94 | } 95 | const maker = makerKey(DAO_KEY, makerDbEntry.name)[0] 96 | const program = await init(provider) 97 | const makerAcc = await program.account.makerV0.fetchNullable(maker) 98 | if (!makerAcc) { 99 | return errorResponse(req, res, 'Maker does not exist', 404) 100 | } 101 | const merkle = makerAcc.merkleTree 102 | const treeAuthority = PublicKey.findProgramAddressSync( 103 | [merkle.toBuffer()], 104 | BUBBLEGUM_PROGRAM_ID, 105 | )[0] 106 | const treeConfig = await TreeConfig.fromAccountAddress( 107 | provider.connection, 108 | treeAuthority, 109 | ) 110 | 111 | if (treeConfig.numMinted >= treeConfig.totalMintCapacity - 2) { 112 | const oldMerkle = await ConcurrentMerkleTreeAccount.fromAccountAddress( 113 | provider.connection, 114 | merkle, 115 | ) 116 | const newMerkle = SolanaKeypair.generate() 117 | const space = await getConcurrentMerkleTreeAccountSize( 118 | oldMerkle.getMaxDepth(), 119 | oldMerkle.getMaxBufferSize(), 120 | oldMerkle.getCanopyDepth(), 121 | ) 122 | console.log( 123 | `Tree is full with ${treeConfig.numMinted} minted and ${treeConfig.totalMintCapacity} capacity, creating a new tree`, 124 | ) 125 | const createMerkle = SystemProgram.createAccount({ 126 | fromPubkey: makerSolanaKeypair.publicKey, 127 | newAccountPubkey: newMerkle.publicKey, 128 | lamports: await provider.connection.getMinimumBalanceForRentExemption( 129 | space, 130 | ), 
131 | space: space, 132 | programId: SPL_ACCOUNT_COMPRESSION_PROGRAM_ID, 133 | }) 134 | const updateTree = await program.methods 135 | .updateMakerTreeV0({ 136 | maxBufferSize: oldMerkle.getMaxBufferSize(), 137 | maxDepth: oldMerkle.getMaxDepth(), 138 | }) 139 | .accountsPartial({ 140 | payer: makerSolanaKeypair.publicKey, 141 | maker, 142 | treeAuthority, 143 | newTreeAuthority: PublicKey.findProgramAddressSync( 144 | [newMerkle.publicKey.toBuffer()], 145 | BUBBLEGUM_PROGRAM_ID, 146 | )[0], 147 | newMerkleTree: newMerkle.publicKey, 148 | }) 149 | .instruction() 150 | 151 | await sendInstructions( 152 | provider, 153 | await withPriorityFees({ 154 | connection: provider.connection, 155 | instructions: [createMerkle, updateTree], 156 | basePriorityFee: BASE_PRIORITY_FEE_MICROLAMPORTS, 157 | computeUnits: 500000, 158 | }), 159 | [makerSolanaKeypair, newMerkle], 160 | makerSolanaKeypair.publicKey, 161 | 'confirmed', 162 | ) 163 | } 164 | 165 | const hotspotOwner = new PublicKey(txn.owner.publicKey) 166 | 167 | const { instruction: solanaIx, pubkeys } = await sdk.methods 168 | .issueEntityV0({ 169 | entityKey: Buffer.from(bs58.decode(txn.gateway.b58)), 170 | }) 171 | .accountsPartial({ 172 | payer, 173 | maker, 174 | eccVerifier: ECC_VERIFIER, 175 | dao: DAO_KEY, 176 | recipient: hotspotOwner, 177 | issuingAuthority: makerSolanaKeypair.publicKey, 178 | }) 179 | .prepare() 180 | 181 | let solanaTransactions = [] 182 | // Only return txns if the hotspot doesn't exist 183 | if (!(await provider.connection.getAccountInfo(pubkeys.keyToAsset))) { 184 | const tx = new SolanaTransaction({ 185 | recentBlockhash: ( 186 | await provider.connection.getLatestBlockhash('finalized') 187 | ).blockhash, 188 | feePayer: makerSolanaKeypair.publicKey, 189 | }) 190 | tx.add( 191 | ...(await withPriorityFees({ 192 | connection: provider.connection, 193 | instructions: [solanaIx], 194 | computeUnits: 1000000, 195 | basePriorityFee: BASE_PRIORITY_FEE_MICROLAMPORTS, 196 | })), 197 | ) 198 | 199 | // If INITIAL_SOL env provided, fund new wallets with that amount of sol 200 | // Only fund the wallet if they aren't doing a payer override. 201 | if (INITIAL_SOL && !inPayer) { 202 | const ownerAcc = await provider.connection.getAccountInfo(hotspotOwner) 203 | const initialLamports = 204 | (await provider.connection.getMinimumBalanceForRentExemption(0)) + 205 | Number(INITIAL_SOL) * LAMPORTS_PER_SOL 206 | if (!ownerAcc || ownerAcc.lamports < initialLamports) { 207 | tx.add( 208 | SystemProgram.transfer({ 209 | fromPubkey: makerSolanaKeypair.publicKey, 210 | toPubkey: hotspotOwner, 211 | lamports: ownerAcc 212 | ? initialLamports - ownerAcc.lamports 213 | : initialLamports, 214 | }), 215 | ) 216 | } 217 | } 218 | 219 | tx.partialSign(makerSolanaKeypair) 220 | 221 | // Verify the gateway that signed is correct so we can sign for the Solana transaction.
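// The add_gateway payload is re-encoded as a blockchain_txn_add_gateway_v1 protobuf
// and POSTed to ECC_VERIFY_ENDPOINT together with the gateway's signature and the
// hex-serialized, maker-signed Solana transaction. A failed verification is returned
// to the client as 'Invalid gateway signature'; on success the verifier's returned
// transaction is the one handed back to the client below.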
222 | const addGateway = txn.toProto(true) 223 | const serialized = helium.blockchain_txn_add_gateway_v1 224 | .encode(addGateway) 225 | .finish() 226 | 227 | try { 228 | const { transaction: eccVerifiedTxn } = ( 229 | await axios.post(ECC_VERIFY_ENDPOINT, { 230 | transaction: tx 231 | .serialize({ 232 | requireAllSignatures: false, 233 | }) 234 | .toString('hex'), 235 | msg: Buffer.from(serialized).toString('hex'), 236 | signature: Buffer.from(txn.gatewaySignature).toString('hex'), 237 | }) 238 | ).data 239 | solanaTransactions = [ 240 | SolanaTransaction.from(Buffer.from(eccVerifiedTxn, 'hex')), 241 | ] 242 | } catch (e) { 243 | console.error(e) 244 | return errorResponse( 245 | req, 246 | res, 247 | 'Invalid gateway signature', 248 | e.response.status || 400, 249 | ) 250 | } 251 | } 252 | 253 | // Once an onboarding key has been associated with a hotspot's public 254 | // address, it cannot be used for a hotspot with a different public address 255 | if (hotspot.publicAddress && hotspot.publicAddress !== txn?.gateway?.b58) { 256 | return errorResponse(req, res, 'Onboarding key already used', 422) 257 | } 258 | 259 | hotspot.publicAddress = txn?.gateway?.b58 260 | await hotspot.save() 261 | 262 | return successResponse(req, res, { 263 | solanaTransactions: solanaTransactions.map( 264 | (tx) => 265 | Buffer.from( 266 | tx.serialize({ 267 | requireAllSignatures: false, 268 | }), 269 | ).toJSON().data, 270 | ), 271 | }) 272 | } catch (error) { 273 | console.error(error) 274 | errorResponse(req, res, error.message, mapCode(error), error.errors) 275 | } 276 | } 277 | 278 | export const onboardToIot = async (req, res) => { 279 | try { 280 | const { entityKey, location, elevation, gain, payer: inPayer } = req.body 281 | console.log(req.body) 282 | if (!entityKey) { 283 | return errorResponse(req, res, 'Missing entityKey param', 422) 284 | } 285 | 286 | const program = await init(provider) 287 | const keyToAsset = await program.account.keyToAssetV0.fetchNullable( 288 | ( 289 | await keyToAssetKey(DAO_KEY, entityKey, 'b58') 290 | )[0], 291 | ) 292 | 293 | if (!keyToAsset) { 294 | return errorResponse( 295 | req, 296 | res, 297 | 'Key to asset does not exist, has the entity been created?', 298 | 404, 299 | ) 300 | } 301 | 302 | const assetId = keyToAsset.asset 303 | 304 | const hotspot = await Hotspot.findOne({ 305 | where: { 306 | [Op.or]: [{ publicAddress: entityKey }], 307 | }, 308 | }) 309 | 310 | if (!hotspot) { 311 | return errorResponse(req, res, 'Hotspot not found', 404) 312 | } 313 | 314 | const makerDbEntry = await Maker.scope('withKeypair').findByPk( 315 | hotspot.makerId, 316 | ) 317 | const keypairEntropy = Buffer.from(makerDbEntry.keypairEntropy, 'hex') 318 | const makerSolanaKeypair = SolanaKeypair.fromSeed(keypairEntropy) 319 | let payer 320 | if (!inPayer) { 321 | payer = makerSolanaKeypair.publicKey 322 | } else { 323 | payer = new PublicKey(inPayer) 324 | } 325 | const dcFeePayer = payer 326 | 327 | const { instruction } = await ( 328 | await onboardIotHotspot({ 329 | program, 330 | location: typeof location === 'undefined' ? null : new BN(location), 331 | elevation: typeof elevation === 'undefined' ? null : elevation, 332 | gain: typeof gain === 'undefined' ? 
null : gain, 333 | rewardableEntityConfig: rewardableEntityConfigKey( 334 | IOT_SUB_DAO_KEY, 335 | 'IOT', 336 | )[0], 337 | assetId, 338 | payer, 339 | dcFeePayer, 340 | maker: makerKey(DAO_KEY, makerDbEntry.name)[0], 341 | dao: DAO_KEY, 342 | assetEndpoint: ASSET_API_URL, 343 | }) 344 | ) 345 | .accountsPartial({ 346 | dc: DATA_CREDITS_KEY, 347 | dcMint: DC_MINT, 348 | }) 349 | .prepare() 350 | 351 | const tx = new SolanaTransaction({ 352 | recentBlockhash: ( 353 | await provider.connection.getLatestBlockhash('finalized') 354 | ).blockhash, 355 | feePayer: makerSolanaKeypair.publicKey, 356 | }) 357 | tx.add( 358 | ...(await withPriorityFees({ 359 | instructions: [instruction], 360 | connection: provider.connection, 361 | basePriorityFee: BASE_PRIORITY_FEE_MICROLAMPORTS, 362 | computeUnits: 300000, 363 | })), 364 | ) 365 | tx.partialSign(makerSolanaKeypair) 366 | 367 | return successResponse(req, res, { 368 | solanaTransactions: [tx.serialize({ requireAllSignatures: false })], 369 | }) 370 | } catch (error) { 371 | console.error(error) 372 | errorResponse(req, res, error.message, mapCode(error), error.errors) 373 | } 374 | } 375 | 376 | export const onboardToMobile = async (req, res) => { 377 | try { 378 | const { entityKey, location, payer: inPayer, deploymentInfo } = req.body 379 | if (!entityKey) { 380 | return errorResponse(req, res, 'Missing entityKey param', 422) 381 | } 382 | 383 | const program = await init(provider) 384 | const keyToAsset = await program.account.keyToAssetV0.fetchNullable( 385 | ( 386 | await keyToAssetKey(DAO_KEY, entityKey, 'b58') 387 | )[0], 388 | ) 389 | if (!keyToAsset) { 390 | return errorResponse( 391 | req, 392 | res, 393 | 'Key to asset does not exist, has the entity been created?', 394 | 404, 395 | ) 396 | } 397 | const assetId = keyToAsset.asset 398 | 399 | const hotspot = await Hotspot.findOne({ 400 | where: { 401 | [Op.or]: [{ publicAddress: entityKey }], 402 | }, 403 | }) 404 | 405 | if (!hotspot) { 406 | return errorResponse(req, res, 'Hotspot not found', 404) 407 | } 408 | 409 | const makerDbEntry = await Maker.scope('withKeypair').findByPk( 410 | hotspot.makerId, 411 | ) 412 | const keypairEntropy = Buffer.from(makerDbEntry.keypairEntropy, 'hex') 413 | const makerSolanaKeypair = SolanaKeypair.fromSeed(keypairEntropy) 414 | let payer 415 | if (!inPayer) { 416 | payer = makerSolanaKeypair.publicKey 417 | } else { 418 | payer = new PublicKey(inPayer) 419 | } 420 | const dcFeePayer = payer 421 | 422 | const { instruction } = await ( 423 | await onboardMobileHotspot({ 424 | program, 425 | location: typeof location === 'undefined' ? null : new BN(location), 426 | rewardableEntityConfig: rewardableEntityConfigKey( 427 | MOBILE_SUB_DAO_KEY, 428 | 'MOBILE', 429 | )[0], 430 | assetId, 431 | payer, 432 | dcFeePayer, 433 | maker: makerKey(DAO_KEY, makerDbEntry.name)[0], 434 | dao: DAO_KEY, 435 | assetEndpoint: ASSET_API_URL, 436 | deviceType: hotspot.deviceType 437 | ? lowercaseFirstLetter(hotspot.deviceType) 438 | : 'cbrs', 439 | deploymentInfo: 440 | typeof deploymentInfo === 'undefined' ? 
null : deploymentInfo, 441 | }) 442 | ) 443 | .accountsPartial({ 444 | dc: DATA_CREDITS_KEY, 445 | dcMint: DC_MINT, 446 | dntMint: MOBILE_MINT, 447 | }) 448 | .prepare() 449 | 450 | const tx = new SolanaTransaction({ 451 | recentBlockhash: ( 452 | await provider.connection.getLatestBlockhash('finalized') 453 | ).blockhash, 454 | feePayer: makerSolanaKeypair.publicKey, 455 | }) 456 | 457 | tx.add( 458 | ...(await withPriorityFees({ 459 | instructions: [instruction], 460 | connection: provider.connection, 461 | basePriorityFee: BASE_PRIORITY_FEE_MICROLAMPORTS, 462 | computeUnits: 300000, 463 | })), 464 | ) 465 | 466 | tx.partialSign(makerSolanaKeypair) 467 | 468 | return successResponse(req, res, { 469 | solanaTransactions: [tx.serialize({ requireAllSignatures: false })], 470 | }) 471 | } catch (error) { 472 | console.error(error) 473 | errorResponse(req, res, error.message, mapCode(error), error.errors) 474 | } 475 | } 476 | 477 | function lowercaseFirstLetter(str) { 478 | if (str.length === 0) { 479 | return str // Handle empty string 480 | } 481 | 482 | return str.charAt(0).toLowerCase() + str.slice(1) 483 | } 484 | 485 | export const updateMobileMetadata = async (req, res) => { 486 | try { 487 | const { 488 | entityKey, 489 | location, 490 | wallet, 491 | payer: passedPayer, 492 | deploymentInfo, 493 | } = req.body 494 | if (!entityKey) { 495 | return errorResponse(req, res, 'Missing entityKey param', 422) 496 | } 497 | if (!wallet) { 498 | return errorResponse(req, res, 'Missing wallet param', 422) 499 | } 500 | const program = await init(provider) 501 | const keyToAsset = await program.account.keyToAssetV0.fetchNullable( 502 | ( 503 | await keyToAssetKey(DAO_KEY, entityKey, 'b58') 504 | )[0], 505 | ) 506 | if (!keyToAsset) { 507 | return errorResponse( 508 | req, 509 | res, 510 | 'Key to asset does not exist, has the entity been created?', 511 | 404, 512 | ) 513 | } 514 | const assetId = keyToAsset.asset 515 | 516 | const hotspot = await Hotspot.findOne({ 517 | where: { 518 | [Op.or]: [{ publicAddress: entityKey }], 519 | }, 520 | }) 521 | 522 | const makerDbEntry = 523 | hotspot && (await Maker.scope('withKeypair').findByPk(hotspot.makerId)) 524 | const keypairEntropy = 525 | makerDbEntry && Buffer.from(makerDbEntry.keypairEntropy, 'hex') 526 | const makerSolanaKeypair = 527 | keypairEntropy && SolanaKeypair.fromSeed(keypairEntropy) 528 | 529 | if ( 530 | makerSolanaKeypair && 531 | makerSolanaKeypair.publicKey.toBase58() === passedPayer 532 | ) { 533 | return errorResponse(req, res, 'Payer cannot be the maker', 422) 534 | } 535 | 536 | const rewardableEntityConfig = rewardableEntityConfigKey( 537 | MOBILE_SUB_DAO_KEY, 538 | 'MOBILE', 539 | )[0] 540 | const [info] = await mobileInfoKey(rewardableEntityConfig, entityKey) 541 | const infoAcc = await program.account.mobileHotspotInfoV0.fetchNullable( 542 | info, 543 | ) 544 | 545 | if (!infoAcc) { 546 | return errorResponse( 547 | req, 548 | res, 549 | 'Hotspot info does not exist, has it been onboarded?', 550 | 404, 551 | ) 552 | } 553 | 554 | const payer = passedPayer 555 | ? new PublicKey(passedPayer) 556 | : location && 557 | makerDbEntry && 558 | infoAcc.numLocationAsserts < makerDbEntry.locationNonceLimit 559 | ? makerSolanaKeypair.publicKey 560 | : new PublicKey(wallet) 561 | 562 | const { instruction } = await ( 563 | await updateMobileMetadataFn({ 564 | location: typeof location === 'undefined' ? 
null : new BN(location), 565 | program, 566 | rewardableEntityConfig, 567 | assetId, 568 | payer, 569 | dcFeePayer: payer, 570 | assetEndpoint: ASSET_API_URL, 571 | deploymentInfo: 572 | typeof deploymentInfo === 'undefined' ? null : deploymentInfo, 573 | }) 574 | ) 575 | .accountsPartial({ 576 | dc: DATA_CREDITS_KEY, 577 | dcMint: DC_MINT, 578 | dntMint: MOBILE_MINT, 579 | dao: DAO_KEY 580 | }) 581 | .prepare() 582 | 583 | const tx = new SolanaTransaction({ 584 | recentBlockhash: ( 585 | await provider.connection.getLatestBlockhash('finalized') 586 | ).blockhash, 587 | feePayer: payer, 588 | }) 589 | 590 | tx.add( 591 | ...(await withPriorityFees({ 592 | instructions: [instruction], 593 | connection: provider.connection, 594 | basePriorityFee: BASE_PRIORITY_FEE_MICROLAMPORTS, 595 | computeUnits: 200000, 596 | })), 597 | ) 598 | 599 | if (makerSolanaKeypair && payer.equals(makerSolanaKeypair.publicKey)) { 600 | tx.partialSign(makerSolanaKeypair) 601 | } 602 | 603 | return successResponse(req, res, { 604 | solanaTransactions: [tx.serialize({ requireAllSignatures: false })], 605 | }) 606 | } catch (error) { 607 | console.error(error) 608 | errorResponse(req, res, error.message, mapCode(error), error.errors) 609 | } 610 | } 611 | 612 | function mapCode(error) { 613 | if (error.message && error.message.includes('No asset')) { 614 | return 404 615 | } 616 | return 500 617 | } 618 | 619 | export const updateIotMetadata = async (req, res) => { 620 | try { 621 | const { 622 | entityKey, 623 | location, 624 | elevation, 625 | gain, 626 | wallet, 627 | payer: passedPayer, 628 | } = req.body 629 | if (!entityKey) { 630 | return errorResponse(req, res, 'Missing entityKey param', 422) 631 | } 632 | if (!wallet) { 633 | return errorResponse(req, res, 'Missing wallet param', 422) 634 | } 635 | 636 | const program = await init(provider) 637 | const keyToAsset = await program.account.keyToAssetV0.fetchNullable( 638 | ( 639 | await keyToAssetKey(DAO_KEY, entityKey, 'b58') 640 | )[0], 641 | ) 642 | 643 | if (!keyToAsset) { 644 | return errorResponse( 645 | req, 646 | res, 647 | 'Key to asset does not exist, has the entity been created?', 648 | 404, 649 | ) 650 | } 651 | const assetId = keyToAsset.asset 652 | 653 | const hotspot = await Hotspot.findOne({ 654 | where: { 655 | [Op.or]: [{ publicAddress: entityKey }], 656 | }, 657 | }) 658 | 659 | const makerDbEntry = 660 | hotspot && (await Maker.scope('withKeypair').findByPk(hotspot.makerId)) 661 | const keypairEntropy = 662 | makerDbEntry && Buffer.from(makerDbEntry.keypairEntropy, 'hex') 663 | const makerSolanaKeypair = 664 | keypairEntropy && SolanaKeypair.fromSeed(keypairEntropy) 665 | 666 | if ( 667 | makerSolanaKeypair && 668 | makerSolanaKeypair.publicKey.toBase58() === passedPayer 669 | ) { 670 | return errorResponse(req, res, 'Payer cannot be the maker', 422) 671 | } 672 | 673 | const rewardableEntityConfig = rewardableEntityConfigKey( 674 | IOT_SUB_DAO_KEY, 675 | 'IOT', 676 | )[0] 677 | 678 | const [info] = await iotInfoKey(rewardableEntityConfig, entityKey) 679 | const infoAcc = await program.account.iotHotspotInfoV0.fetchNullable(info) 680 | 681 | if (!infoAcc) { 682 | return errorResponse( 683 | req, 684 | res, 685 | 'Hotspot info does not exist, has it been onboarded?', 686 | 404, 687 | ) 688 | } 689 | 690 | const payer = passedPayer 691 | ? new PublicKey(passedPayer) 692 | : location && 693 | makerSolanaKeypair && 694 | infoAcc.numLocationAsserts < makerDbEntry.locationNonceLimit 695 | ? 
makerSolanaKeypair.publicKey 696 | : new PublicKey(wallet) 697 | 698 | const { instruction } = await ( 699 | await updateIotMetadataFn({ 700 | location: typeof location === 'undefined' ? null : new BN(location), 701 | program, 702 | rewardableEntityConfig, 703 | assetId, 704 | elevation: typeof elevation === 'undefined' ? null : elevation, 705 | gain: typeof gain === 'undefined' ? null : gain, 706 | payer: payer, 707 | dcFeePayer: payer, 708 | assetEndpoint: ASSET_API_URL, 709 | }) 710 | ) 711 | .accountsPartial({ 712 | dc: DATA_CREDITS_KEY, 713 | dcMint: DC_MINT, 714 | dao: DAO_KEY, 715 | }) 716 | .prepare() 717 | 718 | const tx = new SolanaTransaction({ 719 | recentBlockhash: ( 720 | await provider.connection.getLatestBlockhash('finalized') 721 | ).blockhash, 722 | feePayer: payer, 723 | }) 724 | 725 | tx.add( 726 | ...(await withPriorityFees({ 727 | instructions: [instruction], 728 | connection: provider.connection, 729 | basePriorityFee: BASE_PRIORITY_FEE_MICROLAMPORTS, 730 | computeUnits: 200000, 731 | })), 732 | ) 733 | 734 | if (makerSolanaKeypair && payer.equals(makerSolanaKeypair.publicKey)) { 735 | tx.partialSign(makerSolanaKeypair) 736 | } 737 | 738 | return successResponse(req, res, { 739 | solanaTransactions: [tx.serialize({ requireAllSignatures: false })], 740 | }) 741 | } catch (error) { 742 | console.error(error) 743 | errorResponse(req, res, error.message, mapCode(error), error.errors) 744 | } 745 | } 746 | -------------------------------------------------------------------------------- /src/helpers/index.js: -------------------------------------------------------------------------------- 1 | import bcrypt from 'bcryptjs' 2 | import { Maker, Token } from '../models' 3 | 4 | export const successResponse = (req, res, data, code = 200, meta) => 5 | res.send({ 6 | code, 7 | data, 8 | success: true, 9 | ...(meta !== undefined && { meta }), 10 | }) 11 | 12 | export const errorResponse = ( 13 | req, 14 | res, 15 | errorMessage = 'Something went wrong', 16 | code = 500, 17 | errors = [], 18 | ) => { 19 | console.log(errorMessage, code) 20 | res.status(code).json({ 21 | code, 22 | errorMessage, 23 | errors, 24 | data: null, 25 | success: false, 26 | }) 27 | } 28 | 29 | export const validateFields = (object, fields) => { 30 | const errors = [] 31 | fields.forEach((f) => { 32 | if (!(object && object[f])) { 33 | errors.push(f) 34 | } 35 | }) 36 | return errors.length ? 
`${errors.join(', ')} are required fields.` : '' 37 | } 38 | 39 | export const verifyApiKey = async (req, res, next) => { 40 | const authHeader = req.headers['authorization'] 41 | 42 | if (authHeader) { 43 | const [publicToken, secretToken] = authHeader.split(':') 44 | if (publicToken && secretToken) { 45 | const token = await Token.findOne({ where: { publicToken } }) 46 | if (token && bcrypt.compareSync(secretToken, token.secretToken)) { 47 | const maker = await Maker.findByPk(token.makerId) 48 | token.lastUsedAt = new Date() 49 | token.save() 50 | if (maker) { 51 | req.maker = maker 52 | } 53 | } 54 | } 55 | } 56 | 57 | next() 58 | } 59 | 60 | export const restrictToMaker = (req, res, next) => { 61 | if (req.maker) { 62 | next() 63 | } else { 64 | res.sendStatus(403) // Forbidden 65 | } 66 | } 67 | 68 | export const paginate = ({ page = 0, pageSize = 100 }) => { 69 | const offset = page * pageSize 70 | const limit = pageSize 71 | 72 | return { 73 | offset, 74 | limit, 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /src/helpers/solana.js: -------------------------------------------------------------------------------- 1 | import { AccountFetchCache } from '@helium/account-fetch-cache' 2 | import * as anchor from "@coral-xyz/anchor"; 3 | import { Keypair } from "@solana/web3.js"; 4 | import fs from "fs"; 5 | import axios from "axios"; 6 | 7 | export const SOLANA_STATUS_URL = process.env.SOLANA_STATUS_URL || "https://solana-status.helium.com" 8 | export const SOLANA_URL = process.env.SOLANA_URL || 'http://127.0.0.1:8899' 9 | export const ASSET_API_URL = 10 | process.env.ASSET_API_URL || SOLANA_URL || 'http://127.0.0.1:8899' 11 | process.env.ANCHOR_PROVIDER_URL = SOLANA_URL; 12 | anchor.setProvider(anchor.AnchorProvider.local(SOLANA_URL)); 13 | 14 | export const provider = anchor.getProvider(); 15 | export const cache = new AccountFetchCache({ 16 | connection: provider.connection, 17 | commitment: "confirmed", 18 | extendConnection: true, 19 | }); 20 | 21 | export const wallet = loadKeypair(process.env.ANCHOR_WALLET); 22 | 23 | export function loadKeypair(keypair) { 24 | console.log(process.env.ANCHOR_PROVIDER_URL) 25 | anchor.setProvider(anchor.AnchorProvider.env()) 26 | 27 | return Keypair.fromSecretKey( 28 | new Uint8Array(JSON.parse(fs.readFileSync(keypair).toString())), 29 | ) 30 | } 31 | 32 | export async function isEnabled() { 33 | return ( 34 | process.env.ENABLE_SOLANA === 'true' || 35 | (await axios.get(SOLANA_STATUS_URL)).data.migrationStatus !== 'not_started' 36 | ) 37 | } 38 | -------------------------------------------------------------------------------- /src/models/hotspot.js: -------------------------------------------------------------------------------- 1 | const { Model } = require('sequelize') 2 | 3 | module.exports = (sequelize, DataTypes) => { 4 | class Hotspot extends Model { 5 | /** 6 | * Helper method for defining associations. 7 | * This method is not a part of Sequelize lifecycle. 8 | * The `models/index` file will call this method automatically. 
9 | */ 10 | static associate(models) { 11 | this.maker = this.belongsTo(models.Maker, { 12 | foreignKey: 'makerId', 13 | }) 14 | } 15 | } 16 | Hotspot.init( 17 | { 18 | onboardingKey: DataTypes.TEXT, 19 | macWlan0: DataTypes.STRING, 20 | rpiSerial: DataTypes.STRING, 21 | batch: DataTypes.STRING, 22 | publicAddress: DataTypes.TEXT, 23 | heliumSerial: DataTypes.STRING, 24 | macEth0: DataTypes.STRING, 25 | deviceType: DataTypes.TEXT, 26 | }, 27 | { 28 | defaultScope: { 29 | attributes: { exclude: ['MakerId'] }, 30 | }, 31 | sequelize, 32 | modelName: 'Hotspot', 33 | underscored: true, 34 | }, 35 | ) 36 | return Hotspot 37 | } 38 | -------------------------------------------------------------------------------- /src/models/index.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs') 2 | const path = require('path') 3 | const Sequelize = require('sequelize') 4 | const basename = path.basename(__filename) 5 | const db = {} 6 | const AWS = require('aws-sdk') 7 | const pg = require('pg') 8 | 9 | const host = process.env.PGHOST || 'localhost' 10 | const port = Number(process.env.PGPORT) || 5432 11 | const sequelize = new Sequelize({ 12 | host: host, 13 | dialect: 'postgres', 14 | port: port, 15 | logging: process.env.DISABLE_DB_LOGGING === 'true' ? false : console.log, 16 | dialectModule: pg, 17 | username: process.env.PGUSER, 18 | database: process.env.PGDATABASE, 19 | dialectOptions: { 20 | ssl: process.env.PG_DISABLE_SSL 21 | ? { 22 | ssl: { 23 | require: false, 24 | rejectUnauthorized: false, 25 | }, 26 | } 27 | : undefined, 28 | }, 29 | hooks: { 30 | beforeConnect: async (config) => { 31 | const isRds = host.includes('rds.amazonaws.com') 32 | 33 | let password = process.env.PGPASSWORD 34 | if (isRds && !password) { 35 | const signer = new AWS.RDS.Signer({ 36 | region: process.env.AWS_REGION, 37 | hostname: process.env.PGHOST, 38 | port, 39 | username: process.env.PGUSER, 40 | }) 41 | password = await new Promise((resolve, reject) => 42 | signer.getAuthToken({}, (err, token) => { 43 | if (err) { 44 | return reject(err) 45 | } 46 | resolve(token) 47 | }), 48 | ) 49 | config.dialectOptions = { 50 | ssl: { 51 | require: false, 52 | rejectUnauthorized: false, 53 | }, 54 | } 55 | } 56 | config.password = password 57 | }, 58 | }, 59 | }) 60 | 61 | fs.readdirSync(__dirname) 62 | .filter((file) => { 63 | return ( 64 | file.indexOf('.') !== 0 && file !== basename && file.slice(-3) === '.js' 65 | ) 66 | }) 67 | .forEach((file) => { 68 | const model = require(path.join(__dirname, file))( 69 | sequelize, 70 | Sequelize.DataTypes, 71 | ) 72 | db[model.name] = model 73 | }) 74 | 75 | Object.keys(db).forEach((modelName) => { 76 | if (db[modelName].associate) { 77 | db[modelName].associate(db) 78 | } 79 | }) 80 | 81 | db.sequelize = sequelize 82 | db.Sequelize = Sequelize 83 | 84 | module.exports = db 85 | -------------------------------------------------------------------------------- /src/models/maker.js: -------------------------------------------------------------------------------- 1 | const { Model } = require('sequelize') 2 | const { keyring } = require('@fnando/keyring') 3 | const Address = require('@helium/address').default 4 | const { PublicKey } = require('@solana/web3.js') 5 | 6 | const keys = JSON.parse(process.env.KEYRING) 7 | const digestSalt = process.env.KEYRING_SALT 8 | 9 | module.exports = (sequelize, DataTypes) => { 10 | class Maker extends Model { 11 | static associate(models) { 12 | this.hotspots = this.hasMany(models.Hotspot) 13 
| this.tokens = this.hasMany(models.Token) 14 | } 15 | } 16 | Maker.init( 17 | { 18 | name: DataTypes.STRING, 19 | address: DataTypes.STRING, 20 | locationNonceLimit: DataTypes.INTEGER, 21 | encryptedKeypairEntropy: DataTypes.TEXT, 22 | keypairEntropy: DataTypes.VIRTUAL, 23 | keyringId: DataTypes.INTEGER, 24 | solanaAddress: DataTypes.VIRTUAL, 25 | }, 26 | { 27 | sequelize, 28 | modelName: 'Maker', 29 | tableName: 'makers', 30 | underscored: true, 31 | hooks: { 32 | beforeCreate: (record) => { 33 | const encryptor = keyring(keys, { digestSalt }) 34 | const { keypairEntropy } = record 35 | 36 | record.keyringId = encryptor.currentId() 37 | record.encryptedKeypairEntropy = encryptor.encrypt(keypairEntropy) 38 | }, 39 | afterFind: (record) => { 40 | if (!record) return 41 | 42 | const records = Array.isArray(record) ? record : [record] 43 | for (const record of records) { 44 | const addr = record.address && Address.fromB58(record.address) 45 | record.solanaAddress = 46 | addr && new PublicKey(addr.publicKey).toBase58() 47 | } 48 | 49 | const { encryptedKeypairEntropy, keyringId } = record 50 | if (encryptedKeypairEntropy) { 51 | const encryptor = keyring(keys, { digestSalt }) 52 | record.keypairEntropy = encryptor.decrypt( 53 | encryptedKeypairEntropy, 54 | keyringId, 55 | ) 56 | } 57 | }, 58 | }, 59 | defaultScope: { 60 | attributes: { 61 | exclude: ['keypairEntropy', 'encryptedKeypairEntropy', 'keyringId'], 62 | }, 63 | }, 64 | scopes: { 65 | withKeypair: { 66 | attributes: { 67 | include: ['keypairEntropy', 'encryptedKeypairEntropy', 'keyringId'], 68 | }, 69 | }, 70 | }, 71 | }, 72 | ) 73 | 74 | return Maker 75 | } 76 | -------------------------------------------------------------------------------- /src/models/token.js: -------------------------------------------------------------------------------- 1 | const { Model } = require('sequelize') 2 | const bcrypt = require('bcryptjs') 3 | 4 | module.exports = (sequelize, DataTypes) => { 5 | class Token extends Model { 6 | static associate(models) { 7 | this.maker = this.belongsTo(models.Maker, { 8 | foreignKey: 'makerId', 9 | }) 10 | } 11 | } 12 | Token.init( 13 | { 14 | name: DataTypes.STRING, 15 | publicToken: DataTypes.STRING, 16 | secretToken: DataTypes.STRING, 17 | makerId: DataTypes.INTEGER, 18 | lastUsedAt: DataTypes.DATE, 19 | }, 20 | { 21 | sequelize, 22 | modelName: 'Token', 23 | tableName: 'tokens', 24 | underscored: true, 25 | hooks: { 26 | beforeCreate: (record) => { 27 | record.secretToken = bcrypt.hashSync(record.secretToken, 10) 28 | }, 29 | }, 30 | }, 31 | ) 32 | return Token 33 | } 34 | -------------------------------------------------------------------------------- /src/routes/apiRouter.js: -------------------------------------------------------------------------------- 1 | import express from 'express' 2 | import rateLimit from 'express-rate-limit' 3 | import RedisStore from 'rate-limit-redis' 4 | import Redis from 'ioredis' 5 | import cors from 'cors' 6 | import * as transactionsController from '../controllers/transactionsController' 7 | import * as v3TransactionsController from '../controllers/v3TransactionsController' 8 | 9 | import * as makersController from '../controllers/makersController' 10 | import * as hotspotsController from '../controllers/hotspotsController' 11 | import { restrictToMaker, successResponse, verifyApiKey } from '../helpers' 12 | 13 | const REQUIRED_FIRMWARE_VERSION = '2019.11.06.0' 14 | 15 | const router = express.Router() 16 | 17 | router.use(cors()) 18 | router.options('*', cors()) 19 | 20 | 
const numberEnv = (envName, fallback) => { 21 | if (process.env[envName]) { 22 | return parseInt(process.env[envName]) 23 | } 24 | return fallback 25 | } 26 | 27 | let redisClient 28 | if (process.env.REDIS_URL) { 29 | redisClient = new Redis(process.env.REDIS_URL) 30 | } 31 | 32 | const strictLimitOpts = { 33 | windowMs: 10 * 60 * 1000, 34 | max: 10, 35 | skip: (req, res) => req.maker, 36 | } 37 | if (process.env.REDIS_URL) { 38 | strictLimitOpts.store = new RedisStore({ 39 | client: redisClient, 40 | }) 41 | } 42 | const strictLimit = rateLimit(strictLimitOpts) 43 | 44 | const defaultLimitOpts = { 45 | windowMs: numberEnv('RATE_LIMIT_WINDOW', 15 * 60 * 1000), // 15 minutes 46 | max: numberEnv('RATE_LIMIT_MAX', 3), 47 | skip: (req, res) => req.maker, 48 | } 49 | if (process.env.REDIS_URL) { 50 | defaultLimitOpts.store = new RedisStore({ 51 | client: redisClient, 52 | }) 53 | } 54 | const defaultLimit = rateLimit(defaultLimitOpts) 55 | 56 | router.use(verifyApiKey) 57 | router.use(defaultLimit) 58 | 59 | // Legacy CLI Support (2020) 60 | router.post( 61 | '/v1/transactions/pay/:onboardingKey', 62 | strictLimit, 63 | transactionsController.pay, 64 | ) 65 | router.get('/v1/address', makersController.legacyAddress) 66 | router.get('/v1/limits', (req, res) => { 67 | return successResponse(req, res, { location_nonce: 3 }) 68 | }) 69 | 70 | // V2 (Q1 2021) 71 | // Restricted Maker API 72 | router.get('/v2/hotspots', restrictToMaker, hotspotsController.index) 73 | router.get('/v2/hotspots/search', restrictToMaker, hotspotsController.search) 74 | router.post('/v2/hotspots', restrictToMaker, hotspotsController.create) 75 | router.put('/v2/hotspots/:id', restrictToMaker, hotspotsController.update) 76 | router.delete('/v2/hotspots/:id', restrictToMaker, hotspotsController.destroy) 77 | 78 | // Public rate limited API 79 | router.get( 80 | '/v2/hotspots/:onboardingKeyOrId', 81 | strictLimit, 82 | hotspotsController.show, 83 | ) 84 | router.post( 85 | '/v2/transactions/pay/:onboardingKey', 86 | strictLimit, 87 | transactionsController.pay, 88 | ) 89 | router.get('/v2/transactions/sample', transactionsController.sample) 90 | router.get('/v2/makers', makersController.index) 91 | router.get('/v2/makers/:makerId', makersController.show) 92 | router.get('/v2/firmware', (req, res) => { 93 | return successResponse(req, res, { version: REQUIRED_FIRMWARE_VERSION }) 94 | }) 95 | 96 | 97 | router.get('/v3/hotspots', restrictToMaker, hotspotsController.index) 98 | router.get('/v3/hotspots/search', restrictToMaker, hotspotsController.search) 99 | router.post('/v3/hotspots', restrictToMaker, hotspotsController.create) 100 | router.put('/v3/hotspots/:id', restrictToMaker, hotspotsController.update) 101 | router.delete('/v3/hotspots/:id', restrictToMaker, hotspotsController.destroy) 102 | 103 | // Public rate limited API 104 | router.get( 105 | '/v3/hotspots/:onboardingKeyOrId', 106 | strictLimit, 107 | hotspotsController.show, 108 | ) 109 | router.get('/v3/makers', makersController.index) 110 | router.get('/v3/makers/:makerId', makersController.show) 111 | router.get('/v3/firmware', (req, res) => { 112 | return successResponse(req, res, { version: REQUIRED_FIRMWARE_VERSION }) 113 | }) 114 | 115 | router.post( 116 | '/v3/transactions/create-hotspot', 117 | v3TransactionsController.createHotspot, 118 | ) 119 | router.post( 120 | '/v3/transactions/iot/onboard', 121 | v3TransactionsController.onboardToIot, 122 | ) 123 | router.post( 124 | '/v3/transactions/mobile/onboard', 125 | v3TransactionsController.onboardToMobile, 
126 | ) 127 | 128 | router.post( 129 | '/v3/transactions/mobile/update-metadata', 130 | v3TransactionsController.updateMobileMetadata, 131 | ) 132 | 133 | router.post( 134 | '/v3/transactions/iot/update-metadata', 135 | v3TransactionsController.updateIotMetadata, 136 | ) 137 | 138 | module.exports = router 139 | -------------------------------------------------------------------------------- /src/routes/appRouter.js: -------------------------------------------------------------------------------- 1 | import express from 'express' 2 | import basicAuth from 'express-basic-auth' 3 | import * as transactionsController from '../controllers/transactionsController' 4 | import * as makersController from '../controllers/makersController' 5 | import * as hotspotsController from '../controllers/hotspotsController' 6 | import { successResponse } from '../helpers' 7 | 8 | const router = express.Router() 9 | 10 | const username = process.env.APP_USERNAME 11 | const password = process.env.APP_PASSWORD 12 | 13 | router.use( 14 | basicAuth({ 15 | users: { [username]: password }, 16 | }), 17 | ) 18 | 19 | const REQUIRED_FIRMWARE_VERSION = '2019.11.06.0' 20 | 21 | // Legacy Support (2020) 22 | router.get('/hotspots/:onboardingKey', hotspotsController.showLegacy) 23 | router.post('/transactions/pay/:onboardingKey', transactionsController.pay) 24 | router.get('/address', makersController.legacyAddress) 25 | router.get('/limits', makersController.legacyLimits) 26 | router.get('/firmware', (req, res) => { 27 | return successResponse(req, res, { version: REQUIRED_FIRMWARE_VERSION }) 28 | }) 29 | 30 | module.exports = router 31 | -------------------------------------------------------------------------------- /src/routes/indexRouter.js: -------------------------------------------------------------------------------- 1 | var express = require('express'); 2 | var router = express.Router(); 3 | 4 | /* GET home page. */ 5 | router.get('/', function(req, res, next) { 6 | res.send('index') 7 | }); 8 | 9 | module.exports = router; 10 | -------------------------------------------------------------------------------- /src/server.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | /** 4 | * Module dependencies. 5 | */ 6 | import app from './app' 7 | var debug = require('debug')('onboarding-server:server'); 8 | var http = require('http'); 9 | 10 | /** 11 | * Get port from environment and store in Express. 12 | */ 13 | 14 | var port = normalizePort(process.env.PORT || '3002'); 15 | app.set('port', port); 16 | 17 | /** 18 | * Create HTTP server. 19 | */ 20 | 21 | var server = http.createServer(app); 22 | 23 | /** 24 | * Listen on provided port, on all network interfaces. 25 | */ 26 | 27 | server.listen(port, "0.0.0.0"); 28 | server.on('error', onError); 29 | server.on('listening', onListening); 30 | 31 | /** 32 | * Normalize a port into a number, string, or false. 33 | */ 34 | 35 | function normalizePort(val) { 36 | var port = parseInt(val, 10); 37 | 38 | if (isNaN(port)) { 39 | // named pipe 40 | return val; 41 | } 42 | 43 | if (port >= 0) { 44 | // port number 45 | return port; 46 | } 47 | 48 | return false; 49 | } 50 | 51 | /** 52 | * Event listener for HTTP server "error" event. 53 | */ 54 | 55 | function onError(error) { 56 | if (error.syscall !== 'listen') { 57 | throw error; 58 | } 59 | 60 | var bind = typeof port === 'string' 61 | ? 
'Pipe ' + port 62 | : 'Port ' + port; 63 | 64 | // handle specific listen errors with friendly messages 65 | switch (error.code) { 66 | case 'EACCES': 67 | console.error(bind + ' requires elevated privileges'); 68 | process.exit(1); 69 | break; 70 | case 'EADDRINUSE': 71 | console.error(bind + ' is already in use'); 72 | process.exit(1); 73 | break; 74 | default: 75 | throw error; 76 | } 77 | } 78 | 79 | /** 80 | * Event listener for HTTP server "listening" event. 81 | */ 82 | 83 | function onListening() { 84 | var addr = server.address(); 85 | var bind = typeof addr === 'string' 86 | ? 'pipe ' + addr 87 | : 'port ' + addr.port; 88 | debug('Listening on ' + bind); 89 | } 90 | -------------------------------------------------------------------------------- /src/tasks/burn_tokens.js: -------------------------------------------------------------------------------- 1 | const prompts = require('prompts') 2 | const { Client, Network } = require('@helium/http') 3 | const { TokenBurnV1, Transaction } = require('@helium/transactions') 4 | const { Keypair } = require('@helium/crypto') 5 | const { Address } = require('@helium/address') 6 | 7 | ;(async () => { 8 | const client = new Client(Network.production) 9 | const vars = await client.vars.get() 10 | Transaction.config(vars) 11 | 12 | const { Maker } = require('../models') 13 | const makers = await Maker.findAll() 14 | 15 | const makerAccounts = {} 16 | 17 | for (const maker of makers) { 18 | const account = await client.accounts.get(maker.address) 19 | makerAccounts[maker.id] = account 20 | } 21 | 22 | const makerChoice = await prompts({ 23 | type: 'select', 24 | name: 'makerId', 25 | message: 'Select a Maker with an HNT balance to burn:', 26 | choices: makers.map((maker) => ({ 27 | title: `${maker.name} (${makerAccounts[maker.id].balance.toString(2)})`, 28 | value: maker.id, 29 | disabled: makerAccounts[maker.id].balance.integerBalance === 0 30 | })), 31 | }) 32 | 33 | if (!makerChoice.makerId) { 34 | return process.exit(0) 35 | } 36 | 37 | const maker = await Maker.findByPk(makerChoice.makerId) 38 | const makerAccount = makerAccounts[makerChoice.makerId] 39 | 40 | const amountChoice = await prompts({ 41 | type: 'number', 42 | name: 'amount', 43 | message: 'How much HNT should be burned?', 44 | style: 'default', 45 | float: true, 46 | round: 8, 47 | min: 0.00000001, 48 | max: makerAccount.balance.floatBalance, 49 | validate: (v) => (v !== '' ? true : 'This field is required'), 50 | }) 51 | 52 | const txn = new TokenBurnV1({ 53 | payer: Address.fromB58(maker.address), 54 | payee: Address.fromB58(maker.address), 55 | amount: amountChoice.amount * 100000000, 56 | nonce: makerAccount.speculativeNonce + 1, 57 | memo: "", 58 | }) 59 | 60 | console.log('Payer', txn.payer.b58) 61 | console.log('Payee', txn.payee.b58) 62 | console.log('Amount (in Bones)', txn.amount) 63 | console.log('Fee (in DC)', txn.fee) 64 | console.log('Nonce', txn.nonce) 65 | console.log('Memo', txn.memo) 66 | 67 | const confirmResponse = await prompts({ 68 | type: 'text', 69 | name: 'understand', 70 | message: "Danger! Confirm transaction details above. This will sign and submit a DC Burn transaction. If you know what you are doing, type 'I UNDERSTAND'", 71 | validate: (v) => (v === "I UNDERSTAND" ? 
true : 'This field is required'), 72 | }) 73 | 74 | 75 | if (confirmResponse.understand !== 'I UNDERSTAND') { 76 | return process.exit(0) 77 | } 78 | 79 | const makerWithKeypair = await Maker.scope('withKeypair').findByPk(makerChoice.makerId) 80 | const keypairEntropy = Buffer.from(makerWithKeypair.keypairEntropy, 'hex') 81 | 82 | const keypair = await Keypair.fromEntropy(keypairEntropy) 83 | 84 | const signedTxn = await txn.sign({ payer: keypair }) 85 | 86 | const pendingTxn = await client.transactions.submit(signedTxn.toString()) 87 | 88 | console.log(pendingTxn) 89 | 90 | return process.exit(0) 91 | })() 92 | -------------------------------------------------------------------------------- /src/tasks/create_maker.js: -------------------------------------------------------------------------------- 1 | const prompts = require('prompts') 2 | const { Keypair, utils } = require('@helium/crypto') 3 | 4 | const generateToken = async (type, bytes) => { 5 | const buf = await utils.randomBytes(bytes) 6 | return [type, buf.toString('base64')].join('_') 7 | } 8 | 9 | ;(async () => { 10 | const { Maker, Token } = require('../models') 11 | 12 | const response = await prompts([ 13 | { 14 | type: 'text', 15 | name: 'name', 16 | message: "What is the Maker's name?", 17 | validate: (name) => (name.length > 0 ? true : 'This field is required'), 18 | }, 19 | { 20 | type: 'number', 21 | name: 'locationNonceLimit', 22 | message: 'How many assert location transactions should be paid for?', 23 | style: 'default', 24 | min: 0, 25 | max: 10, 26 | validate: (v) => (v !== '' ? true : 'This field is required'), 27 | }, 28 | { 29 | type: 'text', 30 | name: 'keypairEntropy', 31 | message: '(optional) What is the wallet entropy?', 32 | }, 33 | { 34 | type: 'confirm', 35 | name: 'apiKey', 36 | message: 'Do you want to create an API key for this Maker?', 37 | }, 38 | ]) 39 | 40 | if (!response.name || !response.locationNonceLimit) { 41 | return process.exit(0) 42 | } 43 | 44 | let keypairEntropy 45 | if (response.keypairEntropy) { 46 | keypairEntropy = Buffer.from(response.keypairEntropy, 'hex') 47 | } else { 48 | keypairEntropy = await utils.randomBytes(32) 49 | } 50 | const keypair = await Keypair.fromEntropy(keypairEntropy) 51 | const address = keypair.address.b58 52 | 53 | const maker = await Maker.create({ 54 | name: response.name, 55 | address, 56 | keypairEntropy: keypairEntropy.toString('hex'), 57 | locationNonceLimit: response.locationNonceLimit, 58 | }) 59 | 60 | console.log('Maker successfully created') 61 | console.log({ 62 | id: maker.id, 63 | name: maker.name, 64 | address: maker.address, 65 | locationNonceLimit: maker.locationNonceLimit, 66 | }) 67 | 68 | if (response.apiKey === false) { 69 | return process.exit(0) 70 | } 71 | 72 | const publicToken = await generateToken('pk', 32) 73 | const secretToken = await generateToken('sk', 64) 74 | 75 | const token = await Token.create({ 76 | publicToken, 77 | secretToken, 78 | makerId: maker.id, 79 | }) 80 | 81 | console.log('Maker API key successfully created') 82 | console.log({ 83 | maker: maker.name, 84 | makerId: maker.id, 85 | publicToken: token.publicToken, 86 | secretToken, 87 | }) 88 | 89 | return process.exit(0) 90 | })() 91 | -------------------------------------------------------------------------------- /src/tasks/create_token.js: -------------------------------------------------------------------------------- 1 | const prompts = require('prompts') 2 | const { utils } = require('@helium/crypto') 3 | 4 | const generateToken = async (type, bytes) => { 5 
| const buf = await utils.randomBytes(bytes) 6 | return [type, buf.toString('base64')].join('_') 7 | } 8 | 9 | ;(async () => { 10 | const { Maker, Token } = require('../models') 11 | const makers = await Maker.findAll() 12 | 13 | const response = await prompts({ 14 | type: 'select', 15 | name: 'makerId', 16 | message: 'Select a Maker to create an API token for:', 17 | choices: makers.map(maker => ({ 18 | title: maker.name, 19 | value: maker.id, 20 | })), 21 | }); 22 | 23 | if (!response.makerId) { 24 | return process.exit(0) 25 | } 26 | 27 | const maker = await Maker.findByPk(response.makerId) 28 | 29 | const publicToken = await generateToken('pk', 32) 30 | const secretToken = await generateToken('sk', 64) 31 | 32 | const token = await Token.create({ 33 | publicToken, 34 | secretToken, 35 | makerId: maker.id, 36 | }) 37 | 38 | console.log('Maker API key successfully created') 39 | console.log({ 40 | maker: maker.name, 41 | makerId: maker.id, 42 | publicToken: token.publicToken, 43 | secretToken, 44 | }) 45 | 46 | return process.exit(0) 47 | })() 48 | -------------------------------------------------------------------------------- /src/tasks/export_maker.js: -------------------------------------------------------------------------------- 1 | const prompts = require('prompts') 2 | 3 | ;(async () => { 4 | const { Maker } = require('../models') 5 | const makers = await Maker.findAll() 6 | 7 | const response = await prompts({ 8 | type: 'select', 9 | name: 'makerId', 10 | message: 'Select a Maker to export:', 11 | choices: makers.map((maker) => ({ 12 | title: maker.name, 13 | value: maker.id, 14 | })), 15 | }) 16 | 17 | if (!response.makerId) { 18 | return process.exit(0) 19 | } 20 | 21 | const confirmResponse = await prompts({ 22 | type: 'text', 23 | name: 'understand', 24 | message: "Danger! The selected Maker's **UNENCRYPTED** wallet entropy seed will now be displayed. This provides full access to the Maker wallet including its DC, HNT and onboarding rights. If you know what you are doing, type 'I UNDERSTAND'", 25 | validate: (v) => (v === "I UNDERSTAND" ? 
true : 'This field is required'), 26 | }) 27 | 28 | if (confirmResponse.understand !== 'I UNDERSTAND') { 29 | return process.exit(0) 30 | } 31 | 32 | const maker = await Maker.scope('withKeypair').findByPk(response.makerId) 33 | const keypairEntropy = maker.keypairEntropy 34 | 35 | console.log('Maker details:') 36 | console.log({ 37 | id: maker.id, 38 | name: maker.name, 39 | address: maker.address, 40 | locationNonceLimit: maker.locationNonceLimit, 41 | }) 42 | 43 | console.log('Maker wallet entropy:') 44 | console.log(keypairEntropy) 45 | return process.exit(0) 46 | })() 47 | -------------------------------------------------------------------------------- /src/tasks/report_onboards.js: -------------------------------------------------------------------------------- 1 | (async () => { 2 | const { Maker, Hotspot } = require('../models') 3 | const { Op } = require('sequelize') 4 | const makers = await Maker.findAll() 5 | 6 | const results = await Promise.all(makers.map(async (maker) => { 7 | const total_count = await Hotspot.count({ 8 | where: { 9 | [Op.and]: 10 | [ 11 | { makerId: maker.id }, 12 | { onboardingKey: {[Op.ne]: null} }, 13 | ] 14 | } 15 | }) 16 | 17 | const unonboarded_count = await Hotspot.count({ 18 | where: { 19 | [Op.and]: 20 | [ 21 | { makerId: maker.id }, 22 | { publicAddress: {[Op.eq]: null} }, 23 | { onboardingKey: {[Op.ne]: null} }, 24 | ] 25 | } 26 | }) 27 | 28 | return { 29 | maker: maker.name, 30 | makerId: maker.id, 31 | totalCount: total_count, 32 | unonboardedCount: unonboarded_count, 33 | } 34 | })) 35 | 36 | console.log(results) 37 | 38 | return process.exit(0) 39 | })() 40 | --------------------------------------------------------------------------------
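For reference, here is a minimal sketch of how a maker client might call the v3 onboarding API defined in src/routes/apiRouter.js. Everything specific to the example is an assumption: the base URL supposes that src/app.js (not shown above) mounts apiRouter under /api and that the server listens on the default port 3002 from src/server.js, and the pk_/sk_ values are placeholders for a token pair of the kind generated by src/tasks/create_token.js. As verifyApiKey in src/helpers/index.js shows, the tokens travel as publicToken:secretToken in the Authorization header; requests without a valid token fall under the public rate limits instead.

// hypothetical_onboard_client.js (illustration only, not a file in this repository)
const axios = require('axios')

const BASE_URL = 'http://localhost:3002/api' // assumed mount point for apiRouter
const API_KEY = 'pk_examplePublicToken:sk_exampleSecretToken' // placeholder publicToken:secretToken pair

async function onboardToIot(entityKey) {
  // POST /v3/transactions/iot/onboard answers with { code, data, success };
  // data.solanaTransactions carries serialized transactions that still need
  // the hotspot owner's signature before being submitted to Solana.
  const { data } = await axios.post(
    `${BASE_URL}/v3/transactions/iot/onboard`,
    { entityKey },
    { headers: { authorization: API_KEY } },
  )
  return data.data.solanaTransactions
}

onboardToIot('exampleHotspotPublicKeyB58')
  .then((txs) => console.log(`received ${txs.length} transaction(s) to sign`))
  .catch((err) => console.error(err.response ? err.response.data : err.message))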