├── .babelrc
├── .dockerignore
├── .flowconfig
├── .gitignore
├── .npmignore
├── .travis.yml
├── CHANGELOG.md
├── CONTRIBUTING.md
├── Dockerfile
├── LICENSE
├── README.md
├── bin
│   ├── aleph.js
│   └── mcclient.js
├── docker-compose.yml
├── docs
│   ├── build.sh
│   ├── clean.sh
│   ├── jsdoc-conf.json
│   └── publish.js
├── flow-typed
│   └── npm
│       ├── chai_v3.5.x.js
│       ├── flow-bin_v0.x.x.js
│       ├── knex_v0.12.x.js
│       ├── lodash_v4.x.x.js
│       ├── mkdirp_v0.5.x.js
│       └── mocha_v3.1.x.js
├── integration-test
│   ├── concat-dependencies
│   │   ├── Dockerfile
│   │   └── build.sh
│   ├── concat
│   │   ├── Dockerfile
│   │   └── test-identities
│   │       ├── mcdir
│   │       │   └── identity.node
│   │       └── mcnode
│   │           ├── identity.node
│   │           └── identity.publisher
│   ├── index.js
│   ├── merge_test.js
│   ├── node_info_test.js
│   ├── ping_test.js
│   ├── push_test.js
│   ├── query_test.js
│   ├── remote_data_test.js
│   ├── run-in-docker.sh
│   └── util.js
├── npm-shrinkwrap.json
├── package.json
├── pre-push.sh
├── scripts
│   ├── build-jq.js
│   ├── dataset-fetch.py
│   ├── ingest-parallel.py
│   ├── ingest.sh
│   ├── io.mediachain.indexer-image-jsonschema-1-0-0.json
│   ├── publish-500px.sh
│   ├── publish-dpla.sh
│   ├── publish-flickr-c10.sh
│   ├── publish-flickr-c100.sh
│   ├── publish-flickr.sh
│   ├── publish-pexels.sh
│   ├── publish.sh
│   └── validate.sh
├── src
│   ├── client
│   │   ├── api
│   │   │   ├── RestClient.js
│   │   │   └── index.js
│   │   └── cli
│   │       ├── commands
│   │       │   ├── archive.js
│   │       │   ├── archive
│   │       │   │   ├── dump.js
│   │       │   │   └── load.js
│   │       │   ├── auth.js
│   │       │   ├── auth
│   │       │   │   ├── grant.js
│   │       │   │   ├── revoke.js
│   │       │   │   └── show.js
│   │       │   ├── config.js
│   │       │   ├── config
│   │       │   │   ├── dir.js
│   │       │   │   ├── info.js
│   │       │   │   └── nat.js
│   │       │   ├── data.js
│   │       │   ├── data
│   │       │   │   ├── compact.js
│   │       │   │   ├── gc.js
│   │       │   │   ├── get.js
│   │       │   │   ├── keys.js
│   │       │   │   ├── put.js
│   │       │   │   └── sync.js
│   │       │   ├── delete.js
│   │       │   ├── getData.js
│   │       │   ├── id.js
│   │       │   ├── listNamespaces.js
│   │       │   ├── listPeers.js
│   │       │   ├── lookupPeer.js
│   │       │   ├── manifest.js
│   │       │   ├── manifest
│   │       │   │   ├── add.js
│   │       │   │   ├── get.js
│   │       │   │   ├── lookup.js
│   │       │   │   ├── remove.js
│   │       │   │   ├── self.js
│   │       │   │   └── set.js
│   │       │   ├── merge.js
│   │       │   ├── net.js
│   │       │   ├── net
│   │       │   │   ├── addr.js
│   │       │   │   ├── connections.js
│   │       │   │   ├── findPeers.js
│   │       │   │   ├── identify.js
│   │       │   │   └── ping.js
│   │       │   ├── netAddr.js
│   │       │   ├── netConnections.js
│   │       │   ├── ping.js
│   │       │   ├── publish.js
│   │       │   ├── publishRaw.js
│   │       │   ├── publishSchema.js
│   │       │   ├── push.js
│   │       │   ├── putData.js
│   │       │   ├── query.js
│   │       │   ├── shutdown.js
│   │       │   ├── statement.js
│   │       │   ├── status.js
│   │       │   └── validate.js
│   │       ├── index.js
│   │       └── util.js
│   ├── common
│   │   └── util.js
│   ├── metadata
│   │   ├── jqStream.js
│   │   ├── schema.js
│   │   ├── schemas
│   │   │   ├── com.snowplowanalytics.self-desc-jsonschema-1-0-0.json
│   │   │   └── io.mediachain.jsonld-jsonschema-1-0-0.json
│   │   ├── schemaver.js
│   │   └── serialize.js
│   ├── model
│   │   ├── query_result.js
│   │   └── statement.js
│   ├── peer
│   │   ├── constants.js
│   │   ├── datastore.js
│   │   ├── db
│   │   │   ├── index.js
│   │   │   └── migrations
│   │   │       └── 20161221093425_create_tables.js
│   │   ├── directory.js
│   │   ├── identity.js
│   │   ├── libp2p_node.js
│   │   ├── merge.js
│   │   ├── node.js
│   │   ├── push.js
│   │   ├── repl
│   │   │   ├── commands
│   │   │   │   ├── query.js
│   │   │   │   └── repl.js
│   │   │   ├── index.js
│   │   │   └── util.js
│   │   └── util.js
│   └── protobuf
│       ├── dir.proto
│       ├── index.js
│       ├── manifest.proto
│       ├── node.proto
│       ├── stmt.proto
│       └── types.js
└── test
    ├── common
    │   └── util_test.js
    ├── config.js
    ├── metadata
    │   ├── schema_test.js
    │   └── signature_test.js
    ├── model
    │   ├── query_result_test.js
    │   └── statement_test.js
    ├── peer
    │   ├── datastore_test.js
    │   ├── directory_test.js
    │   ├── identity_test.js
    │   ├── libp2p_node_test.js
    │   ├── merge_test.js
    │   ├── node_info_test.js
    │   ├── ping_test.js
    │   ├── push_test.js
    │   ├── remote_query_test.js
    │   ├── statement_db_test.js
    │   └── util_test.js
    ├── resources
    │   ├── fixtures
    │   │   ├── concat-message-signature.js
    │   │   └── test-statements.js
    │   ├── generate_test_publisher_ids.js
    │   ├── generate_testing_ids.js
    │   ├── publisher_ids
    │   │   ├── 4XTTM2UhNoDF1EfwonksnNN1zRGcZCMFutDRMtXYgciwiLzCf.id
    │   │   ├── 4XTTM33j1mHcbg5ctzCmsmvaiUCixX5jxTbkKHm2vCokC6uxw.id
    │   │   ├── 4XTTM3NVNm4VryWP91VXdEeoDXC2NzZkKRvUmPMnb1JdosJFk.id
    │   │   ├── 4XTTM6UEBfKSWUnC4KERbW8hesAAAGgrZcu68AXqihsFvU8w6.id
    │   │   ├── 4XTTM6UNDgjn7Y6FqnGyKiRf7NmqKzi7ofeh3ZRzFGmv5tkyQ.id
    │   │   ├── 4XTTM7xSyXgsrwG83aXEKt5Edp9LFmqxCxum1ABXAXDw5QHHy.id
    │   │   ├── 4XTTMATDw8fXWT5BGGaoc4A2sEbnLmMYBbekYnznDJ9K8c2cg.id
    │   │   ├── 4XTTMAhtTw2zgTVa5XFxHKeXcJGftdCVmsGR5zYzGspdLr2qf.id
    │   │   ├── 4XTTMB2cFTq23PTFGEdeT93cLdNptcSUekShxoUp5vzkjbkA4.id
    │   │   ├── 4XTTMBauYYKRkmWLyDwdU2sb4JgQX7VoZwwkPpmeFqrQoWWt7.id
    │   │   ├── 4XTTMCh4xe8E49486DUFko3cmqak3vrymKd5kMUBAVhMbxUkj.id
    │   │   ├── 4XTTMDi5msCJgo8GFAHvZkE57cCK4QCPRQSrUSQcPMqQexT3E.id
    │   │   ├── 4XTTMEj74YR3oT18Uh1ZR1DbWpG5BXftvcSnyrwC6JbgS9AcG.id
    │   │   ├── 4XTTMF7HGrVmmnFATLrCMwboCDxnnm6CPcC8dyE8Tigfb5kYh.id
    │   │   ├── 4XTTMFB2m8B8ShvUiXNuywUmK6r3e8YM5fsQsLArHMeUpjhWk.id
    │   │   ├── 4XTTMFSVc6B4fRt5s71438dXcPE9NBPVQ5sRow1qtcARARHCJ.id
    │   │   ├── 4XTTMFVjr2Zga6A1PF4GPZ43fSqvgWUfR8T3seEsfEBe3PUDi.id
    │   │   ├── 4XTTMGbTZFhxPnpwUmC8JaQS6NHeLj98TdbdjSeAT9qupk5x9.id
    │   │   ├── 4XTTMHa54HQoW8Lp6nLUzKt5QwZAUxux6DDshbBN5w4WNj759.id
    │   │   ├── 4XTTMHhM2e1nPVRgN6M7WM8WUcCnVwYwb8aS3ou9uKg5rdrHX.id
    │   │   └── concat
    │   │       └── 4XTTM4JKrrBeAK6qXmo8FoKmT5RkfjeXfZrnWjJNw9fKvPnEs.privateKey
    │   └── test_node_ids.json
    └── util.js

/.babelrc:
--------------------------------------------------------------------------------
1 | {
2 |   "plugins": ["transform-flow-strip-types"],
3 |   "env": {
4 |     "test": {
5 |       "plugins": ["transform-flow-strip-types", "istanbul"]
6 |     }
7 |   }
8 | }
9 | 
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | node_modules/
2 | .git/
--------------------------------------------------------------------------------
/.flowconfig:
--------------------------------------------------------------------------------
1 | [ignore]
2 | /lib/.*
3 | 
4 | 
5 | [include]
6 | 
7 | [libs]
8 | 
9 | [options]
10 | suppress_comment= \\(.\\|\n\\)*\\$FlowIssue
11 | unsafe.enable_getters_and_setters=true
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | 
6 | # Runtime data
7 | pids
8 | *.pid
9 | *.seed
10 | 
11 | # Directory for instrumented libs generated by jscoverage/JSCover
12 | lib-cov
13 | 
14 | # Coverage directory used by tools like istanbul
15 | coverage
16 | 
17 | # nyc test coverage
18 | .nyc_output
19 | 
20 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
21 | .grunt
22 | 
23 | # node-waf configuration
24 | .lock-wscript
25 | 
26 | # Compiled binary addons (http://nodejs.org/api/addons.html)
27 | build/Release
28 | 
29 | # Dependency directories
30 | node_modules
31 | jspm_packages
32 | 
33 | # Optional npm cache directory
34 | .npm
35 | 
36 | # Optional REPL history
37 | .node_repl_history
38 | 
39 | lib/
40 | 
41 | # emacs backups
42 | *~
43 | 
44 | # sublime
45 | *.sublime-project
46 | *.sublime-workspace
47 | 
48 | bin/jq
49 | 
50 | # generated documentation
51 | docs/dist/
--------------------------------------------------------------------------------
/.npmignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | 
6 | # Runtime data
7 | pids
8 | *.pid
9 | *.seed
10 | 
11 | # Directory for instrumented libs generated by jscoverage/JSCover
12 | lib-cov
13 | 
14 | # Coverage directory used by tools like istanbul
15 | coverage
16 | 
17 | # nyc test coverage
18 | .nyc_output
19 | 
20 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
21 | .grunt
22 | 
23 | # node-waf configuration
24 | .lock-wscript
25 | 
26 | # Compiled binary addons (http://nodejs.org/api/addons.html)
27 | build/Release
28 | 
29 | # Dependency directories
30 | node_modules
31 | jspm_packages
32 | 
33 | # Optional npm cache directory
34 | .npm
35 | 
36 | # Optional REPL history
37 | .node_repl_history
38 | 
39 | # emacs backups
40 | *~
41 | 
42 | *.iml
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | sudo: required
2 | services:
3 |   - docker
4 | env:
5 |   DOCKER_COMPOSE_VERSION: 1.8.0
6 |   concat_ref: master
7 | 
8 | language: node_js
9 | node_js:
10 |   - "6"
11 | cache:
12 |   directories:
13 |     - node_modules
14 | notifications:
15 |   slack:
16 |     on_change: always
17 |   email: false
18 | 
19 | before_install:
20 |   - sudo rm /usr/local/bin/docker-compose
21 |   - curl -L https://github.com/docker/compose/releases/download/${DOCKER_COMPOSE_VERSION}/docker-compose-`uname -s`-`uname -m` > docker-compose
22 |   - chmod +x docker-compose
23 |   - sudo mv docker-compose /usr/local/bin
24 |   - docker pull mediachain/concat-dependencies
25 |   - docker pull mhart/alpine-node:6.7
26 | 
27 | script:
28 |   - npm run test && if [ "$SKIP_INTEGRATION_TEST" == "true" ]; then true; else ./integration-test/run-in-docker.sh; fi
29 | 
30 | after_success: npm run coverage
31 | 
32 | deploy:
33 |   skip_cleanup: true
34 |   provider: npm
35 |   email: yusef@napora.org
36 |   api_key: "$NPM_API_KEY"
37 |   on:
38 |     tags: true
39 |     repo: mediachain/aleph
40 |     # only publish if we're on a commit whose tag starts with 'v'
41 |     condition: $(git describe --tags) == v*
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | ## Contributing Guidelines
2 | 
3 | ### Getting Started
4 | If things aren't working, please [file an issue](https://github.com/mediachain/aleph/issues), or reach out to
5 | us on our Slack community: http://slack.mediachain.io
6 | 
7 | To set up a development environment, make sure you have node 6 installed. [nvm](https://github.com/creationix/nvm)
8 | may be helpful if you need to manage multiple node versions, or if your platform includes an ancient system version.
9 | 
10 | You'll likely also want to install [flow](https://flowtype.org), either from the
11 | [latest release](https://github.com/facebook/flow/releases/latest), or, if you're on a Mac, with
12 | [homebrew](https://brew.sh): `brew install flow`. It's possible to build the project without flow,
13 | but flow types are used pervasively throughout, and you might as well get the benefit of the analyzer :)
14 | 
15 | Once that's set up, `npm run build` will run the `src` directory through babel and output compiled code to the `lib`
16 | directory.
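A quick way to exercise those scripts (each of them is defined in the `scripts` section of this repo's `package.json`, reproduced later in this document):

```bash
npm install     # installs dependencies; the postinstall hook also provisions the bundled jq binary
npm run build   # babel-compiles src/ into lib/
npm run check   # runs standard (style) and flow (types)
npm run test    # runs the unit test suite
```
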
At the moment babel is only used for removing flow type annotations from the compiled output, although
17 | we may lean on it more as we target other execution environments (e.g., the browser).
18 | 
19 | If you're working on the `mcclient` code, you might want to use `npm run cli -- # args for mcclient go after double dashes`, which will
20 | compile the code before running the command. Otherwise you need to remember to run `npm run build` before `mcclient`
21 | to compile your changes.
22 | 
23 | ### Style
24 | 
25 | Aleph is written in the [subset of ES2016 supported by node 6](http://node.green), with [flow type annotations](https://flowtype.org).
26 | Code is formatted according to [standard.js rules](http://standardjs.com/), with plugins to make standard.js play
27 | nice with flow. The upshot is that you can use most fancy "next-gen" JS features, with the exception of
28 | `async`/`await` and the "object spread" syntax (e.g. `const fooWithBar = {...foo, bar: 'baz'}`). If you find
29 | yourself needing the latter, you can use the "desugared" `const fooWithBar = Object.assign({}, foo, {bar: 'baz'})`.
30 | 
31 | 
32 | ### Tests
33 | Running `npm run check` will run both standard and flow, and it's good to get into the habit of running it
34 | periodically to catch any type errors, etc. If you want, you can force the habit upon yourself by using the
35 | `pre-push.sh` git hook, which can be installed with `cd .git/hooks && ln -s ../../pre-push.sh pre-push`. The
36 | pre-push hook will also run the unit tests with `npm run test` to try to catch any regressions.
37 | 
38 | That said, please don't let type checkers or style guides discourage you from contributing! If you'd rather not
39 | mess about with pleasing our nitpick bots, just open a PR and we can help sort it out and get it merged in.
40 | 
41 | ### Integration Tests
42 | We've set up a dockerized end-to-end test flow for concat + aleph. Take a look at it in action on [travis-ci](https://travis-ci.org/mediachain/aleph).
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM mhart/alpine-node:6.7
2 | WORKDIR /integration
3 | 
4 | # install git, needed for npm install
5 | RUN apk update
6 | RUN apk add git
7 | 
8 | # we have native dependencies, we'll need extra tools
9 | RUN apk add --no-cache make gcc g++ python
10 | 
11 | # npm install
12 | COPY ./package.json .
13 | COPY ./npm-shrinkwrap.json .
14 | RUN npm install
15 | 
16 | # copy sources
17 | ADD . .
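# (note: package.json and npm-shrinkwrap.json are copied before the full source tree above,
# so Docker's layer cache can reuse the `npm install` layer when only sources change)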
18 | 19 | # run integration test 20 | CMD ["/integration/node_modules/.bin/mocha", "--compilers", "js:babel-register", "/integration/integration-test"] 21 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2016 Mediachain 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /bin/aleph.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | require('../lib/peer/repl') 4 | -------------------------------------------------------------------------------- /bin/mcclient.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | require('../lib/client/cli') 4 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '2' 2 | services: 3 | integration-tester: 4 | tty: true 5 | build: . 
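    # built from the repo-root Dockerfile above; the links below make the concat
    # services reachable from the tester by their service names (mcdir, mcnode)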
6 | links: 7 | - mcdir 8 | - mcnode 9 | 10 | mcdir: 11 | build: 12 | context: ./integration-test/concat 13 | args: 14 | concat_ref: ${concat_ref} 15 | entrypoint: ["mcdir", "-d", "/integration-test/mcdir"] 16 | 17 | mcnode: 18 | build: 19 | context: ./integration-test/concat 20 | args: 21 | concat_ref: ${concat_ref} 22 | entrypoint: ["mcnode", "-b", "0.0.0.0", "-d", "/integration-test/mcnode"] 23 | -------------------------------------------------------------------------------- /docs/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | REPO_ROOT=$(git rev-parse --show-toplevel) 4 | THIS_DIR="${REPO_ROOT}/docs" 5 | NODE_MODULES="${REPO_ROOT}/node_modules" 6 | JSDOC="${NODE_MODULES}/.bin/jsdoc" 7 | TEMPLATE="${NODE_MODULES}/ink-docstrap/template" 8 | SOURCE="${REPO_ROOT}/src" 9 | README="${REPO_ROOT}/README.md" 10 | OUTPUT_DIR="${THIS_DIR}/dist" 11 | CONFIG="${THIS_DIR}/jsdoc-conf.json" 12 | 13 | cd ${REPO_ROOT} && ${JSDOC} -c ${CONFIG} -r ${SOURCE} -d ${OUTPUT_DIR} -t ${TEMPLATE} -R ${README} 14 | -------------------------------------------------------------------------------- /docs/clean.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | REPO_ROOT=$(git rev-parse --show-toplevel) 4 | THIS_DIR="${REPO_ROOT}/docs" 5 | OUTPUT_DIR="${THIS_DIR}/dist" 6 | 7 | rm -rf "${OUTPUT_DIR}" 8 | -------------------------------------------------------------------------------- /docs/jsdoc-conf.json: -------------------------------------------------------------------------------- 1 | { 2 | "plugins": ["plugins/markdown", "node_modules/jsdoc-babel"], 3 | "templates": { 4 | "systemName": "Mediachain (aleph)", 5 | "includeDate": false, 6 | "theme": "lumen" 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /docs/publish.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const ghpages = require('gh-pages') 4 | const path = require('path') 5 | 6 | ghpages.publish(path.join(__dirname, 'dist'), (err) => { 7 | if (err) { 8 | console.error('Error updating gh-pages branch:', err) 9 | process.exit(1) 10 | } 11 | 12 | console.log('Done updating gh-pages branch') 13 | }) 14 | -------------------------------------------------------------------------------- /flow-typed/npm/flow-bin_v0.x.x.js: -------------------------------------------------------------------------------- 1 | // flow-typed signature: 6a5610678d4b01e13bbfbbc62bdaf583 2 | // flow-typed version: 3817bc6980/flow-bin_v0.x.x/flow_>=v0.25.x 3 | 4 | declare module 'flow-bin' { 5 | declare module.exports: string; 6 | } 7 | -------------------------------------------------------------------------------- /flow-typed/npm/mkdirp_v0.5.x.js: -------------------------------------------------------------------------------- 1 | // flow-typed signature: 82aa0feffc2bbd64dce3bec492f5d601 2 | // flow-typed version: 3315d89a00/mkdirp_v0.5.x/flow_>=v0.25.0 3 | 4 | declare module 'mkdirp' { 5 | declare type Options = number | { mode?: number; fs?: mixed }; 6 | 7 | declare type Callback = (err: ?Error, path: ?string) => void; 8 | 9 | declare module.exports: { 10 | (path: string, options?: Options | Callback, callback?: Callback): void; 11 | sync(path: string, options?: Options): void; 12 | }; 13 | } 14 | -------------------------------------------------------------------------------- /flow-typed/npm/mocha_v3.1.x.js: 
-------------------------------------------------------------------------------- 1 | // flow-typed signature: 6b82cf8c1da27b4f0fa7a58e5ed5babf 2 | // flow-typed version: edf70dde46/mocha_v3.1.x/flow_>=v0.22.x 3 | 4 | type TestFunction = ((done: () => void) => void | Promise); 5 | 6 | declare var describe : { 7 | (name:string, spec:() => void): void; 8 | only(description:string, spec:() => void): void; 9 | skip(description:string, spec:() => void): void; 10 | timeout(ms:number): void; 11 | }; 12 | 13 | declare var context : typeof describe; 14 | 15 | declare var it : { 16 | (name:string, spec?:TestFunction): void; 17 | only(description:string, spec:TestFunction): void; 18 | skip(description:string, spec:TestFunction): void; 19 | timeout(ms:number): void; 20 | }; 21 | 22 | declare function before(method : TestFunction):void; 23 | declare function beforeEach(method : TestFunction):void; 24 | declare function after(method : TestFunction):void; 25 | declare function afterEach(method : TestFunction):void; 26 | -------------------------------------------------------------------------------- /integration-test/concat-dependencies/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM golang:1.7.3 2 | 3 | ARG concat_ref=master 4 | 5 | # Fetch package.json so the setup script can find gx dependencies 6 | WORKDIR /go/src/github.com/mediachain/concat 7 | RUN curl -O -L https://raw.githubusercontent.com/mediachain/concat/${concat_ref}/package.json 8 | 9 | # Fetch the setup script and run it 10 | RUN curl -O -L https://raw.githubusercontent.com/mediachain/concat/${concat_ref}/setup.sh 11 | RUN bash ./setup.sh 12 | 13 | # remove the mediachain dir from the go source root, so we can clone into it in a child image 14 | RUN rm -rf /go/src/github.com/mediachain/concat 15 | -------------------------------------------------------------------------------- /integration-test/concat-dependencies/build.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # helper script to build the concat-dependencies image. 4 | # 5 | 6 | OPTIND=1 7 | image_tag="latest" 8 | push_to_docker_hub=0 9 | 10 | while getopts "hpt:" opt; do 11 | case "$opt" in 12 | h) 13 | echo "usage: $0 [-t ] [-p]" 14 | echo "-t tag for docker image, defaults to 'latest'" 15 | echo "-p if present, will push to docker hub after successful build." 16 | echo " pushing requires prior docker hub login" 17 | exit 0 18 | ;; 19 | p) 20 | push_to_docker_hub=1 21 | ;; 22 | t) 23 | image_tag=$OPTARG 24 | ;; 25 | esac 26 | done 27 | 28 | # set current directory to this dir, in case we're running from elsewhere 29 | repo_root=$(git rev-parse --show-toplevel) 30 | dir="${repo_root}/integration-test/concat-dependencies" 31 | 32 | cd "${dir}" 33 | 34 | full_tag="mediachain/concat-dependencies:${image_tag}" 35 | 36 | if [ "${concat_ref}" == "" ]; then 37 | concat_ref="master" 38 | fi 39 | 40 | docker build --build-arg concat_ref=${concat_ref} -t ${full_tag} . 41 | status=$? 42 | if [ ${status} -ne 0 ]; then 43 | echo "Error building image, not uploading to docker hub" 44 | exit ${status} 45 | fi 46 | 47 | echo "image \"${full_tag}\" built successfully" 48 | 49 | if [ ${push_to_docker_hub} -ne 0 ]; then 50 | docker push ${full_tag} 51 | 52 | status=$? 53 | if [ ${status} -ne 0 ]; then 54 | echo "Pushing to docker hub failed. You may need to 'docker login' first." 
55 | fi 56 | exit ${status} 57 | fi 58 | -------------------------------------------------------------------------------- /integration-test/concat/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM mediachain/concat-dependencies 2 | 3 | # Clone concat repo 4 | RUN mkdir -p /go/src/github.com/mediachain 5 | RUN rm -rf /go/src/github.com/mediachain/concat 6 | ARG concat_ref=master 7 | RUN echo "checking out concat at ${concat_ref}" && git clone https://github.com/mediachain/concat /go/src/github.com/mediachain/concat 8 | WORKDIR /go/src/github.com/mediachain/concat 9 | RUN git checkout ${concat_ref} 10 | 11 | # Build the project 12 | RUN /go/src/github.com/mediachain/concat/install.sh 13 | 14 | # Copy the test identities to /integration-test/mcnode and /integration-test/mcdir 15 | VOLUME /integration-test 16 | COPY ./test-identities/mcnode /integration-test/mcnode 17 | COPY ./test-identities/mcdir /integration-test/mcdir 18 | 19 | # expose ports 20 | EXPOSE 9000 9001 9002 21 | -------------------------------------------------------------------------------- /integration-test/concat/test-identities/mcdir/identity.node: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mediachain/aleph/f18de6fb353a018fdc4b2a53df3a0e910a2e5b27/integration-test/concat/test-identities/mcdir/identity.node -------------------------------------------------------------------------------- /integration-test/concat/test-identities/mcnode/identity.node: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mediachain/aleph/f18de6fb353a018fdc4b2a53df3a0e910a2e5b27/integration-test/concat/test-identities/mcnode/identity.node -------------------------------------------------------------------------------- /integration-test/concat/test-identities/mcnode/identity.publisher: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mediachain/aleph/f18de6fb353a018fdc4b2a53df3a0e910a2e5b27/integration-test/concat/test-identities/mcnode/identity.publisher -------------------------------------------------------------------------------- /integration-test/index.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const { describe, it } = require('mocha') 4 | const assert = require('assert') 5 | 6 | const RestClient = require('../src/client/api/RestClient') 7 | 8 | describe('crazy docker integration setup', () => { 9 | it('can contact a node using a docker-compose service name as hostname', () => { 10 | const client = new RestClient({apiUrl: 'http://mcnode:9002'}) 11 | return client.id().then(id => { 12 | assert(id != null, 'should be able to get node id') 13 | }) 14 | }) 15 | }) 16 | -------------------------------------------------------------------------------- /integration-test/merge_test.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | /* eslint-env mocha */ 3 | 4 | const assert = require('assert') 5 | const { describe, it, before, after } = require('mocha') 6 | const uuid = require('uuid') 7 | 8 | const { getTestNodeId } = require('../test/util') 9 | const { MediachainNode: AlephNode } = require('../src/peer/node') 10 | const { concatNodeClient, concatNodePeerInfo } = require('./util') 11 | 12 | const TEST_NAMESPACE = 'scratch.merge-test' 13 | const INVALID_STATEMENT_NAMESPACE = 
'scratch.merge-test.invalid-stmt' 14 | 15 | const seedObjects = [ 16 | {id: uuid.v4(), hello: 'world'}, 17 | {id: uuid.v4(), foo: 'bar'}, 18 | {id: uuid.v4(), etc: 'and so on'} 19 | ] 20 | 21 | describe('Merge (concat -> aleph)', () => { 22 | let objectIds 23 | let seedStatements 24 | let concatClient 25 | let concatPeerInfo 26 | 27 | before(() => { 28 | return concatNodeClient() 29 | .then(_client => { concatClient = _client }) 30 | .then(() => concatNodePeerInfo()) 31 | .then(_pInfo => { concatPeerInfo = _pInfo }) 32 | .then(() => concatClient.setStatus('online')) 33 | .then(() => concatClient.putData(...seedObjects)) 34 | .then(_objectIds => { objectIds = _objectIds }) 35 | .then(() => { 36 | seedStatements = objectIds.map((object, idx) => ({ 37 | object, 38 | refs: [`test:obj:${idx.toString()}`], 39 | tags: ['test'], 40 | deps: [] 41 | })) 42 | return concatClient.publish({namespace: TEST_NAMESPACE}, ...seedStatements) 43 | }) 44 | .then(() => 45 | // add a statement with a reference to a non-existent object 46 | concatClient.publish({namespace: INVALID_STATEMENT_NAMESPACE}, { 47 | object: 'QmNLftPEMzsadpbTsGaVP3haETYJb4GfnCgQiaFj5Red9G', 48 | refs: ['test:invalid:ref'], 49 | tags: [], 50 | deps: [] 51 | })) 52 | }) 53 | 54 | after(() => 55 | concatClient.delete(`DELETE FROM ${TEST_NAMESPACE}`) 56 | .then(() => concatClient.delete(`DELETE FROM ${INVALID_STATEMENT_NAMESPACE}`)) 57 | ) 58 | 59 | it('merges statements from a concat node', () => { 60 | let alephNode 61 | return getTestNodeId().then(peerId => { alephNode = new AlephNode({ peerId }) }) 62 | .then(() => alephNode.start()) 63 | .then(() => alephNode.merge(concatPeerInfo, `SELECT * FROM ${TEST_NAMESPACE}`)) 64 | .then(results => { 65 | assert.notEqual(results, null, 'merge did not return a result') 66 | assert.equal(results.statementCount, seedStatements.length, 'aleph node merged an unexpected number of statements') 67 | assert.equal(results.objectCount, objectIds.length, 'aleph node merged an unexpected number of objects') 68 | }) 69 | }) 70 | 71 | it('returns counts + error message for partially successful merge', () => { 72 | let alephNode 73 | return getTestNodeId() 74 | .then(peerId => { alephNode = new AlephNode({ peerId }) }) 75 | .then(() => alephNode.start()) 76 | .then(() => alephNode.merge(concatPeerInfo, `SELECT * FROM ${TEST_NAMESPACE}.* ORDER BY counter`)) 77 | .catch(err => { 78 | assert.fail(err, 'no error', '', '!==') 79 | }) 80 | .then(result => { 81 | assert.notEqual(result, null, 'partially-successful merge should return a result') 82 | assert(typeof result.error === 'string' && result.error.length > 0, 83 | 'partially successful merge should return an error message') 84 | }) 85 | }) 86 | }) 87 | -------------------------------------------------------------------------------- /integration-test/node_info_test.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | /* eslint-env mocha */ 3 | 4 | const assert = require('assert') 5 | const { before, describe, it } = require('mocha') 6 | 7 | const { getTestNodeId } = require('../test/util') 8 | const { MediachainNode: AlephNode } = require('../src/peer/node') 9 | const { setConcatNodeStatus, concatNodePeerInfo, setConcatNodeInfoMessage } = require('./util') 10 | 11 | describe('Node Info', () => { 12 | let nodeId 13 | const infoMessage = `I'm a concat test node` 14 | 15 | before(() => { 16 | return Promise.all([ 17 | getTestNodeId().then(id => { nodeId = id }), 18 | setConcatNodeInfoMessage(infoMessage) 19 | ]) 20 | 
})
21 | 
22 |   it('retrieves the node ids and info message from a concat node', () => {
23 |     const alephPeer = new AlephNode({peerId: nodeId})
24 |     return alephPeer.start()
25 |       .then(() => setConcatNodeStatus('online'))
26 |       .then(() => concatNodePeerInfo())
27 |       .then(concatNodeInfo => alephPeer.remoteNodeInfo(concatNodeInfo))
28 |       .then(result => {
29 |         assert.equal(result.info, infoMessage,
30 |           'node info response should include correct info message'
31 |         )
32 |       })
33 |   })
34 | })
--------------------------------------------------------------------------------
/integration-test/ping_test.js:
--------------------------------------------------------------------------------
1 | // @flow
2 | /* eslint-env mocha */
3 | 
4 | const assert = require('assert')
5 | const { describe, it } = require('mocha')
6 | 
7 | const { getTestNodeId } = require('../test/util')
8 | const { MediachainNode: AlephNode } = require('../src/peer/node')
9 | const { setConcatNodeStatus, concatNodePeerInfo, concatNodePeerId, directoryPeerInfo } = require('./util')
10 | 
11 | describe('Ping', () => {
12 |   it('pings a concat node directly by PeerInfo', () => {
13 |     let alephPeer
14 | 
15 |     return getTestNodeId()
16 |       .then(peerId => { alephPeer = new AlephNode({peerId}) })
17 |       .then(() => alephPeer.start())
18 |       .then(() => setConcatNodeStatus('online'))
19 |       .then(() => concatNodePeerInfo())
20 |       .then(concatNodeInfo => alephPeer.ping(concatNodeInfo))
21 |       .then(result => assert(result != null, 'ping failed'))
22 |   })
23 | 
24 |   it('pings a concat node via a directory lookup', () => {
25 |     let alephPeer
26 |     return getTestNodeId()
27 |       .then(peerId => { alephPeer = new AlephNode({peerId}) })
28 |       .then(() => directoryPeerInfo())
29 |       .then(dirInfo => alephPeer.setDirectory(dirInfo))
30 |       .then(() => alephPeer.start())
31 |       .then(() => setConcatNodeStatus('public'))
32 |       .then(() => concatNodePeerId())
33 |       .then(peerId => alephPeer.ping(peerId))
34 |       .then(result => assert(result != null, 'ping failed'))
35 |   })
36 | })
--------------------------------------------------------------------------------
/integration-test/push_test.js:
--------------------------------------------------------------------------------
1 | // @flow
2 | /* eslint-env mocha */
3 | 
4 | const assert = require('assert')
5 | const { describe, it, before, after } = require('mocha')
6 | const uuid = require('uuid')
7 | const { promiseHash } = require('../src/common/util')
8 | 
9 | const { getTestNodeId } = require('../test/util')
10 | const { MediachainNode: AlephNode } = require('../src/peer/node')
11 | const { concatNodeClient, concatNodePeerInfo } = require('./util')
12 | const { PublisherId } = require('../src/peer/identity')
13 | const { SignedStatement } = require('../src/model/statement')
14 | 
15 | const TEST_NAMESPACE = 'scratch.push-test'
16 | const UNAUTHORIZED_NAMESPACE = 'scratch.unauthorized-push-test'
17 | 
18 | const seedObjects = [
19 |   {id: uuid.v4(), foo: 'bar'},
20 |   {id: uuid.v4(), foo: 'baz'}
21 | ]
22 | 
23 | function seedStatementsToAleph (alephNode: AlephNode): Promise<Array<string>> {
24 |   return Promise.all(
25 |     seedObjects.map(obj =>
26 |       alephNode.ingestSimpleStatement(TEST_NAMESPACE, obj, { refs: [obj.id] })
27 |     )
28 |   )
29 | }
30 | 
31 | function seedUnauthorizedStatement (alephNode: AlephNode): Promise<string> {
32 |   const obj = {letMeIn: 'please'}
33 |   return alephNode.ingestSimpleStatement(UNAUTHORIZED_NAMESPACE, obj, { refs: ['foo'] })
34 | }
35 | 
36 | function preparePartiallyValidStatements (alephNode: AlephNode, numValid: number):
Promise<Array<SignedStatement>> {
37 |   return alephNode.putData({hello: 'world'})
38 |     .then(([object]) => {
39 |       const promises = []
40 |       for (let i = 0; i < numValid; i++) {
41 |         promises.push(SignedStatement.createSimple(alephNode.publisherId, TEST_NAMESPACE, {
42 |           object,
43 |           refs: [`test:${i.toString()}`]
44 |         },
45 |         alephNode.statementCounter))
46 |       }
47 |       // add a statement with an invalid object reference
48 |       promises.push(SignedStatement.createSimple(alephNode.publisherId, TEST_NAMESPACE, {
49 |         object: 'QmNLftPEMzsadpbTsGaVP3haETYJb4GfnCgQiaFj5Red9G', refs: [], deps: [], tags: []
50 |       }))
51 |       return Promise.all(promises)
52 |     })
53 | }
54 | 
55 | describe('Push', () => {
56 |   let concatClient
57 |   let alephNode
58 |   let alephPeerIdB58
59 |   let publisherId
60 |   let statementIds
61 |   let unauthorizedStatementId
62 | 
63 |   before(() => PublisherId.generate()
64 |     .then(_publisherId => { publisherId = _publisherId })
65 |     .then(() => getTestNodeId())
66 |     .then(nodeId => {
67 |       const peerId = nodeId
68 |       alephPeerIdB58 = peerId.toB58String()
69 |       alephNode = new AlephNode({peerId, publisherId})
70 |     })
71 |     .then(() => seedStatementsToAleph(alephNode))
72 |     .then(_statementIds => { statementIds = _statementIds })
73 |     .then(() => seedUnauthorizedStatement(alephNode))
74 |     .then(_stmtId => { unauthorizedStatementId = _stmtId })
75 |     .then(() => concatNodeClient())
76 |     .then(client => { concatClient = client })
77 |     .then(() => concatClient.authorize(alephPeerIdB58, [TEST_NAMESPACE]))
78 |   )
79 | 
80 |   after(() =>
81 |     concatClient.delete(`DELETE FROM ${TEST_NAMESPACE}`)
82 |   )
83 | 
84 |   it('pushes data to a concat node', () => {
85 |     return alephNode.start()
86 |       .then(() => concatNodePeerInfo())
87 |       .then(pInfo => alephNode.pushStatementsById(pInfo, statementIds))
88 |       .then(result => {
89 |         assert(result != null)
90 |         assert.equal(result.statements, seedObjects.length, 'peer did not accept all statements')
91 |         assert.equal(result.objects, seedObjects.length, 'peer did not accept all objects')
92 |         assert.equal(result.error, '', 'peer returned an error')
93 |       })
94 |   })
95 | 
96 |   it('errors if not authorized for a given namespace', () => {
97 |     return alephNode.start()
98 |       .then(() => concatNodePeerInfo())
99 |       .then(pInfo => alephNode.pushStatementsById(pInfo, [unauthorizedStatementId]))
100 |       .catch(err => {
101 |         assert(err != null)
102 |         assert(err.message.toLowerCase().includes('auth'))
103 |       })
104 |   })
105 | 
106 |   it('returns counts + error message for partially successful push', () => {
107 |     const numValid = 10
108 |     return alephNode.start()
109 |       .then(() => promiseHash({
110 |         pInfo: concatNodePeerInfo(),
111 |         statements: preparePartiallyValidStatements(alephNode, numValid)
112 |       }))
113 |       .then(({pInfo, statements}) => alephNode.pushStatements(pInfo, statements))
114 |       .then(result => {
115 |         // concat will accept the statement with the missing object, since it's structurally valid.
116 |         // but it will end the push operation with an error.
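// (hence the expected count below: all the valid statements plus that one dangling-reference statement)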
117 | const expectedStatements = numValid + 1 118 | assert(result != null) 119 | assert.equal(result.statements, expectedStatements, 'peer did not accept valid statements') 120 | assert(typeof result.error === 'string', 'peer did not return an error message') 121 | }) 122 | }) 123 | }) 124 | -------------------------------------------------------------------------------- /integration-test/query_test.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | /* eslint-env mocha */ 3 | 4 | const assert = require('assert') 5 | const { describe, it, before, after } = require('mocha') 6 | 7 | const { getTestNodeId } = require('../test/util') 8 | const { MediachainNode: AlephNode } = require('../src/peer/node') 9 | const { concatNodeClient, concatNodePeerInfo } = require('./util') 10 | 11 | const TEST_NAMESPACE = 'scratch.query-test' 12 | 13 | const seedStatements = [ 14 | {object: 'QmF00123', tags: [], refs: [], deps: []}, 15 | {object: 'QmF00456', tags: ['foo'], refs: [], deps: []}, 16 | {object: 'QmFoo789', refs: ['bar'], tags: ['foo'], deps: []} 17 | ] 18 | 19 | describe('Query', () => { 20 | let concatClient 21 | before(() => 22 | concatNodeClient() 23 | .then(client => { concatClient = client }) 24 | .then(() => concatClient.setStatus('online')) 25 | .then(() => concatClient.publish({namespace: TEST_NAMESPACE}, ...seedStatements)) 26 | ) 27 | 28 | after(() => 29 | concatClient.delete(`DELETE FROM ${TEST_NAMESPACE}`) 30 | ) 31 | 32 | it('queries a remote concat node from aleph node', () => { 33 | let alephNode 34 | return getTestNodeId().then(peerId => { alephNode = new AlephNode({ peerId }) }) 35 | .then(() => alephNode.start()) 36 | .then(() => concatNodePeerInfo()) 37 | .then(concatInfo => alephNode.remoteQuery(concatInfo, `SELECT * FROM ${TEST_NAMESPACE} ORDER BY counter`)) 38 | .then(results => { 39 | assert(results != null && results.length > 0, 'query returned no results') 40 | 41 | // unpack query results and compare to seed statements 42 | const resultStatements = results.map(r => r.body.toProtobuf().simple) 43 | assert.deepEqual(seedStatements, resultStatements, 'query returned unexpected results') 44 | }) 45 | }) 46 | }) 47 | -------------------------------------------------------------------------------- /integration-test/remote_data_test.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | /* eslint-env mocha */ 3 | 4 | const assert = require('assert') 5 | const { describe, it, before } = require('mocha') 6 | const uuid = require('uuid') 7 | 8 | const { getTestNodeId } = require('../test/util') 9 | const { MediachainNode: AlephNode } = require('../src/peer/node') 10 | const { concatNodeClient, concatNodePeerInfo } = require('./util') 11 | 12 | const seedObjects = [ 13 | {id: uuid.v4(), foo: 'bar'}, 14 | {id: uuid.v4(), hello: 'world'} 15 | ] 16 | 17 | describe('Remote Data Fetching', () => { 18 | let dataIds = [] 19 | let concatClient 20 | 21 | before(() => { 22 | return concatNodeClient() 23 | .then(client => { concatClient = client }) 24 | .then(() => concatClient.putData(...seedObjects)) 25 | .then(ids => { dataIds = ids }) 26 | }) 27 | 28 | it('can fetch data from a remote concat node', () => { 29 | let alephNode 30 | return getTestNodeId().then(id => { alephNode = new AlephNode({peerId: id}) }) 31 | .then(() => alephNode.start()) 32 | .then(() => concatNodePeerInfo()) 33 | .then(concatInfo => alephNode.remoteData(concatInfo, dataIds)) 34 | .then(results => { 35 | assert(results != 
null && results.length > 0, 'remote data fetch returned no results')
36 | 
37 |       for (let i = 0; i < results.length; i++) {
38 |         const key = results[i].key
39 |         assert.equal(key, dataIds[i], 'remote data fetch should return objects with same keys as query')
40 |       }
41 |     })
42 |   })
43 | })
--------------------------------------------------------------------------------
/integration-test/run-in-docker.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | 
3 | # define some colors to use for output
4 | RED='\033[0;31m'
5 | GREEN='\033[0;32m'
6 | NC='\033[0m'
7 | # kill and remove any running containers
8 | cleanup () {
9 |   docker-compose -p ci kill
10 |   docker-compose -p ci rm -f
11 | }
12 | # catch unexpected failures, do cleanup and output an error message
13 | trap 'cleanup ; printf "${RED}Tests Failed For Unexpected Reasons${NC}\n"'\
14 |   HUP INT QUIT PIPE TERM
15 | # build and run the composed services
16 | docker-compose -p ci build && docker-compose -p ci up -d
17 | if [ $? -ne 0 ] ; then
18 |   printf "${RED}Docker Compose Failed${NC}\n"
19 |   exit -1
20 | fi
21 | # wait for the test service to complete and grab the exit code
22 | TEST_EXIT_CODE=`docker wait ci_integration-tester_1`
23 | # output the logs for the test (for clarity)
24 | docker logs ci_integration-tester_1
25 | # inspect the output of the test and display respective message
26 | if [ -z ${TEST_EXIT_CODE+x} ] || [ "$TEST_EXIT_CODE" -ne 0 ] ; then
27 |   printf "${RED}Tests Failed${NC} - Exit Code: $TEST_EXIT_CODE\n"
28 | else
29 |   printf "${GREEN}Tests Passed${NC}\n"
30 | fi
31 | # call the cleanup function
32 | cleanup
33 | # exit the script with the same code as the test service code
34 | exit $TEST_EXIT_CODE
--------------------------------------------------------------------------------
/integration-test/util.js:
--------------------------------------------------------------------------------
1 | // @flow
2 | 
3 | const dns = require('dns')
4 | const path = require('path')
5 | const Multiaddr = require('multiaddr')
6 | const PeerId = require('peer-id')
7 | const PeerInfo = require('peer-info')
8 | const RestClient = require('../src/client/api/RestClient')
9 | const { loadIdentity } = require('../src/peer/identity')
10 | import type { NodeStatus } from '../src/client/api/RestClient'
11 | 
12 | function dnsLookup (hostname: string): Promise<string> {
13 |   return new Promise((resolve, reject) => {
14 |     dns.lookup(hostname, 4, (err, address) => {
15 |       if (err) return reject(err)
16 |       resolve(address)
17 |     })
18 |   })
19 | }
20 | 
21 | const DIRECTORY_HOSTNAME = 'mcdir'
22 | const NODE_HOSTNAME = 'mcnode'
23 | const DIRECTORY_PORT = 9000
24 | const NODE_P2P_PORT = 9001
25 | const NODE_API_PORT = 9002
26 | 
27 | function lookupMultiaddr (hostname: string, port: number): Promise<Multiaddr> {
28 |   return dnsLookup(hostname).then(addr => Multiaddr(`/ip4/${addr}/tcp/${port}`))
29 | }
30 | 
31 | function directoryMultiaddr (): Promise<Multiaddr> {
32 |   return lookupMultiaddr(DIRECTORY_HOSTNAME, DIRECTORY_PORT)
33 | }
34 | 
35 | function directoryPeerId (): Promise<PeerId> {
36 |   return loadIdentity(path.join(__dirname, 'concat', 'test-identities', 'mcdir', 'identity.node'))
37 | }
38 | 
39 | function directoryPeerInfo (): Promise<PeerInfo> {
40 |   return Promise.all([directoryMultiaddr(), directoryPeerId()])
41 |     .then(([maddr, peerId]) => {
42 |       const peerInfo = new PeerInfo(peerId)
43 |       peerInfo.multiaddr.add(maddr)
44 |       return peerInfo
45 |     })
46 | }
47 | 
48 | function concatNodeMultiaddr (): Promise<Multiaddr> {
49 |   return
lookupMultiaddr(NODE_HOSTNAME, NODE_P2P_PORT)
50 | }
51 | 
52 | function concatNodePeerId (): Promise<PeerId> {
53 |   return loadIdentity(path.join(__dirname, 'concat', 'test-identities', 'mcnode', 'identity.node'))
54 | }
55 | 
56 | function concatNodeClient (): Promise<RestClient> {
57 |   return dnsLookup(NODE_HOSTNAME)
58 |     .then(ipAddr => new RestClient({apiUrl: `http://${ipAddr}:${NODE_API_PORT}`}))
59 | }
60 | 
61 | function setConcatNodeDirectoryInfo (): Promise<*> {
62 |   return Promise.all([concatNodeClient(), directoryMultiaddr(), directoryPeerId()])
63 |     .then(([client, dirAddr, dirId]) => {
64 |       return client.setDirectoryIds(dirAddr.toString() + '/p2p/' + dirId.toB58String())
65 |     })
66 | }
67 | 
68 | function setConcatNodeStatus (status: NodeStatus): Promise {
69 |   let setupPromise = Promise.resolve()
70 |   if (status === 'public') {
71 |     setupPromise = setConcatNodeDirectoryInfo()
72 |   }
73 | 
74 |   return setupPromise
75 |     .then(() => concatNodeClient())
76 |     .then(client => client.setStatus(status))
77 | }
78 | 
79 | function setConcatNodeInfoMessage (message: string): Promise {
80 |   return concatNodeClient()
81 |     .then(client => client.setInfo(message))
82 | }
83 | 
84 | function concatNodePeerInfo (): Promise<PeerInfo> {
85 |   return Promise.all([concatNodeMultiaddr(), concatNodePeerId()])
86 |     .then(([maddr, peerId]) => {
87 |       const peerInfo = new PeerInfo(peerId)
88 |       peerInfo.multiaddr.add(maddr)
89 |       return peerInfo
90 |     })
91 | }
92 | 
93 | module.exports = {
94 |   dnsLookup,
95 |   lookupMultiaddr,
96 |   directoryMultiaddr,
97 |   directoryPeerId,
98 |   directoryPeerInfo,
99 |   concatNodeMultiaddr,
100 |   concatNodeClient,
101 |   setConcatNodeStatus,
102 |   setConcatNodeInfoMessage,
103 |   concatNodePeerId,
104 |   concatNodePeerInfo
105 | }
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 |   "name": "aleph",
3 |   "version": "1.7.0",
4 |   "description": "Mediachain client and javascript peer",
5 |   "main": "index.js",
6 |   "scripts": {
7 |     "build-jq": "node scripts/build-jq.js",
8 |     "postinstall": "npm run build-jq",
9 |     "test": "NODE_ENV=test nyc --require babel-register mocha --recursive './test/**/*_test.js'",
10 |     "coverage": "nyc report --reporter=text-lcov | coveralls",
11 |     "coverage:lcov": "npm run test && nyc report --reporter=lcov",
12 |     "build": "mkdirp ./lib && babel --copy-files src -d lib",
13 |     "check": "npm run check:style && npm run check:types",
14 |     "check:style": "standard",
15 |     "check:types": "flow",
16 |     "cli": "npm run build >/dev/null && node ./lib/client/cli/index.js",
17 |     "prepublish": "npm run build",
18 |     "docs": "npm run docs:build && npm run docs:publish && npm run docs:clean",
19 |     "docs:build": "./docs/build.sh",
20 |     "docs:publish": "node ./docs/publish.js",
21 |     "docs:clean": "./docs/clean.sh"
22 |   },
23 |   "bin": {
24 |     "aleph": "./bin/aleph.js",
25 |     "mcclient": "./bin/mcclient.js"
26 |   },
27 |   "repository": {
28 |     "type": "git",
29 |     "url": "git+https://github.com/mediachain/aleph.git"
30 |   },
31 |   "author": "",
32 |   "license": "MIT",
33 |   "bugs": {
34 |     "url": "https://github.com/mediachain/aleph/issues"
35 |   },
36 |   "homepage": "https://github.com/mediachain/aleph#readme",
37 |   "dependencies": {
38 |     "ajv": "^4.9.2",
39 |     "bin-build": "^2.2.0",
40 |     "borc": "^2.0.1",
41 |     "byline": "^5.0.0",
42 |     "digest-stream": "^2.0.0",
43 |     "duplex-child-process": "0.0.5",
44 |     "gunzip-maybe": "^1.3.1",
45 |     "knex": "^0.12.6",
46 |     "levelup": "^1.3.3",
47 | 
"libp2p-crypto": "^0.8.0", 48 | "libp2p-ping": "^0.3.0", 49 | "libp2p-secio": "^0.6.4", 50 | "libp2p-spdy": "^0.10.1", 51 | "libp2p-swarm": "^0.26.2", 52 | "libp2p-tcp": "^0.9.1", 53 | "libp2p-websockets": "^0.9.1", 54 | "locks": "^0.2.2", 55 | "lodash": "^4.17.1", 56 | "mafmt": "^2.1.5", 57 | "map-stream": "^0.0.6", 58 | "memdown": "^1.2.4", 59 | "mkdirp": "^0.5.1", 60 | "multiaddr": "^2.1.3", 61 | "multihashes": "^0.4.3", 62 | "ndjson": "^1.4.3", 63 | "node-fetch": "^1.6.3", 64 | "peer-book": "^0.3.0", 65 | "peer-id": "^0.8.0", 66 | "peer-info": "^0.8.0", 67 | "protocol-buffers": "^3.2.0", 68 | "pull-abortable": "^4.1.0", 69 | "pull-length-prefixed": "^1.2.0", 70 | "pull-paramap": "^1.2.1", 71 | "pull-promise": "^2.0.0", 72 | "pull-pushable": "^2.0.1", 73 | "pull-stream": "^3.5.0", 74 | "pull-window": "^2.1.4", 75 | "sqlite3": "^3.1.8", 76 | "tar-stream": "https://registry.npmjs.org/tar-stream/-/tar-stream-1.5.2.tgz", 77 | "temp": "^0.8.3", 78 | "thenify-all": "^1.6.0", 79 | "tunnel-ssh": "^4.1.1", 80 | "uuid": "^3.0.1", 81 | "yargs": "^6.3.0" 82 | }, 83 | "devDependencies": { 84 | "babel-cli": "^6.22.0", 85 | "babel-eslint": "^7.1.0", 86 | "babel-plugin-istanbul": "^3.1.2", 87 | "babel-plugin-transform-flow-strip-types": "^6.22.0", 88 | "babel-register": "^6.22.0", 89 | "chai": "^3.5.0", 90 | "chai-as-promised": "^6.0.0", 91 | "coveralls": "^2.11.15", 92 | "eslint-plugin-flowtype": "^2.30.3", 93 | "flow-bin": "^0.41.0", 94 | "gh-pages": "^0.12.0", 95 | "ink-docstrap": "^1.3.0", 96 | "interface-connection": "^0.3.0", 97 | "jsdoc": "^3.4.3", 98 | "jsdoc-babel": "^0.3.0", 99 | "mocha": "^3.2.0", 100 | "mocha-eventually": "^1.1.0", 101 | "mocha-lcov-reporter": "^1.2.0", 102 | "nyc": "^10.0.0", 103 | "standard": "^8.6.0", 104 | "std-mocks": "^1.0.1" 105 | }, 106 | "standard": { 107 | "parser": "babel-eslint", 108 | "plugins": [ 109 | "flowtype" 110 | ], 111 | "ignore": [ 112 | "/flow-typed/**", 113 | "/docs/**" 114 | ] 115 | }, 116 | "nyc": { 117 | "sourceMap": false, 118 | "instrument": false 119 | } 120 | } 121 | -------------------------------------------------------------------------------- /pre-push.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # check for nvm (node version manager) and make sure it's enabled 4 | # helpful if running commmit hook from outside your shell env 5 | # e.g. from a GUI like gitup 6 | 7 | if [ -e "$HOME/.nvm/nvm.sh" ]; then 8 | source "$HOME/.nvm/nvm.sh" 9 | fi 10 | 11 | # stash un-staged changes 12 | STASH_REF_BEFORE=$(git rev-parse --verify -q refs/stash 2>/dev/null) 13 | git stash -q --keep-index 14 | STASH_REF_AFTER=$(git rev-parse --verify -q refs/stash 2>/dev/null) 15 | 16 | # run standard (code style enforcer), flow, and tests 17 | npm run check && npm run test && npm shrinkwrap 18 | 19 | RESULT=$? 
20 | 
21 | # un-stash, if the previous stash command actually created a stash
22 | if [ "$STASH_REF_BEFORE" != "$STASH_REF_AFTER" ]; then
23 |   git stash pop -q
24 | fi
25 | 
26 | # abort push if anything failed
27 | [ $RESULT -ne 0 ] && exit 1
28 | 
29 | exit 0
--------------------------------------------------------------------------------
/scripts/build-jq.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 | 
3 | const os = require('os')
4 | const fs = require('fs')
5 | const path = require('path')
6 | const BinBuild = require('bin-build')
7 | const mkdirp = require('mkdirp')
8 | const { get } = require('lodash')
9 | const fetch = require('node-fetch')
10 | const digestStream = require('digest-stream')
11 | 
12 | const JQ_INFO = {
13 |   name: 'jq',
14 |   url: 'https://github.com/stedolan/jq/releases/download',
15 |   version: 'jq-1.5'
16 | }
17 | 
18 | const BINARY_INFO = {
19 |   linux: {
20 |     x86: {name: 'jq-linux32', sha256: 'ab440affb9e3f546cf0d794c0058543eeac920b0cd5dff660a2948b970beb632'},
21 |     x64: {name: 'jq-linux64', sha256: 'c6b3a7d7d3e7b70c6f51b706a3b90bd01833846c54d32ca32f0027f00226ff6d'}
22 |   },
23 |   darwin: {
24 |     x64: {name: 'jq-osx-amd64', sha256: '386e92c982a56fe4851468d7a931dfca29560cee306a0e66c6a1bd4065d3dac5'}
25 |   },
26 |   win32: {
27 |     x86: {name: 'jq-win32.exe', sha256: '1860c77bc2816b74f91705b84c7fa0dad3a062b355f021aa8c8e427e388e23fc'},
28 |     x64: {name: 'jq-win64.exe', sha256: 'ebecd840ba47efbf66822868178cc721a151060937f7ac406e3d31bd015bde94'}
29 |   }
30 | }
31 | 
32 | const outputDir = path.join(__dirname, '..', 'bin')
33 | const outputPath = path.join(outputDir, 'jq')
34 | 
35 | try {
36 |   fs.accessSync(outputPath, fs.F_OK)
37 |   // already exists
38 |   process.exit(0)
39 | } catch (e) {}
40 | 
41 | const build = new BinBuild()
42 |   .src(JQ_INFO.url + '/' + JQ_INFO.version + '/' + JQ_INFO.version + '.tar.gz')
43 |   .cmd('./configure --disable-maintainer-mode')
44 |   .cmd('make')
45 |   .cmd(`cp ./jq ${outputPath}`)
46 | 
47 | function downloadBinary (destinationPath) {
48 |   const platform = os.platform()
49 |   const arch = os.arch()
50 |   const binInfo = get(BINARY_INFO, [platform, arch])
51 |   if (binInfo == null) {
52 |     throw new Error(`No jq binary for ${platform}/${arch}`)
53 |   }
54 | 
55 |   const {name, sha256} = binInfo
56 |   const binUrl = [JQ_INFO.url, JQ_INFO.version, name].join('/')
57 |   console.log(`Downloading jq binary from ${binUrl}`)
58 |   return fetch(binUrl)
59 |     .then(response => new Promise((resolve, reject) => {
60 |       const hashStream = digestStream('sha256', 'hex', (digest) => {
61 |         if (digest !== sha256) {
62 |           return reject(new Error(`Expected ${name} to have sha256 checksum of ${sha256}, actual: ${digest}`))
63 |         }
64 |       })
65 |       const output = fs.createWriteStream(destinationPath)
66 |       response.body.on('error', reject)
67 |       hashStream.on('error', reject)
68 |       output.on('error', reject)
69 |       output.on('close', resolve)
70 |       response.body
71 |         .pipe(hashStream)
72 |         .pipe(output)
73 |     }))
74 |     .then(() => {
75 |       if (platform !== 'win32') {
76 |         fs.chmodSync(destinationPath, '755')
77 |       }
78 |     })
79 |     .catch(err => {
80 |       // delete output file on download error
81 |       try {
82 |         fs.unlinkSync(destinationPath)
83 |       } catch (e) {
84 |         // ignore deletion errors, just re-throw the underlying error
85 |       }
86 |       throw err
87 |     })
88 | }
89 | 
90 | mkdirp.sync(outputDir)
91 | 
92 | downloadBinary(outputPath)
93 |   .catch(err => {
94 |     console.log(`Error downloading jq binary: ${err.message}`)
95 |     console.log('building jq...')
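// fall back to compiling jq from source (the configure/make steps set up in `build` above)
// when no prebuilt binary matches this platform or the download/checksum fails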
96 | 
97 |     build.run((err) => {
98 |       if (err) {
99 |         console.log('Error building jq: ', err)
100 |         process.exit(1)
101 |       } else {
102 |         console.log(`jq compiled to ${outputPath}`)
103 |       }
104 |     })
105 |   })
106 | 
107 | 
--------------------------------------------------------------------------------
/scripts/dataset-fetch.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | 
3 | import sys
4 | import os
5 | import os.path
6 | import argparse
7 | import subprocess
8 | import multiprocessing
9 | import math
10 | 
11 | ncpus = multiprocessing.cpu_count()
12 | 
13 | def go(cfg):
14 |     with open("%s.manifest" % cfg.dataset) as mf:
15 |         chunks = [line.strip() for line in mf.readlines()]
16 |         chunks = chunks[cfg.start:]
17 | 
18 |     if cfg.count > 0:
19 |         chunks = chunks[:cfg.count]
20 | 
21 |     try:
22 |         os.mkdir(cfg.dataset, 0755)
23 |     except OSError as e:
24 |         print e.strerror
25 | 
26 |     index = cfg.start
27 |     while len(chunks) > 0:
28 |         batch = chunks[:cfg.batch]
29 |         fetch(cfg, batch, index)
30 |         chunks = chunks[cfg.batch:]
31 |         index += len(batch)
32 | 
33 | def fetch(cfg, batch, index):
34 |     wget(cfg, batch, index)
35 |     zcat(cfg, batch, index)
36 | 
37 | def wget(cfg, batch, index):
38 |     print "Fetching batch %d" % index
39 |     urls = [cfg.url + chunk for chunk in batch]
40 |     pchunk = max(1, int(math.ceil(float(len(urls)) / ncpus)))
41 |     pargs = [urls[x:x+pchunk] for x in range(0, len(urls), pchunk)]
42 |     procs = [wget1(cfg, args) for args in pargs]
43 |     retry = []
44 |     for x, p in enumerate(procs):
45 |         rc = p.wait()
46 |         if rc != 0:
47 |             retry += pargs[x]
48 |     if len(retry) > 0:
49 |         retry = [url for url in retry if not os.path.exists(os.path.join(cfg.dataset, os.path.basename(url)))]
50 |         p = wget1(cfg, retry)
51 |         rc = p.wait()
52 |         if rc != 0:
53 |             raise Exception("Error fetching data; wget exit code %d" % rc)
54 | 
55 | def wget1(cfg, urls):
56 |     return subprocess.Popen(["wget", "-q", "-P", cfg.dataset] + urls)
57 | 
58 | def zcat(cfg, batch, index):
59 |     chunks = [os.path.join(cfg.dataset, os.path.basename(chunk)) for chunk in batch]
60 |     batchf = os.path.join(cfg.dataset, "batch_%d.json" % index)
61 |     with open(batchf, 'w') as out:
62 |         p = subprocess.Popen(["zcat"] + chunks, stdout = out)
63 |         rc = p.wait()
64 |         if rc != 0:
65 |             raise Exception("Error batching data: zcat exit code %d" % rc)
66 |     p = subprocess.Popen(["rm"] + chunks)
67 |     rc = p.wait()
68 |     if rc != 0:
69 |         raise Exception("Error removing compressed chunks; rm exit code %d" % rc)
70 | 
71 | def main(args):
72 |     parser = argparse.ArgumentParser(
73 |         prog = "dataset-fetch.py",
74 |         description = "fetch (part of) a dataset and prepare it for ingestion")
75 | 
76 |     parser.add_argument('-b', '--batch',
77 |                         type = int,
78 |                         default = 100,
79 |                         dest = 'batch',
80 |                         help = "Number of chunks on each worker batch")
81 |     parser.add_argument('-c', '--count',
82 |                         type = int,
83 |                         default = 0,
84 |                         dest = 'count',
85 |                         help = "How many chunks to fetch; 0 will fetch the entire dataset")
86 |     parser.add_argument('-s', '--start',
87 |                         type = int,
88 |                         default = 0,
89 |                         dest = 'start',
90 |                         help = "Start index in the manifest")
91 |     parser.add_argument('dataset',
92 |                         type = str,
93 |                         help = "Name of the dataset")
94 |     parser.add_argument('url',
95 |                         type = str,
96 |                         help = "Base URL for dataset chunks")
97 |     cfg = parser.parse_args(args)
98 |     go(cfg)
99 | 
100 | if __name__ == '__main__':
101 |     main(sys.argv[1:])
--------------------------------------------------------------------------------
/scripts/ingest-parallel.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | 
3 | import sys
4 | import os
5 | import subprocess
6 | import time
7 | import argparse
8 | import tempfile
9 | import shutil
10 | import glob
11 | import multiprocessing
12 | 
13 | def ingest(ns):
14 |     procs = dict()
15 | 
16 |     def spawn(afile):
17 |         print "Ingest %s" % afile
18 |         proc = subprocess.Popen([ns.script, afile])
19 |         procs[proc.pid] = proc
20 | 
21 |     def reap_some():
22 |         done = []
23 |         while True:
24 |             for (pid, proc) in procs.items():
25 |                 if proc.poll() is not None:
26 |                     done.append(pid)
27 | 
28 |             if len(done) > 0:
29 |                 for pid in done:
30 |                     del procs[pid]
31 |                 break
32 |             else:
33 |                 time.sleep(1)
34 | 
35 |     for afile in glob.glob('{}/*'.format(ns.dir)):
36 |         if (len(procs) < ns.procs):
37 |             spawn(afile)
38 |         else:
39 |             reap_some()
40 |             spawn(afile)
41 | 
42 |     while len(procs) > 0:
43 |         reap_some()
44 | 
45 | def main(args):
46 |     ncpus = multiprocessing.cpu_count()
47 | 
48 |     parser = argparse.ArgumentParser(
49 |         prog = "ingest-parallel.py",
50 |         description = "ingest a dir of ndjson files in parallel"
51 |     )
52 |     parser.add_argument('-n', '--processes',
53 |                         type = int,
54 |                         default = ncpus,
55 |                         dest = 'procs',
56 |                         help = "Number of parallel ingestion processes; defaults to number of cpus")
57 |     parser.add_argument('script',
58 |                         type = str,
59 |                         help = "Ingest script; must accept ndjson filename")
60 |     parser.add_argument('dir',
61 |                         type = str,
62 |                         help = "input dir of ndjson files")
63 |     ns = parser.parse_args(args)
64 |     ingest(ns)
65 | 
66 | 
67 | if __name__ == '__main__':
68 |     main(sys.argv[1:])
--------------------------------------------------------------------------------
/scripts/ingest.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | mcclient putData $1 > /dev/null
4 | 
--------------------------------------------------------------------------------
/scripts/io.mediachain.indexer-image-jsonschema-1-0-0.json:
--------------------------------------------------------------------------------
1 | {
2 |   "$schema": "http://json-schema.org/draft-04/schema#",
3 |   "description": "Mediachain Labs indexer image metadata",
4 |   "self": {
5 |     "vendor": "io.mediachain.indexer",
6 |     "name": "image",
7 |     "format": "jsonschema",
8 |     "version": "1-0-0"
9 |   },
10 |   "type": "object",
11 |   "properties": {
12 |     "orientation": {
13 |       "type": ["string", "null"]
14 |     },
15 |     "camera_exif": {
16 |       "type": ["object", "null"]
17 |     },
18 |     "origin": {
19 |       "type": ["string", "null"]
20 |     },
21 |     "attribution": {
22 |       "type": ["string", "null", "array"]
23 |     },
24 |     "description": {
25 |       "type": ["string", "null"]
26 |     },
27 |     "source_tags": {
28 |       "type": "array",
29 |       "items": {
30 |         "type": "string"
31 |       }
32 |     },
33 |     "transient_info": {
34 |       "type": "object"
35 |     },
36 |     "title": {
37 |       "type": ["string", "null", "array"],
38 |       "items": {
39 |         "type": "string"
40 |       }
41 |     },
42 |     "aspect_ratio": {
43 |       "type": ["number", "null"]
44 |     },
45 |     "dedupe_hsh": {
46 |       "type": "string"
47 |     },
48 |     "source_dataset": {
49 |       "type": "string"
50 |     },
51 |     "artist_name": {
52 |       "type": ["string", "null"]
53 |     },
54 |     "keywords": {
55 |       "type": "array",
56 |       "items": {
57 |         "properties": {}
58 |       }
59 |     },
60 |     "native_id": {
61 |       "type": "string"
62 |     },
63 |     "license_tags": {
64 |       "type": "array",
65 |       "items": {
66 |         "properties": {}
67 |       }
68 |     },
69 |     "license_url": {
70 |       "type": ["string", "null"]
71 | }, 72 | "sizes": { 73 | "type": "array", 74 | "uniqueItems": true, 75 | "items": { 76 | "properties": { 77 | "width": { 78 | "type": ["number", "null"] 79 | }, 80 | "content_type": { 81 | "type": ["string", "null"] 82 | }, 83 | "height": { 84 | "type": "number" 85 | } 86 | } 87 | } 88 | }, 89 | "licenses": { 90 | "type": "array", 91 | "uniqueItems": true, 92 | "items": { 93 | "properties": { 94 | "name": { 95 | "type": ["string", "null"] 96 | }, 97 | "name_long": { 98 | "type": ["string", "null"] 99 | }, 100 | "details": { 101 | "type": ["string", "null", "array"] 102 | } 103 | } 104 | } 105 | }, 106 | "date_captured": { 107 | "type": ["string", "null"] 108 | }, 109 | "date_created": { 110 | "type": ["string", "null"] 111 | }, 112 | "date_created_at_source": { 113 | "type": ["string", "null"] 114 | }, 115 | "date_created_original": { 116 | "type": ["string", "null"] 117 | }, 118 | "date_source_version": { 119 | "type": ["string", "null"] 120 | }, 121 | "artist_names": { 122 | "type": ["array", "null"], 123 | "items": { 124 | "type": ["string", "array", "null"] 125 | } 126 | }, 127 | "url_direct": { 128 | "type": ["object", "null"], 129 | "properties": { 130 | "url": { 131 | "type": "string" 132 | } 133 | } 134 | }, 135 | "native_source_id": { 136 | "type": ["string", "null"] 137 | }, 138 | "url_shown_at": { 139 | "type": ["object", "null"], 140 | "properties": { 141 | "url": { 142 | "type": "string" 143 | } 144 | } 145 | }, 146 | "location": { 147 | "type": ["object", "null"] 148 | }, 149 | "license_name": { 150 | "type": ["string", "null"] 151 | }, 152 | "license_name_long": { 153 | "type": ["string", "null"] 154 | }, 155 | "aes_unsplash_out_v1": { 156 | "type": ["object", "null"] 157 | }, 158 | "derived_qualities": { 159 | "type": ["object", "null"] 160 | }, 161 | "providers_list": { 162 | "type": ["array", "null"], 163 | "items": { 164 | "type": "object", 165 | "properties": { 166 | "name": { 167 | "type": "string" 168 | } 169 | } 170 | } 171 | }, 172 | "order_model_3": { 173 | "type": ["string", "null"] 174 | }, 175 | "xann": { 176 | "type": ["array", "null"], 177 | "items": { 178 | "type": "string" 179 | } 180 | }, 181 | "nsfw": { 182 | "type": ["boolean", "null"] 183 | }, 184 | "source": { 185 | "type": ["object", "null"], 186 | "properties": { 187 | "url": { 188 | "type": "string" 189 | }, 190 | "name": { 191 | "type": "string" 192 | } 193 | }, 194 | "required": [ 195 | "url", 196 | "name" 197 | ] 198 | } 199 | }, 200 | "additionalProperties": false 201 | } 202 | -------------------------------------------------------------------------------- /scripts/publish-500px.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | NAMESPACE="--namespace images.500px" 4 | # Hash of schema that must be published on the node before ingestion. 5 | # You must update this if you change the schema. 
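# The reference hash is the object id of the published schema; a sketch of how to
# obtain it (assumed invocation -- publishSchema.js prints the resulting "Object ID"):
#   mcclient publishSchema scripts/io.mediachain.indexer-image-jsonschema-1-0-0.json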
6 | SCHEMA_HASH='--schemaReference QmYGRQYmWC3BAtTAi88mFb7GVeFsUKGM4nm25SBUB9vfc9' 7 | 8 | SKIP_VALIDATION='--skipSchemaValidation' 9 | # if you want to validate every record, use this one instead: 10 | # SKIP_VALIDATION='' 11 | 12 | mcclient publish ${SKIP_VALIDATION} \ 13 | --jqFilter '._source | del(.aesthetics)' \ 14 | --idFilter '.native_id | sub("500px[^_]*_(?<id>\\d+)"; "\(.id)")' \ 15 | --prefix '500px' \ 16 | ${NAMESPACE} \ 17 | ${SCHEMA_HASH} \ 18 | $1 > /dev/null 19 | 20 | -------------------------------------------------------------------------------- /scripts/publish-dpla.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | NAMESPACE='--namespace images.dpla' 4 | 5 | # Hash of schema that must be published on the node before ingestion. 6 | # You must update this if you change the schema. 7 | SCHEMA_HASH='--schemaReference QmYGRQYmWC3BAtTAi88mFb7GVeFsUKGM4nm25SBUB9vfc9' 8 | 9 | SKIP_VALIDATION='--skipSchemaValidation' 10 | # if you want to validate every record, use this one instead: 11 | # SKIP_VALIDATION='' 12 | 13 | mcclient publish ${SKIP_VALIDATION} \ 14 | --jqFilter '._source | del(.aesthetics)' \ 15 | --idFilter '.native_id | sub("dpla_http://dp.la/api/items/(?<id>.+)"; "\(.id)")' \ 16 | --prefix 'dpla' \ 17 | ${NAMESPACE} \ 18 | ${SCHEMA_HASH} \ 19 | $1 > /dev/null 20 | 21 | -------------------------------------------------------------------------------- /scripts/publish-flickr-c10.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | NAMESPACE="--namespace images.flickr" 4 | SCHEMA_HASH='--schemaReference QmYGRQYmWC3BAtTAi88mFb7GVeFsUKGM4nm25SBUB9vfc9' 5 | COMPOUND="--compound 10" 6 | 7 | mcclient publish ${COMPOUND} \ 8 | --skipSchemaValidation \ 9 | --jqFilter '._source | del(.aesthetics)' \ 10 | --idFilter '.native_id | sub("flickr[^_]*_(?<id>\\d+)"; "\(.id)")' \ 11 | --prefix 'flickr' \ 12 | ${NAMESPACE} \ 13 | ${SCHEMA_HASH} \ 14 | $1 > /dev/null 15 | 16 | -------------------------------------------------------------------------------- /scripts/publish-flickr-c100.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | NAMESPACE="--namespace images.flickr" 4 | SCHEMA_HASH='--schemaReference QmYGRQYmWC3BAtTAi88mFb7GVeFsUKGM4nm25SBUB9vfc9' 5 | COMPOUND="--compound 100" 6 | 7 | mcclient publish ${COMPOUND} \ 8 | --skipSchemaValidation \ 9 | --jqFilter '._source | del(.aesthetics)' \ 10 | --idFilter '.native_id | sub("flickr[^_]*_(?<id>\\d+)"; "\(.id)")' \ 11 | --prefix 'flickr' \ 12 | ${NAMESPACE} \ 13 | ${SCHEMA_HASH} \ 14 | $1 > /dev/null 15 | 16 | -------------------------------------------------------------------------------- /scripts/publish-flickr.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | NAMESPACE='--namespace images.flickr' 4 | 5 | # Hash of schema that must be published on the node before ingestion. 6 | # You must update this if you change the schema.
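# Usage sketch (hypothetical file name): ./publish-flickr.sh flickr-chunk-000.ndjson
# A directory of such ndjson dumps can be fanned out over this script with
# scripts/ingest-parallel.py.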
7 | SCHEMA_HASH='--schemaReference QmYGRQYmWC3BAtTAi88mFb7GVeFsUKGM4nm25SBUB9vfc9' 8 | 9 | SKIP_VALIDATION='--skipSchemaValidation' 10 | # if you want to validate every record, use this one instead: 11 | # SKIP_VALIDATION='' 12 | 13 | mcclient publish ${SKIP_VALIDATION} \ 14 | --jqFilter '._source | del(.aesthetics)' \ 15 | --idFilter '.native_id | sub("flickr[^_]*_(?<id>\\d+)"; "\(.id)")' \ 16 | --prefix 'flickr' \ 17 | ${NAMESPACE} \ 18 | ${SCHEMA_HASH} \ 19 | $1 > /dev/null 20 | 21 | -------------------------------------------------------------------------------- /scripts/publish-pexels.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | NAMESPACE="--namespace images.pexels" 4 | # Hash of schema that must be published on the node before ingestion. 5 | # You must update this if you change the schema. 6 | SCHEMA_HASH='--schemaReference QmYGRQYmWC3BAtTAi88mFb7GVeFsUKGM4nm25SBUB9vfc9' 7 | 8 | SKIP_VALIDATION='--skipSchemaValidation' 9 | # if you want to validate every record, use this one instead: 10 | # SKIP_VALIDATION='' 11 | 12 | mcclient publish ${SKIP_VALIDATION} \ 13 | --jqFilter '._source | del(.aesthetics)' \ 14 | --idFilter '.native_id | sub("pexels[^_]*_(?<id>\\d+)"; "\(.id)")' \ 15 | --prefix 'pexels' \ 16 | ${NAMESPACE} \ 17 | ${SCHEMA_HASH} \ 18 | $1 > /dev/null 19 | 20 | -------------------------------------------------------------------------------- /scripts/publish.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | NAMESPACE='--namespace foo.bar' # FIXME: should accept this as an argument, need to update ingest-parallel to pass it in 4 | 5 | # Hash of schema that must be published on the node before ingestion. 6 | # You must update this if you change the schema.
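# Possible fix for the FIXME above (untested sketch): take the namespace as an
# optional second argument, e.g. NAMESPACE="--namespace ${2:-foo.bar}", and extend
# ingest-parallel.py to pass it through.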
7 | SCHEMA_HASH='--schemaReference QmYGRQYmWC3BAtTAi88mFb7GVeFsUKGM4nm25SBUB9vfc9' 8 | 9 | SKIP_VALIDATION='--skipSchemaValidation' 10 | # if you want to validate every record, use this one instead: 11 | # SKIP_VALIDATION='' 12 | 13 | # Note: the filters are passed inline (as in the other publish scripts); putting them 14 | # in variables breaks their quoting when the variables are expanded unquoted. 15 | 16 | mcclient publish ${SKIP_VALIDATION} --jqFilter '._source | del(.aesthetics)' --idFilter '.native_id' ${NAMESPACE} ${SCHEMA_HASH} $1 > /dev/null 17 | 18 | -------------------------------------------------------------------------------- /scripts/validate.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | REPO_ROOT=$(git rev-parse --show-toplevel) 4 | SCHEMA_FILE="${REPO_ROOT}/scripts/io.mediachain.indexer-image-jsonschema-1-0-0.json" 5 | 6 | mcclient validate \ 7 | --jqFilter '._source | del(.aesthetics)' \ 8 | ${SCHEMA_FILE} $1 9 | 10 | -------------------------------------------------------------------------------- /src/client/api/index.js: -------------------------------------------------------------------------------- 1 | 2 | module.exports = { 3 | RestClient: require('./RestClient') 4 | } 5 | -------------------------------------------------------------------------------- /src/client/cli/commands/archive.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const path = require('path') 4 | 5 | module.exports = { 6 | command: 'archive <subcommand>', 7 | description: 'Commands to create or load archives of mediachain statements & objects.\n', 8 | builder: (yargs: Object) => yargs 9 | .commandDir(path.join(__dirname, 'archive')) 10 | .help() 11 | .strict(), 12 | 13 | handler: () => {} 14 | } 15 | -------------------------------------------------------------------------------- /src/client/cli/commands/archive/load.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const fs = require('fs') 4 | const gunzip = require('gunzip-maybe') 5 | const tar = require('tar-stream') 6 | const {consumeStream} = require('../../../../common/util') 7 | const {subcommand, printlnErr, println, pluralizeCount} = require('../../util') 8 | 9 | import type {RestClient} from '../../../api' 10 | import type {Readable as ReadableStream} from 'stream' 11 | 12 | const OBJECT_BATCH_SIZE = 1024 13 | 14 | module.exports = { 15 | command: 'load [filename]', 16 | description: 'Load an archive (generated by `mcclient archive dump`) into the local node.\n', 17 | builder: { 18 | filename: { 19 | description: 'A tar archive (optionally gzipped) from which to read statements and objects, ' + 20 | 'as generated by `mcclient archive dump`. If not given, archive will be read from standard input.\n', 21 | required: false, 22 | type: 'string', 23 | default: null 24 | }, 25 | allowErrors: { 26 | alias: ['warn', 'w'], 27 | description: 'Warn if an error occurs when loading the archive instead of aborting the load.\n', 28 | type: 'boolean', 29 | default: false 30 | } 31 | }, 32 | handler: subcommand((opts: {client: RestClient, filename?: ?string, allowErrors: boolean}) => new Promise((resolve, reject) => { 33 | const {client, filename, allowErrors} = opts 34 | const inputStream = (filename != null) ?
fs.createReadStream(filename) : process.stdin 35 | const inputStreamName = filename || 'standard input' 36 | const handlerPromises = [] 37 | 38 | const tarStream = tar.extract() 39 | let objectBatch = [] 40 | 41 | let objectCount = 0 42 | let statementCount = 0 43 | 44 | function handleError (message: string): (err: Error) => void { 45 | return err => { 46 | const msg = message + ': ' + err.message 47 | if (allowErrors) { 48 | printlnErr(msg) 49 | } else { 50 | throw new Error(msg) 51 | } 52 | } 53 | } 54 | 55 | function sendBatch (force: boolean = false) { 56 | if (force || objectBatch.length >= OBJECT_BATCH_SIZE) { 57 | if (objectBatch.length < 1) return 58 | 59 | const objects = objectBatch 60 | objectBatch = [] 61 | handlerPromises.push( 62 | client.putData(...objects) 63 | .then(keys => { 64 | objectCount += keys.length 65 | }) 66 | .catch(handleError('Error sending data objects')) 67 | ) 68 | } 69 | } 70 | 71 | tarStream.on('entry', (header, contentStream, done) => { 72 | const {name} = header 73 | if (name.startsWith('stmt/')) { 74 | handlerPromises.push( 75 | handleStatementEntry(contentStream, client) 76 | .then(count => { 77 | statementCount += count 78 | }) 79 | .catch(handleError(`Error importing statements from ${name}`)) 80 | .then(() => done()) 81 | ) 82 | } else if (name.startsWith('data/')) { 83 | readDataEntry(contentStream) 84 | .then(obj => { 85 | objectBatch.push(obj) 86 | sendBatch() 87 | }) 88 | .catch(handleError(`Error reading data object from ${name}`)) 89 | .then(() => done()) 90 | } else { 91 | printlnErr(`Unexpected entry "${name}", ignoring`) 92 | done() 93 | } 94 | }) 95 | 96 | inputStream.on('error', err => { 97 | reject(new Error(`Error reading from ${inputStreamName}: ${err.message}`)) 98 | }) 99 | tarStream.on('error', err => { 100 | reject(new Error(`Error reading from tar archive: ${err.message}`)) 101 | }) 102 | 103 | tarStream.on('finish', () => { 104 | handlerPromises.push(sendBatch(true)) 105 | Promise.all(handlerPromises) 106 | .then(() => { 107 | println(`Imported ${pluralizeCount(statementCount, 'new statement')} and sent ${pluralizeCount(objectCount, 'object')}`) 108 | resolve() 109 | }) 110 | }) 111 | 112 | inputStream 113 | .pipe(gunzip()) 114 | .pipe(tarStream) 115 | })) 116 | } 117 | 118 | function handleStatementEntry (contentStream: ReadableStream, client: RestClient): Promise { 119 | return consumeStream(contentStream) 120 | .then(ndjson => client.importRaw(ndjson)) 121 | } 122 | 123 | function readDataEntry (contentStream: ReadableStream): Promise { 124 | return new Promise((resolve, reject) => { 125 | const chunks = [] 126 | contentStream.on('data', chunk => { 127 | chunks.push(chunk) 128 | }) 129 | contentStream.on('end', () => { 130 | resolve(Buffer.concat(chunks)) 131 | }) 132 | contentStream.on('error', reject) 133 | }) 134 | } 135 | -------------------------------------------------------------------------------- /src/client/cli/commands/auth.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const path = require('path') 4 | 5 | module.exports = { 6 | command: 'auth ', 7 | description: 'Display and edit the set of peers authorized to push data to the local node.\n', 8 | builder: (yargs: Object) => yargs 9 | .commandDir(path.join(__dirname, 'auth')) 10 | .help() 11 | .strict(), 12 | 13 | handler: () => {} 14 | } 15 | -------------------------------------------------------------------------------- /src/client/cli/commands/auth/grant.js: 
-------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../../api/RestClient') 4 | const { subcommand, printJSON, println } = require('../../util') 5 | 6 | module.exports = { 7 | command: 'grant <peerId> <namespaces..>', 8 | description: 'Set the list of namespaces that a given peer can push to. ' + 9 | 'This will replace any existing authorizations. ' + 10 | "Namespaces may include wildcards, e.g. 'images.*'\n", 11 | builder: (yargs: Object) => yargs 12 | .example('$0 auth grant QmZtSnkmB9DkKJ1L4V65XZZAJC2GyCdge7x2cGn9Z9NTBs images.dpla museums.*'), 13 | handler: subcommand((opts: {client: RestClient, peerId: string, namespaces: Array<string>}) => { 14 | const {client, peerId, namespaces} = opts 15 | 16 | return client.authorize(peerId, namespaces) 17 | .then(() => client.getAuthorizations()) 18 | .then(auths => { 19 | println(`Granted authorizations for peer ${peerId}:`) 20 | printJSON(auths[peerId]) 21 | }) 22 | }) 23 | } 24 | -------------------------------------------------------------------------------- /src/client/cli/commands/auth/revoke.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../../api/RestClient') 4 | const { subcommand, println } = require('../../util') 5 | 6 | module.exports = { 7 | command: 'revoke <peerId>', 8 | description: 'Revoke all authorizations for the given peer.\n', 9 | handler: subcommand((opts: {apiUrl: string, peerId: string}) => { 10 | const {apiUrl, peerId} = opts 11 | const client = new RestClient({apiUrl}) 12 | return client.revokeAuthorization(peerId) 13 | .then(() => { println(`Revoked authorization for ${peerId}.`) }) 14 | }) 15 | } 16 | -------------------------------------------------------------------------------- /src/client/cli/commands/auth/show.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../../api/RestClient') 4 | const { subcommand, printJSON } = require('../../util') 5 | 6 | module.exports = { 7 | command: 'show', 8 | description: 'Show the peers authorized to push data to the local node.\n', 9 | handler: subcommand((opts: {client: RestClient}) => { 10 | const {client} = opts 11 | return client.getAuthorizations() 12 | .then(authInfo => printJSON(authInfo)) 13 | }) 14 | } 15 | -------------------------------------------------------------------------------- /src/client/cli/commands/config.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const path = require('path') 4 | 5 | module.exports = { 6 | command: 'config <subcommand>', 7 | describe: 'Show and set local node configuration.
Use "config --help" to see subcommands.\n', 8 | builder: (yargs: Function) => { 9 | return yargs 10 | .commandDir(path.join(__dirname, './config')) 11 | .help() 12 | .strict() 13 | }, 14 | handler: () => {} 15 | } 16 | -------------------------------------------------------------------------------- /src/client/cli/commands/config/dir.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../../api/RestClient') 4 | const { subcommand, println, pluralizeCount } = require('../../util') 5 | 6 | module.exports = { 7 | command: 'dir [dirIds..]', 8 | description: 'Get or set the directory servers.\n', 9 | builder: { 10 | clear: { 11 | type: 'boolean', 12 | description: 'If given, clear out the existing directory configuration.\n' 13 | } 14 | }, 15 | handler: subcommand((opts: {client: RestClient, dirIds: Array, clear?: boolean}) => { 16 | const {client, dirIds, clear} = opts 17 | 18 | if (clear) { 19 | return client.setDirectoryIds() 20 | .then(() => println('Directory configuration cleared')) 21 | } 22 | 23 | if (dirIds.length > 0) { 24 | return client.setDirectoryIds(...dirIds) 25 | .then(() => { 26 | println(`Set ${pluralizeCount(dirIds.length, 'directory server')}:`) 27 | dirIds.forEach(println) 28 | }) 29 | } 30 | 31 | return client.getDirectoryIds() 32 | .then(ids => ids.forEach(println)) 33 | }) 34 | } 35 | -------------------------------------------------------------------------------- /src/client/cli/commands/config/info.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../../api/RestClient') 4 | const { subcommand, println } = require('../../util') 5 | 6 | module.exports = { 7 | command: 'info [peerInfo]', 8 | description: 'Get or set the peer info message.\n', 9 | handler: subcommand((opts: {client: RestClient, peerInfo?: string}) => { 10 | const {client, peerInfo} = opts 11 | if (peerInfo) { 12 | return client.setInfo(peerInfo) 13 | .then(() => { 14 | println(`set peer info to "${peerInfo}"`) 15 | }) 16 | } else { 17 | return client.getInfo() 18 | .then(println) 19 | } 20 | }) 21 | } 22 | -------------------------------------------------------------------------------- /src/client/cli/commands/config/nat.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../../api/RestClient') 4 | const { subcommand, println } = require('../../util') 5 | 6 | module.exports = { 7 | command: 'nat [natConfig]', 8 | description: `Get or set the NAT configuration. Valid settings are 'none', 'auto', '*', '*:port', 'ip:port'. \n`, 9 | handler: subcommand((opts: {client: RestClient, natConfig?: string}) => { 10 | const {client, natConfig} = opts 11 | if (natConfig) { 12 | return client.setNATConfig(natConfig) 13 | .then(() => { 14 | println(`set NAT configuration to "${natConfig}"`) 15 | }) 16 | .catch(err => { 17 | throw new Error(`Error setting NAT configuration: ${err.message}`) 18 | }) 19 | } else { 20 | return client.getNATConfig() 21 | .then(println) 22 | } 23 | }) 24 | } 25 | -------------------------------------------------------------------------------- /src/client/cli/commands/data.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const path = require('path') 4 | 5 | module.exports = { 6 | command: 'data ', 7 | describe: `Interact with the local node's datastore. 
Use "data --help" to see subcommands.\n`, 8 | builder: (yargs: Function) => { 9 | return yargs 10 | .commandDir(path.join(__dirname, './data')) 11 | .demand(1, 'Missing command argument') 12 | .help() 13 | .strict() 14 | }, 15 | handler: () => {} 16 | } 17 | -------------------------------------------------------------------------------- /src/client/cli/commands/data/compact.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | import type {RestClient} from '../../../api' 4 | const {subcommand, println} = require('../../util') 5 | 6 | module.exports = { 7 | command: 'compact', 8 | description: 'Compact the datastore to optimize disk usage.\n', 9 | handler: subcommand((opts: {client: RestClient}) => { 10 | const {client} = opts 11 | return client.compactDatastore() 12 | .then(() => { 13 | println('Compaction successful') 14 | }) 15 | }) 16 | } 17 | -------------------------------------------------------------------------------- /src/client/cli/commands/data/gc.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | import type {RestClient} from '../../../api' 4 | const {subcommand, pluralizeCount, println} = require('../../util') 5 | 6 | module.exports = { 7 | command: 'gc', 8 | description: 'Trigger garbage collection to remove orphan data objects that are not referenced by any statement.\n', 9 | handler: subcommand((opts: {client: RestClient}) => { 10 | const {client} = opts 11 | return client.garbageCollectDatastore() 12 | .then(count => { 13 | println(`Garbage collected ${pluralizeCount(count, 'object')}`) 14 | }) 15 | }) 16 | } 17 | -------------------------------------------------------------------------------- /src/client/cli/commands/data/get.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../../api/RestClient') 4 | const { formatJSON, subcommand } = require('../../util') 5 | import type { Readable as ReadableStream } from 'stream' 6 | 7 | module.exports = { 8 | command: 'get ', 9 | description: 'Request one or more `objectIds` from the local node and print to the console.' + 10 | 'If multiple ids are given and pretty-printing is enabled, results will be returned as a JSON map, ' + 11 | 'with `objectIds` as keys.\n', 12 | builder: { 13 | color: { 14 | type: 'boolean', 15 | description: 'Explicitly enable (or disable, with --no-color) colorized output.\n', 16 | default: null, 17 | defaultDescription: 'Print in color if stdout is a tty, and monochrome if piped or pretty-printing is disabled.' 18 | }, 19 | pretty: { 20 | type: 'boolean', 21 | description: 'Pretty print the output.\n', 22 | default: true, 23 | defaultDescription: 'True. Use --no-pretty for compact output.' 24 | }, 25 | batch: { 26 | type: 'boolean', 27 | description: 'Force "batch-mode", even if only one key is given. 
Disables color and pretty-printing.\n', 28 | default: false 29 | } 30 | }, 31 | 32 | handler: subcommand((opts: {client: RestClient, objectIds: Array, color: ?boolean, pretty: boolean, batch: boolean}) => { 33 | const {client, objectIds, batch} = opts 34 | let {color, pretty} = opts 35 | if (batch) { 36 | color = false 37 | pretty = false 38 | } 39 | 40 | if (!batch && objectIds.length === 1) { 41 | return client.getData(objectIds[0]) 42 | .then(objectFormatter(color, pretty)) 43 | .then(output => process.stdout.write(output)) 44 | } 45 | 46 | return client.batchGetDataStream(objectIds) 47 | .then(stream => { 48 | if (pretty) { 49 | return printStreamPretty(stream, objectIds, color) 50 | } else { 51 | return printStreamCompact(stream) 52 | } 53 | }) 54 | }) 55 | } 56 | 57 | function printStreamCompact (stream: ReadableStream): Promise { 58 | return new Promise((resolve, reject) => { 59 | const formatObject = objectFormatter(false, false) 60 | const {stdout} = process 61 | stream.on('data', obj => { 62 | stdout.write(formatObject(obj)) 63 | }) 64 | stream.on('end', resolve) 65 | stream.on('error', reject) 66 | }) 67 | } 68 | 69 | function printStreamPretty (stream: ReadableStream, objectIds: Array, color: ?boolean, wrap: boolean = true): Promise { 70 | const formatObject = objectFormatter(color, true) 71 | const {stdout} = process 72 | const padding = wrap ? 2 : 0 73 | if (wrap) { 74 | stdout.write('{\n') 75 | } 76 | return new Promise((resolve, reject) => { 77 | stream.on('data', result => { 78 | const key = objectIds.pop() 79 | const keyString = formatObject(key).trim() 80 | const formatted = formatObject(result).trim() 81 | const output = indent(`${keyString}: ${formatted}`, padding) 82 | stdout.write(output) 83 | 84 | if (wrap && objectIds.length > 0) { 85 | stdout.write(',') 86 | } 87 | stdout.write('\n') 88 | }) 89 | stream.on('error', (err) => { 90 | if (wrap) { 91 | stdout.write(`}\n`) 92 | } 93 | reject(err) 94 | }) 95 | stream.on('end', () => { 96 | if (wrap) { 97 | stdout.write(`}\n`) 98 | } 99 | resolve() 100 | }) 101 | }) 102 | } 103 | 104 | const objectFormatter = (color: ?boolean, pretty: boolean) => (obj: ?mixed): string => { 105 | if (obj instanceof Buffer) { 106 | obj = obj.toString('base64') 107 | } 108 | 109 | return formatJSON(obj, {color, pretty}) 110 | } 111 | 112 | function indent (s: string, spaces: number = 2): string { 113 | const padding = new Array(spaces).fill(' ').join('') 114 | return s.split('\n').map(line => padding + line).join('\n') 115 | } 116 | -------------------------------------------------------------------------------- /src/client/cli/commands/data/keys.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | import type {RestClient} from '../../../api' 4 | const {subcommand, println} = require('../../util') 5 | 6 | module.exports = { 7 | command: 'keys', 8 | description: 'Print the keys for all objects in the datastore.\n', 9 | handler: subcommand((opts: {client: RestClient}) => { 10 | const {client} = opts 11 | return client.getDatastoreKeyStream() 12 | .then(stream => new Promise((resolve, reject) => { 13 | stream.on('data', data => { println(data.toString()) }) 14 | stream.on('error', reject) 15 | stream.on('end', () => resolve()) 16 | })) 17 | }) 18 | } 19 | -------------------------------------------------------------------------------- /src/client/cli/commands/data/put.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const fs = 
require('fs') 4 | const ndjson = require('ndjson') 5 | const RestClient = require('../../../api/RestClient') 6 | const { subcommand, println } = require('../../util') 7 | import type { Readable } from 'stream' 8 | 9 | const BATCH_SIZE = 1000 10 | 11 | module.exports = { 12 | command: 'put [filename]', 13 | description: 'Read newline-delimited JSON data from `filename` or stdin and store in the remote node as IPLD.\n', 14 | builder: { 15 | batchSize: { default: BATCH_SIZE } 16 | }, 17 | 18 | handler: subcommand((opts: {client: RestClient, batchSize: number, filename: ?string}) => { 19 | const {client, batchSize, filename} = opts 20 | const streamName = filename || 'standard input' 21 | 22 | let items: Array<Object> = [] 23 | let promises: Array<Promise<*>> = [] 24 | 25 | let inputStream: Readable 26 | if (filename) { 27 | inputStream = fs.createReadStream(filename) 28 | } else { 29 | inputStream = process.stdin 30 | } 31 | 32 | return new Promise((resolve, reject) => { 33 | inputStream.pipe(ndjson.parse()) 34 | .on('data', obj => { 35 | items.push(obj) 36 | if (items.length >= batchSize) { 37 | promises.push(putItems(client, items)) 38 | items = [] 39 | } 40 | }) 41 | .on('end', () => { 42 | if (items.length > 0) { 43 | promises.push(putItems(client, items)) 44 | } 45 | Promise.all(promises) 46 | .then(() => resolve()) 47 | }) 48 | .on('error', err => { 49 | err = new Error(`Error reading from ${streamName}: ${err.message}`) 50 | reject(err) 51 | }) 52 | }) 53 | }) 54 | } 55 | 56 | function putItems (client: RestClient, items: Array<Object>): Promise<*> { 57 | return client.putData(...items).then( 58 | hashes => { 59 | hashes.forEach(h => println(h)) 60 | } 61 | ) 62 | } 63 | -------------------------------------------------------------------------------- /src/client/cli/commands/data/sync.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | import type {RestClient} from '../../../api' 4 | const {subcommand, println} = require('../../util') 5 | 6 | module.exports = { 7 | command: 'sync', 8 | description: 'Flushes the datastore.
Useful for immediately reclaiming space after garbage collection.\n', 9 | handler: subcommand((opts: {client: RestClient}) => { 10 | const {client} = opts 11 | return client.syncDatastore() 12 | .then(() => { 13 | println(`Sync successful`) 14 | }) 15 | }) 16 | } 17 | -------------------------------------------------------------------------------- /src/client/cli/commands/delete.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../api/RestClient') 4 | const { subcommand, pluralizeCount, println } = require('../util') 5 | 6 | module.exports = { 7 | command: 'delete <queryString>', 8 | description: 'Send a delete query to the local node to delete matching statements.\n', 9 | handler: subcommand((opts: {client: RestClient, queryString: string}) => { 10 | const {client, queryString} = opts 11 | 12 | return client.delete(queryString) 13 | .then(count => { 14 | println(`Deleted ${pluralizeCount(count, 'statement')}`) 15 | }) 16 | }) 17 | } 18 | -------------------------------------------------------------------------------- /src/client/cli/commands/getData.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const cmd = require('./data/get') 4 | 5 | module.exports = Object.assign({}, cmd, { 6 | command: 'getData <objectIds..>', 7 | description: `${cmd.description.trim()} (alias for 'data get')\n` 8 | }) 9 | -------------------------------------------------------------------------------- /src/client/cli/commands/id.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../api/RestClient') 4 | const {subcommand, println} = require('../util') 5 | 6 | module.exports = { 7 | command: 'id [peerId]', 8 | description: 'Request the peer id, publisher id, and info string of the local node, ' + 9 | 'or a remote peer if `peerId` is given and a directory server is connected.\n', 10 | handler: subcommand((opts: {client: RestClient, peerId?: string}) => { 11 | const {client, peerId} = opts 12 | return client.id(peerId) 13 | .then(printIds) 14 | }) 15 | } 16 | 17 | function printIds (opts: {peer: string, publisher: string, info: string}) { 18 | const {peer, publisher, info} = opts 19 | println(`Peer ID: ${peer}`) 20 | println(`Publisher ID: ${publisher}`) 21 | println(`Info: ${info}`) 22 | } 23 | -------------------------------------------------------------------------------- /src/client/cli/commands/listNamespaces.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../api/RestClient') 4 | const { subcommand, println } = require('../util') 5 | 6 | module.exports = { 7 | command: 'listNamespaces', 8 | description: `Fetch a list of namespaces published by all known peers.\n`, 9 | handler: subcommand((opts: {client: RestClient}) => { 10 | const {client} = opts 11 | return client.listNamespaces().then( 12 | namespaces => { 13 | namespaces.sort().forEach(ns => println(ns)) 14 | } 15 | ) 16 | }) 17 | } 18 | -------------------------------------------------------------------------------- /src/client/cli/commands/listPeers.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../api/RestClient') 4 | const { subcommand, println } = require('../util') 5 | 6 | type Opts = {client: RestClient, info: boolean, namespace?: string, includeSelf: boolean} 7 | 8 |
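// Example invocations (hypothetical namespace): `mcclient listPeers` to list all
// known peers, or `mcclient listPeers images.dpla --info --all` to include each
// peer's info string and the local node in the namespace listing.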
module.exports = { 9 | command: 'listPeers [namespace]', 10 | builder: { 11 | info: { 12 | type: 'boolean', 13 | alias: 'i', 14 | default: false, 15 | description: 'Also fetch the "info" string for each peer. This requires an extra network request per-peer.\n' 16 | }, 17 | includeSelf: { 18 | type: 'boolean', 19 | alias: 'all', 20 | description: 'Include the local node in namespace listings. Has no effect if namespace is not given.\n', 21 | default: false 22 | } 23 | }, 24 | description: 'Fetch a list of remote peers from a directory server or the DHT. ' + 25 | 'If the `namespace` argument is given, only peers that have published to the given namespace will be returned. ' + 26 | 'Namespace listings require the node to be configured to use a directory server.\n', 27 | handler: subcommand((opts: Opts) => { 28 | const {client, info, namespace} = opts 29 | let {includeSelf} = opts 30 | if (namespace == null) { 31 | includeSelf = false 32 | } 33 | 34 | return client.listPeers(namespace, includeSelf).then( 35 | peers => { 36 | if (info) { 37 | return fetchInfos(peers, opts) 38 | } else { 39 | peers.forEach(p => println(p)) 40 | } 41 | } 42 | ) 43 | }) 44 | } 45 | 46 | function printInfo (ids: Object, isSelf: boolean = false) { 47 | let msg = 'No info published' 48 | if (ids.info != null && ids.info.length > 0) { 49 | msg = ids.info 50 | } 51 | const selfMsg = isSelf ? '(self) ' : '' 52 | println(`${ids.peer} ${selfMsg}-- ${msg}`) 53 | } 54 | 55 | function fetchInfos (peerIds: Array, opts: Opts): Promise<*> { 56 | const {client, includeSelf} = opts 57 | const promises: Array> = [] 58 | let selfInfoPromise: Promise = includeSelf 59 | ? client.id() 60 | : Promise.resolve(null) 61 | 62 | return selfInfoPromise.then(selfInfo => { 63 | for (const peer of peerIds) { 64 | if (selfInfo != null && peer === selfInfo.peer) { 65 | const s = selfInfo // make flow happy by assigning to non-null var before entering new scope 66 | promises.push( 67 | Promise.resolve().then(() => { 68 | printInfo(s, true) 69 | }) 70 | ) 71 | } else { 72 | promises.push( 73 | client.id(peer) 74 | .then(printInfo) 75 | .catch(err => { println(`${peer} -- Unable to fetch info: ${err.message}`) }) 76 | ) 77 | } 78 | } 79 | return Promise.all(promises) 80 | }) 81 | } 82 | -------------------------------------------------------------------------------- /src/client/cli/commands/lookupPeer.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../api/RestClient') 4 | const { subcommand, println } = require('../util') 5 | 6 | module.exports = { 7 | command: 'lookupPeer ', 8 | describe: 'Lookup a remote peer, identified by `peerId`. ' + 9 | 'Will first try the directory if one is configured, otherwise will fallback to DHT lookup.\n', 10 | handler: subcommand((opts: {peerId: string, client: RestClient}) => { 11 | const {peerId, client} = opts 12 | 13 | return client.netLookup(peerId) 14 | .then( 15 | addrs => { 16 | addrs.forEach(a => { println(a) }) 17 | }, 18 | err => { throw new Error(`Error during peer lookup: ${err.message}`) } 19 | ) 20 | }) 21 | } 22 | -------------------------------------------------------------------------------- /src/client/cli/commands/manifest.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const path = require('path') 4 | 5 | module.exports = { 6 | command: 'manifest ', 7 | describe: 'Commands for setting and retrieving identity manifests. 
Use "manifest --help" to see subcommands.\n', 8 | builder: (yargs: Function) => { 9 | return yargs 10 | .commandDir(path.join(__dirname, './manifest')) 11 | .help() 12 | .strict() 13 | }, 14 | handler: () => {} 15 | } 16 | -------------------------------------------------------------------------------- /src/client/cli/commands/manifest/add.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const fs = require('fs') 4 | const _ = require('lodash') 5 | const RestClient = require('../../../api/RestClient') 6 | const { subcommand, println } = require('../../util') 7 | const { consumeStream } = require('../../../../common/util') 8 | 9 | module.exports = { 10 | command: 'add [filename]', 11 | description: 'Add a signed manifest to the local node. ' + 12 | 'If `filename` is not given, will read from standard input.\n', 13 | handler: subcommand((opts: {client: RestClient, filename?: string}) => { 14 | const {client, filename} = opts 15 | let streamName = 'standard input' 16 | let inputStream = process.stdin 17 | if (filename != null) { 18 | streamName = filename 19 | inputStream = fs.createReadStream(filename) 20 | } 21 | 22 | let manifest: Object 23 | 24 | return consumeStream(inputStream) 25 | .catch(err => { 26 | throw new Error(`Error reading from ${streamName}: ${err.message}`) 27 | }) 28 | .then(contents => { 29 | manifest = JSON.parse(contents) 30 | }) 31 | .then(() => client.getManifests()) 32 | .then(manifests => { 33 | if (_.some(manifests, m => _.isEqual(m, manifest))) { 34 | println('Node already contains manifest, ignoring') 35 | return 36 | } 37 | manifests.push(manifest) 38 | return client.setManifests(...manifests) 39 | .then(() => { 40 | println('Manifest added successfully') 41 | }) 42 | }) 43 | }) 44 | } 45 | -------------------------------------------------------------------------------- /src/client/cli/commands/manifest/get.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../../api/RestClient') 4 | const { subcommand, printJSON } = require('../../util') 5 | 6 | module.exports = { 7 | command: 'get [remotePeer]', 8 | description: `Get the signed manifests for the local node or a remote peer.\n`, 9 | handler: subcommand((opts: {client: RestClient, remotePeer?: string}) => { 10 | const {client, remotePeer} = opts 11 | return client.getManifests(remotePeer) 12 | .then(m => printJSON(m)) 13 | }) 14 | } 15 | -------------------------------------------------------------------------------- /src/client/cli/commands/manifest/lookup.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../../api/RestClient') 4 | const { subcommand, printJSON } = require('../../util') 5 | 6 | module.exports = { 7 | command: 'lookup ', 8 | builder: { 9 | entityId: { 10 | type: 'string', 11 | description: 'An "entity" identifier that can be used to verify a public identity. 
' + 12 | 'e.g: "blockstack:mediachainlabs.id" or "keybase:yusef"' 13 | } 14 | }, 15 | description: `Query the directory for the manifests belonging to \`entityId\`.\n`, 16 | handler: subcommand((opts: {client: RestClient, entityId: string}) => { 17 | const {client, entityId} = opts 18 | return client.listManifestsForEntity(entityId) 19 | .then(m => printJSON(m)) 20 | }) 21 | } 22 | -------------------------------------------------------------------------------- /src/client/cli/commands/manifest/remove.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const fs = require('fs') 4 | const _ = require('lodash') 5 | const RestClient = require('../../../api/RestClient') 6 | const { subcommand, println } = require('../../util') 7 | const { consumeStream } = require('../../../../common/util') 8 | 9 | module.exports = { 10 | command: 'remove [filename]', 11 | description: 'Remove a signed manifest from the local node. ' + 12 | 'If `filename` is not given, will read from standard input.\n', 13 | handler: subcommand((opts: {client: RestClient, filename?: string}) => { 14 | const {client, filename} = opts 15 | let streamName = 'standard input' 16 | let inputStream = process.stdin 17 | if (filename != null) { 18 | streamName = filename 19 | inputStream = fs.createReadStream(filename) 20 | } 21 | 22 | let manifest: Object 23 | 24 | return consumeStream(inputStream) 25 | .catch(err => { 26 | throw new Error(`Error reading from ${streamName}: ${err.message}`) 27 | }) 28 | .then(contents => { 29 | manifest = JSON.parse(contents) 30 | }) 31 | .then(() => client.getManifests()) 32 | .then(manifests => { 33 | const without = _.filter(manifests, m => !_.isEqual(m, manifest)) 34 | if (without.length === manifests.length) { 35 | println('Node does not contain manifest, ignoring') 36 | return 37 | } 38 | return client.setManifests(...without) 39 | .then(() => { 40 | println('Manifest removed successfully') 41 | }) 42 | }) 43 | }) 44 | } 45 | -------------------------------------------------------------------------------- /src/client/cli/commands/manifest/self.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../../api/RestClient') 4 | const { subcommand, println } = require('../../util') 5 | 6 | module.exports = { 7 | command: 'self', 8 | description: `Get the unsigned "node manifest" for the local node, ` + 9 | `suitable for signing by mcid to produce a manifest.\n`, 10 | handler: subcommand((opts: {client: RestClient}) => { 11 | const {client} = opts 12 | return client.getSelfManifest() 13 | .then(m => println(m)) 14 | }) 15 | } 16 | -------------------------------------------------------------------------------- /src/client/cli/commands/manifest/set.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const fs = require('fs') 4 | const RestClient = require('../../../api/RestClient') 5 | const { subcommand, println } = require('../../util') 6 | const { consumeStream } = require('../../../../common/util') 7 | 8 | module.exports = { 9 | command: 'set [filename]', 10 | description: `Set the signed manifests for the local node, replacing any existing manifests. ` + 11 | 'If `filename is not given, will read from stdin`\n', 12 | builder: { 13 | ndjson: { 14 | type: 'boolean', 15 | description: 'If present, input should be newline-delimited json, one object per line. 
' + 16 | 'Otherwise, input can be either a single json object, or an array of objects.', 17 | default: false 18 | } 19 | }, 20 | handler: subcommand((opts: {client: RestClient, filename?: string, ndjson: boolean}) => { 21 | const {client, filename, ndjson} = opts 22 | let streamName = 'standard input' 23 | let inputStream = process.stdin 24 | if (filename != null) { 25 | streamName = filename 26 | inputStream = fs.createReadStream(filename) 27 | } 28 | 29 | return consumeStream(inputStream) 30 | .catch(err => { 31 | throw new Error(`Error reading from ${streamName}: ${err.message}`) 32 | }) 33 | .then(contents => { 34 | let manifests = [] 35 | if (ndjson) { 36 | manifests = contents.split('\n') 37 | .filter(line => line && line.length > 0) 38 | .map(line => JSON.parse(line)) 39 | } else { 40 | const parsed = JSON.parse(contents) 41 | if (Array.isArray(parsed)) { 42 | manifests = parsed 43 | } else { 44 | manifests = [parsed] 45 | } 46 | } 47 | return manifests 48 | }) 49 | .then(manifests => client.setManifests(...manifests)) 50 | .then(() => { 51 | println('Manifests set successfully') 52 | }) 53 | }) 54 | } 55 | -------------------------------------------------------------------------------- /src/client/cli/commands/merge.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../api/RestClient') 4 | const { subcommand, pluralizeCount, println } = require('../util') 5 | 6 | module.exports = { 7 | command: 'merge <remotePeer> <queryString>', 8 | description: 'Merge statements and their referenced objects that match `query` from ' + 9 | '`remotePeer` into the local node.\n', 10 | handler: subcommand((opts: {client: RestClient, queryString: string, remotePeer: string}) => { 11 | const {client, queryString, remotePeer} = opts 12 | 13 | return client.merge(queryString, remotePeer) 14 | .then(({statementCount, objectCount}) => { 15 | println( 16 | `Merged ${pluralizeCount(statementCount, 'statement')} and ${pluralizeCount(objectCount, 'object')}` 17 | ) 18 | }) 19 | }) 20 | } 21 | 22 | -------------------------------------------------------------------------------- /src/client/cli/commands/net.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const path = require('path') 4 | 5 | module.exports = { 6 | command: 'net <subcommand>', 7 | description: 'Commands for inspecting and managing peer-to-peer network connections.\n', 8 | builder: (yargs: Object) => yargs 9 | .commandDir(path.join(__dirname, 'net')) 10 | .help() 11 | .strict(), 12 | 13 | handler: () => {} 14 | } 15 | -------------------------------------------------------------------------------- /src/client/cli/commands/net/addr.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../../api/RestClient') 4 | const { subcommand, println } = require('../../util') 5 | 6 | module.exports = { 7 | command: 'addr [peerId]', 8 | description: `Print the local node's network addresses in multiaddr format.
` + 9 | `If 'peerId' is given, prints the locally known addresses for that peer (useful for debugging).\n`, 10 | handler: subcommand((opts: {client: RestClient, peerId?: string}) => { 11 | const {client, peerId} = opts 12 | 13 | return client.getNetAddresses(peerId) 14 | .then( 15 | addresses => { 16 | if (addresses.length < 1) { 17 | if (peerId != null) { 18 | println(`No known addresses for peer ${peerId}`) 19 | } else { 20 | println( 21 | 'Local node does not have an address. Make sure status is set to "online" or "public"' 22 | ) 23 | } 24 | } else { 25 | addresses.forEach(addr => { 26 | println(addr) 27 | }) 28 | } 29 | }) 30 | .catch( 31 | err => { throw new Error(`Error retrieving addresses: ${err.message}`) } 32 | ) 33 | }) 34 | } 35 | -------------------------------------------------------------------------------- /src/client/cli/commands/net/connections.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../../api/RestClient') 4 | const { subcommand, println } = require('../../util') 5 | 6 | module.exports = { 7 | command: 'connections', 8 | description: `Print a list of all actively connected peers (useful for debugging).\n`, 9 | handler: subcommand((opts: {client: RestClient}) => { 10 | const {client} = opts 11 | 12 | return client.getNetConnections() 13 | .then( 14 | addresses => { 15 | if (addresses.length < 1) { 16 | println( 17 | 'No active network connections. Is the node online?' 18 | ) 19 | } else { 20 | addresses.forEach(addr => { 21 | println(addr) 22 | }) 23 | } 24 | }) 25 | .catch( 26 | err => { throw new Error(`Error retrieving network connection list: ${err.message}`) } 27 | ) 28 | }) 29 | } 30 | -------------------------------------------------------------------------------- /src/client/cli/commands/net/findPeers.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../../api/RestClient') 4 | const { subcommand, println } = require('../../util') 5 | 6 | module.exports = { 7 | command: 'findPeers', 8 | description: `Find all public peers registered in the DHT.\n`, 9 | handler: subcommand((opts: {client: RestClient}) => { 10 | const {client} = opts 11 | 12 | return client.netFindPeers() 13 | .then( 14 | peers => { 15 | peers.forEach(peer => { 16 | println(peer) 17 | }) 18 | }) 19 | .catch( 20 | err => { throw new Error(`Error finding peers: ${err.message}`) } 21 | ) 22 | }) 23 | } 24 | -------------------------------------------------------------------------------- /src/client/cli/commands/net/identify.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../../api/RestClient') 4 | const { subcommand, printJSON } = require('../../util') 5 | 6 | module.exports = { 7 | command: 'identify <peerId>', 8 | description: `Use the libp2p-identify protocol to get information about a peer and print in JSON format.\n`, 9 | builder: { 10 | color: { 11 | type: 'boolean', 12 | description: 'Explicitly enable (or disable, with --no-color) colorized output.\n', 13 | default: null, 14 | defaultDescription: 'Print in color if stdout is a tty, and monochrome if piped or pretty-printing is disabled.' 15 | }, 16 | pretty: { 17 | type: 'boolean', 18 | description: 'Pretty print the output.\n', 19 | default: true, 20 | defaultDescription: 'True. Use --no-pretty for compact output.'
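// Example (assumed reachable peer; id borrowed from the `auth grant` example):
// mcclient net identify QmZtSnkmB9DkKJ1L4V65XZZAJC2GyCdge7x2cGn9Z9NTBs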
21 | } 22 | }, 23 | handler: subcommand((opts: {client: RestClient, peerId: string, color: ?boolean, pretty: boolean}) => { 24 | const {client, peerId, color, pretty} = opts 25 | 26 | return client.netIdentify(peerId) 27 | .then(info => { 28 | printJSON(info, {color, pretty}) 29 | }) 30 | .catch( 31 | err => { throw new Error(`Error identifying peer: ${err.message}`) } 32 | ) 33 | }) 34 | } 35 | -------------------------------------------------------------------------------- /src/client/cli/commands/net/ping.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../../api/RestClient') 4 | const { subcommand, println } = require('../../util') 5 | 6 | module.exports = { 7 | command: 'ping <peerId>', 8 | describe: 'Ping a remote peer, identified by `peerId`, using the libp2p ping protocol. ' + 9 | 'Will attempt to look up the peer with a configured directory server or DHT.\n', 10 | handler: subcommand((opts: {peerId: string, client: RestClient}) => { 11 | const {peerId, client} = opts 12 | println('Pinging peer: ', peerId) 13 | 14 | return client.netPing(peerId) 15 | .then( 16 | latency => println(`Ping OK. Latency: ${latency}`), 17 | err => { throw new Error(`Error pinging: ${err.message}`) } 18 | ) 19 | }) 20 | } 21 | -------------------------------------------------------------------------------- /src/client/cli/commands/netAddr.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const cmd = require('./net/addr') 4 | module.exports = Object.assign({}, cmd, { 5 | command: 'netAddr [peerId]', 6 | description: `${cmd.description.trim()} (alias for 'net addr')\n` 7 | }) 8 | -------------------------------------------------------------------------------- /src/client/cli/commands/netConnections.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const cmd = require('./net/connections') 4 | module.exports = Object.assign({}, cmd, { 5 | command: 'netConnections', 6 | description: `${cmd.description.trim()} (alias for 'net connections')\n` 7 | }) 8 | -------------------------------------------------------------------------------- /src/client/cli/commands/ping.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../api/RestClient') 4 | const { subcommand, println } = require('../util') 5 | 6 | module.exports = { 7 | command: 'ping <peerId>', 8 | describe: 'Ping a remote peer, identified by `peerId`. Deprecated in favor of `mcclient net ping`. '
+ 9 | 'Will attempt to look up the peer with a configured directory server or DHT.\n', 10 | handler: subcommand((opts: {peerId: string, client: RestClient}) => { 11 | const {peerId, client} = opts 12 | println('Pinging peer: ', peerId) 13 | 14 | return client.ping(peerId) 15 | .then( 16 | () => println('Ping OK'), 17 | err => { throw new Error(`Error pinging: ${err.message}`) } 18 | ) 19 | }) 20 | } 21 | -------------------------------------------------------------------------------- /src/client/cli/commands/publishRaw.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../api/RestClient') 4 | const { subcommand, println } = require('../util') 5 | 6 | module.exports = { 7 | command: 'publishRaw <namespace> <statementBodyId>', 8 | description: 'Publish a statement whose body (actual metadata content) has ' + 9 | 'already been stored in the node. `statementBodyId` should be the multihash ' + 10 | 'identifier of the statement body.\n', 11 | 12 | handler: subcommand((opts: {client: RestClient, namespace: string, statementBodyId: string}) => { 13 | const {client, namespace, statementBodyId} = opts 14 | 15 | return client.publish({namespace}, {object: statementBodyId}) 16 | .then(objectIds => objectIds.forEach(println)) 17 | }) 18 | } 19 | -------------------------------------------------------------------------------- /src/client/cli/commands/publishSchema.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../api/RestClient') 4 | const { subcommand, println } = require('../util') 5 | const { loadSelfDescribingSchema, schemaDescriptionToWKI } = require('../../../metadata/schema') 6 | 7 | const SCHEMA_NAMESPACE = 'mediachain.schemas' 8 | 9 | module.exports = { 10 | command: 'publishSchema <filename>', 11 | description: 'Publish a self-describing json-schema document to the local node.\n', 12 | builder: { 13 | namespace: { 14 | description: 'Namespace to publish the schema to.', 15 | type: 'string', 16 | default: SCHEMA_NAMESPACE 17 | } 18 | }, 19 | 20 | handler: subcommand((opts: {client: RestClient, schemaName: string, version: string, filename: string, namespace: string}) => { 21 | const {client, filename, namespace} = opts 22 | 23 | const schema = loadSelfDescribingSchema(filename) 24 | const wki = schemaDescriptionToWKI(schema.self) 25 | 26 | return client.putData(schema) 27 | .then(([objectId]) => 28 | client.publish({namespace}, {object: objectId, refs: [wki]}) 29 | .then(([statementId]) => { 30 | println(`Published schema with wki = ${wki} to namespace ${namespace}`) 31 | println(`Object ID: ${objectId}`) 32 | println(`Statement ID: ${statementId}`) 33 | })) 34 | }) 35 | } 36 | -------------------------------------------------------------------------------- /src/client/cli/commands/push.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../api/RestClient') 4 | const { subcommand, pluralizeCount, println } = require('../util') 5 | 6 | module.exports = { 7 | command: 'push <remotePeer> <queryString>', 8 | description: 'Push statements and their referenced objects that match `query` to ' + 9 | '`remotePeer` from the local node. The local node must be authorized with the remote ' + 10 | 'peer for the namespaces you are pushing to.
\n', 11 | handler: subcommand((opts: {client: RestClient, queryString: string, remotePeer: string}) => { 12 | const {client, queryString, remotePeer} = opts 13 | 14 | return client.push(queryString, remotePeer) 15 | .then(({statementCount, objectCount}) => { 16 | println( 17 | `Pushed ${pluralizeCount(statementCount, 'statement')} and ${pluralizeCount(objectCount, 'object')}` 18 | ) 19 | }) 20 | }) 21 | } 22 | 23 | -------------------------------------------------------------------------------- /src/client/cli/commands/putData.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const cmd = require('./data/put') 4 | 5 | module.exports = Object.assign({}, cmd, { 6 | command: 'putData [filename]', 7 | description: `${cmd.description.trim()} (alias for 'data put')\n` 8 | }) 9 | -------------------------------------------------------------------------------- /src/client/cli/commands/query.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../api/RestClient') 4 | const { subcommand, printJSON } = require('../util') 5 | 6 | module.exports = { 7 | command: 'query <queryString>', 8 | builder: { 9 | remotePeer: { 10 | description: 'The id of a remote peer to route the query to.', 11 | alias: 'r' 12 | }, 13 | color: { 14 | type: 'boolean', 15 | description: 'Explicitly enable (or disable, with --no-color) colorized output.\n', 16 | default: null, 17 | defaultDescription: 'Print in color if stdout is a tty, and monochrome if piped or pretty-printing is disabled.' 18 | }, 19 | pretty: { 20 | type: 'boolean', 21 | description: 'Pretty print the output.\n', 22 | default: true, 23 | defaultDescription: 'True. Use --no-pretty for compact output.'
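// Example (hypothetical query; assumes concat's SELECT-style query language):
// mcclient query 'SELECT * FROM images.dpla LIMIT 5' --remotePeer <peerId>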
24 | } 25 | }, 26 | description: 'Send a mediachain query to the local node or a remote peer for evaluation.\n', 27 | handler: subcommand((opts: {client: RestClient, queryString: string, remotePeer?: string, pretty: boolean, color?: boolean}) => { 28 | const {client, queryString, remotePeer, pretty, color} = opts 29 | 30 | return client.queryStream(queryString, remotePeer) 31 | .then(response => new Promise((resolve, reject) => { 32 | response.stream() 33 | .on('data', result => { 34 | printJSON(result, {color, pretty}) 35 | }) 36 | .on('end', resolve) 37 | .on('error', reject) 38 | })) 39 | }) 40 | } 41 | -------------------------------------------------------------------------------- /src/client/cli/commands/shutdown.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../api/RestClient') 4 | const { subcommand, println } = require('../util') 5 | 6 | module.exports = { 7 | command: 'shutdown', 8 | description: 'Tell the local node to shut down.\n', 9 | handler: subcommand((opts: {client: RestClient}) => { 10 | const {client} = opts 11 | return client.shutdown().then( 12 | () => println('Node shut down successfully') 13 | ) 14 | }) 15 | } 16 | -------------------------------------------------------------------------------- /src/client/cli/commands/statement.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../api/RestClient') 4 | const { subcommand, printJSON } = require('../util') 5 | 6 | module.exports = { 7 | command: 'statement <statementId>', 8 | description: 'Retrieve a statement from the local node by its id.\n', 9 | builder: { 10 | color: { 11 | type: 'boolean', 12 | description: 'Explicitly enable (or disable, with --no-color) colorized output.\n', 13 | default: null, 14 | defaultDescription: 'Print in color if stdout is a tty, and monochrome if piped or pretty-printing is disabled.' 15 | }, 16 | pretty: { 17 | type: 'boolean', 18 | description: 'Pretty print the output.\n', 19 | default: true, 20 | defaultDescription: 'True. Use --no-pretty for compact output.' 21 | } 22 | }, 23 | 24 | handler: subcommand((opts: {client: RestClient, statementId: string, color: ?boolean, pretty: boolean}) => { 25 | const {client, statementId, color, pretty} = opts 26 | 27 | return client.statement(statementId) 28 | .then(obj => { printJSON(obj, {color, pretty}) }) 29 | }) 30 | } 31 | -------------------------------------------------------------------------------- /src/client/cli/commands/status.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const RestClient = require('../../api/RestClient') 4 | const { subcommand, println } = require('../util') 5 | import type { NodeStatus } from '../../api/RestClient' 6 | 7 | module.exports = { 8 | command: 'status [newStatus]', 9 | description: 'Get or set the status of the local node. ' + 10 | 'If `newStatus` is not given, returns the current status. ' + 11 | '`newStatus` must be one of: online, offline, public\n', 12 | handler: subcommand((opts: {client: RestClient, newStatus?: string}) => { 13 | const {client, newStatus} = opts 14 | 15 | if (!newStatus) { 16 | return client.getStatus().then(println) 17 | } 18 | 19 | let status: NodeStatus 20 | switch (newStatus) { 21 | case 'online': 22 | case 'offline': 23 | case 'public': 24 | status = newStatus 25 | break 26 | default: 27 | return Promise.reject( 28 | new Error(`Cannot set status to ${newStatus}.
Must be one of: online, offline, public`) 29 | ) 30 | } 31 | return client.setStatus(status) 32 | .then(returnedStatus => { 33 | println(`status set to ${returnedStatus}`) 34 | }) 35 | }) 36 | } 37 | -------------------------------------------------------------------------------- /src/client/cli/commands/validate.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const fs = require('fs') 4 | const path = require('path') 5 | const RestClient = require('../../api/RestClient') 6 | const { JQTransform } = require('../../../metadata/jqStream') 7 | const { validate, loadSelfDescribingSchema, validateSelfDescribingSchema } = require('../../../metadata/schema') 8 | const { subcommand, pluralizeCount, isB58Multihash, println } = require('../util') 9 | import type { Readable } from 'stream' 10 | import type { SelfDescribingSchema } from '../../../metadata/schema' 11 | 12 | type HandlerOptions = { 13 | client: RestClient, 14 | schema?: string, 15 | jsonld: boolean, 16 | filename?: string, 17 | jqFilter: string, 18 | } 19 | 20 | module.exports = { 21 | command: 'validate [filename]', 22 | description: 'Validate newline-delimited json objects against a schema. ' + 23 | 'Objects will be read from `filename` or stdin.\n', 24 | builder: { 25 | schema: { 26 | description: 'Either a path to a local schema, or the base58 object id of a published schema. ' + 27 | 'Required, unless --jsonld is present.\n', 28 | type: 'string' 29 | }, 30 | jsonld: { 31 | description: 'If --jsonld is present, validate that inputs are structurally valid JSONLD. ' + 32 | 'This does not ensure that they are semantically correct.\n', 33 | type: 'boolean', 34 | default: false, 35 | defaultDescription: 'False. Setting this flag will cause the --schema argument to be ignored.' 36 | }, 37 | jqFilter: { 38 | type: 'string', 39 | description: 'A jq filter to apply to input records as a pre-processing step. ' + 40 | 'The filtered output will be validated against the schema. ' + 41 | 'The default filter (".") passes each record through unchanged.\n', 42 | default: '.'
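/* example (hypothetical filter): --jqFilter '{title: .name}' would validate the reshaped {title: ...} records rather than the raw input */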
43 | } 44 | }, 45 | 46 | handler: subcommand((opts: HandlerOptions) => { 47 | const { client, filename, jqFilter, jsonld } = opts 48 | let { schema } = opts 49 | let streamName = 'standard input' 50 | 51 | let stream: Readable 52 | if (filename) { 53 | stream = fs.createReadStream(filename) 54 | streamName = filename 55 | } else { 56 | stream = process.stdin 57 | } 58 | 59 | if (jsonld === true) { 60 | schema = path.join(__dirname, '..', '..', '..', 'metadata', 'schemas', 61 | 'io.mediachain.jsonld-jsonschema-1-0-0.json') 62 | } 63 | 64 | if (schema == null) { 65 | throw new Error('You must provide either the --schema or --jsonld arguments.') 66 | } 67 | 68 | let schemaPromise: Promise 69 | if (isB58Multihash(schema)) { 70 | schemaPromise = client.getData(schema).then(validateSelfDescribingSchema) 71 | } else { 72 | schemaPromise = Promise.resolve(loadSelfDescribingSchema(schema)) 73 | } 74 | 75 | return schemaPromise.then(schema => 76 | validateStream({ 77 | stream, 78 | streamName, 79 | schema, 80 | jqFilter 81 | }) 82 | ) 83 | }) 84 | } 85 | 86 | function validateStream (opts: { 87 | stream: Readable, 88 | streamName: string, 89 | schema: SelfDescribingSchema, 90 | jqFilter: string 91 | }): Promise<*> { 92 | const { 93 | stream, 94 | streamName, 95 | schema, 96 | jqFilter 97 | } = opts 98 | 99 | let count = 0 100 | 101 | const jq = new JQTransform(jqFilter) 102 | 103 | return new Promise((resolve, reject) => { 104 | stream.pipe(jq) 105 | .on('data', jsonString => { 106 | const obj = JSON.parse(jsonString) 107 | const result = validate(schema, obj) 108 | if (!result.success) { 109 | return reject(new Error(`${result.error.message}.\nFailed object:\n${jsonString}`)) 110 | } 111 | count += 1 112 | }) 113 | .on('error', err => { 114 | return reject(new Error(`Error reading from ${streamName}: ${err.message}`)) 115 | }) 116 | .on('end', () => { 117 | println(`${pluralizeCount(count, 'statement')} validated successfully`) 118 | resolve() 119 | }) 120 | }) 121 | } 122 | 123 | -------------------------------------------------------------------------------- /src/client/cli/index.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const yargs = require('yargs') 4 | 5 | yargs 6 | .usage('Usage: $0 [options] [command-options]') 7 | .help() 8 | .version() 9 | .demand(1, 'Missing command argument') 10 | .option('apiUrl', { 11 | alias: ['p'], 12 | description: 'Root URL of the REST API for a mediachain node', 13 | default: 'http://localhost:9002' 14 | }) 15 | .option('sshConfig', { 16 | description: 'Path to a configuration file for SSH tunnelling, e.g. 
the credentials file created by Mediachain Deploy' 17 | }) 18 | .option('timeout', { 19 | type: 'number', 20 | description: `Timeout (in seconds) to use for requests to the mediachain node's API.`, 21 | default: 0, 22 | defaultDescription: '0 (no timeout)', 23 | coerce: seconds => Math.floor(seconds * 1000) // convert to milliseconds for easier consumption 24 | }) 25 | .global('apiUrl') 26 | .global('sshConfig') 27 | .global('timeout') 28 | .commandDir('commands') 29 | .strict() 30 | .wrap(yargs.terminalWidth()) 31 | .argv 32 | 33 | -------------------------------------------------------------------------------- /src/client/cli/util.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const { clone, set } = require('lodash') 4 | const fs = require('fs') 5 | const { JQ_PATH } = require('../../metadata/jqStream') 6 | const childProcess = require('child_process') 7 | const sshTunnel = require('tunnel-ssh') 8 | const { RestClient } = require('../api') 9 | const { println, printlnErr, isB58Multihash } = require('../../common/util') 10 | 11 | function formatJSON (obj: ?mixed, 12 | options: {color?: ?boolean, pretty?: boolean} = {}): string { 13 | const compactOutput = options.pretty === false 14 | 15 | let useColor = false 16 | // print in color if explicitly enabled, or if pretty-printing to a tty 17 | if (options.color === true || (options.color == null && process.stdout.isTTY && !compactOutput)) { 18 | useColor = true 19 | } 20 | 21 | if (!useColor && compactOutput) { 22 | // skip jq if we don't want color or pretty printing 23 | return JSON.stringify(obj) + '\n' 24 | } 25 | 26 | const jqOpts = [(useColor ? '-C' : '-M'), '-a', '.'] 27 | if (options.pretty === false) { 28 | jqOpts.unshift('-c') 29 | } 30 | 31 | return childProcess.execFileSync(JQ_PATH, jqOpts, {input: JSON.stringify(obj), encoding: 'utf-8'}).toString() 32 | } 33 | 34 | function printJSON (obj: ?mixed, 35 | options: {color?: ?boolean, pretty?: boolean} = {}) { 36 | process.stdout.write(formatJSON(obj, options)) 37 | } 38 | 39 | function pluralizeCount (count: number, word: string): string { 40 | let plural = word 41 | if (count !== 1) plural += 's' 42 | return count.toString() + ' ' + plural 43 | } 44 | 45 | function setupSSHTunnel (config: Object): Promise<Object> { 46 | return new Promise((resolve, reject) => { 47 | sshTunnel(config, (err, server) => { 48 | if (err) return reject(err) 49 | resolve(server) 50 | }) 51 | }) 52 | } 53 | 54 | function ensureAll (obj: Object, keys: Array<string>, description: string = 'object') { 55 | for (const key of keys) { 56 | if (obj[key] === undefined) { 57 | throw new Error(`${description} is missing required field ${key}`) 58 | } 59 | } 60 | } 61 | 62 | function prepareSSHConfig (config: Object | string): Object { 63 | if (typeof config === 'string') { 64 | config = JSON.parse(fs.readFileSync(config, 'utf8')) 65 | } 66 | 67 | ensureAll(config, ['host', 'username'], 'SSH configuration') 68 | 69 | const defaultOpts = { 70 | dstPort: 9002, 71 | localPort: 0, 72 | localHost: 'localhost', 73 | keepAlive: true 74 | } 75 | 76 | return Object.assign({}, defaultOpts, config) 77 | } 78 | 79 | type GlobalOptions = { 80 | apiUrl: string, 81 | sshConfig?: string | Object, 82 | timeout: number 83 | } 84 | 85 | type SubcommandGlobalOptions = { // eslint-disable-line no-unused-vars 86 | client: RestClient 87 | } 88 | 89 | function subcommand<T: SubcommandGlobalOptions> (handler: (argv: T) => Promise<*>): (argv: GlobalOptions) => void { 90 | return (argv: GlobalOptions) => { 91 | const
{sshConfig, timeout} = argv 92 | let {apiUrl} = argv 93 | 94 | const sshTunnelConfig = (sshConfig != null) 95 | ? prepareSSHConfig(sshConfig) 96 | : null 97 | 98 | let sshTunnelPromise 99 | let sshTunnel = null 100 | if (sshTunnelConfig != null) { 101 | sshTunnelPromise = setupSSHTunnel(sshTunnelConfig) 102 | .then(tunnel => { 103 | tunnel.on('error', err => { 104 | console.error(`SSH Error: ${err.message}`) 105 | tunnel.close() 106 | process.exit(1) 107 | }) 108 | const addr = tunnel.address() 109 | 110 | sshTunnel = tunnel 111 | apiUrl = `http://${addr.address}:${addr.port}` 112 | }) 113 | } else { 114 | sshTunnelPromise = Promise.resolve() 115 | } 116 | 117 | function closeTunnel () { 118 | if (sshTunnel != null) { 119 | sshTunnel.close() 120 | } 121 | } 122 | 123 | sshTunnelPromise 124 | .then(() => { 125 | const client = new RestClient({apiUrl, requestTimeout: timeout}) 126 | return set(clone(argv), 'client', client) 127 | }) 128 | .then(subcommandOptions => handler(subcommandOptions)) 129 | .then(closeTunnel) 130 | .catch(err => { 131 | closeTunnel() 132 | console.error(err.message) 133 | process.exit(1) 134 | }) 135 | } 136 | } 137 | 138 | module.exports = { 139 | println, 140 | printlnErr, 141 | formatJSON, 142 | printJSON, 143 | pluralizeCount, 144 | isB58Multihash, 145 | subcommand, 146 | prepareSSHConfig 147 | } 148 | -------------------------------------------------------------------------------- /src/metadata/jqStream.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const ChildProcessStream = require('duplex-child-process') 4 | const byline = require('byline') 5 | const path = require('path') 6 | const JQ_PATH = path.join(__dirname, '..', '..', 'bin', 'jq') 7 | 8 | class JQTransform extends ChildProcessStream { 9 | _args: Array 10 | 11 | constructor (filter: string) { 12 | super({encoding: 'utf-8'}) 13 | this._args = [ 14 | '-a', // ascii output (escape unicode characters) 15 | '-c', // compact (no pretty print) 16 | '-M', // monochrome output 17 | '-S', // sort object keys 18 | filter // don't need to escape input, since child_process.spawn doesn't use a subshell 19 | ] 20 | 21 | // overwrite the _reader member of superclass to return results line-by-line 22 | this._reader = byline(this._reader) 23 | 24 | // do the thing 25 | this.spawn(JQ_PATH, this._args) 26 | } 27 | } 28 | 29 | module.exports = { 30 | JQTransform, 31 | JQ_PATH 32 | } 33 | -------------------------------------------------------------------------------- /src/metadata/schemas/com.snowplowanalytics.self-desc-jsonschema-1-0-0.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema" : "http://json-schema.org/draft-04/schema#", 3 | "description": "Meta-schema for self-describing JSON schema", 4 | "self": { 5 | "vendor": "com.snowplowanalytics.self-desc", 6 | "name": "schema", 7 | "format": "jsonschema", 8 | "version": "1-0-0" 9 | }, 10 | 11 | "allOf": [ 12 | { 13 | "properties": { 14 | "self": { 15 | "type": "object", 16 | "properties": { 17 | "vendor": { 18 | "type": "string", 19 | "pattern": "^[a-zA-Z0-9-_.]+$" 20 | }, 21 | "name": { 22 | "type": "string", 23 | "pattern": "^[a-zA-Z0-9-_]+$" 24 | }, 25 | "format": { 26 | "type": "string", 27 | "pattern": "^[a-zA-Z0-9-_]+$" 28 | }, 29 | "version": { 30 | "type": "string", 31 | "pattern": "^[0-9]+-[0-9]+-[0-9]+$" 32 | } 33 | }, 34 | "required": ["vendor", "name", "format", "version"], 35 | "additionalProperties": false 36 | } 37 | }, 38 | "required": ["self"] 
39 | }, 40 | 41 | { 42 | "$ref": "http://json-schema.org/draft-04/schema#" 43 | } 44 | ] 45 | 46 | } 47 | -------------------------------------------------------------------------------- /src/metadata/schemas/io.mediachain.jsonld-jsonschema-1-0-0.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "Schema for JSON-LD", 3 | "$schema": "http://json-schema.org/draft-04/schema#", 4 | 5 | "self": { 6 | "vendor": "io.mediachain", 7 | "name": "jsonld", 8 | "format": "jsonschema", 9 | "version": "1-0-0" 10 | }, 11 | 12 | "definitions":{ 13 | "context": { 14 | "additionalProperties": true, 15 | "properties": { 16 | "@context": { 17 | "description": "Used to define the short-hand names that are used throughout a JSON-LD document.", 18 | "type": ["object", "string", "array", "null"] 19 | } 20 | } 21 | }, 22 | "graph": { 23 | "additionalProperties": true, 24 | "properties": { 25 | "@graph": { 26 | "description": "Used to express a graph.", 27 | "type": ["array", "object"], 28 | "additionalItems": { 29 | "anyOf": [{ "$ref": "#/definitions/common" }] 30 | } 31 | } 32 | } 33 | }, 34 | "common":{ 35 | "additionalProperties": { 36 | "anyOf": [{ "$ref": "#/definitions/common" }] 37 | }, 38 | "properties": { 39 | "@id": { 40 | "description": "Used to uniquely identify things that are being described in the document with IRIs or blank node identifiers.", 41 | "type": "string", 42 | "format": "uri" 43 | }, 44 | "@value": { 45 | "description": "Used to specify the data that is associated with a particular property in the graph.", 46 | "type": ["string", "boolean", "number", "null"] 47 | }, 48 | "@language": { 49 | "description": "Used to specify the language for a particular string value or the default language of a JSON-LD document.", 50 | "type": ["string", "null"] 51 | }, 52 | "@type": { 53 | "description": "Used to set the data type of a node or typed value.", 54 | "type": ["string", "null", "array"] 55 | }, 56 | "@container": { 57 | "description": "Used to set the default container type for a term.", 58 | "type": ["string", "null"], 59 | "enum": ["@list", "@index", "@set"] 60 | }, 61 | "@list": { 62 | "description": "Used to express an ordered set of data." 63 | }, 64 | "@set": { 65 | "description": "Used to express an unordered set of data and to ensure that values are always represented as arrays." 
66 | }, 67 | "@reverse": { 68 | "description": "Used to express reverse properties.", 69 | "type": ["string", "object", "null"], 70 | "additionalProperties": { 71 | "anyOf": [{ "$ref": "#/definitions/common" }] 72 | } 73 | }, 74 | "@base": { 75 | "description": "Used to set the base IRI against which relative IRIs are resolved", 76 | "type": ["string", "null"], 77 | "format": "uri" 78 | }, 79 | "@vocab": { 80 | "description": "Used to expand properties and values in @type with a common prefix IRI", 81 | "type": ["string", "null"], 82 | "format": "uri" 83 | } 84 | } 85 | } 86 | }, 87 | 88 | "allOf": [ 89 | { "$ref": "#/definitions/context" }, 90 | { "$ref": "#/definitions/graph" }, 91 | { "$ref": "#/definitions/common" } 92 | ], 93 | 94 | "type": ["object", "array"], 95 | "additionalProperties": true 96 | } 97 | -------------------------------------------------------------------------------- /src/metadata/schemaver.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | // tools for parsing and comparing SchemaVer semantic versions for data schemas 4 | // see https://github.com/snowplow/iglu/wiki/SchemaVer 5 | // and http://snowplowanalytics.com/blog/2014/05/13/introducing-schemaver-for-semantic-versioning-of-schemas/ 6 | 7 | export type SchemaVer = { 8 | model: number, 9 | revision: number, 10 | addition: number 11 | } 12 | 13 | function isSchemaVer (obj: mixed): boolean { 14 | if (obj == null || typeof (obj) !== 'object') return false 15 | if (obj.model == null || typeof (obj.model) !== 'number') return false 16 | if (obj.revision == null || typeof (obj.revision) !== 'number') return false 17 | if (obj.addition == null || typeof (obj.addition) !== 'number') return false 18 | return true 19 | } 20 | 21 | function parseSchemaVer (version: string | Object): ?SchemaVer { 22 | if (typeof (version) === 'string') { 23 | const components = version.split('-') 24 | .map(s => Number(s)) 25 | .filter(n => !isNaN(n)) 26 | 27 | if (components.length !== 3) return null 28 | const [model, revision, addition] = components 29 | return {model, revision, addition} 30 | } 31 | 32 | if (isSchemaVer(version)) return version 33 | 34 | return null 35 | } 36 | 37 | function isCompatible (a: SchemaVer | string, b: SchemaVer | string): boolean { 38 | const parsedA = parseSchemaVer(a) 39 | const parsedB = parseSchemaVer(b) 40 | if (parsedA == null || parsedB == null) return false 41 | 42 | return parsedA.model === parsedB.model && 43 | parsedA.revision === parsedB.revision 44 | } 45 | 46 | module.exports = { 47 | parseSchemaVer, 48 | isCompatible 49 | } 50 | -------------------------------------------------------------------------------- /src/metadata/serialize.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const cbor = require('borc') 4 | const {encode, decodeFirst} = cbor 5 | 6 | module.exports = { 7 | encode, 8 | decode: decodeFirst 9 | } 10 | -------------------------------------------------------------------------------- /src/model/query_result.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const { Statement, StatementBody } = require('./statement') 4 | import type { QueryResultMsg, QueryResultValueMsg, SimpleValueMsg, CompoundValueMsg } from '../protobuf/types' 5 | 6 | export type QueryResult = QueryResultValue | Error 7 | export type QueryResultValue = SimpleQueryResultValue | CompoundQueryResultValue 8 | export type SimpleQueryResultValue = 
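/* mirrors the SimpleValue message in node.proto: intValue, stringValue, stmt, or stmtBody */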
number | string | Statement | StatementBody 9 | 10 | function unpackQueryResultProtobuf (msg: QueryResultMsg): QueryResult { 11 | if (msg.error != null) { 12 | const errorMsg = msg.error.error || 'Unknown error' 13 | return new Error(errorMsg) 14 | } 15 | if (msg.value != null) { 16 | return unpackQueryResultValueProtobuf((msg.value: any)) 17 | } 18 | throw new Error('Unexpected query result: ' + JSON.stringify(msg)) 19 | } 20 | 21 | function unpackQueryResultValueProtobuf (msg: QueryResultValueMsg): QueryResultValue { 22 | if (msg.simple != null) { 23 | return unpackSimpleValue((msg.simple: any)) 24 | } 25 | if (msg.compound != null) { 26 | return CompoundQueryResultValue.fromProtobuf((msg.compound: any)) 27 | } 28 | throw new Error('Unexpected query result value ' + JSON.stringify(msg)) 29 | } 30 | 31 | function unpackSimpleValue (val: SimpleValueMsg): SimpleQueryResultValue { 32 | if (val.stringValue != null) return (val.stringValue: any) 33 | if (val.intValue != null) return (val.intValue: any) 34 | if (val.stmt != null) return Statement.fromProtobuf((val.stmt: any)) 35 | if (val.stmtBody != null) return StatementBody.fromProtobuf((val.stmtBody: any)) 36 | 37 | throw new Error('Unexpected query result value: ' + JSON.stringify(val)) 38 | } 39 | 40 | type KVPair = {key: string, value: SimpleQueryResultValue} 41 | class CompoundQueryResultValue { 42 | body: Array 43 | 44 | constructor (body: Array) { 45 | this.body = body 46 | } 47 | 48 | static fromProtobuf (msg: CompoundValueMsg): CompoundQueryResultValue { 49 | return new CompoundQueryResultValue( 50 | msg.body.map(kv => ({ 51 | key: kv.key, 52 | value: unpackSimpleValue(kv.value) 53 | })) 54 | ) 55 | } 56 | 57 | keys (): Array { 58 | return this.body.map(kv => kv.key) 59 | } 60 | 61 | values (): Array { 62 | return this.body.map(kv => kv.value) 63 | } 64 | 65 | statements (): Array { 66 | return this.values() 67 | .filter(v => v instanceof Statement) 68 | .map(v => (v: any)) // dammit flow 69 | } 70 | } 71 | 72 | module.exports = { 73 | unpackQueryResultProtobuf, 74 | unpackQueryResultValueProtobuf, 75 | CompoundQueryResultValue 76 | } 77 | -------------------------------------------------------------------------------- /src/peer/constants.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const Multiaddr = require('multiaddr') 4 | 5 | const DEFAULT_LISTEN_ADDR = Multiaddr('/ip4/127.0.0.1/tcp/0') 6 | const PROTOCOLS = { 7 | node: { 8 | id: '/mediachain/node/id', 9 | ping: '/mediachain/node/ping', 10 | query: '/mediachain/node/query', 11 | data: '/mediachain/node/data', 12 | push: '/mediachain/node/push' 13 | }, 14 | dir: { 15 | list: '/mediachain/dir/list', 16 | listns: '/mediachain/dir/listns', 17 | lookup: '/mediachain/dir/lookup', 18 | register: '/mediachain/dir/register' 19 | } 20 | } 21 | 22 | module.exports = { 23 | DEFAULT_LISTEN_ADDR, 24 | PROTOCOLS 25 | } 26 | -------------------------------------------------------------------------------- /src/peer/datastore.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | /** 4 | * @module aleph/peer/datastore 5 | */ 6 | 7 | const { clone } = require('lodash') 8 | const Levelup = require('levelup') 9 | const uuid = require('uuid') 10 | const serialize = require('../metadata/serialize') 11 | const { b58MultihashForBuffer } = require('../common/util') 12 | 13 | export type DatastoreOptions = { 14 | backend: 'memory', // just in-memory for now, expand to e.g. 
rocksdb 15 | location?: string 16 | } 17 | 18 | const DefaultOptions: DatastoreOptions = { 19 | backend: 'memory' 20 | } 21 | 22 | /** 23 | * A content-addressed datastore used by {@link MediachainNode} to store data objects. 24 | * Objects are keyed by the base58-encoded multihash string of their content. 25 | * Plain JS objects may be stored, in which case they will be serialized to CBOR, 26 | * and the key will be the hash of the encoded object. 27 | * 28 | * Currently only supports in-memory storage. 29 | */ 30 | class Datastore { 31 | db: Levelup 32 | location: string 33 | 34 | constructor (options: ?DatastoreOptions = null) { 35 | if (options == null) { 36 | options = DefaultOptions 37 | } else { 38 | options = Object.assign(clone(DefaultOptions), options) 39 | } 40 | 41 | const levelOpts: Object = {} 42 | switch (options.backend) { 43 | case 'memory': 44 | levelOpts.db = require('memdown') 45 | break 46 | 47 | default: 48 | throw new Error(`Datastore backend ${options.backend} not supported`) 49 | } 50 | 51 | levelOpts.valueEncoding = valueCodec 52 | const location = options.location || '/aleph/data-' + uuid.v4() 53 | 54 | this.db = Levelup(location, levelOpts) 55 | this.location = location 56 | } 57 | 58 | /** 59 | * Add a `Buffer` of binary data or a JS object that can be encoded to CBOR. 60 | * @param value 61 | * @returns {Promise} 62 | * resolves to a base58-encoded multihash string, which encodes the sha256 hash 63 | * of the object's content. If `value` is a JS object, it will be serialized to 64 | * CBOR before being hashed and stored. 65 | */ 66 | put (value: Buffer | Object): Promise { 67 | return Promise.resolve().then(() => { 68 | if (!Buffer.isBuffer(value)) { 69 | value = serialize.encode(value) 70 | } 71 | 72 | const key = b58MultihashForBuffer(value) 73 | 74 | return new Promise((resolve, reject) => { 75 | this.db.put(key, value, { sync: true }, (err) => { 76 | if (err) return reject(err) 77 | resolve(key) 78 | }) 79 | }) 80 | }) 81 | } 82 | 83 | /** 84 | * Fetches an object by its `key`, returning either the raw `Buffer` for the key, or 85 | * a deserialized JS object. 86 | * @param key 87 | * @param opts 88 | * If `opts.returnRawBuffer` is `true`, will not attempt to deserialize data objects from CBOR. 89 | * @returns {Promise} 90 | * If `opts.returnRawBuffer` is `true`, or if a given value can't be deserialized, resolves to 91 | * the raw `Buffer` associated with the given key. 92 | * If `opts.returnRawBuffer` is `false` or not present, will attempt to deserialize objects from CBOR. 93 | * If deserialization fails, resolves to the raw buffer. 94 | * 95 | * Will fail with an Error if no object exists for the given key, or if a low-level error occurs. 96 | * "object not found" errors will have a `.notFound` property, which can be used to distinguish them 97 | * from low-level errors. 98 | */ 99 | get (key: string, opts: {returnRawBuffer?: boolean} = {}): Promise { 100 | return new Promise((resolve, reject) => { 101 | this.db.get(key, (err, val) => { 102 | if (err) return reject(err) 103 | 104 | if (opts.returnRawBuffer === true) { 105 | return resolve(val) 106 | } 107 | 108 | try { 109 | return resolve(serialize.decode(val)) 110 | } catch (err) { 111 | return resolve(val) 112 | } 113 | }) 114 | }) 115 | } 116 | 117 | /** 118 | * Check if the datastore has an object for the given key. 119 | * @param key 120 | * @returns {Promise} 121 | * Resolves to true if the object exists, false otherwise. 122 | * Fails with an `Error` if a low-level failure occurs. 
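 * @example
 * // usage sketch (assumes `store` is a Datastore instance; the key comes from a prior put):
 * // store.put({hello: 'world'}).then(key => store.has(key)).then(exists => console.log(exists)) // => true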
123 | */ 124 | has (key: string): Promise { 125 | return this.get(key, {returnRawBuffer: true}) 126 | .then(() => true) 127 | .catch(err => { 128 | if (err.notFound) return false 129 | throw err 130 | }) 131 | } 132 | } 133 | 134 | const valueCodec = { 135 | type: 'noop', 136 | buffer: true, 137 | 138 | encode (val: Buffer): Buffer { 139 | return val 140 | }, 141 | 142 | decode (buf: Buffer): Buffer { 143 | return buf 144 | } 145 | } 146 | 147 | module.exports = { 148 | Datastore 149 | } 150 | -------------------------------------------------------------------------------- /src/peer/db/migrations/20161221093425_create_tables.js: -------------------------------------------------------------------------------- 1 | 2 | exports.up = function (knex, Promise) { 3 | return knex.schema.raw('CREATE TABLE Statement (id VARCHAR(128) PRIMARY KEY, data VARBINARY)') 4 | .then(() => knex.schema.raw('CREATE TABLE Envelope (counter INTEGER PRIMARY KEY AUTOINCREMENT, id VARCHAR(128), namespace VARCHAR, publisher VARCHAR, source VARCHAR, timestamp INTEGER)')) 5 | .then(() => knex.schema.raw('CREATE UNIQUE INDEX EnvelopeId ON Envelope (id)')) 6 | .then(() => knex.schema.raw('CREATE INDEX EnvelopeNS ON Envelope (namespace)')) 7 | .then(() => knex.schema.raw('CREATE TABLE Refs (id VARCHAR(128), wki VARCHAR)')) 8 | .then(() => knex.schema.raw('CREATE INDEX RefsId ON Refs (id)')) 9 | .then(() => knex.schema.raw('CREATE INDEX RefsWki ON Refs (wki)')) 10 | } 11 | 12 | exports.down = function (knex, Promise) { 13 | return knex.schema.raw('DROP INDEX RefsId') 14 | .then(() => knex.schema.raw('DROP INDEX RefsWki')) 15 | .then(() => knex.schema.raw('DROP INDEX EnvelopeNS')) 16 | .then(() => knex.schema.raw('DROP INDEX EnvelopeId')) 17 | .then(() => knex.schema.raw('DROP TABLE Refs')) 18 | .then(() => knex.schema.raw('DROP TABLE Envelope')) 19 | .then(() => knex.schema.raw('DROP TABLE Statement')) 20 | } 21 | -------------------------------------------------------------------------------- /src/peer/directory.js: -------------------------------------------------------------------------------- 1 | const P2PNode = require('./libp2p_node') 2 | const PeerId = require('peer-id') 3 | const PeerInfo = require('peer-info') 4 | const PeerBook = require('peer-book') 5 | const Multiaddr = require('multiaddr') 6 | const pull = require('pull-stream') 7 | const { values } = require('lodash') 8 | const pb = require('../protobuf') 9 | const { protoStreamDecode, protoStreamEncode, peerInfoProtoUnmarshal } = require('./util') 10 | const { DEFAULT_LISTEN_ADDR, PROTOCOLS } = require('./constants') 11 | const { isB58Multihash } = require('../common/util') 12 | 13 | import type { Connection } from 'interface-connection' 14 | import type { LookupPeerRequestMsg, LookupPeerResponseMsg } from '../protobuf/types' 15 | 16 | export type DirectoryNodeOptions = { 17 | peerId: PeerId, 18 | listenAddresses: Array 19 | } 20 | 21 | class DirectoryNode { 22 | p2p: P2PNode 23 | peerBook: PeerBook 24 | 25 | constructor (options: DirectoryNodeOptions) { 26 | let { peerId, listenAddresses } = options 27 | if (listenAddresses == null) listenAddresses = [DEFAULT_LISTEN_ADDR] 28 | 29 | const peerInfo = new PeerInfo(peerId) 30 | listenAddresses.forEach((addr: Multiaddr | string) => { 31 | peerInfo.multiaddr.add(Multiaddr(addr)) 32 | }) 33 | 34 | this.p2p = new P2PNode({peerInfo}) 35 | this.peerBook = new PeerBook() 36 | this.p2p.handle(PROTOCOLS.dir.register, this.registerHandler.bind(this)) 37 | this.p2p.handle(PROTOCOLS.dir.lookup, this.lookupHandler.bind(this)) 
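/* one handler per /mediachain/dir/* protocol id from constants.js; note that listHandler below is still an empty stub */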
38 | this.p2p.handle(PROTOCOLS.dir.list, this.listHandler.bind(this)) 39 | } 40 | 41 | start (): Promise { 42 | return this.p2p.start() 43 | } 44 | 45 | stop (): Promise { 46 | return this.p2p.stop() 47 | } 48 | 49 | get peerInfo (): PeerInfo { 50 | return this.p2p.peerInfo 51 | } 52 | 53 | get registeredPeers (): Array { 54 | return values(this.peerBook.getAll()) 55 | } 56 | 57 | getPeerInfo (peerId: PeerId | string): ?PeerInfo { 58 | let peerId58 59 | if (typeof peerId === 'string' && isB58Multihash(peerId)) { 60 | peerId58 = peerId 61 | } else if (peerId instanceof PeerId) { 62 | peerId58 = peerId.toB58String() 63 | } else { 64 | throw new Error('getPeerInfo needs a PeerId or base58-encoded multihash') 65 | } 66 | 67 | try { 68 | return this.peerBook.getByB58String(peerId58) 69 | } catch (err) { 70 | return null 71 | } 72 | } 73 | 74 | registerHandler (protocol: string, conn: Connection) { 75 | conn.getPeerInfo((err, pInfo) => { 76 | if (err) { 77 | console.error('Error getting peer info for connection:', err) 78 | return 79 | } 80 | 81 | const abortable = this.p2p.newAbortable() 82 | pull( 83 | conn, 84 | protoStreamDecode(pb.dir.RegisterPeer), 85 | pull.map(req => req.info), 86 | pull.map(peerInfoProtoUnmarshal), 87 | pull.through(reqInfo => { 88 | this.peerBook.put(reqInfo) 89 | }), 90 | abortable, 91 | pull.drain() 92 | ) 93 | }) 94 | } 95 | 96 | lookupHandler (protocol: string, conn: Connection) { 97 | const abortable = this.p2p.newAbortable() 98 | 99 | pull( 100 | conn, 101 | protoStreamDecode(pb.dir.LookupPeerRequest), 102 | pull.map(req => this._doLookup(req)), 103 | protoStreamEncode(pb.dir.LookupPeerResponse), 104 | abortable, 105 | conn, 106 | ) 107 | } 108 | 109 | _doLookup (req: LookupPeerRequestMsg): LookupPeerResponseMsg { 110 | if (req.id == null) { 111 | return { peer: null } 112 | } 113 | 114 | try { 115 | const peerInfo = this.peerBook.getByB58String(req.id) 116 | const peer = { 117 | id: peerInfo.id.toB58String(), 118 | addr: peerInfo.multiaddrs.map(maddr => maddr.buffer) 119 | } 120 | return {peer} 121 | } catch (err) { 122 | return { peer: null } 123 | } 124 | } 125 | 126 | listHandler (protocol: string, conn: Connection) { 127 | 128 | } 129 | } 130 | 131 | module.exports = DirectoryNode 132 | -------------------------------------------------------------------------------- /src/peer/repl/commands/query.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const pull = require('pull-stream') 4 | const { bootstrap, binaryToB64 } = require('../util') 5 | const { printJSON } = require('../../../client/cli/util') 6 | const { Statement, StatementBody } = require('../../../model/statement') 7 | 8 | module.exports = { 9 | command: 'query ', 10 | builder: { 11 | includeData: { 12 | type: 'boolean', 13 | alias: 'i', 14 | description: 'Also fetch the associated data objects for each query result and display them inline.\n', 15 | default: false 16 | }, 17 | color: { 18 | type: 'boolean', 19 | description: 'Explicitly enable (or disable, with --no-color) colorized output.\n', 20 | default: null, 21 | defaultDescription: 'Print in color if stdout is a tty, and monochrome if piped or pretty-printing is disabled.' 22 | }, 23 | pretty: { 24 | type: 'boolean', 25 | description: 'Pretty print the output.\n', 26 | default: true, 27 | defaultDescription: 'True. Use --no-pretty for compact output.' 
28 | } 29 | }, 30 | description: 'Query a remote peer using the mediachain peer-to-peer query protocol.\n', 31 | handler: (opts: {queryString: string, dir?: string, remotePeer?: string, identityPath: string, pretty: boolean, color?: boolean, includeData: boolean}) => { 32 | const {queryString, remotePeer, pretty, color, includeData} = opts 33 | if (remotePeer == null) { 34 | // TODO: better message 35 | console.error('remotePeer is required.') 36 | process.exit(1) 37 | } 38 | 39 | let node, remote 40 | bootstrap(opts).then(nodes => { 41 | node = nodes.node 42 | remote = nodes.remote 43 | return node.start() 44 | }) 45 | .then(() => { 46 | if (remote == null) { 47 | throw new Error('Remote peer is unavailable') 48 | } 49 | 50 | let queryPromise 51 | if (includeData) { 52 | queryPromise = node.remoteQueryWithDataStream(remote.remotePeerInfo, queryString) 53 | } else { 54 | queryPromise = node.remoteQueryStream(remote.remotePeerInfo, queryString) 55 | } 56 | 57 | return queryPromise 58 | .then(stream => printResultStream(stream, color, pretty)) 59 | }) 60 | .then(() => { process.exit(0) }) 61 | .catch(err => { 62 | console.error(err.message) 63 | process.exit(1) 64 | }) 65 | } 66 | } 67 | 68 | function printResultStream (queryStream: Function, color: ?boolean, pretty: boolean): Promise { 69 | return new Promise((resolve, reject) => { 70 | pull( 71 | queryStream, 72 | pull.through(result => { 73 | if (result instanceof Statement || result instanceof StatementBody) { 74 | printJSON(result.inspect(), {color, pretty}) 75 | } else { 76 | printJSON(binaryToB64(result), {color, pretty}) 77 | } 78 | }), 79 | pull.onEnd((err) => { 80 | if (err) return reject(err) 81 | resolve() 82 | }) 83 | ) 84 | }) 85 | } 86 | 87 | -------------------------------------------------------------------------------- /src/peer/repl/commands/repl.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const os = require('os') 4 | // $FlowIssue flow doesn't find repl builtin? 
5 | const Repl = require('repl') 6 | const { inspect } = require('util') 7 | const { bootstrap } = require('../util') 8 | 9 | module.exports = { 10 | command: 'repl', 11 | describe: 'start the aleph repl\n', 12 | builder: (yargs: Function) => { 13 | return yargs 14 | .option('dir', { 15 | 'alias': 'd', 16 | 'type': 'string', 17 | 'describe': 'directory to connect to (multiaddress)', 18 | 'demand': false 19 | }) 20 | .help() 21 | }, 22 | handler: (opts: {dir?: string, remotePeer?: string, identityPath: string}) => { 23 | const {remotePeer} = opts 24 | 25 | inspect.defaultOptions = { colors: true, depth: null } 26 | 27 | bootstrap(opts) 28 | .catch(err => { 29 | console.error(`Error setting up aleph node: ${err.message}`) 30 | process.exit(1) 31 | }) 32 | .then(({node, remote}) => { 33 | let init 34 | if (remote != null) { 35 | init = node.start() 36 | .then(() => node.openConnection(remote.remotePeerInfo)) 37 | .then(() => { console.log(`Connected to `, remotePeer) }) 38 | } else { 39 | console.log('No remote peer specified, running in detached mode') 40 | // TODO: create dummy RemoteNode class that just throws 41 | init = node.start() 42 | } 43 | 44 | // TODO: directory stuff 45 | if (node.directory == null) { 46 | // TODO: get directory from remote peer (and amend message below) 47 | console.log('No directory specified, running without directory') 48 | } 49 | 50 | init.then(() => { 51 | const repl = Repl.start({ 52 | 'prompt': 'א > ', 53 | 'useColors': true, 54 | 'ignoreUndefined': true 55 | }) 56 | repl.context.node = node 57 | repl.context.remote = remote 58 | const defaultEval = repl.eval 59 | repl.eval = promiseEval(defaultEval) 60 | repl.on('exit', () => { 61 | process.exit() 62 | }) 63 | }).catch(err => { 64 | console.log(err) 65 | }) 66 | }) 67 | } 68 | } 69 | 70 | const EMPTY = '(' + os.EOL + ')' 71 | const promiseEval = (defaultEval) => (cmd, context, filename, callback) => { 72 | if (cmd === EMPTY) return callback() 73 | defaultEval(cmd, context, filename, (err, result) => { 74 | if (err) { return callback(err) } 75 | 76 | if (result instanceof Promise) { 77 | result.then( 78 | asyncResult => { callback(null, asyncResult) }, 79 | asyncErr => { callback(asyncErr) } 80 | ) 81 | } else { 82 | callback(null, result) 83 | } 84 | }) 85 | } 86 | -------------------------------------------------------------------------------- /src/peer/repl/index.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const os = require('os') 4 | const path = require('path') 5 | const mkdirp = require('mkdirp') 6 | // TODO: for bonus points, specify this as option, too 7 | const home = path.join(os.homedir(), '.mediachain', 'aleph') 8 | 9 | function ensureDirExists (filePath: string, fileDescription = 'file') { 10 | const basedir = path.dirname(filePath) 11 | try { 12 | mkdirp.sync(basedir) 13 | } catch (err) { 14 | throw new Error( 15 | `Unable to create parent directory for ${fileDescription} at ${filePath}: ${err.message}`) 16 | } 17 | return filePath 18 | } 19 | 20 | require('yargs') 21 | .usage('Usage: $0 [options] [command-options]') 22 | .help() 23 | .version() 24 | .example('$0 repl', 'start the aleph repl') 25 | .demand(1, 'Missing command argument') 26 | .commandDir('commands') 27 | .options({ 28 | 'config': { 29 | // this is the only way to get useful config in subcommands 30 | 'config': true, 31 | 'global': true, 32 | 'default': path.join(home, 'config.json') 33 | }, 34 | 'identityPath': { 35 | 'global': true, 36 | 'default': path.join(home, 
'identity.node') 37 | }, 38 | 'remotePeer': { 39 | 'global': true, 40 | 'type': 'string', 41 | 'describe': 'The remote peer to pair with (multiaddress)' 42 | } 43 | }) 44 | .coerce('config', filePath => ensureDirExists(filePath, 'config file')) 45 | .coerce('identityPath', filePath => ensureDirExists(filePath, 'identity file')) 46 | .argv 47 | -------------------------------------------------------------------------------- /src/peer/repl/util.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const _ = require('lodash') 4 | const { MediachainNode, RemoteNode } = require('../node') 5 | const Identity = require('../identity') 6 | 7 | type BootstrapOpts = { 8 | identityPath: string, 9 | remotePeer?: string, 10 | dir?: string 11 | } 12 | 13 | type BootstrapResult = { 14 | node: MediachainNode, 15 | remote: ?RemoteNode 16 | } 17 | 18 | function bootstrap (opts: BootstrapOpts): Promise { 19 | const {identityPath, remotePeer, dir} = opts 20 | 21 | return Identity.loadOrGenerateIdentity(identityPath) 22 | .then(peerId => new MediachainNode({peerId})) 23 | .then(node => { 24 | let remote = null 25 | if (remotePeer != null) { 26 | const remotePeerInfo = Identity.inflateMultiaddr(remotePeer) 27 | remote = new RemoteNode(node, remotePeerInfo) 28 | } 29 | 30 | if (dir != null) { 31 | node.setDirectory(dir) 32 | } 33 | 34 | return { 35 | node, 36 | remote 37 | } 38 | }) 39 | } 40 | 41 | function binaryToB64 (result: Object): Object { 42 | const replacer = obj => { 43 | if (obj instanceof Buffer) { 44 | return obj.toString('base64') 45 | } 46 | } 47 | 48 | return (_.cloneDeepWith(result, replacer): any) 49 | } 50 | 51 | module.exports = { 52 | bootstrap, 53 | binaryToB64 54 | } 55 | -------------------------------------------------------------------------------- /src/protobuf/dir.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package proto; 3 | 4 | import "manifest.proto"; 5 | 6 | message PeerInfo { 7 | string id = 1; // peer id 8 | repeated bytes addr = 2; // peer multiaddrs 9 | } 10 | 11 | message PublisherInfo { 12 | string id = 1; // publisher id 13 | repeated string namespaces = 2; // namespaces in peer's store 14 | } 15 | 16 | // /mediachain/dir/register 17 | message RegisterPeer { 18 | PeerInfo info = 1; 19 | PublisherInfo publisher = 2; // optional (v1.4) 20 | repeated Manifest manifest = 3; // optional (v1.5) 21 | } 22 | 23 | // /mediachain/dir/lookup 24 | message LookupPeerRequest { 25 | string id = 1; 26 | } 27 | 28 | message LookupPeerResponse { 29 | PeerInfo peer = 1; // absent if peer not found 30 | } 31 | 32 | // /mediachain/dir/list 33 | message ListPeersRequest { 34 | string namespace = 1; // optional (v1.4); peers in namespace 35 | } 36 | 37 | message ListPeersResponse { 38 | repeated string peers = 1; 39 | } 40 | 41 | // /mediachain/dir/listns 42 | message ListNamespacesRequest {} 43 | 44 | message ListNamespacesResponse { 45 | repeated string namespaces = 1; 46 | } 47 | 48 | // /mediachain/dir/listmf 49 | message ListManifestRequest { 50 | string entity = 1; 51 | } 52 | 53 | message ListManifestResponse { 54 | repeated Manifest manifest = 1; 55 | } 56 | 57 | -------------------------------------------------------------------------------- /src/protobuf/manifest.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package proto; 3 | 4 | message Manifest { 5 | string entity = 1; 6 | string keyId = 2; 7 | 
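/* entity-specific payload; currently the only variant is NodeManifest (see ManifestBody below) */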
ManifestBody body = 3; 8 | int64 timestamp = 4; 9 | bytes signature = 5; 10 | } 11 | 12 | message ManifestBody { 13 | oneof body { 14 | NodeManifest node = 1; 15 | } 16 | } 17 | 18 | message NodeManifest { 19 | string peer = 1; 20 | string publisher = 2; 21 | } 22 | -------------------------------------------------------------------------------- /src/protobuf/node.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package proto; 3 | 4 | import "stmt.proto"; 5 | import "manifest.proto"; 6 | 7 | // end of result stream marker 8 | message StreamEnd {} 9 | 10 | // stream errors 11 | message StreamError { 12 | string error = 1; 13 | } 14 | 15 | // /mediachain/node/id 16 | message NodeInfoRequest {} 17 | 18 | message NodeInfo { 19 | string peer = 1; 20 | string publisher = 2; 21 | string info = 3; 22 | } 23 | 24 | // /mediachain/node/manifest 25 | message ManifestRequest {} 26 | 27 | message ManifestResponse { 28 | repeated Manifest manifest = 1; 29 | } 30 | 31 | // /mediachain/node/ping 32 | message Ping {} 33 | message Pong {} 34 | 35 | // /mediachain/node/query 36 | message QueryRequest { 37 | string query = 1; 38 | } 39 | 40 | message QueryResult { 41 | oneof result { 42 | QueryResultValue value = 1; 43 | StreamEnd end = 2; 44 | StreamError error = 3; 45 | } 46 | } 47 | 48 | message QueryResultValue { 49 | oneof value { 50 | SimpleValue simple = 1; 51 | CompoundValue compound = 2; 52 | } 53 | } 54 | 55 | message SimpleValue { 56 | oneof value { 57 | int64 intValue = 1; 58 | string stringValue = 2; 59 | Statement stmt = 3; 60 | StatementBody stmtBody = 4; 61 | } 62 | } 63 | 64 | message CompoundValue { 65 | repeated KeyValuePair body = 1; 66 | } 67 | 68 | message KeyValuePair { 69 | string key = 1; 70 | SimpleValue value = 2; 71 | } 72 | 73 | // /mediachain/node/data 74 | message DataRequest { 75 | repeated string keys = 1; 76 | } 77 | 78 | message DataResult { 79 | oneof result { 80 | DataObject data = 1; 81 | StreamEnd end = 2; 82 | StreamError error = 3; 83 | } 84 | } 85 | 86 | message DataObject { 87 | string key = 1; 88 | bytes data = 2; 89 | } 90 | 91 | // /mediachain/node/push 92 | message PushRequest { 93 | repeated string namespaces = 1; 94 | } 95 | 96 | message PushResponse { 97 | oneof body { 98 | PushAccept accept = 1; 99 | PushReject reject = 2; 100 | } 101 | } 102 | 103 | message PushAccept {} 104 | message PushReject { 105 | string error = 1; 106 | } 107 | 108 | message PushValue { 109 | oneof value { 110 | Statement stmt = 1; 111 | StreamEnd end = 2; 112 | } 113 | } 114 | 115 | message PushEnd { 116 | int64 statements = 1; 117 | int64 objects = 2; 118 | string error = 3; 119 | } 120 | -------------------------------------------------------------------------------- /src/protobuf/stmt.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package proto; 3 | 4 | message Statement { 5 | string id = 1; 6 | string publisher = 2; 7 | string namespace = 3; 8 | StatementBody body = 4; 9 | int64 timestamp = 5; 10 | bytes signature = 6; 11 | } 12 | 13 | message StatementBody { 14 | oneof body { 15 | SimpleStatement simple = 1; 16 | CompoundStatement compound = 2; 17 | EnvelopeStatement envelope = 3; 18 | ArchiveStatement archive = 4; 19 | } 20 | } 21 | 22 | message SimpleStatement { 23 | string object = 1; 24 | repeated string refs = 2; 25 | repeated string tags = 3; 26 | repeated string deps = 4; 27 | } 28 | 29 | message CompoundStatement { 30 | repeated 
SimpleStatement body = 1; 31 | } 32 | 33 | message EnvelopeStatement { 34 | repeated Statement body = 1; 35 | } 36 | 37 | message ArchiveStatement { 38 | 39 | } 40 | -------------------------------------------------------------------------------- /src/protobuf/types.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | // flow types for protobuf messages. resisting urge to write automatic flow code generator for these 3 | 4 | // dir.proto 5 | export type PeerInfoMsg = { 6 | id: string, 7 | addr: Array 8 | } 9 | 10 | export type PublisherInfoMsg = { 11 | id: string, 12 | namespaces: Array 13 | } 14 | 15 | export type RegisterPeerMsg = { 16 | info: PeerInfoMsg, 17 | publisher?: PublisherInfoMsg, 18 | manifest?: Array 19 | } 20 | 21 | export type LookupPeerRequestMsg = { 22 | id: string 23 | } 24 | 25 | export type LookupPeerResponseMsg = { 26 | peer: ?PeerInfoMsg 27 | } 28 | 29 | export type ListPeersRequestMsg = { 30 | namespace?: string 31 | } 32 | 33 | export type ListPeersResponseMsg = { 34 | peers: Array 35 | } 36 | 37 | export type ListManifestRequestMsg = { 38 | entity: string 39 | } 40 | 41 | export type ListManifestResponseMsg = { 42 | manifest: Array 43 | } 44 | 45 | // node.proto 46 | 47 | export type ManifestRequestMsg = {} 48 | 49 | export type ManifestResponseMsg = { 50 | manifest: Array 51 | } 52 | 53 | export type PingMsg = { } 54 | export type PongMsg = { } 55 | 56 | export type NodeInfoRequestMsg = { } 57 | 58 | export type NodeInfoMsg = { 59 | peer: string, 60 | publisher: string, 61 | info: string 62 | } 63 | 64 | export type DataRequestMsg = { 65 | keys: Array 66 | } 67 | 68 | export type DataResultMsg = { data: DataObjectMsg } | 69 | { end: StreamEndMsg } | 70 | { error: StreamErrorMsg } 71 | 72 | export type DataObjectMsg = { 73 | key: string, 74 | data: Buffer 75 | } 76 | 77 | export type PushRequestMsg = { 78 | namespaces: Array 79 | } 80 | 81 | export type PushAcceptMsg = {} 82 | export type PushRejectMsg = { 83 | error: string 84 | } 85 | 86 | export type PushResponseMsg = { accept: PushAcceptMsg } | {reject: PushRejectMsg } 87 | export type PushValueMsg = {stmt: StatementMsg} | {end: StreamEndMsg} 88 | export type PushEndMsg = { 89 | statements: number, 90 | objects: number, 91 | error: string 92 | } 93 | 94 | export type QueryRequestMsg = { 95 | query: string 96 | } 97 | 98 | export type QueryResultMsg = { value: QueryResultValueMsg } | 99 | { end: StreamEndMsg } | 100 | { error: StreamErrorMsg } 101 | 102 | export type QueryResultValueMsg = { simple: SimpleValueMsg } | { compound: CompoundValueMsg } 103 | 104 | export type SimpleValueMsg = { intValue: number } | 105 | { stringValue: string } | 106 | { stmt: StatementMsg } | 107 | { stmtBody: StatementBodyMsg } 108 | 109 | export type CompoundValueMsg = { 110 | body: Array 111 | } 112 | 113 | export type KeyValuePairMsg = { 114 | key: string, 115 | value: SimpleValueMsg 116 | } 117 | 118 | export type StreamErrorMsg = { 119 | error: string 120 | } 121 | 122 | export type StreamEndMsg = { 123 | 124 | } 125 | 126 | // stmt.proto 127 | 128 | export type SimpleStatementMsg = { 129 | object: string, 130 | refs?: Array, 131 | tags?: Array, 132 | deps?: Array 133 | }; 134 | 135 | export type CompoundStatementMsg = { 136 | body: Array 137 | }; 138 | 139 | export type EnvelopeStatementMsg = { 140 | body: Array 141 | } 142 | 143 | export type ArchiveStatementMsg = { 144 | 145 | } 146 | 147 | export type StatementBodyMsg = { simple: SimpleStatementMsg } | 148 | { compound: 
CompoundStatementMsg } | 149 | { envelope: EnvelopeStatementMsg } | 150 | { archive: ArchiveStatementMsg } 151 | 152 | export type StatementMsg = { 153 | id: string, 154 | publisher: string, 155 | namespace: string, 156 | body: StatementBodyMsg, 157 | timestamp: number, 158 | signature?: Buffer, 159 | }; 160 | 161 | // manifest.proto 162 | // message Manifest { 163 | // string entity = 1; 164 | // string keyId = 2; 165 | // ManifestBody body = 3; 166 | // int64 timestamp = 4; 167 | // bytes signature = 5; 168 | // } 169 | export type ManifestMsg = { 170 | entity: string, 171 | keyId: string, 172 | body: ManifestBodyMsg, 173 | timestamp: number, 174 | signature: Buffer 175 | } 176 | 177 | export type ManifestBodyMsg = {node: NodeManifestMsg} 178 | 179 | export type NodeManifestMsg = { 180 | peer: string, 181 | publisher: string 182 | } 183 | 184 | export type ProtoCodec = { encode: (obj: T) => Buffer, decode: (buf: Buffer) => T } 185 | -------------------------------------------------------------------------------- /test/common/util_test.js: -------------------------------------------------------------------------------- 1 | const {assert, expect} = require('chai') 2 | const { before, after, describe, it } = require('mocha') 3 | const util = require('../../src/common/util') 4 | const stdMocks = require('std-mocks') 5 | const { PassThrough } = require('stream') 6 | 7 | describe('Multihash helpers', () => { 8 | it('isB58Multihash returns true for valid multihash', () => { 9 | assert(util.isB58Multihash('QmNLftPEMzsadpbTsGaVP3haETYJb4GfnCgQiaFj5Red9G') === true) 10 | }) 11 | 12 | it('isB58Multihash returns false for non-multihash', () => { 13 | assert(util.isB58Multihash('QmF00123456789') === false) 14 | assert(util.isB58Multihash('foo') === false) 15 | }) 16 | }) 17 | 18 | describe('Stream functions', () => { 19 | before(() => { 20 | stdMocks.use() 21 | }) 22 | 23 | after(() => { 24 | stdMocks.restore() 25 | }) 26 | 27 | it('println and friends', () => { 28 | util.println('Hello world') 29 | util.printlnErr('Oh no!') 30 | const output = stdMocks.flush() 31 | assert.deepEqual(output.stdout, ['Hello world\n']) 32 | assert.deepEqual(output.stderr, ['Oh no!\n']) 33 | }) 34 | 35 | it('consumeStream', () => { 36 | const stream = new PassThrough() 37 | util.writeln('Line 1', stream) 38 | util.writeln('Line 2', stream) 39 | stream.end() 40 | return util.consumeStream(stream) 41 | .then(contents => { 42 | assert.equal(contents, 'Line 1\nLine 2\n') 43 | }) 44 | }) 45 | 46 | it('consumeStream error', () => { 47 | const stream = new PassThrough() 48 | // need to call consumeStream before emitting the error, or it won't be caught 49 | const promise = util.consumeStream(stream) 50 | .catch(err => { 51 | assert.equal(err.message, 'Something went wrong') 52 | }) 53 | util.writeln('Hello', stream) 54 | stream.emit('error', new Error('Something went wrong')) 55 | return promise 56 | }) 57 | }) 58 | 59 | describe('Misc utils', () => { 60 | it('setEquals', () => { 61 | const {setEquals} = util 62 | expect(setEquals(new Set(['a', 'b', 'c']), new Set(['b', 'c', 'a']))) 63 | .to.be.true 64 | 65 | expect(setEquals(new Set(['a', 'b', 'c']), new Set(['a', 'b']))) 66 | .to.be.false 67 | 68 | expect(setEquals(new Set(['a', 'b', 'c']), new Set(['a', 'b', 'z']))) 69 | .to.be.false 70 | }) 71 | 72 | it('stringifyNestedBuffers', () => { 73 | const obj = { 74 | foo: { 75 | bar: Buffer.from('Hello World') 76 | } 77 | } 78 | 79 | const expected = { 80 | foo: { 81 | bar: Buffer.from('Hello World').toString('base64') 82 | } 83 | 
} 84 | 85 | expect(util.stringifyNestedBuffers(obj)).to.deep.eql(expected) 86 | }) 87 | }) 88 | -------------------------------------------------------------------------------- /test/config.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const testConfig = { 4 | secureIOEnabled: false 5 | } 6 | 7 | if (process.env.ALEPH_TEST_USE_SECIO !== undefined) { 8 | testConfig.secureIOEnabled = true 9 | } 10 | 11 | module.exports = testConfig 12 | -------------------------------------------------------------------------------- /test/metadata/signature_test.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | // eslint-env mocha 3 | 4 | const assert = require('assert') 5 | const { before, describe, it } = require('mocha') 6 | const path = require('path') 7 | 8 | const { Statement, SignedStatement } = require('../../src/model/statement') 9 | const { PublisherId, PrivateSigningKey } = require('../../src/peer/identity') 10 | 11 | const CONCAT_PUBLISHER_ID_PUB58 = '4XTTM4JKrrBeAK6qXmo8FoKmT5RkfjeXfZrnWjJNw9fKvPnEs' 12 | const CONCAT_PUBLISHER_ID_PATH = path.join(__dirname, '..', 'resources', 'publisher_ids', 'concat', 13 | `${CONCAT_PUBLISHER_ID_PUB58}.privateKey`) 14 | const CONCAT_MESSAGE_FIXTURES = require('./../resources/fixtures/concat-message-signature') 15 | 16 | describe('Signature verification', () => { 17 | let publisherId 18 | 19 | before(() => 20 | PublisherId.generate() 21 | .then(_pubId => { publisherId = _pubId }) 22 | ) 23 | 24 | it('signs and validates a buffer', () => { 25 | const msg = Buffer.from(`You can get anything you want, at Alice's Restaurant`) 26 | return publisherId.sign(msg) 27 | .then(sig => publisherId.verify(msg, sig)) 28 | .then(result => { 29 | assert(result === true, 'signature did not validate') 30 | }) 31 | }) 32 | 33 | it('does not validate a modified buffer', () => { 34 | const msg = Buffer.from(`Launch code: 0000`) 35 | return publisherId.sign(msg) 36 | .then(sig => publisherId.verify(Buffer.from('Launch code: 0001'), sig)) 37 | .then(result => { 38 | assert(result === false, 'signature validated an invalid message') 39 | }) 40 | }) 41 | 42 | it('validates a statement made with the createSimple helper', () => { 43 | return SignedStatement.createSimple(publisherId, 'scratch.sig-test', {object: 'QmF00123', refs: []}) 44 | .then(stmt => stmt.verifySignature()) 45 | .then(valid => { 46 | assert(valid, 'statement did not validate') 47 | }) 48 | }) 49 | 50 | it('signs and validates a manually-constructed statement', () => { 51 | const unsigned = Statement.fromProtobuf({ 52 | id: 'foo', 53 | publisher: publisherId.id58, 54 | namespace: 'scratch.sig-test', 55 | timestamp: Date.now(), 56 | body: {simple: {object: 'QmF00123', refs: [], deps: [], tags: []}} 57 | }).asUnsignedStatement() 58 | return unsigned.sign(publisherId) 59 | .then(signed => signed.verifySignature()) 60 | .then(valid => { 61 | assert(valid, 'statement did not validate') 62 | }) 63 | }) 64 | 65 | it('does not validate an altered statement', () => { 66 | return SignedStatement.createSimple(publisherId, 'scratch.sig-test', {object: 'QmF00123', refs: []}) 67 | .then(stmt => { 68 | stmt.namespace = 'scratch.new-namespace' 69 | return stmt 70 | }) 71 | .then(altered => altered.verifySignature()) 72 | .then(valid => { 73 | assert(!valid, 'incorrectly validated an altered statement') 74 | }) 75 | }) 76 | 77 | it('loads a publisher id from a file generated by concat', () => { 78 | return
PublisherId.load(CONCAT_PUBLISHER_ID_PATH) 79 | .then(id => { 80 | assert.notEqual(id, null, 'publisher id did not load') 81 | assert.equal(CONCAT_PUBLISHER_ID_PUB58, id.privateKey.publicKey.toB58String(), 82 | 'base58-encoded public key does not match fixture') 83 | }) 84 | }) 85 | 86 | it('generates the same signature as concat using the same publisher id', () => { 87 | let privateKey 88 | return PrivateSigningKey.fromB58String(CONCAT_MESSAGE_FIXTURES.privateKeyB58) 89 | .then(key => { 90 | privateKey = key 91 | assert.equal(privateKey.publicKey.toB58String(), CONCAT_MESSAGE_FIXTURES.publisherIdB58, 92 | 'base58-encoded public key does not match fixture') 93 | 94 | return privateKey.sign(CONCAT_MESSAGE_FIXTURES.message) 95 | }) 96 | .then(sig => { 97 | assert.deepEqual(sig, CONCAT_MESSAGE_FIXTURES.signature, 98 | 'signatures did not match') 99 | }) 100 | }) 101 | }) 102 | -------------------------------------------------------------------------------- /test/model/query_result_test.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const { assert, expect } = require('chai') 4 | const { describe, it } = require('mocha') 5 | const { 6 | unpackQueryResultProtobuf, 7 | unpackQueryResultValueProtobuf, 8 | CompoundQueryResultValue 9 | } = require('../../src/model/query_result') 10 | 11 | const { Statement, StatementBody } = require('../../src/model/statement') 12 | 13 | const statementFixtures = require('../resources/fixtures/test-statements') 14 | 15 | describe('Query Result unpacking', () => { 16 | it('Unpacks simple values', () => { 17 | expect(unpackQueryResultValueProtobuf({simple: {stringValue: 'hello'}})) 18 | .to.be.eql('hello') 19 | 20 | expect(unpackQueryResultValueProtobuf({simple: {intValue: 42}})) 21 | .to.be.eql(42) 22 | 23 | const stmt = statementFixtures.statements.simple[0] 24 | const unpackedStmt = unpackQueryResultValueProtobuf({simple: {stmt}}) 25 | if (!(unpackedStmt instanceof Statement)) { 26 | assert(false, 'statement query result should unpack to Statement object') 27 | return 28 | } 29 | expect(unpackedStmt.toProtobuf()).to.deep.eql(stmt) 30 | 31 | const stmtBody = stmt.body 32 | const unpackedBody = unpackQueryResultValueProtobuf({simple: {stmtBody}}) 33 | if (!(unpackedBody instanceof StatementBody)) { 34 | assert(false, 'statement body query result should unpack to StatementBody object') 35 | } 36 | 37 | let invalid: any = {simple: {fooValue: 42}} 38 | expect(() => unpackQueryResultValueProtobuf(invalid)) 39 | .to.throw('Unexpected query result value') 40 | 41 | invalid = {amazing: {intValue: 42}} 42 | expect(() => unpackQueryResultValueProtobuf(invalid)) 43 | .to.throw('Unexpected query result value') 44 | }) 45 | 46 | it('Unpacks compound values and query errors', () => { 47 | const err: any = unpackQueryResultProtobuf({error: {error: 'Oh no!'}}) 48 | expect(err).to.be.an.instanceof(Error) 49 | expect(err.message).to.be.eql('Oh no!') 50 | 51 | const errNoMessage: any = unpackQueryResultProtobuf(({error: {}}: any)) 52 | expect(errNoMessage).to.be.an.instanceof(Error) 53 | expect(errNoMessage.message).to.be.eql('Unknown error') 54 | }) 55 | 56 | const stmt = statementFixtures.statements.simple[0] 57 | const compoundResult = { 58 | value: { 59 | compound: { 60 | body: [ 61 | {key: 'foo', value: {stringValue: 'bar'}}, 62 | {key: 'The Answer', value: {intValue: 42}}, 63 | {key: 'statement', value: {stmt}} 64 | ] 65 | } 66 | } 67 | } 68 | 69 | const unpackedCompound = unpackQueryResultProtobuf(compoundResult) 70 | 
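/* [Ed. note] Per the assertions below, a protobuf compound result is expected to unpack into a CompoundQueryResultValue whose keys() and values() mirror the key/value pairs in compound.body, with any embedded statement messages hydrated into Statement instances. */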
if (!(unpackedCompound instanceof CompoundQueryResultValue)) { 71 | assert(false, 'compound query results should unpack to CompoundQueryResultValue objects') 72 | return 73 | } 74 | expect(unpackedCompound.statements()).to.have.length(1) 75 | expect(unpackedCompound.keys()).to.deep.eql(['foo', 'The Answer', 'statement']) 76 | expect(unpackedCompound.values()).to.include('bar', 42) 77 | 78 | const invalid: any = {foo: 'bar'} 79 | expect(() => unpackQueryResultProtobuf(invalid)) 80 | .to.throw('Unexpected query result') 81 | }) 82 | -------------------------------------------------------------------------------- /test/peer/datastore_test.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const assert = require('assert') 4 | const { before, describe, it } = require('mocha') 5 | const { makeNode } = require('../util') 6 | const { zip } = require('lodash') 7 | const { encode } = require('../../src/metadata/serialize') 8 | 9 | const SEED_OBJECTS = [ 10 | {'foo': 'bar'}, 11 | {'hello': 'world'} 12 | ] 13 | 14 | describe('Datastore', function () { 15 | let p1, p2 16 | 17 | let expectedMessages = [] 18 | let expectedKeys = [] 19 | before(() => makeNode().then(_p1 => { p1 = _p1 }) 20 | .then(() => makeNode().then(_p2 => { p2 = _p2 })) 21 | .then(() => p1.putData(...SEED_OBJECTS)) 22 | .then(keys => { 23 | expectedKeys = keys 24 | const kvs = zip(keys, SEED_OBJECTS.map(encode)) 25 | expectedMessages = kvs.map(([key, data]) => ({ 26 | key, 27 | data 28 | })) 29 | })) 30 | 31 | it(`fetches data from another node's datastore`, () => { 32 | return Promise.all([p1.start(), p2.start()]) // start both peers 33 | .then(() => p2.remoteData(p1.peerInfo, expectedKeys)) 34 | .then(results => { 35 | assert.equal(results.length, expectedMessages.length) 36 | for (let i = 0; i < results.length; i++) { 37 | const res = results[i] 38 | const expected = expectedMessages[i] 39 | assert.equal(res.key, expected.key) 40 | assert(res.data.equals(expected.data)) 41 | } 42 | }) 43 | }) 44 | 45 | it('returns an error for non-existent keys', () => { 46 | return Promise.all([p1.start(), p2.start()]) 47 | .then(() => p2.remoteData(p1.peerInfo, ['QmNLftPEMzsadpbTsGaVP3haETYJb4GfnCgQiaFj5Red9G'])) 48 | .catch(err => { 49 | assert(typeof err.message === 'string') 50 | }) 51 | }) 52 | }) 53 | -------------------------------------------------------------------------------- /test/peer/directory_test.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | const chai = require('chai') 3 | chai.use(require('chai-as-promised')) 4 | const { assert, expect } = chai 5 | const { describe, it, before, afterEach } = require('mocha') 6 | const eventually = require('mocha-eventually') 7 | const PeerInfo = require('peer-info') 8 | 9 | const { makeNode, makeDirectory } = require('../util') 10 | 11 | describe('Directory Node', function () { 12 | let dir, node, nodeIdB58 13 | 14 | before(() => makeDirectory().then(_dir => { dir = _dir }) 15 | .then(() => makeNode()) 16 | .then(_node => { 17 | node = _node 18 | node.setDirectory(dir.peerInfo) 19 | nodeIdB58 = node.peerInfo.id.toB58String() 20 | }) 21 | .then(() => Promise.all([node.start(), dir.start()])) 22 | ) 23 | 24 | afterEach(() => { 25 | dir.peerBook.removeByB58String(nodeIdB58) 26 | }) 27 | 28 | it('adds a node to its registry in response to a register message', function () { 29 | // verify that the peer is not registered before the call 30 | 
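/* [Ed. note] Registration is asynchronous on the directory side: register() sends the message, and the directory adds the node's PeerInfo to its peerBook some time later, which is why the post-registration assertions below are wrapped in mocha-eventually. */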
expect(dir.getPeerInfo(nodeIdB58)).to.be.null 31 | 32 | return node.register() 33 | .then(() => eventually((done) => { 34 | const result = dir.getPeerInfo(nodeIdB58) 35 | expect(result).to.not.be.null 36 | expect(result).to.be.an.instanceof(PeerInfo) 37 | expect(result.id.toB58String()).to.be.eql(nodeIdB58) 38 | done() 39 | })) 40 | }) 41 | 42 | it('node can set directory with string multiaddr', () => { 43 | const dirInfo = dir.p2p.peerInfo 44 | const dirId = dirInfo.id 45 | const addrString = dirInfo.multiaddrs[0].toString() + '/p2p/' + dirId.toB58String() 46 | expect(() => 47 | node.setDirectory(addrString) 48 | ).to.not.throw() 49 | 50 | node.setDirectory(dirInfo) 51 | }) 52 | 53 | it('responds to lookup requests for known peers', function () { 54 | // just stuff the node's id into the directory manually 55 | dir.peerBook.put(node.peerInfo) 56 | 57 | return node.lookup(nodeIdB58) 58 | .then(peerInfo => { 59 | assert(peerInfo != null) 60 | assert.equal(peerInfo.id.toB58String(), nodeIdB58) 61 | }) 62 | }) 63 | 64 | it('node throws during lookup & register if no directory is set', () => 65 | Promise.resolve() 66 | .then(() => { node.directory = null }) 67 | .then(() => 68 | Promise.all([ 69 | expect(node.lookup(nodeIdB58)) 70 | .to.eventually.be.rejectedWith('No known directory server'), 71 | expect(node.register()) 72 | .to.eventually.be.rejectedWith('No known directory server') 73 | ])) 74 | .then(() => { node.setDirectory(dir.peerInfo) }) 75 | ) 76 | 77 | it('node throws if asked to lookup an invalid string or other bogus input', () => 78 | Promise.all([ 79 | expect(node.lookup('foo')) 80 | .to.eventually.be.rejectedWith('not a valid multihash'), 81 | expect(node.lookup(42)) 82 | .to.eventually.be.rejectedWith('invalid input') 83 | ]) 84 | ) 85 | 86 | it('can lookup by string or PeerId', () => { 87 | dir.peerBook.put(node.peerInfo) 88 | return expect(node.lookup(node.peerInfo.id)) 89 | .to.eventually.be.an.instanceof(PeerInfo) 90 | }) 91 | 92 | it('internal _resolvePeer method accepts PeerInfo, multiaddr string', () => { 93 | dir.peerBook.put(node.peerInfo) 94 | 95 | return Promise.all([ 96 | expect(node._resolvePeer(node.peerInfo)) 97 | .to.eventually.eql(node.peerInfo), 98 | 99 | expect(node._resolvePeer(nodeIdB58)) 100 | .to.eventually.be.an.instanceof(PeerInfo), 101 | 102 | expect(node._resolvePeer('/ip4/127.0.0.1/tcp/1234/p2p/QmZvvcVA8t5qrM5DeQ8xM6PK18qzCYxseYNtaqauhSc4Na')) 103 | .to.eventually.be.an.instanceof(PeerInfo), 104 | 105 | expect(node._resolvePeer('/ip4/not-a-real-multiaddr')) 106 | .to.eventually.be.rejectedWith('not a valid multiaddr'), 107 | 108 | expect(node._resolvePeer('QmZvvcVA8t5qrM5DeQ8xM6PK18qzCYxseYNtaqauhSc4Na')) 109 | .to.eventually.be.rejectedWith('Unable to locate peer') 110 | ]) 111 | } 112 | ) 113 | }) 114 | -------------------------------------------------------------------------------- /test/peer/identity_test.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const chai = require('chai') 4 | chai.use(require('chai-as-promised')) 5 | 6 | const { expect } = chai 7 | const { describe, it, before } = require('mocha') 8 | const temp = require('temp').track() 9 | 10 | const Id = require('../../src/peer/identity') 11 | const PeerId = require('peer-id') 12 | const PeerInfo = require('peer-info') 13 | 14 | describe('Peer Identity tools', () => { 15 | let peerId 16 | before(() => Id.generateIdentity() 17 | .then(id => { peerId = id }) 18 | .then(() => expect(peerId).to.be.an.instanceof(PeerId)) 19 | ) 20 
| 21 | it('loads and saves a PeerId to disk', () => { 22 | const tmpPath = temp.path() 23 | Id.saveIdentity(peerId, tmpPath) 24 | return expect( 25 | Id.loadIdentity(tmpPath).then(loadedId => loadedId.toB58String()) 26 | ).to.eventually.be.eql(peerId.toB58String()) 27 | }) 28 | 29 | it('throws when saving a PeerId without a private key', () => { 30 | const id = PeerId.createFromB58String('QmZvvcVA8t5qrM5DeQ8xM6PK18qzCYxseYNtaqauhSc4Nw') 31 | expect(() => Id.saveIdentity(id, temp.path())) 32 | .to.throw(Error) 33 | }) 34 | 35 | it('loadOrGenerateIdentity', () => Promise.all([ 36 | expect(Id.loadOrGenerateIdentity(temp.path())) 37 | .to.eventually.be.an.instanceof(PeerId), 38 | 39 | expect(Id.loadOrGenerateIdentity('/this-dir-probably-doesnt-exist/foo.id')) 40 | .to.eventually.be.rejectedWith('the containing directory does not exist') 41 | ])) 42 | 43 | it('inflateMultiaddr', () => { 44 | expect(Id.inflateMultiaddr('/ip4/127.0.0.1/tcp/1234/p2p/QmZvvcVA8t5qrM5DeQ8xM6PK18qzCYxseYNtaqauhSc4Nw')) 45 | .to.be.an.instanceof(PeerInfo) 46 | 47 | expect(() => { Id.inflateMultiaddr('/ip4/127.0.0.1/tcp/1234') }) 48 | .to.throw('must contain /p2p/ or /ipfs/ protocol') 49 | }) 50 | }) 51 | -------------------------------------------------------------------------------- /test/peer/libp2p_node_test.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const chai = require('chai') 4 | chai.use(require('chai-as-promised')) 5 | const { expect } = chai 6 | const { before, describe, it } = require('mocha') 7 | const { getTestNodeId } = require('../util') 8 | const PeerInfo = require('peer-info') 9 | const Multiaddr = require('multiaddr') 10 | const P2PNode = require('../../src/peer/libp2p_node') 11 | 12 | describe('LibP2P Node base class', () => { 13 | let id1, id2 14 | 15 | before(() => Promise.all([ 16 | getTestNodeId().then(_id1 => { id1 = _id1 }), 17 | getTestNodeId().then(_id2 => { id2 = _id2 }) 18 | ] 19 | )) 20 | 21 | it('works with websockets', () => { 22 | const info1 = new PeerInfo(id1) 23 | const info2 = new PeerInfo(id2) 24 | info1.multiaddr.add(Multiaddr('/ip4/127.0.0.1/tcp/9090/ws')) 25 | info2.multiaddr.add(Multiaddr('/ip4/127.0.0.1/tcp/9091/ws')) 26 | 27 | const node1 = new P2PNode({peerInfo: info1}) 28 | const node2 = new P2PNode({peerInfo: info2}) 29 | return Promise.all([node1.start(), node2.start()]) 30 | .then(() => node1.ping(node2.peerInfo)) 31 | .then(result => { 32 | expect(result).to.exist 33 | }) 34 | .then(() => Promise.all([node1.stop(), node2.stop()])) 35 | }) 36 | 37 | // note: this test fails if you use raw TCP transport unless you set 38 | // a high timeout (more than ~3 seconds), but succeeds 39 | // if you use websockets. The problem seems to be in libp2p-swarm's 40 | // `.close()` method, which takes a while to shut down if you've opened 41 | // any TCP connections, even if you've since hung up to the peers you 42 | // dialed. 
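/* [Ed. note] This is also why the tests in this file attach websocket ("/ws") multiaddrs instead of raw TCP ones: swarm shutdown is prompt over websockets, keeping the suite fast. */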
43 | it('fails if you try to dial/hangup when not online', () => { 44 | const info1 = new PeerInfo(id1) 45 | const info2 = new PeerInfo(id2) 46 | info1.multiaddr.add(Multiaddr('/ip4/127.0.0.1/tcp/9090/ws')) 47 | info2.multiaddr.add(Multiaddr('/ip4/127.0.0.1/tcp/9091/ws')) 48 | 49 | const node1 = new P2PNode({peerInfo: info1}) 50 | const node2 = new P2PNode({peerInfo: info2}) 51 | node2.handle('foo-protocol', (_proto, conn) => { }) 52 | 53 | return Promise.all([ 54 | expect(node1.dialByPeerInfo(info2, 'foo-protocol')) 55 | .to.eventually.be.rejectedWith('The libp2p node is not started yet'), 56 | expect(node1.hangUpByPeerInfo(info2)) 57 | .to.eventually.be.rejectedWith('The libp2p node is not started yet'), 58 | expect( 59 | Promise.all([node1.start(), node2.start()]) 60 | .then(() => node1.dialByPeerInfo(info2, 'foo-protocol')) 61 | .then(() => node1.hangUpByPeerInfo(info2)) 62 | .then(() => Promise.all([node1.stop(), node2.stop()])) 63 | ).to.eventually.be.fulfilled 64 | ]) 65 | }) 66 | 67 | it('start/stop are idempotent', () => { 68 | const info = new PeerInfo(id1) 69 | info.multiaddr.add(Multiaddr('/ip4/127.0.0.1/tcp/9094')) 70 | 71 | const node = new P2PNode({peerInfo: info}) 72 | expect(node.isOnline).to.eql(false) 73 | return node.start() 74 | .then(() => { 75 | expect(node.isOnline).to.eql(true) 76 | return node.start() 77 | }) 78 | .then(() => node.stop()) 79 | .then(() => { 80 | expect(node.isOnline).to.eql(false) 81 | node.stop() 82 | }) 83 | }) 84 | 85 | it('aborts long-lived listeners on stop', () => { 86 | const info = new PeerInfo(id1) 87 | info.multiaddr.add(Multiaddr('/ip4/127.0.0.1/tcp/9095')) 88 | 89 | const node = new P2PNode({peerInfo: info}) 90 | const abortable = node.newAbortable() 91 | let aborted = false 92 | abortable((end, cb) => { 93 | aborted = end 94 | cb(end) 95 | }) 96 | expect(node.abortables.size).to.be.eql(1) 97 | 98 | return node.start() 99 | .then(() => node.stop()) 100 | .then(() => { 101 | expect(node.abortables.size).to.be.eql(0) 102 | expect(aborted).to.be.eql(true) 103 | }) 104 | }) 105 | }) 106 | -------------------------------------------------------------------------------- /test/peer/merge_test.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const assert = require('assert') 4 | const { before, describe, it } = require('mocha') 5 | 6 | const uuid = require('uuid') 7 | 8 | const { makeNode, mockQueryHandler } = require('../util') 9 | const { PROTOCOLS } = require('../../src/peer/constants') 10 | const { b58MultihashForBuffer } = require('../../src/common/util') 11 | const { Statement, SignedStatement } = require('../../src/model/statement') 12 | const serialize = require('../../src/metadata/serialize') 13 | const { PublisherId } = require('../../src/peer/identity') 14 | 15 | import type { QueryResultMsg } from '../../src/protobuf/types' 16 | 17 | const TEST_NAMESPACE = 'scratch.merge-test' 18 | 19 | const SEED_OBJECT_BUFFERS = [ 20 | {id: uuid.v4(), foo: 'bar'}, 21 | {id: uuid.v4(), test: 'yep'} 22 | ].map(obj => serialize.encode(obj)) 23 | 24 | function makeSeedStatements (publisherId: PublisherId, seedObjectBuffers: Array<Buffer>): Promise<Array<SignedStatement>> { 25 | return Promise.all( 26 | seedObjectBuffers.map((buf, idx) => { 27 | const object = b58MultihashForBuffer(buf) 28 | return SignedStatement.createSimple(publisherId, TEST_NAMESPACE, {object, refs: [`merge-test:${idx.toString()}`]}, idx) 29 | }) 30 | ) 31 | } 32 | 33 | function mockQueryResults (statements: Array<SignedStatement>): Array<QueryResultMsg> { 34 | const results: Array<QueryResultMsg> =
statements.map(stmt => { 35 | return { value: { simple: { stmt: stmt.toProtobuf() } } } 36 | }) 37 | 38 | return [...results, {end: {}}] 39 | } 40 | 41 | describe('Merge', () => { 42 | let alephNode 43 | let mockSource 44 | let publisherId 45 | let seedStatements 46 | 47 | before(() => 48 | makeNode() 49 | .then(node => { alephNode = node }) 50 | .then(() => PublisherId.generate()) 51 | .then(pubId => { publisherId = pubId }) 52 | .then(() => makeSeedStatements(publisherId, SEED_OBJECT_BUFFERS)) 53 | .then(statements => { seedStatements = statements }) 54 | .then(() => makeNode()) 55 | .then(mockNode => { 56 | mockSource = mockNode 57 | mockSource.p2p.unhandle(PROTOCOLS.node.query) 58 | mockSource.p2p.handle(PROTOCOLS.node.query, mockQueryHandler(mockQueryResults(seedStatements))) 59 | return mockNode.putData(...SEED_OBJECT_BUFFERS) 60 | }) 61 | ) 62 | 63 | it('adds statements and objects from a remote source', () => 64 | alephNode.start() 65 | .then(() => mockSource.start()) 66 | .then(() => alephNode.merge(mockSource.peerInfo, `SELECT * FROM ${TEST_NAMESPACE}`)) 67 | .then(result => { 68 | assert.notEqual(result, null, 'merge did not return a result') 69 | assert.equal(result.statementCount, seedStatements.length, 'merged an unexpected number of statements') 70 | assert.equal(result.objectCount, SEED_OBJECT_BUFFERS.length, 'merged an unexpected number of objects') 71 | }) 72 | .catch(err => { 73 | console.error('error during merge test: ', err) 74 | throw err 75 | }) 76 | ) 77 | }) 78 | -------------------------------------------------------------------------------- /test/peer/node_info_test.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | 3 | const assert = require('assert') 4 | const { before, describe, it } = require('mocha') 5 | const { makeNode } = require('../util') 6 | const { PublisherId } = require('../../src/peer/identity') 7 | 8 | describe('Node Info', function () { 9 | const infoMessage = 'tests are great!' 
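/* [Ed. note] Per the assertions below, a node-info response carries the remote peer id (peer), the configured info message (info), and, when the remote node has one, its publisher id (publisher). */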
10 | 11 | let p1, p2 12 | before(() => PublisherId.generate() 13 | .then(publisherId => Promise.all([ 14 | makeNode({listenAddresses: ['/ip4/127.0.0.1/tcp/9090/ws']}).then(_p1 => { p1 = _p1 }), 15 | makeNode({publisherId, listenAddresses: ['/ip4/127.0.0.1/tcp/9091/ws']}).then(_p2 => { p2 = _p2; p2.setInfoMessage(infoMessage) }) 16 | ])) 17 | ) 18 | 19 | it('retrieves the ids and info message from another node', () => { 20 | return Promise.all([p1.start(), p2.start()]) // start both peers 21 | .then(() => p1.remoteNodeInfo(p2.peerInfo)) 22 | .then(result => { 23 | assert.equal(result.peer, p2.peerInfo.id.toB58String(), 24 | 'node info response should include correct peer id') 25 | assert.equal(result.info, infoMessage, 26 | 'node info response should include correct info message') 27 | assert.equal(result.publisher, p2.publisherId.id58, 28 | 'if remote node has a publisher id, it should be included') 29 | }) 30 | .then(() => Promise.all([p1.stop(), p2.stop()])) 31 | }) 32 | }) 33 | -------------------------------------------------------------------------------- /test/peer/ping_test.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | 3 | const assert = require('assert') 4 | const { before, describe, it } = require('mocha') 5 | const { getTestNodeId, makeNode } = require('../util') 6 | const PeerInfo = require('peer-info') 7 | const Multiaddr = require('multiaddr') 8 | const Ping = require('libp2p-ping') 9 | 10 | describe('Ping', function () { 11 | let p1, p2, p3, invalidPeer 12 | 13 | before(() => Promise.all([ 14 | makeNode().then(_p1 => { p1 = _p1 }), 15 | makeNode().then(_p2 => { p2 = _p2 }), 16 | makeNode().then(_p3 => { p3 = _p3 }), 17 | getTestNodeId().then(id => { 18 | invalidPeer = PeerInfo(id) 19 | invalidPeer.multiaddr.add(Multiaddr('/ip4/1.2.3.4/tcp/4321')) 20 | }) 21 | ])) 22 | 23 | it('pings another node directly by PeerInfo', () => { 24 | return Promise.all([p1.start(), p2.start()]) // start both peers 25 | .then(() => p1.ping(p2.peerInfo)) 26 | .then(result => assert(result != null)) 27 | }) 28 | 29 | it('uses the libp2p-ping protocol (if possible)', () => { 30 | return Promise.all([p1.start(), p2.start()]) // start both peers 31 | .then(() => p1.p2p.ping(p2.peerInfo)) 32 | .then(result => assert.equal(typeof result, 'number', 'libp2p-ping should return latency in ms')) 33 | }) 34 | 35 | it('falls back to mediachain ping if libp2p-ping fails', () => { 36 | Ping.unmount(p3.p2p.swarm) 37 | return Promise.all([p1.start(), p3.start()]) 38 | .then(() => p2.ping(p3.peerInfo)) 39 | .then(result => { 40 | assert(result != null) 41 | }) 42 | }) 43 | 44 | it('fails to ping a non-existent node', () => { 45 | p1.p2p.dialTimeout = 20 46 | return p1.start() 47 | .then(() => p1.ping(invalidPeer)) 48 | .catch(err => { 49 | assert(err != null) 50 | }) 51 | }) 52 | }) 53 | -------------------------------------------------------------------------------- /test/peer/push_test.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const chai = require('chai') 4 | chai.use(require('chai-as-promised')) 5 | 6 | const { expect } = chai 7 | const { before, describe, it } = require('mocha') 8 | 9 | const uuid = require('uuid') 10 | 11 | const { makeNode, mockPushHandler } = require('../util') 12 | const { PROTOCOLS } = require('../../src/peer/constants') 13 | const { b58MultihashForBuffer } = require('../../src/common/util') 14 | const { Statement, SignedStatement } = 
require('../../src/model/statement') 15 | const serialize = require('../../src/metadata/serialize') 16 | const { PublisherId } = require('../../src/peer/identity') 17 | 18 | const TEST_NAMESPACE = 'scratch.push-test' 19 | const SEED_OBJECT_BUFFERS = [ 20 | {id: uuid.v4(), foo: 'bar'}, 21 | {id: uuid.v4(), test: 'yep'} 22 | ].map(obj => serialize.encode(obj)) 23 | 24 | function makeSeedStatements (publisherId: PublisherId, seedObjectBuffers: Array<Buffer>): Promise<Array<SignedStatement>> { 25 | return Promise.all( 26 | seedObjectBuffers.map((buf, idx) => { 27 | const object = b58MultihashForBuffer(buf) 28 | return SignedStatement.createSimple(publisherId, TEST_NAMESPACE, {object, refs: [`merge-test:${idx.toString()}`]}, idx) 29 | }) 30 | ) 31 | } 32 | 33 | describe('Push', () => { 34 | let alephNode 35 | let mockDestination 36 | let publisherId 37 | let seedStatements 38 | 39 | before(() => 40 | makeNode() 41 | .then(node => { alephNode = node }) 42 | .then(() => makeNode()) 43 | .then(dest => { mockDestination = dest }) 44 | .then(() => PublisherId.generate()) 45 | .then(pubId => { publisherId = pubId }) 46 | .then(() => makeSeedStatements(publisherId, SEED_OBJECT_BUFFERS)) 47 | .then(statements => { seedStatements = statements }) 48 | ) 49 | 50 | it('handles rejection', () => 51 | alephNode.start() 52 | .then(() => { 53 | mockDestination.p2p.handle(PROTOCOLS.node.push, mockPushHandler({reject: {error: 'not authorized'}}, 0)) 54 | return mockDestination.start() 55 | }) 56 | .then(() => 57 | expect(alephNode.pushStatements(mockDestination.peerInfo, seedStatements)) 58 | .to.eventually.be.rejectedWith('not authorized') 59 | ) 60 | ) 61 | 62 | it('sends statements if authorized', () => { 63 | const result = { 64 | objects: seedStatements.length, 65 | statements: seedStatements.length, 66 | error: '' 67 | } 68 | 69 | return alephNode.start() 70 | .then(() => Promise.all(seedStatements.map(s => alephNode.db.put(s)))) 71 | .then(() => { 72 | mockDestination.p2p.handle(PROTOCOLS.node.push, mockPushHandler({accept: {}}, seedStatements.length, result)) 73 | return mockDestination.start() 74 | }) 75 | .then(() => 76 | expect(alephNode.pushStatementsById(mockDestination.peerInfo, seedStatements.map(s => s.id))) 77 | .to.eventually.be.deep.eql(result) 78 | ) 79 | }) 80 | }) 81 | -------------------------------------------------------------------------------- /test/peer/remote_query_test.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const assert = require('assert') 4 | const { before, describe, it } = require('mocha') 5 | 6 | const { PROTOCOLS } = require('../../src/peer/constants') 7 | const pull = require('pull-stream') 8 | const { PublisherId } = require('../../src/peer/identity') 9 | const { makeNode, mockQueryHandler } = require('../util') 10 | const { unpackQueryResultProtobuf } = require('../../src/model/query_result') 11 | 12 | import type Node from '../../src/peer/node' 13 | 14 | function startNodes (...nodes: Array<Node>): Promise<*> { 15 | return Promise.all(nodes.map(n => n.start())) 16 | } 17 | 18 | describe('Remote Query', () => { 19 | let local 20 | 21 | before(() => makeNode().then(peer => { local = peer })) 22 | 23 | it('decodes all query result types correctly', function () { 24 | this.timeout(3000) 25 | 26 | const responses = [ 27 | {value: {simple: {stringValue: 'hello world'}}}, 28 | {value: {simple: {intValue: 123}}}, 29 | {value: { 30 | compound: { 31 | body: [ 32 | {key: 'foo', value: {stringValue: 'bar'}}, 33 | {key: 'bar', value: {intValue: 1}}]}}}, 34 |
{end: {}} 35 | ] 36 | 37 | // the stream doesn't deliver the "end" response, it just ends the stream 38 | const expected = responses.slice(0, responses.length - 1) 39 | .map(raw => unpackQueryResultProtobuf(raw)) 40 | 41 | let remote 42 | 43 | return makeNode() 44 | .then(node => { 45 | remote = node 46 | remote.p2p.handle(PROTOCOLS.node.query, mockQueryHandler(responses)) 47 | }) 48 | .then(() => startNodes(local, remote)) // start both peers 49 | .then(() => local.remoteQuery(remote.p2p.peerInfo, 'SELECT * FROM foo.bar')) 50 | .then(results => { 51 | assert.deepEqual(results, expected, 'query should return all expected results') 52 | }) 53 | }) 54 | 55 | it('ends the stream with an error when it gets an error response', function () { 56 | this.timeout(3000) 57 | 58 | const errorMessage = 'server on fire' 59 | const responses = [ 60 | {value: {simple: {stringValue: 'hello world'}}}, 61 | {value: {simple: {intValue: 123}}}, 62 | {error: {error: errorMessage}} 63 | ] 64 | 65 | const expected = responses.slice(0, responses.length - 1) 66 | .map(raw => unpackQueryResultProtobuf(raw)) 67 | 68 | let remote 69 | return makeNode() 70 | .then(node => { 71 | remote = node 72 | remote.p2p.handle(PROTOCOLS.node.query, mockQueryHandler(responses)) 73 | }) 74 | .then(() => startNodes(local, remote)) 75 | .then(() => local.remoteQueryStream(remote.p2p.peerInfo, 'SELECT * FROM foo.bar')) 76 | .then(resultStream => new Promise(resolve => { 77 | pull( 78 | resultStream, 79 | pull.collect((err, results) => { 80 | assert(err instanceof Error, 'query should return an error object') 81 | assert.equal(err.message, errorMessage, 'error object should have error message from remote node') 82 | assert.deepEqual(results, expected, 'we should still get all results before the error') 83 | resolve() 84 | }) 85 | ) 86 | })) 87 | }) 88 | }) 89 | 90 | describe('Remote Query with inline data', () => { 91 | let local, remote 92 | 93 | const seedObject = {foo: 'bar'} 94 | 95 | before(() => 96 | makeNode().then(peer => { local = peer }) 97 | .then(() => PublisherId.generate()) 98 | .then(publisherId => makeNode({publisherId})) 99 | .then(peer => { 100 | remote = peer 101 | }) 102 | .then(() => 103 | remote.ingestSimpleStatement('scratch.test.queryWithData', seedObject, {refs: ['test-1']}) 104 | ) 105 | .then(stmtId => remote.db.get(stmtId)) 106 | .then(stmt => { 107 | remote.p2p.handle(PROTOCOLS.node.query, mockQueryHandler([{value: {simple: {stmt: stmt.toProtobuf()}}}])) 108 | }) 109 | ) 110 | 111 | it('returns query results with data objects inline', () => 112 | Promise.all([local.start(), remote.start()]) 113 | .then(() => local.remoteQueryWithData(remote.peerInfo, 'SELECT * FROM scratch.test.queryWithData')) 114 | .then(result => { 115 | assert(result != null) 116 | assert(Array.isArray(result)) 117 | assert.deepEqual(result[0].body.object, seedObject) 118 | }) 119 | ) 120 | }) 121 | -------------------------------------------------------------------------------- /test/peer/statement_db_test.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const { assert, expect } = require('chai') 4 | const { before, describe, it } = require('mocha') 5 | const path = require('path') 6 | const { StatementDB } = require('../../src/peer/db/index') 7 | const { Statement } = require('../../src/model/statement') 8 | 9 | const MIGRATIONS_DIR = path.join(__dirname, '..', '..', 'src', 'peer', 'db', 'migrations') 10 | 11 | const SEED_STATEMENTS: Array<Statement> = [ 12 | { 13 | id:
'QmF001234:foo:5678', 14 | publisher: 'foo', 15 | namespace: 'scratch.test', 16 | body: { 17 | simple: { 18 | object: 'QmF00123456789', 19 | refs: ['foo:bar123'], 20 | tags: ['test'], 21 | deps: [] 22 | } 23 | }, 24 | timestamp: Date.now(), 25 | signature: Buffer.from('') 26 | }, 27 | { 28 | id: 'QmF001234:foo:6789', 29 | publisher: 'foo', 30 | namespace: 'scratch.blah', 31 | body: { 32 | simple: { 33 | object: 'QmF00123456789', 34 | refs: ['foo:bar456'], 35 | tags: ['test'], 36 | deps: [] 37 | } 38 | }, 39 | timestamp: Date.now(), 40 | signature: Buffer.from('') 41 | } 42 | ].map(stmt => Statement.fromProtobuf(stmt)) 43 | 44 | describe('Statement DB', () => { 45 | const db = new StatementDB(null) 46 | 47 | before(() => db.sqlDB() 48 | .then(() => Promise.all(SEED_STATEMENTS.map(stmt => db.put(stmt))))) 49 | 50 | it('can get statements by id', () => 51 | Promise.all(SEED_STATEMENTS.map(stmt => db.get(stmt.id))) 52 | .then(retrieved => { 53 | assert.deepEqual(retrieved, SEED_STATEMENTS) 54 | }) 55 | ) 56 | 57 | it('can get statements by WKI', () => 58 | db.getByWKI('foo:bar123') 59 | .then(results => { 60 | const expected = SEED_STATEMENTS.filter((stmt: Statement) => stmt.refSet.has('foo:bar123')) 61 | assert.deepEqual(results, expected) 62 | })) 63 | 64 | it('can get statements by namespace', () => 65 | Promise.all([ 66 | db.getByNamespace('scratch.test') 67 | .then(results => { 68 | const expected = SEED_STATEMENTS.filter(stmt => stmt.namespace === 'scratch.test') 69 | assert.deepEqual(results, expected) 70 | }), 71 | db.getByNamespace('nothing.here') 72 | .then(results => assert.equal(results.length, 0)) 73 | ])) 74 | 75 | it('can use wildcards in namespace queries', () => 76 | Promise.all([ 77 | db.getByNamespace('*') 78 | .then(results => { 79 | assert.deepEqual(results, SEED_STATEMENTS) 80 | }), 81 | db.getByNamespace('scratch.*') 82 | .then(results => { 83 | const expected = SEED_STATEMENTS.filter(stmt => stmt.namespace.startsWith('scratch.')) 84 | assert.deepEqual(results, expected) 85 | }) 86 | ])) 87 | }) 88 | 89 | describe('StatementDB migrations', () => { 90 | it('migrates and rolls back', () => { 91 | const db = new StatementDB() 92 | let sqlDB 93 | return db.sqlDB() 94 | .then(_sqlDB => { sqlDB = _sqlDB }) 95 | .then(() => sqlDB.select().table('Statement')) 96 | .then(result => { 97 | expect(result).to.exist 98 | }) 99 | .then(() => sqlDB.migrate.rollback({ 100 | directory: MIGRATIONS_DIR 101 | })) 102 | .then(() => 103 | sqlDB.select().table('Statement') 104 | .catch(err => { 105 | expect(err).to.be.an.instanceof(Error) 106 | }) 107 | ) 108 | }) 109 | }) 110 | -------------------------------------------------------------------------------- /test/peer/util_test.js: -------------------------------------------------------------------------------- 1 | const { expect } = require('chai') 2 | const { describe, it } = require('mocha') 3 | 4 | const PeerInfo = require('peer-info') 5 | const util = require('../../src/peer/util') 6 | 7 | describe('P2P utils', () => { 8 | it('lookupResponseToPeerInfo converts from directory lookup to PeerInfo object', () => { 9 | expect(util.lookupResponseToPeerInfo({})).to.be.null 10 | 11 | const noAddrs = {peer: {id: 'QmZvvcVA8t5qrM5DeQ8xM6PK18qzCYxseYNtaqauhSc4Nw'}} 12 | const noAddrsResult = util.lookupResponseToPeerInfo((noAddrs)) 13 | expect(noAddrsResult).to.be.an.instanceof(PeerInfo) 14 | expect(noAddrsResult.multiaddrs).to.be.empty 15 | }) 16 | }) 17 | -------------------------------------------------------------------------------- 
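[Ed. note] For orientation, a minimal sketch of a lookup-to-PeerInfo conversion consistent with test/peer/util_test.js above; this is not the repository's implementation, and the `addr` field name on the lookup response is an assumption:

const PeerId = require('peer-id')
const PeerInfo = require('peer-info')
const Multiaddr = require('multiaddr')

function lookupResponseToPeerInfoSketch (response) {
  const peer = response.peer
  if (peer == null) return null // an empty lookup response maps to null
  const info = new PeerInfo(PeerId.createFromB58String(peer.id))
  // a response without addresses yields a PeerInfo with empty multiaddrs
  for (const addr of (peer.addr || [])) {
    info.multiaddr.add(Multiaddr(addr))
  }
  return info
}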
/test/resources/fixtures/concat-message-signature.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | publisherIdB58: '4XTTM4JKrrBeAK6qXmo8FoKmT5RkfjeXfZrnWjJNw9fKvPnEs', 3 | privateKeyB58: 'K3TgUMffighKb23qdfq8oyJM4ByiG3nMgKMHNQeTFEKtfDJzGaTeEzJzp1BUmPYikTtzrgyiEg8DGSfWXemtWh5APs2a8K8hpfHYwhmvxh35YdGrrYHgMEpxFxCEEiaRkN7oDt7q', 4 | message: Buffer.from('This message was signed with a key generated by concat', 'utf-8'), 5 | signature: Buffer.from('Id02py5k9yGJBYQ6/W1qOThAdF9Y5LQySQGbCQdV+pYou+xNeRG26sIJQCZ3B/9SQ/CpJfZjwr+QjwydNWg6Ag==', 'base64') 6 | } 7 | -------------------------------------------------------------------------------- /test/resources/fixtures/test-statements.js: -------------------------------------------------------------------------------- 1 | const SIMPLE_STMT_1 = { 2 | id: '4XTTM81cjwraTF9FW33DyCz2PbdQ9peqCXWTz9rBhU3bwm4TE:1485446977027:0', 3 | publisher: '4XTTM81cjwraTF9FW33DyCz2PbdQ9peqCXWTz9rBhU3bwm4TE', 4 | namespace: 'scratch.test', 5 | timestamp: 1485446977027, 6 | body: { simple: { object: 'foo', refs: [ 'simple-1' ], deps: [ 'dep1', 'dep2' ], tags: [] } }, 7 | signature: Buffer.from('4Xl7an0GdvCZtNR8Hw50RBOhfthNydlyMHBZeIoFnuk0fAtZE8BfQqltrVMXxWp9dabE8g5rR/F+3Fdzl5yyAQ==', 'base64') 8 | } 9 | 10 | const SIMPLE_STMT_2 = { 11 | id: '4XTTM81cjwraTF9FW33DyCz2PbdQ9peqCXWTz9rBhU3bwm4TE:1485447081587:1', 12 | publisher: '4XTTM81cjwraTF9FW33DyCz2PbdQ9peqCXWTz9rBhU3bwm4TE', 13 | namespace: 'scratch.test', 14 | timestamp: 1485447081587, 15 | body: { simple: { object: 'foo', refs: [ 'simple-2' ], deps: [ 'dep1', 'dep3' ], tags: [] } }, 16 | signature: Buffer.from('u+u8ICJbRHiAsGFeLFVBODX29DXYf4Wj6J2am2J7TbQqhIdhbMjBhQ1kXFWeAMxmXpdxfRt3CocDoxo3z3t7CQ==', 'base64') 17 | } 18 | 19 | const COMPOUND_STMT = { 20 | id: '4XTTMDah7ai6vqk6yzAhDtW9ATaEmTDJPNK3kcPT4bLKRuotG:1485447651564:0', 21 | publisher: '4XTTMDah7ai6vqk6yzAhDtW9ATaEmTDJPNK3kcPT4bLKRuotG', 22 | namespace: 'scratch.test.compound-stmt', 23 | timestamp: 1485447651564, 24 | body: { 25 | compound: { 26 | body: [ 27 | { object: 'foo', refs: [ 'compound-1' ], deps: [], tags: [] }, 28 | { object: 'foo', refs: [ 'compound-2' ], deps: [], tags: [] } 29 | ] 30 | } 31 | }, 32 | signature: Buffer.from('eJlR+rsTdiZQ7Lt8oI7M+tvtQPshjOb50OyKtrNQBfZ2KDyTpBIZnTWlZ2CAIq15oYjHetzrfZBxj81Nfu1QCw==', 'base64') 33 | } 34 | 35 | const COMPOUND_STMT_2 = { 36 | id: '4XTTMDah7ai6vqk6yzAhDtW9ATaEmTDJPNK3kcPT4bLKRuotG:1485447651564:1', 37 | publisher: '4XTTMDah7ai6vqk6yzAhDtW9ATaEmTDJPNK3kcPT4bLKRuotG', 38 | namespace: 'scratch.test.compound-stmt', 39 | timestamp: 1485447651564, 40 | body: { 41 | compound: { 42 | body: [ 43 | { object: 'foo', refs: [ 'compound-3' ], deps: [ 'dep1', 'dep2' ], tags: [] }, 44 | { object: 'foo', refs: [ 'compound-4' ], deps: [ 'dep1', 'dep3', 'dep4' ], tags: [] } 45 | ] 46 | } 47 | }, 48 | signature: Buffer.from('8nBP5iUEJu0TeMSWr4+HTg6Gp9I3yzu7Q590+HvVG7zbcbjJvI3qPN9yrnmh2txuVXua7lPHF9ORpOWdByeyDA==', 'base64') 49 | } 50 | 51 | const ENVELOPE_EMPTY = { 52 | id: '4XTTM2hkDuu73NXYakvw2uD6QfNAxB5emTd1P11uYt7YkmcXv:1485448028036:0', 53 | publisher: '4XTTM2hkDuu73NXYakvw2uD6QfNAxB5emTd1P11uYt7YkmcXv', 54 | namespace: 'scratch.test.envelope-stmt', 55 | timestamp: 1485448028036, 56 | body: { envelope: { body: [] } }, 57 | signature: Buffer.from('2Gk0n6XgzeaY3SCxjXXqwYuZfVqPnulEwyTW3eYeCI1NTq1g8D1I57602ItxEgBfg1CYXr9TBCdkMWnYXWIbDw==', 'base64') 58 | } 59 | 60 | const ENVELOPE_STMT = { 61 | id: 
'4XTTM2hkDuu73NXYakvw2uD6QfNAxB5emTd1P11uYt7YkmcXv:1485448141505:1', 62 | publisher: '4XTTM2hkDuu73NXYakvw2uD6QfNAxB5emTd1P11uYt7YkmcXv', 63 | namespace: 'scratch.test.envelope-stmt', 64 | timestamp: 1485448141505, 65 | body: { envelope: { body: [ SIMPLE_STMT_1, SIMPLE_STMT_2 ] } }, 66 | signature: Buffer.from('dEhboo/dqqHK/hB/Jur/DBQSKDpnr3bLM1sJgmCaRSlEtJpZdBHKlLjvy2CPyy9gqRCtczOAiMwkwgkiYvYaAg==', 'base64') 67 | } 68 | 69 | module.exports = { 70 | publisherIds: { 71 | simple: { 72 | id58: '4XTTM81cjwraTF9FW33DyCz2PbdQ9peqCXWTz9rBhU3bwm4TE', 73 | privateKey58: 'K3TgUjU7LqKoaGET8vHzwVY33PEk95fU4goQHfBvEWR1bVCLxd2V1RQnLqaTVHXQ2h14EXGxqwtvSir8vBxLmQ43KEjWbbJTgKAKexSr3apeHtQL2scWeaAHShHwVLffX4BzMu9E' 74 | }, 75 | compound: { 76 | id58: '4XTTMDah7ai6vqk6yzAhDtW9ATaEmTDJPNK3kcPT4bLKRuotG', 77 | privateKey58: 'K3TgUsAVZtKR71WG9jUrAt5GzXEVoDyjgDs2eiCaNuTcS8SzFjoBxvrKZ9c5btp4qByMLFrX7XXK9H5G4MEo2Z9hptfWAYK9iBnrWrdVPLLX7SbJz2CPZidrJxMTbF5xM7zX3edA' 78 | }, 79 | envelope: { 80 | id58: '4XTTM2hkDuu73NXYakvw2uD6QfNAxB5emTd1P11uYt7YkmcXv', 81 | privateKey58: 'K3TgTjd6PriGenibzwXWkZcxSYWLPKqMejgFdP9BUjErgwSDPkgPL7AACkvzkXExcvEepo1ZbfoHbo79HJzfkchVFP42UzZ3pYdD2wsq1JHL1UrJ28rza43CqfVW29yYjC81NgJY' 82 | } 83 | }, 84 | statements: { 85 | simple: [ SIMPLE_STMT_1, SIMPLE_STMT_2 ], 86 | compound: [ COMPOUND_STMT, COMPOUND_STMT_2 ], 87 | envelope: [ ENVELOPE_STMT ], 88 | envelopeEmpty: [ ENVELOPE_EMPTY ] 89 | }, 90 | expectedRefs: { 91 | simple: [ new Set(['simple-1']), new Set(['simple-2']) ], 92 | compound: [ new Set(['compound-1', 'compound-2']), new Set(['compound-3', 'compound-4']) ], 93 | envelope: [ new Set(['simple-1', 'simple-2']) ], 94 | envelopeEmpty: [ new Set() ] 95 | }, 96 | expectedSources: { 97 | simple: [ SIMPLE_STMT_1.publisher, SIMPLE_STMT_2.publisher ], 98 | compound: [ COMPOUND_STMT.publisher, COMPOUND_STMT_2.publisher ], 99 | envelope: [ SIMPLE_STMT_1.publisher ], 100 | envelopeEmpty: [ ENVELOPE_STMT.publisher ] 101 | }, 102 | expectedDeps: { 103 | simple: [ new Set(['dep1', 'dep2']), new Set(['dep1', 'dep3']) ], 104 | compound: [ new Set(), new Set(['dep1', 'dep2', 'dep3', 'dep4']) ], 105 | envelope: [ new Set(['dep1', 'dep2', 'dep3']) ], 106 | envelopeEmpty: [ new Set() ] 107 | }, 108 | objectIds: { 109 | simple: [ ['foo'], ['foo'] ], 110 | compound: [ ['foo', 'foo'], ['foo', 'foo'] ], 111 | envelope: [ ['foo', 'foo'] ], 112 | envelopeEmpty: [ [] ] 113 | } 114 | } 115 | -------------------------------------------------------------------------------- /test/resources/generate_test_publisher_ids.js: -------------------------------------------------------------------------------- 1 | const path = require('path') 2 | const mkdirp = require('mkdirp') 3 | const {PublisherId} = require('../../lib/peer/identity') 4 | 5 | const NUM_IDS = 20 6 | const outputDir = path.join(__dirname, 'publisher_ids') 7 | mkdirp.sync(outputDir) 8 | 9 | const promises = [] 10 | for (let i = 0; i < NUM_IDS; i++) { 11 | promises.push( 12 | PublisherId.generate().then(id => 13 | id.save(path.join(outputDir, `${id.id58}.id`))) 14 | ) 15 | } 16 | 17 | Promise.all(promises).then(ids => { 18 | console.log(`wrote ${ids.length} ids to ${outputDir}`) 19 | }) 20 | -------------------------------------------------------------------------------- /test/resources/generate_testing_ids.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs') 2 | const path = require('path') 3 | const {generateIdentity} = require('../../lib/peer/identity') 4 | 5 | const NUM_IDS = 20 6 | const 
outputPath = path.join(__dirname, 'test_node_ids.json') 7 | 8 | const promises = [] 9 | for (let i = 0; i < NUM_IDS; i++) { 10 | promises.push( 11 | generateIdentity().then(i => i.toJSON()) 12 | ) 13 | } 14 | 15 | Promise.all(promises).then(ids => { 16 | fs.writeFileSync(outputPath, JSON.stringify(ids), {encoding: 'utf8'}) 17 | console.log(`wrote ${ids.length} ids to ${outputPath}`) 18 | }) 19 | -------------------------------------------------------------------------------- /test/resources/publisher_ids/4XTTM2UhNoDF1EfwonksnNN1zRGcZCMFutDRMtXYgciwiLzCf.id: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mediachain/aleph/f18de6fb353a018fdc4b2a53df3a0e910a2e5b27/test/resources/publisher_ids/4XTTM2UhNoDF1EfwonksnNN1zRGcZCMFutDRMtXYgciwiLzCf.id -------------------------------------------------------------------------------- /test/resources/publisher_ids/4XTTM33j1mHcbg5ctzCmsmvaiUCixX5jxTbkKHm2vCokC6uxw.id: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mediachain/aleph/f18de6fb353a018fdc4b2a53df3a0e910a2e5b27/test/resources/publisher_ids/4XTTM33j1mHcbg5ctzCmsmvaiUCixX5jxTbkKHm2vCokC6uxw.id -------------------------------------------------------------------------------- /test/resources/publisher_ids/4XTTM3NVNm4VryWP91VXdEeoDXC2NzZkKRvUmPMnb1JdosJFk.id: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mediachain/aleph/f18de6fb353a018fdc4b2a53df3a0e910a2e5b27/test/resources/publisher_ids/4XTTM3NVNm4VryWP91VXdEeoDXC2NzZkKRvUmPMnb1JdosJFk.id -------------------------------------------------------------------------------- /test/resources/publisher_ids/4XTTM6UEBfKSWUnC4KERbW8hesAAAGgrZcu68AXqihsFvU8w6.id: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mediachain/aleph/f18de6fb353a018fdc4b2a53df3a0e910a2e5b27/test/resources/publisher_ids/4XTTM6UEBfKSWUnC4KERbW8hesAAAGgrZcu68AXqihsFvU8w6.id -------------------------------------------------------------------------------- /test/resources/publisher_ids/4XTTM6UNDgjn7Y6FqnGyKiRf7NmqKzi7ofeh3ZRzFGmv5tkyQ.id: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mediachain/aleph/f18de6fb353a018fdc4b2a53df3a0e910a2e5b27/test/resources/publisher_ids/4XTTM6UNDgjn7Y6FqnGyKiRf7NmqKzi7ofeh3ZRzFGmv5tkyQ.id -------------------------------------------------------------------------------- /test/resources/publisher_ids/4XTTM7xSyXgsrwG83aXEKt5Edp9LFmqxCxum1ABXAXDw5QHHy.id: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mediachain/aleph/f18de6fb353a018fdc4b2a53df3a0e910a2e5b27/test/resources/publisher_ids/4XTTM7xSyXgsrwG83aXEKt5Edp9LFmqxCxum1ABXAXDw5QHHy.id -------------------------------------------------------------------------------- /test/resources/publisher_ids/4XTTMATDw8fXWT5BGGaoc4A2sEbnLmMYBbekYnznDJ9K8c2cg.id: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mediachain/aleph/f18de6fb353a018fdc4b2a53df3a0e910a2e5b27/test/resources/publisher_ids/4XTTMATDw8fXWT5BGGaoc4A2sEbnLmMYBbekYnznDJ9K8c2cg.id -------------------------------------------------------------------------------- /test/resources/publisher_ids/4XTTMAhtTw2zgTVa5XFxHKeXcJGftdCVmsGR5zYzGspdLr2qf.id: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/mediachain/aleph/f18de6fb353a018fdc4b2a53df3a0e910a2e5b27/test/resources/publisher_ids/4XTTMAhtTw2zgTVa5XFxHKeXcJGftdCVmsGR5zYzGspdLr2qf.id -------------------------------------------------------------------------------- /test/resources/publisher_ids/4XTTMB2cFTq23PTFGEdeT93cLdNptcSUekShxoUp5vzkjbkA4.id: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mediachain/aleph/f18de6fb353a018fdc4b2a53df3a0e910a2e5b27/test/resources/publisher_ids/4XTTMB2cFTq23PTFGEdeT93cLdNptcSUekShxoUp5vzkjbkA4.id -------------------------------------------------------------------------------- /test/resources/publisher_ids/4XTTMBauYYKRkmWLyDwdU2sb4JgQX7VoZwwkPpmeFqrQoWWt7.id: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mediachain/aleph/f18de6fb353a018fdc4b2a53df3a0e910a2e5b27/test/resources/publisher_ids/4XTTMBauYYKRkmWLyDwdU2sb4JgQX7VoZwwkPpmeFqrQoWWt7.id -------------------------------------------------------------------------------- /test/resources/publisher_ids/4XTTMCh4xe8E49486DUFko3cmqak3vrymKd5kMUBAVhMbxUkj.id: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mediachain/aleph/f18de6fb353a018fdc4b2a53df3a0e910a2e5b27/test/resources/publisher_ids/4XTTMCh4xe8E49486DUFko3cmqak3vrymKd5kMUBAVhMbxUkj.id -------------------------------------------------------------------------------- /test/resources/publisher_ids/4XTTMDi5msCJgo8GFAHvZkE57cCK4QCPRQSrUSQcPMqQexT3E.id: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mediachain/aleph/f18de6fb353a018fdc4b2a53df3a0e910a2e5b27/test/resources/publisher_ids/4XTTMDi5msCJgo8GFAHvZkE57cCK4QCPRQSrUSQcPMqQexT3E.id -------------------------------------------------------------------------------- /test/resources/publisher_ids/4XTTMEj74YR3oT18Uh1ZR1DbWpG5BXftvcSnyrwC6JbgS9AcG.id: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mediachain/aleph/f18de6fb353a018fdc4b2a53df3a0e910a2e5b27/test/resources/publisher_ids/4XTTMEj74YR3oT18Uh1ZR1DbWpG5BXftvcSnyrwC6JbgS9AcG.id -------------------------------------------------------------------------------- /test/resources/publisher_ids/4XTTMF7HGrVmmnFATLrCMwboCDxnnm6CPcC8dyE8Tigfb5kYh.id: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mediachain/aleph/f18de6fb353a018fdc4b2a53df3a0e910a2e5b27/test/resources/publisher_ids/4XTTMF7HGrVmmnFATLrCMwboCDxnnm6CPcC8dyE8Tigfb5kYh.id -------------------------------------------------------------------------------- /test/resources/publisher_ids/4XTTMFB2m8B8ShvUiXNuywUmK6r3e8YM5fsQsLArHMeUpjhWk.id: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mediachain/aleph/f18de6fb353a018fdc4b2a53df3a0e910a2e5b27/test/resources/publisher_ids/4XTTMFB2m8B8ShvUiXNuywUmK6r3e8YM5fsQsLArHMeUpjhWk.id -------------------------------------------------------------------------------- /test/resources/publisher_ids/4XTTMFSVc6B4fRt5s71438dXcPE9NBPVQ5sRow1qtcARARHCJ.id: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mediachain/aleph/f18de6fb353a018fdc4b2a53df3a0e910a2e5b27/test/resources/publisher_ids/4XTTMFSVc6B4fRt5s71438dXcPE9NBPVQ5sRow1qtcARARHCJ.id -------------------------------------------------------------------------------- /test/resources/publisher_ids/4XTTMFVjr2Zga6A1PF4GPZ43fSqvgWUfR8T3seEsfEBe3PUDi.id: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mediachain/aleph/f18de6fb353a018fdc4b2a53df3a0e910a2e5b27/test/resources/publisher_ids/4XTTMFVjr2Zga6A1PF4GPZ43fSqvgWUfR8T3seEsfEBe3PUDi.id -------------------------------------------------------------------------------- /test/resources/publisher_ids/4XTTMGbTZFhxPnpwUmC8JaQS6NHeLj98TdbdjSeAT9qupk5x9.id: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mediachain/aleph/f18de6fb353a018fdc4b2a53df3a0e910a2e5b27/test/resources/publisher_ids/4XTTMGbTZFhxPnpwUmC8JaQS6NHeLj98TdbdjSeAT9qupk5x9.id -------------------------------------------------------------------------------- /test/resources/publisher_ids/4XTTMHa54HQoW8Lp6nLUzKt5QwZAUxux6DDshbBN5w4WNj759.id: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mediachain/aleph/f18de6fb353a018fdc4b2a53df3a0e910a2e5b27/test/resources/publisher_ids/4XTTMHa54HQoW8Lp6nLUzKt5QwZAUxux6DDshbBN5w4WNj759.id -------------------------------------------------------------------------------- /test/resources/publisher_ids/4XTTMHhM2e1nPVRgN6M7WM8WUcCnVwYwb8aS3ou9uKg5rdrHX.id: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mediachain/aleph/f18de6fb353a018fdc4b2a53df3a0e910a2e5b27/test/resources/publisher_ids/4XTTMHhM2e1nPVRgN6M7WM8WUcCnVwYwb8aS3ou9uKg5rdrHX.id -------------------------------------------------------------------------------- /test/resources/publisher_ids/concat/4XTTM4JKrrBeAK6qXmo8FoKmT5RkfjeXfZrnWjJNw9fKvPnEs.privateKey: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mediachain/aleph/f18de6fb353a018fdc4b2a53df3a0e910a2e5b27/test/resources/publisher_ids/concat/4XTTM4JKrrBeAK6qXmo8FoKmT5RkfjeXfZrnWjJNw9fKvPnEs.privateKey -------------------------------------------------------------------------------- /test/util.js: -------------------------------------------------------------------------------- 1 | // @flow 2 | 3 | const { MediachainNode: Node } = require('../src/peer/node') 4 | const Directory = require('../src/peer/directory') 5 | const config = require('./config') 6 | const thenify = require('thenify') 7 | const PeerId = require('peer-id') 8 | const path = require('path') 9 | const pull = require('pull-stream') 10 | const lp = require('pull-length-prefixed') 11 | 12 | const pb = require('../src/protobuf') 13 | const { protoStreamEncode, protoStreamDecode } = require('../src/peer/util') 14 | const createFromJSON = thenify(PeerId.createFromJSON) 15 | const nodeIdObjects = require('./resources/test_node_ids.json') 16 | const testNodeIds = Promise.all(nodeIdObjects.map(id => createFromJSON(id))) 17 | 18 | import type { Connection } from 'interface-connection' 19 | import type { QueryResultMsg, PushResponseMsg, PushEndMsg } from '../src/protobuf/types' 20 | 21 | function getTestNodeId (): Promise<PeerId> { 22 | return testNodeIds.then(ids => { 23 | const id = ids.pop() 24 | if (id == null) { 25 | throw new Error( 26 | 'Out of pre-generated test ids!
You should make some more and put them in ' + 27 | path.join(__dirname, 'resources', 'test_node_ids.json') 28 | ) 29 | } 30 | return id 31 | }) 32 | } 33 | 34 | function makeNode (options: Object = {}): Promise<Node> { 35 | return getTestNodeId().then(peerId => { 36 | const nodeOptions = Object.assign({peerId}, options) 37 | const node = new Node(nodeOptions) 38 | node.p2p.setSecureIOEnabled(config.secureIOEnabled) 39 | return node 40 | }) 41 | } 42 | 43 | function makeDirectory (options: Object): Promise<Directory> { 44 | return getTestNodeId().then(peerId => { 45 | const dirOptions = Object.assign({peerId}, options) 46 | const dir = new Directory(dirOptions) 47 | dir.p2p.setSecureIOEnabled(config.secureIOEnabled) 48 | return dir 49 | }) 50 | } 51 | 52 | /** 53 | * Respond to any query with the given QueryResult messages. 54 | * Should be added to an aleph node with 55 | * `node.p2p.handle(PROTOCOLS.node.query, mockQueryHandler(results))` 56 | * 57 | * @param results - the query results, including any `StreamEnd` or `StreamError` messages 58 | */ 59 | const mockQueryHandler = (results: Array<QueryResultMsg>) => (protocol: string, conn: Connection) => pull( 60 | conn, 61 | protoStreamDecode(pb.node.QueryRequest), 62 | pull.map(() => results), 63 | pull.flatten(), 64 | protoStreamEncode(pb.node.QueryResult), 65 | conn 66 | ) 67 | 68 | const mockPushHandler = (handshakeResponse: PushResponseMsg, expectedStatements: number = 0, result?: PushEndMsg) => (protocol: string, conn: Connection) => { 69 | let sentHandshake = false 70 | let statementsReceived = 0 71 | pull( 72 | conn, 73 | pull.asyncMap((val, cb) => { 74 | if (!sentHandshake) { 75 | sentHandshake = true 76 | return cb(null, pb.node.PushResponse.encode(handshakeResponse)) 77 | } 78 | statementsReceived++ 79 | if (statementsReceived === expectedStatements && result != null) { 80 | return cb(null, pb.node.PushEnd.encode(result)) 81 | } 82 | cb(null, null) 83 | }), 84 | pull.filter(val => val !== null), 85 | lp.encode(), 86 | conn 87 | ) 88 | } 89 | 90 | module.exports = { 91 | getTestNodeId, 92 | makeNode, 93 | makeDirectory, 94 | mockQueryHandler, 95 | mockPushHandler 96 | } 97 | --------------------------------------------------------------------------------
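[Ed. note] A short usage sketch showing how the mock handlers above are wired into a test peer, mirroring test/peer/remote_query_test.js; the require paths assume it runs from the test directory, and the peer is left running:

const { makeNode, mockQueryHandler } = require('./util')
const { PROTOCOLS } = require('../src/peer/constants')

makeNode().then(remote => {
  // Serve two canned results for any query, then terminate the
  // stream with an explicit StreamEnd ({end: {}}) message.
  remote.p2p.handle(PROTOCOLS.node.query, mockQueryHandler([
    {value: {simple: {stringValue: 'hello world'}}},
    {value: {simple: {intValue: 123}}},
    {end: {}}
  ]))
  return remote.start()
})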