├── .gitignore ├── schema.proto ├── npm-data.js ├── package.json ├── LICENSE ├── README.md ├── import.js ├── index.js ├── install.js └── messages.js /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | *.db 3 | sandbox 4 | sandbox.js 5 | -------------------------------------------------------------------------------- /schema.proto: -------------------------------------------------------------------------------- 1 | message Package { 2 | message Dep { 3 | required string name = 1; 4 | required string range = 2; 5 | } 6 | 7 | optional uint64 sameDependencies = 1; 8 | repeated Dep dependencies = 2; 9 | repeated Dep devDependencies = 3; 10 | } 11 | 12 | message ResolveRequest { 13 | required string name = 1; 14 | optional string range = 2; 15 | optional bool production = 3; 16 | } 17 | 18 | message ResolveResult { 19 | repeated uint64 seqs = 1 [packed=true]; 20 | } 21 | -------------------------------------------------------------------------------- /npm-data.js: -------------------------------------------------------------------------------- 1 | const ndjson = require('ndjson') 2 | const get = require('simple-get') 3 | const each = require('stream-each') 4 | const pump = require('pump') 5 | 6 | module.exports = sync 7 | 8 | function sync (since, ondata, cb) { 9 | const url = `https://replicate.npmjs.com/_changes?feed=continuous&include_docs=true&since=${since}` 10 | 11 | get({url, timeout: 30000}, function (err, res) { 12 | if (err) return cb(err) 13 | each(pump(res, ndjson.parse()), parse, cb) 14 | }) 15 | 16 | function parse (data, next) { 17 | if (data.id[0] === '_') return next() 18 | ondata({seq: data.seq, id: data.id, deleted: !!data.deleted, versions: mapVersions(data.doc.versions)}, next) 19 | } 20 | } 21 | 22 | function mapVersions (v) { 23 | if (!v) return null 24 | 25 | const list = [] 26 | for (const k of Object.keys(v)) { 27 | list.push({ 28 | version: k, 29 | dependencies: v[k].dependencies || 
null, 30 | devDependencies: v[k].devDependencies || null 31 | }) 32 | } 33 | 34 | return list 35 | } 36 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ims", 3 | "version": "1.4.2", 4 | "description": "Install My Stuff - an opinionated npm module installer", 5 | "main": "index.js", 6 | "dependencies": { 7 | "diffy": "^2.0.0", 8 | "gunzip-maybe": "^1.4.1", 9 | "hyperdiscovery": "^8.0.0", 10 | "hypertrie": "^2.1.0", 11 | "minimist": "^1.2.0", 12 | "mkdirp": "^0.5.1", 13 | "ndjson": "^1.5.0", 14 | "protocol-buffers-encodings": "^1.1.0", 15 | "pump": "^3.0.0", 16 | "semver": "^5.5.0", 17 | "sodium-universal": "^2.0.0", 18 | "stream-each": "^1.2.2", 19 | "tar-fs": "^1.16.3", 20 | "undici": "^0.3.0" 21 | }, 22 | "devDependencies": { 23 | "standard": "^11.0.1" 24 | }, 25 | "scripts": { 26 | "test": "standard" 27 | }, 28 | "bin": { 29 | "ims": "./install.js" 30 | }, 31 | "repository": { 32 | "type": "git", 33 | "url": "https://github.com/mafintosh/ims.git" 34 | }, 35 | "author": "Mathias Buus (@mafintosh)", 36 | "license": "MIT", 37 | "bugs": { 38 | "url": "https://github.com/mafintosh/ims/issues" 39 | }, 40 | "homepage": "https://github.com/mafintosh/ims" 41 | } 42 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2018 Mathias Buus 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject 
to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ims 2 | 3 | Install My Stuff - an opinionated npm module installer 4 | 5 | ``` 6 | npm install -g ims 7 | ``` 8 | 9 | IMS is a npm module installer that uses a p2p cache hosted on the Dat network to resolve all dependencies as fast as possible. 10 | 11 | The cache is stored in a [hypertrie](https://github.com/mafintosh/hypertrie) which makes it fast to update and always get the latest version while minimising the amount of roundtrips, compared to `npm install`. 
12 | 13 | The module tarballs themself are still downloaded from the npm registry 14 | 15 | ## Usage 16 | 17 | ``` sh 18 | # installs hypercore to ./node_modules 19 | ims hypercore 20 | ``` 21 | 22 | For more options do `ims --help` 23 | 24 | ``` 25 | Usage: ims [options] 26 | 27 | --save, -s saves the dep to package.json 28 | --save-dev, -S saves the dev dep to package.json 29 | --global, -g installs as a cli tool 30 | --production, -p skip dev dependencies 31 | --update, -u force update the cache 32 | --quiet, -q do not print anything 33 | --seed seed all metadata on the dat network 34 | 35 | If is omitted the deps from package.json is used 36 | ``` 37 | 38 | IMS stores its cache in `~/.ims`. 39 | 40 | Note that it uses sparse files for its database format so use `ls -sh` to list the *actual* size of the cache. 41 | 42 | ## License 43 | 44 | MIT 45 | -------------------------------------------------------------------------------- /import.js: -------------------------------------------------------------------------------- 1 | const npmData = require('./npm-data') 2 | const fs = require('fs') 3 | const hyperdiscovery = require('hyperdiscovery') 4 | const IMS = require('./') 5 | 6 | if (!process.env.HOME) process.env.HOME = '/root' 7 | 8 | const key = process.argv[2] 9 | const ims = IMS(process.env.HOME + '/npm.db', key || null, {sparse: false}) 10 | const db = ims.db 11 | 12 | db.ready(function () { 13 | hyperdiscovery(ims) 14 | if (db.feed.writable) onwritable() 15 | }) 16 | 17 | var state = '' 18 | var id = '' 19 | var inc = 0 20 | 21 | function onwritable () { 22 | setInterval(function () { 23 | if (id === toId()) return 24 | id = toId() 25 | console.log('state:', state) 26 | console.log('feed:', db.feed) 27 | }, 5000) 28 | 29 | runImport() 30 | } 31 | 32 | function toId () { 33 | return state + '@' + db.feed.length 34 | } 35 | 36 | function runImport () { 37 | console.log('start import', inc++) 38 | fs.readFile(process.env.HOME + '/npm.db/seq', 'utf-8', 
function (_, seq) { 39 | seq = Number(seq || '0') 40 | npmData(seq, ondata, function (err) { 41 | console.log('end import', --inc, err) 42 | setTimeout(runImport, 1000) 43 | }) 44 | }) 45 | } 46 | 47 | function ondata (data, next) { 48 | if (data.deleted) return deleteAll(data.id, next) 49 | 50 | var i = 0 51 | loop(null) 52 | 53 | function loop (err) { 54 | if (err) return next(err) 55 | if (i === data.versions.length) return note(data.seq, next) 56 | 57 | const n = mapVersion(data.id, data.versions[i++]) 58 | 59 | state = 'preget' 60 | db.get(n.key, function (_, node) { 61 | state = 'postget' 62 | if (node) return loop(null) 63 | state = 'precopy' 64 | hasCopy(data.id, n.value, function (err, seq) { 65 | state = 'postcopy' 66 | if (err) return next(err) 67 | if (seq) n.value = {sameDependencies: seq} 68 | state = 'preput' 69 | db.put(n.key, n.value, function () { 70 | state = 'postput' 71 | loop() 72 | }) 73 | }) 74 | }) 75 | } 76 | } 77 | 78 | function hasCopy (id, v, cb) { 79 | const ite = db.iterator(id) 80 | 81 | ite.next(function loop (err, node) { 82 | if (err) return cb(err) 83 | if (!node) return cb(null, 0) 84 | if (deps(node.value) === deps(v)) return cb(null, node.seq) 85 | ite.next(loop) 86 | }) 87 | } 88 | 89 | function deps (v) { 90 | return JSON.stringify({ 91 | dependencies: v.dependencies, 92 | devDependencies: v.devDependencies 93 | }) 94 | } 95 | 96 | function mapDeps (d) { 97 | if (!d) return null 98 | 99 | const list = [] 100 | for (const k of Object.keys(d)) { 101 | if (typeof d[k] !== 'string') continue 102 | list.push({ 103 | name: k, 104 | range: d[k] 105 | }) 106 | } 107 | return list 108 | } 109 | 110 | function mapVersion (id, v) { 111 | const deps = mapDeps(v.dependencies) 112 | const devDeps = mapDeps(v.devDependencies) 113 | 114 | return { 115 | key: id + '/' + v.version, 116 | value: { 117 | dependencies: deps, 118 | devDependencies: devDeps 119 | } 120 | } 121 | } 122 | 123 | function deleteAll (prefix, cb) { 124 | const ite = 
db.iterator(prefix) 125 | 126 | ite.next(function loop (err, node) { 127 | if (err) return cb(err) 128 | if (!node) return cb(null) 129 | db.del(node.key, function (err) { 130 | if (err) return cb(err) 131 | ite.next(loop) 132 | }) 133 | }) 134 | } 135 | 136 | function note (seq, cb) { 137 | fs.writeFile(process.env.HOME + '/npm.db/seq.tmp', '' + seq, function (err) { 138 | if (err) return cb(err) 139 | fs.rename(process.env.HOME + '/npm.db/seq.tmp', process.env.HOME + '/npm.db/seq', cb) 140 | }) 141 | } 142 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | const hypertrie = require('hypertrie') 2 | const semver = require('semver') 3 | const messages = require('./messages') 4 | 5 | module.exports = (storage, key, opts) => new IMS(storage, key, opts) 6 | 7 | class IMS { 8 | constructor (storage, key, opts) { 9 | if (!opts) opts = {} 10 | 11 | const self = this 12 | 13 | this.db = hypertrie(storage, key, { 14 | valueEncoding: messages.Package, 15 | sparse: opts.sparse !== false, 16 | maxRequests: 512 17 | }) 18 | 19 | if (this.db.feed.sparse) update() 20 | 21 | this.db.feed.on('peer-add', function (peer) { 22 | peer.stream.on('extension', function (name, data) { 23 | switch (name) { 24 | case 'ims/resolve': return onresolve(data) 25 | case 'ims/seqs': return onseqs(data) 26 | } 27 | }) 28 | 29 | function onresolve (data) { 30 | const { name, range, production } = messages.ResolveRequest.decode(data) 31 | self._seqs(name, range || '*', production, onsendseqs) 32 | } 33 | 34 | function onseqs (data) { 35 | const { seqs } = messages.ResolveResult.decode(data) 36 | const len = Math.min(8192, seqs.length) 37 | const feed = self.db.feed 38 | 39 | for (var i = 0; i < len; i++) { 40 | if (feed.bitfield && feed.bitfield.get(seqs[i])) continue 41 | feed.get(seqs[i], noop) 42 | } 43 | } 44 | 45 | function onsendseqs (err, seqs) { 46 | if (err) return 
47 | const res = [] 48 | for (const seq of seqs.values()) { 49 | if (peer.remoteBitfield && peer.remoteBitfield.get(seq)) continue 50 | res.push(seq) 51 | } 52 | if (!res.length) return 53 | peer.stream.extension('ims/seqs', messages.ResolveResult.encode({seqs: res})) 54 | } 55 | }) 56 | 57 | this.key = this.db.key 58 | this.discoveryKey = this.db.discoveryKey 59 | this.ready(noop) 60 | 61 | function update () { 62 | self.update(update) 63 | } 64 | } 65 | 66 | update (cb) { 67 | this.db.feed.update(cb) 68 | } 69 | 70 | ready (cb) { 71 | if (!cb) cb = noop 72 | 73 | const self = this 74 | 75 | this.db.ready(function (err) { 76 | if (err) return cb(err) 77 | self.key = self.db.key 78 | self.discoveryKey = self.db.discoveryKey 79 | cb(null) 80 | }) 81 | } 82 | 83 | replicate (opts) { 84 | return this.db.replicate({ 85 | live: true, 86 | extensions: [ 87 | 'ims/resolve', 88 | 'ims/seqs' 89 | ] 90 | }) 91 | } 92 | 93 | _extension (name, data) { 94 | for (const peer of this.db.feed.peers) { 95 | peer.stream.extension(name, data) 96 | } 97 | } 98 | 99 | _seqs (name, range, prod, cb) { 100 | const self = this 101 | const seen = new Set() 102 | const seqs = new Set() 103 | 104 | visit(name, range, prod, cb) 105 | 106 | function visit (name, range, prod, cb) { 107 | self.getLatest(name, range, {seqs}, function (err, node) { 108 | if (err) return cb(err) 109 | if (!node || seen.has(node.key)) return cb(null, seqs) 110 | seen.add(node.key) 111 | 112 | var missing = 0 113 | var error = null 114 | 115 | ondeps(node.value.dependencies) 116 | if (!prod) ondeps(node.value.devDependencies) 117 | 118 | if (!missing) cb(null, seqs) 119 | 120 | function ondeps (deps) { 121 | for (var i = 0; i < deps.length; i++) { 122 | missing++ 123 | visit(deps[i].name, deps[i].range, true, ondone) 124 | } 125 | } 126 | 127 | function ondone (err) { 128 | if (err) error = err 129 | if (--missing) return 130 | cb(error, seqs) 131 | } 132 | }) 133 | } 134 | } 135 | 136 | resolve (name, opts, cb) { 137 
| if (typeof opts === 'function') return this.resolve(name, null, opts) 138 | if (!cb) cb = noop 139 | if (!opts) opts = {} 140 | 141 | const self = this 142 | const ondep = opts.ondep || noop 143 | const range = opts.range || '*' 144 | const production = !!opts.production 145 | const root = {parent: null, name: null, version: null, range, deps: new Map(), toJSON} 146 | 147 | var missing = 0 148 | var error = null 149 | 150 | if (typeof name === 'string') { 151 | this._extension('ims/resolve', messages.ResolveRequest.encode({name, production, range})) 152 | root.name = name 153 | missing++ 154 | visit(name, range, root, onvisit) 155 | return 156 | } 157 | 158 | const deps = depsToArray(name.dependencies).concat(production ? [] : depsToArray(name.devDependencies)) 159 | root.name = name.name 160 | root.version = name.version || null 161 | 162 | // TODO: support only sending one msg here with the resolves as an array 163 | for (const {name, range} of deps) { 164 | this._extension('ims/resolve', messages.ResolveRequest.encode({name, production, range})) 165 | } 166 | 167 | missing += visitDeps(root, deps, onvisit) 168 | 169 | function onvisit (err) { 170 | if (err) error = err 171 | if (--missing) return 172 | if (error) return cb(error) 173 | cb(null, root) 174 | } 175 | 176 | function visit (name, range, tree, cb) { 177 | const prod = tree !== root || production 178 | 179 | self.getLatest(name, range, function (err, node) { 180 | if (err) return cb(err) 181 | if (!node) return cb(new Error('Module ' + name + '@' + range + ' not found')) 182 | 183 | const pkg = parse(node.key) 184 | const visited = inTree(tree, pkg.name, pkg.version) 185 | 186 | tree.range = range 187 | tree.version = pkg.version 188 | 189 | if (visited) { 190 | tree.parent.deps.delete(tree.name) 191 | return cb(null) 192 | } 193 | 194 | ondep(pkg, tree) 195 | 196 | var error = null 197 | var missing = 1 198 | 199 | missing += visitDeps(tree, node.value.dependencies, ondone) 200 | if (!prod) missing 
+= visitDeps(tree, node.value.devDependencies, ondone) 201 | 202 | ondone(null) 203 | 204 | function ondone (err) { 205 | if (err) error = err 206 | if (--missing) return 207 | if (error) return cb(error) 208 | cb(null) 209 | } 210 | }) 211 | } 212 | 213 | function visitDeps (tree, deps, ondone) { 214 | for (var i = 0; i < deps.length; i++) { 215 | const dep = { 216 | parent: tree, 217 | name: deps[i].name, 218 | version: null, 219 | deps: new Map() 220 | } 221 | tree.deps.set(dep.name, dep) 222 | visit(dep.name, deps[i].range, dep, ondone) 223 | } 224 | 225 | return deps.length 226 | } 227 | } 228 | 229 | getLatest (name, range, opts, cb) { 230 | if (typeof opts === 'function') return this.getLatest(name, range, null, opts) 231 | if (range === 'latest') range = '*' 232 | 233 | const db = this.db 234 | const seqs = opts ? opts.seqs : null 235 | 236 | db.ready(function (err) { 237 | if (err) return cb(err) 238 | if (db.version) return run(null) 239 | db.feed.update(run) 240 | }) 241 | 242 | function run (err) { 243 | if (err) return cb(err) 244 | 245 | const ite = db.iterator(name) 246 | 247 | var latest = null 248 | var latestNode = null 249 | 250 | ite.next(function loop (err, node) { 251 | if (err) return cb(err) 252 | if (!node) return cb(null, latestNode) 253 | if (!node.value.sameDependencies) return onnode(node) 254 | 255 | db.getBySeq(node.value.sameDependencies, function (err, n) { 256 | if (err) return cb(err) 257 | if (seqs) seqs.add(n.seq) 258 | node.value = n.value 259 | onnode(node) 260 | }) 261 | 262 | function onnode (node) { 263 | if (seqs) seqs.add(node.seq) 264 | const pkg = parse(node.key) 265 | const v = pkg.version 266 | if (satisfies(v, range) && (!latest || semver.gt(v, latest))) { 267 | latest = v 268 | latestNode = node 269 | } 270 | 271 | ite.next(loop) 272 | } 273 | }) 274 | } 275 | } 276 | } 277 | 278 | function noop () {} 279 | 280 | function parse (key) { 281 | const parts = key.split('/') 282 | return parts.length === 3 283 | ? 
{name: parts[0] + '/' + parts[1], version: parts[2]} 284 | : {name: parts[0], version: parts[1]} 285 | } 286 | 287 | function inTree (tree, name, version) { 288 | while (tree) { 289 | if (tree.name === name && tree.version === version) return true 290 | const dep = tree.deps.get(name) 291 | if (dep && dep.version === version) return true 292 | tree = tree.parent 293 | } 294 | return false 295 | } 296 | 297 | function depsToArray (deps) { 298 | if (!deps) return [] 299 | const ks = Object.keys(deps) 300 | const res = [] 301 | for (var i = 0; i < ks.length; i++) { 302 | const name = ks[i] 303 | res.push({name, range: deps[name]}) 304 | } 305 | return res 306 | } 307 | 308 | function toJSON () { 309 | const deps = {} 310 | for (const [k, v] of this.deps) deps[k] = toJSON.call(v) 311 | 312 | return { 313 | name: this.name, 314 | version: this.version, 315 | range: this.range, 316 | deps 317 | } 318 | } 319 | 320 | function satisfies (a, b) { 321 | try { 322 | return semver.satisfies(a, b) 323 | } catch (err) { 324 | return false 325 | } 326 | } 327 | -------------------------------------------------------------------------------- /install.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const { Pool } = require('undici') 4 | const path = require('path') 5 | const mkdirp = require('mkdirp') 6 | const fs = require('fs') 7 | const sodium = require('sodium-universal') 8 | const pump = require('pump') 9 | const tar = require('tar-fs') 10 | const gunzip = require('gunzip-maybe') 11 | const os = require('os') 12 | const IMS = require('./') 13 | const trim = require('diffy/trim+newline') 14 | const minimist = require('minimist') 15 | 16 | var tick = 1 17 | var rendered = 0 18 | setInterval(() => tick++, 250).unref() 19 | 20 | // we have some bug in the seed'er so whitelist 21 | // against these peers until we fix it 22 | const whitelist = [ 23 | '88.99.3.86', 24 | '159.65.107.57' 25 | ] 26 | 27 | const pool = 
new Pool('https://registry.npmjs.org', { 28 | // magic numbers on what works good on my OS 29 | // if those high number of connections are not needed 30 | // they won't be used 31 | connections: 128, 32 | pipelining: 2 33 | }) 34 | 35 | const argv = minimist(process.argv.slice(2), { 36 | alias: { 37 | global: 'g', 38 | save: 's', 39 | 'save-dev': 'S', 40 | key: 'k', 41 | production: 'p', 42 | help: 'h', 43 | update: 'u', 44 | quiet: 'q' 45 | }, 46 | boolean: ['seed', 'quiet', 'update', 'help', 'global', 'save', 'save-dev', 'production'] 47 | }) 48 | 49 | const key = argv.key || '13f46b517a126b5d3f64cd2a7ec386140b06b38be6a7a47ffb5ba9b6461ee563' 50 | const dir = path.join(os.homedir(), '.ims') 51 | const ims = IMS(path.join(dir, 'db'), key, {sparse: !argv.seed}) 52 | const localPkg = fs.existsSync('package.json') && require(path.join(process.cwd(), 'package.json')) 53 | const name = getName(argv._[0]) || localPkg 54 | const range = getRange(argv._[0]) 55 | 56 | if (argv.help || (!name && !argv.seed)) { 57 | console.error('Usage: ims [options]') 58 | console.error('') 59 | console.error(' --save, -s saves the dep to package.json') 60 | console.error(' --save-dev, -S saves the dev dep to package.json') 61 | console.error(' --global, -g installs as a cli tool') 62 | console.error(' --production, -p skip dev dependencies') 63 | console.error(' --update, -u force update the cache') 64 | console.error(' --quiet, -q do not print anything') 65 | console.error(' --seed seed all metadata on the dat network') 66 | console.error('') 67 | console.error('If is omitted the deps from package.json is used') 68 | process.exit() 69 | } 70 | 71 | if (argv.seed) argv.quiet = true 72 | 73 | const base = argv.global ? 
'/usr/local/lib/node_modules' : './node_modules' 74 | 75 | var missing = 0 76 | var installed = false 77 | var sw = null 78 | var installs = 0 79 | var downloads = 0 80 | var ended = 0 81 | 82 | // ims or --global is always production 83 | if (typeof name === 'string' || argv.global) argv.production = true 84 | 85 | const fetching = new Map() 86 | const started = Date.now() 87 | const diffy = require('diffy')() 88 | 89 | diffy.render(render) 90 | 91 | const opts = { 92 | range, 93 | production: argv.production, 94 | ondep: function (pkg, tree) { 95 | const shard = hashShard(pkg.name + '@' + pkg.version) 96 | const cache = path.join(dir, 'cache', shard) 97 | const topLevel = typeof name === 'string' ? !tree.parent : (tree.parent && !tree.parent.parent) 98 | 99 | missing++ 100 | 101 | fs.stat(cache, function (err) { 102 | if (!err) return finish(null) 103 | fetch(pkg, cache, finish) 104 | }) 105 | 106 | function finish (err) { 107 | if (err) return onerror(err) 108 | 109 | const nm = path.join(base, '..', pathify(tree)) 110 | const check = path.join(nm, '.ims') 111 | var pkg = null 112 | 113 | fs.readFile(check, 'utf-8', function (_, stored) { 114 | if (stored === shard) return done(null) 115 | fs.unlink(check, function () { 116 | pump(fs.createReadStream(cache), gunzip(), tar.extract(nm, {map, mapStream}), function (err) { 117 | if (err) return onerror(err) 118 | linkBins(pkg, '..', path.join(nm, '../.bin'), function (err) { 119 | if (err) return onerror(err) 120 | fs.writeFile(check, shard, done) 121 | }) 122 | }) 123 | }) 124 | }) 125 | 126 | function mapStream (stream, header) { 127 | if (header.name !== 'package.json') return stream 128 | if (!topLevel) return stream 129 | 130 | const buf = [] 131 | stream.on('data', data => buf.push(data)) 132 | stream.on('end', function () { 133 | pkg = JSON.parse(Buffer.concat(buf)) 134 | }) 135 | 136 | return stream 137 | } 138 | 139 | function done (err) { 140 | if (err) return onerror(err) 141 | installs++ 142 | 
renderMaybe() 143 | if (!--missing && installed) exit() 144 | } 145 | } 146 | } 147 | } 148 | 149 | ims.ready(function () { 150 | if (argv.seed) { 151 | require('hyperdiscovery')(ims, {whitelist}) 152 | return 153 | } 154 | 155 | diffy.render() 156 | 157 | sw = require('hyperdiscovery')(ims, {whitelist}).once('connection', function () { 158 | if (!argv.update) return resolve() 159 | ims.update(resolve) 160 | 161 | function resolve () { 162 | diffy.render() 163 | ims.resolve(name, opts, function (err, tree) { 164 | if (err) return onerror(err) 165 | installed = true 166 | 167 | if (localPkg && (argv['save-dev'] || argv.save) && !argv.global) { 168 | const key = argv.save ? 'dependencies' : 'devDependencies' 169 | const deps = localPkg[key] || {} 170 | deps[name] = range || ('^' + tree.version) 171 | localPkg[key] = sort(deps) 172 | fs.writeFileSync('package.json', JSON.stringify(localPkg, null, 2) + '\n') 173 | } 174 | 175 | if (!missing) exit() 176 | }) 177 | } 178 | }) 179 | 180 | // always render on connections 181 | sw.on('connection', () => diffy.render()) 182 | }) 183 | 184 | function map (header) { 185 | header.name = header.name.replace(/^package\//, '') 186 | return header 187 | } 188 | 189 | function sort (deps) { 190 | const copy = {} 191 | for (const k of Object.keys(deps).sort()) copy[k] = deps[k] 192 | return copy 193 | } 194 | 195 | function pathify (tree) { 196 | const skipLast = typeof name !== 'string' 197 | var p = '' 198 | while (tree) { 199 | p = path.join('node_modules', tree.name, p) 200 | tree = tree.parent 201 | if (skipLast && !tree.parent) break 202 | } 203 | return p 204 | } 205 | 206 | function hashShard (name) { 207 | const buf = Buffer.alloc(32) 208 | sodium.crypto_generichash(buf, Buffer.from(name)) 209 | const hex = buf.toString('hex') 210 | return path.join(hex.slice(0, 2), hex.slice(2, 4), hex.slice(4)) 211 | } 212 | 213 | function linkBins (pkg, dir, binDir, cb) { 214 | if (!pkg) return cb(null) 215 | 216 | var missing = 1 217 | 
var error = null 218 | 219 | if (!pkg.bin || !pkg.name) return done(null) 220 | if (/(^\.)|[/\\]/.test(pkg.name)) return done(null) 221 | 222 | const bin = typeof pkg.bin === 'string' ? {[pkg.name]: pkg.bin} : pkg.bin 223 | 224 | for (const k of Object.keys(bin)) { 225 | if (/(^\.)|[/\\]/.test(k)) continue 226 | missing++ 227 | link(path.join(dir, pkg.name, bin[k]), k) 228 | } 229 | 230 | done(null) 231 | 232 | function link (exe, k) { 233 | fs.symlink(exe, path.join(binDir, k), function (err) { 234 | if (err && err.code === 'ENOENT') return mkdirp(binDir, retry) 235 | if (err && err.code !== 'EEXIST') return done(err) 236 | fs.chmod(path.join(binDir, exe), 0o755, done) 237 | }) 238 | 239 | function retry (err) { 240 | if (err) return done(err) 241 | link(exe, k) 242 | } 243 | } 244 | 245 | function done (err) { 246 | if (err) error = err 247 | if (--missing) return 248 | cb(error) 249 | } 250 | } 251 | 252 | function exit () { 253 | diffy.render() 254 | 255 | if (!argv.global) return done(null) 256 | 257 | const pkg = require(path.join(base, name, 'package.json')) 258 | linkBins(pkg, '../lib/node_modules', '/usr/local/bin', done) 259 | 260 | function done (err) { 261 | if (err) return onerror(err) 262 | ended = Date.now() 263 | diffy.render() 264 | setImmediate(() => process.exit()) 265 | } 266 | } 267 | 268 | function onerror (err) { 269 | if (err) throw err 270 | } 271 | 272 | function renderMaybe () { 273 | if (tick === rendered) return 274 | rendered = tick 275 | diffy.render() 276 | } 277 | 278 | function render () { 279 | if (argv.quiet) return '' 280 | 281 | const time = ended ? '(took ' + (ended - started) + 'ms)' : '' 282 | const latest = ims.db.version ? '(Latest version: ' + ims.db.version + ')' : '' 283 | 284 | return trim(` 285 | Connected to ${sw ? 
sw.connections.length : 0} peer(s) ${latest} 286 | Downloaded ${downloads} new module tarballs 287 | Installed ${installs} modules to ${base} ${time} 288 | `) 289 | } 290 | 291 | function fetch (pkg, cache, cb) { 292 | if (fetching.has(cache)) { 293 | fetching.get(cache).push(cb) 294 | return 295 | } 296 | 297 | fetching.set(cache, [cb]) 298 | 299 | mkdirp(path.dirname(cache), function (err) { 300 | if (err) return done(err) 301 | 302 | downloads++ 303 | renderMaybe() 304 | pool.request({ 305 | method: 'GET', 306 | path: '/' + pkg.name + '/-/' + pkg.name + '-' + pkg.version + '.tgz' 307 | }, function (err, res) { 308 | if (err) return done(err) 309 | if (res.statusCode !== 200) return done(new Error('Bad response (' + res.statusCode + ')')) 310 | 311 | pump(res.body, fs.createWriteStream(cache + '.tmp'), function (err) { 312 | if (err) return done(err) 313 | fs.rename(cache + '.tmp', cache, done) 314 | }) 315 | }) 316 | }) 317 | 318 | function done (err) { 319 | for (const cb of fetching.get(cache)) cb(err) 320 | fetching.delete(cache) 321 | } 322 | } 323 | 324 | function getRange (name) { 325 | if (!name || !/.@/.test(name)) return null 326 | return name.split('@').pop() 327 | } 328 | 329 | function getName (name) { 330 | if (!name || !/.@/.test(name)) return name 331 | return name.split('@').slice(0, -1).join('@') 332 | } 333 | -------------------------------------------------------------------------------- /messages.js: -------------------------------------------------------------------------------- 1 | // This file is auto generated by the protocol-buffers cli tool 2 | 3 | /* eslint-disable quotes */ 4 | /* eslint-disable indent */ 5 | /* eslint-disable no-redeclare */ 6 | /* eslint-disable camelcase */ 7 | 8 | // Remember to `npm install --save protocol-buffers-encodings` 9 | var encodings = require('protocol-buffers-encodings') 10 | var varint = encodings.varint 11 | var skip = encodings.skip 12 | 13 | var Package = exports.Package = { 14 | buffer: true, 15 | 
// NOTE(review): this file matches the output shape of the protocol-buffers
// (mafintosh) code generator for schema.proto. Prefer regenerating it from
// schema.proto over hand-editing. Comments below are annotations only; the
// code itself is unchanged.
// Each exported message object starts with null codec slots that are filled
// in by the define*() calls further down.
encodingLength: null,
encode: null,
decode: null
}

// Codec for the ResolveRequest message (schema.proto: required name,
// optional range, optional production).
var ResolveRequest = exports.ResolveRequest = {
  buffer: true,
  encodingLength: null,
  encode: null,
  decode: null
}

// Codec for the ResolveResult message (schema.proto: repeated packed seqs).
var ResolveResult = exports.ResolveResult = {
  buffer: true,
  encodingLength: null,
  encode: null,
  decode: null
}

// Populate the codec slots declared above.
definePackage()
defineResolveRequest()
defineResolveResult()

// Installs encodingLength/encode/decode on Package and its nested Dep type.
function definePackage () {
  // Nested message Package.Dep (required name, required range).
  var Dep = Package.Dep = {
    buffer: true,
    encodingLength: null,
    encode: null,
    decode: null
  }

  defineDep()

  function defineDep () {
    // enc[0]: string encoding used by both fields.
    var enc = [
      encodings.string
    ]

    Dep.encodingLength = encodingLength
    Dep.encode = encode
    Dep.decode = decode

    // Byte length of the encoded Dep; throws if a required field is missing.
    // Each field costs 1 byte of field key plus the string encoding.
    function encodingLength (obj) {
      var length = 0
      if (!defined(obj.name)) throw new Error("name is required")
      var len = enc[0].encodingLength(obj.name)
      length += 1 + len
      if (!defined(obj.range)) throw new Error("range is required")
      var len = enc[0].encodingLength(obj.range)
      length += 1 + len
      return length
    }

    // Encodes obj into buf at offset (allocating buf if absent); records the
    // bytes written on encode.bytes and returns buf.
    function encode (obj, buf, offset) {
      if (!offset) offset = 0
      if (!buf) buf = Buffer.allocUnsafe(encodingLength(obj))
      var oldOffset = offset
      if (!defined(obj.name)) throw new Error("name is required")
      // 10 = field 1, wire type 2 (length-delimited): (1 << 3) | 2
      buf[offset++] = 10
      enc[0].encode(obj.name, buf, offset)
      offset += enc[0].encode.bytes
      if (!defined(obj.range)) throw new Error("range is required")
      // 18 = field 2, wire type 2: (2 << 3) | 2
      buf[offset++] = 18
      enc[0].encode(obj.range, buf, offset)
      offset += enc[0].encode.bytes
      encode.bytes = offset - oldOffset
      return buf
    }

    // Decodes a Dep from buf[offset, end); throws if either required field
    // was never seen. Records bytes consumed on decode.bytes.
    function decode (buf, offset, end) {
      if (!offset) offset = 0
      if (!end) end = buf.length
      if (!(end <= buf.length && offset <= buf.length)) throw new Error("Decoded message is not valid")
      var oldOffset = offset
      var obj = {
        name: "",
        range: ""
      }
      // found0/found1 track the two required fields (name, range).
      var found0 = false
      var found1 = false
      while (true) {
        if (end <= offset) {
          if (!found0 || !found1) throw new Error("Decoded message is not valid")
          decode.bytes = offset - oldOffset
          return obj
        }
        var prefix = varint.decode(buf, offset)
        offset += varint.decode.bytes
        var tag = prefix >> 3
        switch (tag) {
          case 1:
            obj.name = enc[0].decode(buf, offset)
            offset += enc[0].decode.bytes
            found0 = true
            break
          case 2:
            obj.range = enc[0].decode(buf, offset)
            offset += enc[0].decode.bytes
            found1 = true
            break
          default:
            // Unknown field: skip by wire type (low 3 bits of the key).
            offset = skip(prefix & 7, buf, offset)
        }
      }
    }
  }

  // enc[0]: varint for sameDependencies; enc[1]: nested Dep codec for the
  // two repeated Dep fields.
  var enc = [
    encodings.varint,
    Dep
  ]

  Package.encodingLength = encodingLength
  Package.encode = encode
  Package.decode = decode

  // Byte length of the encoded Package. Repeated Dep entries each cost a
  // 1-byte field key, a varint length prefix, and the Dep payload; null/
  // undefined array entries are skipped (mirrors encode below).
  function encodingLength (obj) {
    var length = 0
    if (defined(obj.sameDependencies)) {
      var len = enc[0].encodingLength(obj.sameDependencies)
      length += 1 + len
    }
    if (defined(obj.dependencies)) {
      for (var i = 0; i < obj.dependencies.length; i++) {
        if (!defined(obj.dependencies[i])) continue
        var len = enc[1].encodingLength(obj.dependencies[i])
        length += varint.encodingLength(len)
        length += 1 + len
      }
    }
    if (defined(obj.devDependencies)) {
      for (var i = 0; i < obj.devDependencies.length; i++) {
        if (!defined(obj.devDependencies[i])) continue
        var len = enc[1].encodingLength(obj.devDependencies[i])
        length += varint.encodingLength(len)
        length += 1 + len
      }
    }
    return length
  }

  // Encodes a Package; all fields are optional/repeated so nothing throws
  // on absence. Records bytes written on encode.bytes and returns buf.
  function encode (obj, buf, offset) {
    if (!offset) offset = 0
    if (!buf) buf = Buffer.allocUnsafe(encodingLength(obj))
    var oldOffset = offset
    if (defined(obj.sameDependencies)) {
      // 8 = field 1, wire type 0 (varint): (1 << 3) | 0
      buf[offset++] = 8
      enc[0].encode(obj.sameDependencies, buf, offset)
      offset += enc[0].encode.bytes
    }
    if (defined(obj.dependencies)) {
      for (var i = 0; i < obj.dependencies.length; i++) {
        if (!defined(obj.dependencies[i])) continue
        // 18 = field 2, wire type 2; followed by varint length, then payload.
        buf[offset++] = 18
        varint.encode(enc[1].encodingLength(obj.dependencies[i]), buf, offset)
        offset += varint.encode.bytes
        enc[1].encode(obj.dependencies[i], buf, offset)
        offset += enc[1].encode.bytes
      }
    }
    if (defined(obj.devDependencies)) {
      for (var i = 0; i < obj.devDependencies.length; i++) {
        if (!defined(obj.devDependencies[i])) continue
        // 26 = field 3, wire type 2: (3 << 3) | 2
        buf[offset++] = 26
        varint.encode(enc[1].encodingLength(obj.devDependencies[i]), buf, offset)
        offset += varint.encode.bytes
        enc[1].encode(obj.devDependencies[i], buf, offset)
        offset += enc[1].encode.bytes
      }
    }
    encode.bytes = offset - oldOffset
    return buf
  }

  // Decodes a Package from buf[offset, end). No required fields, so a
  // missing field just keeps its default. Records bytes on decode.bytes.
  function decode (buf, offset, end) {
    if (!offset) offset = 0
    if (!end) end = buf.length
    if (!(end <= buf.length && offset <= buf.length)) throw new Error("Decoded message is not valid")
    var oldOffset = offset
    var obj = {
      sameDependencies: 0,
      dependencies: [],
      devDependencies: []
    }
    while (true) {
      if (end <= offset) {
        decode.bytes = offset - oldOffset
        return obj
      }
      var prefix = varint.decode(buf, offset)
      offset += varint.decode.bytes
      var tag = prefix >> 3
      switch (tag) {
        case 1:
          obj.sameDependencies = enc[0].decode(buf, offset)
          offset += enc[0].decode.bytes
          break
        case 2:
          // Length-prefixed nested Dep; decode within [offset, offset + len).
          var len = varint.decode(buf, offset)
          offset += varint.decode.bytes
          obj.dependencies.push(enc[1].decode(buf, offset, offset + len))
          offset += enc[1].decode.bytes
          break
        case 3:
          var len = varint.decode(buf, offset)
          offset += varint.decode.bytes
          obj.devDependencies.push(enc[1].decode(buf, offset, offset + len))
          offset += enc[1].decode.bytes
          break
        default:
          offset = skip(prefix & 7, buf, offset)
      }
    }
  }
}

// Installs encodingLength/encode/decode on ResolveRequest.
function defineResolveRequest () {
  // enc[0]: string (name, range); enc[1]: bool (production).
  var enc = [
    encodings.string,
    encodings.bool
  ]

  ResolveRequest.encodingLength = encodingLength
  ResolveRequest.encode = encode
  ResolveRequest.decode = decode

  // Byte length of the encoded ResolveRequest; name is required, the other
  // two fields only contribute when present.
  function encodingLength (obj) {
    var length = 0
    if (!defined(obj.name)) throw new Error("name is required")
    var len = enc[0].encodingLength(obj.name)
    length += 1 + len
    if (defined(obj.range)) {
      var len = enc[0].encodingLength(obj.range)
      length += 1 + len
    }
    if (defined(obj.production)) {
      var len = enc[1].encodingLength(obj.production)
      length += 1 + len
    }
    return length
  }

  // Encodes a ResolveRequest; records bytes written on encode.bytes.
  function encode (obj, buf, offset) {
    if (!offset) offset = 0
    if (!buf) buf = Buffer.allocUnsafe(encodingLength(obj))
    var oldOffset = offset
    if (!defined(obj.name)) throw new Error("name is required")
    // 10 = field 1, wire type 2 (length-delimited string)
    buf[offset++] = 10
    enc[0].encode(obj.name, buf, offset)
    offset += enc[0].encode.bytes
    if (defined(obj.range)) {
      // 18 = field 2, wire type 2
      buf[offset++] = 18
      enc[0].encode(obj.range, buf, offset)
      offset += enc[0].encode.bytes
    }
    if (defined(obj.production)) {
      // 24 = field 3, wire type 0 (varint bool): (3 << 3) | 0
      buf[offset++] = 24
      enc[1].encode(obj.production, buf, offset)
      offset += enc[1].encode.bytes
    }
    encode.bytes = offset - oldOffset
    return buf
  }

  // Decodes a ResolveRequest; throws if required field name (found0) was
  // never seen. Records bytes consumed on decode.bytes.
  function decode (buf, offset, end) {
    if (!offset) offset = 0
    if (!end) end = buf.length
    if (!(end <= buf.length && offset <= buf.length)) throw new Error("Decoded message is not valid")
    var oldOffset = offset
    var obj = {
      name: "",
      range: "",
      production: false
    }
    var found0 = false
    while (true) {
      if (end <= offset) {
        if (!found0) throw new Error("Decoded message is not valid")
        decode.bytes = offset - oldOffset
        return obj
      }
      var prefix = varint.decode(buf, offset)
      offset += varint.decode.bytes
      var tag = prefix >> 3
      switch (tag) {
        case 1:
          obj.name = enc[0].decode(buf, offset)
          offset += enc[0].decode.bytes
          found0 = true
          break
        case 2:
          obj.range = enc[0].decode(buf, offset)
          offset += enc[0].decode.bytes
          break
        case 3:
          obj.production = enc[1].decode(buf, offset)
          offset += enc[1].decode.bytes
          break
        default:
          offset = skip(prefix & 7, buf, offset)
      }
    }
  }
}

// Installs encodingLength/encode/decode on ResolveResult. The seqs field is
// a packed repeated varint: one field key + length prefix, then the raw
// varints back to back.
function defineResolveResult () {
  var enc = [
    encodings.varint
  ]

  ResolveResult.encodingLength = encodingLength
  ResolveResult.encode = encode
  ResolveResult.decode = decode

  // Byte length of the encoded ResolveResult. An empty/all-undefined seqs
  // array contributes nothing (packedLen stays 0).
  function encodingLength (obj) {
    var length = 0
    if (defined(obj.seqs)) {
      var packedLen = 0
      for (var i = 0; i < obj.seqs.length; i++) {
        if (!defined(obj.seqs[i])) continue
        var len = enc[0].encodingLength(obj.seqs[i])
        packedLen += len
      }
      if (packedLen) {
        length += 1 + packedLen + varint.encodingLength(packedLen)
      }
    }
    return length
  }

  // Encodes a ResolveResult; records bytes written on encode.bytes.
  function encode (obj, buf, offset) {
    if (!offset) offset = 0
    if (!buf) buf = Buffer.allocUnsafe(encodingLength(obj))
    var oldOffset = offset
    if (defined(obj.seqs)) {
      // First pass: total payload size of the packed varints.
      var packedLen = 0
      for (var i = 0; i < obj.seqs.length; i++) {
        if (!defined(obj.seqs[i])) continue
        packedLen += enc[0].encodingLength(obj.seqs[i])
      }
      if (packedLen) {
        // 10 = field 1, wire type 2 (packed repeated is length-delimited)
        buf[offset++] = 10
        varint.encode(packedLen, buf, offset)
        offset += varint.encode.bytes
      }
      // Second pass: write each varint payload byte-to-byte.
      for (var i = 0; i < obj.seqs.length; i++) {
        if (!defined(obj.seqs[i])) continue
        enc[0].encode(obj.seqs[i], buf, offset)
        offset += enc[0].encode.bytes
      }
    }
    encode.bytes = offset - oldOffset
    return buf
  }

  // Decodes a ResolveResult; records bytes consumed on decode.bytes.
  function decode (buf, offset, end) {
    if (!offset) offset = 0
    if (!end) end = buf.length
    if (!(end <= buf.length && offset <= buf.length)) throw new Error("Decoded message is not valid")
    var oldOffset = offset
    var obj = {
      seqs: []
    }
    while (true) {
      if (end <= offset) {
        decode.bytes = offset - oldOffset
        return obj
      }
      var prefix = varint.decode(buf, offset)
      offset += varint.decode.bytes
      var tag = prefix >> 3
      switch (tag) {
        case 1:
          // Packed payload: read its byte length, then consume varints
          // until the payload boundary is reached.
          var packedEnd = varint.decode(buf, offset)
          offset += varint.decode.bytes
          packedEnd += offset
          while (offset < packedEnd) {
            obj.seqs.push(enc[0].decode(buf, offset))
            offset += enc[0].decode.bytes
          }
          break
        default:
          offset = skip(prefix & 7, buf, offset)
      }
    }
  }
}

// True when val is a usable field value: not null/undefined, and not NaN
// for numbers (NaN would corrupt varint encoding).
function defined (val) {
  return val !== null && val !== undefined && (typeof val !== 'number' || !isNaN(val))
}