├── .github └── workflows │ └── mikeals-workflow.yml ├── .gitignore ├── README.md ├── Schema.md ├── browser.js ├── cli.js ├── index.js ├── package.json ├── reg.sh ├── src ├── cli.js ├── nodejs │ ├── deflate.js │ ├── linker.js │ ├── loader.mjs │ ├── pull.js │ ├── push.js │ ├── registry.js │ ├── storage.js │ └── types.js └── schema.json └── test ├── basic.spec.js └── fixture ├── hello-world.js ├── registry └── main.js └── src └── hello-world.js /.github/workflows/mikeals-workflow.yml: -------------------------------------------------------------------------------- 1 | on: [push, pull_request] 2 | name: Build, Test and maybe Publish 3 | jobs: 4 | test: 5 | name: Build & Test 6 | runs-on: ubuntu-latest 7 | strategy: 8 | matrix: 9 | node-version: [12.x, 14.x] 10 | steps: 11 | - uses: actions/checkout@v2 12 | - name: Use Node.js ${{ matrix.node-version }} 13 | uses: actions/setup-node@v1 14 | with: 15 | node-version: ${{ matrix.node-version }} 16 | - name: Cache node_modules 17 | id: cache-modules 18 | uses: actions/cache@v1 19 | with: 20 | path: node_modules 21 | key: ${{ matrix.node-version }}-${{ runner.OS }}-build-${{ hashFiles('package.json') }} 22 | - name: Build 23 | if: steps.cache-modules.outputs.cache-hit != 'true' 24 | run: npm install 25 | - name: Test 26 | run: npm_config_yes=true npx best-test@latest 27 | publish: 28 | name: Publish 29 | needs: test 30 | runs-on: ubuntu-latest 31 | if: github.event_name == 'push' && ( github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main' ) 32 | steps: 33 | - uses: actions/checkout@v2 34 | - name: Cache node_modules 35 | id: cache-modules 36 | uses: actions/cache@v1 37 | with: 38 | path: node_modules 39 | key: 12.x-${{ runner.OS }}-build-${{ hashFiles('package.json') }} 40 | - name: Build 41 | if: steps.cache-modules.outputs.cache-hit != 'true' 42 | run: npm install 43 | - name: Test 44 | run: npm_config_yes=true npx best-test@latest 45 | 46 | - name: Publish 47 | uses: mikeal/merge-release@master 48 | env: 49 
| GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 50 | NPM_AUTH_TOKEN: ${{ secrets.NPM_AUTH_TOKEN }} 51 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .nyc_output 2 | coverage 3 | package-lock.json 4 | node_modules 5 | .DS_Store 6 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # `reg` - Native ESM Package Management 2 | 3 | `reg` is a package manager for native ES Modules. It's 4 | built to enable dependency management for Universal JavaScript 5 | (JavaScript that can run in the Browser and in Node.js w/o a compiler). 6 | 7 | **This library is highly experimental and still likely to break without 8 | notice. DO NOT USE THIS IN PRODUCTION.** 9 | 10 | Supporting Universal JavaScript is quite difficult as the Browser's 11 | module system has a very unique set of constraints. In order to build 12 | effective dependency management on-par with the features you'd expect 13 | from `npm` the approach `reg` takes is radically different. 14 | 15 | `reg` statically links the dependency tree by hash reference, re-writing 16 | the import statements of the output files (this is the **only** alteration to 17 | your source file `reg` does, it is not a compiler). A package registry 18 | is then just a namespace that maps users, package names, and versions to 19 | specific package hashes. You can then directly import the resulting package 20 | references in a Browser or in Node.js with a special loader. 
All package names must be preceded by the user's GitHub username. There are currently no top level packages.
The highlights of this data structure are: 82 | 83 | * Optimized for offline, sync, and decentralization (just like git!) 84 | * A module is only a single file (the Browser requires this) with the 85 | dependency tree attached. 86 | * Every module gets a unique hash (like a git commit) which means 87 | cache de-duplication works across differing module names and version. 88 | * Every file is chunked with an algorithm called Rabin which creates 89 | good block boundaries for diffing (this is what rsync uses). This 90 | gives us sub-file de-duplication in cache which is especially 91 | useful for de-duplicating file parts between versions. 92 | 93 | This data structure also enables some important HTTP/2 features 94 | we need in order to be competitive with bundle performance. 95 | 96 | * HTTP/2 Push for all of the dependencies required by a single import. 97 | * If an old e-tag is presented for a module, `reg` can diff the two 98 | dependency trees and use HTTP/2 Push of **only the assets that have 99 | changed**. 100 | 101 | For a more detailed look at the data structure you can read 102 | [the schema](./Schema.md). 
103 | 104 | Some visual examples follow: 105 | 106 | ![IMG_0306](https://user-images.githubusercontent.com/579/70022957-d562b300-154a-11ea-8b45-fee33f8316a7.jpeg) 107 | 108 | ![IMG_0307](https://user-images.githubusercontent.com/579/70022968-dd225780-154a-11ea-864c-868983eace65.jpeg) 109 | 110 | ![IMG_0308](https://user-images.githubusercontent.com/579/70022981-e3183880-154a-11ea-97e0-3c02b7f47554.jpeg) 111 | 112 | ![IMG_0309](https://user-images.githubusercontent.com/579/70022985-e8758300-154a-11ea-9f47-14fcdc363155.jpeg) 113 | 114 | ![IMG_0310](https://user-images.githubusercontent.com/579/70023075-2c688800-154b-11ea-9584-75e6904bdce2.jpeg) 115 | 116 | ![IMG_0311](https://user-images.githubusercontent.com/579/70023088-2ffc0f00-154b-11ea-9e35-f77574fb3600.jpeg) 117 | -------------------------------------------------------------------------------- /Schema.md: -------------------------------------------------------------------------------- 1 | # `reg` IPLD Schema 2 | 3 | This is the [IPLD Schema](https://specs.ipld.io) for `reg` Packages. 4 | 5 | The `File` type is from UnixFSv2 (next-generation 6 | data structures for IPFS). In this implementation 7 | of `reg` the UnixFSv2 implementation is imported from the 8 | [`unixfsv2`](https://github.com/ipld/js-unixfsv2) module. 9 | 10 | ```sh 11 | type PackageV1 struct { 12 | file &File 13 | deps PackageMap 14 | } 15 | type PackageMap {String:&Package} 16 | type Package union { 17 | | PackageV1 "v1" 18 | } representation keyed 19 | ``` 20 | 21 | A package is a single file of JavaScript and a map 22 | of dependent packages. This means that a typical 23 | project will actually be a collection of packages 24 | that are all linked together. 25 | 26 | The keys of `PackageMap` are the exact import strings 27 | from the original source file. 
28 | 29 | The linker is responsible for replacing the original 30 | registry package references (`@mikeal/test/1.0.0`) 31 | in the source file with references to the hash linked 32 | version of the file root file (`@reg/${CID}.js`). 33 | The original names are then used as the `PackageMap` key 34 | strings. 35 | 36 | The reason we do this is so that every individual file maintains 37 | a hash based list of its dependencies. Without this, we 38 | wouldn't be able to predict the dependency tree of 39 | package files other than the index.js. 40 | 41 | Also, it's just a bit simpler to have a unified map of import keys 42 | to CIDs. Once the files are content addressed and put in the registry 43 | there isn't much difference between a "local" file (`./src/file.js`) 44 | and a registry package (`@mikeal/test/1.0.0`). 45 | -------------------------------------------------------------------------------- /browser.js: -------------------------------------------------------------------------------- 1 | const createTypes = require('./src/nodejs/types.js') 2 | module.exports = { createTypes } 3 | -------------------------------------------------------------------------------- /cli.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 'use strict' 3 | 4 | /* hack - fixes bug in multicodec table */ 5 | const path = require('path') 6 | const table = require('multicodec/src/name-table') 7 | const modpath = Object.keys(require.cache).find(k => k.endsWith(path.join('multicodec', 'src', 'name-table.js'))) 8 | require.cache[modpath].exports = { ...table, '0129': 'dag-json' } 9 | /* end hack */ 10 | 11 | const mkdirp = require('mkdirp') 12 | const push = require('./src/nodejs/push') 13 | const linker = require('./src/nodejs/linker') 14 | const storage = require('./src/nodejs/storage.js') 15 | const deflate = require('./src/nodejs/deflate.js') 16 | const { execSync } = require('child_process') 17 | const printify = 
const tmpdir = f => tmp.dirSync(f)

// Reserved for push-specific yargs options (currently unused — the
// `publish` command is wired to publishOptions instead).
const pushOptions = yargs => {
}

// Link `argv.filename`, stage its blocks in the local store and deflate
// the tree into `argv['target-dir']`. Returns the deflate manifest with
// the root package CID attached as `.main`.
const runStage = async argv => {
  const store = storage.local()
  const pkg = await push(argv.filename, store.put)
  const cid = await pkg.block().cid()
  mkdirp.sync(argv['target-dir'])
  const manifest = await deflate(cid, argv['target-dir'], store)
  console.log(manifest)
  console.log(`Staged "@reg/${cid.toString()}"`)
  manifest.main = cid
  return manifest
}

// Push the linked tree to the remote registry store, then alias it under
// `argv.name`/`argv.semver` (and as latest when the registry reports so).
const runPublish = async argv => {
  const store = storage.store(argv.token)
  const pkg = await push(argv.filename, store.put)
  const cid = (await pkg.block().cid()).toString()
  console.log(`Published "@reg/${cid}"`)
  const _registry = registry(argv.token)
  const res = await _registry.alias(argv.name, cid, argv.semver, argv.latest)
  console.log(`Aliased ${argv.name + '/' + argv.semver}`)
  if (res.info.latest) {
    console.log(`Aliased ${argv.name}`)
  }
}

// Stream the file data of the named registry package to stdout.
const runCat = async argv => {
  const _registry = registry()
  const pkg = await _registry.pkg(argv.name)
  const store = storage.store()
  const types = createTypes({ getBlock: store.get })
  const block = await store.get(new CID(pkg.pkg))
  const p = types.Package.decoder(block.decode())
  const data = await p.getNode('*/file/data')
  for await (const chunk of data.read()) {
    process.stdout.write(chunk)
  }
}

const bin = path.join(__dirname, 'reg.sh')

// Stage the input into a temp dir, then execute the staged entry point
// through reg.sh (which boots Node with the custom ESM loader).
const runScript = async argv => {
  const dir = tmpdir().name
  argv['target-dir'] = dir
  const stage = await runStage(argv)
  const filename = path.join(dir, stage.main.toString('base32') + '.js')
  // Fix: pass the staged entry file to the runner — it was previously
  // invoked with a garbage placeholder and `filename` went unused.
  return execSync(`${bin} ${filename}`, { stdio: 'inherit' })
}

// Run the static linker and print every block it produces.
const runLinker = async argv => {
  for await (let { root, block } of linker(argv.filename)) {
    if (root) {
      block = root.block()
    }
    if (block.codec === 'raw') {
      console.log('Block', (await block.cid()).toString())
    } else {
      console.log('Block<' + block.codec + '>', printify(block.decode()))
    }
  }
}

// Print the registry info for a named alias.
const runInfo = async argv => {
  const _registry = registry()
  const pkg = await _registry.pkg(argv.name)
  console.log(pkg)
}

// True when `str` parses as a valid CID.
const validate = str => {
  try {
    new CID(str) // eslint-disable-line no-new
  } catch (e) {
    return false
  }
  return true
}

// Print the decoded root block for either a raw CID or a registry name.
const runPkgInfo = async argv => {
  let cid
  if (!validate(argv.cid)) {
    const _registry = registry()
    const pkg = await _registry.pkg(argv.name)
    cid = new CID(pkg.pkg)
  } else {
    cid = new CID(argv.cid)
  }
  const store = storage.store()
  const block = await store.get(cid)
  console.log(printify(block.decode()))
}
Example `reg myFile.js`' 119 | }) 120 | } 121 | 122 | const stageOptions = yargs => { 123 | inputOptions(yargs) 124 | yargs.option('target-dir', { 125 | desc: 'Directory to deflate all required files', 126 | default: path.join(process.env.HOME, '.reg', 'deflate') 127 | }) 128 | } 129 | 130 | const publishOptions = yargs => { 131 | inputOptions(yargs) 132 | yargs.option('token', { 133 | describe: 'GitHub personal access token', 134 | type: 'string', 135 | default: process.env.GHTOKEN || process.env.GITHUB_TOKEN 136 | }) 137 | yargs.positional('semver', { 138 | describe: 'Package version number.', 139 | type: 'string', 140 | default: 'minor' 141 | }) 142 | } 143 | 144 | const yargs = require('yargs') 145 | const args = yargs 146 | .command('$0 ', 'Run a local script file in reg', inputOptions, runScript) 147 | .command('publish ', 148 | 'Publish a module to the registry', publishOptions, runPublish) 149 | .command('stage ', 'Run the linker and stage the tree in local cache', stageOptions, runStage) 150 | .command('linker ', 'Run the static linker', inputOptions, runLinker) 151 | .command('info ', 'Get info for named alias', () => {}, runInfo) 152 | .command('cat ', 'Print the file data for the named alias', () => {}, runCat) 153 | .command('pkg-info ', 'Get package information', () => {}, runPkgInfo) 154 | .argv 155 | 156 | if (!args._.length && !args.filename) { 157 | yargs.showHelp() 158 | } 159 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | const push = require('./src/nodejs/push.js') 2 | const storage = require('./src/nodejs/storage.js') 3 | const createTypes = require('./src/nodejs/types.js') 4 | 5 | module.exports = { push, storage, createTypes } 6 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 
| "name": "reg", 3 | "version": "0.0.0-dev", 4 | "description": "", 5 | "main": "index.js", 6 | "browser": "browser.js", 7 | "scripts": { 8 | "build": "ipld-schema to-json Schema.md > src/schema.json", 9 | "lint": "standard", 10 | "test": "mocha test/*.spec.js" 11 | }, 12 | "bin": { 13 | "reg": "./cli.js" 14 | }, 15 | "keywords": [], 16 | "author": "Mikeal Rogers (https://www.mikealrogers.com/)", 17 | "license": "(Apache-2.0 AND MIT)", 18 | "devDependencies": { 19 | "mocha": "^6.2.2", 20 | "standard": "^14.3.1" 21 | }, 22 | "dependencies": { 23 | "@ipld/block": "^2.1.3", 24 | "@ipld/printify": "0.0.0", 25 | "@ipld/schema-gen": "0.1.0", 26 | "@ipld/unixfsv2": "0.0.1", 27 | "bent": "^7.0.4", 28 | "cids": "^0.7.1", 29 | "recast": "^0.18.5", 30 | "tmp": "^0.1.0" 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /reg.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | BASEDIR=$(dirname "$0") 4 | 5 | exec /usr/bin/env node --no-warnings --experimental-modules --loader $BASEDIR/src/nodejs/loader.mjs "$@" 6 | -------------------------------------------------------------------------------- /src/cli.js: -------------------------------------------------------------------------------- 1 | // nodejs cli 2 | -------------------------------------------------------------------------------- /src/nodejs/deflate.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | const path = require('path') 3 | const createTypes = require('./types') 4 | const fs = require('fs') 5 | const { stat } = fs.promises 6 | const { once } = require('events') 7 | 8 | const _write = async (stream, chunk) => { 9 | if (!stream.write(chunk)) await once(stream, 'drain') 10 | } 11 | 12 | const exists = async filename => { 13 | try { 14 | return fs.stat(filename) 15 | } catch (e) { 16 | return false 17 | } 18 | } 19 | 20 | const write = async (filename, pkg, 
dir) => { 21 | if (await exists(filename)) { 22 | return 23 | } 24 | const data = await pkg.getNode('*/file/data') 25 | const f = fs.createWriteStream(filename) 26 | for await (const chunk of data.read()) { 27 | await _write(f, chunk) 28 | } 29 | const finish = once(f, 'finish') 30 | f.end() 31 | await finish 32 | } 33 | 34 | const getDeps = async (pkg, dir, store, name, obj) => { 35 | const deps = await pkg.get('*/deps') 36 | const promises = [] 37 | for (const [key, link] of Object.entries(deps)) { 38 | let _name = name 39 | if (key.startsWith('@')) { 40 | _name = key 41 | } else { 42 | _name += '/' + key 43 | } 44 | promises.push(deflate(link, dir, store, _name, obj)) 45 | } 46 | return Promise.all(promises) 47 | } 48 | 49 | const deflate = async (cid, dir, store, name = '', obj = {}) => { 50 | const key = cid.toString('base32') 51 | if (!obj[key]) obj[key] = [] 52 | obj[key].push(name || 'main') 53 | const filename = path.join(dir, cid.toString('base32') + '.js') 54 | const types = createTypes({ getBlock: store.get }) 55 | const block = await store.get(cid) 56 | const pkg = types.Package.decoder(block.decode()) 57 | await Promise.all([write(filename, pkg, dir), getDeps(pkg, dir, store, name, obj)]) 58 | return obj 59 | } 60 | 61 | module.exports = deflate 62 | -------------------------------------------------------------------------------- /src/nodejs/linker.js: -------------------------------------------------------------------------------- 1 | const createTypes = require('./types.js') 2 | const path = require('path') 3 | const types = createTypes({ codec: 'dag-json' }) 4 | const CID = require('cids') 5 | const makeRegistry = require('./registry') 6 | 7 | /* 8 | Registry layout is simple. Every user has their own namespace 9 | and can publish a package to that name. 10 | 11 | /:github-username/:package-name 12 | 13 | The registry is simply an authenticated k/v store that points 14 | these namespaces to CIDs. 
15 | 16 | /@mikeal/bent => CID 17 | 18 | The CID must be a valid Package. 19 | */ 20 | 21 | const { parse, print } = require('recast') 22 | const { readFile } = require('fs').promises 23 | 24 | // TODO: replace with better API on unixfs File, File.fromString 25 | const fileIter = async function * (str) { yield Buffer.from(str) } 26 | 27 | const importer = async function * (parser) { 28 | const registry = makeRegistry() 29 | const ast = await parser.parsed 30 | const pending = [] 31 | const isLocal = s => { 32 | if (s.startsWith('./')) return true 33 | if (s.startsWith('../')) return true 34 | return false 35 | } 36 | const deps = {} 37 | const _parse = async function * (i) { 38 | const dec = ast.program.body[i] 39 | const source = dec.source.value 40 | let cid 41 | if (source.startsWith('@reg/')) { 42 | // For some reason, this is already hash linked. 43 | deps[source] = new CID(source.slice('@reg/'.length)) 44 | return 45 | } else if (source.startsWith('@')) { 46 | /* reserve @std/ for browser standard library */ 47 | if (source.startsWith('@std/')) return 48 | const info = await registry.pkg(source) 49 | if (!info) throw new Error(`No package in registry named ${source}`) 50 | cid = new CID(info.pkg) 51 | } else if (isLocal(source)) { 52 | for await (let { root, block } of parser.resolve(source)) { 53 | if (root) { 54 | block = root.block() 55 | cid = await block.cid() 56 | } 57 | yield { block } 58 | } 59 | } else { 60 | throw new Error(`Unknown import "${source}"`) 61 | } 62 | deps[source] = cid 63 | dec.source.value = `/@reg/${cid.toString()}.js` 64 | const comment = parse(`// static-link("${source}")`).program.comments[0] 65 | // TODO: figure out how to add the comment to the line 66 | } 67 | let i = 0 68 | for (const dec of [...ast.program.body]) { 69 | if (dec.type === 'ImportDeclaration') { 70 | yield * _parse(i) 71 | } 72 | i++ 73 | } 74 | const code = print(ast).code 75 | 76 | let fileLink 77 | const iter = types.File.fromIter(fileIter(code), 'test') 78 
| for await (let { block, root } of iter) { 79 | if (root) { 80 | block = root.block() 81 | fileLink = await block.cid() 82 | } 83 | yield { block } 84 | } 85 | const pkg = types.Package.encoder({ v1: { file: await fileLink, deps } }) 86 | const block = pkg.block() 87 | yield { root: pkg } 88 | } 89 | 90 | class Parser { 91 | constructor (file) { 92 | this.file = file 93 | this.parsed = this.parse() 94 | } 95 | 96 | async parse () { 97 | const buffer = await readFile(this.file) 98 | return parse(buffer.toString()) 99 | } 100 | 101 | imports () { 102 | return importer(this) 103 | } 104 | 105 | resolve (local) { 106 | const f = path.resolve(path.dirname(this.file), local) 107 | const parser = new Parser(f) 108 | return parser.imports() 109 | } 110 | } 111 | 112 | const linker = async function * (file) { 113 | const parser = new Parser(file) 114 | yield * parser.imports() 115 | } 116 | 117 | module.exports = linker 118 | 119 | /* 120 | const push = async (file, putBlock) => { 121 | for await (let { block, root } of linker(file)) { 122 | // noop 123 | } 124 | return 125 | const puts = [] 126 | const files = {} 127 | // TODO: parse file and re-write all imports to 128 | // CID references. 
129 | 130 | } 131 | module.exports = push 132 | */ 133 | -------------------------------------------------------------------------------- /src/nodejs/loader.mjs: -------------------------------------------------------------------------------- 1 | import path from 'path' 2 | import process from 'process' 3 | import Module from 'module' 4 | import fs from 'fs' 5 | import { promisify } from 'util' 6 | import { fileURLToPath } from 'url' 7 | 8 | const require = Module.createRequire(import.meta.url) 9 | const CID = require('cids') 10 | const { local } = require('./storage.js') 11 | const createTypes = require('./types.js') 12 | const mkdirp = promisify(require('mkdirp')) 13 | 14 | const { readFile } = fs.promises 15 | const __filename = fileURLToPath(import.meta.url) 16 | const __dirname = path.dirname(__filename) 17 | 18 | const builtins = Module.builtinModules 19 | const JS_EXTENSIONS = new Set(['.js', '.mjs']) 20 | 21 | const baseURL = new URL(`${process.cwd()}/`, 'file://') 22 | 23 | const store = local() 24 | const types = createTypes({getBlock: store.get}) 25 | const cache = path.join(process.env.HOME, '.reg', 'deflate') 26 | 27 | const { createWriteStream } = fs 28 | 29 | let globals = {} 30 | 31 | export async function resolve (specifier, parentModuleURL = baseURL, defaultResolve) { 32 | if (specifier.startsWith('@') || specifier.startsWith('@')) { 33 | throw new Error("Unsupported: The Node.js ESM loader has bugs in loader IO so we can't yet do dynamic loading") 34 | } else if (specifier.startsWith('/@reg/')) { 35 | const stage = parentModuleURL.slice(0, parentModuleURL.lastIndexOf('/')) 36 | let filename = specifier.slice('/@reg/'.length) 37 | return { url: stage + '/' + filename, format: 'module' } 38 | } else { 39 | if (!specifier.startsWith('./') && 40 | !specifier.startsWith('../') && 41 | !specifier.startsWith('/') && 42 | !specifier.startsWith('file://')){ 43 | throw new Error(`Unknown import: "${specifier}`) 44 | } 45 | let url = 
specifier.startsWith('file://') ? specifier : 'file://' + specifier 46 | return { url, format: 'module' } 47 | throw new Error("not implemented") 48 | } 49 | 50 | /* Fallback */ 51 | if (builtins.includes(specifier)) { 52 | return { 53 | url: specifier, 54 | format: 'builtin' 55 | } 56 | } 57 | if (/^\.{0,2}[/]/.test(specifier) !== true && !specifier.startsWith('file:')) { 58 | // For node_modules support: 59 | // return defaultResolve(specifier, parentModuleURL); 60 | throw new Error( 61 | `imports must begin with '/', './', or '../'; '${specifier}' does not`) 62 | } 63 | const resolved = new URL(specifier, parentModuleURL) 64 | const ext = path.extname(resolved.pathname) 65 | if (!JS_EXTENSIONS.has(ext)) { 66 | throw new Error( 67 | `Cannot load file with non-JavaScript file extension ${ext}.`) 68 | } 69 | return { 70 | url: resolved.href, 71 | format: 'module' 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /src/nodejs/pull.js: -------------------------------------------------------------------------------- 1 | const path = require('path') 2 | const tmp = require('tmp') 3 | const CID = require('cids') 4 | 5 | const __filename = fileURLToPath(import.meta.url) 6 | const __dirname = path.dirname(__filename) 7 | 8 | const store = storage(path.join(__dirname, 'cache')) 9 | 10 | const cache = new Map() 11 | 12 | const local = tmp.tmpDirSync() 13 | 14 | const pull = async cid => { 15 | if (!CID.isCID(cid)) cid = new CID(cid) 16 | const root = await get(cid) 17 | const pkg = root.decode() 18 | if (!pkg.type === 'reg') { 19 | throw new Error(`CID is not reg package, ${cid.toString()}`) 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /src/nodejs/push.js: -------------------------------------------------------------------------------- 1 | const createTypes = require('./types.js') 2 | const linker = require('./linker.js') 3 | 4 | const push = async (file, putBlock) => { 5 
const bent = require('bent')

// Minimal HTTP client for the reg registry. `token` is a GitHub
// personal access token used only to authenticate the caller.
const main = (token, host = 'reg.mikeal.workers.dev') => {
  // Fix: `auth` was assigned without a declaration, leaking an implicit
  // global (and throwing in strict mode).
  const auth = `?GITHUB_TOKEN=${token}`
  const get = bent(`https://${host}/`, 'json')
  const put = bent('PUT', `https://${host}/`, 'json')

  // Point `name`/`version` (and optionally "latest") at the package CID.
  const alias = async (name, pkg, version, latest = true) => {
    const body = { version, pkg, latest }
    const info = await put(name + '/_publish' + auth, body)
    return info
  }

  // Fetch package info for a registry name; on failure, append the
  // response body to the error message before rethrowing.
  const pkg = async name => {
    let pkg
    try {
      pkg = await get(name + '/_pkg')
    } catch (e) {
      if (e.responseBody) {
        const body = (await e.responseBody).toString()
        e.message += `\nMessage\n${body}`
      }
      throw e
    }
    return pkg
  }
  return { alias, pkg }
}
module.exports = main
const schema = require('../schema.json')
const gen = require('@ipld/schema-gen')
const { createTypes } = require('@ipld/unixfsv2')

// Build the full type table for reg: the UnixFSv2 base types merged
// with the Package types generated from schema.json. The base types are
// passed into the generator so schema links can resolve to them.
const create = (opts = {}) => {
  const baseTypes = createTypes(opts)
  const genOpts = { ...opts, types: baseTypes }
  return Object.assign({}, baseTypes, gen(schema, genOpts))
}

module.exports = create
"struct", 5 | "fields": { 6 | "file": { 7 | "type": { 8 | "kind": "link", 9 | "expectedType": "File" 10 | } 11 | }, 12 | "deps": { 13 | "type": "PackageMap" 14 | } 15 | }, 16 | "representation": { 17 | "map": {} 18 | } 19 | }, 20 | "PackageMap": { 21 | "kind": "map", 22 | "keyType": "String", 23 | "valueType": { 24 | "kind": "link", 25 | "expectedType": "Package" 26 | } 27 | }, 28 | "Package": { 29 | "kind": "union", 30 | "representation": { 31 | "keyed": { 32 | "v1": "PackageV1" 33 | } 34 | } 35 | } 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /test/basic.spec.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | const tmp = require('tmp') 3 | const path = require('path') 4 | const assert = require('assert') 5 | const { writeFile } = require('fs').promises 6 | const { it } = require('mocha') 7 | const { push, storage } = require('../index.js') 8 | const { execSync } = require('child_process') 9 | const test = it 10 | 11 | const full = p => path.join(__dirname, p) 12 | 13 | const store = storage.local() 14 | 15 | const fixture = async p => { 16 | const pkg = await push(full('fixture/src/hello-world.js'), store.put) 17 | return pkg.block().cid() 18 | } 19 | 20 | const write = async str => { 21 | const f = tmp.tmpNameSync() + '.js' 22 | await writeFile(f, Buffer.from(str)) 23 | return f 24 | } 25 | 26 | const bin = path.join(__dirname, '..', 'cli.js') 27 | 28 | const run = script => { 29 | const ret = execSync(`${bin} ${script}`) 30 | return ret.toString() 31 | } 32 | 33 | /* disable tests temporarily 34 | test('basic push and import', async () => { 35 | const pkg = await fixture('src/hello-world.js', store.put) 36 | const module = ` 37 | import { hello } from '@reg/${pkg.toString()}' 38 | console.log(hello) 39 | ` 40 | const f = await write(module) 41 | assert.strictEqual(run(f), 'world\n') 42 | }) 43 | */ 44 | 
-------------------------------------------------------------------------------- /test/fixture/hello-world.js: -------------------------------------------------------------------------------- 1 | import { hello } from './src/hello-world.js' 2 | import { hello as hello2 } from '@test/test' 3 | -------------------------------------------------------------------------------- /test/fixture/registry/main.js: -------------------------------------------------------------------------------- 1 | console.log('ok') 2 | 3 | export default 'hello world' 4 | -------------------------------------------------------------------------------- /test/fixture/src/hello-world.js: -------------------------------------------------------------------------------- 1 | const hello = 'world' 2 | export { hello } 3 | --------------------------------------------------------------------------------