├── misc ├── k8s │ ├── build │ │ └── .gitkeep │ ├── .gitignore │ ├── templates │ │ ├── web-service.js │ │ ├── storage-claim-pvc.js │ │ ├── redis-service.js │ │ ├── storage-service.js │ │ ├── registry-service.js │ │ ├── redis-deployment.js │ │ ├── entropic-env-configmap.js │ │ ├── web-deployment.js │ │ ├── registry-deployment.js │ │ └── storage-deployment.js │ ├── index.js │ └── README.md ├── install.sh ├── build-images.sh └── nginx.conf ├── data └── .gitkeep ├── services ├── storage │ ├── migrations │ │ ├── .gitkeep │ │ ├── 20190518210559-add-invitation.js │ │ ├── 20190519210115-namespace-member-accepted.js │ │ ├── 20190429000001-add-builtin-namespaces.js │ │ ├── 20190508000000-add-derived-files-and-denormalized-integrity.js │ │ ├── 20190428180547-add-users.js │ │ ├── 20190505000000-add-2fa-fields-and-package-version-fields.js │ │ ├── 20190514000000-add-namespace-hostnames.js │ │ ├── 20190503000000-add-tokens-and-authentications.js │ │ └── 20190429000000-more-tables.js │ ├── db-migrate.json │ ├── README.md │ ├── test │ │ ├── utils │ │ │ ├── logger.js │ │ │ ├── users.js │ │ │ ├── registry.js │ │ │ └── postgres.js │ │ ├── 00-basics.spec.js │ │ ├── models │ │ │ ├── package.spec.js │ │ │ └── token.spec.js │ │ ├── 02-validations.spec.js │ │ └── 01-packages.spec.js │ ├── healthcheck.js │ ├── middleware │ │ ├── object-store.js │ │ ├── internal-auth.js │ │ ├── transaction.js │ │ └── postgres.js │ ├── .env-example │ ├── decorators │ │ ├── find-invitee.js │ │ ├── authn.js │ │ ├── is-namespace-member.js │ │ ├── package-exists.js │ │ └── can-write-package.js │ ├── server.js │ ├── models │ │ ├── host.js │ │ ├── namespace.js │ │ ├── authentication.js │ │ ├── namespace-member.js │ │ ├── maintainer.js │ │ ├── user.js │ │ ├── token.js │ │ ├── package.js │ │ └── package-version.js │ ├── handlers │ │ ├── index.js │ │ └── providers.js │ ├── package.json │ └── lib │ │ ├── validations.js │ │ ├── object-storage.js │ │ └── clone-legacy-package.js ├── web │ ├── README.md │ ├── .env-example 
│ ├── healthcheck.js │ ├── server.js │ ├── lib │ │ ├── session-map.js │ │ └── providers.js │ ├── handlers │ │ └── index.js │ ├── package.json │ └── middleware │ │ └── session.js ├── common │ ├── README.md │ └── boltzmann │ │ ├── example.js │ │ ├── middleware │ │ ├── storage-api.js │ │ ├── flush-request.js │ │ ├── requestid.js │ │ ├── index.js │ │ ├── redis.js │ │ ├── logger.js │ │ └── dev-only.js │ │ ├── package.json │ │ ├── index.js │ │ ├── request-handler.js │ │ ├── router.js │ │ └── response.js ├── registry │ ├── .env-example │ ├── healthcheck.js │ ├── server.js │ ├── decorators │ │ └── authn.js │ ├── handlers │ │ ├── auth.js │ │ ├── index.js │ │ └── users.js │ ├── package.json │ ├── Package.toml │ ├── middleware │ │ └── bearer-auth.js │ └── README.md ├── workers │ ├── package.json │ └── package-lock.json └── Dockerfile ├── .mailmap ├── .prettierrc ├── docs ├── assets │ ├── install.png │ ├── install.graffle │ └── community-ownership-pact.png ├── installing │ ├── arch │ │ └── PKGBUILD │ ├── homebrew │ │ └── ds.rb │ └── README.md ├── HACKING.md ├── LORE.md ├── rfcs │ └── 0001-entropic-workflow.md └── meetings │ └── 20190708-minutes.md ├── .github ├── FUNDING.yml ├── ISSUE_TEMPLATE.md └── PULL_REQUEST_TEMPLATE.md ├── cli ├── test │ ├── utils │ │ ├── FakeLogger.js │ │ └── FakeApi.js │ └── lib │ │ ├── commands │ │ └── whoami.js │ │ └── main.js ├── lib │ ├── commands │ │ ├── -v.js │ │ ├── help-download-en_us.txt │ │ ├── help-publish-en_us.txt │ │ ├── help-invite-en_us.txt │ │ ├── --version.js │ │ ├── help-login-en_us.txt │ │ ├── help-whoami-en_us.txt │ │ ├── whoami.js │ │ ├── packages.js │ │ ├── ping.js │ │ ├── login.js │ │ ├── invite.js │ │ ├── join.js │ │ ├── accept.js │ │ ├── decline.js │ │ ├── help.js │ │ ├── invitations.js │ │ ├── members.js │ │ ├── maintainerships.js │ │ └── download.js │ ├── errors.js │ ├── api.js │ ├── utils.js │ ├── logger.js │ ├── fetch-package-version.js │ ├── load-package-toml.js │ ├── fetch.js │ ├── config.js │ ├── fetch-package.js │ ├── 
canonicalize-spec.js │ ├── fetch-object.js │ └── main.js ├── snapcraft.yaml ├── Package.toml ├── README.md └── package.json ├── .editorconfig ├── .dockerignore ├── .gitignore ├── .eslintrc ├── .markdownlint.json ├── tools └── vcpm-sync │ └── package.json ├── package.json ├── .circleci └── config.yml └── docker-compose.yml /misc/k8s/build/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /data/.gitkeep: -------------------------------------------------------------------------------- 1 | keep this dir around 2 | -------------------------------------------------------------------------------- /misc/k8s/.gitignore: -------------------------------------------------------------------------------- 1 | build/*.json 2 | -------------------------------------------------------------------------------- /services/storage/migrations/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.mailmap: -------------------------------------------------------------------------------- 1 | Felix Wu 2 | -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "singleQuote": true, 3 | "printWidth": 80, 4 | "tabWidth": 2 5 | } 6 | -------------------------------------------------------------------------------- /docs/assets/install.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/entropic-dev/entropic/HEAD/docs/assets/install.png -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model 
platforms 2 | 3 | open_collective: entropic 4 | -------------------------------------------------------------------------------- /docs/assets/install.graffle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/entropic-dev/entropic/HEAD/docs/assets/install.graffle -------------------------------------------------------------------------------- /cli/test/utils/FakeLogger.js: -------------------------------------------------------------------------------- 1 | module.exports = class FakeLogger { 2 | static log() {} 3 | static error() {} 4 | }; 5 | -------------------------------------------------------------------------------- /cli/lib/commands/-v.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = v; 4 | 5 | function v() { 6 | require('./--version')(); 7 | } 8 | -------------------------------------------------------------------------------- /docs/assets/community-ownership-pact.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/entropic-dev/entropic/HEAD/docs/assets/community-ownership-pact.png -------------------------------------------------------------------------------- /cli/lib/commands/help-download-en_us.txt: -------------------------------------------------------------------------------- 1 | Help for: ds download 2 | 3 | Fetch & insert into cache the content blobs for the named package-version. 
4 | -------------------------------------------------------------------------------- /cli/lib/errors.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = { 4 | CouldNotReadConfigFile: class extends Error {}, 5 | CouldNotParseConfigToml: class extends Error {} 6 | }; 7 | -------------------------------------------------------------------------------- /cli/lib/commands/help-publish-en_us.txt: -------------------------------------------------------------------------------- 1 | Help for: ds publish 2 | 3 | Run this command to publish your package to the registry specified in your 4 | Package.toml. 5 | -------------------------------------------------------------------------------- /cli/lib/commands/help-invite-en_us.txt: -------------------------------------------------------------------------------- 1 | Help for: ds invite [name] --to [pkg/namespace] 2 | 3 | Invite a namespace to join the maintainers list for a given package or 4 | namespace. 5 | -------------------------------------------------------------------------------- /cli/lib/commands/--version.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = version; 4 | 5 | async function version() { 6 | const pkg = require('../../package.json'); 7 | console.log(`v${pkg.version}`); 8 | } 9 | -------------------------------------------------------------------------------- /services/web/README.md: -------------------------------------------------------------------------------- 1 | # website 2 | 3 | Entropic's website goes here. 4 | 5 | Perhaps you know how to make websites. Check out the [project requirements](https://github.com/entropic-dev/entropic/issues/107). 
6 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | end_of_line = LF 5 | charset = utf-8 6 | max_line_length = 120 7 | indent_style = space 8 | insert_final_newline = true 9 | trim_trailing_whitespace = true 10 | indent_size = 2 11 | -------------------------------------------------------------------------------- /services/storage/db-migrate.json: -------------------------------------------------------------------------------- 1 | { 2 | "entropic": { 3 | "driver": "postgres", 4 | "database": { 5 | "ENV": "PGDATABASE" 6 | }, 7 | "host": { 8 | "ENV": "PGHOST" 9 | } 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | .env 3 | .nyc_output 4 | coverage 5 | legacy-cache 6 | allowed-list 7 | .DS_Store 8 | *~ 9 | *.swp 10 | services/data/entropic-cache 11 | .opt-in 12 | .opt-out 13 | *.snap 14 | 15 | # IDEs 16 | .idea 17 | .vscode 18 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | .env 3 | .nyc_output 4 | coverage 5 | legacy-cache 6 | allowed-list 7 | .DS_Store 8 | *~ 9 | *.swp 10 | services/data/entropic-cache 11 | .opt-in 12 | .opt-out 13 | *.snap 14 | 15 | # IDEs 16 | .idea 17 | .vscode 18 | -------------------------------------------------------------------------------- /services/storage/README.md: -------------------------------------------------------------------------------- 1 | # entropic registry storage 2 | 3 | Internal APIs used by the website, the entropic public API service, and the workers. 
4 | 5 | A chunk of what's in the main registry is likely to move here, because this should be the only service that ever touches postgres. 6 | -------------------------------------------------------------------------------- /services/common/README.md: -------------------------------------------------------------------------------- 1 | # entropic services common 2 | 3 | Sub-modules used by more than one entropic service. 4 | 5 | The plan is to use `file:` specifiers in package.json to pull in code from here until we're fully self-hosting, at which time we'll publish modules to a nice stable instance of ourselves. 6 | -------------------------------------------------------------------------------- /services/web/.env-example: -------------------------------------------------------------------------------- 1 | PORT=3001 2 | REDIS_URL=redis://redis:6379 3 | STORAGE_API_URL=http://storage:3002 4 | EXTERNAL_HOST=http://localhost:3001 5 | OAUTH_GITHUB_CLIENT=gh_client_id_here 6 | OAUTH_GITHUB_SECRET=gh_secret_here 7 | SESSION_SECRET="it's a really good secret folks, but I'll never tell what it is." 8 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | # Is this a feature request or a bug? 
2 | 3 | # Expected behavior: 4 | 5 | # Actual behavior: 6 | 7 | # Steps to replicate: 8 | 9 | # Environment info (where relevant) 10 | 11 | - Browser: 12 | - Operating System & Version: 13 | - Version of Node (`node -v`): 14 | - Version of ds (`ds --version`): 15 | -------------------------------------------------------------------------------- /services/registry/.env-example: -------------------------------------------------------------------------------- 1 | NODE_ENV=dev 2 | DEV_LATENCY_ERROR_MS=10000 3 | REDIS_URL=redis://redis:6379 4 | PORT=3000 5 | CACHE_DIR=../data/entropic-cache 6 | EXTERNAL_HOST=http://localhost:3000 7 | SESSION_SECRET=long_pw_for_encrypting_sessions_here 8 | SESSION_EXPIRY_SECONDS=31536000 9 | WEB_HOST=http://localhost:3001 10 | STORAGE_API_URL=http://storage:3002 11 | -------------------------------------------------------------------------------- /cli/lib/commands/help-login-en_us.txt: -------------------------------------------------------------------------------- 1 | Help for: ds login 2 | 3 | Run this command to login to the preferred registry specified in your 4 | ~/.entropicrc file. (Default: https://registry.entropic.dev). Running the 5 | command will open a browser window to authorize the registy access to 6 | your Github account for authorization. Return to the terminal to complete 7 | the login. 8 | -------------------------------------------------------------------------------- /cli/lib/commands/help-whoami-en_us.txt: -------------------------------------------------------------------------------- 1 | Help for: ds whoami [--registry=https://your.registry.here] 2 | 3 | Find out the username associated with the local token on a given registry. 4 | Default registry is https://registry.entropic.dev; a different registry may 5 | be passed with `--registry`. 
The URL for this registry must be a fully- 6 | qualified domain name, including `https://` 7 | -------------------------------------------------------------------------------- /services/workers/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "workers", 3 | "version": "1.0.0", 4 | "description": "do work with beanstalkd", 5 | "keywords": [], 6 | "license": "Apache-2.0", 7 | "main": "index.js", 8 | "private": true, 9 | "scripts": { 10 | "test": "echo \"Error: no test specified\" && exit 1" 11 | }, 12 | "dependencies": { 13 | "beanstalkd": "~2.2.1" 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /misc/k8s/templates/web-service.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | apiVersion: 'v1', 3 | kind: 'Service', 4 | metadata: { 5 | creationTimestamp: null, 6 | labels: { 'entropic-service': 'web' }, 7 | name: 'web' 8 | }, 9 | spec: { 10 | ports: [{ name: '3001', port: 3001, targetPort: 3001 }], 11 | selector: { 'entropic-service': 'web' } 12 | }, 13 | status: { loadBalancer: {} } 14 | }; 15 | -------------------------------------------------------------------------------- /misc/k8s/templates/storage-claim-pvc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | apiVersion: 'v1', 3 | kind: 'PersistentVolumeClaim', 4 | metadata: { 5 | creationTimestamp: null, 6 | labels: { 'entropic-service': 'storage-claim' }, 7 | name: 'storage-claim' 8 | }, 9 | spec: { 10 | accessModes: ['ReadWriteOnce'], 11 | resources: { requests: { storage: '100Mi' } } 12 | }, 13 | status: {} 14 | }; 15 | -------------------------------------------------------------------------------- /misc/k8s/templates/redis-service.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | apiVersion: 'v1', 3 | kind: 
'Service', 4 | metadata: { 5 | creationTimestamp: null, 6 | labels: { 'entropic-service': 'redis' }, 7 | name: 'redis' 8 | }, 9 | spec: { 10 | ports: [{ name: '6379', port: 6379, targetPort: 6379 }], 11 | selector: { 'entropic-service': 'redis' } 12 | }, 13 | status: { loadBalancer: {} } 14 | }; 15 | -------------------------------------------------------------------------------- /misc/k8s/templates/storage-service.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | apiVersion: 'v1', 3 | kind: 'Service', 4 | metadata: { 5 | creationTimestamp: null, 6 | labels: { 'entropic-service': 'storage' }, 7 | name: 'storage' 8 | }, 9 | spec: { 10 | ports: [{ name: '3000', port: 3000, targetPort: 3000 }], 11 | selector: { 'entropic-service': 'storage' } 12 | }, 13 | status: { loadBalancer: {} } 14 | }; 15 | -------------------------------------------------------------------------------- /services/storage/test/utils/logger.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = provideLogger; 4 | 5 | function provideLogger() { 6 | return function mw(next) { 7 | return async function inner(context) { 8 | context.logger = { 9 | info: () => {}, 10 | error: console.error 11 | }; 12 | const response = await next(context); 13 | 14 | return response; 15 | }; 16 | }; 17 | } 18 | -------------------------------------------------------------------------------- /cli/lib/api.js: -------------------------------------------------------------------------------- 1 | const fetch = require('node-fetch'); 2 | 3 | class Api { 4 | constructor(baseUrl) { 5 | this.baseUrl = baseUrl; 6 | } 7 | 8 | headers(token) { 9 | return { 10 | authorization: `Bearer ${token}` 11 | }; 12 | } 13 | 14 | whoAmI(token) { 15 | return fetch(`${this.baseUrl}/v1/auth/whoami`, { 16 | headers: this.headers(token) 17 | }); 18 | } 19 | } 20 | 21 | module.exports = Api; 22 | 
-------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "node": true, 4 | "es6": true, 5 | "mocha": true 6 | }, 7 | "parser": "babel-eslint", 8 | "parserOptions": { 9 | "ecmaVersion": 10 10 | }, 11 | "extends": ["eslint:recommended" ], 12 | "rules": { 13 | "strict": 0, 14 | "no-console": 0, 15 | "no-unused-vars": 0, 16 | "no-case-declarations": 0, 17 | "no-empty": 0, 18 | "prefer-const": 2 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /cli/lib/utils.js: -------------------------------------------------------------------------------- 1 | module.exports.whoAmI = async ({ registry, token, api }) => { 2 | const response = await api.whoAmI(token); 3 | 4 | let body = null; 5 | try { 6 | body = await response.json(); 7 | } catch (err) { 8 | throw new Error(`Caught error requesting "${registry}/v1/auth/whoami"`); 9 | } 10 | 11 | if (response.status > 399) { 12 | throw new Error(body.message || body); 13 | } 14 | 15 | return body.username; 16 | }; 17 | -------------------------------------------------------------------------------- /cli/lib/logger.js: -------------------------------------------------------------------------------- 1 | const chalk = require('chalk'); 2 | 3 | module.exports = class Logger { 4 | static log(msg) { 5 | console.log(chalk.default.yellow(msg)); 6 | } 7 | static error(msg, stacktrace = undefined) { 8 | console.log(chalk.default.red(msg)); 9 | 10 | // Don't print the stacktrace in red. 
That 11 | // would be overwhelming 12 | if (stacktrace) { 13 | console.log(stacktrace); 14 | } 15 | } 16 | static http(msg) {} 17 | }; 18 | -------------------------------------------------------------------------------- /misc/k8s/templates/registry-service.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | apiVersion: 'v1', 3 | kind: 'Service', 4 | metadata: { 5 | creationTimestamp: null, 6 | labels: { 'entropic-service': 'registry' }, 7 | name: 'registry' 8 | }, 9 | spec: { 10 | type: 'NodePort', 11 | ports: [{ name: '3000', port: 3000, targetPort: 3000, nodePort: 30303 }], 12 | selector: { 'entropic-service': 'registry' } 13 | }, 14 | status: { loadBalancer: {} } 15 | }; 16 | -------------------------------------------------------------------------------- /services/web/healthcheck.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | // Use Node http rather than installing curl 4 | // to reduce container size 5 | const request = require('http').request( 6 | { 7 | host: 'localhost', 8 | port: process.env.PORT || 3000, 9 | timeout: 1000 10 | }, 11 | res => { 12 | process.exit(res.statusCode === 200 ? 0 : 1); 13 | } 14 | ); 15 | 16 | request.on('error', () => { 17 | process.exit(1); 18 | }); 19 | 20 | request.end(); 21 | -------------------------------------------------------------------------------- /services/registry/healthcheck.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | // Use Node http rather than installing curl 4 | // to reduce container size 5 | const request = require('http').request( 6 | { 7 | host: 'localhost', 8 | port: process.env.PORT || 3000, 9 | timeout: 1000 10 | }, 11 | res => { 12 | process.exit(res.statusCode === 200 ? 
0 : 1); 13 | } 14 | ); 15 | 16 | request.on('error', () => { 17 | process.exit(1); 18 | }); 19 | 20 | request.end(); 21 | -------------------------------------------------------------------------------- /services/storage/healthcheck.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | // Use Node http rather than installing curl 4 | // to reduce container size 5 | const request = require('http').request( 6 | { 7 | host: 'localhost', 8 | port: process.env.PORT || 3002, 9 | timeout: 1000 10 | }, 11 | res => { 12 | process.exit(res.statusCode === 200 ? 0 : 1); 13 | } 14 | ); 15 | 16 | request.on('error', () => { 17 | process.exit(1); 18 | }); 19 | 20 | request.end(); 21 | -------------------------------------------------------------------------------- /.markdownlint.json: -------------------------------------------------------------------------------- 1 | { 2 | "default": true, 3 | "MD013": false, 4 | "MD025": false, 5 | "MD026": false, 6 | "MD028": false, 7 | "MD033": { 8 | "allowed_elements": [ 9 | "a", 10 | "b", 11 | "br", 12 | "details", 13 | "sub", 14 | "summary", 15 | "table", 16 | "td", 17 | "tr" 18 | ] 19 | }, 20 | "MD034": false, 21 | "MD040": false 22 | } 23 | -------------------------------------------------------------------------------- /misc/install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | 4 | cd /tmp 5 | 6 | if hash curl 2>/dev/null; then 7 | curl -SsL https://www.entropic.dev/ds-latest.tgz -o ds-latest.tgz 8 | elif hash wget 2>/dev/null; then 9 | wget https://www.entropic.dev/ds-latest.tgz -o ds-latest.tgz 10 | else 11 | echo 'Please install curl or wget!' 
12 | exit 1 13 | fi 14 | 15 | # engine-strict config forces checking against engines.node version constraint 16 | NPM_CONFIG_ENGINE_STRICT=true npm install -g ds-latest.tgz 17 | -------------------------------------------------------------------------------- /cli/test/utils/FakeApi.js: -------------------------------------------------------------------------------- 1 | module.exports = class FakeApi { 2 | constructor(response, status) { 3 | this.desiredResponse = response; 4 | this.desiredStatus = status; 5 | } 6 | 7 | async whoAmI() { 8 | return new Promise((resolve, reject) => { 9 | resolve({ 10 | status: this.desiredStatus, 11 | message: 'OK', 12 | json: () => 13 | new Promise((jsonRes, _jsonRej) => jsonRes(this.desiredResponse)) 14 | }); 15 | }); 16 | } 17 | }; 18 | -------------------------------------------------------------------------------- /services/storage/test/00-basics.spec.js: -------------------------------------------------------------------------------- 1 | /* eslint-env node, mocha */ 2 | 'use strict'; 3 | 4 | const fetch = require('node-fetch'); 5 | const demand = require('must'); 6 | const providePostgres = require('../utils/postgres'); 7 | const provideRegistry = require('../utils/registry'); 8 | 9 | describe('entropic', () => { 10 | it( 11 | 'has tests', 12 | providePostgres( 13 | provideRegistry(async url => { 14 | const result = await fetch(url); 15 | }) 16 | ) 17 | ); 18 | }); 19 | -------------------------------------------------------------------------------- /services/storage/middleware/object-store.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = createObjectStorageMW; 4 | 5 | const ObjectStorage = require('../lib/object-storage'); 6 | 7 | function createObjectStorageMW(strategyName = 'FileSystemStrategy') { 8 | return next => { 9 | const strategy = new ObjectStorage[strategyName](); 10 | const storage = new ObjectStorage(strategy); 11 | 12 | return (context, 
...args) => { 13 | context.storage = storage; 14 | return next(context, ...args); 15 | }; 16 | }; 17 | } 18 | -------------------------------------------------------------------------------- /services/web/server.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 'use strict'; 3 | 4 | require('dotenv').config(); 5 | 6 | const { run } = require('boltzmann'); 7 | const router = require('./handlers')(); 8 | 9 | const myMiddles = [ 10 | require('boltzmann/middleware/logger'), 11 | require('boltzmann/middleware/flush-request'), 12 | require('boltzmann/middleware/requestid'), 13 | require('boltzmann/middleware/redis'), 14 | require('boltzmann/middleware/storage-api'), 15 | require('./middleware/session') 16 | ]; 17 | 18 | run(router, myMiddles); 19 | -------------------------------------------------------------------------------- /services/common/boltzmann/example.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const { response, fork, make } = require('./index.js'); 4 | 5 | async function greeting(context, params) { 6 | return response.text(`greetings, ${params.human}`); 7 | } 8 | 9 | const router = fork.router()(fork.get('/greet/:human', greeting)); 10 | 11 | const server = make(router, [ 12 | require('./middleware/flush-request'), 13 | require('./middleware/requestid') 14 | ]); 15 | server.listen(process.env.PORT, '0.0.0.0'); 16 | console.log(`now listening on ${process.env.PORT}`); 17 | -------------------------------------------------------------------------------- /services/registry/server.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 'use strict'; 3 | 4 | require('dotenv').config(); 5 | 6 | const { run } = require('boltzmann'); 7 | const router = require('./handlers')(); 8 | 9 | const myMiddles = [ 10 | require('boltzmann/middleware/logger'), 11 | 
require('boltzmann/middleware/flush-request'), 12 | require('boltzmann/middleware/requestid'), 13 | require('boltzmann/middleware/redis'), 14 | require('boltzmann/middleware/storage-api'), 15 | require('./middleware/bearer-auth') 16 | ]; 17 | 18 | run(router, myMiddles); 19 | -------------------------------------------------------------------------------- /services/storage/.env-example: -------------------------------------------------------------------------------- 1 | NODE_ENV=dev 2 | DEV_LATENCY_ERROR_MS=10000 3 | POSTGRES_URL=postgres://postgres@db:5432/entropic_dev?connectionTimeoutMillis=5000 4 | PGHOST=db 5 | REDIS_URL=redis://redis:6379 6 | PORT=3002 7 | PGUSER=postgres 8 | PGDATABASE=entropic_dev 9 | CACHE_DIR=../data/entropic-cache 10 | OAUTH_GITHUB_CLIENT=gh_client_id_here 11 | OAUTH_GITHUB_SECRET=gh_secret_here 12 | OAUTH_PASSWORD=pw_for_encrypting_tokens_here 13 | EXTERNAL_HOST=http://localhost:3000 14 | SESSION_SECRET=long_pw_for_encrypting_sessions_here 15 | SESSION_EXPIRY_SECONDS=31536000 16 | -------------------------------------------------------------------------------- /services/common/boltzmann/middleware/storage-api.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = createStorageApi; 4 | 5 | const StorageAPI = require('../client'); 6 | 7 | function createStorageApi({ 8 | url = process.env.STORAGE_API_URL || 'http://localhost:3002' 9 | } = {}) { 10 | return next => { 11 | return async function(context) { 12 | context.storageApi = new StorageAPI({ 13 | url, 14 | requestId: context.id, 15 | logger: context.logger('storage-api') 16 | }); 17 | 18 | return next(context); 19 | }; 20 | }; 21 | } 22 | -------------------------------------------------------------------------------- /services/storage/decorators/find-invitee.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const Namespace = 
require('../models/namespace'); 4 | 5 | module.exports = findInvitee; 6 | 7 | function findInvitee(next) { 8 | return async (context, params) => { 9 | const ns = await Namespace.objects 10 | .get({ 11 | active: true, 12 | name: params.invitee, 13 | 'host.name': params.host, 14 | 'host.active': true 15 | }) 16 | .catch(Namespace.objects.NotFound, () => null); 17 | 18 | context.invitee = ns; 19 | return next(context, params); 20 | }; 21 | } 22 | -------------------------------------------------------------------------------- /services/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM mhart/alpine-node:12 2 | ARG SERVICE 3 | RUN addgroup -g 1000 -S node && \ 4 | adduser -u 1000 -S node -G node 5 | WORKDIR /services 6 | COPY --chown=node:node ${SERVICE} ${SERVICE} 7 | COPY --chown=node:node common common 8 | WORKDIR /services/${SERVICE} 9 | RUN npm install --quiet 10 | USER node 11 | ENV NODE_ENV=production \ 12 | TERM=linux \ 13 | TERMINFO=/etc/terminfo \ 14 | PORT=3000 15 | EXPOSE 3000 16 | HEALTHCHECK --interval=30s \ 17 | --timeout=2s \ 18 | --retries=10 \ 19 | CMD node /services/${SERVICE}/healthcheck.js 20 | CMD ["npm", "start"] 21 | -------------------------------------------------------------------------------- /cli/lib/commands/whoami.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = whoami; 4 | 5 | const figgy = require('figgy-pudding'); 6 | const { whoAmI } = require('../utils'); 7 | 8 | const whoamiOpts = figgy({ 9 | registry: { default: 'https://registry.entropic.dev' }, 10 | token: true, 11 | log: true, 12 | api: true 13 | }); 14 | 15 | async function whoami(opts) { 16 | opts = whoamiOpts(opts); 17 | 18 | try { 19 | const username = await whoAmI(opts); 20 | opts.log.log(username); 21 | return 0; 22 | } catch (err) { 23 | opts.log.error(err.message, err); 24 | return 1; 25 | } 26 | } 27 | 
-------------------------------------------------------------------------------- /services/web/lib/session-map.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = class SessionMap extends Map { 4 | constructor(...args) { 5 | super(...args); 6 | this.dirty = false; 7 | } 8 | 9 | set(key, value) { 10 | const current = this.get(key); 11 | const result = super.set(key, value); 12 | if (current !== value) { 13 | this.dirty = true; 14 | } 15 | return result; 16 | } 17 | 18 | delete(key) { 19 | const had = this.has(key); 20 | const result = super.delete(key); 21 | if (had) { 22 | this.dirty = true; 23 | } 24 | return result; 25 | } 26 | }; 27 | -------------------------------------------------------------------------------- /services/common/boltzmann/middleware/flush-request.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = flush; 4 | 5 | const { send } = require('micro'); 6 | 7 | function flush() { 8 | return next => { 9 | return async (context, ...args) => { 10 | const response = await next(context); 11 | 12 | if (response.headers) { 13 | for (const [header, value] of response.headers) { 14 | context.rawResponse.setHeader(header, value); 15 | } 16 | } 17 | 18 | await send(context.rawResponse, response.status, response.body); 19 | 20 | return response; 21 | }; 22 | }; 23 | } 24 | -------------------------------------------------------------------------------- /docs/installing/arch/PKGBUILD: -------------------------------------------------------------------------------- 1 | # https://aur.archlinux.org/packages/nodejs-entropic/ 2 | # Maintainer: Dian Fay 3 | pkgname=nodejs-entropic 4 | pkgver=0.0.1 5 | pkgrel=1 6 | pkgdesc="The Entropic package registry's ds command-line interface" 7 | arch=("x86_64") 8 | url="https://entropic.dev" 9 | license=("MIT") 10 | install=$pkgname.install 11 | source=( 12 | 
https://www.entropic.dev/ds-latest.tgz 13 | ) 14 | makedepends=( 15 | "npm" 16 | ) 17 | 18 | package() { 19 | npm install -g --user root --prefix "$pkgdir"/usr "$srcdir"/ds-latest.tgz 20 | } 21 | md5sums=('cb96ef64adcafe8b93e912028eb2fc1c') 22 | -------------------------------------------------------------------------------- /services/storage/test/utils/users.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = { 4 | createUser, 5 | createToken 6 | }; 7 | 8 | const Token = require('../../models/token'); 9 | const User = require('../../models/user'); 10 | 11 | async function createUser(username, email = `${username}@entropic.dev`) { 12 | return await User.signup(username, email, null); 13 | } 14 | 15 | async function createToken(username) { 16 | const user = username.id 17 | ? username 18 | : await User.objects.get({ active: true, name: username }); 19 | 20 | return await Token.create({ for: user, description: 'just a test' }); 21 | } 22 | -------------------------------------------------------------------------------- /services/storage/server.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 'use strict'; 3 | 4 | require('dotenv').config(); 5 | 6 | const { run } = require('boltzmann'); 7 | const router = require('./handlers')(); 8 | 9 | const myMiddles = [ 10 | require('boltzmann/middleware/logger'), 11 | require('boltzmann/middleware/flush-request'), 12 | require('boltzmann/middleware/requestid'), 13 | require('./middleware/postgres'), 14 | require('./middleware/transaction'), 15 | require('boltzmann/middleware/redis'), 16 | require('./middleware/internal-auth'), 17 | require('./middleware/object-store') 18 | ]; 19 | 20 | run(router, myMiddles); 21 | -------------------------------------------------------------------------------- /misc/build-images.sh: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -e 3 | 4 | version=$(git describe --exact 2>/dev/null || git rev-parse --short=10 HEAD) 5 | 6 | for service in registry storage web; do 7 | tag=entropicdev/$service:$version 8 | docker build -t $tag -f services/Dockerfile --build-arg SERVICE=$service services 9 | docker push $tag 10 | # If we're on a git tag, also push a `latest` so folks can pull 11 | # without specifying a docker tag 12 | if [[ $(git describe --exact 2>/dev/null) ]]; then 13 | latest_tag=entropicdev/$service:latest 14 | docker tag $tag $latest_tag 15 | docker push $latest_tag 16 | fi 17 | done 18 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | ## Change Type (Feature, Chore, Bug Fix, One Pager, etc.) 2 | 3 | ## Description 4 | 5 | 17 | 18 | ## How to test 19 | 20 | ## Checklist 21 | 22 | * [ ] Added tests / did not decrease code coverage 23 | * [ ] Tested in supported environments (common browsers or current Node) 24 | -------------------------------------------------------------------------------- /cli/snapcraft.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | name: entropic-ds 3 | summary: "ds: entropy delta; the entropic client" 4 | description: | 5 | A client for the federated entropy package registry, 6 | base: core18 7 | grade: devel 8 | adopt-info: ds 9 | 10 | parts: 11 | ds: 12 | override-pull: | 13 | snapcraftctl pull 14 | DS_SNAP_VERSION=${DS_SNAP_VERSION:-master} 15 | snapcraftctl set-version "${DS_SNAP_VERSION}" 16 | source: . 
17 | plugin: nodejs 18 | nodejs-version: 12.4.0 19 | nodejs-package-manager: npm 20 | 21 | apps: 22 | ds: 23 | command: ds 24 | plugs: 25 | - home 26 | - network-bind 27 | - network 28 | -------------------------------------------------------------------------------- /services/storage/migrations/20190518210559-add-invitation.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var dbm; 4 | var type; 5 | var seed; 6 | 7 | /** 8 | * We receive the dbmigrate dependency from dbmigrate initially. 9 | * This enables us to not have to rely on NODE_PATH. 10 | */ 11 | exports.setup = function(options, seedLink) { 12 | dbm = options.dbmigrate; 13 | type = dbm.dataType; 14 | seed = seedLink; 15 | }; 16 | 17 | exports.up = async function(db) { 18 | return await db.runSql(` 19 | alter table "maintainers" add column "accepted" boolean null default FALSE; 20 | `); 21 | }; 22 | 23 | exports.down = async function(db) {}; 24 | 25 | exports._meta = { 26 | version: 1 27 | }; 28 | -------------------------------------------------------------------------------- /services/storage/migrations/20190519210115-namespace-member-accepted.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var dbm; 4 | var type; 5 | var seed; 6 | 7 | /** 8 | * We receive the dbmigrate dependency from dbmigrate initially. 9 | * This enables us to not have to rely on NODE_PATH. 
10 | */ 11 | exports.setup = function(options, seedLink) { 12 | dbm = options.dbmigrate; 13 | type = dbm.dataType; 14 | seed = seedLink; 15 | }; 16 | 17 | exports.up = async function(db) { 18 | return await db.runSql(` 19 | alter table "namespace_members" add column "accepted" boolean null default FALSE; 20 | `); 21 | }; 22 | 23 | exports.down = async function(db) {}; 24 | 25 | exports._meta = { 26 | version: 1 27 | }; 28 | -------------------------------------------------------------------------------- /services/storage/migrations/20190429000001-add-builtin-namespaces.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var dbm; 4 | var type; 5 | var seed; 6 | 7 | /** 8 | * We receive the dbmigrate dependency from dbmigrate initially. 9 | * This enables us to not have to rely on NODE_PATH. 10 | */ 11 | exports.setup = function(options, seedLink) { 12 | dbm = options.dbmigrate; 13 | type = dbm.dataType; 14 | seed = seedLink; 15 | }; 16 | 17 | exports.up = async function(db) { 18 | return await db.runSql(` 19 | insert into "namespaces" ("name") VALUES ('abandonware'), ('legacy'); 20 | `); 21 | }; 22 | 23 | exports.down = function(db) { 24 | return null; 25 | }; 26 | 27 | exports._meta = { 28 | version: 1 29 | }; 30 | -------------------------------------------------------------------------------- /services/web/handlers/index.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = makeRouter; 4 | 5 | const ship = require('culture-ships').random(); 6 | const { fork, response } = require('boltzmann'); 7 | 8 | function makeRouter() { 9 | const router = fork.router()( 10 | fork.get('/', homepage), 11 | fork.get('/ping', ping), 12 | ...require('./auth') 13 | ); 14 | 15 | return router; 16 | } 17 | 18 | async function ping() { 19 | return response.text(ship); 20 | } 21 | 22 | async function homepage() { 23 | return response.html(` 24 | 25 | 26 | 
27 |

WELCOME TO ENTROPIC

28 | 29 | 30 | `); 31 | } 32 | -------------------------------------------------------------------------------- /services/common/boltzmann/middleware/requestid.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const bole = require('bole'); 4 | const uuid = require('uuid'); 5 | const os = require('os'); 6 | 7 | module.exports = createRequestId; 8 | 9 | function createRequestId( 10 | requestIdHeader = process.env.REQUEST_ID_HEADER || 'request-id' 11 | ) { 12 | const host = os.hostname(); 13 | return function mw(next) { 14 | return async function inner(context) { 15 | const request = context.request; 16 | context.id = request.headers[requestIdHeader] || `${host}_${uuid.v1()}`; 17 | context.logger = bole(context.id); 18 | const response = await next(context); 19 | 20 | return response; 21 | }; 22 | }; 23 | } 24 | -------------------------------------------------------------------------------- /services/storage/models/host.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const orm = require('ormnomnom'); 4 | const joi = require('@hapi/joi'); 5 | 6 | module.exports = class Host { 7 | constructor({ id, name, created, modified, active }) { 8 | this.id = id; 9 | 10 | // XXX: note that this name is NOT UNIQUE. 
11 | this.name = name; 12 | this.created = created; 13 | this.modified = modified; 14 | this.active = active; 15 | } 16 | }; 17 | 18 | module.exports.objects = orm(module.exports, { 19 | id: joi 20 | .number() 21 | .integer() 22 | .greater(-1) 23 | .required(), 24 | name: joi.string().min(1), 25 | created: joi.date(), 26 | modified: joi.date(), 27 | active: joi.boolean().default(true) 28 | }); 29 | -------------------------------------------------------------------------------- /cli/lib/fetch-package-version.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = fetchPackageVersion; 4 | 5 | const { pipeline: _ } = require('stream'); 6 | const { promisify } = require('util'); 7 | const fetch = require('./fetch'); 8 | const cacache = require('cacache'); 9 | const ssri = require('ssri'); 10 | 11 | const pipeline = promisify(_); 12 | 13 | const fetchObject = require('./fetch-object'); 14 | 15 | // This used to be a different endpoint. 
16 | async function fetchPackageVersion( 17 | { registry, cache }, 18 | name, 19 | version, 20 | integrity 21 | ) { 22 | const data = await fetchObject( 23 | { registry, cache }, 24 | integrity, 25 | 'please load it' 26 | ); 27 | 28 | return JSON.parse(String(data.data)); 29 | } 30 | -------------------------------------------------------------------------------- /misc/k8s/index.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const fs = require('fs'); 4 | const { resolve } = require('path'); 5 | const buildDir = resolve(__dirname, 'build'); 6 | 7 | // clean up old files 8 | fs.readdirSync(buildDir) 9 | .filter(f => f.endsWith('.json')) 10 | .forEach(f => { 11 | fs.unlink(`${buildDir}/${f}`, err => { 12 | if (err) { 13 | throw err; 14 | } 15 | }); 16 | }); 17 | 18 | // write new json files for kubectl 19 | fs.readdirSync(resolve(__dirname, 'templates')) 20 | .filter(f => f.endsWith('.js')) 21 | .forEach(file => { 22 | fs.writeFileSync( 23 | resolve(buildDir, file.replace(/\.js$/, '.json')), 24 | JSON.stringify(require(`./templates/${file}`), null, 2) 25 | ); 26 | }); 27 | -------------------------------------------------------------------------------- /cli/Package.toml: -------------------------------------------------------------------------------- 1 | name = "chris@registry.entropic.dev/ds" 2 | version = "0.0.0-beta" 3 | 4 | [dependencies] 5 | "@iarna/toml" = "^2.2.3" 6 | "legacy@registry.entropic.dev/figgy-pudding" = "^3.5.1" 7 | "legacy@registry.entropic.dev/form-data" = "^2.3.3" 8 | "legacy@registry.entropic.dev/minimist" = "^1.2.0" 9 | "legacy@registry.entropic.dev/node-fetch" = "^2.5.0" 10 | "legacy@registry.entropic.dev/npm-packlist" = "^1.4.1" 11 | "legacy@registry.entropic.dev/npm-profile" = "^4.0.1" 12 | "legacy@registry.entropic.dev/npmlog" = "^4.1.2" 13 | "legacy@registry.entropic.dev/opener" = "^1.5.1" 14 | "legacy@registry.entropic.dev/read-package-tree" = "^5.2.2" 15 | 
"legacy@registry.entropic.dev/ssri" = "^6.0.1" 16 | "legacy@registry.entropic.dev/user-home" = "^2.0.0" 17 | -------------------------------------------------------------------------------- /cli/lib/load-package-toml.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = load; 4 | 5 | const { promises: fs } = require('graceful-fs'); 6 | const toml = require('@iarna/toml'); 7 | const path = require('path'); 8 | 9 | async function load(dir) { 10 | do { 11 | try { 12 | const src = await fs.readFile(path.join(dir, 'Package.toml'), 'utf8'); 13 | return { location: dir, content: toml.parse(src) }; 14 | } catch (err) { 15 | if (err.code !== 'ENOENT') { 16 | throw err; 17 | } 18 | } 19 | 20 | const newDir = path.resolve(dir, '..'); 21 | if (newDir === dir) { 22 | throw new Error('Could not find Package.toml.'); 23 | } 24 | 25 | dir = newDir; 26 | } while (true); // eslint-disable-line no-constant-condition 27 | } 28 | -------------------------------------------------------------------------------- /docs/installing/homebrew/ds.rb: -------------------------------------------------------------------------------- 1 | require "language/node" 2 | 3 | # Keep the version and version in the test up to date! 
4 | # Get the sha256 with shasum -a 256 ds-latest.tgz 5 | 6 | class Ds < Formula 7 | desc "ds: entropy delta; the entropic client" 8 | homepage "https://github.com/entropic-dev/entropic/tree/master/cli" 9 | url "https://www.entropic.dev/ds-latest.tgz" 10 | sha256 "4d98654bddd2d4e31450a0cd9d45514d3257ff218f243feca7919e2b0e8ed3b8" 11 | depends_on "node" 12 | version "1.0.0" 13 | 14 | def install 15 | system "npm", "install" , *Language::Node.std_npm_install_args(libexec) 16 | bin.install_symlink Dir["#{libexec}/bin/*"] 17 | end 18 | 19 | test do 20 | assert_match "v1.0.0", shell_output("#{bin}/ds --version") 21 | end 22 | end 23 | -------------------------------------------------------------------------------- /cli/lib/fetch.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = fetchWithAgent; 4 | 5 | const dns = require('dns'); 6 | const URL = require('url').URL; 7 | const http = require('http'); 8 | const fetch = require('node-fetch'); 9 | const https = require('https'); 10 | 11 | const httpAgent = new http.Agent({ 12 | family: 6, 13 | hints: dns.ADDRCONFIG | dns.V4MAPPED 14 | }); 15 | const httpsAgent = new https.Agent({ 16 | family: 6, 17 | hints: dns.ADDRCONFIG | dns.V4MAPPED 18 | }); 19 | 20 | function fetchWithAgent(resource, init) { 21 | if (!init) init = {}; 22 | 23 | const url = new URL(resource); 24 | if (url.protocol == 'https:') { 25 | init.agent = httpsAgent; 26 | } else { 27 | init.agent = httpAgent; 28 | } 29 | 30 | return fetch(resource, init); 31 | } 32 | -------------------------------------------------------------------------------- /services/storage/models/namespace.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const orm = require('ormnomnom'); 4 | const joi = require('@hapi/joi'); 5 | 6 | const Host = require('./host'); 7 | 8 | module.exports = class Namespace { 9 | constructor({ id, name, host, created, 
modified, active }) { 10 | this.id = id; 11 | this.name = name; 12 | this.host = host; 13 | this.created = created; 14 | this.modified = modified; 15 | this.active = active; 16 | } 17 | }; 18 | 19 | module.exports.objects = orm(module.exports, { 20 | id: joi 21 | .number() 22 | .integer() 23 | .greater(-1) 24 | .required(), 25 | name: joi.string().min(1), 26 | host: orm.fk(Host), 27 | created: joi.date(), 28 | modified: joi.date(), 29 | active: joi.boolean().default(true) 30 | }); 31 | -------------------------------------------------------------------------------- /misc/k8s/templates/redis-deployment.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | apiVersion: 'extensions/v1beta1', 3 | kind: 'Deployment', 4 | metadata: { 5 | creationTimestamp: null, 6 | labels: { 'entropic-service': 'redis' }, 7 | name: 'redis' 8 | }, 9 | spec: { 10 | replicas: 1, 11 | strategy: {}, 12 | template: { 13 | metadata: { 14 | creationTimestamp: null, 15 | labels: { 'entropic-service': 'redis' } 16 | }, 17 | spec: { 18 | containers: [ 19 | { 20 | image: 'redis:alpine', 21 | name: 'redis', 22 | ports: [{ containerPort: 6379 }], 23 | resources: {} 24 | } 25 | ], 26 | restartPolicy: 'Always' 27 | } 28 | } 29 | }, 30 | status: {} 31 | }; 32 | -------------------------------------------------------------------------------- /cli/lib/commands/packages.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | // This is kind of a toy command because it will swiftly 4 | // become unwieldy with large numbers of packages. But it's fun for now! 
5 | 6 | const fetch = require('../fetch'); 7 | const figgy = require('figgy-pudding'); 8 | 9 | module.exports = packages; 10 | 11 | const pkgOps = figgy({ 12 | registry: true, 13 | log: { default: require('npmlog') } 14 | }); 15 | 16 | async function packages(opts) { 17 | opts = pkgOps(opts); 18 | const response = await fetch(`${opts.registry}/v1/packages`); 19 | const body = await response.json(); 20 | if (!body.objects) { 21 | console.error('Something went wrong:'); 22 | console.error(body); 23 | return 1; 24 | } 25 | 26 | body.objects.forEach(p => { 27 | console.log(p.name); 28 | }); 29 | return 0; 30 | } 31 | -------------------------------------------------------------------------------- /services/storage/decorators/authn.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = { required, optional }; 4 | 5 | const { response } = require('boltzmann'); 6 | 7 | function optional(next) { 8 | return async (context, ...args) => { 9 | context.user = await context.getUser(); 10 | return next(context, ...args); 11 | }; 12 | } 13 | 14 | function required(next) { 15 | return optional((context, ...args) => { 16 | if (!context.user) { 17 | if (!context.request.headers.bearer) { 18 | return response.authneeded( 19 | 'You must be logged in to access this resource (try running "ds login")' 20 | ); 21 | } 22 | return response.error( 23 | 'You are not authorized to access this resource', 24 | 403 25 | ); 26 | } 27 | 28 | return next(context, ...args); 29 | }); 30 | } 31 | -------------------------------------------------------------------------------- /docs/HACKING.md: -------------------------------------------------------------------------------- 1 | # Hacking 2 | 3 | ## Running Services 4 | 5 | * [Get Docker](https://docs.docker.com/install/) 6 | * [Get Docker Compose](https://docs.docker.com/compose/install/) 7 | * [Get Node](https://nodejs.org/en/download/) 8 | 9 | Once you have Node, Docker, and 
Docker Compose, 10 | `cp services/registry/.env-example services/registry/.env` 11 | `cp services/storage/.env-example services/storage/.env` 12 | `cp services/web/.env-example services/web/.env` 13 | and make any adjustments you like. 14 | 15 | run `npm i` in services/registry/, services/storage/, services/web, services/workers 16 | then `npm start` (or `docker-compose up`) in main directory 17 | 18 | Then go to . 19 | 20 | ## Hooks 21 | 22 | To run linting and formatting pre-commit, add a file in the project root called 23 | `.opt-in` with the content `pre-commit`. 24 | -------------------------------------------------------------------------------- /services/storage/migrations/20190508000000-add-derived-files-and-denormalized-integrity.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var dbm; 4 | var type; 5 | var seed; 6 | 7 | /** 8 | * We receive the dbmigrate dependency from dbmigrate initially. 9 | * This enables us to not have to rely on NODE_PATH. 10 | */ 11 | exports.setup = function(options, seedLink) { 12 | dbm = options.dbmigrate; 13 | type = dbm.dataType; 14 | seed = seedLink; 15 | }; 16 | 17 | exports.up = async function(db) { 18 | return await db.runSql(` 19 | alter table "package_versions" add column "derivedFiles" jsonb not null default '{}'::jsonb; 20 | alter table "packages" add column "version_integrities" jsonb not null default '{}'::jsonb; 21 | 22 | `); 23 | }; 24 | 25 | exports.down = function(db) { 26 | return null; 27 | }; 28 | 29 | exports._meta = { 30 | version: 1 31 | }; 32 | -------------------------------------------------------------------------------- /services/common/boltzmann/middleware/index.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const isDev = require('are-we-dev'); 4 | 5 | module.exports = { 6 | build 7 | }; 8 | 9 | function build(middleware) { 10 | // We are NOT in production. 
11 | if (isDev()) { 12 | const dev = require('./dev-only'); 13 | 14 | // Add a middleware that runs between each middleware layer so we can detect 15 | // slow views, hangs, etc. 16 | const result = middleware.reduce((lhs, rhs) => { 17 | const [mw, ...args] = Array.isArray(rhs) ? rhs : [rhs]; 18 | return [...lhs, dev(mw), mw(...args)]; 19 | }, []); 20 | 21 | result.push(dev('registry/handlers/*')); 22 | return result; 23 | } 24 | 25 | // Build for production. 26 | return middleware.map(xs => { 27 | const [mw, ...args] = Array.isArray(xs) ? xs : [xs]; 28 | return xs(...args); 29 | }); 30 | } 31 | -------------------------------------------------------------------------------- /tools/vcpm-sync/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "vcpm-sync", 3 | "description": "sync a package from vcpm into the legacy namespace of an entropic instance", 4 | "version": "1.0.0", 5 | "author": "Chris Dickinson (http://neversaw.us/)", 6 | "bin": "./main.js", 7 | "bugs": { 8 | "url": "https://github.com/entropic-dev/entropic/issues" 9 | }, 10 | "dependencies": { 11 | "form-data": "^2.3.3", 12 | "minimist": "^1.2.0", 13 | "node-fetch": "^2.5.0", 14 | "pacote": "^9.5.0" 15 | }, 16 | "homepage": "https://github.com/entopic-dev/entropic#readme", 17 | "keywords": [], 18 | "license": "MIT", 19 | "main": "index.js", 20 | "repository": { 21 | "type": "git", 22 | "url": "git://github.com/entopic-dev/entropic.git" 23 | }, 24 | "scripts": { 25 | "test": "echo \"Error: no test specified\" && exit 1" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /services/storage/migrations/20190428180547-add-users.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var dbm; 4 | var type; 5 | var seed; 6 | 7 | /** 8 | * We receive the dbmigrate dependency from dbmigrate initially. 
9 | * This enables us to not have to rely on NODE_PATH. 10 | */ 11 | exports.setup = function(options, seedLink) { 12 | dbm = options.dbmigrate; 13 | type = dbm.dataType; 14 | seed = seedLink; 15 | }; 16 | 17 | exports.up = async function(db) { 18 | return await db.runSql(` 19 | CREATE TABLE IF NOT EXISTS "users" ( 20 | id SERIAL PRIMARY KEY, 21 | name text UNIQUE NOT NULL, 22 | email text, 23 | created TIMESTAMP DEFAULT NOW(), 24 | modified TIMESTAMP DEFAULT NOW(), 25 | active BOOLEAN DEFAULT TRUE 26 | ); 27 | `); 28 | }; 29 | 30 | exports.down = function(db) { 31 | return null; 32 | }; 33 | 34 | exports._meta = { 35 | version: 1 36 | }; 37 | -------------------------------------------------------------------------------- /services/storage/handlers/index.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = makeRouter; 4 | 5 | const ship = require('culture-ships').random(); 6 | 7 | const { response, fork } = require('boltzmann'); 8 | const pkg = require('../package.json'); 9 | 10 | function makeRouter() { 11 | const router = fork.router()( 12 | fork.get('/', version), 13 | ...require('./providers'), 14 | ...require('./users'), 15 | ...require('./packages'), 16 | ...require('./maintainers'), 17 | ...require('./namespaces'), 18 | 19 | fork.get('/ping', ping) 20 | ); 21 | 22 | return router; 23 | } 24 | 25 | async function version() { 26 | const data = { 27 | server: 'entropic', 28 | version: pkg.version, 29 | message: ship, 30 | website: 'https://www.entropic.dev' 31 | }; 32 | return response.json(data); 33 | } 34 | 35 | async function ping() { 36 | return response.text(ship); 37 | } 38 | -------------------------------------------------------------------------------- /services/registry/decorators/authn.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = { anonymous, required }; 4 | 5 | const { response } = 
require('boltzmann'); 6 | 7 | function required(next) { 8 | return async function(context, params) { 9 | if (!context.user) { 10 | return response.error( 11 | `You must be authenticated to access "${context.request.url}"`, 12 | 401, 13 | { 14 | 'www-authenticate': 'Bearer' 15 | } 16 | ); 17 | } 18 | 19 | return next(context, params); 20 | }; 21 | } 22 | 23 | function anonymous(redirect = '/') { 24 | if (typeof redirect === 'function') { 25 | return anonymous('/')(redirect); 26 | } 27 | 28 | return next => { 29 | return async function(context, params) { 30 | if (context.user) { 31 | return response.redirect(redirect); 32 | } 33 | 34 | return next(context, params); 35 | }; 36 | }; 37 | } 38 | -------------------------------------------------------------------------------- /docs/installing/README.md: -------------------------------------------------------------------------------- 1 | # Installing `ds` 2 | 3 | Entropic requires a new command-line client, called `ds` (or "entropy delta".) **`ds` requires at least Node 12.** Install the cli: 4 | 5 | ## Curl or wget 6 | 7 | ```sh 8 | curl -sSL https://www.entropic.dev/install.sh | bash 9 | wget -O - https://www.entropic.dev/install.sh | bash 10 | ``` 11 | 12 | ### Brew 13 | 14 | ```sh 15 | brew install https://raw.githubusercontent.com/entropic-dev/entropic/master/docs/installing/homebrew/ds.rb 16 | ``` 17 | 18 | ### Arch Linux 19 | 20 | If you're using Arch Linux, install the [`nodejs-entropic`](https://aur.archlinux.org/packages/nodejs-entropic/) package from the AUR. 
21 | 22 | ### Snap packages (multiple Linux distributions) 23 | 24 | If you're using a Linux distro that supports `snapd`, you can install using `snap`: 25 | 26 | ```sh 27 | # You might need to `sudo snap` if you aren't logged in with `snap login` 28 | snap install entropic-ds 29 | ``` 30 | -------------------------------------------------------------------------------- /services/storage/test/utils/registry.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = provideRegistry; 4 | 5 | const listen = require('test-listen'); 6 | const micro = require('micro'); 7 | 8 | const { muxer } = require('boltzmann'); 9 | const flush = require('boltzmann/middleware/flush-request'); 10 | const registry = require('../../handlers'); 11 | 12 | function provideRegistry(to) { 13 | let ourmiddles = []; 14 | const testHandler = async function(...args) { 15 | const requestHandler = muxer(registry(), [flush, ...ourmiddles]); 16 | 17 | const service = await micro(requestHandler); 18 | const url = await listen(service); 19 | 20 | try { 21 | await to(url, ...args); 22 | } finally { 23 | service.close(); 24 | } 25 | }; 26 | 27 | testHandler.middleware = function(mw) { 28 | if (!mw) { 29 | return ourmiddles; 30 | } 31 | ourmiddles = mw; 32 | return this; 33 | }; 34 | 35 | return testHandler; 36 | } 37 | -------------------------------------------------------------------------------- /cli/test/lib/commands/whoami.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | const whoami = require('../../../lib/commands/whoami'); 3 | const sinon = require('sinon'); 4 | 5 | const FakeApi = require('../../utils/FakeApi'); 6 | const FakeLogger = require('../../utils/FakeLogger'); 7 | 8 | test('whoami calls console.log with username when API response is successful', async t => { 9 | const log = sinon.stub(FakeLogger, 'log'); 10 | 11 | const username = 'RaynorJim'; 12 
| await whoami({ 13 | log: FakeLogger, 14 | api: new FakeApi({ username }, 200) 15 | }); 16 | 17 | t.is(log.calledWith(username), true); 18 | log.restore(); 19 | }); 20 | 21 | test('whoami calls error when not successful', async t => { 22 | const stubbedLogger = sinon.stub(FakeLogger, 'error'); 23 | 24 | const error = 'You are forbidden!'; 25 | 26 | await whoami({ 27 | log: FakeLogger, 28 | api: new FakeApi({ message: error }, 403) 29 | }); 30 | 31 | t.is(stubbedLogger.calledWith(error), true); 32 | }); 33 | -------------------------------------------------------------------------------- /cli/lib/commands/ping.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = ping; 4 | 5 | const figgy = require('figgy-pudding'); 6 | const fetch = require('../fetch'); 7 | 8 | const pingOpts = figgy({ 9 | registry: { default: 'https://registry.entropic.dev' }, 10 | argv: true, 11 | log: { default: require('npmlog') } 12 | }); 13 | 14 | // usage: ds ping 15 | 16 | async function ping(opts) { 17 | opts = pingOpts(opts); 18 | 19 | console.log(`PING: ${opts.registry}`); 20 | 21 | const start = Date.now(); 22 | const response = await fetch(`${opts.registry}/ping`); 23 | 24 | let body = null; 25 | try { 26 | body = await response.text(); 27 | } catch (err) { 28 | opts.log.error(`Caught error requesting "${opts.registry}/ping"`); 29 | return 1; 30 | } 31 | 32 | if (response.status > 399) { 33 | opts.log.error(body.message || body); 34 | return 1; 35 | } 36 | 37 | const time = Date.now() - start; 38 | 39 | console.log(`PONG: (${time / 1000}ms) ${body}`); 40 | return 0; 41 | } 42 | -------------------------------------------------------------------------------- /services/storage/decorators/is-namespace-member.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const Namespace = require('../models/namespace'); 4 | const { response } = require('boltzmann'); 5 
| 6 | module.exports = isNamespaceMember; 7 | 8 | function isNamespaceMember(next) { 9 | return async (context, params) => { 10 | if (!context.user) { 11 | return response.error( 12 | 'You must be logged in to perform this action', 13 | 403 14 | ); 15 | } 16 | 17 | const ns = await Namespace.objects 18 | .get({ 19 | active: true, 20 | name: params.member, 21 | 'namespace_members.active': true, 22 | 'namespace_members.user_id': context.user.id 23 | }) 24 | .catch(Namespace.objects.NotFound, () => null); 25 | 26 | if (!ns) { 27 | return response.error( 28 | `You cannot act on behalf of ${params.member}`, 29 | 403 30 | ); 31 | } 32 | 33 | context.member = ns; 34 | return next(context, params); 35 | }; 36 | } 37 | -------------------------------------------------------------------------------- /cli/lib/config.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = { load, save }; 4 | 5 | if (Number(process.version.match(/^v(\d+\.\d+)/)[1]) < 12) require('fs.promises') 6 | const { promises: fs } = require('fs'); 7 | const toml = require('@iarna/toml'); 8 | const home = require('user-home'); 9 | const path = require('path'); 10 | 11 | const errors = require('./errors'); 12 | 13 | async function load(filename = path.join(home, '.entropicrc')) { 14 | let content = null; 15 | try { 16 | content = await fs.readFile(filename, 'utf8'); 17 | } catch (e) { 18 | if (e.code === 'ENOENT') { 19 | return {}; 20 | } 21 | 22 | throw new errors.CouldNotReadConfigFile(filename, e); 23 | } 24 | 25 | let parsed = null; 26 | try { 27 | parsed = toml.parse(content); 28 | } catch (e) { 29 | throw new errors.CouldNotParseConfigToml(filename, e); 30 | } 31 | 32 | return parsed; 33 | } 34 | 35 | async function save(content, filename = path.join(home, '.entropicrc')) { 36 | await fs.writeFile(filename, toml.stringify(content)); 37 | } 38 | -------------------------------------------------------------------------------- 
/cli/lib/fetch-package.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = fetchPackage; 4 | 5 | const { pipeline: _ } = require('stream'); 6 | const { promisify } = require('util'); 7 | const fetch = require('./fetch'); 8 | const cacache = require('cacache'); 9 | const ssri = require('ssri'); 10 | 11 | async function fetchPackage( 12 | { registry, cache, expires = 5 * 60 * 1000 }, 13 | name, 14 | now = Date.now() 15 | ) { 16 | let meta = await cacache 17 | .get(cache, `spackage:${name}`) 18 | .then(xs => JSON.parse(String(xs.data))) 19 | .catch(() => null); 20 | 21 | if (!meta || now - Date.parse(meta.date) > Number(expires)) { 22 | const pkgReq = await fetch(`${registry}/v1/packages/package/${name}`); 23 | meta = { 24 | date: Date.parse(pkgReq.headers.date), 25 | data: await pkgReq.json() 26 | }; 27 | 28 | if (pkgReq.status > 399) { 29 | console.log(name, meta.data); 30 | throw new Error(); 31 | } 32 | 33 | await cacache.put(cache, `spackage:${name}`, JSON.stringify(meta)); 34 | } 35 | 36 | return meta.data; 37 | } 38 | -------------------------------------------------------------------------------- /services/storage/middleware/internal-auth.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const User = require('../models/user'); 4 | 5 | module.exports = createInternalAuthMW; 6 | 7 | function createInternalAuthMW() { 8 | return next => { 9 | return async context => { 10 | // Being lazy here, because we may not actually care 11 | // to materialize the user on most requests. So, make 12 | // `context.user` return a promise for the currently 13 | // authenticated user (if any.) 14 | // 15 | // TODO: Someday use JWTs, maybe. 
/**
 * Middleware factory: wrap each mutating request in a Postgres transaction.
 *
 * GET/HEAD requests pass through untouched. For all other methods, the
 * context's `getPostgresClient` is replaced with a memoized version that
 * checks out a client and issues BEGIN exactly once per request. The
 * transaction is COMMITted when the handler returns a success response,
 * and ROLLBACKed when the handler returns an error status (>399) or throws.
 */
function createTxnMiddleware() {
  return next => {
    return async context => {
      const req = context.request;

      // Reads don't need a transaction.
      if (req.method === 'GET' || req.method === 'HEAD') {
        return next(context);
      }

      const getClient = context.getPostgresClient;
      let client = null;
      let clientPromise = null;

      // BUG FIX: the previous version checked out a client and issued
      // BEGIN on *every* call, so a handler calling getPostgresClient()
      // twice would start duplicate transactions and clobber the client
      // reference. Memoize the promise so concurrent and repeated calls
      // all share one client and one BEGIN.
      context.getPostgresClient = () => {
        if (!clientPromise) {
          clientPromise = (async () => {
            client = await getClient();
            await client.query('BEGIN');
            return client;
          })();
        }
        return clientPromise;
      };

      let closeTransaction = 'COMMIT';
      try {
        const response = await next(context);

        // Error responses roll back even though the handler returned
        // normally.
        if (response.status > 399) {
          closeTransaction = 'ROLLBACK';
        }

        return response;
      } catch (err) {
        closeTransaction = 'ROLLBACK';
        throw err;
      } finally {
        // Only settle the transaction if a client was actually checked out.
        if (client) {
          await client.query(closeTransaction);
        }
      }
    };
  };
}
// Default options for `ds login`.
const loginOpts = figgy({
  log: { default: require('npmlog') },
  registry: { default: 'https://registry.entropic.dev' }
});

/**
 * Log in to a registry using npm-profile's web-based login flow, then
 * persist the resulting token and username into the user's config file
 * under the registry's entry.
 *
 * @param {object} opts - figgy-pudding options (registry, log, …).
 * @returns {Promise<void>}
 */
async function login(opts) {
  opts = loginOpts(opts);

  const { username, token } = await profile.loginWeb(async url => {
    console.log(`Here's your login url:\n ${url}\nA browser window should open momentarily (If it doesn't, open the above link manually.)`);
    return opener(url);
  }, opts);

  // load _just_ the config file, not the config file + env + cli args.
  const config = await load();

  // Ensure the nested structure exists before writing into it.
  const registries = (config.registries = config.registries || {});
  const entry = (registries[opts.registry] = registries[opts.registry] || {});
  entry.token = token;
  entry.username = username;

  await save(config);
}
/**
 * Decorator factory: verify that the package named by route params exists
 * and is active before running the wrapped handler.
 *
 * May be invoked two ways:
 *   - `packageExists(handler)` — uses the default route param names
 *     (`namespace`, `host`, `name`);
 *   - `packageExists({ namespace, host, name })(handler)` — reads the
 *     package coordinates from custom param names.
 *
 * On success the matched Package row is attached as `context.pkg`;
 * otherwise a 404 error response is returned without calling the handler.
 */
function packageExists(mapping) {
  // Called directly with a handler? Re-enter with the default mapping.
  if (typeof mapping === 'function') {
    return packageExists({
      namespace: 'namespace',
      host: 'host',
      name: 'name'
    })(mapping);
  }

  const { namespace, host, name } = mapping;

  return next => async (context, params) => {
    const ns = params[namespace];
    const hostName = params[host];
    const pkgName = params[name];

    // Look up an active package in an active namespace on an active host.
    const found = await Package.objects
      .get({
        active: true,
        name: pkgName,
        'namespace.active': true,
        'namespace.name': ns,
        'namespace.host.name': hostName,
        'namespace.host.active': true
      })
      .catch(Package.objects.NotFound, () => null);

    if (!found) {
      return response.error(`Package ${ns}@${hostName}/${pkgName} not found`, 404);
    }

    context.pkg = found;
    return next(context, params);
  };
}
/**
 * Parse a package spec string into its canonical parts.
 *
 * Understood forms:
 *   - "@scope/name[@range]"            → legacy scoped package on defaultHost
 *   - "name[@range]"                   → legacy package on defaultHost
 *   - "namespace[@host]/name[@range]"  → fully qualified entropic package
 *
 * @param {string} input - the raw spec as typed by the user.
 * @param {string} defaultHost - host to assume when none is given.
 * @returns {{canonical: string, host: string, name: string,
 *            namespace: string, range: string, input: string}}
 */
function parsePackageSpec(input, defaultHost) {
  // Scoped legacy packages start with "@".
  if (input.startsWith('@')) {
    const [, scopedName, range = 'latest'] = input.split('@');
    const name = `@${scopedName}`;

    return {
      canonical: `legacy@${defaultHost}/${encodeURIComponent(name)}`,
      host: defaultHost,
      name,
      namespace: 'legacy',
      range,
      input
    };
  }

  const [namespacehost, namerange] = input.split('/');

  // No slash: a bare legacy package name, optionally with a range.
  if (!namerange) {
    const [name, range = 'latest'] = namespacehost.split('@');

    return {
      canonical: `legacy@${defaultHost}/${name}`,
      host: defaultHost,
      name,
      namespace: 'legacy',
      range,
      input
    };
  }

  // Fully qualified: namespace[@host] / name[@range].
  const [namespace, host = defaultHost] = namespacehost.split('@');
  const [encodedName, range = 'latest'] = namerange.split('@');
  const name = decodeURIComponent(encodedName);

  return {
    canonical: `${namespace}@${host}/${encodeURIComponent(name)}`,
    host,
    name,
    namespace,
    range,
    input
  };
}
/**
 * Middleware factory that attaches a shared redis client to every request
 * context as `context.redis`.
 *
 * A single client is created per middleware instance and reused across
 * requests. Reconnection backs off linearly (50ms per attempt, capped at
 * 250ms) and gives up after ten attempts, surfacing the redis error.
 *
 * @param {object} [config]
 * @param {string} [config.redisURL] - connection URL; defaults to
 *   $REDIS_URL or redis://localhost:6379.
 */
function createRedisMW({
  redisURL = process.env.REDIS_URL || 'redis://localhost:6379'
} = {}) {
  return next => {
    const client = redis.createClient(redisURL, {
      retry_strategy(options) {
        // Stop retrying (and fail with the redis error) after 10 attempts.
        if (options.attempt > 10) {
          return;
        }
        // Otherwise back off linearly, capped at 250ms.
        return Math.min(options.attempt * 50, 250);
      }
    });

    return context => {
      context.redis = client;
      return next(context);
    };
  };
}
running databases in containers is not really the best idea). 5 | If you don't, check out some links: 6 | 7 | * Quick cluster setup: 8 | * [Microk8s](https://microk8s.io/) 9 | * [Rancher Quickstart](https://github.com/rancher/quickstart) (works with AWS, 10 | Digital Ocean, and Vagrant) 11 | * Hosted Postgres: 12 | * [AWS RDS](https://aws.amazon.com/rds/postgresql/) 13 | * [Azure Database](https://azure.microsoft.com/en-us/services/postgresql/) 14 | * [Google Cloud SQL](https://cloud.google.com/sql/) 15 | * [Digital Ocean Managed Databases](https://www.digitalocean.com/products/managed-databases/) 16 | * [Hosted Postgres List](https://www.postgresql.org/support/professional_hosting/) 17 | 18 | Take a look at the variables and comments in the configmap template. Once you've 19 | got those squared away, `npm i && ./index.js`, then check out the `build` 20 | directory. 21 | 22 | If you don't want to run Redis in Kubernetes and prefer something external 23 | (ElastiCache, for example) you can remove those files. Once you're satisfied, 24 | `kubectl apply -R -f build`. 25 | 26 | ## Why is this in JS? 27 | 28 | Because editing large amounts of YAML or JSON by hand is not fun. 
/**
 * Compose a router and middleware stack into a micro HTTP server.
 *
 * The muxer is resolved through `module.exports` at call time rather
 * than referenced directly, so it can be replaced after module load.
 *
 * @param {Function} router - the composed route handler.
 * @param {Array} middleware - middleware descriptors to wrap around it.
 * @returns {import('http').Server} a micro server, not yet listening.
 */
function make(router, middleware) {
  const handle = module.exports.muxer(router, middleware);
  return micro((req, res) => handle(req, res));
}

/**
 * Build the server and start listening on $PORT (all interfaces),
 * installing a graceful-shutdown handler.
 *
 * @param {Function} router - the composed route handler.
 * @param {Array} middlewares - middleware descriptors.
 */
function run(router, middlewares) {
  const server = make(router, middlewares);

  server.listen(process.env.PORT, '0.0.0.0');
  logger.info(`listening on port: ${process.env.PORT}`);

  // Docker gives containers 10 seconds to handle SIGTERM before SIGKILL:
  // stop accepting connections, let in-flight ones finish, then exit 0.
  process.on('SIGTERM', () => {
    server.close(() => process.exit(0));
  });
}
// A request context holds the raw req/response objects and
// all other useful information shared by views and middlewares.
class Context {
  constructor(request, res) {
    this.request = request;
    this.rawResponse = res;
    this.start = Date.now();

    // Prefer the socket's address; strip the IPv4-mapped-IPv6 prefix so
    // logs show plain dotted quads. BUG FIX: guard against a missing
    // remoteAddress (e.g. an already-destroyed socket) instead of crashing.
    const remoteAddress = request.socket
      ? request.socket.remoteAddress
      : request.remoteAddress;
    this.remote = remoteAddress ? remoteAddress.replace('::ffff:', '') : '';

    // BUG FIX: HTTP/1.0 clients may omit the Host header entirely; the
    // previous code called .split() on undefined and threw. Fall back to
    // an empty host instead.
    const [host] = (request.headers.host || '').split(':');
    this.host = host;
    this._parsedUrl = null;
  }

  // Lazily parse and memoize the request URL. The base origin is a
  // placeholder; only path and query components are meaningful.
  get url() {
    if (!this._parsedUrl) {
      this._parsedUrl = new URL(this.request.url, 'http://entropic.dev');
    }
    return this._parsedUrl;
  }
}

/**
 * Compose the middleware stack around the router and return a raw
 * (req, res) handler suitable for micro/http.
 */
function muxer(router, middleware) {
  const built = build(middleware);
  const handler = built.reduceRight((lhs, rhs) => {
    return rhs(lhs);
  }, router);

  return async (req, res) => {
    const context = new Context(req, res);

    try {
      await handler(context);
    } catch (err) {
      // Last-resort error handling. BUG FIX: previously the response was
      // ended with the default 200 status even though the request failed;
      // signal a server error instead.
      console.log(err);
      res.statusCode = 500;
      res.end();
    }
  };
}
/**
 * Build a router factory on top of find-my-way, operating on a context
 * object instead of a raw req/res pair (an even simpler micro-fork; see
 * https://github.com/amio/micro-fork).
 *
 * Usage: router(fmwOptions)(route, route, …) returns a handler that takes
 * a context and dispatches to the matching route (or responds 404).
 */
function router(options) {
  const wayfinder = fmw(options);

  return function(...routes) {
    for (const route of routes) {
      wayfinder.on(...route);
    }

    return context => {
      const { request } = context;

      // find-my-way matches on the path only; resolve it relative to the
      // request's Host header.
      const baseURL = `http://${request.headers.host}/`;
      const { pathname } = new URL(request.url, baseURL);

      const match = wayfinder.find(request.method, pathname);
      if (!match) {
        return response.error({ message: 'Not found', code: 'ENOTFOUND' }, 404);
      }

      return match.handler(context, match.params, match.store);
    };
  };
}

// Tiny helpers producing [METHOD, path, handler] route triples.
const makeRoute = method => (path, fn) => [method, path, fn];

const get = makeRoute('GET');
const put = makeRoute('PUT');
const del = makeRoute('DELETE');
const post = makeRoute('POST');
const head = makeRoute('HEAD');
const patch = makeRoute('PATCH');
const options = makeRoute('OPTIONS');
/**
 * An OAuth identity provider.
 *
 * Holds the client credentials and endpoints for one provider, plus a
 * `getIdentity(token)` callback that exchanges an access token for a
 * normalized { id, username, email } identity.
 */
class Provider {
  constructor(name, id, secret, redirectUrl, accessUrl, getIdentity) {
    Object.assign(this, { name, id, secret, redirectUrl, accessUrl, getIdentity });
  }

  /**
   * Build the authorization URL the user is sent to.
   *
   * @param {string} state - opaque CSRF token round-tripped by the provider.
   * @returns {string} fully-qualified authorize URL with query string.
   */
  redirect(state) {
    const query = querystring.stringify({
      redirect_uri: `${process.env.EXTERNAL_HOST}/login/providers/${this.name}/callback`,
      state,
      client_id: this.id
    });
    return `${this.redirectUrl}?${query}`;
  }
}

// All providers we know how to log in with. Currently just GitHub.
const PROVIDERS = [
  new Provider(
    'github',
    process.env.OAUTH_GITHUB_CLIENT,
    process.env.OAUTH_GITHUB_SECRET,
    'https://github.com/login/oauth/authorize',
    'https://github.com/login/oauth/access_token',
    async token => {
      // Exchange the access token for the user's GitHub profile.
      const response = await fetch('https://api.github.com/user', {
        headers: {
          authorization: `token ${token}`,
          accept: 'application/json'
        }
      });

      const { login, email } = await response.json();
      return { id: login, username: login, email: email || '' };
    }
  )
];
// Options accepted by `ds invite`.
const inviteOpts = figgy({
  argv: true,
  registry: true,
  token: true,
  namespace: true,
  package: true,
  log: { default: require('npmlog') }
});

/**
 * Invite a user either as a maintainer of a package (--package) or as a
 * member of a namespace (--namespace).
 *
 * @param {object} opts - figgy-pudding options.
 * @returns {Promise<number>} process exit code: 0 on a server message,
 *   1 otherwise.
 */
async function invite(opts) {
  opts = inviteOpts(opts);

  // I do note that it would be nice to invite a list of people at once.
  const [invitee] = opts.argv;
  const registryHost = opts.registry.replace(/^https?:\/\//, '');

  let uri;
  if (opts.package) {
    // Maintainer invitation on a specific package.
    const parsed = parsePackageSpec(opts.package, registryHost);
    uri = `${opts.registry}/v1/packages/package/${parsed.canonical}/maintainers/${invitee}`;
  } else {
    // Namespace membership invitation; default the host when omitted.
    let ns = opts.namespace;
    if (!ns.includes('@')) {
      ns += '@' + registryHost;
    }
    uri = `${opts.registry}/v1/namespaces/namespace/${ns}/members/${invitee}`;
  }

  const response = await fetch(uri, {
    method: 'POST',
    headers: {
      authorization: `Bearer ${opts.token}`
    }
  });

  const body = await response.json();
  if (body.message) {
    console.log(body.message);
    return 0;
  }
  console.log(body);
  return 1;
}
/**
 * Build the registry service's router: mounts the sub-routers for users,
 * packages, maintainers, and namespaces, plus the login/auth endpoints
 * and health/version probes.
 */
function makeRouter() {
  const router = fork.router()(
    fork.get('/', version),
    ...require('./users'),
    ...require('./packages'),
    ...require('./maintainers'),
    ...require('./namespaces'),

    fork.get('/-/v1/login/poll/:session', authn.anonymous(auth.poll)),
    fork.post('/-/v1/login', authn.anonymous(auth.login)),
    fork.get('/v1/auth/whoami', authn.required(whoami)),
    fork.get('/ping', ping)
  );

  return router;
}

// GET / — service banner: name, version, website, and a random Culture
// ship name as the greeting.
async function version() {
  const data = {
    server: 'entropic',
    version: pkg.version,
    message: ship,
    website: 'https://www.entropic.dev'
  };
  return response.json(data);
}

// GET /ping — liveness probe; responds with the ship name as plain text.
async function ping() {
  return response.text(ship);
}

// GET /v1/auth/whoami — report the authenticated user's name.
async function whoami(context) {
  if (!context.user) {
    // BUG FIX: use lowercase `code` so the error payload matches the
    // shape used elsewhere (e.g. the router's ENOTFOUND response);
    // it was previously the inconsistent `CODE`.
    return response.error({
      message: 'You are not logged in',
      code: 'ENOTLOGGEDIN'
    });
  }
  // This isn't to spec but is what vcpm does. Consider changing it.
  return response.json({ username: context.user.name });
}
const pipeline = promisify(_);

// base64 sha512 digest of zero bytes — lets us short-circuit empty files.
const EMPTY_HASH =
  'z4PhNX7vuL3xVChQ1m2AB9Yg5AULVxXcg/SpIdNs6c5H0NE8XYXysP+DGNKHfuwvY7kxvUdBeoGlODJ6+SfaPg==';
const EMPTY_BUF = Buffer.from([]);

/**
 * Ensure the object identified by `integrity` is present in the local
 * cacache store, downloading it from the registry when missing.
 *
 * @param {object} opts
 * @param {string} opts.registry - base URL of the registry.
 * @param {string} opts.cache - path to the local cacache store.
 * @param {string} integrity - SRI string identifying the object.
 * @param {boolean} [load=false] - when true, resolve with the cached
 *   entry ({ data, … }); otherwise resolve with `true`.
 * @throws {Error} on a failed download or an integrity mismatch.
 */
async function fetchObject({ registry, cache }, integrity, load = false) {
  const parsed = ssri.parse(integrity);
  const algo = parsed.pickAlgorithm();
  const [{ digest }] = parsed[algo];

  // The empty object never needs a network round trip or a cache entry.
  if (algo === 'sha512' && digest === EMPTY_HASH) {
    return load ? { data: EMPTY_BUF } : true;
  }

  // Already present locally?
  if (await cacache.get.hasContent(cache, integrity)) {
    return load ? cacache.get(cache, integrity) : true;
  }

  const response = await fetch(
    `${registry}/v1/objects/object/${algo}/${encodeURIComponent(digest)}`
  );
  if (response.status > 399) {
    throw new Error('error fetching object');
  }

  // Stream the body into the cache, capturing the integrity that cacache
  // computes for the stored bytes.
  let destIntegrity = null;
  const dest = cacache.put.stream(cache, integrity);
  dest.on('integrity', i => (destIntegrity = i));
  await pipeline(response.body, dest);

  // Verify what we stored matches what we asked for.
  if (!parsed.match(destIntegrity)) {
    throw new Error('file integrity mismatch!');
  }

  return load ? cacache.get(cache, integrity) : true;
}
const figgy = require('figgy-pudding'); 5 | const parsePackageSpec = require('../canonicalize-spec'); 6 | 7 | module.exports = join; 8 | 9 | // usage: ds join name@host/pkg --as namespace 10 | 11 | const joinOpts = figgy({ 12 | argv: true, 13 | as: true, 14 | registry: true, 15 | token: true, 16 | log: { default: require('npmlog') } 17 | }); 18 | 19 | async function join(opts) { 20 | opts = joinOpts(opts); 21 | 22 | if (opts.argv.length !== 1 || !opts.as) { 23 | console.error('Usage: ds join --as '); 24 | return 1; 25 | } 26 | 27 | const host = opts.registry.replace(/^https?:\/\//, ''); 28 | const invitee = opts.as; 29 | let uri; 30 | 31 | if (opts.argv[0].includes('/')) { 32 | const { _, ...parsed } = parsePackageSpec( 33 | opts.argv[0], 34 | opts.registry.replace(/^https?:\/\//, '') 35 | ); 36 | uri = `${opts.registry}/v1/packages/package/${ 37 | parsed.canonical 38 | }/maintainers/${invitee}/invitation`; 39 | } else { 40 | const ns = opts.argv[0] + (opts.argv[0].includes('@') ? '' : `@${host}`); 41 | uri = `${ 42 | opts.registry 43 | }/v1/namespaces/namespace/${ns}/members/${invitee}/invitation`; 44 | } 45 | 46 | const response = await fetch(uri, { 47 | method: 'POST', 48 | headers: { 49 | authorization: `Bearer ${opts.token}` 50 | } 51 | }); 52 | const body = await response.json(); 53 | console.log(body.message ? 
body.message : body); 54 | return 0; 55 | } 56 | -------------------------------------------------------------------------------- /services/registry/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "entropic-registry", 3 | "description": "community package manager", 4 | "version": "0.0.1", 5 | "author": "C J Silverio ", 6 | "bugs": { 7 | "url": "https://github.com/entropic-dev/entropic/issues" 8 | }, 9 | "dependencies": { 10 | "@hapi/joi": "~15.0.2", 11 | "boltzmann": "file:../common/boltzmann", 12 | "culture-ships": "~1.0.0", 13 | "dotenv": "~8.0.0", 14 | "micro": "~9.3.4", 15 | "node-fetch": "~2.5.0", 16 | "uuid": "~3.3.2" 17 | }, 18 | "devDependencies": { 19 | "babel-eslint": "~10.0.1", 20 | "depcheck": "^0.8.0", 21 | "eslint": "~5.16.0", 22 | "eslint-config-prettier": "~4.3.0", 23 | "eslint-plugin-prettier": "~3.1.0", 24 | "mocha": "~6.1.4", 25 | "must": "~0.13.4", 26 | "nodemon": "^1.19.1", 27 | "nyc": "~14.1.1", 28 | "prettier": "~1.17.0", 29 | "test-listen": "^1.1.0" 30 | }, 31 | "homepage": "https://github.com/entopic-dev/entropic#readme", 32 | "keywords": [ 33 | "entropy", 34 | "heat death" 35 | ], 36 | "license": "Apache-2.0", 37 | "main": "index.js", 38 | "private": true, 39 | "repository": { 40 | "type": "git", 41 | "url": "git://github.com/entopic-dev/entropic.git" 42 | }, 43 | "scripts": { 44 | "coverage": "nyc mocha -R spec", 45 | "dev": "NODE_ENV=dev nodemon ./server.js", 46 | "lint": "eslint .", 47 | "lint-fix": "prettier --write '**/*.js'", 48 | "posttest": "npm run lint", 49 | "start": "./server.js", 50 | "test": "mocha test/**/*.spec.js", 51 | "audit-deps": "depcheck ." 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /cli/README.md: -------------------------------------------------------------------------------- 1 | # ds 2 | 3 | The change in entropy is equal to the heat transfer divided by the temperature. 
`∂S = ∂Q/T` 4 | 5 | ## Configuration 6 | 7 | `ds` reads its config from a [toml](https://github.com/toml-lang/toml) file in `~/.entropicrc`. The format looks like this: 8 | 9 | ```toml 10 | registry="https://entropic.dev" 11 | 12 | [registries."https://entropic.dev"] 13 | token="my-auth-token here" 14 | 15 | [registries."https://registry.example.com"] 16 | token="my-example.com-auth-token here" 17 | ``` 18 | 19 | ## Commands 20 | 21 | Implement new commands as files in `lib/commands/`. Commands implemented as of this start at documentation: 22 | 23 | * __login__: log into the selected registry, generating and storing an auth token 24 | * __whoami__: respond with the name of the authenticated user 25 | * __publish__: publish a new package-version, creating a package as a side effect if necessary 26 | * __download__: fetch & insert into cache the content blobs for the named package-version 27 | * __invite__: `invite [name] --to pkg/namespace` invites the namespace to join the maintainers list 28 | * __invitations__: list all your open invitations 29 | * __join__: accept an invitation to join a namespace or maintain a package 30 | * __decline__: decline an invitation to join a namespace or package maintainers list 31 | * __members__: show members of a namespace OR list all maintainers of a package 32 | 33 | Commands that should exist: 34 | 35 | * __help__: usage help 36 | * __create__: create a new package, meta-info only 37 | * __about__: or 'info' or 'view'; describe a package 38 | * __install__: probably this would be handy 39 | -------------------------------------------------------------------------------- /services/registry/Package.toml: -------------------------------------------------------------------------------- 1 | name = "ceejbot@registry.entropic.dev/entropic-registry" 2 | version = "0.0.0-alpha" 3 | 4 | [dependencies] 5 | "legacy@registry.entropic.dev/%40hapi%2firon" = "^5.1.0" 6 | "legacy@registry.entropic.dev/%40hapi%2fjoi" = "~15.0.2" 7 | 
"legacy@registry.entropic.dev/are-we-dev" = "^1.0.0" 8 | "legacy@registry.entropic.dev/bole" = "~3.0.2" 9 | "legacy@registry.entropic.dev/cls-hooked" = "^4.2.2" 10 | "legacy@registry.entropic.dev/cookie" = "^0.3.1" 11 | "legacy@registry.entropic.dev/csrf" = "~3.1.0" 12 | "legacy@registry.entropic.dev/culture-ships" = "~1.0.0" 13 | "legacy@registry.entropic.dev/dotenv" = "~8.0.0" 14 | "legacy@registry.entropic.dev/escape-html" = "^1.0.3" 15 | "legacy@registry.entropic.dev/find-my-way" = "~2.0.1" 16 | "legacy@registry.entropic.dev/graceful-fs" = "^4.1.15" 17 | "legacy@registry.entropic.dev/is-email-maybe" = "^1.0.1" 18 | "legacy@registry.entropic.dev/markdown" = "^0.5.0" 19 | "legacy@registry.entropic.dev/micro" = "~9.3.4" 20 | "legacy@registry.entropic.dev/mkdirp" = "~0.5.1" 21 | "legacy@registry.entropic.dev/multiparty" = "^4.2.1" 22 | "legacy@registry.entropic.dev/node-fetch" = "~2.5.0" 23 | "legacy@registry.entropic.dev/npm-user-validate" = "^1.0.0" 24 | "legacy@registry.entropic.dev/ormnomnom" = "^5.2.2" 25 | "legacy@registry.entropic.dev/pacote" = "~9.5.0" 26 | "legacy@registry.entropic.dev/pg" = "~7.4.0" 27 | "legacy@registry.entropic.dev/redis" = "^2.8.0" 28 | "legacy@registry.entropic.dev/semver" = "^6.0.0" 29 | "legacy@registry.entropic.dev/ssri" = "^6.0.1" 30 | "legacy@registry.entropic.dev/uuid" = "~3.3.2" 31 | "legacy@registry.entropic.dev/validate-npm-package-name" = "^3.0.0" 32 | -------------------------------------------------------------------------------- /services/storage/migrations/20190514000000-add-namespace-hostnames.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var dbm; 4 | var type; 5 | var seed; 6 | /** 7 | * We receive the dbmigrate dependency from dbmigrate initially. 8 | * This enables us to not have to rely on NODE_PATH. 
9 | */ 10 | exports.setup = function(options, seedLink) { 11 | dbm = options.dbmigrate; 12 | type = dbm.dataType; 13 | seed = seedLink; 14 | }; 15 | 16 | if (!process.env.EXTERNAL_HOST) { 17 | console.log( 18 | '\nYou must set up a .env file with "EXTERNAL_HOST" set to run this migration.\n' 19 | ); 20 | process.exit(1); 21 | } 22 | 23 | exports.up = async function(db) { 24 | return await db.runSql(` 25 | create table if not exists "hosts" ( 26 | id serial primary key, 27 | name text not null, 28 | created TIMESTAMP DEFAULT NOW(), 29 | modified TIMESTAMP DEFAULT NOW(), 30 | active BOOLEAN DEFAULT TRUE 31 | ); 32 | 33 | insert into "hosts" (name) values ('${process.env.EXTERNAL_HOST.replace( 34 | /^https?:\/\//, 35 | '' 36 | )}'); 37 | 38 | alter table "namespaces" add column "host_id" integer references "hosts" ("id") not null default 1; 39 | 40 | create index "namespaces_host_id_idx" on "namespaces" ("host_id") WHERE ( "active" ); 41 | create index "host_name_idx" on "hosts" ("name") WHERE ( "active" ); 42 | create index "namespace_name_idx" on "namespaces" ("name") WHERE ( "active" ); 43 | create index "packages_name_idx" on "packages" ("name") WHERE ( "active" ); 44 | `); 45 | }; 46 | 47 | exports.down = function(db) { 48 | return null; 49 | }; 50 | 51 | exports._meta = { 52 | version: 1 53 | }; 54 | -------------------------------------------------------------------------------- /cli/lib/commands/accept.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const fetch = require('../fetch'); 4 | const figgy = require('figgy-pudding'); 5 | const parsePackageSpec = require('../canonicalize-spec'); 6 | 7 | module.exports = accept; 8 | 9 | // usage: ds accept --package name@host/pkg --as namespace 10 | // ds accept --namespace name@host 11 | 12 | const acceptOpts = figgy({ 13 | as: true, 14 | namespace: true, 15 | package: true, 16 | registry: true, 17 | token: true, 18 | log: { default: require('npmlog') } 19 
| }); 20 | 21 | async function accept(opts) { 22 | opts = acceptOpts(opts); 23 | 24 | if (!opts.as || (!opts.package && !opts.namespace)) { 25 | console.error( 26 | 'Usage: ds accept --package --as \n' + 27 | ' ds accept --namespace ' 28 | ); 29 | return 1; 30 | } 31 | 32 | let uri; 33 | if (opts.package) { 34 | const parsed = parsePackageSpec( 35 | opts.package, 36 | opts.registry.replace(/^https?:\/\//, '') 37 | ); 38 | 39 | const invitee = opts.as; 40 | 41 | uri = `${ 42 | opts.registry 43 | }/v1/namespaces/namespace/${invitee}/maintainerships/${parsed.canonical}`; 44 | } else { 45 | let ns = opts.namespace; 46 | if (!ns.includes('@')) { 47 | ns += '@' + opts.registry.replace(/^https?:\/\//, ''); 48 | } 49 | 50 | uri = `${opts.registry}/v1/users/user/memberships/invitations/${ns}`; 51 | } 52 | 53 | const response = await fetch(uri, { 54 | method: 'POST', 55 | headers: { 56 | authorization: `Bearer ${opts.token}` 57 | } 58 | }); 59 | const body = await response.json(); 60 | console.log(body.message ? 
body.message : body); 61 | return 0; 62 | } 63 | -------------------------------------------------------------------------------- /cli/lib/commands/decline.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const fetch = require('../fetch'); 4 | const figgy = require('figgy-pudding'); 5 | const parsePackageSpec = require('../canonicalize-spec'); 6 | 7 | module.exports = decline; 8 | 9 | // usage: ds decline --package name@host/pkg --as namespace 10 | // ds decline --namespace name@host 11 | 12 | const declineOpts = figgy({ 13 | as: true, 14 | namespace: true, 15 | package: true, 16 | registry: true, 17 | token: true, 18 | log: { default: require('npmlog') } 19 | }); 20 | 21 | async function decline(opts) { 22 | opts = declineOpts(opts); 23 | 24 | if (!opts.as || (!opts.package && !opts.namespace)) { 25 | console.error( 26 | 'Usage: ds decline --package --as \n' + 27 | ' ds decline --namespace ' 28 | ); 29 | return 1; 30 | } 31 | 32 | let uri; 33 | if (opts.package) { 34 | const parsed = parsePackageSpec( 35 | opts.package, 36 | opts.registry.replace(/^https?:\/\//, '') 37 | ); 38 | 39 | const invitee = opts.as; 40 | 41 | uri = `${ 42 | opts.registry 43 | }/v1/namespaces/namespace/${invitee}/maintainerships/${parsed.canonical}`; 44 | } else { 45 | let ns = opts.namespace; 46 | if (!ns.includes('@')) { 47 | ns += '@' + opts.registry.replace(/^https?:\/\//, ''); 48 | } 49 | 50 | uri = `${opts.registry}/v1/users/user/memberships/invitations/${ns}`; 51 | } 52 | 53 | const response = await fetch(uri, { 54 | method: 'DELETE', 55 | headers: { 56 | authorization: `Bearer ${opts.token}` 57 | } 58 | }); 59 | const body = await response.json(); 60 | console.log(body.message ? 
body.message : body); 61 | return 0; 62 | } 63 | -------------------------------------------------------------------------------- /services/registry/middleware/bearer-auth.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const { response } = require('boltzmann'); 4 | const crypto = require('crypto'); 5 | 6 | module.exports = createBearerAuthMW; 7 | 8 | function createBearerAuthMW({ 9 | sessionTimeout = Number(process.env.SESSION_TIMEOUT) || 5 * 60 10 | } = {}) { 11 | return next => { 12 | return async context => { 13 | const bearer = context.request.headers['authorization'] 14 | ? context.request.headers['authorization'].replace(/^Bearer /, '') 15 | : ''; 16 | if (!bearer) { 17 | return next(context); 18 | } 19 | 20 | if (!bearer.startsWith('ent_')) { 21 | return response.authneeded( 22 | 'Your auth token is not a valid entropic token.' 23 | ); 24 | } 25 | 26 | // getting access to the redis doesn't get you the tokens. 27 | const hash = crypto 28 | .createHash('sha256') 29 | .update(bearer + process.env.SESSION_SECRET) 30 | .digest('base64'); 31 | 32 | const key = `token_${hash}`; 33 | let data = await context.redis.getAsync(key); 34 | 35 | // eslint-disable: no-empty 36 | try { 37 | data = JSON.parse(data); 38 | context.user = data; 39 | } catch {} 40 | 41 | if (data === null) { 42 | const [err, result] = await context.storageApi 43 | .getToken(bearer) 44 | .then(xs => [null, xs], xs => [xs, null]); 45 | 46 | if (!err) { 47 | await context.redis.setexAsync( 48 | key, 49 | sessionTimeout, 50 | JSON.stringify(result.user) 51 | ); 52 | context.user = result.user; 53 | } 54 | } 55 | 56 | return next(context); 57 | }; 58 | }; 59 | } 60 | -------------------------------------------------------------------------------- /services/storage/migrations/20190503000000-add-tokens-and-authentications.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var dbm; 
4 | var type; 5 | var seed; 6 | 7 | /** 8 | * We receive the dbmigrate dependency from dbmigrate initially. 9 | * This enables us to not have to rely on NODE_PATH. 10 | */ 11 | exports.setup = function(options, seedLink) { 12 | dbm = options.dbmigrate; 13 | type = dbm.dataType; 14 | seed = seedLink; 15 | }; 16 | 17 | exports.up = async function(db) { 18 | return await db.runSql(` 19 | CREATE TABLE "tokens" ( 20 | id SERIAL PRIMARY KEY, 21 | user_id integer NOT NULL REFERENCES "users" ("id"), 22 | value_hash text NOT NULL, 23 | description text NOT NULL, 24 | 25 | created TIMESTAMP DEFAULT NOW(), 26 | modified TIMESTAMP DEFAULT NOW(), 27 | active BOOLEAN DEFAULT TRUE 28 | ); 29 | CREATE UNIQUE INDEX "tokens_value_hash_idx" ON "tokens" ( "value_hash" ) WHERE ( "active" ); 30 | 31 | CREATE TABLE "authentications" ( 32 | id SERIAL PRIMARY KEY, 33 | user_id integer NOT NULL REFERENCES "users" ("id"), 34 | 35 | remote_identity text NOT NULL, 36 | provider text NOT NULL DEFAULT 'github', 37 | access_token_enc text NOT NULL, 38 | 39 | created TIMESTAMP DEFAULT NOW(), 40 | modified TIMESTAMP DEFAULT NOW(), 41 | active BOOLEAN DEFAULT TRUE 42 | ); 43 | CREATE UNIQUE INDEX "authentications_user_id_access_token_enc" ON "authentications" ( "user_id", "access_token_enc" ) WHERE ( "active" ); 44 | CREATE UNIQUE INDEX "authentications_provider_remote_identity" ON "authentications" ( "provider", "remote_identity" ) WHERE ( "active" ); 45 | `); 46 | }; 47 | 48 | exports.down = function(db) { 49 | return null; 50 | }; 51 | 52 | exports._meta = { 53 | version: 1 54 | }; 55 | -------------------------------------------------------------------------------- /services/common/boltzmann/middleware/dev-only.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = dev; 4 | 5 | const hangWarning = Symbol('hang-stall'); 6 | const hangError = Symbol('hang-error'); 7 | 8 | function dev( 9 | nextName, 10 | warnAt = 
Number(process.env.DEV_LATENCY_WARNING_MS) || 500, 11 | errorAt = Number(process.env.DEV_LATENCY_ERROR_MS) || 2000 12 | ) { 13 | return function mw(next) { 14 | return async function inner(context) { 15 | const req = context.request; 16 | if (context[hangWarning]) { 17 | clearTimeout(context[hangWarning]); 18 | } 19 | context[hangWarning] = setTimeout(() => { 20 | console.error( 21 | `⚠️ Response from ${nextName} > ${warnAt}ms fetching "${req.method} ${ 22 | req.url 23 | }".` 24 | ); 25 | console.error( 26 | `\x1b[0;37m - (Tune timeout using DEV_LATENCY_WARNING_MS env variable.)\x1b[0;0m` 27 | ); 28 | }, warnAt); 29 | 30 | if (context[hangError]) { 31 | clearTimeout(context[hangError]); 32 | } 33 | context[hangError] = setTimeout(() => { 34 | console.error( 35 | `🛑 STALL: Response from ${nextName} > ${errorAt}ms: "${req.method} ${ 36 | req.url 37 | }". (Tune timeout using DEV_LATENCY_ERROR_MS env variable.)` 38 | ); 39 | console.error( 40 | `\x1b[0;37m - (Tune timeout using DEV_LATENCY_ERROR_MS env variable.)\x1b[0;0m` 41 | ); 42 | }, errorAt); 43 | 44 | try { 45 | return await next(context); 46 | } finally { 47 | clearTimeout(context[hangWarning]); 48 | context[hangWarning] = null; 49 | 50 | clearTimeout(context[hangError]); 51 | context[hangError] = null; 52 | } 53 | }; 54 | }; 55 | } 56 | -------------------------------------------------------------------------------- /services/storage/models/authentication.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const orm = require('ormnomnom'); 4 | const joi = require('@hapi/joi'); 5 | 6 | module.exports = class Authentication { 7 | #user = null; 8 | 9 | constructor({ 10 | id, 11 | provider, 12 | remote_identity, 13 | access_token_enc, 14 | user, 15 | user_id, 16 | created, 17 | modified, 18 | active 19 | }) { 20 | this.id = id; 21 | 22 | this.remote_identity = remote_identity; 23 | this.provider = provider; 24 | this.access_token_enc = access_token_enc; 25 
| 26 | this.#user = user ? Promise.resolve(user) : null; 27 | this.user_id = user_id; 28 | 29 | this.created = created; 30 | this.modified = modified; 31 | this.active = active; 32 | } 33 | 34 | async serialize() { 35 | const user = await this.user; 36 | const { created, modified, active, provider, remote_identity } = this; 37 | return { 38 | user, 39 | provider, 40 | remote_identity, 41 | created, 42 | modified, 43 | active 44 | }; 45 | } 46 | 47 | get user() { 48 | if (this.#user === null) { 49 | this.#user = User.objects.get({ id: this.user_id }); 50 | this.#user.catch(() => {}); 51 | } 52 | 53 | return this.#user; 54 | } 55 | 56 | set user(u) { 57 | this.#user = Promise.resolve(u); 58 | this.user_id = this.#user.id; 59 | } 60 | }; 61 | 62 | const User = require('./user'); 63 | 64 | module.exports.objects = orm(module.exports, { 65 | id: joi 66 | .number() 67 | .integer() 68 | .greater(-1) 69 | .required(), 70 | user: orm.fk(User), 71 | remote_identity: joi.string(), 72 | provider: joi.any().allow(['github']), 73 | access_token_enc: joi.string(), 74 | created: joi.date(), 75 | modified: joi.date(), 76 | active: joi.boolean().default(true) 77 | }); 78 | -------------------------------------------------------------------------------- /cli/test/lib/main.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | const main = require('../../lib/main'); 3 | const help = require('../../lib/commands/help'); 4 | const whoami = require('../../lib/commands/whoami'); 5 | const sinon = require('sinon'); 6 | 7 | async function testUnpack(argv) { 8 | const log = { log: sinon.spy(), error: sinon.spy() }; 9 | const load = sinon.spy(() => ({})); 10 | 11 | const result = await main.unpack(argv, { log, load }); 12 | return { log, load, result }; 13 | } 14 | 15 | test('empty argv dumps help', async t => { 16 | const { 17 | result: { cmd }, 18 | log: { log, error }, 19 | load, 20 | } = await testUnpack([]); 21 | 22 | 
t.is(cmd, help); 23 | t.is(log.callCount, 0); 24 | t.is(error.callCount, 0); 25 | t.is(load.callCount, 1); 26 | }); 27 | 28 | test('bad command name dumps help', async t => { 29 | const badCommand = '../test/lib/main'; // Refers to this test module 30 | 31 | const { 32 | result: { cmd }, 33 | log: { log, error }, 34 | load, 35 | } = await testUnpack([ badCommand ]); 36 | 37 | t.is(cmd, help); 38 | t.is(log.callCount, 1); 39 | t.is(log.args[0][0], `Ignoring malformed command name: "${ badCommand }"`); 40 | t.is(error.callCount, 0); 41 | t.is(load.callCount, 1); 42 | }); 43 | 44 | test('when you ask for help you get help', async t => { 45 | const { 46 | result: { cmd }, 47 | log: { log, error }, 48 | load, 49 | } = await testUnpack([ 'help' ]); 50 | 51 | t.is(cmd, help); 52 | t.is(log.callCount, 0); 53 | t.is(error.callCount, 0); 54 | t.is(load.callCount, 1); 55 | }); 56 | 57 | test('when you ask whoami you get whoami', async t => { 58 | const { 59 | result: { cmd }, 60 | log: { log, error }, 61 | load, 62 | } = await testUnpack([ 'whoami' ]); 63 | 64 | t.is(cmd, whoami); 65 | t.is(log.callCount, 0); 66 | t.is(error.callCount, 0); 67 | t.is(load.callCount, 1); 68 | }); 69 | -------------------------------------------------------------------------------- /services/storage/middleware/postgres.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const { createNamespace, destroyNamespace } = require('cls-hooked'); 4 | const orm = require('ormnomnom'); 5 | const { Pool } = require('pg'); 6 | 7 | module.exports = createPostgresPool; 8 | 9 | function createPostgresPool(url = process.env.POSTGRES_URL) { 10 | return function postgres(next) { 11 | const pool = new Pool( 12 | ...(url 13 | ? 
[ 14 | { 15 | connectionString: url 16 | } 17 | ] 18 | : []) 19 | ); 20 | const namespace = createNamespace('postgres'); 21 | orm.setConnection(async () => { 22 | const connector = namespace.get('getConnection'); 23 | if (typeof connector !== 'function') { 24 | throw new Error( 25 | 'Accessing postgres outside the context of a request? UNACCEPTABLE' 26 | ); 27 | } 28 | 29 | const connection = await connector(); 30 | return { 31 | connection, 32 | release() {} 33 | }; 34 | }); 35 | 36 | return async function inner(context) { 37 | let client = null; 38 | context.getPostgresClient = async () => { 39 | if (client) { 40 | return client; 41 | } 42 | 43 | client = await pool.connect(); 44 | return client; 45 | }; 46 | 47 | try { 48 | const response = await namespace.runAndReturn(async () => { 49 | namespace.set('getConnection', () => context.getPostgresClient()); 50 | return next(context); 51 | }); 52 | 53 | return response; 54 | } finally { 55 | context.getPostgresClient = fail; 56 | if (client) { 57 | client.release(); 58 | } 59 | } 60 | }; 61 | }; 62 | } 63 | 64 | async function fail() { 65 | throw new Error( 66 | 'Attempting to request postgres connection after handler has completed.' 67 | ); 68 | } 69 | -------------------------------------------------------------------------------- /services/storage/models/namespace-member.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const orm = require('ormnomnom'); 4 | const joi = require('@hapi/joi'); 5 | 6 | module.exports = class NamespaceMember { 7 | #user = null; 8 | #namespace = null; 9 | 10 | constructor({ 11 | id, 12 | user_id, 13 | user, 14 | namespace_id, 15 | namespace, 16 | created, 17 | modified, 18 | active, 19 | accepted 20 | }) { 21 | this.id = id; 22 | this.user_id = user_id; 23 | this.#user = user ? Promise.resolve(user) : null; 24 | this.namespace_id = namespace_id; 25 | this.#namespace = namespace ? 
Promise.resolve(namespace) : null; 26 | this.created = created; 27 | this.modified = modified; 28 | this.active = active; 29 | this.accepted = accepted; 30 | } 31 | 32 | get user() { 33 | if (this.#user === null) { 34 | this.#user = User.objects.get({ id: this.user_id }); 35 | this.#user.catch(() => {}); 36 | } 37 | 38 | return this.#user; 39 | } 40 | 41 | set user(u) { 42 | this.#user = Promise.resolve(u); 43 | this.user_id = this.#user.id; 44 | } 45 | 46 | get namespace() { 47 | if (this.#namespace === null) { 48 | this.#namespace = Namespace.objects.get({ id: this.namespace_id }); 49 | this.#namespace.catch(() => {}); 50 | } 51 | 52 | return this.#namespace; 53 | } 54 | 55 | set namespace(u) { 56 | this.#namespace = Promise.resolve(u); 57 | this.namespace_id = this.#namespace.id; 58 | } 59 | }; 60 | 61 | const User = require('./user'); 62 | const Namespace = require('./namespace'); 63 | 64 | module.exports.objects = orm(module.exports, { 65 | id: joi 66 | .number() 67 | .integer() 68 | .greater(-1) 69 | .required(), 70 | user: orm.fk(User), 71 | namespace: orm.fk(Namespace), 72 | created: joi.date(), 73 | modified: joi.date(), 74 | active: joi.boolean().default(true), 75 | accepted: joi.boolean().default(true) 76 | }); 77 | -------------------------------------------------------------------------------- /misc/k8s/templates/storage-deployment.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | apiVersion: 'extensions/v1beta1', 3 | kind: 'Deployment', 4 | metadata: { 5 | creationTimestamp: null, 6 | labels: { 'entropic-service': 'storage' }, 7 | name: 'storage' 8 | }, 9 | spec: { 10 | replicas: 1, 11 | strategy: { type: 'Recreate' }, 12 | template: { 13 | metadata: { 14 | creationTimestamp: null, 15 | labels: { 'entropic-service': 'storage' } 16 | }, 17 | spec: { 18 | containers: [ 19 | { 20 | args: ['npm', 'start'], 21 | env: [ 22 | 'CACHE_DIR', 23 | 'DEV_LATENCY_ERROR_MS', 24 | 'EXTERNAL_HOST', 25 | 
'NODE_ENV', 26 | 'OAUTH_GITHUB_CLIENT', 27 | 'OAUTH_GITHUB_SECRET', 28 | 'OAUTH_PASSWORD', 29 | 'PGDATABASE', 30 | 'PGHOST', 31 | 'PGUSER', 32 | 'SESSION_EXPIRY_SECONDS', 33 | 'SESSION_SECRET', 34 | 'STORAGE_API_URL' 35 | ].map(name => ({ 36 | name, 37 | valueFrom: { 38 | configMapKeyRef: { 39 | key: name, 40 | name: 'entropic-env' 41 | } 42 | } 43 | })), 44 | image: 'entropicdev/storage:latest', 45 | imagePullPolicy: 'Always', 46 | name: 'storage', 47 | ports: [{ containerPort: 3000 }], 48 | resources: {}, 49 | volumeMounts: [{ mountPath: '/var/cache/entropic', name: 'storage-claim' }], 50 | workingDir: '/services/storage' 51 | } 52 | ], 53 | volumes: [ 54 | { 55 | name: 'storage-claim', 56 | persistentVolumeClaim: { claimName: 'storage-claim' } 57 | } 58 | ], 59 | restartPolicy: 'Always' 60 | } 61 | } 62 | }, 63 | status: {} 64 | }; 65 | -------------------------------------------------------------------------------- /services/storage/models/maintainer.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const orm = require('ormnomnom'); 4 | const joi = require('@hapi/joi'); 5 | 6 | const Package = require('./package'); 7 | const Namespace = require('./namespace'); 8 | 9 | module.exports = class Maintainer { 10 | #package = null; 11 | #namespace = null; 12 | 13 | constructor({ 14 | id, 15 | package_id, 16 | package: pkg, 17 | namespace_id, 18 | namespace, 19 | created, 20 | modified, 21 | active, 22 | accepted 23 | }) { 24 | this.id = id; 25 | this.package_id = package_id; 26 | this.#package = pkg ? Promise.resolve(pkg) : null; 27 | this.namespace_id = namespace_id; 28 | this.#namespace = namespace ? 
Promise.resolve(namespace) : null; 29 | this.created = created; 30 | this.modified = modified; 31 | this.active = active; 32 | this.accepted = accepted; 33 | } 34 | 35 | get package() { 36 | if (this.#package === null) { 37 | this.#package = Package.objects.get({ id: this.package_id }); 38 | this.#package.catch(() => {}); 39 | } 40 | 41 | return this.#package; 42 | } 43 | 44 | set package(p) { 45 | this.#package = Promise.resolve(p); 46 | this.package_id = this.#package.id; 47 | } 48 | 49 | get namespace() { 50 | if (this.#namespace === null) { 51 | this.#namespace = Namespace.objects.get({ id: this.namespace_id }); 52 | this.#namespace.catch(() => {}); 53 | } 54 | 55 | return this.#namespace; 56 | } 57 | 58 | set namespace(n) { 59 | this.#namespace = Promise.resolve(n); 60 | this.namespace_id = this.#namespace.id; 61 | } 62 | }; 63 | 64 | module.exports.objects = orm(module.exports, { 65 | id: joi 66 | .number() 67 | .integer() 68 | .greater(-1) 69 | .required(), 70 | package: orm.fk(Package), 71 | namespace: orm.fk(Namespace), 72 | created: joi.date(), 73 | modified: joi.date(), 74 | active: joi.boolean().default(true), 75 | accepted: joi.boolean().default(true) 76 | }); 77 | -------------------------------------------------------------------------------- /services/storage/test/02-validations.spec.js: -------------------------------------------------------------------------------- 1 | /* eslint-env node, mocha */ 2 | 'use strict'; 3 | 4 | const check = require('../../lib/validations'); 5 | const demand = require('must'); 6 | 7 | describe('name validations', () => { 8 | describe('validLegacyPackage()', () => { 9 | it('JSONStream is valid', () => { 10 | const result = check.validLegacyPackage('JSONStream'); 11 | result.must.be.true(); 12 | }); 13 | 14 | it('js!%on-stream is invalid', () => { 15 | const result = check.validLegacyPackage('js!%on-stream'); 16 | result.must.be.false(); 17 | }); 18 | }); 19 | 20 | describe('packageNameOK()', () => { 21 | it('uses the 
legacy rules for legacy namespace', () => { 22 | const result = check.packageNameOK('JSONStream', 'legacy'); 23 | demand(result).not.exist(); 24 | }); 25 | 26 | it('uses the legacy rules for legacy namespace', () => { 27 | const result = check.packageNameOK('js!%on-stream', 'legacy'); 28 | result.must.be.a.string(); 29 | }); 30 | 31 | it('uses modern rules for modern namespaces', () => { 32 | const result = check.packageNameOK('JSONStream', 'modern'); 33 | result.must.be.a.string(); 34 | }); 35 | 36 | it('is a fussbudget about _', () => { 37 | const result = check.packageNameOK('json_stream', 'modern'); 38 | result.must.be.a.string(); 39 | }); 40 | 41 | it('passes valid package names', () => { 42 | const result = check.packageNameOK('beefy', 'chrisdickinson'); 43 | demand(result).not.exist(); 44 | }); 45 | }); 46 | 47 | it('nameOK() exists and checks things', () => { 48 | let result = check.nameOK('chrisdickinson'); 49 | demand(result.error).be.null(); 50 | result = check.nameOK('I am the very model of a modern major general'); 51 | result.error.must.exist(); 52 | result = check.nameOK('q'); 53 | result.error.must.exist(); 54 | }); 55 | 56 | describe('validDependencyName()', () => { 57 | it('has tests'); 58 | }); 59 | }); 60 | -------------------------------------------------------------------------------- /cli/lib/commands/help.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const fs = require('fs'); 4 | const path = require('path'); 5 | const { promisify } = require('util'); 6 | const readdirAsync = promisify(fs.readdir); 7 | const accessAsync = promisify(fs.access); 8 | 9 | const userHome = require('user-home'); 10 | const getLocale = require('os-locale'); 11 | 12 | module.exports = help; 13 | 14 | async function help(opts) { 15 | const command = opts.argv[0]; 16 | if (!command) { 17 | await showBasicHelp(); 18 | } else { 19 | return new Promise(async (resolve, reject) => { 20 | const locale = (await 
getLocale()).toLowerCase(); 21 | const localeFn = path.join(__dirname, `help-${command}-${locale}.txt`); 22 | const defaultFn = path.join(__dirname, `help-${command}-en_us.txt`); 23 | let fn = localeFn; 24 | 25 | try { 26 | await accessAsync(localeFn); 27 | } catch (err) { 28 | console.log( 29 | `Could not find a help file for locale ${locale}, defaulting to English` 30 | ); 31 | console.log(`You can contribute a translation for this help file!`); 32 | fn = defaultFn; 33 | } 34 | 35 | fs.createReadStream(fn) 36 | .on('error', async err => { 37 | if (err.code === 'ENOENT') { 38 | console.log( 39 | `help has not been implemented yet for ${command}. You could build it!` 40 | ); 41 | await showBasicHelp(); 42 | return resolve(); 43 | } 44 | reject(err); 45 | }) 46 | .on('end', () => resolve()) 47 | .pipe(process.stdout); 48 | }); 49 | } 50 | } 51 | 52 | async function showBasicHelp() { 53 | const commands = (await readdirAsync(__dirname)) 54 | .filter(cmd => cmd.endsWith('.js')) 55 | .map(cmd => `\t${cmd.split('.')[0]}`) 56 | .join('\n'); 57 | console.log('Usage: ds '); 58 | console.log('\nAvailable commands:'); 59 | console.log(commands); 60 | console.log(`\nThe configuration is located at ${userHome}/.entropicrc `); 61 | } 62 | -------------------------------------------------------------------------------- /cli/lib/main.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | 'use strict'; 4 | 5 | module.exports = main; 6 | module.exports.unpack = unpack; 7 | 8 | const minimist = require('minimist'); 9 | 10 | const config = require('./config'); 11 | const logger = require('./logger'); 12 | const Api = require('./api'); 13 | 14 | async function unpack(argv, { log = logger, load = config.load } = {}) { 15 | let [commandName = 'help'] = argv; 16 | if (/[/\\]/.test(commandName)) { 17 | log.log(`Ignoring malformed command name: ${JSON.stringify(commandName)}`); 18 | commandName = 'help'; 19 | } 20 | 21 | let 
cmd; 22 | try { 23 | cmd = require(`./commands/${commandName}`); 24 | } catch (e) { 25 | cmd = require('./commands/help'); 26 | } 27 | 28 | const { _, ...args } = minimist(argv.slice(1)); 29 | const config = await load(); 30 | const env = {}; 31 | for (const key in process.env) { 32 | if (key.startsWith('ent_')) { 33 | env[key.slice(4)] = process.env[key]; 34 | } 35 | } 36 | 37 | const registry = args.registry || config.registry || env.registry || 'https://registry.entropic.dev'; 38 | 39 | const registryConfig = (config.registries || {})[registry] || {}; 40 | 41 | // env is overridden by config, which is overridden by registry-specific 42 | // config, ... 43 | const bundle = { 44 | ...env, 45 | ...config, 46 | ...registryConfig, 47 | ...args, 48 | argv: _, 49 | api: new Api(registry), 50 | log 51 | }; 52 | 53 | return { 54 | cmd, 55 | bundle, 56 | }; 57 | } 58 | 59 | async function main(argv) { 60 | try { 61 | const { cmd, bundle } = await unpack(argv); 62 | await cmd(bundle); 63 | return 0; 64 | } catch (err) { 65 | console.log(err.stack); 66 | return 1; 67 | } 68 | } 69 | 70 | if (require.main === module) { 71 | main(process.argv.slice(2)) 72 | .then(code => { 73 | if (Number(code)) { 74 | process.exit(code); 75 | } 76 | }) 77 | .catch(err => { 78 | console.error(err.stack); 79 | process.exit(1); 80 | }); 81 | } 82 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "entropic", 3 | "description": "A new package registry with a new CLI, designed to be easy to stand up inside your network.", 4 | "version": "1.0.0", 5 | "author": "C J Silverio ", 6 | "bugs": { 7 | "url": "https://github.com/entropic-dev/entropic/issues" 8 | }, 9 | "contributors": [ 10 | "C J Silverio ", 11 | "Chris Dickinson (http://neversaw.us/)" 12 | ], 13 | "dependencies": {}, 14 | "devDependencies": { 15 | "husky": "2.4.0", 16 | "lint-staged": "8.2.0", 
17 | "markdownlint": "~0.15.0", 18 | "markdownlint-cli": "~0.16.0", 19 | "npm-run-all": "4.1.5", 20 | "opt-cli": "1.6.0", 21 | "prettier": "~1.17.1" 22 | }, 23 | "directories": { 24 | "doc": "docs" 25 | }, 26 | "homepage": "https://github.com/entropic-dev/entropic#readme", 27 | "husky": { 28 | "hooks": { 29 | "pre-commit": "opt --in pre-commit --exec lint-staged" 30 | } 31 | }, 32 | "keywords": [], 33 | "license": "Apache-2.0", 34 | "lint-staged": { 35 | "*.js": [ 36 | "npm run lint-registry", 37 | "npm run lint-cli", 38 | "prettier --write", 39 | "git add" 40 | ], 41 | "*.md": [ 42 | "npm run lint-md", 43 | "git add" 44 | ] 45 | }, 46 | "main": "index.js", 47 | "private": true, 48 | "repository": "https://github.com/entropic-dev/entropic", 49 | "scripts": { 50 | "lint": "run-s -s lint-registry lint-cli lint-md", 51 | "lint-cli": "cd cli; npm run lint", 52 | "lint-fix": "prettier --write '**/*.js'", 53 | "lint-md": "markdownlint \"**/*.md\" -i \"**/node_modules/**\"", 54 | "lint-registry": "cd services/registry; npm run lint", 55 | "postinstall": "for d in cli $(printf 'services/%s ' 'registry' 'workers' 'web' 'storage' 'common/boltzmann'); do ( cd $d; npm i ); done", 56 | "start": "docker-compose up", 57 | "test": "for d in cli $(printf 'services/%s ' 'registry' 'workers' 'web' 'storage' 'common/boltzmann'); do ( cd $d; npm t ); done" 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /services/web/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "entropic-web", 3 | "description": "community package manager but for spiders", 4 | "version": "0.0.1", 5 | "author": "Chris Dickinson ", 6 | "bugs": { 7 | "url": "https://github.com/entropic-dev/entropic/issues" 8 | }, 9 | "dependencies": { 10 | "@hapi/iron": "^5.1.0", 11 | "@hapi/joi": "~15.0.2", 12 | "boltzmann": "file:../common/boltzmann", 13 | "cls-hooked": "^4.2.2", 14 | "cookie": "^0.3.1", 15 | "csrf": 
"~3.1.0", 16 | "culture-ships": "~1.0.0", 17 | "dotenv": "~8.0.0", 18 | "escape-html": "^1.0.3", 19 | "find-my-way": "~2.0.1", 20 | "graceful-fs": "^4.1.15", 21 | "is-email-maybe": "^1.0.1", 22 | "markdown": "^0.5.0", 23 | "micro": "~9.3.4", 24 | "mkdirp": "~0.5.1", 25 | "multiparty": "^4.2.1", 26 | "node-fetch": "~2.5.0", 27 | "npm-user-validate": "^1.0.0", 28 | "ormnomnom": "^5.2.2", 29 | "pacote": "~9.5.0", 30 | "pg": "~7.4.0", 31 | "semver": "^6.0.0", 32 | "ssri": "^6.0.1", 33 | "uuid": "~3.3.2", 34 | "validate-npm-package-name": "^3.0.0" 35 | }, 36 | "devDependencies": { 37 | "babel-eslint": "~10.0.1", 38 | "db-migrate": "^0.11.5", 39 | "db-migrate-pg": "^0.5.0", 40 | "eslint": "~5.16.0", 41 | "eslint-config-prettier": "~4.3.0", 42 | "eslint-plugin-prettier": "~3.1.0", 43 | "mocha": "~6.1.4", 44 | "must": "~0.13.4", 45 | "nyc": "~14.1.1", 46 | "pgtools": "^0.3.0", 47 | "prettier": "~1.17.0", 48 | "test-listen": "^1.1.0" 49 | }, 50 | "homepage": "https://github.com/entopic-dev/entropic#readme", 51 | "keywords": [ 52 | "entropy", 53 | "heat death" 54 | ], 55 | "license": "Apache-2.0", 56 | "main": "index.js", 57 | "private": true, 58 | "repository": { 59 | "type": "git", 60 | "url": "git://github.com/entopic-dev/entropic.git" 61 | }, 62 | "scripts": { 63 | "coverage": "NODE_ENV=test nyc mocha -R spec", 64 | "lint": "eslint .", 65 | "lint-fix": "prettier --write '**/*.js'", 66 | "posttest": "npm run lint", 67 | "start": "./server.js", 68 | "test": "mocha -R spec" 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /services/storage/test/models/token.spec.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const demand = require('must'); 4 | const providePostgres = require('../utils/postgres'); 5 | const Token = require('../../models/token'); 6 | 7 | const { createUser } = require('../utils/users'); 8 | 9 | describe('Token', () => { 10 | it( 11 | 'creates a 
token given a valid user and description', 12 | 13 | providePostgres(async () => { 14 | const newUser = await createUser('foo bar', 'baz@entropic.dev'); 15 | const token = await Token.create({ 16 | for: newUser, 17 | description: 'valid description' 18 | }); 19 | 20 | demand(typeof token).to.be('string'); 21 | }) 22 | ); 23 | 24 | it( 25 | 'creates a token prefixed with ent_v1', 26 | 27 | providePostgres(async () => { 28 | const newUser = await createUser('foo bar', 'baz@entropic.dev'); 29 | const token = await Token.create({ 30 | for: newUser, 31 | description: 'valid description' 32 | }); 33 | 34 | demand(token).startWith('ent_v1_'); 35 | }) 36 | ); 37 | 38 | describe('lookupUser', () => { 39 | it( 40 | 'A user can be looked up by a token', 41 | 42 | providePostgres(async () => { 43 | const newUser = await createUser('foo bar', 'baz@entropic.dev'); 44 | const token = await Token.create({ 45 | for: newUser, 46 | description: 'valid description' 47 | }); 48 | 49 | const foundUser = await Token.lookupUser(token); 50 | 51 | // The user we found by token should equal the new user we created. 
52 | demand(foundUser).to.eql(newUser); 53 | }) 54 | ); 55 | 56 | it( 57 | 'handles invalid values', 58 | 59 | providePostgres(async () => { 60 | [null, undefined, '', 'not_valid'].forEach(async invalidValue => { 61 | let error = undefined; 62 | 63 | try { 64 | await Token.lookupUser(invalidValue); 65 | } catch (e) { 66 | error = e.message; 67 | } 68 | 69 | demand(error).to.be('Invalid lookup value received'); 70 | }); 71 | }) 72 | ); 73 | }); 74 | }); 75 | -------------------------------------------------------------------------------- /cli/lib/commands/invitations.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const fetch = require('../fetch'); 4 | const figgy = require('figgy-pudding'); 5 | const { whoAmI } = require('../utils'); 6 | 7 | module.exports = invitations; 8 | 9 | // usage: ds invitations namespace 10 | // list all invitations for the given namespace, if the logged-in user has permission 11 | 12 | const invitationsOpts = figgy({ 13 | argv: true, 14 | registry: { default: 'https://registry.entropic.dev' }, 15 | registries: { default: [] }, 16 | token: true, 17 | packages: true, 18 | log: { default: require('npmlog') } 19 | }); 20 | 21 | const getUrl = (packages, registry, invitee) => 22 | packages 23 | ? 
`${registry}/v1/namespaces/namespace/${invitee}/maintainerships?status=pending` 24 | : `${registry}/v1/users/user/${invitee}/memberships?status=pending`; 25 | 26 | async function invitations(opts) { 27 | opts = invitationsOpts(opts); 28 | let invitee = opts.argv[0] || (await whoAmI(opts)); 29 | if (!invitee) { 30 | console.log('Usage: ds invitations [--packages]'); 31 | process.exit(1); 32 | } 33 | 34 | if (!invitee.includes('@')) { 35 | invitee += '@' + opts.registry.replace(/^https?:\/\//, ''); 36 | } 37 | 38 | const response = await fetch(getUrl(opts.packages, opts.registry, invitee), { 39 | headers: { authorization: `Bearer ${opts.token}` } 40 | }); 41 | 42 | const pkg = await response.json(); 43 | let result = []; 44 | if (Array.isArray(pkg.objects)) { 45 | result = pkg.objects; 46 | } 47 | 48 | const qualifier = opts.packages ? 'package ' : ''; 49 | 50 | if (result.length === 0) { 51 | console.log(`${invitee} has no ${qualifier}invitations.`); 52 | return 0; 53 | } 54 | 55 | console.log( 56 | `${invitee} has ` + 57 | (result.length === 1 58 | ? 
`one ${qualifier}invitation.` 59 | : `${result.length} ${qualifier}invitations.`) + 60 | '\nTo accept:\n' 61 | ); 62 | 63 | result.forEach(dest => { 64 | console.log(` ds join ${dest.name} --as ${invitee}`); 65 | }); 66 | 67 | console.log( 68 | `\nTo decline an invitation: ds decline --as ${invitee}` 69 | ); 70 | } 71 | -------------------------------------------------------------------------------- /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | jobs: 3 | storage: 4 | docker: 5 | - image: circleci/node:latest 6 | - image: circleci/postgres:latest 7 | 8 | working_directory: ~/repo/services/storage 9 | 10 | steps: 11 | - checkout: 12 | path: ~/repo 13 | - restore_cache: 14 | keys: 15 | - v1-dependencies-{{ checksum "package-lock.json" }} 16 | - v1-dependencies- 17 | - run: npm install 18 | - save_cache: 19 | paths: 20 | - node_modules 21 | key: v1-dependencies-{{ checksum "package-lock.json" }} 22 | 23 | - run: PGHOST=localhost EXTERNAL_HOST=http://localhost:3000 npm test 24 | 25 | cli: 26 | docker: 27 | - image: circleci/node:latest 28 | - image: circleci/postgres:latest 29 | 30 | working_directory: ~/repo/cli 31 | 32 | steps: 33 | - checkout: 34 | path: ~/repo 35 | - restore_cache: 36 | keys: 37 | - v1-dependencies-{{ checksum "package-lock.json" }} 38 | - v1-dependencies- 39 | - run: npm install 40 | - save_cache: 41 | paths: 42 | - node_modules 43 | key: v1-dependencies-{{ checksum "package-lock.json" }} 44 | 45 | - run: npm test 46 | 47 | - run: 48 | name: npm pack 49 | command: | 50 | mkdir -p /tmp/artifacts 51 | cd /tmp/artifacts 52 | npm pack ~/repo/cli 53 | - store_artifacts: 54 | path: /tmp/artifacts 55 | 56 | docs: 57 | docker: 58 | - image: circleci/node:latest 59 | 60 | working_directory: ~/repo 61 | 62 | steps: 63 | - checkout: 64 | path: ~/repo 65 | - restore_cache: 66 | keys: 67 | - v1-dependencies-{{ checksum "package-lock.json" }} 68 | - v1-dependencies- 69 | - run: npm 
install 70 | - save_cache: 71 | paths: 72 | - node_modules 73 | key: v1-dependencies-{{ checksum "package-lock.json" }} 74 | 75 | - run: npm run lint-md 76 | 77 | workflows: 78 | version: 2 79 | everything: 80 | jobs: 81 | - storage 82 | - cli 83 | - docs 84 | -------------------------------------------------------------------------------- /cli/lib/commands/members.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const fetch = require('../fetch'); 4 | const figgy = require('figgy-pudding'); 5 | const parsePackageSpec = require('../canonicalize-spec'); 6 | 7 | module.exports = members; 8 | 9 | // usage: ds members name@host[/pkg] 10 | 11 | const membersOpts = figgy({ 12 | argv: true, 13 | registry: true, 14 | log: { default: require('npmlog') } 15 | }); 16 | 17 | async function members(opts) { 18 | opts = membersOpts(opts); 19 | 20 | if (opts.argv.length !== 1) { 21 | console.error('Usage: ds members '); 22 | return 1; 23 | } 24 | 25 | if (opts.argv[0].includes('/')) { 26 | return listPackageMaintainers(opts); 27 | } 28 | 29 | // list namespace members 30 | let ns = opts.argv[0]; 31 | if (!ns.includes('@')) { 32 | ns += '@' + opts.registry.replace(/^https?:\/\//, ''); 33 | } 34 | const uri = `${opts.registry}/v1/namespaces/namespace/${ns}/members`; 35 | const response = await fetch(uri); 36 | const body = await response.json(); 37 | if (!Array.isArray(body.objects) || body.objects.length === 0) { 38 | console.log(`${ns} has no members.`); 39 | return 0; 40 | } 41 | 42 | console.log( 43 | `${ns} has ` + 44 | (body.objects.length == 1 45 | ? 
'one member' 46 | : `${body.objects.length} members`) + 47 | ':' 48 | ); 49 | 50 | body.objects.forEach(n => { 51 | console.log(` ${n}`); 52 | }); 53 | } 54 | 55 | async function listPackageMaintainers(opts) { 56 | const { _, ...parsed } = parsePackageSpec( 57 | opts.argv[0], 58 | opts.registry.replace(/^https?:\/\//, '') 59 | ); 60 | 61 | const uri = `${opts.registry}/v1/packages/package/${ 62 | parsed.canonical 63 | }/maintainers`; 64 | 65 | const response = await fetch(uri); 66 | const body = await response.json(); 67 | 68 | if (!Array.isArray(body.objects) || body.objects.length === 0) { 69 | console.log(`${parsed.canonical} has no maintainers.`); 70 | return 0; 71 | } 72 | 73 | console.log( 74 | `${parsed.canonical} has ` + 75 | (body.objects.length == 1 76 | ? 'one maintainer' 77 | : `${body.objects.length} maintainer`) + 78 | ':' 79 | ); 80 | 81 | body.objects.forEach(n => { 82 | console.log(` ${n}`); 83 | }); 84 | } 85 | -------------------------------------------------------------------------------- /cli/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ds", 3 | "description": "ds: entropy delta; the entropic client", 4 | "version": "1.0.0", 5 | "author": "Chris Dickinson (http://neversaw.us/)", 6 | "bin": { 7 | "ds": "./lib/main.js" 8 | }, 9 | "bugs": { 10 | "url": "https://github.com/entropic-dev/entropic/issues" 11 | }, 12 | "bundledDependencies": [ 13 | "@iarna/toml", 14 | "figgy-pudding", 15 | "form-data", 16 | "minimist", 17 | "node-fetch", 18 | "npm-packlist", 19 | "npm-profile", 20 | "npmlog", 21 | "opener", 22 | "ssri", 23 | "user-home" 24 | ], 25 | "contributors": [ 26 | "C J Silverio ", 27 | "Chris Dickinson (http://neversaw.us/)" 28 | ], 29 | "dependencies": { 30 | "@iarna/toml": "^2.2.3", 31 | "chalk": "^2.4.2", 32 | "figgy-pudding": "^3.5.1", 33 | "form-data": "^2.3.3", 34 | "fs.promises": "^0.1.2", 35 | "graceful-fs": "^4.1.15", 36 | "minimist": "^1.2.0", 37 | "node-fetch": 
"^2.5.0", 38 | "npm-packlist": "^1.4.1", 39 | "npm-profile": "^4.0.1", 40 | "npmlog": "^4.1.2", 41 | "opener": "^1.5.1", 42 | "os-locale": "^3.1.0", 43 | "read-package-tree": "^5.2.2", 44 | "ssri": "^6.0.1", 45 | "user-home": "^2.0.0" 46 | }, 47 | "devDependencies": { 48 | "ava": "^2.0.0", 49 | "babel-eslint": "~10.0.1", 50 | "eslint": "~5.16.0", 51 | "eslint-config-prettier": "~4.3.0", 52 | "eslint-plugin-prettier": "~3.1.0", 53 | "mocha": "~6.1.4", 54 | "must": "~0.13.4", 55 | "prettier": "~1.17.1", 56 | "sinon": "^7.3.2" 57 | }, 58 | "directories": { 59 | "lib": "lib" 60 | }, 61 | "engines": { 62 | "node": ">=12" 63 | }, 64 | "homepage": "https://github.com/entopic-dev/entropic#readme", 65 | "keywords": [], 66 | "license": "Apache-2.0", 67 | "main": "index.js", 68 | "repository": { 69 | "type": "git", 70 | "url": "git://github.com/entopic-dev/entropic.git" 71 | }, 72 | "scripts": { 73 | "lint": "eslint .", 74 | "lint-fix": "prettier --write '**/*.js'", 75 | "test": "ava" 76 | }, 77 | "ava": { 78 | "files": [ 79 | "test/**/*" 80 | ], 81 | "helpers": [ 82 | "test/utils/**/*" 83 | ], 84 | "sources": [ 85 | "lib/**/*" 86 | ] 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /services/storage/models/user.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const iron = require('@hapi/iron'); 4 | const orm = require('ormnomnom'); 5 | const joi = require('@hapi/joi'); 6 | 7 | orm.describeConflict('users_name_idx', 'Usernames must be unique.'); 8 | 9 | module.exports = class User { 10 | constructor({ 11 | id, 12 | name, 13 | email, 14 | tfa_secret, 15 | backup_codes, 16 | tfa_active, 17 | created, 18 | modified, 19 | active 20 | }) { 21 | this.id = id; 22 | this.name = name; 23 | this.email = email; 24 | this.tfa_secret = tfa_secret; 25 | this.backup_codes = backup_codes; 26 | this.tfa_active = tfa_active; 27 | 28 | this.created = created; 29 | this.modified = 
modified; 30 | this.active = active; 31 | } 32 | 33 | toJSON() { 34 | const { tfa_secret: _0, backup_codes: _1, id: _2, ...meta } = this; 35 | return meta; 36 | } 37 | 38 | static async signup(name, email, remoteAuth) { 39 | const user = await User.objects.create({ 40 | name, 41 | email 42 | }); 43 | 44 | if (remoteAuth) { 45 | await Authentication.objects.create({ 46 | user, 47 | remote_identity: remoteAuth.id, 48 | provider: remoteAuth.provider, 49 | access_token_enc: await iron.seal( 50 | remoteAuth.token, 51 | process.env.OAUTH_PASSWORD, 52 | iron.defaults 53 | ), 54 | metadata: {} 55 | }); 56 | } 57 | 58 | const host = await Host.objects.get({ id: 1 }); 59 | const namespace = await Namespace.objects.create({ name, host }); 60 | await NamespaceMember.objects.create({ 61 | accepted: true, 62 | namespace, 63 | user 64 | }); 65 | 66 | return user; 67 | } 68 | }; 69 | 70 | const NamespaceMember = require('./namespace-member'); 71 | const Authentication = require('./authentication'); 72 | const Namespace = require('./namespace'); 73 | const Host = require('./host'); 74 | 75 | module.exports.objects = orm(module.exports, { 76 | id: joi 77 | .number() 78 | .integer() 79 | .greater(-1) 80 | .required(), 81 | name: joi.string().min(1), 82 | email: joi.string().allow(null), 83 | tfa_secret: joi.string().allow(null), 84 | backup_codes: joi.array().items(joi.string()), 85 | tfa_active: joi.boolean().allow(null), 86 | created: joi.date(), 87 | modified: joi.date(), 88 | active: joi.boolean().default(true) 89 | }); 90 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | # docker-compose is provided for development purposes only and is not 2 | # intended for use as a production entropic setup. 
3 | version: '3.1' 4 | services: 5 | db: 6 | image: postgres:10.1-alpine 7 | volumes: 8 | - postgres_data:/var/lib/postgresql/data/ 9 | ports: 10 | - "5432:5432" 11 | environment: 12 | POSTGRES_USER: ${USER} 13 | networks: 14 | - entropic 15 | 16 | redis: 17 | image: redis:alpine 18 | ports: 19 | - "6379:6379" 20 | networks: 21 | - entropic 22 | 23 | queue: 24 | image: schickling/beanstalkd 25 | ports: 26 | - "11300:11300" 27 | volumes: 28 | - beanstalk_wal:/var/lib/beanstalkd/ 29 | networks: 30 | - entropic 31 | 32 | registry: 33 | image: mhart/alpine-node:12 34 | volumes: 35 | - ./services/:/services 36 | working_dir: /services/registry 37 | command: sh -c "npm run dev" 38 | networks: 39 | - entropic 40 | ports: 41 | - "3000:3000" 42 | env_file: 43 | - ./services/registry/.env 44 | environment: 45 | STORAGE_API_URL: http://storage:3002 46 | PORT: 3000 47 | WEB_HOST: http://localhost:3001 48 | REDIS_URL: redis://redis:6379 49 | EXTERNAL_HOST: http://localhost:3000 50 | 51 | web: 52 | image: mhart/alpine-node:12 53 | volumes: 54 | - ./services/:/services 55 | working_dir: /services/web 56 | command: npm start 57 | networks: 58 | - entropic 59 | ports: 60 | - "3001:3001" 61 | env_file: 62 | - ./services/web/.env 63 | environment: 64 | STORAGE_API_URL: http://storage:3002 65 | PORT: 3001 66 | WEB_HOST: http://localhost:3001 67 | REDIS_URL: redis://redis:6379 68 | EXTERNAL_HOST: http://localhost:3001 69 | 70 | storage: 71 | image: mhart/alpine-node:12 72 | volumes: 73 | - ./services/:/services 74 | working_dir: /services/storage 75 | command: npm start 76 | networks: 77 | - entropic 78 | # ports are explicitly hidden, this is an internal service only. 
79 | #ports: 80 | # - "3002:3002" 81 | env_file: 82 | - ./services/storage/.env 83 | environment: 84 | POSTGRES_URL: postgres://postgres@db:5432 85 | PGHOST: db 86 | REDIS_URL: redis://redis:6379 87 | PORT: 3002 88 | PGUSER: postgres 89 | PGDATABASE: entropic_dev 90 | 91 | volumes: 92 | postgres_data: 93 | beanstalk_wal: 94 | 95 | networks: 96 | entropic: 97 | -------------------------------------------------------------------------------- /services/storage/handlers/providers.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const { fork, response } = require('boltzmann'); 4 | const { json } = require('micro'); 5 | const uuid = require('uuid'); 6 | 7 | const Authentication = require('../models/authentication'); 8 | 9 | module.exports = [ 10 | fork.get('/v1/authn/providers/provider/:provider/id/:id', providerAuthDetail), 11 | 12 | fork.get('/v1/authn/sessions/session/:session', sessionDetail), 13 | fork.post('/v1/authn/sessions', sessionCreate), 14 | fork.post('/v1/authn/sessions/session/:session', sessionUpdate) 15 | ]; 16 | 17 | async function providerAuthDetail(context, { provider, id }) { 18 | const authn = await Authentication.objects 19 | .get({ 20 | active: true, 21 | remote_identity: id, 22 | provider, 23 | 'user.active': true 24 | }) 25 | .catch(Authentication.objects.NotFound, () => null); 26 | 27 | if (!authn) { 28 | return response.error.coded('auth.not_found', 404); 29 | } 30 | 31 | return response.json(await authn.serialize()); 32 | } 33 | 34 | async function sessionDetail(context, { session }) { 35 | const data = await context.redis.getAsync(`cli_${session}`); 36 | if (!data) { 37 | return response.error.coded('auth.session.not_found', 404); 38 | } 39 | 40 | let result = null; 41 | try { 42 | result = JSON.parse(data); 43 | } catch (err) { 44 | context.logger.error( 45 | `Caught error decoding session "${session}": String(data) = ${String( 46 | data 47 | )}; err = ${err}` 48 | ); 49 | return 
response.error.coded('auth.session.bad_session', 500); 50 | } 51 | 52 | return response.json(result); 53 | } 54 | 55 | async function sessionCreate(context, params) { 56 | const { description = 'a great login session' } = await json(context.request); 57 | const session = uuid.v4(); 58 | 59 | await context.redis.setexAsync( 60 | `cli_${session}`, 61 | 5000, 62 | JSON.stringify({ description }) 63 | ); 64 | 65 | return response.json({ session }); 66 | } 67 | 68 | async function sessionUpdate(context, { session }) { 69 | const { value } = await json(context.request); 70 | if (!value) { 71 | context.logger.error( 72 | `Bad resolution for "${session}": String(value) = ${String(value)}` 73 | ); 74 | return response.error.coded('auth.session.bad_resolution'); 75 | } 76 | 77 | await context.redis.setexAsync( 78 | `cli_${session}`, 79 | 5000, 80 | JSON.stringify({ value }) 81 | ); 82 | 83 | return response.empty(); 84 | } 85 | -------------------------------------------------------------------------------- /services/web/middleware/session.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = createSessionMW; 4 | 5 | const { Headers, Response } = require('node-fetch'); 6 | const iron = require('@hapi/iron'); 7 | const cookie = require('cookie'); 8 | const uuid = require('uuid'); 9 | 10 | const SessionMap = require('../lib/session-map'); 11 | 12 | function createSessionMW({ 13 | sessionId = 's', 14 | secret = process.env.SESSION_SECRET 15 | } = {}) { 16 | return next => { 17 | const store = new RedisStore(); 18 | return async context => { 19 | const parsed = cookie.parse(context.request.headers.cookie || ''); 20 | const id = parsed[sessionId]; 21 | const exists = Boolean(id); 22 | 23 | const unwrappedId = id 24 | ? 
await iron.unseal(id, secret, iron.defaults) 25 | : null; 26 | const map = await store.load(context, unwrappedId); 27 | 28 | context.session = map; 29 | const response = await next(context); 30 | 31 | if (map.dirty) { 32 | const newId = await store.save(context, unwrappedId, map); 33 | const header = [ 34 | response.headers['set-cookie'], 35 | unwrappedId !== newId 36 | ? `${sessionId}=${encodeURIComponent( 37 | await iron.seal(newId, secret, iron.defaults) 38 | )}; SameSite=Lax; HttpOnly; Max-Age=365000` 39 | : null 40 | ].filter(Boolean); 41 | 42 | const headers = new Headers(response.headers); 43 | headers.set('set-cookie', header); 44 | 45 | return new Response(response.body, { 46 | status: response.status, 47 | headers 48 | }); 49 | } 50 | 51 | return response; 52 | }; 53 | }; 54 | } 55 | 56 | class RedisStore { 57 | constructor() {} 58 | 59 | async load(context, id) { 60 | const sessionData = id 61 | ? JSON.parse((await context.redis.getAsync(id)) || '{}') 62 | : {}; 63 | 64 | return new SessionMap(Object.entries(sessionData)); 65 | } 66 | 67 | async save(context, id, map) { 68 | const object = [...map].reduce((accum, [key, value]) => { 69 | accum[key] = value; 70 | return accum; 71 | }, {}); 72 | 73 | id = id || generateSessionID(); 74 | await context.redis.setexAsync( 75 | id, 76 | Number(process.env.SESSION_EXPIRY_SECONDS) || 31536000, 77 | JSON.stringify(object) 78 | ); 79 | 80 | return id; 81 | } 82 | } 83 | module.exports.RedisStore = RedisStore; 84 | 85 | function generateSessionID() { 86 | return `sess_${uuid.v4()}`; 87 | } 88 | -------------------------------------------------------------------------------- /services/storage/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "entropic-core", 3 | "description": "the internal api of the community package manager", 4 | "version": "0.0.1", 5 | "author": "Chris Dickinson ", 6 | "bugs": { 7 | "url": 
"https://github.com/entropic-dev/entropic/issues" 8 | }, 9 | "dependencies": { 10 | "@hapi/iron": "^5.1.0", 11 | "@hapi/joi": "~15.0.2", 12 | "boltzmann": "file:../common/boltzmann", 13 | "cls-hooked": "^4.2.2", 14 | "cookie": "^0.3.1", 15 | "csrf": "~3.1.0", 16 | "culture-ships": "~1.0.0", 17 | "dotenv": "~8.0.0", 18 | "escape-html": "^1.0.3", 19 | "find-my-way": "~2.0.1", 20 | "graceful-fs": "^4.1.15", 21 | "is-email-maybe": "^1.0.1", 22 | "markdown": "^0.5.0", 23 | "micro": "~9.3.4", 24 | "mkdirp": "~0.5.1", 25 | "multiparty": "^4.2.1", 26 | "node-fetch": "~2.5.0", 27 | "npm-user-validate": "^1.0.0", 28 | "ormnomnom": "^5.2.2", 29 | "pacote": "~9.5.0", 30 | "pg": "~7.4.0", 31 | "semver": "^6.0.0", 32 | "ssri": "^6.0.1", 33 | "uuid": "~3.3.2", 34 | "validate-npm-package-name": "^3.0.0" 35 | }, 36 | "devDependencies": { 37 | "babel-eslint": "~10.0.1", 38 | "db-migrate": "^0.11.5", 39 | "db-migrate-pg": "^0.5.0", 40 | "eslint": "~5.16.0", 41 | "eslint-config-prettier": "~4.3.0", 42 | "eslint-plugin-prettier": "~3.1.0", 43 | "mocha": "~6.1.4", 44 | "must": "~0.13.4", 45 | "nyc": "~14.1.1", 46 | "pgtools": "^0.3.0", 47 | "prettier": "~1.17.0", 48 | "test-listen": "^1.1.0" 49 | }, 50 | "homepage": "https://github.com/entropic-dev/entropic#readme", 51 | "keywords": [ 52 | "entropy", 53 | "heat death" 54 | ], 55 | "license": "Apache-2.0", 56 | "main": "index.js", 57 | "private": true, 58 | "repository": { 59 | "type": "git", 60 | "url": "git://github.com/entropic-dev/entropic.git" 61 | }, 62 | "scripts": { 63 | "coverage": "NODE_ENV=test PGUSER=postgres PGDATABASE=entropic_test nyc mocha -R spec", 64 | "createdb": "createdbjs -h ${PGHOST:-localhost} -U postgres entropic_${NODE_ENV:-dev} || true", 65 | "dev": "NODE_ENV=dev PGUSER=postgres PGDATABASE=entropic_${NODE_ENV:-dev} ./server.js", 66 | "lint": "eslint .", 67 | "lint-fix": "prettier --write '**/*.js'", 68 | "migrate": "PGUSER=postgres PGDATABASE=entropic_${NODE_ENV:-dev} db-migrate up --config db-migrate.json --env 
entropic", 69 | "posttest": "npm run lint", 70 | "prestart": "NODE_ENV=${NODE_ENV:-dev} npm run createdb >/dev/null; NODE_ENV=${NODE_ENV:-dev} npm run migrate", 71 | "pretest": "NODE_ENV=test npm run createdb >/dev/null; NODE_ENV=test npm run migrate", 72 | "start": "PGUSER=postgres PGDATABASE=entropic_${NODE_ENV:-dev} ./server.js", 73 | "test": "NODE_ENV=test PGUSER=postgres PGDATABASE=entropic_test mocha test/**/*.spec.js" 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /services/storage/models/token.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const querystring = require('querystring'); 4 | const crypto = require('crypto'); 5 | const orm = require('ormnomnom'); 6 | const joi = require('@hapi/joi'); 7 | const uuid = require('uuid'); 8 | 9 | const User = require('./user'); 10 | 11 | module.exports = class Token { 12 | #user = null; 13 | 14 | constructor({ 15 | id, 16 | user, 17 | user_id, 18 | value_hash, 19 | description, 20 | created, 21 | modified, 22 | active 23 | }) { 24 | this.id = id; 25 | 26 | this.#user = user ? 
Promise.resolve(user) : null; 27 | this.user_id = user_id; 28 | 29 | this.value_hash = value_hash; 30 | this.description = description; 31 | 32 | this.created = created; 33 | this.modified = modified; 34 | this.active = active; 35 | } 36 | 37 | toJSON() { 38 | const { user: _0, id: _1, user_id: _2, ...meta } = this; 39 | return meta; 40 | } 41 | 42 | static hasher(value) { 43 | return crypto 44 | .createHash('sha256') 45 | .update(value) 46 | .digest('base64'); 47 | } 48 | 49 | static async create({ for: user, description }) { 50 | const value = `ent_v1_${uuid.v4()}`; 51 | await Token.objects.create({ 52 | value_hash: Token.hasher(value), 53 | description, 54 | user 55 | }); 56 | 57 | return value; 58 | } 59 | 60 | static async lookupUser(value) { 61 | if (!value || !value.startsWith('ent_v1_')) { 62 | throw new Error('Invalid lookup value received'); 63 | } 64 | 65 | const hashed = Token.hasher(value); 66 | try { 67 | const found = await Token.objects.get({ 68 | value_hash: hashed, 69 | active: true, 70 | 'user.active': true 71 | }); 72 | const user = await found.user; 73 | return user; 74 | } catch (err) { 75 | // Rethrow problems that are not a missing user, because they are likely 76 | // either bugs or operational problems. 
77 | if (!(err instanceof User.objects.NotFound)) { 78 | throw err; 79 | } 80 | } 81 | } 82 | 83 | get user() { 84 | if (this.#user === null) { 85 | this.#user = User.objects.get({ id: this.user_id }); 86 | this.#user.catch(() => {}); 87 | } 88 | 89 | return this.#user; 90 | } 91 | 92 | set user(u) { 93 | this.#user = Promise.resolve(u); 94 | this.user_id = this.#user.id; 95 | } 96 | }; 97 | 98 | module.exports.objects = orm(module.exports, { 99 | id: joi 100 | .number() 101 | .integer() 102 | .greater(-1) 103 | .required(), 104 | user: orm.fk(User), 105 | value_hash: joi.string(), 106 | description: joi.string(), 107 | created: joi.date(), 108 | modified: joi.date(), 109 | active: joi.boolean().default(true) 110 | }); 111 | -------------------------------------------------------------------------------- /cli/lib/commands/maintainerships.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const fetch = require('../fetch'); 4 | const figgy = require('figgy-pudding'); 5 | 6 | module.exports = maintainerships; 7 | 8 | // usage: ds maintainerships namespace 9 | // list all packages maintained by the given namespace, if the logged-in user has permission 10 | 11 | const filterOps = figgy({ 12 | argv: true, 13 | registry: true, 14 | token: true, 15 | log: { default: require('npmlog') } 16 | }); 17 | 18 | async function maintainerships(opts) { 19 | opts = filterOps(opts); 20 | 21 | let invitee = opts.argv[0]; 22 | if (!invitee.includes('@')) { 23 | invitee += '@' + opts.registry.replace(/^https?:\/\//, ''); 24 | } 25 | const uri = `${ 26 | opts.registry 27 | }/v1/namespaces/namespace/${invitee}/maintainerships`; 28 | const response = await fetch(uri, { 29 | headers: { 30 | authorization: `Bearer ${opts.token}` 31 | } 32 | }); 33 | 34 | const body = await response.json(); 35 | if (body.error) { 36 | console.error(body.error); 37 | return 1; 38 | } 39 | if (!Array.isArray(body.objects)) { 40 | console.log(body); 41 | 
return 0; 42 | } 43 | 44 | if (body.objects.length === 0) { 45 | console.log(`${invitee} maintains no packages.`); 46 | return 0; 47 | } 48 | 49 | console.log( 50 | `${invitee} maintains ` + 51 | (body.objects.length == 1 52 | ? 'one package' 53 | : `${body.objects.length} packages`) + 54 | ':' 55 | ); 56 | 57 | body.objects.forEach(p => { 58 | console.log(` ${p.name}`); 59 | }); 60 | 61 | return listNamespaceMemberships(opts); 62 | } 63 | 64 | async function listNamespaceMemberships(opts) { 65 | // Note that this is ready to pop out into its own command if we want. 66 | let invitee = opts.argv[0]; 67 | if (!invitee.includes('@')) { 68 | invitee += '@' + opts.registry.replace(/^https?:\/\//, ''); 69 | } 70 | const uri = `${opts.registry}/v1/users/user/${invitee}/memberships`; 71 | const response = await fetch(uri, { 72 | headers: { 73 | authorization: `Bearer ${opts.token}` 74 | } 75 | }); 76 | 77 | const body = await response.json(); 78 | if (body.error) { 79 | console.error(body.error); 80 | return 1; 81 | } 82 | if (!Array.isArray(body.objects)) { 83 | console.log(body); 84 | return 0; 85 | } 86 | 87 | if (body.objects.length === 0) { 88 | console.log(`${invitee} is not a member of any namespaces.`); 89 | return 0; 90 | } 91 | 92 | console.log( 93 | `${invitee} is a member of ` + 94 | (body.objects.length == 1 95 | ? 
'one namespace' 96 | : `${body.objects.length} namespaces`) + 97 | ':' 98 | ); 99 | 100 | body.objects.forEach(p => { 101 | console.log(` ${p.name}`); 102 | }); 103 | } 104 | -------------------------------------------------------------------------------- /services/storage/decorators/can-write-package.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const Maintainer = require('../models/maintainer'); 4 | const Namespace = require('../models/namespace'); 5 | const Package = require('../models/package'); 6 | const { response } = require('boltzmann'); 7 | 8 | module.exports = canWrite; 9 | 10 | function canWrite(next) { 11 | // does the package exist? 12 | // -> YES 13 | // is the current user a maintainer or a member of a namespace that is a maintainer of package? 14 | // does the package require 2fa to be enabled to change? 15 | // -> YES 16 | // did the user authenticate with 2fa? 17 | // are we enabling 2fa? 18 | // -> YES 19 | // does the current user have 2fa enabled? (if not 400) 20 | // -> NO 21 | // is the current user a member of namespace? 
22 | 23 | return async (context, params) => { 24 | const { host, namespace, name } = params; 25 | 26 | if ( 27 | host !== String(process.env.EXTERNAL_HOST).replace(/^http(s)?:\/\//, '') 28 | ) { 29 | return response.error( 30 | `Cannot modify packages for remote host "${host}"`, 31 | 403 32 | ); 33 | } 34 | 35 | const pkg = await Package.objects 36 | .get({ 37 | active: true, 38 | name, 39 | 'namespace.active': true, 40 | 'namespace.name': namespace, 41 | 'namespace.host.name': host, 42 | 'namespace.host.active': true 43 | }) 44 | .catch(Package.objects.NotFound, () => null); 45 | 46 | if (pkg) { 47 | const [any = null] = await Maintainer.objects 48 | .filter({ 49 | package: pkg, 50 | active: true, 51 | accepted: true, 52 | 'namespace.active': true, 53 | 'namespace.namespace_members.active': true, 54 | 'namespace.namespace_members.accepted': true, 55 | 'namespace.namespace_members.user_id': context.user.id 56 | }) 57 | .then(); 58 | 59 | if (!any) { 60 | return response.error( 61 | `You are not a maintainer of "${namespace}@${host}/${name}"`, 62 | 403 63 | ); 64 | } 65 | 66 | if (pkg.require_tfa && !context.user.tfa_active) { 67 | return response.error( 68 | `You must enable 2FA to edit "${namespace}@${host}/${name}"`, 69 | 403 70 | ); 71 | } 72 | } else { 73 | const [any = null] = await Namespace.objects 74 | .filter({ 75 | active: true, 76 | name: namespace, 77 | 'host.name': host, 78 | 'host.active': true, 79 | 'namespace_members.active': true, 80 | 'namespace_members.user_id': context.user.id 81 | }) 82 | .then(); 83 | 84 | if (!any) { 85 | return response.error(`You are not a member of "${namespace}"`, 403); 86 | } 87 | } 88 | 89 | context.pkg = pkg; 90 | return next(context, params); 91 | }; 92 | } 93 | -------------------------------------------------------------------------------- /services/storage/models/package.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const orm = require('ormnomnom'); 4 
| const joi = require('@hapi/joi'); 5 | 6 | module.exports = class Package { 7 | #namespace = null; 8 | 9 | constructor({ 10 | id, 11 | name, 12 | namespace_id, 13 | namespace, 14 | require_tfa, 15 | version_integrities, 16 | yanked, 17 | created, 18 | modified, 19 | active, 20 | tags 21 | }) { 22 | this.id = id; 23 | this.name = name; 24 | this.namespace_id = namespace_id; 25 | this.#namespace = namespace ? Promise.resolve(namespace) : null; 26 | this.require_tfa = require_tfa; 27 | this.yanked = yanked; 28 | this.version_integrities = version_integrities; 29 | this.created = created; 30 | this.modified = modified; 31 | this.active = active; 32 | this.tags = tags; 33 | } 34 | 35 | async serialize() { 36 | const namespace = await this.namespace; 37 | const host = await namespace.host; 38 | return { 39 | name: `${namespace.name}@${host.name}/${encodeURIComponent(this.name)}`, 40 | yanked: this.yanked, 41 | created: this.created, 42 | modified: this.modified, 43 | require_tfa: Boolean(this.require_tfa), 44 | versions: this.version_integrities, 45 | tags: this.tags 46 | }; 47 | } 48 | 49 | // TODO: precompute this on version change events. 
50 | async versions() { 51 | const versions = await PackageVersion.objects 52 | .filter({ 53 | active: true, 54 | parent: this 55 | }) 56 | .then(x => x); 57 | 58 | const acc = {}; 59 | for (const version of versions) { 60 | if (version.yanked) { 61 | continue; 62 | } 63 | 64 | const [integrity, _] = await version.toSSRI(); 65 | acc[version.version] = String(integrity); 66 | } 67 | 68 | return acc; 69 | } 70 | 71 | get namespace() { 72 | if (this.#namespace === null) { 73 | this.#namespace = Namespace.objects.get({ id: this.namespace_id }); 74 | this.#namespace.catch(() => {}); 75 | } 76 | 77 | return this.#namespace; 78 | } 79 | 80 | set namespace(u) { 81 | this.#namespace = Promise.resolve(u); 82 | this.namespace_id = this.#namespace.id; 83 | } 84 | }; 85 | 86 | const PackageVersion = require('./package-version'); 87 | const Namespace = require('./namespace'); 88 | 89 | module.exports.objects = orm(module.exports, { 90 | id: joi 91 | .number() 92 | .integer() 93 | .greater(-1) 94 | .required(), 95 | name: joi.string().min(1), 96 | namespace: orm.fk(Namespace), 97 | require_tfa: joi.boolean(), 98 | version_integrities: joi.object().unknown(), 99 | yanked: joi.boolean().default(false), 100 | created: joi.date(), 101 | modified: joi.date(), 102 | active: joi.boolean().default(true), 103 | tags: joi.object().unknown() 104 | }); 105 | -------------------------------------------------------------------------------- /services/storage/test/01-packages.spec.js: -------------------------------------------------------------------------------- 1 | /* eslint-env node, mocha */ 2 | 'use strict'; 3 | 4 | process.env.EXTERNAL_HOST = 'http://localhost:3000'; 5 | 6 | const fetch = require('node-fetch'); 7 | const demand = require('must'); 8 | 9 | const { createUser, createToken } = require('../utils/users'); 10 | const providePostgres = require('../utils/postgres'); 11 | const provideRegistry = require('../utils/registry'); 12 | 13 | describe('entropic', () => { 14 | it( 15 | 
'must be authenticated to create a package', 16 | providePostgres( 17 | provideRegistry(async url => { 18 | const response = await fetch( 19 | `${url}/v1/packages/package/any-namespace@localhost:3000/any-name`, 20 | { 21 | method: 'PUT', 22 | body: '{}' 23 | } 24 | ); 25 | 26 | response.status.must.eql(401); 27 | const data = await response.json(); 28 | data.must.be.an.object(); 29 | data.must.have.property('message'); 30 | data.message.must.match('You must be logged in'); 31 | }).middleware([require('../middleware/internal-auth')]) 32 | ) 33 | ); 34 | 35 | it( 36 | 'must be a member of the namespace to create a package', 37 | providePostgres( 38 | provideRegistry(async url => { 39 | await createUser('malfoy'); 40 | const token = await createToken('malfoy'); 41 | 42 | const response = await fetch( 43 | `${url}/v1/packages/package/any-namespace@localhost:3000/any-name`, 44 | { 45 | method: 'PUT', 46 | body: '{}', 47 | headers: { 48 | bearer: 'malfoy' 49 | } 50 | } 51 | ); 52 | 53 | response.status.must.eql(403); 54 | const data = await response.json(); 55 | data.must.be.an.object(); 56 | data.must.have.property('message'); 57 | data.message.must.eql('You are not a member of "any-namespace"'); 58 | }).middleware([require('../middleware/internal-auth')]) 59 | ) 60 | ); 61 | 62 | it( 63 | 'can create packages in its own namespace', 64 | providePostgres( 65 | provideRegistry(async url => { 66 | await createUser('malfoy'); 67 | const token = await createToken('malfoy'); 68 | 69 | const response = await fetch( 70 | `${url}/v1/packages/package/malfoy@localhost:3000/draco`, 71 | { 72 | method: 'PUT', 73 | body: '{}', 74 | headers: { 75 | bearer: 'malfoy' 76 | } 77 | } 78 | ); 79 | 80 | response.status.must.eql(201); 81 | const data = await response.json(); 82 | data.must.eql({ 83 | name: 'malfoy@localhost:3000/draco', 84 | yanked: false, 85 | created: data.created, 86 | modified: data.modified, 87 | require_tfa: false, 88 | versions: {}, 89 | tags: {} 90 | }); 91 | 
}).middleware([ 92 | require('../middleware/internal-auth'), 93 | require('./utils/logger') 94 | ]) 95 | ) 96 | ); 97 | }); 98 | -------------------------------------------------------------------------------- /services/storage/lib/validations.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const validateLegacy = require('validate-npm-package-name'); 4 | const validate = require('npm-user-validate'); 5 | const { URL } = require('url'); 6 | const joi = require('joi'); 7 | 8 | // I'm wrapping this up in a little file module because I want to hide the implementation. 9 | // We want to have different rules from vcpm, and we don't care about backcompat with the wild 10 | // days of mixed case. 11 | 12 | module.exports = { 13 | validDependencyName, 14 | validLegacyPackage, 15 | packageNameOK, 16 | nameOK 17 | }; 18 | 19 | // We only worry about whether a given string might be an existing legacy 20 | // package, of any format. Also, we don't care about errors. 21 | function validLegacyPackage(input) { 22 | const { validForOldPackages } = validateLegacy(input); 23 | return validForOldPackages; 24 | } 25 | 26 | // This right here is an opinion. Discuss. 27 | const nameSchema = joi 28 | .string() 29 | .regex(/^[a-z0-9-]+$/, { name: 'alphanumeric plus hyphen' }) 30 | .min(2) 31 | .max(256) 32 | .required(); 33 | 34 | // Returns an error message if the validation failed. 35 | function packageNameOK(name, namespace) { 36 | if (namespace === 'legacy') { 37 | const result = validateLegacy(name); 38 | // All names ok by the old rules are okay by the new ones. 39 | // Some legacy packages will use the old rules. 
40 | if (!result.validForOldPackages) { 41 | return result.errors.join(', '); 42 | } 43 | return; // null response means no error 44 | } 45 | 46 | const validated = joi.validate(name, nameSchema); 47 | if (validated.error) { 48 | return validated.error.annotate(); 49 | } 50 | } 51 | 52 | function validDependencyName( 53 | spec, 54 | warnings = [], 55 | defaultHost = process.env.EXTERNAL_HOST.replace(/https?:\/\//, '') 56 | ) { 57 | if (spec[0] === '@' && spec.split('/').length === 2) { 58 | return validDependencyName(`legacy@${defaultHost}/${spec}`); 59 | } 60 | 61 | if (spec.split('/').length === 1) { 62 | return validDependencyName(`legacy@${defaultHost}/${spec}`); 63 | } 64 | 65 | const { protocol, username, password, host, pathname } = new URL( 66 | `ent://${spec}` 67 | ); 68 | 69 | if (protocol !== 'ent:') { 70 | warnings.push('Contained unexpected protocol portion'); 71 | return false; 72 | } 73 | 74 | if (password) { 75 | warnings.push('Contained unexpected password in namespace portion'); 76 | return false; 77 | } 78 | 79 | try { 80 | validate.username(username); 81 | } catch (err) { 82 | warnings.push('Username: ' + err.message); 83 | return false; 84 | } 85 | 86 | const name = pathname.slice(1); 87 | const errors = packageNameOK(name, username); 88 | if (errors) { 89 | warnings.push(String(errors)); 90 | return false; 91 | } 92 | 93 | return { 94 | canonical: `${username}@${host}/${encodeURIComponent(name)}`, 95 | namespace: username, 96 | host: host, 97 | name 98 | }; 99 | } 100 | 101 | function nameOK(input) { 102 | return joi.validate(input, nameSchema); 103 | } 104 | -------------------------------------------------------------------------------- /services/common/boltzmann/response.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const { Response, Headers } = require('node-fetch'); 4 | const isDev = require('are-we-dev'); 5 | 6 | module.exports = { 7 | authneeded, 8 | empty, 9 | bytes, 10 | 
error, 11 | html, 12 | json, 13 | message, 14 | redirect, 15 | text 16 | }; 17 | 18 | // Serialize `body` to JSON; content-type: application/json. function json(body, status = 200, extraHeaders = {}) { 19 | const headers = new Headers({ 20 | 'content-type': 'application/json', 21 | ...extraHeaders 22 | }); 23 | const r = new Response(JSON.stringify(body), { status, headers }); 24 | return r; 25 | } 26 | 27 | // Respond with a plain-text body; content-type: text/plain. function text(body, status = 200, extraHeaders = {}) { 28 | const headers = new Headers({ 29 | 'content-type': 'text/plain', 30 | ...extraHeaders 31 | }); 32 | const r = new Response(body, { status, headers }); 33 | return r; 34 | } 35 | 36 | // Wrap a text message intended for the ds cli; bare strings become { message } objects. 37 | function message(msg, status = 200, extraHeaders = {}) { 38 | const headers = new Headers({ 39 | 'content-type': 'application/json', 40 | ...extraHeaders 41 | }); 42 | if (typeof msg === 'string') { 43 | msg = { message: msg }; 44 | } 45 | 46 | const r = new Response(JSON.stringify(msg), { status, headers }); 47 | return r; 48 | } 49 | 50 | // Unnecessary, but a hook for other work. 
51 | function bytes(stream, status = 200, extraHeaders = {}) { 52 | const headers = new Headers({ 53 | 'content-type': 'application/octet-stream', 54 | ...extraHeaders 55 | }); 56 | const r = new Response(stream, { status, headers }); 57 | return r; 58 | } 59 | 60 | function html(text, status = 200, extraHeaders = {}) { 61 | const headers = new Headers({ 'content-type': 'text/html', ...extraHeaders }); 62 | const r = new Response(text, { status, headers }); 63 | return r; 64 | } 65 | 66 | function redirect(where, status = 301, extraHeaders = {}) { 67 | const headers = new Headers({ location: where, ...extraHeaders }); 68 | const r = new Response('', { status, headers }); 69 | return r; 70 | } 71 | 72 | function error(err, status = 500, extraHeaders = {}) { 73 | const headers = new Headers({ 74 | 'content-type': 'application/json', 75 | ...extraHeaders 76 | }); 77 | if (typeof err === 'string') { 78 | err = { message: err, code: 'ENOTSUPPLIED' }; 79 | } 80 | 81 | if (isDev()) { 82 | err.trace = new Error().stack; 83 | } 84 | 85 | const r = new Response(JSON.stringify(err), { status, headers }); 86 | return r; 87 | } 88 | 89 | error.coded = coded; 90 | function coded(code, ...args) { 91 | return error(Object.assign(new Error(code), { code }), ...args); 92 | } 93 | 94 | function empty(status = 204, headers = {}) { 95 | return new Response('', { status, headers }); 96 | } 97 | 98 | function authneeded(message, status = 401, extraHeaders = {}) { 99 | const headers = new Headers({ 100 | 'www-authenticate': 'bearer', 101 | 'content-type': 'application/json', 102 | ...extraHeaders 103 | }); 104 | if (typeof message === 'string') { 105 | message = { message, code: 'authneeded' }; 106 | } 107 | const r = new Response(JSON.stringify(message), { status, headers }); 108 | return r; 109 | } 110 | -------------------------------------------------------------------------------- /docs/LORE.md: -------------------------------------------------------------------------------- 1 | # 
LORE 2 | 3 | The intention of this document is to catalog a list of links to important or 4 | notable writings about the folklore of this project. The goal is to point at 5 | the things that shape the project without recapitulating them in full here, 6 | providing them as a resource for newcomers to the project. 7 | 8 | ## Posts / Definitions 9 | 10 | ### A community ownership pact 11 | 12 | ![Ceej and Chris agree that neither of them are in this for a retirement.](/docs/assets/community-ownership-pact.png) 13 | 14 | ### [The economics of package management](https://github.com/ceejbot/economics-of-package-management) 15 | 16 | The text and materials of the talk that introduced this project. 17 | 18 | ### [The economics of open source](https://www.youtube.com/watch?v=MO8hZlgK5zc&feature=youtu.be) 19 | 20 | The video form of the above. 21 | 22 | ### [Conway's Law](https://en.wikipedia.org/wiki/Conway%27s_law) 23 | 24 | What _is_ Conway's law, anyway? 25 | 26 | ### [Gall's Law](https://en.wikipedia.org/wiki/John_Gall_\(author\)#Gall's_law) 27 | 28 | Also, Gall's law? 29 | 30 | ### [The Tyranny of Structurelessness](https://www.jofreeman.com/joreen/tyranny.htm) 31 | 32 | Structurelessness can work to hide power structures. Very applicable to open 33 | source project development. 34 | 35 | ### [Write code that is easy to delete](https://programmingisterrible.com/post/139222674273/write-code-that-is-easy-to-delete-not-easy-to) 36 | 37 | A great post about how to think about the cost of abstraction. 38 | 39 | ### [How ENTROPIC picks packages](https://discourse.entropic.dev/t/issues-with-prs/55/2?u=chrisdickinson) 40 | 41 | See the "Tooling" section of this post. 42 | 43 | ### [HTTP API Design language](https://gist.github.com/chrisdickinson/e94ae588ba06bbcaaf21297345f22008) 44 | 45 | Chris shares his HTTP api design foibles. 46 | 47 | ### [What's a clacks overhead header?](https://xclacksoverhead.org/home/about) 48 | 49 | Why do we send clacks overhead headers? 
What purpose could they serve? Who is 50 | Terry? 51 | 52 | ### [VFP? GCU? (D)ROU?](https://en.wikipedia.org/wiki/List_of_spacecraft_in_the_Culture_series) 53 | 54 | You might spot one of these acronyms when `curl`'ing the `/` path of an 55 | ENTROPIC registry and wonder at their significance. Well, wonder no more! 56 | 57 | ## Quotes / Tweets / Tweetstorms 58 | 59 | ### ["Code is never the challenge."](https://mobile.twitter.com/ceejbot/status/761569569802551300) 60 | 61 | - ceejbot 62 | 63 | ### ["Process is a system, and as a system it is subject to Gall's Law"](https://mobile.twitter.com/isntitvacant/status/1138996005976756227) 64 | 65 | - chrisdickinson 66 | 67 | ### ["The cost of copied code is so much lower than the cost of the wrong abstraction."](https://mobile.twitter.com/jefflembeck/status/1141048466723835904) 68 | 69 | - aredridel 70 | 71 | ### ["We keep the registry up."](https://mobile.twitter.com/jefflembeck/status/1136640077562408965) 72 | 73 | - jefflembeck 74 | 75 | ### ["Software is fun. Easter eggs are a tradition."](https://mobile.twitter.com/ceejbot/status/1135441511624192000) 76 | 77 | - ceejbot 78 | 79 | ### ["Chris says the raccoon is named 'Static.'"](https://mobile.twitter.com/ceejbot/status/1135119921564737541) 80 | 81 | - ceejbot (but also chrisdickinson I guess?) 
82 | 83 | ### ["Conway's law is a continuous force"](https://mobile.twitter.com/isntitvacant/status/1123654381679759360) 84 | 85 | - chrisdickinson 86 | -------------------------------------------------------------------------------- /services/storage/models/package-version.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const orm = require('ormnomnom'); 4 | const joi = require('@hapi/joi'); 5 | const ssri = require('ssri'); 6 | 7 | module.exports = class PackageVersion { 8 | #parent = null; 9 | 10 | constructor({ 11 | id, 12 | version, 13 | parent_id, 14 | parent, 15 | yanked, 16 | files, 17 | derivedFiles, 18 | signatures, 19 | dependencies, 20 | devDependencies, 21 | peerDependencies, 22 | optionalDependencies, 23 | bundledDependencies, 24 | 25 | created, 26 | modified, 27 | active 28 | }) { 29 | this.id = id; 30 | this.version = version; 31 | this.parent_id = parent_id; 32 | this.#parent = parent ? Promise.resolve(parent) : null; 33 | this.yanked = yanked; 34 | this.files = files; // JSON blob. {"path/to/file": ""} 35 | this.derivedFiles = derivedFiles; 36 | 37 | this.signatures = signatures; 38 | this.dependencies = dependencies; 39 | this.devDependencies = devDependencies; 40 | this.peerDependencies = peerDependencies; 41 | this.optionalDependencies = optionalDependencies; 42 | this.bundledDependencies = bundledDependencies; 43 | // TODO: list mirrors here? 
44 | 45 | this.active = active; 46 | this.created = created; 47 | this.modified = modified; 48 | } 49 | 50 | async toSSRI() { 51 | const { 52 | created, 53 | modified, 54 | signatures, 55 | derivedFiles, 56 | ...content 57 | } = await this.serialize(); 58 | 59 | const json = JSON.stringify(content); 60 | return [ssri.fromData(json), json]; 61 | } 62 | 63 | async serialize() { 64 | const { 65 | files, 66 | derivedFiles, 67 | dependencies, 68 | devDependencies, 69 | peerDependencies, 70 | optionalDependencies, 71 | bundledDependencies, 72 | created, 73 | modified, 74 | signatures 75 | } = this; 76 | 77 | return { 78 | files, 79 | derivedFiles, 80 | dependencies, 81 | devDependencies, 82 | peerDependencies, 83 | optionalDependencies, 84 | bundledDependencies, 85 | created, 86 | modified, 87 | signatures 88 | }; 89 | } 90 | 91 | get parent() { 92 | if (this.#parent === null) { 93 | this.#parent = Package.objects.get({ id: this.parent_id }); 94 | this.#parent.catch(() => {}); 95 | } 96 | 97 | return this.#parent; 98 | } 99 | 100 | set parent(p) { 101 | this.#parent = Promise.resolve(p); 102 | this.parent_id = this.#parent.id; 103 | } 104 | }; 105 | 106 | const Package = require('./package'); 107 | 108 | module.exports.objects = orm(module.exports, { 109 | id: joi 110 | .number() 111 | .integer() 112 | .greater(-1) 113 | .required(), 114 | version: joi.string().min(1), 115 | parent: orm.fk(Package), 116 | yanked: joi.boolean().default(false), 117 | files: joi.object().unknown(), 118 | derivedFiles: joi.object().unknown(), 119 | signatures: joi.array().items(joi.string()), 120 | dependencies: joi.object().unknown(), 121 | devDependencies: joi.object().unknown(), 122 | peerDependencies: joi.object().unknown(), 123 | optionalDependencies: joi.object().unknown(), 124 | bundledDependencies: joi.object().unknown(), 125 | active: joi.boolean().default(true), 126 | created: joi.date(), 127 | modified: joi.date() 128 | }); 129 | 
-------------------------------------------------------------------------------- /services/workers/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "workers", 3 | "version": "1.0.0", 4 | "lockfileVersion": 1, 5 | "requires": true, 6 | "dependencies": { 7 | "argparse": { 8 | "version": "1.0.10", 9 | "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", 10 | "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", 11 | "requires": { 12 | "sprintf-js": "~1.0.2" 13 | } 14 | }, 15 | "babel-runtime": { 16 | "version": "5.8.38", 17 | "resolved": "https://registry.npmjs.org/babel-runtime/-/babel-runtime-5.8.38.tgz", 18 | "integrity": "sha1-HAsC62MxL18If/IEUIJ7QlydTBk=", 19 | "requires": { 20 | "core-js": "^1.0.0" 21 | } 22 | }, 23 | "beanstalkd": { 24 | "version": "2.2.1", 25 | "resolved": "https://registry.npmjs.org/beanstalkd/-/beanstalkd-2.2.1.tgz", 26 | "integrity": "sha512-JwrJaPfiUWt887tDKgyCsiYrTJ1n/qnTZU2ldXvHjsltYq32SDnogArvJrsdHezhri9ipMtxuMzB86Whvq02nA==", 27 | "requires": { 28 | "babel-runtime": "^5.8.25", 29 | "beanstalkd-protocol": "^1.0.0", 30 | "bluebird": "^3.4.7", 31 | "debug": "^2.2.0", 32 | "js-yaml": "^3.4.2", 33 | "lodash.camelcase": "^4.3.0" 34 | } 35 | }, 36 | "beanstalkd-protocol": { 37 | "version": "1.0.0", 38 | "resolved": "https://registry.npmjs.org/beanstalkd-protocol/-/beanstalkd-protocol-1.0.0.tgz", 39 | "integrity": "sha1-c1GVx+zeBnm5+disK62+GF8txR4=" 40 | }, 41 | "bluebird": { 42 | "version": "3.5.5", 43 | "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.5.5.tgz", 44 | "integrity": "sha512-5am6HnnfN+urzt4yfg7IgTbotDjIT/u8AJpEt0sIU9FtXfVeezXAPKswrG+xKUCOYAINpSdgZVDU6QFh+cuH3w==" 45 | }, 46 | "core-js": { 47 | "version": "1.2.7", 48 | "resolved": "https://registry.npmjs.org/core-js/-/core-js-1.2.7.tgz", 49 | "integrity": "sha1-ZSKUwUZR2yj6k70tX/KYOk8IxjY=" 50 | }, 51 | "debug": { 52 | 
"version": "2.6.9", 53 | "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", 54 | "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", 55 | "requires": { 56 | "ms": "2.0.0" 57 | } 58 | }, 59 | "esprima": { 60 | "version": "4.0.1", 61 | "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", 62 | "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==" 63 | }, 64 | "js-yaml": { 65 | "version": "3.13.1", 66 | "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", 67 | "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", 68 | "requires": { 69 | "argparse": "^1.0.7", 70 | "esprima": "^4.0.0" 71 | } 72 | }, 73 | "lodash.camelcase": { 74 | "version": "4.3.0", 75 | "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", 76 | "integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY=" 77 | }, 78 | "ms": { 79 | "version": "2.0.0", 80 | "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", 81 | "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" 82 | }, 83 | "sprintf-js": { 84 | "version": "1.0.3", 85 | "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", 86 | "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=" 87 | } 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /services/storage/lib/object-storage.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const { promises: fs } = require('graceful-fs'); 4 | const { promisify } = require('util'); 5 | const mkdirp = promisify(require('mkdirp')); 6 | const ssri = require('ssri'); 7 | const uuid = require('uuid'); 8 | 9 | module.exports = class ObjectStore { 10 | constructor( 11 | strategy, 12 | algorithms = (process.env.STORAGE_HASHES || 'sha512').split(',') 13 | ) { 
14 | this.strategy = strategy; 15 | this.algorithms = algorithms; 16 | } 17 | 18 | async add(stream, { hint = null } = {}) { 19 | stream.resume(); 20 | const chunks = []; 21 | stream.on('data', chunk => chunks.push(chunk)); 22 | const integrity = await ssri.fromStream(stream, { 23 | algorithms: this.algorithms 24 | }); 25 | const data = Buffer.concat(chunks); 26 | const result = await this.addBuffer(integrity, data, { hint }); 27 | return result; 28 | } 29 | 30 | async addBuffer(integrity, data, { hint = null } = {}) { 31 | const targets = []; 32 | for (const algo of this.algorithms) { 33 | for (const { digest } of integrity[algo] || []) { 34 | targets.push( 35 | this.strategy.has(algo, digest).then(has => { 36 | if (!has) { 37 | return this.strategy.add(algo, digest, data); 38 | } 39 | }) 40 | ); 41 | } 42 | } 43 | 44 | await Promise.all(targets); 45 | 46 | return integrity.toString('base64'); 47 | } 48 | 49 | // XXX: do we even need this? we know which algo folks are asking for. 50 | async get(integrity) { 51 | integrity = ssri.parse(integrity); 52 | const streams = []; 53 | for (const algo of this.algorithms) { 54 | for (const { digest } of integrity[algo] || []) { 55 | streams.push(this.strategy.get(algo, digest)); 56 | } 57 | } 58 | 59 | if (!streams.length) { 60 | return null; 61 | } 62 | 63 | let stream; 64 | do { 65 | stream = await Promise.race(streams).catch(() => null); 66 | streams.splice(streams.indexOf(stream), 1); 67 | } while (!stream); 68 | 69 | for (const other of streams) { 70 | other.abort(); 71 | } 72 | 73 | return stream; 74 | } 75 | 76 | static FileSystemStrategy = class { 77 | constructor(dir = process.env.CACHE_DIR || '.') { 78 | this.dir = dir; 79 | this.algos = new Set(); 80 | } 81 | 82 | async get(algo, digest) { 83 | digest = encodeURIComponent(digest); 84 | return fs.readFile(`${this.dir}/${algo}/${digest}`); 85 | } 86 | 87 | async has(algo, digest) { 88 | digest = encodeURIComponent(digest); 89 | try { 90 | await 
fs.access(`${this.dir}/${algo}/${digest}`); 91 | return true; 92 | } catch { 93 | return false; 94 | } 95 | } 96 | 97 | async add(algo, digest, data) { 98 | digest = encodeURIComponent(digest); 99 | if (!this.algos.has(algo)) { 100 | await mkdirp(`${this.dir}/${algo}/tmp`); 101 | this.algos.add(algo); 102 | } 103 | 104 | const uniq = uuid.v4(); 105 | await fs.writeFile(`${this.dir}/${algo}/tmp/${uniq}`, data); 106 | try { 107 | await fs.link( 108 | `${this.dir}/${algo}/tmp/${uniq}`, 109 | `${this.dir}/${algo}/${digest}` 110 | ); 111 | } catch (err) { 112 | if (err.code !== 'EEXIST') { 113 | throw err; 114 | } 115 | } 116 | await fs.unlink(`${this.dir}/${algo}/tmp/${uniq}`); 117 | } 118 | }; 119 | }; 120 | -------------------------------------------------------------------------------- /docs/rfcs/0001-entropic-workflow.md: -------------------------------------------------------------------------------- 1 | # A workflow for Entropic 2 | 3 | This project has to this point been a close collaboration between two people who worked together for several years, and have an established workflow. If we want to succeed with this project, we'll need a successful workflow for collaborating asynchronously with people who don't know each other as well. I have some suggestions based on both things I've done with teams in the past and descriptions of other successful processes. 4 | 5 | In this one-pager I propose a workflow and discuss some of the tradeoffs. 6 | 7 | ## Overview 8 | 9 | There is no process that will replace talking to people. Write a problem statement and proposed solution. Share it privately with one or more contributors and ask for feedback ahead of starting a public discussion. Be most formal when the problem is especially tricky or contentious. 10 | 11 | ## A workflow 12 | 13 | Follow these steps: 14 | 15 | - Start a discussion about a problem you think needs to be solved. 
This might be a missing feature, or an API that needs to be designed, or some other thing you think the project would benefit from. 16 | - This initial discussion should focus on *developing a clear problem statement* and *gathering information about the problem.* We can also discuss possible solutions & their tradeoffs. 17 | - Larger or more contentious discussions should have a moderator gently keeping people on track. If you're the proposer, you shouldn't moderate the discussion. This will free you to have opinions. 18 | - When the discussion reaches a conclusion, close it. 19 | - Write a one-pager summarizing the discussion and the conclusions reached. If it's appropriate, this document should be a concrete proposal for acting on the conclusions. This might be a spec for implementing a new feature, a plan of action for doing something, or documentation. 20 | - We then engage in a new discussion, moderated by somebody who isn't the document author, improving the proposal. This discussion shouldn't relitigate the problem statement unless new information prompts us to do so. Our goal should be *constructive comment* making the proposal the best it can possibly be. 21 | - Redraft as necessary. 22 | - Act. 23 | 24 | Following the steps above dogmatically should *not* be a goal. Instead we should follow the spirit of the workflow: discuss to understand the problem; be thoughtful & propose action; make the proposal better; act. The bigger the problem is, or the more people are involved in the discussion, the more formal our process should be. 25 | 26 | For example, discussing how to fix a small bug might take only a few comments in a GitHub issue. Discussing how to fingerprint Entropic instances to establish a trust network might require the full process, because it's important and we need to think carefully about security issues. (We expect to do the same with package signing!) 
27 | 28 | If a consensus doesn't emerge, [@chrisdickinson](https://github.com/chrisdickinson) and [@ceejbot](https://github.com/ceejbot) will make final decisions. This isn't ideal, but I think until the projects design principles & technical goals are widely shared, we'll have to fall back to this to break stalemates & values conflicts. Also, Chris & Ceej bear the final responsibility so it's fair to make them do the hard work of saying no sometimes. 29 | 30 | ## Tradeoffs 31 | 32 | This process is going to be hard on people who aren't fluent in English. I'm not sure how to address this. 33 | 34 | This process can be slow. 35 | 36 | Sometimes reaching a decision doesn't mean everybody agrees with the decision. People acting in good faith who share the same information and goals will generally reach a consensus, however. 37 | -------------------------------------------------------------------------------- /services/registry/README.md: -------------------------------------------------------------------------------- 1 | # entropic registry 2 | 3 | This is the backend service for the Entropic package manager. 4 | 5 | ## Running your own registry 6 | 7 | Go to https://github.com/settings/developers and create a new oauth app. The authorization callback url will need to look like this: 8 | 9 | ``` 10 | http://localhost:3000/www/login/providers/github/callback 11 | ``` 12 | 13 | Note the client id and the client secret. 14 | 15 | The registry requires both a postgres database and a Redis instance. (It will need the beandstalkd work queue soon, but does not yet require it.) You can provide these any way you like. For convenience, there's a Docker compose file at the top level of the repo. Run `docker-compose up` to provide all of the requirements. 16 | 17 | To run the registry service, run `npm start`. Sadly ds does not have lifecycle scripts implemented yet. (Perhaps you need a project?) 18 | 19 | Entropic reads all of its configuration from environment variables. 
You may provide these to the service any way you wish. For local development, you might find it most convenient to use a `.env` file in the registry root directory. To get started, copy `.env-example` into `.env` and edit to taste. 20 | 21 | Here are the config values and what they mean: 22 | 23 | * `NODE_ENV=env`: one of `dev`, `testing`, or `production` 24 | * `DEV_LATENCY_ERROR_MS=10000`: if a middleware is slower than this in development, you'll see warning logs 25 | * `POSTGRES_URL=postgres://postgres@127.0.0.1:5432`: postgresql connection string 26 | * `PORT=3000`: the port for the registry service to listen on 27 | * `PGUSER=postgres`: the postgres user 28 | * `PGDATABASE=entropic_dev`: the name of the postgres database to use 29 | * `CACHE_DIR=../entropic-cache`: where to store package data 30 | * `OAUTH_GITHUB_CLIENT=gh_client_id_here`: the client id you created above 31 | * `OAUTH_GITHUB_SECRET=gh_secret_here`: the oauth client secret you created above 32 | * `OAUTH_PASSWORD=pw_for_encrypting_tokens_here`: a password with lots of entropy for encrypting oauth access tokens at rest in the db 33 | * `EXTERNAL_HOST=http://localhost:3000`: the web host to advertise to the npm cli 34 | * `SESSION_SECRET=long_pw_for_encrypting_sessions_here` 35 | * `SESSION_EXPIRY_SECONDS=31536000`: how long login sessions should live 36 | 37 | ## The API 38 | 39 | For a full description of the final API, see [docs/README.md](../docs/README.md). This readme documents what's implemented currently. 
40 | 41 | Registry routes: 42 | 43 | * `GET /ping`: responds with 200 & a short text string if we are listening 44 | * `PUT /packages/package/:namespace/:name`: create a package 45 | * `GET /packages/package/:namespace/:name`: get a package meta object 46 | * `DELETE /packages/package/:namespace/:name`: mark a package as abandonware 47 | * `PUT /packages/package/:namespace/:name/versions/:version`: create a new package-version 48 | * `GET /packages/package/:namespace/:name/versions/:version`: fetch meta information for a new package-version 49 | * `DELETE /packages/package/:namespace/:name/versions/:version`: deprecate a package-version 50 | * `GET /objects/object/:hashalgo/*`: fetch a specific content blob 51 | * `GET /auth/whoami` - respond with the name of the logged-in user 52 | 53 | Website routes: 54 | 55 | * `GET /www/login/providers/:provider/callback` 56 | * `GET /www/login` 57 | * `GET /www/signup` 58 | * `POST /www/signup` 59 | * `GET /www/tokens` 60 | * `POST /www/tokens` 61 | 62 | These endpoints do not follow the entropic API conventions, but instead use the legacy endpoints for convenience: 63 | 64 | * `POST /-/v1/login` - log your client in 65 | * `GET /-/v1/login/poll/:session` - poll for a session token as part of the login flow 66 | -------------------------------------------------------------------------------- /services/registry/handlers/users.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const { response, fork } = require('boltzmann'); 4 | const authn = require('../decorators/authn'); 5 | 6 | module.exports = [ 7 | fork.get('/v1/users/user/:username/memberships', authn.required(memberships)), 8 | fork.post( 9 | '/v1/users/user/:username/memberships/:namespace@:host', 10 | authn.required(accept) 11 | ), 12 | fork.del( 13 | '/v1/users/user/:username/memberships/:namespace@:host', 14 | authn.required(decline) 15 | ) 16 | ]; 17 | 18 | async function memberships(context, { username }) 
{ 19 | const [err, result] = await context.storageApi 20 | .listUserMemberships({ 21 | for: username, 22 | bearer: context.user.name, 23 | page: context.url.searchParams.get('page'), 24 | status: context.url.searchParams.get('status') 25 | }) 26 | .then(xs => [null, xs], xs => [xs, null]); 27 | 28 | if (err) { 29 | // TODO: enumerate error cases 30 | return response.error('Caught error listing memberships', 500); 31 | } 32 | 33 | const { objects, next, prev, total } = result; 34 | return response.json({ objects, next, prev, total }); 35 | } 36 | 37 | async function accept(context, { username, namespace, host }) { 38 | const [err] = await context.storageApi 39 | .acceptNamespaceMembership({ 40 | bearer: context.user.name, 41 | invitee: username, 42 | namespace, 43 | host 44 | }) 45 | .then(xs => [null, xs], xs => [xs, null]); 46 | 47 | if (err) { 48 | const msg = { 49 | 'member.invite.invitee_dne': `Unknown user for invite: "${username}".`, 50 | 'member.invite.namespace_dne': `Unknown namespace: "${namespace}@${host}".`, 51 | 'member.invite.bearer_unauthorized': `You are not authorized to accept an invite for "${username}" on "${namespace}@${host}"`, 52 | 'member.invite.invite_dne': `invitation not found` 53 | }[err.code]; 54 | 55 | return response.error( 56 | msg || 57 | `Caught error accepting "${namespace}@${host}" invite for "${ 58 | context.user.name 59 | }"`, 60 | err.status 61 | ); 62 | } 63 | 64 | context.logger.info( 65 | `${context.user.name} accepted the invitation to join ${namespace}@${host}` 66 | ); 67 | return response.message( 68 | `${context.user.name} is now a member of ${namespace}@${host}` 69 | ); 70 | } 71 | 72 | async function decline(context, { username, namespace, host }) { 73 | const [err] = await context.storageApi 74 | .declineNamespaceMembership({ 75 | bearer: context.user.name, 76 | invitee: username, 77 | namespace, 78 | host 79 | }) 80 | .then(xs => [null, xs], xs => [xs, null]); 81 | 82 | if (err) { 83 | const msg = { 84 | 
'member.invite.invitee_dne': `Unknown user for invite: "${username}".`, 85 | 'member.invite.namespace_dne': `Unknown namespace: "${namespace}@${host}".`, 86 | 'member.invite.bearer_unauthorized': `You are not authorized to decline an invite for "${username}" on "${namespace}@${host}"`, 87 | 'member.invite.invite_dne': `invitation not found` 88 | }[err.code]; 89 | 90 | return response.error( 91 | msg || 92 | `Caught error declining "${namespace}@${host}" invite for "${ 93 | context.user.name 94 | }"`, 95 | err.status 96 | ); 97 | } 98 | 99 | context.logger.info( 100 | `${context.user.name} declined the invitation to join ${namespace}@${host}` 101 | ); 102 | return response.message( 103 | `You have declined the invitation to join ${namespace}@${host}` 104 | ); 105 | } 106 | -------------------------------------------------------------------------------- /services/storage/migrations/20190429000000-more-tables.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var dbm; 4 | var type; 5 | var seed; 6 | 7 | /** 8 | * We receive the dbmigrate dependency from dbmigrate initially. 9 | * This enables us to not have to rely on NODE_PATH. 10 | */ 11 | exports.setup = function(options, seedLink) { 12 | dbm = options.dbmigrate; 13 | type = dbm.dataType; 14 | seed = seedLink; 15 | }; 16 | 17 | exports.up = async function(db) { 18 | return await db.runSql(` 19 | DROP TABLE "users"; 20 | 21 | CREATE TABLE "users" ( -- once more, with feeling! 
22 | id SERIAL PRIMARY KEY, 23 | name text NOT NULL, 24 | email text, 25 | created TIMESTAMP DEFAULT NOW(), 26 | modified TIMESTAMP DEFAULT NOW(), 27 | active BOOLEAN DEFAULT TRUE 28 | ); 29 | 30 | CREATE UNIQUE INDEX "users_name_idx" ON "users" ( "name" ) WHERE ( "active" ); 31 | 32 | CREATE TABLE IF NOT EXISTS "namespaces" ( 33 | id SERIAL PRIMARY KEY, 34 | name text NOT NULL, 35 | created TIMESTAMP NOT NULL DEFAULT NOW(), 36 | modified TIMESTAMP NOT NULL DEFAULT NOW(), 37 | active BOOLEAN NOT NULL DEFAULT TRUE 38 | ); 39 | 40 | CREATE UNIQUE INDEX "namespaces_name_idx" ON "namespaces" ( "name" ) WHERE ( "active" ); 41 | 42 | CREATE TABLE IF NOT EXISTS "packages" ( 43 | id SERIAL PRIMARY KEY, 44 | name text NOT NULL, 45 | namespace_id integer NOT NULL REFERENCES "namespaces" ("id"), 46 | tags jsonb not null default '{}'::jsonb, 47 | yanked BOOLEAN DEFAULT FALSE, 48 | created TIMESTAMP NOT NULL DEFAULT NOW(), 49 | modified TIMESTAMP NOT NULL DEFAULT NOW(), 50 | active BOOLEAN NOT NULL DEFAULT TRUE 51 | ); 52 | 53 | CREATE UNIQUE INDEX "packages_namespace_name_idx" ON "packages" ( "namespace_id", "name" ) WHERE ( "active" ); 54 | 55 | CREATE TABLE IF NOT EXISTS "package_versions" ( 56 | id SERIAL PRIMARY KEY, 57 | version text NOT NULL, 58 | parent_id integer NOT NULL REFERENCES "packages" ("id"), 59 | namespace_id integer NOT NULL REFERENCES "namespaces" ("id"), 60 | yanked BOOLEAN NOT NULL DEFAULT FALSE, 61 | files jsonb NOT NULL default '{}'::jsonb, 62 | signatures jsonb NOT NULL default '[]'::jsonb, 63 | created TIMESTAMP NOT NULL DEFAULT NOW(), 64 | modified TIMESTAMP NOT NULL DEFAULT NOW(), 65 | active BOOLEAN NOT NULL DEFAULT TRUE 66 | ); 67 | 68 | CREATE UNIQUE INDEX "package_versions_parent_id_version_idx" ON "package_versions" ( "parent_id", "version" ) WHERE ( "active" ); 69 | 70 | CREATE TABLE IF NOT EXISTS "namespace_members" ( 71 | id SERIAL PRIMARY KEY, 72 | user_id integer NOT NULL REFERENCES "users" ("id"), 73 | namespace_id integer NOT NULL 
REFERENCES "namespaces" ("id"), 74 | created TIMESTAMP NOT NULL DEFAULT NOW(), 75 | modified TIMESTAMP NOT NULL DEFAULT NOW(), 76 | active BOOLEAN NOT NULL DEFAULT TRUE 77 | ); 78 | 79 | CREATE UNIQUE INDEX "namespace_members_user_id_namespace_id" ON "namespace_members" ( "namespace_id", "user_id" ) WHERE ( "active" ); 80 | 81 | CREATE TABLE IF NOT EXISTS "maintainers" ( 82 | id SERIAL PRIMARY KEY, 83 | package_id integer NOT NULL REFERENCES "packages" ("id"), 84 | namespace_id integer NOT NULL REFERENCES "namespaces" ("id"), 85 | created TIMESTAMP NOT NULL DEFAULT NOW(), 86 | modified TIMESTAMP NOT NULL DEFAULT NOW(), 87 | active BOOLEAN NOT NULL DEFAULT TRUE 88 | ); 89 | 90 | CREATE UNIQUE INDEX "maintainers_members_package_id_namespace_id" ON "maintainers" ( "namespace_id", "package_id" ) WHERE ( "active" ); 91 | `); 92 | }; 93 | 94 | exports.down = function(db) { 95 | return null; 96 | }; 97 | 98 | exports._meta = { 99 | version: 1 100 | }; 101 | -------------------------------------------------------------------------------- /docs/meetings/20190708-minutes.md: -------------------------------------------------------------------------------- 1 | # 2019-07-08 Meeting 2 | 3 | ## Attendees 4 | 5 | - [**zkat**](https://github.com/zkat/): KAT 6 | - [**chrisdickinson**](https://github.com/chrisdickinson/): CHRIS 7 | - [**ceejbot**](https://github.com/ceejbot/): CEEJ 8 | 9 | > Minutes from notes taken by CHRIS, approved by KAT and CEEJ 10 | 11 | ## Agenda 12 | 13 | ### 1. How are we going to bring new maintainers in? 14 | 15 | - CEEJ introduced. 16 | - Stop talking in $sidechannel. Move to discord. 17 | - General note: GitHub is a great place to concentrate on code, but a bad place to run a project. 18 | - We should turn off GitHub issues and redirect folks to the Discourse. 19 | - CHRIS to take on communication for this & eventually turning off the issues. 20 | - KAT to take on the rest of the Discord setup. 
21 | - We note that the project is set up as a cabal of maintainers at the moment 22 | - That's okay as long as we make it easy to join the cabal, so that should be our goal 23 | - KAT notes that the website is a great place to draw in new maintainers. 24 | - CHRIS adds that we could use help deploying the latest and greatest ENTROPIC so that 25 | folks working on the website service can see it in the real world. 26 | - (CHRIS to go talk to Todd/Wes about this, CEEJ to see about uploading Wes's keys if he is interested) 27 | 28 | ### 2. Docs: What docs are most helpful right now? 29 | 30 | - CHRIS introduced, wanted to get a read on where documentation is missing 31 | - Docs need to do a better job of setting expectations. 32 | - Where the project lives, what cadence it operates at, what's expected to work and what's not. 33 | - Eventually we'll want a standalone place for the docs. 34 | 35 | ### 3. DS CLI (in entropic-dev/entropic) 36 | 37 | - CHRIS introduced. 38 | - We now have two DS's! One more and we can officially play Super Mario 3D Land. 39 | - Got on the same page: entropic-dev/ds and entropic-dev/dstopic are THE FUTURE 40 | - entropic-dev/entropic's `ds` is essentially an integration test for the registry 41 | - Expected lifetime is at least 2-3 months 42 | - CHRIS to post about the intent in Discourse. 43 | 44 | ### 4. How are we going to communicate about work 45 | 46 | - CEEJ introduced. 47 | - Weekly meetings are too much, but fortnightly seems good. (every 2 weeks) 48 | - Can have ad-hoc meetings as necessary to discuss sticking points. 49 | - Goal of fortnightly meeting is to provide a "finish line" for decisions & messaging 50 | about the state of other decisions. 51 | - E.g., `Package.toml` vs `entropic.toml` vs `ds.toml` -- we can pick one here. 52 | - CD Note: We did not pick one there. But we could've! 53 | - KAT set up the Ideas category 54 | - CEEJ to look into OpenCollective 55 | 56 | ### 5. 
What are the big undesigned bits that need design? 57 | 58 | - CEEJ introduced. 59 | - ENTROPIC->ENTROPIC Package Syncing 60 | - CHRIS is working on this. 61 | - Do we go with a centralized NOTARY service, as proposed, or lean on Keybase? 62 | - Downside of keybase is that it is, in fact, a VC-funded company 63 | - Key revocation (use old key to sign new key), vs. key loss 64 | - CHRIS needs to research the implications more 65 | - ENTROPIC->DS Package downloading (mostly specced, docs need updated, CHRIS feels this sin weighs on his soul) 66 | - Keeping this flow separate for now because they have slightly different replication aims. 67 | - Author package signing 68 | - CEEJ is chewing on this. 69 | - Search 70 | - Can do a quick "local search only" rev to give tools to CLI UX 71 | - Needs to be reasonably responsive, <1s response time 72 | - CEEJ to think about this long term 73 | - Introspection / stats page 74 | - For the website, probably fun to work on 75 | - Package Dependency spec 76 | - KAT is working on this. 77 | - CLI: Rust infrastructure (ongoing) 78 | - KAT is coordinating this. 79 | - CLI: installer 80 | - KAT is working on this. 
81 | -------------------------------------------------------------------------------- /services/storage/lib/clone-legacy-package.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = clone; 4 | 5 | const { PassThrough, pipeline } = require('stream'); 6 | const minimist = require('minimist'); 7 | const fetch = require('node-fetch'); 8 | const orm = require('ormnomnom'); 9 | const pacote = require('pacote'); 10 | const tar = require('tar'); 11 | 12 | const PackageVersion = require('../models/package-version'); 13 | const Maintainer = require('../models/maintainer'); 14 | const Namespace = require('../models/namespace'); 15 | const Package = require('../models/package'); 16 | 17 | const enc = encodeURIComponent; 18 | 19 | async function clone(pkg, storage) { 20 | const json = await pacote.packument(pkg).catch(() => null); 21 | if (json === null) { 22 | return; 23 | } 24 | 25 | const namespace = await Namespace.objects.get({ 26 | name: 'legacy', 27 | 'host.name': process.env.EXTERNAL_HOST.replace(/^https?:\/\//g, ''), 28 | 'host.active': true, 29 | active: true 30 | }); 31 | 32 | // TODO: mark the package as "syncing." Syncs can take up to 30s or more. 33 | // Maybe we should only sync the most recent N versions before returning 34 | // a temporary "yes here are the newest items" response? 
35 | const result = await Package.objects.create({ 36 | name: pkg, 37 | namespace, 38 | require_tfa: false 39 | }); 40 | 41 | await Maintainer.objects.create({ 42 | namespace, 43 | package: result 44 | }); 45 | 46 | const versions = Object.keys(json.versions); 47 | const pending = []; 48 | for (const version of versions) { 49 | const versionData = syncVersion( 50 | storage, 51 | pkg, 52 | version, 53 | json.versions[version] 54 | ); 55 | versionData.catch(() => {}); 56 | pending.push(versionData); 57 | } 58 | 59 | const versionData = (await Promise.all(pending)).filter(Boolean).map(xs => { 60 | return { 61 | parent: result, 62 | ...xs 63 | }; 64 | }); 65 | 66 | const pkgversions = await PackageVersion.objects.create(versionData); 67 | for (const pkgVersion of pkgversions) { 68 | const [integrity, versiondata] = await pkgVersion.toSSRI(); 69 | await storage.addBuffer(integrity, Buffer.from(versiondata)); 70 | } 71 | 72 | const versionIntegrities = await result.versions(); 73 | await Package.objects.filter({ id: result.id }).update({ 74 | modified: new Date(), 75 | tags: json['dist-tags'], 76 | version_integrities: versionIntegrities 77 | }); 78 | } 79 | 80 | async function syncVersion(storage, pkg, version, data) { 81 | const tarball = pacote.tarball.stream(`${pkg}@${version}`); 82 | const untar = new tar.Parse(); 83 | const files = {}; 84 | const pending = []; 85 | 86 | untar.on('entry', entry => { 87 | if (entry.type === 'File') { 88 | const filename = './' + String(entry.path).replace(/^\/+/g, ''); 89 | const passthrough = new PassThrough(); 90 | passthrough.pause(); 91 | 92 | const stream = entry.pipe(passthrough); 93 | const addFile = storage.add(stream).then(r => { 94 | files[filename] = r; 95 | }); 96 | addFile.catch(() => {}); 97 | pending.push(addFile); 98 | } else { 99 | entry.resume(); 100 | } 101 | }); 102 | 103 | try { 104 | await new Promise((resolve, reject) => { 105 | tarball.on('error', reject); 106 | untar.on('end', resolve).on('error', reject); 
107 | tarball.pipe(untar); 108 | }); 109 | } catch { 110 | return; 111 | } 112 | 113 | await Promise.all(pending); 114 | 115 | return { 116 | version, 117 | signatures: [], 118 | dependencies: data.dependencies || {}, 119 | devDependencies: data.devDependencies || {}, 120 | optionalDependencies: data.optionalDependencies || {}, 121 | peerDependencies: data.peerDependencies || {}, 122 | bundledDependencies: data.bundledDependencies || {}, 123 | files, 124 | derivedFiles: {} 125 | }; 126 | } 127 | -------------------------------------------------------------------------------- /cli/lib/commands/download.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = download; 4 | 5 | const { createReadStream, promises: fs } = require('fs'); 6 | const { pipeline: _ } = require('stream'); 7 | const figgy = require('figgy-pudding'); 8 | const { promisify } = require('util'); 9 | const fetch = require('../fetch'); 10 | const cacache = require('cacache'); 11 | const home = require('user-home'); 12 | const semver = require('semver'); 13 | const path = require('path'); 14 | const ssri = require('ssri'); 15 | 16 | const fetchPackageVersion = require('../fetch-package-version'); 17 | const parsePackageSpec = require('../canonicalize-spec'); 18 | const fetchPackage = require('../fetch-package'); 19 | const fetchObject = require('../fetch-object'); 20 | 21 | const pipeline = promisify(_); 22 | 23 | const downloadOpts = figgy({ 24 | registry: { default: 'https://registry.entropic.dev' }, 25 | argv: true, 26 | expires: true, 27 | cache: { default: path.join(home, '.ds', 'cache') }, 28 | log: { default: require('npmlog') } 29 | }); 30 | 31 | async function download(opts) { 32 | opts = downloadOpts(opts); 33 | 34 | const fetching = []; 35 | const seenFiles = new Set(); 36 | const now = Date.now(); 37 | 38 | const { range, ...parsed } = parsePackageSpec( 39 | opts.argv[0], 40 | opts.registry.replace(/^https?:\/\//, '') 
41 | ); 42 | const result = await visitPackage( 43 | opts, 44 | parsed, 45 | now, 46 | range, 47 | seenFiles, 48 | fetching 49 | ); 50 | await Promise.all(fetching); 51 | } 52 | 53 | async function visitPackage(opts, spec, now, range, seenFiles, fetching) { 54 | const { canonical: name } = spec; 55 | 56 | const data = await fetchPackage(opts, name, now); 57 | if (!semver.validRange(range)) { 58 | const version = data.tags[range]; 59 | if (!version) { 60 | opts.log.error(`Failed to fetch resolve range for ${spec}: ${range}`); 61 | throw new Error(); 62 | } 63 | range = version; // peg it to a version 64 | } 65 | 66 | const checks = []; 67 | for (const [version, integrity] of Object.entries(data.versions)) { 68 | if (!semver.satisfies(version, range)) { 69 | continue; 70 | } 71 | 72 | const check = cacache.get 73 | .byDigest(opts.cache, integrity) 74 | .catch(() => null) 75 | .then(content => { 76 | return content 77 | ? content 78 | : fetchPackageVersion(opts, name, version, integrity); 79 | }) 80 | .then(content => [version, content]); 81 | 82 | checks.push(check); 83 | } 84 | 85 | if (!checks.length) { 86 | opts.log.error( 87 | `Failed to fetch resolve range for ${name}: ${range} matched no versions!` 88 | ); 89 | throw new Error(); 90 | } 91 | 92 | const resolvedVersions = await Promise.all(checks); 93 | const deps = new Set(); 94 | for (const [version, content] of resolvedVersions) { 95 | const versiondata = JSON.parse(String(content)); 96 | for (const filename in versiondata.files) { 97 | if (seenFiles.has(versiondata.files[filename])) { 98 | continue; 99 | } 100 | 101 | seenFiles.add(versiondata.files[filename]); 102 | 103 | const loading = fetchObject(opts, versiondata.files[filename]); 104 | 105 | loading.catch(() => {}); 106 | fetching.push(loading); 107 | } 108 | 109 | for (const dep in versiondata.dependencies) { 110 | deps.add(`${dep}@${versiondata.dependencies[dep]}`); 111 | } 112 | 113 | for (const dep in versiondata.devDependencies) { 114 | 
deps.add(`${dep}@${versiondata.devDependencies[dep]}`); 115 | } 116 | 117 | for (const dep in versiondata.peerDependencies) { 118 | deps.add(`${dep}@${versiondata.peerDependencies[dep]}`); 119 | } 120 | 121 | for (const dep in versiondata.optionalDependencies) { 122 | deps.add(`${dep}@${versiondata.optionalDependencies[dep]}`); 123 | } 124 | } 125 | 126 | return [...deps]; 127 | } 128 | -------------------------------------------------------------------------------- /misc/nginx.conf: -------------------------------------------------------------------------------- 1 | upstream registry { 2 | server 127.0.0.1:3000; 3 | } 4 | 5 | server { 6 | listen 443 ssl http2; 7 | listen [::]:443 ssl http2; 8 | server_name www.entropic.dev; 9 | 10 | root /mnt/packages/website; 11 | index index.html index.htm; 12 | gzip on; 13 | 14 | ssl on; 15 | ssl_certificate /etc/ssl/private/domain.crt; 16 | ssl_certificate_key /etc/ssl/private/domain.key; 17 | ssl_dhparam /etc/ssl/private/dhparams.pem; 18 | 19 | ssl_session_timeout 5m; 20 | 21 | ssl_protocols TLSv1.3 TLSv1.2 TLSv1.1 TLSv1; 22 | ssl_prefer_server_ciphers on; 23 | ssl_ciphers EECDH+ECDSA+AESGCM:EECDH+aRSA+AESGCM:EECDH+ECDSA+SHA512:EECDH+ECDSA+SHA384:EECDH+ECDSA+SHA256:ECDH+AESGCM:ECDH+AES256:DH+AESGCM:DH+AES256:RSA+AESGCM:!aNULL:!eNULL:!LOW:!RC4:!3DES:!MD5:!EXP:!PSK:!SRP:!DSS; 24 | ssl_session_cache shared:TLS:2m; 25 | ssl_buffer_size 4k; 26 | 27 | ssl_stapling on; 28 | ssl_stapling_verify on; 29 | resolver 1.1.1.1 1.0.0.1 [2606:4700:4700::1111] [2606:4700:4700::1001]; # Cloudflare 30 | 31 | add_header Strict-Transport-Security 'max-age=31536000; includeSubDomains; preload' always; 32 | add_header X-Frame-Options sameorigin always; 33 | add_header X-Clacks-Overhead "GNU/Terry Pratchett"; 34 | } 35 | 36 | server { 37 | listen 443 default_server ssl http2; 38 | listen [::]:443 default_server ssl http2; 39 | server_name entropic.dev *.entropic.dev; 40 | 41 | root /mnt/packages/website/no; 42 | 43 | ssl on; 44 | ssl_certificate 
/etc/ssl/private/domain.crt; 45 | ssl_certificate_key /etc/ssl/private/domain.key; 46 | ssl_dhparam /etc/ssl/private/dhparams.pem; 47 | 48 | ssl_session_timeout 5m; 49 | 50 | ssl_protocols TLSv1.3 TLSv1.2 TLSv1.1 TLSv1; 51 | ssl_prefer_server_ciphers on; 52 | ssl_ciphers EECDH+ECDSA+AESGCM:EECDH+aRSA+AESGCM:EECDH+ECDSA+SHA512:EECDH+ECDSA+SHA384:EECDH+ECDSA+SHA256:ECDH+AESGCM:ECDH+AES256:DH+AESGCM:DH+AES256:RSA+AESGCM:!aNULL:!eNULL:!LOW:!RC4:!3DES:!MD5:!EXP:!PSK:!SRP:!DSS; 53 | ssl_session_cache shared:TLS:2m; 54 | ssl_buffer_size 4k; 55 | 56 | ssl_stapling on; 57 | ssl_stapling_verify on; 58 | resolver 1.1.1.1 1.0.0.1 [2606:4700:4700::1111] [2606:4700:4700::1001]; # Cloudflare 59 | 60 | add_header Strict-Transport-Security 'max-age=31536000; includeSubDomains; preload' always; 61 | add_header X-Frame-Options sameorigin always; 62 | add_header X-Clacks-Overhead "GNU/Terry Pratchett"; 63 | client_max_body_size 16M; 64 | 65 | # the weirdo 66 | location /ping { 67 | proxy_pass http://registry/ping; 68 | } 69 | 70 | location = / { 71 | proxy_pass http://registry/; 72 | } 73 | 74 | # the external API 75 | location /v1 { 76 | # via: https://stackoverflow.com/questions/28684300/nginx-pass-proxy-subdirectory-without-url-decoding/37584637#37584637 77 | rewrite ^ $request_uri; 78 | rewrite ^/v1/(.*) $1 break; 79 | return 400; 80 | proxy_pass http://registry/v1/$uri; 81 | proxy_pass http://registry/v1; 82 | proxy_buffering off; 83 | proxy_set_header Host $host; 84 | proxy_set_header X-Real-IP $remote_addr; 85 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; 86 | proxy_set_header X-Forwarded-Proto $scheme; 87 | proxy_set_header X-Forwarded-Host $host; 88 | } 89 | 90 | # legacy urls 91 | location /- { 92 | proxy_pass http://registry/-; 93 | proxy_set_header Host $host; 94 | proxy_set_header X-Real-IP $remote_addr; 95 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; 96 | proxy_set_header X-Forwarded-Proto $scheme; 97 | proxy_set_header X-Forwarded-Host 
$host; 98 | } 99 | 100 | # will be a web site someday 101 | location /www { 102 | proxy_pass http://registry/www; 103 | proxy_set_header Host $host; 104 | proxy_set_header X-Real-IP $remote_addr; 105 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; 106 | proxy_set_header X-Forwarded-Proto $scheme; 107 | proxy_set_header X-Forwarded-Host $host; 108 | } 109 | } 110 | 111 | server { 112 | listen 80; 113 | listen [::]:80; 114 | server_name www.entropic.dev; 115 | 116 | location / { 117 | return 301 https://www.entropic.dev$request_uri; 118 | } 119 | } 120 | 121 | server { 122 | listen 80 default_server; 123 | listen [::]:80 default_server; 124 | server_name entropic.dev *.entropic.dev; 125 | 126 | location / { 127 | return 301 https://entropic.dev$request_uri; 128 | } 129 | } 130 | --------------------------------------------------------------------------------