├── packages
│   ├── server
│   │   ├── README.md
│   │   ├── test.env
│   │   ├── jest.config.js
│   │   ├── service
│   │   │   ├── package.json
│   │   │   ├── index.js
│   │   │   └── yarn.lock
│   │   ├── example.dev.env
│   │   ├── tsconfig.build.json
│   │   ├── tsconfig.json
│   │   ├── src
│   │   │   ├── env.ts
│   │   │   ├── context.ts
│   │   │   ├── logger.ts
│   │   │   ├── migrations
│   │   │   │   ├── index.ts
│   │   │   │   ├── 20230406T174552885Z-did-locks.ts
│   │   │   │   ├── 20221020T204908820Z-operations-init.ts
│   │   │   │   └── 20230223T215019669Z-refactor.ts
│   │   │   ├── bin.ts
│   │   │   ├── db
│   │   │   │   ├── types.ts
│   │   │   │   ├── mock.ts
│   │   │   │   └── index.ts
│   │   │   ├── error.ts
│   │   │   ├── index.ts
│   │   │   └── routes.ts
│   │   ├── update-pkg.js
│   │   ├── bin
│   │   │   ├── did-create.ts
│   │   │   └── migration-create.ts
│   │   ├── pg
│   │   │   ├── docker-compose.yaml
│   │   │   ├── with-test-db.sh
│   │   │   └── README.md
│   │   ├── build.js
│   │   ├── tests
│   │   │   ├── _util.ts
│   │   │   ├── migrations
│   │   │   │   ├── did-locks.test.ts
│   │   │   │   └── refactor.test.ts
│   │   │   └── server.test.ts
│   │   ├── Dockerfile
│   │   └── package.json
│   └── lib
│       ├── README.md
│       ├── tsconfig.build.json
│       ├── jest.config.js
│       ├── src
│       │   ├── index.ts
│       │   ├── error.ts
│       │   ├── document.ts
│       │   ├── types.ts
│       │   ├── data.ts
│       │   ├── client.ts
│       │   └── operations.ts
│       ├── tsconfig.json
│       ├── update-pkg.js
│       ├── build.js
│       ├── package.json
│       └── tests
│           ├── compatibility.test.ts
│           ├── document.test.ts
│           ├── recovery.test.ts
│           └── data.test.ts
├── babel.config.js
├── test-setup.ts
├── lerna.json
├── jest.config.js
├── .gitignore
├── .prettierrc
├── LICENSE
├── jest.config.base.js
├── tsconfig.json
├── LICENSE-MIT
├── .github
│   └── workflows
│       ├── build-test-verify.yaml
│       ├── build-and-push-ghcr.yaml
│       └── build-and-push-aws.yaml
├── .eslintrc
├── Makefile
├── package.json
├── invalidated-op-log.txt
├── LICENSE-APACHE
└── README.md

/packages/server/README.md:
--------------------------------------------------------------------------------
# DID PLC Server

--------------------------------------------------------------------------------
/packages/lib/README.md:
--------------------------------------------------------------------------------
# DID PLC Library

--------------------------------------------------------------------------------
/packages/server/test.env:
--------------------------------------------------------------------------------
LOG_ENABLED=true
LOG_DESTINATION=test.log
--------------------------------------------------------------------------------
/babel.config.js:
--------------------------------------------------------------------------------
module.exports = {
  presets: [['@babel/preset-env']],
}

--------------------------------------------------------------------------------
/test-setup.ts:
--------------------------------------------------------------------------------
import dotenv from 'dotenv'

dotenv.config({ path: './test.env' })

--------------------------------------------------------------------------------
/lerna.json:
--------------------------------------------------------------------------------
{
  "packages": ["packages/*"],
  "npmClient": "yarn",
  "useWorkspaces": true,
  "version": "0.0.1"
}
--------------------------------------------------------------------------------
/packages/lib/tsconfig.build.json:
--------------------------------------------------------------------------------
{
  "extends": "./tsconfig.json",
  "exclude": ["**/*.spec.ts", "**/*.test.ts"]
}

--------------------------------------------------------------------------------
/packages/lib/jest.config.js:
--------------------------------------------------------------------------------
const base = require('../../jest.config.base.js')

module.exports = {
  ...base,
  displayName: 'lib',
}

--------------------------------------------------------------------------------
/packages/server/jest.config.js:
--------------------------------------------------------------------------------
const base = require('../../jest.config.base.js')

module.exports = {
  ...base,
  displayName: 'server',
}

--------------------------------------------------------------------------------
/packages/server/service/package.json:
--------------------------------------------------------------------------------
{
  "name": "plc-service",
  "private": true,
  "dependencies": {
    "dd-trace": "^3.8.0"
  }
}

--------------------------------------------------------------------------------
/jest.config.js:
--------------------------------------------------------------------------------
// jest.config.js
const base = require('./jest.config.base.js')

module.exports = {
  ...base,
  projects: ['<rootDir>/packages/*/jest.config.js'],
}

--------------------------------------------------------------------------------
/packages/lib/src/index.ts:
--------------------------------------------------------------------------------
export * from './client'
export * from './data'
export * from './document'
export * from './error'
export * from './operations'
export * from './types'

--------------------------------------------------------------------------------
/packages/server/example.dev.env:
--------------------------------------------------------------------------------
# this requires setting up a local postgresql user and database
DATABASE_URL="postgres://bsky:yksb@localhost/plc_dev"
DEBUG_MODE=1
LOG_ENABLED="true"
LOG_LEVEL=debug
LOG_DESTINATION=1

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
node_modules
lerna-debug.log
npm-debug.log
yarn-error.log
packages/*/dist
.idea
packages/*/coverage
.vscode/
.DS_Store
*.log
tsconfig.build.tsbuildinfo
.env
.*.env
\#*\#
*~
*.swp

--------------------------------------------------------------------------------
/.prettierrc:
--------------------------------------------------------------------------------
{
  "trailingComma": "all",
  "tabWidth": 2,
  "semi": false,
  "singleQuote": true,
  "overrides": [
    {
      "files": "*.hbs",
      "options": {
        "singleQuote": false
      }
    }
  ]
}

--------------------------------------------------------------------------------
/packages/lib/tsconfig.json:
--------------------------------------------------------------------------------
{
  "extends": "../../tsconfig.json",
  "compilerOptions": {
    "rootDir": "./src",
    "outDir": "./dist",
    "declarationDir": "./dist",
    "emitDeclarationOnly": true,
  },
  "include": ["./src"]
}

--------------------------------------------------------------------------------
/packages/server/tsconfig.build.json:
--------------------------------------------------------------------------------
{
  "extends": "./tsconfig.json",
  "compilerOptions": {
    "rootDir": "./src",
    "outDir": "./dist",
"declarationDir": "./dist", 7 | "emitDeclarationOnly": true, 8 | }, 9 | "include": ["./src"] 10 | } 11 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Dual MIT/Apache-2.0 License 2 | 3 | Copyright (c) 2022-2023 Bluesky PBC 4 | 5 | Except as otherwise noted in individual files, this software is licensed under the MIT license (), or the Apache License, Version 2.0 (), at your option. 6 | -------------------------------------------------------------------------------- /packages/server/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../../tsconfig.json", 3 | "compilerOptions": { 4 | "rootDir": "./src", 5 | "outDir": "./dist", // Your outDir, 6 | "emitDeclarationOnly": true, 7 | }, 8 | "include": ["./src", "__tests__/**/**.ts"], 9 | "references": [ 10 | { "path": "../lib/tsconfig.build.json" } 11 | ] 12 | } 13 | -------------------------------------------------------------------------------- /packages/server/src/env.ts: -------------------------------------------------------------------------------- 1 | // NOTE: this file should be imported first, particularly before `@atproto/common` (for logging), to ensure that environment variables are respected in library code 2 | import dotenv from 'dotenv' 3 | 4 | const env = process.env.ENV 5 | if (env) { 6 | dotenv.config({ path: `./.${env}.env` }) 7 | } else { 8 | dotenv.config() 9 | } 10 | -------------------------------------------------------------------------------- /packages/server/src/context.ts: -------------------------------------------------------------------------------- 1 | import { PlcDatabase } from './db' 2 | 3 | export class AppContext { 4 | constructor( 5 | private opts: { 6 | db: PlcDatabase 7 | version: string 8 | port?: number 9 | }, 10 | ) {} 11 | 12 | get db() { 13 | return this.opts.db 14 | } 15 | 16 | get version() { 17 | return this.opts.version 18 | } 19 | 20 | get port() { 21 | return this.opts.port 22 | } 23 | } 24 | 25 | export default AppContext 26 | -------------------------------------------------------------------------------- /packages/lib/update-pkg.js: -------------------------------------------------------------------------------- 1 | const pkgJson = require('@npmcli/package-json') 2 | 3 | if (process.argv.includes('--update-main-to-dist')) { 4 | return pkgJson 5 | .load(__dirname) 6 | .then((pkg) => pkg.update({ main: 'dist/index.js' })) 7 | .then((pkg) => pkg.save()) 8 | } 9 | if (process.argv.includes('--update-main-to-src')) { 10 | return pkgJson 11 | .load(__dirname) 12 | .then((pkg) => pkg.update({ main: 'src/index.ts' })) 13 | .then((pkg) => pkg.save()) 14 | } 15 | -------------------------------------------------------------------------------- /packages/server/update-pkg.js: -------------------------------------------------------------------------------- 1 | const pkgJson = require('@npmcli/package-json') 2 | 3 | if (process.argv.includes('--update-main-to-dist')) { 4 | return pkgJson 5 | .load(__dirname) 6 | .then((pkg) => pkg.update({ main: 'dist/index.js' })) 7 | .then((pkg) => pkg.save()) 8 | } 9 | if (process.argv.includes('--update-main-to-src')) { 10 | return pkgJson 11 | .load(__dirname) 12 | .then((pkg) => pkg.update({ main: 'src/index.ts' })) 13 | .then((pkg) => pkg.save()) 14 | } 15 | -------------------------------------------------------------------------------- /packages/server/src/logger.ts: 
--------------------------------------------------------------------------------
import pino from 'pino'
import pinoHttp from 'pino-http'

const enabledEnv = process.env.LOG_ENABLED
const enabled =
  enabledEnv === 'true' || enabledEnv === 't' || enabledEnv === '1'
const level = process.env.LOG_LEVEL || 'info'

const config = {
  enabled,
  level,
}

const logger = process.env.LOG_DESTINATION
  ? pino(config, pino.destination(process.env.LOG_DESTINATION))
  : pino(config)

export const loggerMiddleware = pinoHttp({
  logger,
})

--------------------------------------------------------------------------------
/packages/server/src/migrations/index.ts:
--------------------------------------------------------------------------------
// NOTE this file can be edited by hand, but it is also appended to by the migrations:create command.
// It's important that every migration is exported from here with the proper name. We'd simplify
// this with kysely's FileMigrationProvider, but it doesn't play nicely with the build process.

export * as _20221020T204908820Z from './20221020T204908820Z-operations-init'
export * as _20230223T215019669Z from './20230223T215019669Z-refactor'
export * as _20230406T174552885Z from './20230406T174552885Z-did-locks'

--------------------------------------------------------------------------------
/packages/server/src/migrations/20230406T174552885Z-did-locks.ts:
--------------------------------------------------------------------------------
import { Kysely } from 'kysely'

export async function up(db: Kysely<any>): Promise<void> {
  await db.schema
    .createTable('dids')
    .addColumn('did', 'text', (col) => col.primaryKey())
    .execute()

  await db
    .insertInto('dids')
    .columns(['did'])
    .expression((qb) => qb.selectFrom('operations').select(['did']).distinct())
    .execute()
  // Migration code
}

export async function down(db: Kysely<any>): Promise<void> {
  await db.schema.dropTable('dids').execute()
  // Migration code
}

--------------------------------------------------------------------------------
/packages/server/bin/did-create.ts:
--------------------------------------------------------------------------------
#!/usr/bin/env ts-node

import { EcdsaKeypair } from '@atproto/crypto'
import { Client } from '@did-plc/lib'

export async function main() {
  const url = process.argv[2]
  const handle = process.argv[3]
  console.log({ url, handle })
  const signingKey = await EcdsaKeypair.create()
  const recoveryKey = await EcdsaKeypair.create()
  const client = new Client(url)
  const did = await client.createDid(
    signingKey,
    recoveryKey.did(),
    handle,
    handle.split('.').slice(1).join('.'),
  )
  console.log(`Created did: ${url}/${did}`)
}

main()

--------------------------------------------------------------------------------
/packages/server/src/bin.ts:
--------------------------------------------------------------------------------
import './env'
import { Database, PlcDatabase } from './db'
import PlcServer from '.'

const run = async () => {
  const dbUrl = process.env.DATABASE_URL

  let db: PlcDatabase
  if (dbUrl) {
    const pgDb = Database.postgres({ url: dbUrl })
    await pgDb.migrateToLatestOrThrow()
    db = pgDb
  } else {
    db = Database.mock()
  }

  const envPort = parseInt(process.env.PORT || '')
  const port = isNaN(envPort) ? 2582 : envPort

  const plc = PlcServer.create({ db, port })
  await plc.start()
  console.log(`👤 PLC server is running at http://localhost:${port}`)
}

run()

--------------------------------------------------------------------------------
/packages/server/src/migrations/20221020T204908820Z-operations-init.ts:
--------------------------------------------------------------------------------
import { Kysely } from 'kysely'

export async function up(db: Kysely<any>): Promise<void> {
  await db.schema
    .createTable('operations')
    .addColumn('did', 'varchar', (col) => col.notNull())
    .addColumn('operation', 'text', (col) => col.notNull())
    .addColumn('cid', 'varchar', (col) => col.notNull())
    .addColumn('nullified', 'int2', (col) => col.defaultTo(0))
    .addColumn('createdAt', 'varchar', (col) => col.notNull())
    .addPrimaryKeyConstraint('primary_key', ['did', 'cid'])
    .execute()
}

export async function down(db: Kysely<any>): Promise<void> {
  await db.schema.dropTable('operations').execute()
}

--------------------------------------------------------------------------------
/packages/lib/build.js:
--------------------------------------------------------------------------------
const pkgJson = require('@npmcli/package-json')
const { nodeExternalsPlugin } = require('esbuild-node-externals')

const buildShallow =
  process.argv.includes('--shallow') || process.env.PLC_BUILD_SHALLOW === 'true'

if (process.argv.includes('--update-main-to-dist')) {
  return pkgJson
    .load(__dirname)
    .then((pkg) => pkg.update({ main: 'dist/index.js' }))
    .then((pkg) => pkg.save())
}

require('esbuild').build({
  logLevel: 'info',
  entryPoints: [
    'src/index.ts',
  ],
  bundle: true,
  sourcemap: true,
  treeShaking: true,
  outdir: 'dist',
  platform: 'node',
  assetNames: 'src/static',
  plugins: buildShallow ? [nodeExternalsPlugin()] : [],
})

--------------------------------------------------------------------------------
/packages/server/pg/docker-compose.yaml:
--------------------------------------------------------------------------------
version: '3.8'
services:
  # An ephemerally-stored postgres database for single-use test runs
  db_test: &db_test
    image: postgres:14.4-alpine
    environment:
      - POSTGRES_USER=pg
      - POSTGRES_PASSWORD=password
    ports:
      - '5433:5432'
    # Healthcheck ensures db is queryable when `docker-compose up --wait` completes
    healthcheck:
      test: 'pg_isready -U pg'
      interval: 500ms
      timeout: 10s
      retries: 20
  # A persistently-stored postgres database
  db:
    <<: *db_test
    ports:
      - '5432:5432'
    healthcheck:
      disable: true
    volumes:
      - plc_db:/var/lib/postgresql/data
volumes:
  plc_db:

--------------------------------------------------------------------------------
/jest.config.base.js:
--------------------------------------------------------------------------------
// Jest doesn't like ES modules, so we need to transpile them
// For each one, add them to this list, add them to
// "workspaces.nohoist" in the root package.json, and
// make sure that a babel.config.js is in the package root
const esModules = ['get-port', 'node-fetch'].join('|')

// jest.config.base.js
module.exports = {
  roots: ['<rootDir>/src', '<rootDir>/tests'],
  transform: {
    '^.+\\.ts$': 'ts-jest',
    '^.+\\.js?$': 'babel-jest'
  },
  transformIgnorePatterns: [`/node_modules/(?!${esModules})`],
  testRegex: '(/tests/.*.(test|spec)).(jsx?|tsx?)$',
  moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'],
  setupFiles: ['<rootDir>/../../test-setup.ts'],
  verbose: true,
  testTimeout: 30000
}

--------------------------------------------------------------------------------
/packages/server/pg/with-test-db.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env sh

# Example usage:
# ./with-test-db.sh psql postgresql://pg:password@localhost:5433/postgres -c 'select 1;'

dir=$(dirname $0)
compose_file="$dir/docker-compose.yaml"

docker compose -f $compose_file up --wait --force-recreate db_test
echo # newline

trap on_sigint INT
on_sigint() {
  echo # newline
  docker compose -f $compose_file rm -f --stop --volumes db_test
  exit $?
}

# Based on creds in docker-compose.yaml
export PGPORT=5433
export PGHOST=localhost
export PGUSER=pg
export PGPASSWORD=password
export PGDATABASE=postgres
export DATABASE_URL="postgresql://pg:password@localhost:5433/postgres"
"$@"
code=$?

echo # newline
docker compose -f $compose_file rm -f --stop --volumes db_test

exit $code

--------------------------------------------------------------------------------
/packages/server/build.js:
--------------------------------------------------------------------------------
const pkgJson = require('@npmcli/package-json')
const { nodeExternalsPlugin } = require('esbuild-node-externals')

const buildShallow =
  process.argv.includes('--shallow') || process.env.PLC_BUILD_SHALLOW === 'true'

if (process.argv.includes('--update-main-to-dist')) {
  return pkgJson
    .load(__dirname)
    .then((pkg) => pkg.update({ main: 'dist/index.js' }))
    .then((pkg) => pkg.save())
}

require('esbuild').build({
  logLevel: 'info',
  entryPoints: [
    'src/index.ts',
    'src/bin.ts',
    'src/db/index.ts',
  ],
  bundle: true,
  sourcemap: true,
  treeShaking: true,
  outdir: 'dist',
  platform: 'node',
  assetNames: 'src/static',
  plugins: buildShallow ? [nodeExternalsPlugin()] : [],
  external: [
    // Referenced in pg driver, but optional and we don't use it
    'pg-native',
  ],
})

--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
{
  "compilerOptions": {
    "declaration": true,
    "noImplicitAny": false,
    "removeComments": true,
    "noLib": false,
    "emitDecoratorMetadata": true,
    "experimentalDecorators": true,
    "target": "es2020",
    "sourceMap": true,
    "module": "commonjs",
    "jsx": "preserve",
    "strict": true,
    "strictPropertyInitialization": false,
    "moduleResolution": "node",
    "resolveJsonModule": true,
    "allowSyntheticDefaultImports": true,
    "esModuleInterop": true,
    "lib": ["dom", "dom.iterable", "esnext", "webworker"],
    "skipLibCheck": true,
    "forceConsistentCasingInFileNames": true,
    "isolatedModules": true,
    "composite": true
  },
  "exclude": ["node_modules", "**/*/dist"],
  "references": [
    { "path": "./packages/lib/tsconfig.build.json" },
    { "path": "./packages/server/tsconfig.build.json" }
  ]
}

--------------------------------------------------------------------------------
/packages/server/src/db/types.ts:
--------------------------------------------------------------------------------
import * as plc from '@did-plc/lib'
import { Generated } from 'kysely'

export interface PlcDatabase {
  close(): Promise<void>
  healthCheck(): Promise<void>
  validateAndAddOp(
    did: string,
    proposed: plc.CompatibleOpOrTombstone,
  ): Promise<void>
  opsForDid(did: string): Promise<plc.CompatibleOpOrTombstone[]>
  indexedOpsForDid(
    did: string,
    includeNull?: boolean,
  ): Promise<plc.IndexedOperation[]>
  lastOpForDid(did: string): Promise<plc.CompatibleOpOrTombstone | null>
  exportOps(count: number, after?: Date): Promise<plc.IndexedOperation[]>
}

export interface DidsTable {
  did: string
}

export interface OperationsTable {
  did: string
  operation: plc.CompatibleOpOrTombstone
  cid: string
  nullified: boolean
  createdAt: Generated<Date>
}

export interface DatabaseSchema {
  dids: DidsTable
  operations: OperationsTable
}

--------------------------------------------------------------------------------
/packages/server/tests/_util.ts:
--------------------------------------------------------------------------------
import { AddressInfo } from 'net'
import PlcServer, { AppContext } from '../src'
import Database from '../src/db'

export type CloseFn = () => Promise<void>
export type TestServerInfo = {
  ctx: AppContext
  url: string
  db: Database
  close: CloseFn
}

export const runTestServer = async (opts: {
  dbSchema: string
}): Promise<TestServerInfo> => {
  const { dbSchema } = opts
  const dbUrl = process.env.DATABASE_URL
  if (!dbUrl) {
    throw new Error('No postgres url provided')
  }

  const db = Database.postgres({
    url: dbUrl,
    schema: dbSchema,
  })
  await db.migrateToLatestOrThrow()

  const plc = PlcServer.create({ db })
  const plcServer = await plc.start()
  const { port } = plcServer.address() as AddressInfo

  return {
    ctx: plc.ctx,
    url: `http://localhost:${port}`,
    db,
    close: async () => {
      await plc.destroy()
    },
  }
}

--------------------------------------------------------------------------------
/LICENSE-MIT:
--------------------------------------------------------------------------------
MIT License

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
--------------------------------------------------------------------------------
/.github/workflows/build-test-verify.yaml:
--------------------------------------------------------------------------------
name: build-test-verify
on:
  pull_request:
  push:
    branches:
      - main
concurrency:
  group: '${{ github.workflow }}-${{ github.head_ref || github.ref }}'
  cancel-in-progress: true
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-node@v3
        with:
          node-version: 18
          cache: "yarn"
      - run: yarn install --frozen-lockfile
      - run: yarn build
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-node@v3
        with:
          node-version: 18
          cache: "yarn"
      - run: yarn install --frozen-lockfile
      - run: yarn test
  verify:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-node@v3
        with:
          node-version: 18
          cache: "yarn"
      - run: yarn install --frozen-lockfile
      - run: yarn verify

--------------------------------------------------------------------------------
/packages/lib/package.json:
--------------------------------------------------------------------------------
{
  "name": "@did-plc/lib",
  "version": "0.0.4",
  "main": "dist/index.js",
  "license": "MIT",
  "scripts": {
    "test": "jest",
    "prettier": "prettier --check src/",
    "prettier:fix": "prettier --write src/",
    "lint": "eslint . --ext .ts,.tsx",
    "lint:fix": "yarn lint --fix",
    "verify": "run-p prettier lint",
    "verify:fix": "yarn prettier:fix && yarn lint:fix",
    "build": "node ./build.js",
    "postbuild": "tsc --build tsconfig.build.json",
    "update-main-to-dist": "node ./update-pkg.js --update-main-to-dist",
    "update-main-to-src": "node ./update-pkg.js --update-main-to-src",
    "prepublish": "npm run update-main-to-dist",
    "postpublish": "npm run update-main-to-src"
  },
  "dependencies": {
    "@atproto/common": "0.3.0",
    "@atproto/crypto": "0.2.2",
    "@ipld/dag-cbor": "^7.0.3",
    "axios": "^1.3.4",
    "multiformats": "^9.6.4",
    "uint8arrays": "3.0.0",
    "zod": "^3.21.4"
  },
  "devDependencies": {
    "eslint-plugin-prettier": "^4.2.1"
  }
}

--------------------------------------------------------------------------------
/packages/server/bin/migration-create.ts:
--------------------------------------------------------------------------------
#!/usr/bin/env ts-node

import * as fs from 'fs/promises'
import * as path from 'path'

export async function main() {
  const now = new Date()
  const prefix = now.toISOString().replace(/[^a-z0-9]/gi, '') // Order of migrations matches alphabetical order of their names
  const name = process.argv[2]
  if (!name || !name.match(/^[a-z0-9-]+$/)) {
    process.exitCode = 1
    return console.error(
      'Must pass a migration name consisting of lowercase letters, numbers, and dashes.',
    )
  }
  const filename = `${prefix}-${name}`
  const dir = path.join(__dirname, '..', 'src', 'migrations')

  await fs.writeFile(path.join(dir, `${filename}.ts`), template, { flag: 'wx' })
  await fs.writeFile(
    path.join(dir, 'index.ts'),
    `export * as _${prefix} from './${filename}'\n`,
    { flag: 'a' },
  )
}

const template = `import { Kysely } from 'kysely'

export async function up(db: Kysely<unknown>): Promise<void> {
  // Migration code
}

export async function down(db: Kysely<unknown>): Promise<void> {
  // Migration code
}
`

main()

--------------------------------------------------------------------------------
/.eslintrc:
--------------------------------------------------------------------------------
{
  "root": true,
  // parse TypeScript files
  // https://github.com/typescript-eslint/typescript-eslint/tree/master/packages/parser
  "parser": "@typescript-eslint/parser",
  // configure eslint using options described at
  // https://github.com/typescript-eslint/typescript-eslint/tree/master/packages/eslint-plugin
  "plugins": ["@typescript-eslint"],
  "extends": [
    "eslint:recommended",
    "plugin:@typescript-eslint/eslint-recommended",
    "plugin:@typescript-eslint/recommended",
    "plugin:prettier/recommended",
    "prettier"
  ],
  "ignorePatterns": [
    "dist",
    "node_modules",
    "jest.config.base.js",
    "jest.bench.config.js",
    "jest.config.js",
    "babel.config.js",
    "build.js",
    "update-pkg.js"
  ],
  "rules": {
    "no-var": "error",
    "prefer-const": "warn",
    "@typescript-eslint/no-unused-vars": ["warn", { "argsIgnorePattern": "^_" }],
    "@typescript-eslint/ban-ts-comment": "off",
    "@typescript-eslint/no-empty-interface": "off",
    "@typescript-eslint/explicit-module-boundary-types": "off",
    "@typescript-eslint/no-empty-function": "off"
  }
}

--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------

SHELL = /bin/bash
.SHELLFLAGS = -o pipefail -c

.PHONY: help
help: ## Print info about all commands
	@echo "Helper Commands:"
	@echo
	@grep -E '^[a-zA-Z0-9_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "  \033[01;32m%-20s\033[0m %s\n", $$1, $$2}'
	@echo
	@echo "NOTE: dependencies between commands are not automatic. Eg, you must run 'deps' and 'build' first, and after any changes"

.PHONY: build
build: ## Compile all modules
	yarn build

.PHONY: test
test: ## Run all tests
	yarn test

.PHONY: fmt
fmt: ## Run syntax re-formatting
	yarn prettier

.PHONY: lint
lint: ## Run style checks and verify syntax
	yarn verify

.PHONY: nvm-setup
nvm-setup: ## Use NVM to install and activate node+yarn
	nvm install 18
	nvm use 18
	npm install --global yarn

.PHONY: deps
deps: ## Installs dependent libs using 'yarn install'
	yarn install --frozen-lockfile

.PHONY: run-dev-plc
run-dev-plc: ## Run PLC server "dev" config (needs local PostgreSQL)
	if [ ! -f "packages/server/.dev.env" ]; then cp packages/server/example.dev.env packages/server/.dev.env; fi
	cd packages/server; ENV=dev yarn run start | yarn exec pino-pretty

--------------------------------------------------------------------------------
/packages/server/src/error.ts:
--------------------------------------------------------------------------------
import { PlcError } from '@did-plc/lib'
import { ErrorRequestHandler } from 'express'

export const handler: ErrorRequestHandler = (err, req, res, next) => {
  // normalize our PLC errors to server errors
  if (PlcError.is(err)) {
    err = ServerError.fromPlcError(err)
  }

  req.log.info(
    err,
    ServerError.is(err)
      ? 'handled server error'
      : 'unexpected internal server error',
  )
  if (res.headersSent) {
    return next(err)
  }
  if (ServerError.is(err)) {
    return res.status(err.status).json({ message: err.message })
  } else {
    return res.status(500).json({ message: 'Internal Server Error' })
  }
}

export class ServerError extends Error {
  status: number
  constructor(status: number, message: string) {
    super(message)
    this.status = status
  }

  static is(obj: unknown): obj is ServerError {
    return (
      !!obj &&
      typeof obj === 'object' &&
      typeof (obj as Record<string, unknown>).message === 'string' &&
      typeof (obj as Record<string, unknown>).status === 'number'
    )
  }

  static fromPlcError(err: PlcError): ServerError {
    return new ServerError(400, err.message)
  }
}

--------------------------------------------------------------------------------
/packages/server/Dockerfile:
--------------------------------------------------------------------------------
FROM node:18-alpine as build

# Move files into the image and install
WORKDIR /app
COPY ./*.* ./
# NOTE plc's transitive dependencies go here: if that changes, this needs to be updated.
COPY ./packages/lib ./packages/lib
COPY ./packages/server ./packages/server
RUN PLC_BUILD_SHALLOW=true yarn install --frozen-lockfile > /dev/null
RUN yarn workspaces run build --update-main-to-dist > /dev/null
# Remove non-prod deps
RUN yarn install --production --ignore-scripts --prefer-offline > /dev/null

WORKDIR packages/server/service
RUN yarn install --frozen-lockfile > /dev/null

# Uses assets from build stage to reduce build size
FROM node:18-alpine

# RUN npm install -g yarn
RUN apk add --update dumb-init

# Avoid zombie processes, handle signal forwarding
ENTRYPOINT ["dumb-init", "--"]

WORKDIR /app/packages/server/service
COPY --from=build /app /app

EXPOSE 3000
ENV PORT=3000
ENV NODE_ENV=production

# https://github.com/nodejs/docker-node/blob/master/docs/BestPractices.md#non-root-user
USER node
CMD ["node", "--enable-source-maps", "index.js"]

LABEL org.opencontainers.image.source=https://github.com/bluesky-social/did-method-plc
LABEL org.opencontainers.image.description="DID PLC server"
LABEL org.opencontainers.image.licenses=MIT

--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
{
  "name": "did-method-plc",
  "version": "0.0.1",
  "repository": "git@github.com:bluesky-social/did-method-plc.git",
  "author": "Bluesky PBLLC <protocol@blueskyweb.xyz>",
  "license": "MIT",
  "private": true,
  "engines": {
    "node": ">=18"
  },
  "scripts": {
    "prepublish": "yarn build",
    "verify": "lerna run verify --stream",
    "prettier": "lerna run prettier",
    "build": "lerna run build",
    "test": "NODE_ENV=development lerna run test --stream"
  },
  "devDependencies": {
    "@babel/core": "^7.18.6",
    "@babel/preset-env": "^7.18.6",
    "@npmcli/package-json": "^3.0.0",
    "@types/jest": "^28.1.4",
    "@types/node": "^18.0.0",
    "@typescript-eslint/eslint-plugin": "^5.38.1",
    "@typescript-eslint/parser": "^5.38.1",
    "babel-eslint": "^10.1.0",
    "babel-jest": "^28.1.2",
    "dotenv": "^16.0.3",
    "esbuild": "^0.14.48",
    "esbuild-node-externals": "^1.5.0",
    "esbuild-plugin-copy": "^1.6.0",
    "eslint": "^8.24.0",
    "eslint-config-prettier": "^8.5.0",
    "jest": "^28.1.2",
    "lerna": "^4.0.0",
    "npm-run-all": "^4.1.5",
    "pino-pretty": "^9.1.0",
    "prettier": "^2.7.1",
    "prettier-config-standard": "^5.0.0",
    "ts-jest": "^28.0.5",
    "ts-node": "^10.8.2",
    "typescript": "^4.8.4"
  },
  "workspaces": {
    "packages": [
      "packages/*"
    ]
  }
}

--------------------------------------------------------------------------------
/packages/server/service/index.js:
--------------------------------------------------------------------------------
'use strict' /* eslint-disable */

require('dd-trace/init') // Only works with commonjs
const { Database, PlcServer } = require('..')

const main = async () => {
  const version = process.env.PLC_VERSION
  const dbCreds = JSON.parse(process.env.DB_CREDS_JSON)
  const dbMigrateCreds = JSON.parse(process.env.DB_MIGRATE_CREDS_JSON)
  const dbSchema = process.env.DB_SCHEMA || undefined
  // Migrate using credentialed user
  const migrateDb = Database.postgres({
    url: pgUrl(dbMigrateCreds),
    schema: dbSchema,
  })
  await migrateDb.migrateToLatestOrThrow()
  await migrateDb.close()
  // Use lower-credentialed user to run the app
  const db = Database.postgres({
    url: pgUrl(dbCreds),
    schema: dbSchema,
  })
  const port = parseInt(process.env.PORT)
  const plc = PlcServer.create({ db, port, version })
  const server = await plc.start()
  server.keepAliveTimeout = 90000
  // Graceful shutdown (see also https://aws.amazon.com/blogs/containers/graceful-shutdowns-with-ecs/)
  process.on('SIGTERM', async () => {
    await plc.destroy()
  })
}

const pgUrl = ({ username = "postgres", password = "postgres", host = "localhost", port = "5432", database = "postgres", sslmode }) => {
  const enc = encodeURIComponent
  return `postgresql://${username}:${enc(password)}@${host}:${port}/${database}${sslmode ? `?sslmode=${enc(sslmode)}` : ''}`
}

main()

--------------------------------------------------------------------------------
/packages/server/package.json:
--------------------------------------------------------------------------------
{
  "name": "@did-plc/server",
  "version": "0.0.1",
  "main": "dist/index.js",
  "license": "MIT",
  "scripts": {
    "start": "node dist/bin.js",
    "test": "./pg/with-test-db.sh jest",
    "test:log": "cat test.log | pino-pretty",
    "prettier": "prettier --check src/",
    "prettier:fix": "prettier --write src/",
    "lint": "eslint . --ext .ts,.tsx",
    "lint:fix": "yarn lint --fix",
    "verify": "run-p prettier lint",
    "verify:fix": "yarn prettier:fix && yarn lint:fix",
    "build": "node ./build.js",
    "postbuild": "tsc --build tsconfig.build.json",
    "migration:create": "ts-node ./bin/migration-create.ts",
    "did:create": "ts-node ./bin/did-create.ts",
    "update-main-to-dist": "node ./update-pkg.js --update-main-to-dist",
    "update-main-to-src": "node ./update-pkg.js --update-main-to-src",
    "prepublish": "npm run update-main-to-dist",
    "postpublish": "npm run update-main-to-src"
  },
  "dependencies": {
    "@atproto/common": "0.3.0",
    "@atproto/crypto": "0.2.2",
    "@did-plc/lib": "*",
    "axios": "^1.3.4",
    "cors": "^2.8.5",
    "dotenv": "^16.0.0",
    "express": "^4.18.2",
    "express-async-errors": "^3.1.1",
    "http-terminator": "^3.2.0",
    "kysely": "^0.23.4",
    "multiformats": "^9.6.4",
    "pg": "^8.9.0",
    "pino": "^8.11.0",
    "pino-http": "^8.3.3"
  },
  "devDependencies": {
    "@types/pg": "^8.6.5",
    "eslint-plugin-prettier": "^4.2.1"
  }
}

--------------------------------------------------------------------------------
/packages/lib/src/error.ts:
--------------------------------------------------------------------------------
export class PlcError extends Error {
  plcError = true
  constructor(msg: string) {
    super(msg)
  }

  static is(obj: unknown): obj is PlcError {
    if (obj && typeof obj === 'object' && obj['plcError'] === true) {
      return true
    }
    return false
  }
}
export class ImproperOperationError extends PlcError {
  constructor(public reason: string, public op: unknown) {
    super(`Improperly formatted operation, ${reason}: ${op}`)
  }
}

export class MisorderedOperationError extends PlcError {
  constructor() {
    super('Operations not correctly ordered')
  }
}

export class LateRecoveryError extends PlcError {
  constructor(public timeLapsed: number) {
    super(
      `Recovery operation occurred outside of the allowed 72 hr recovery window. Time lapsed: ${timeLapsed}`,
    )
  }
}

export class GenesisHashError extends PlcError {
  constructor(public expected: string) {
    super(
      `Hash of genesis operation does not match DID identifier: ${expected}`,
    )
  }
}

export class InvalidSignatureError extends PlcError {
  constructor(public op: unknown) {
    super(`Invalid signature on op: ${JSON.stringify(op)}`)
  }
}

export class UnsupportedKeyError extends PlcError {
  constructor(public key: string, public err: unknown) {
    super(`Unsupported key type ${key}: ${err}`)
  }
}

export class ImproperlyFormattedDidError extends PlcError {
  constructor(public reason: string) {
    super(`Improperly formatted did: ${reason}`)
  }
}

--------------------------------------------------------------------------------
/.github/workflows/build-and-push-ghcr.yaml:
--------------------------------------------------------------------------------
name: build-and-push-ghcr
on:
  push:
    branches:
      - main
env:
  REGISTRY: ghcr.io
  USERNAME: ${{ github.actor }}
  PASSWORD: ${{ secrets.GITHUB_TOKEN }}
  # github.repository as <account>/<repo>
  IMAGE_NAME: ${{ github.repository }}

jobs:
  plc-container-ghcr:
    if: github.repository == 'bluesky-social/did-method-plc'
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
      id-token: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      - name: Setup Docker buildx
        uses: docker/setup-buildx-action@v1

      - name: Log into registry ${{ env.REGISTRY }}
        uses: docker/login-action@v2
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ env.USERNAME }}
          password: ${{ env.PASSWORD }}

      - name: Extract Docker metadata
        id: meta
        uses: docker/metadata-action@v4
        with:
          images: |
            ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            type=sha,enable=true,priority=100,prefix=plc:,suffix=,format=long

      - name: Build and push Docker image
        id: build-and-push
        uses: docker/build-push-action@v4
        with:
          context: .
          push: ${{ github.event_name != 'pull_request' }}
          file: ./packages/server/Dockerfile
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

--------------------------------------------------------------------------------
/.github/workflows/build-and-push-aws.yaml:
--------------------------------------------------------------------------------
name: build-and-push-aws
on:
  push:
    branches:
      - main
env:
  REGISTRY: ${{ secrets.AWS_ECR_REGISTRY_USEAST2_PACKAGES_REGISTRY }}
  USERNAME: ${{ secrets.AWS_ECR_REGISTRY_USEAST2_PACKAGES_USERNAME }}
  PASSWORD: ${{ secrets.AWS_ECR_REGISTRY_USEAST2_PACKAGES_PASSWORD }}
  IMAGE_NAME: plc

jobs:
  plc-container-aws:
    if: github.repository == 'bluesky-social/did-method-plc'
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
      id-token: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      - name: Setup Docker buildx
        uses: docker/setup-buildx-action@v1

      - name: Log into registry ${{ env.REGISTRY }}
        uses: docker/login-action@v2
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ env.USERNAME }}
          password: ${{ env.PASSWORD }}

      - name: Extract Docker metadata
        id: meta
        uses: docker/metadata-action@v4
        with:
          images: |
            ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            type=sha,enable=true,priority=100,prefix=,suffix=,format=long

      - name: Build and push Docker image
        id: build-and-push
        uses: docker/build-push-action@v4
        with:
          context: .
          push: ${{ github.event_name != 'pull_request' }}
          file: ./packages/server/Dockerfile
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

--------------------------------------------------------------------------------
/packages/server/src/index.ts:
--------------------------------------------------------------------------------
// catch errors that get thrown in async route handlers
// this is a relatively non-invasive change to express
// they get handled in the error.handler middleware
// leave at top of file before importing Routes
import 'express-async-errors'

import express from 'express'
import cors from 'cors'
import http from 'http'
import events from 'events'
import * as error from './error'
import createRouter from './routes'
import { loggerMiddleware } from './logger'
import AppContext from './context'
import { createHttpTerminator, HttpTerminator } from 'http-terminator'
import { PlcDatabase } from './db/types'

export * from './db'
export * from './context'

export class PlcServer {
  public ctx: AppContext
  public app: express.Application
  public server?: http.Server
  private terminator?: HttpTerminator

  constructor(opts: { ctx: AppContext; app: express.Application }) {
    this.ctx = opts.ctx
    this.app = opts.app
  }

  static create(opts: {
    db: PlcDatabase
    port?: number
    version?: string
  }): PlcServer {
    const app = express()
    app.use(express.json({ limit: '100kb' }))
    app.use(cors())

    app.use(loggerMiddleware)

    const ctx = new AppContext({
      db: opts.db,
      version: opts.version || '0.0.0',
      port: opts.port,
    })

    app.use('/', createRouter(ctx))
    app.use(error.handler)

    return new PlcServer({
      ctx,
      app,
    })
  }

  async start(): Promise<http.Server> {
    const server = this.app.listen(this.ctx.port)
    this.server = server
    this.terminator = createHttpTerminator({ server })
    await events.once(server, 'listening')
    return server
  }

  async destroy() {
    await this.terminator?.terminate()
    await this.ctx.db.close()
  }
}

export default PlcServer

--------------------------------------------------------------------------------
/packages/server/src/db/mock.ts:
--------------------------------------------------------------------------------
import { cidForCbor } from '@atproto/common'
import * as plc from '@did-plc/lib'
import { ServerError } from '../error'
import { PlcDatabase } from './types'

type Contents = Record<string, plc.IndexedOperation[]>

export class MockDatabase implements PlcDatabase {
  contents: Contents = {}

  async close(): Promise<void> {}
  async healthCheck(): Promise<void> {}

  async validateAndAddOp(
    did: string,
    proposed: plc.OpOrTombstone,
  ): Promise<void> {
    this.contents[did] ??= []
    const opsBefore = this.contents[did]
    // throws if invalid
    const { nullified } = await plc.assureValidNextOp(did, opsBefore, proposed)
    const cid = await cidForCbor(proposed)
    if (this.contents[did] !== opsBefore) {
      throw new ServerError(
        409,
        `Proposed prev does not match the most recent operation`,
      )
    }
    this.contents[did].push({
      did,
      operation: proposed,
      cid,
      nullified: false,
      createdAt: new Date(),
    })

    if (nullified.length > 0) {
      for (let i = 0; i < this.contents[did].length; i++) {
        const cid = this.contents[did][i].cid
        for (const toCheck of nullified) {
          if (toCheck.equals(cid)) {
            this.contents[did][i].nullified = true
          }
        }
      }
    }
  }

  async opsForDid(did: string): Promise<plc.CompatibleOpOrTombstone[]> {
    const ops = await this.indexedOpsForDid(did)
    return ops.map((op) => op.operation)
  }

  async indexedOpsForDid(
    did: string,
    includeNull = false,
  ): Promise<plc.IndexedOperation[]> {
    const ops = this.contents[did] ?? []
    if (includeNull) {
      return ops
    }
    return ops.filter((op) => op.nullified === false)
  }

  async lastOpForDid(did: string): Promise<plc.CompatibleOpOrTombstone | null> {
    const op = this.contents[did]?.at(-1)

    if (!op) return null
    return op.operation
  }

  // disabled in mocks
  async exportOps(_count: number, _after?: Date): Promise<plc.IndexedOperation[]> {
    return []
  }
}

export default MockDatabase

--------------------------------------------------------------------------------
/packages/server/tests/migrations/did-locks.test.ts:
--------------------------------------------------------------------------------
import { cidForCbor, DAY } from '@atproto/common'
import { Secp256k1Keypair } from '@atproto/crypto'
import * as plc from '@did-plc/lib'
import { Kysely } from 'kysely'
import { Database } from '../../src'

describe('did-locks migration', () => {
  let db: Database
  let rawDb: Kysely<any>

  beforeAll(async () => {
    const dbUrl = process.env.DATABASE_URL
    if (!dbUrl) {
      throw new Error('No postgres url provided')
    }
    db = Database.postgres({
      url: dbUrl,
      schema: 'migration_did_locks',
    })

    await db.migrateToOrThrow('_20230223T215019669Z')
    rawDb = db.db
  })

  afterAll(async () => {
    await db.close()
  })

  const dids: string[] = []

  it('fills the database with some operations', async () => {
    const ops: any[] = []
    for (let i = 0; i < 100; i++) {
      const signingKey = await Secp256k1Keypair.create()
      const recoveryKey = await Secp256k1Keypair.create()
      const { op, did } = await plc.createOp({
        signingKey: signingKey.did(),
        rotationKeys: [recoveryKey.did()],
        handle: `user${i}.test`,
        pds: 'https://example.com',
        signer: recoveryKey,
      })
      const cid = await cidForCbor(op)
      const randomOffset = Math.floor(Math.random() * DAY * 60)
      const time = new Date(Date.now() - randomOffset).toISOString()
      ops.push({
        did,
        operation: JSON.stringify(op),
        cid: cid.toString(),
        nullified: 0,
        createdAt: time,
      })
      dids.push(did)
      const op2 = await plc.updateHandleOp(op, recoveryKey, `user${i}-2.test`)
      const cid2 = await cidForCbor(op2)
      ops.push({
        did,
        operation: JSON.stringify(op2),
        cid: cid2.toString(),
        nullified: 0,
        createdAt: new Date().toISOString(),
      })
    }
    await rawDb.insertInto('operations').values(ops).execute()
  })

  it('migrates', async () => {
    await db.migrateToOrThrow('_20230406T174552885Z')
  })

  it('correctly filled in dids', async () => {
    const migrated = await rawDb.selectFrom('dids').selectAll().execute()
    const sorted = migrated.map((row) => row.did).sort()
    expect(sorted).toEqual(dids.sort())
  })
})

--------------------------------------------------------------------------------
/packages/server/src/migrations/20230223T215019669Z-refactor.ts:
--------------------------------------------------------------------------------
import { Kysely, sql } from 'kysely'

export async function up(db: Kysely<any>): Promise<void> {
  await db.schema
    .createTable('operations_new')
    .addColumn('did', 'text', (col) => col.notNull())
    .addColumn('operation', 'jsonb', (col) => col.notNull())
    .addColumn('cid', 'text', (col) => col.notNull())
    .addColumn('nullified', 'boolean', (col) => col.notNull())
    .addColumn('createdAt', 'timestamptz', (col) =>
      col.notNull().defaultTo(sql`current_timestamp`),
    )
    .addPrimaryKeyConstraint('operations_primary_key', ['did', 'cid'])
    .execute()

  const dump = await db.selectFrom('operations').selectAll().execute()
  const vals = dump.map((row) => ({
    did: row.did,
    operation: row.operation,
    cid: row.cid,
    nullified: row.nullified === 1 ? true : false,
    createdAt: row.createdAt,
  }))

  if (vals.length > 0) {
    await db.insertInto('operations_new').values(vals).execute()
  }

  await db.schema.dropTable('operations').execute()

  await db.schema.alterTable('operations_new').renameTo('operations').execute()

  await db.schema
    .createIndex('operations_createdAt_index')
    .on('operations')
    .column('createdAt')
    .execute()
}

export async function down(db: Kysely<any>): Promise<void> {
  await db.schema
    .createTable('operations_new')
    .addColumn('did', 'varchar', (col) => col.notNull())
    .addColumn('operation', 'text', (col) => col.notNull())
    .addColumn('cid', 'varchar', (col) => col.notNull())
    .addColumn('nullified', 'int2', (col) => col.defaultTo(0))
    .addColumn('createdAt', 'varchar', (col) => col.notNull())
    .addPrimaryKeyConstraint('primary_key', ['did', 'cid'])
    .execute()

  const dump = await db.selectFrom('operations').selectAll().execute()
  const vals = dump.map((row) => ({
    did: row.did,
    operation: JSON.stringify(row.operation),
    cid: row.cid,
    nullified: row.nullified ? 1 : 0,
    createdAt: row.createdAt.toISOString(),
  }))

  await db.insertInto('operations_new').values(vals).execute()

  await db.schema.dropIndex('operations_createdAt_index').execute()
  await db.schema.dropTable('operations').execute()

  await db.schema.alterTable('operations_new').renameTo('operations').execute()
}

--------------------------------------------------------------------------------
/packages/lib/src/document.ts:
--------------------------------------------------------------------------------
import * as uint8arrays from 'uint8arrays'
import * as crypto from '@atproto/crypto'
import * as t from './types'
import { UnsupportedKeyError } from './error'
import { ParsedMultikey } from '@atproto/crypto'

export const formatDidDoc = (data: t.DocumentData): t.DidDocument => {
  const context = ['https://www.w3.org/ns/did/v1']

  const verificationMethods: VerificationMethod[] = []
  for (const [keyid, key] of Object.entries(data.verificationMethods)) {
    const info = formatKeyAndContext(key)
    if (!context.includes(info.context)) {
      context.push(info.context)
    }
    verificationMethods.push({
      id: `#${keyid}`,
      type: info.type,
      controller: data.did,
      publicKeyMultibase: info.publicKeyMultibase,
    })
  }

  const services: Service[] = []
  for (const [serviceId, service] of Object.entries(data.services)) {
    services.push({
      id: `#${serviceId}`,
      type: service.type,
      serviceEndpoint: service.endpoint,
    })
  }

  return {
    '@context': context,
    id: data.did,
    alsoKnownAs: data.alsoKnownAs,
    verificationMethod: verificationMethods,
    service: services,
  }
}

type VerificationMethod = {
  id: string
  type: string
  controller: string
  publicKeyMultibase: string
}

type Service = {
  id: string
  type: string
  serviceEndpoint: string
}

type KeyAndContext = {
  context: string
  type: string
  publicKeyMultibase: string
}

const formatKeyAndContext = (key: string): KeyAndContext => {
  let keyInfo: ParsedMultikey
  try {
    keyInfo = crypto.parseDidKey(key)
  } catch (err) {
    throw new UnsupportedKeyError(key, err)
  }
  const { jwtAlg, keyBytes } = keyInfo

  if (jwtAlg === crypto.P256_JWT_ALG) {
    return {
      context: 'https://w3id.org/security/suites/ecdsa-2019/v1',
      type: 'EcdsaSecp256r1VerificationKey2019',
      publicKeyMultibase: `z${uint8arrays.toString(keyBytes, 'base58btc')}`,
    }
  } else if (jwtAlg === crypto.SECP256K1_JWT_ALG) {
    return {
      context: 'https://w3id.org/security/suites/secp256k1-2019/v1',
      type: 'EcdsaSecp256k1VerificationKey2019',
      publicKeyMultibase: `z${uint8arrays.toString(keyBytes, 'base58btc')}`,
    }
  }
  throw new UnsupportedKeyError(key, `Unsupported key type: ${jwtAlg}`)
}

--------------------------------------------------------------------------------
/packages/server/pg/README.md:
--------------------------------------------------------------------------------
# pg

Helpers for working with postgres

## Usage

### `with-test-db.sh`

This script allows you to run any command with a fresh, ephemeral/single-use postgres database available. When the script starts, a Dockerized postgres container starts up, and when the script completes that container is removed.

The environment variable `DATABASE_URL` will be set with a connection string that can be used to connect to the database. The [`PG*` environment variables](https://www.postgresql.org/docs/current/libpq-envars.html) that are recognized by libpq (i.e. used by the `psql` client) are also set.

**Example**

```
$ ./with-test-db.sh psql -c 'select 1;'
[+] Running 1/1
 ⠿ Container pg-db_test-1  Healthy    1.8s

 ?column?
----------
        1
(1 row)


[+] Running 1/1
 ⠿ Container pg-db_test-1  Stopped    0.1s
Going to remove pg-db_test-1
[+] Running 1/0
 ⠿ Container pg-db_test-1  Removed
```

### `docker-compose.yaml`

The Docker compose file can be used to run containerized versions of postgres either for single use (as is used by `with-test-db.sh`), or for longer-term use. These are set up as separate services named `db_test` and `db` respectively. In both cases the database is available on the host machine's `localhost` and credentials are:

- Username: pg
- Password: password

However, each service uses a different port, documented below, to avoid conflicts.

#### `db_test` service for single use

The single-use `db_test` service does not have any persistent storage. When the container is removed, data in the database disappears with it.

This service runs on port `5433`.

```
$ docker compose up db_test   # start container
$ docker compose stop db_test # stop container
$ docker compose rm db_test   # remove container
```

#### `db` service for persistent use

The `db` service has persistent storage on the host machine managed by Docker under a volume named `pg_plc_db`. When the container is removed, data in the database will remain on the host machine. In order to start fresh, you would need to remove the volume.

This service runs on port `5432`.
59 | 60 | ``` 61 | $ docker compose up db -d # start container 62 | $ docker compose stop db # stop container 63 | $ docker compose rm db # remove container 64 | $ docker volume rm pg_plc_db # remove volume 65 | ``` 66 | -------------------------------------------------------------------------------- /packages/lib/tests/compatibility.test.ts: -------------------------------------------------------------------------------- 1 | import { cidForCbor, DAY } from '@atproto/common' 2 | import { Secp256k1Keypair } from '@atproto/crypto' 3 | import { 4 | assureValidNextOp, 5 | CreateOpV1, 6 | deprecatedSignCreate, 7 | didForCreateOp, 8 | normalizeOp, 9 | updateRotationKeysOp, 10 | updateAtprotoKeyOp, 11 | validateOperationLog, 12 | } from '../src' 13 | 14 | describe('compatibility', () => { 15 | let signingKey: Secp256k1Keypair 16 | let recoveryKey: Secp256k1Keypair 17 | const handle = 'alice.test' 18 | const service = 'https://example.com' 19 | let did: string 20 | 21 | let legacyOp: CreateOpV1 22 | 23 | beforeAll(async () => { 24 | signingKey = await Secp256k1Keypair.create() 25 | recoveryKey = await Secp256k1Keypair.create() 26 | }) 27 | 28 | it('normalizes legacy create ops', async () => { 29 | legacyOp = await deprecatedSignCreate( 30 | { 31 | type: 'create', 32 | signingKey: signingKey.did(), 33 | recoveryKey: recoveryKey.did(), 34 | handle, 35 | service, 36 | prev: null, 37 | }, 38 | signingKey, 39 | ) 40 | 41 | did = await didForCreateOp(legacyOp) 42 | 43 | const normalized = normalizeOp(legacyOp) 44 | expect(normalized).toEqual({ 45 | type: 'plc_operation', 46 | verificationMethods: { 47 | atproto: signingKey.did(), 48 | }, 49 | rotationKeys: [recoveryKey.did(), signingKey.did()], 50 | alsoKnownAs: [`at://${handle}`], 51 | services: { 52 | atproto_pds: { 53 | type: 'AtprotoPersonalDataServer', 54 | endpoint: service, 55 | }, 56 | }, 57 | prev: null, 58 | sig: legacyOp.sig, 59 | }) 60 | }) 61 | 62 | it('validates a log with a legacy create op', async () => { 63 | const legacyCid = await cidForCbor(legacyOp) 64 | const newSigner = await Secp256k1Keypair.create() 65 | const newRotater = await Secp256k1Keypair.create() 66 | const nextOp = await updateAtprotoKeyOp( 67 | legacyOp, 68 | signingKey, 69 | newSigner.did(), 70 | ) 71 | const anotherOp = await updateRotationKeysOp(nextOp, signingKey, [ 72 | newRotater.did(), 73 | ]) 74 | await validateOperationLog(did, [legacyOp, nextOp]) 75 | await validateOperationLog(did, [legacyOp, nextOp, anotherOp]) 76 | 77 | const indexedLegacy = { 78 | did, 79 | operation: legacyOp, 80 | cid: legacyCid, 81 | nullified: false, 82 | createdAt: new Date(Date.now() - 7 * DAY), 83 | } 84 | 85 | const result = await assureValidNextOp(did, [indexedLegacy], nextOp) 86 | expect(result.nullified.length).toBe(0) 87 | expect(result.prev?.equals(legacyCid)).toBeTruthy() 88 | }) 89 | }) 90 | -------------------------------------------------------------------------------- /packages/lib/tests/document.test.ts: -------------------------------------------------------------------------------- 1 | import * as uint8arrays from 'uint8arrays' 2 | import { P256Keypair, parseDidKey, Secp256k1Keypair } from '@atproto/crypto' 3 | import * as document from '../src/document' 4 | import * as t from '../src/types' 5 | 6 | describe('document', () => { 7 | it('formats a valid DID document', async () => { 8 | const atprotoKey = await Secp256k1Keypair.create() 9 | const otherKey = await P256Keypair.create() 10 | const rotate1 = await Secp256k1Keypair.create() 11 | const rotate2 = await 
P256Keypair.create()
12 |     const alsoKnownAs = ['at://alice.test', 'https://bob.test']
13 |     const atpPds = 'https://example.com'
14 |     const otherService = 'https://other.com'
15 |     const data: t.DocumentData = {
16 |       did: 'did:example:alice',
17 |       verificationMethods: {
18 |         atproto: atprotoKey.did(),
19 |         other: otherKey.did(),
20 |       },
21 |       rotationKeys: [rotate1.did(), rotate2.did()],
22 |       alsoKnownAs,
23 |       services: {
24 |         atproto_pds: {
25 |           type: 'AtprotoPersonalDataServer',
26 |           endpoint: atpPds,
27 |         },
28 |         other: {
29 |           type: 'SomeService',
30 |           endpoint: otherService,
31 |         },
32 |       },
33 |     }
34 |     const doc = await document.formatDidDoc(data)
35 |     // only expected keys
36 |     expect(Object.keys(doc).sort()).toEqual(
37 |       ['@context', 'id', 'alsoKnownAs', 'verificationMethod', 'service'].sort(),
38 |     )
39 |     expect(doc['@context']).toEqual([
40 |       'https://www.w3.org/ns/did/v1',
41 |       'https://w3id.org/security/suites/secp256k1-2019/v1',
42 |       'https://w3id.org/security/suites/ecdsa-2019/v1',
43 |     ])
44 |     expect(doc.id).toEqual(data.did)
45 |     expect(doc.alsoKnownAs).toEqual(alsoKnownAs)
46 | 
47 |     expect(doc.verificationMethod.length).toBe(2)
48 | 
49 |     expect(doc.verificationMethod[0].id).toEqual('#atproto')
50 |     expect(doc.verificationMethod[0].type).toEqual(
51 |       'EcdsaSecp256k1VerificationKey2019',
52 |     )
53 |     expect(doc.verificationMethod[0].controller).toEqual(data.did)
54 |     const parsedAtprotoKey = parseDidKey(atprotoKey.did())
55 |     const atprotoKeyMultibase =
56 |       'z' + uint8arrays.toString(parsedAtprotoKey.keyBytes, 'base58btc')
57 |     expect(doc.verificationMethod[0].publicKeyMultibase).toEqual(
58 |       atprotoKeyMultibase,
59 |     )
60 | 
61 |     expect(doc.verificationMethod[1].id).toEqual('#other')
62 |     expect(doc.verificationMethod[1].type).toEqual(
63 |       'EcdsaSecp256r1VerificationKey2019',
64 |     )
65 |     expect(doc.verificationMethod[1].controller).toEqual(data.did)
66 |     const parsedOtherKey = parseDidKey(otherKey.did())
67 |     const otherKeyMultibase =
68 |       'z' + uint8arrays.toString(parsedOtherKey.keyBytes, 'base58btc')
69 |     expect(doc.verificationMethod[1].publicKeyMultibase).toEqual(
70 |       otherKeyMultibase,
71 |     )
72 | 
73 |     expect(doc.service.length).toBe(2)
74 |     expect(doc.service[0].id).toEqual('#atproto_pds')
75 |     expect(doc.service[0].type).toEqual('AtprotoPersonalDataServer')
76 |     expect(doc.service[0].serviceEndpoint).toEqual(atpPds)
77 |     expect(doc.service[1].id).toEqual('#other')
78 |     expect(doc.service[1].type).toEqual('SomeService')
79 |     expect(doc.service[1].serviceEndpoint).toEqual(otherService)
80 |   })
81 | })
82 | 
--------------------------------------------------------------------------------
/packages/server/tests/migrations/refactor.test.ts:
--------------------------------------------------------------------------------
1 | import { cidForCbor, DAY } from '@atproto/common'
2 | import { Secp256k1Keypair } from '@atproto/crypto'
3 | import * as plc from '@did-plc/lib'
4 | import { Kysely } from 'kysely'
5 | import { Database } from '../../src'
6 | 
7 | describe('refactor migration', () => {
8 |   let db: Database
9 |   let rawDb: Kysely<any>
10 | 
11 |   beforeAll(async () => {
12 |     const dbUrl = process.env.DATABASE_URL
13 |     if (!dbUrl) {
14 |       throw new Error('No postgres url provided')
15 |     }
16 |     db = Database.postgres({
17 |       url: dbUrl,
18 |       schema: 'migration_refactor',
19 |     })
20 | 
21 |     await db.migrateToOrThrow('_20221020T204908820Z')
22 |     rawDb = db.db
23 |   })
24 | 
25 |   afterAll(async () => {
26 |     await db.close()
27 |   })
28 | 
29 |   const ops: any[] = []
30 |   let before: any[]
31 | 
32 |   it('fills the database with some operations', async () => {
33 |     for (let i = 0; i < 100; i++) {
34 |       const signingKey = await Secp256k1Keypair.create()
35 |       const recoveryKey = await Secp256k1Keypair.create()
36 |       const op = await plc.deprecatedSignCreate(
37 |         {
38 |           type: 'create',
39 |           signingKey: signingKey.did(),
40 |           recoveryKey: recoveryKey.did(),
41 |           handle: `user${i}.test`,
42 |           service: 'https://example.com',
43 |           prev: null,
44 |         },
45 |         signingKey,
46 |       )
47 |       const did = await plc.didForCreateOp(op)
48 |       const cid = await cidForCbor(op)
49 |       const randomOffset = Math.floor(Math.random() * DAY * 60)
50 |       const time = new Date(Date.now() - randomOffset).toISOString()
51 |       ops.push({
52 |         did,
53 |         operation: JSON.stringify(op),
54 |         cid: cid.toString(),
55 |         nullified: 0,
56 |         createdAt: time,
57 |       })
58 |     }
59 |     await rawDb.insertInto('operations').values(ops).execute()
60 | 
61 |     before = await rawDb
62 |       .selectFrom('operations')
63 |       .selectAll()
64 |       .orderBy('did', 'asc')
65 |       .execute()
66 |   })
67 | 
68 |   it('migrates', async () => {
69 |     await db.migrateToOrThrow('_20230223T215019669Z')
70 |   })
71 | 
72 |   it('correctly migrated all data', async () => {
73 |     const migrated = await rawDb
74 |       .selectFrom('operations')
75 |       .selectAll()
76 |       .orderBy('did', 'asc')
77 |       .execute()
78 |     const ordered = ops.sort((a, b) => a.did.localeCompare(b.did))
79 |     expect(migrated.length).toBe(ordered.length)
80 |     for (let i = 0; i < migrated.length; i++) {
81 |       expect(migrated[i].did).toBe(ordered[i].did)
82 |       expect(migrated[i].operation).toEqual(JSON.parse(ordered[i].operation))
83 |       expect(migrated[i].cid).toBe(ordered[i].cid)
84 |       expect(migrated[i].nullified).toBe(
85 |         ordered[i].nullified === 1 ? true : false,
86 |       )
87 |       expect(migrated[i].createdAt).toEqual(new Date(ordered[i].createdAt))
88 |     }
89 |   })
90 | 
91 |   it('migrates down', async () => {
92 |     await db.migrateToOrThrow('_20221020T204908820Z')
93 |     const migratedBack = await rawDb
94 |       .selectFrom('operations')
95 |       .selectAll()
96 |       .orderBy('did', 'asc')
97 |       .execute()
98 |     expect(migratedBack.length).toBe(before.length)
99 |     // normalize json
100 |     const beforeNormalized = before.map((row) => ({
101 |       ...row,
102 |       operation: JSON.parse(row.operation),
103 |     }))
104 |     const migratedNormalized = migratedBack.map((row) => ({
105 |       ...row,
106 |       operation: JSON.parse(row.operation),
107 |     }))
108 | 
109 |     expect(migratedNormalized).toEqual(beforeNormalized)
110 |   })
111 | })
112 | 
--------------------------------------------------------------------------------
/packages/lib/src/types.ts:
--------------------------------------------------------------------------------
1 | import * as z from 'zod'
2 | import * as mf from 'multiformats/cid'
3 | 
4 | const cid = z
5 |   .any()
6 |   .refine((obj: unknown) => mf.CID.asCID(obj) !== null, {
7 |     message: 'Not a CID',
8 |   })
9 |   .transform((obj: unknown) => mf.CID.asCID(obj) as mf.CID)
10 | 
11 | const service = z.object({
12 |   type: z.string(),
13 |   endpoint: z.string(),
14 | })
15 | 
16 | const documentData = z.object({
17 |   did: z.string(),
18 |   rotationKeys: z.array(z.string()),
19 |   verificationMethods: z.record(z.string()),
20 |   alsoKnownAs: z.array(z.string()),
21 |   services: z.record(service),
22 | })
23 | export type DocumentData = z.infer<typeof documentData>
24 | 
25 | const unsignedCreateOpV1 = z.object({
26 |   type: z.literal('create'),
27 |   signingKey: z.string(),
28 |   recoveryKey: z.string(),
29 |   handle: z.string(),
30 |   service: z.string(),
31 |   prev: z.null(),
32 | })
33 | export type UnsignedCreateOpV1 = z.infer<typeof unsignedCreateOpV1>
34 | const createOpV1 = unsignedCreateOpV1.extend({ sig: z.string() })
35 | export type CreateOpV1 = z.infer<typeof createOpV1>
36 | 
37 | const unsignedOperation = z.object({
38 |   type: z.literal('plc_operation'),
39 |   rotationKeys: z.array(z.string()),
40 |   verificationMethods: z.record(z.string()),
41 |   alsoKnownAs: z.array(z.string()),
42 |   services: z.record(service),
43 |   prev: z.string().nullable(),
44 | })
45 | export type UnsignedOperation = z.infer<typeof unsignedOperation>
46 | const operation = unsignedOperation.extend({ sig: z.string() })
47 | export type Operation = z.infer<typeof operation>
48 | 
49 | const unsignedTombstone = z.object({
50 |   type: z.literal('plc_tombstone'),
51 |   prev: z.string(),
52 | })
53 | export type UnsignedTombstone = z.infer<typeof unsignedTombstone>
54 | const tombstone = unsignedTombstone.extend({ sig: z.string() })
55 | export type Tombstone = z.infer<typeof tombstone>
56 | 
57 | const opOrTombstone = z.union([operation, tombstone])
58 | export type OpOrTombstone = z.infer<typeof opOrTombstone>
59 | const compatibleOp = z.union([createOpV1, operation])
60 | export type CompatibleOp = z.infer<typeof compatibleOp>
61 | const compatibleOpOrTombstone = z.union([createOpV1, operation, tombstone])
62 | export type CompatibleOpOrTombstone = z.infer<typeof compatibleOpOrTombstone>
63 | 
64 | export const indexedOperation = z.object({
65 |   did: z.string(),
66 |   operation: compatibleOpOrTombstone,
67 |   cid: cid,
68 |   nullified: z.boolean(),
69 |   createdAt: z.date(),
70 | })
71 | export type IndexedOperation = z.infer<typeof indexedOperation>
72 | 
73 | export const exportedOp = z.object({
74 |   did: z.string(),
75 |   operation: compatibleOpOrTombstone,
76 |   cid: z.string(),
77 |   nullified: z.boolean(),
78 |   createdAt: z.string(),
79 | })
80 | export type ExportedOp = z.infer<typeof exportedOp>
81 | 
82 | export const didDocVerificationMethod = z.object({
83 |   id: z.string(),
84 |   type: z.string(),
85 |   controller: z.string(),
86 |   publicKeyMultibase: z.string(),
87 | })
88 | 
89 | export const didDocService = z.object({
90 |   id: z.string(),
91 |   type: z.string(),
92 |   serviceEndpoint: z.string(),
93 | })
94 | 
95 | export const didDocument = z.object({
96 |   '@context': z.array(z.string()),
97 |   id: z.string(),
98 |   alsoKnownAs: z.array(z.string()),
99 |   verificationMethod: z.array(didDocVerificationMethod),
100 |   service: z.array(didDocService),
101 | })
102 | export type DidDocument = z.infer<typeof didDocument>
103 | 
104 | export const def = {
105 |   documentData,
106 |   createOpV1,
107 |   unsignedOperation,
108 |   operation,
109 |   tombstone,
110 |   opOrTombstone,
111 |   compatibleOp,
112 |   compatibleOpOrTombstone,
113 |   indexedOperation,
114 |   exportedOp,
115 |   didDocument,
116 | }
117 | 
--------------------------------------------------------------------------------
/packages/server/src/routes.ts:
--------------------------------------------------------------------------------
1 | import express from 'express'
2 | import { cborEncode, check } from '@atproto/common'
3 | import * as plc from '@did-plc/lib'
4 | import { ServerError } from './error'
5 | import { AppContext } from './context'
6 | 
7 | export const createRouter = (ctx: AppContext): express.Router => {
8 |   const router = express.Router()
9 | 
10 |   router.get('/', async function (req, res) {
11 |     // HTTP temporary redirect to project git repo
12 |     res.redirect(302, 'https://github.com/bluesky-social/did-method-plc')
13 |   })
14 | 
15 |   router.get('/_health', async function (req, res) {
16 |     const { db, version } = ctx
17 |     try {
18 |       await db.healthCheck()
19 |     } catch (err) {
20 |       req.log.error(err, 'failed health check')
21 |       return res.status(503).send({ version, error: 'Service Unavailable' })
22 |     }
23 |     res.send({ version })
24 |   })
25 | 
26 |   // Export ops in the form of paginated json lines
27 |   router.get('/export', async function (req, res) {
28 |     const parsedCount = req.query.count ? parseInt(req.query.count, 10) : 1000
29 |     if (isNaN(parsedCount) || parsedCount < 1) {
30 |       throw new ServerError(400, 'Invalid count parameter')
31 |     }
32 |     const count = Math.min(parsedCount, 1000)
33 |     const after = req.query.after ? new Date(req.query.after) : undefined
34 |     const ops = await ctx.db.exportOps(count, after)
35 |     res.setHeader('content-type', 'application/jsonlines')
36 |     res.status(200)
37 |     for (let i = 0; i < ops.length; i++) {
38 |       if (i > 0) {
39 |         res.write('\n')
40 |       }
41 |       const line = JSON.stringify(ops[i])
42 |       res.write(line)
43 |     }
44 |     res.end()
45 |   })
46 | 
47 |   // Get a DID document
48 |   router.get('/:did', async function (req, res) {
49 |     const { did } = req.params
50 |     const last = await ctx.db.lastOpForDid(did)
51 |     if (!last) {
52 |       throw new ServerError(404, `DID not registered: ${did}`)
53 |     }
54 |     const data = plc.opToData(did, last)
55 |     if (data === null) {
56 |       throw new ServerError(404, `DID not available: ${did}`)
57 |     }
58 |     const doc = await plc.formatDidDoc(data)
59 |     res.type('application/did+ld+json')
60 |     res.send(JSON.stringify(doc))
61 |   })
62 | 
63 |   // Get data for a DID document
64 |   router.get('/:did/data', async function (req, res) {
65 |     const { did } = req.params
66 |     const last = await ctx.db.lastOpForDid(did)
67 |     if (!last) {
68 |       throw new ServerError(404, `DID not registered: ${did}`)
69 |     }
70 |     const data = plc.opToData(did, last)
71 |     if (data === null) {
72 |       throw new ServerError(404, `DID not available: ${did}`)
73 |     }
74 |     res.json(data)
75 |   })
76 | 
77 |   // Get operation log for a DID
78 |   router.get('/:did/log', async function (req, res) {
79 |     const { did } = req.params
80 |     const log = await ctx.db.opsForDid(did)
81 |     if (log.length === 0) {
82 |       throw new ServerError(404, `DID not registered: ${did}`)
83 |     }
84 |     res.json(log)
85 |   })
86 | 
87 |   // Get auditable operation log for a DID (includes nullified ops)
88 |   router.get('/:did/log/audit', async function (req, res) {
89 |     const { did } = req.params
90 |     const ops = await ctx.db.indexedOpsForDid(did, true)
91 |     if (ops.length === 0) {
92 |       throw new ServerError(404, `DID not registered: ${did}`)
93 |     }
94 |     const log = ops.map((op) => ({
95 |       ...op,
96 |       cid: op.cid.toString(),
97 |       createdAt: op.createdAt.toISOString(),
98 |     }))
99 | 
100 |     res.json(log)
101 |   })
102 | 
103 |   // Get the most recent operation in the log for a DID
104 |   router.get('/:did/log/last', async function (req, res) {
105 |     const { did } = req.params
106 |     const last = await ctx.db.lastOpForDid(did)
107 |     if (!last) {
108 |       throw new ServerError(404, `DID not registered: ${did}`)
109 |     }
110 |     res.json(last)
111 |   })
112 | 
113 |   // Update or create a DID doc
114 |   router.post('/:did', async function (req, res) {
115 |     const { did } = req.params
116 |     const op = req.body
117 |     const byteLength = cborEncode(op).byteLength
118 |     if (byteLength > 7500) {
119 |       throw new ServerError(400, 'Operation too large')
120 |     }
121 |     if (!check.is(op, plc.def.compatibleOpOrTombstone)) {
122 |       throw new ServerError(400, `Not a valid operation: ${JSON.stringify(op)}`)
123 |     }
124 |     await ctx.db.validateAndAddOp(did, op)
125 |     res.sendStatus(200)
126 |   })
127 | 
128 |   return router
129 | }
130 | 
131 | export default createRouter
132 | 
--------------------------------------------------------------------------------
/packages/lib/src/data.ts:
--------------------------------------------------------------------------------
1 | import { CID } from 'multiformats/cid'
2 | import { check, cidForCbor, HOUR } from '@atproto/common'
3 | import * as t from './types'
4 | import {
5 |   assureValidCreationOp,
6 |   assureValidOp,
7 |   assureValidSig,
8 |   normalizeOp,
9 | } from './operations'
10 | import {
11 |   ImproperOperationError,
12 |   LateRecoveryError,
13 |   MisorderedOperationError,
14 | } from './error'
15 | 
16 | export const assureValidNextOp = async (
17 |   did: string,
18 |   ops: t.IndexedOperation[],
19 |   proposed: t.CompatibleOpOrTombstone,
20 | ): Promise<{ nullified: CID[]; prev: CID | null }> => {
21 |   if (check.is(proposed, t.def.createOpV1)) {
22 |     const normalized = normalizeOp(proposed)
23 |     await assureValidOp(normalized)
24 |   } else {
25 |     await assureValidOp(proposed)
26 |   }
27 | 
28 |   // special case for account creation
29 |   if (ops.length === 0) {
30 |     await assureValidCreationOp(did, proposed)
31 |     return { nullified: [], prev: null }
32 |   }
33 | 
34 |   const proposedPrev = proposed.prev ? CID.parse(proposed.prev) : undefined
35 |   if (!proposedPrev) {
36 |     throw new MisorderedOperationError()
37 |   }
38 | 
39 |   const indexOfPrev = ops.findIndex((op) => proposedPrev.equals(op.cid))
40 |   if (indexOfPrev < 0) {
41 |     throw new MisorderedOperationError()
42 |   }
43 | 
44 |   // if we are forking history, these are the ops still in the proposed canonical history
45 |   const opsInHistory = ops.slice(0, indexOfPrev + 1)
46 |   const nullified = ops.slice(indexOfPrev + 1)
47 |   const lastOp = opsInHistory.at(-1)
48 |   if (!lastOp) {
49 |     throw new MisorderedOperationError()
50 |   }
51 |   if (check.is(lastOp.operation, t.def.tombstone)) {
52 |     throw new MisorderedOperationError()
53 |   }
54 |   const lastOpNormalized = normalizeOp(lastOp.operation)
55 |   const firstNullified = nullified[0]
56 | 
57 |   // if this does not involve nullification
58 |   if (!firstNullified) {
59 |     await assureValidSig(lastOpNormalized.rotationKeys, proposed)
60 |     return { nullified: [], prev: proposedPrev }
61 |   }
62 | 
63 |   const disputedSigner = await assureValidSig(
64 |     lastOpNormalized.rotationKeys,
65 |     firstNullified.operation,
66 |   )
67 | 
68 |   const indexOfSigner = lastOpNormalized.rotationKeys.indexOf(disputedSigner)
69 |   const morePowerfulKeys = lastOpNormalized.rotationKeys.slice(0, indexOfSigner)
70 | 
71 |   await assureValidSig(morePowerfulKeys, proposed)
72 | 
73 |   // recovery key gets a 72hr window to do historical re-writes
74 |   if (nullified.length > 0) {
75 |     const RECOVERY_WINDOW = 72 * HOUR
76 |     const timeLapsed = Date.now() - firstNullified.createdAt.getTime()
77 |     if (timeLapsed > RECOVERY_WINDOW) {
78 |       throw new LateRecoveryError(timeLapsed)
79 |     }
80 |   }
81 | 
82 |   return {
83 |     nullified: nullified.map((op) => op.cid),
84 |     prev: proposedPrev,
85 |   }
86 | }
87 | 
88 | export const validateOperationLog = async (
89 |   did: string,
90 |   ops: t.CompatibleOpOrTombstone[],
91 | ): Promise<t.DocumentData | null> => {
92 |   // make sure they're all validly formatted operations
93 |   const [first, ...rest] = ops
94 |   if (!check.is(first, t.def.compatibleOp)) {
95 |     throw new ImproperOperationError('incorrect structure', first)
96 |   }
97 |   for (const op of rest) {
98 |     if (!check.is(op, t.def.opOrTombstone)) {
99 |       throw new ImproperOperationError('incorrect structure', op)
100 |     }
101 |   }
102 | 
103 |   // ensure the first op is a valid & signed create operation
104 |   let doc = await assureValidCreationOp(did, first)
105 |   let prev = await cidForCbor(first)
106 | 
107 |   for (let i = 0; i < rest.length; i++) {
108 |     const op = rest[i]
109 |     if (!op.prev || !CID.parse(op.prev).equals(prev)) {
110 |       throw new MisorderedOperationError()
111 |     }
112 |     await assureValidSig(doc.rotationKeys, op)
113 |     const data = opToData(did, op)
114 |     // if tombstone & last op, return null. else throw
115 |     if (data === null) {
116 |       if (i === rest.length - 1) {
117 |         return null
118 |       } else {
119 |         throw new MisorderedOperationError()
120 |       }
121 |     }
122 |     doc = data
123 |     prev = await cidForCbor(op)
124 |   }
125 | 
126 |   return doc
127 | }
128 | 
129 | export const opToData = (
130 |   did: string,
131 |   op: t.CompatibleOpOrTombstone,
132 | ): t.DocumentData | null => {
133 |   if (check.is(op, t.def.tombstone)) {
134 |     return null
135 |   }
136 |   const { verificationMethods, rotationKeys, alsoKnownAs, services } =
137 |     normalizeOp(op)
138 |   return { did, verificationMethods, rotationKeys, alsoKnownAs, services }
139 | }
140 | 
141 | export const getLastOpWithCid = async (
142 |   ops: t.CompatibleOpOrTombstone[],
143 | ): Promise<{ op: t.CompatibleOpOrTombstone; cid: CID }> => {
144 |   const op = ops.at(-1)
145 |   if (!op) {
146 |     throw new Error('log is empty')
147 |   }
148 |   const cid = await cidForCbor(op)
149 |   return { op, cid }
150 | }
151 | 
--------------------------------------------------------------------------------
/packages/lib/src/client.ts:
--------------------------------------------------------------------------------
1 | import { check, cidForCbor } from '@atproto/common'
2 | import { Keypair } from '@atproto/crypto'
3 | import axios, { AxiosError } from 'axios'
4 | import {
5 |   atprotoOp,
6 |   createUpdateOp,
7 |   didForCreateOp,
8 |   tombstoneOp,
9 |   updateAtprotoKeyOp,
10 |   updateHandleOp,
11 |   updatePdsOp,
12 |   updateRotationKeysOp,
13 | } from './operations'
14 | import * as t from './types'
15 | 
16 | export class Client {
17 |   constructor(public url: string) {}
18 | 
19 |   private async makeGetReq(url: string) {
20 |     try {
21 |       const res = await axios.get(url)
22 |       return res.data
23 |     } catch (err) {
24 |       if (!axios.isAxiosError(err)) {
25 |         throw err
26 |       }
27 |       throw PlcClientError.fromAxiosError(err)
28 |     }
29 |   }
30 | 
31 |   async getDocument(did: string): Promise<t.DidDocument> {
32 |     return await this.makeGetReq(`${this.url}/${encodeURIComponent(did)}`)
33 |   }
34 | 
35 |   async getDocumentData(did: string): Promise<t.DocumentData> {
36 |     return await this.makeGetReq(`${this.url}/${encodeURIComponent(did)}/data`)
37 |   }
38 | 
39 |   async getOperationLog(did: string): Promise<t.CompatibleOpOrTombstone[]> {
40 |     return await this.makeGetReq(`${this.url}/${encodeURIComponent(did)}/log`)
41 |   }
42 | 
43 |   async getAuditableLog(did: string): Promise<t.ExportedOp[]> {
44 |     return await this.makeGetReq(
45 |       `${this.url}/${encodeURIComponent(did)}/log/audit`,
46 |     )
47 |   }
48 | 
49 |   postOpUrl(did: string): string {
50 |     return `${this.url}/${encodeURIComponent(did)}`
51 |   }
52 | 
53 |   async getLastOp(did: string): Promise<t.CompatibleOpOrTombstone> {
54 |     return await this.makeGetReq(
55 |       `${this.url}/${encodeURIComponent(did)}/log/last`,
56 |     )
57 |   }
58 | 
59 |   async sendOperation(did: string, op: t.OpOrTombstone) {
60 |     try {
61 |       await axios.post(this.postOpUrl(did), op)
62 |     } catch (err) {
63 |       if (!axios.isAxiosError(err)) {
64 |         throw err
65 |       }
66 |       throw PlcClientError.fromAxiosError(err)
67 |     }
68 |   }
69 | 
70 |   async export(after?: string, count?: number): Promise<t.ExportedOp[]> {
71 |     const url = new URL(`${this.url}/export`)
72 |     if (after) {
73 |       url.searchParams.append('after', after)
74 |     }
75 |     if (count !== undefined) {
76 |       url.searchParams.append('count', count.toString(10))
77 |     }
78 |     const res = await axios.get(url.toString())
79 |     const lines = res.data.split('\n')
80 |     return lines.map((l) => JSON.parse(l))
81 |   }
82 | 
83 |   async createDid(opts: {
84 |     signingKey: string
85 |     handle: string
86 |     pds: string
87 |     rotationKeys: string[]
88 |     signer: Keypair
89 |   }): Promise<string> {
90 |     const op = await atprotoOp({ ...opts, prev: null })
91 |     const did = await didForCreateOp(op)
92 |     await this.sendOperation(did, op)
93 |     return did
94 |   }
95 | 
96 |   async ensureLastOp(did: string) {
97 |     const lastOp = await this.getLastOp(did)
98 |     if (check.is(lastOp, t.def.tombstone)) {
99 |       throw new Error('Cannot apply op to tombstone')
100 |     }
101 |     return lastOp
102 |   }
103 | 
104 |   async updateData(
105 |     did: string,
106 |     signer: Keypair,
107 |     fn: (lastOp: t.UnsignedOperation) => Omit<t.UnsignedOperation, 'prev'>,
108 |   ) {
109 |     const lastOp = await this.ensureLastOp(did)
110 |     const op = await createUpdateOp(lastOp, signer, fn)
111 |     await this.sendOperation(did, op)
112 |   }
113 | 
114 |   async updateAtprotoKey(did: string, signer: Keypair, atprotoKey: string) {
115 |     const lastOp = await this.ensureLastOp(did)
116 |     const op = await updateAtprotoKeyOp(lastOp, signer, atprotoKey)
117 |     await this.sendOperation(did, op)
118 |   }
119 | 
120 |   async updateHandle(did: string, signer: Keypair, handle: string) {
121 |     const lastOp = await this.ensureLastOp(did)
122 |     const op = await updateHandleOp(lastOp, signer, handle)
123 |     await this.sendOperation(did, op)
124 |   }
125 | 
126 |   async updatePds(did: string, signer: Keypair, endpoint: string) {
127 |     const lastOp = await this.ensureLastOp(did)
128 |     const op = await updatePdsOp(lastOp, signer, endpoint)
129 |     await this.sendOperation(did, op)
130 |   }
131 | 
132 |   async updateRotationKeys(did: string, signer: Keypair, keys: string[]) {
133 |     const lastOp = await this.ensureLastOp(did)
134 |     const op = await updateRotationKeysOp(lastOp, signer, keys)
135 |     await this.sendOperation(did, op)
136 |   }
137 | 
138 |   async tombstone(did: string, signer: Keypair) {
139 |     const lastOp = await this.ensureLastOp(did)
140 |     const prev = await cidForCbor(lastOp)
141 |     const op = await tombstoneOp(prev, signer)
142 |     await this.sendOperation(did, op)
143 |   }
144 | 
145 |   async health() {
146 |     return await this.makeGetReq(`${this.url}/_health`)
147 |   }
148 | }
149 | 
150 | export class PlcClientError extends Error {
151 |   constructor(
152 |     public status: number,
153 |     public data: unknown,
154 |     public message: string,
155 |   ) {
156 |     super(message)
157 |   }
158 | 
159 |   static fromAxiosError(err: AxiosError) {
160 |     return new PlcClientError(
161 |       err.response?.status || 500,
162 |       err.response?.data,
163 |       err.message,
164 |     )
165 |   }
166 | }
167 | 
168 | export default Client
169 | 
--------------------------------------------------------------------------------
/packages/lib/tests/recovery.test.ts:
--------------------------------------------------------------------------------
1 | import { cidForCbor, DAY, HOUR } from '@atproto/common'
2 | import { P256Keypair, Keypair, Secp256k1Keypair } from '@atproto/crypto'
3 | import { CID } from 'multiformats/cid'
4 | import { InvalidSignatureError, LateRecoveryError } from '../src'
5 | import * as data from '../src/data'
6 | import * as operations from '../src/operations'
7 | import * as t from '../src/types'
8 | 
9 | describe('plc recovery', () => {
10 |   let signingKey: Secp256k1Keypair
11 |   let rotationKey1: Secp256k1Keypair
12 |   let rotationKey2: P256Keypair
13 |   let rotationKey3: P256Keypair
14 |   let did: string
15 |   const handle = 'alice.example.com'
16 |   const atpPds = 'https://example.com'
17 | 
18 |   let log: t.IndexedOperation[] = []
19 | 
20 |   let createCid: CID
21 | 
22 |   beforeAll(async () => {
23 |     signingKey = await Secp256k1Keypair.create()
24 |     rotationKey1 = await Secp256k1Keypair.create()
25 |     rotationKey2 = await P256Keypair.create()
26 |     rotationKey3 = await P256Keypair.create()
27 |   })
28 | 
29 |   const formatIndexed = async (
30 |     op: t.Operation,
31 |   ): Promise<t.IndexedOperation> => {
32 |     const cid = await cidForCbor(op)
33 | 
34 |     return {
35 |       did,
36 |       operation: op,
37 |       cid,
38 |       nullified: false,
39 |       createdAt: new Date(),
40 |     }
41 |   }
42 | 
43 |   const signOpForKeys = async (
44 |     keys: Keypair[],
45 |     prev: CID | null,
46 |     signer: Keypair,
47 |     otherChanges: Partial<t.UnsignedOperation> = {},
48 |   ) => {
49 |     const unsigned = {
50 |       ...operations.formatAtprotoOp({
51 |         signingKey: signingKey.did(),
52 |         rotationKeys: keys.map((k) => k.did()),
53 |         handle,
54 |         pds: atpPds,
55 |         prev,
56 |       }),
57 |       ...otherChanges,
58 |     }
59 |     const op = await operations.addSignature(unsigned, signer)
60 |     const indexed = await formatIndexed(op)
61 |     return { op, indexed }
62 |   }
63 | 
64 |   it('creates an op log with rotation', async () => {
65 |     const create = await signOpForKeys(
66 |       [rotationKey1, rotationKey2, rotationKey3],
67 |       null,
68 |       rotationKey1,
69 |     )
70 |     createCid = create.indexed.cid
71 | 
72 |     log.push({
73 |       ...create.indexed,
74 |       createdAt: new Date(Date.now() - 7 * DAY),
75 |     })
76 | 
77 |     // key 3 tries to usurp control
78 |     const rotate = await signOpForKeys([rotationKey3], createCid, rotationKey3)
79 | 
80 |     log.push({
81 |       ...rotate.indexed,
82 |       createdAt: new Date(Date.now() - DAY),
83 |     })
84 | 
85 |     // and does some additional ops
86 |     const another = await signOpForKeys(
87 |       [rotationKey3],
88 |       rotate.indexed.cid,
89 |       rotationKey3,
90 |       { alsoKnownAs: ['newhandle.test'] },
91 |     )
92 | 
93 |     log.push({
94 |       ...another.indexed,
95 |       createdAt: new Date(Date.now() - HOUR),
96 |     })
97 |   })
98 | 
99 |   it('allows a rotation key with higher authority to rewrite history', async () => {
100 |     // key 2 asserts control over key 3
101 |     const rotate = await signOpForKeys([rotationKey2], createCid, rotationKey2)
102 | 
103 |     const res = await data.assureValidNextOp(did, log, rotate.op)
104 |     expect(res.nullified.length).toBe(2)
105 |     expect(res.nullified[0].equals(log[1].cid)).toBeTruthy()
106 |     expect(res.nullified[1].equals(log[2].cid)).toBeTruthy()
107 |     expect(res.prev?.equals(createCid)).toBeTruthy()
108 | 
109 |     log = [log[0], rotate.indexed]
110 |   })
111 | 
112 |   it('does not allow the lower authority key to take control back', async () => {
113 |     const rotate = await signOpForKeys([rotationKey3], createCid, rotationKey3)
114 |     await expect(data.assureValidNextOp(did, log, rotate.op)).rejects.toThrow(
115 |       InvalidSignatureError,
116 |     )
117 |   })
118 | 
119 |   it('allows a rotation key with even higher authority to rewrite history', async () => {
120 |     const rotate = await signOpForKeys([rotationKey1], createCid, rotationKey1)
121 | 
122 |     const res = await data.assureValidNextOp(did, log, rotate.op)
123 |     expect(res.nullified.length).toBe(1)
124 |     expect(res.nullified[0].equals(log[1].cid)).toBeTruthy()
125 |     expect(res.prev?.equals(createCid)).toBeTruthy()
126 | 
127 |     log = [log[0], rotate.indexed]
128 |   })
129 | 
130 |   it('does not allow either invalidated key to take control back', async () => {
131 |     const rotate1 = await signOpForKeys([rotationKey3], createCid, rotationKey3)
132 |     await expect(data.assureValidNextOp(did, log, rotate1.op)).rejects.toThrow(
133 |       InvalidSignatureError,
134 |     )
135 | 
136 |     const rotate2 = await signOpForKeys([rotationKey2], createCid, rotationKey2)
137 |     await expect(data.assureValidNextOp(did, log, rotate2.op)).rejects.toThrow(
138 |       InvalidSignatureError,
139 |     )
140 |   })
141 | 
142 |   it('does not allow recovery outside of 72 hrs', async () => {
143 |     const rotate = await signOpForKeys([rotationKey3], createCid, rotationKey3)
144 |     const timeOutOps = [
145 |       log[0],
146 |       {
147 |         ...rotate.indexed,
148 |         createdAt: new Date(Date.now() - 4 * DAY),
149 |       },
150 |     ]
151 |     const rotateBack = await signOpForKeys(
152 |       [rotationKey2],
153 |       createCid,
154 |       rotationKey2,
155 |     )
156 |     await expect(
157 |       data.assureValidNextOp(did, timeOutOps, rotateBack.op),
158 |     ).rejects.toThrow(LateRecoveryError)
159 |   })
160 | 
161 |   it('allows recovery from a tombstoned DID', async () => {
162 |     const tombstone = await operations.tombstoneOp(createCid, rotationKey2)
163 |     const cid = await cidForCbor(tombstone)
164 |     const tombstoneOps = [
165 |       log[0],
166 |       {
167 |         did,
168 |         operation: tombstone,
169 |         cid,
170 |         nullified: false,
171 |         createdAt: new Date(),
172 |       },
173 |     ]
174 |     const rotateBack = await signOpForKeys(
175 |       [rotationKey1],
176 |       createCid,
177 |       rotationKey1,
178 |     )
179 |     const result = await data.assureValidNextOp(
180 |       did,
181 |       tombstoneOps,
182 |       rotateBack.op,
183 |     )
184 |     expect(result.nullified.length).toBe(1)
185 |     expect(result.nullified[0].equals(cid)).toBeTruthy()
186 |     expect(result.prev?.equals(createCid)).toBeTruthy()
187 |   })
188 | })
189 | 
--------------------------------------------------------------------------------
/packages/lib/tests/data.test.ts:
--------------------------------------------------------------------------------
1 | import { check, cidForCbor } from '@atproto/common'
2 | import { P256Keypair, Secp256k1Keypair } from '@atproto/crypto'
3 | import {
4 |   GenesisHashError,
5 |   ImproperOperationError,
6 |   InvalidSignatureError,
7 |   MisorderedOperationError,
8 | } from '../src'
9 | import * as data from '../src/data'
10 | import * as operations from '../src/operations'
11 | import * as t from '../src/types'
12 | 
13 | describe('plc did data', () => {
14 |   const ops: t.Operation[] = []
15 | 
16 |   let signingKey: Secp256k1Keypair
17 |   let rotationKey1: Secp256k1Keypair
18 |   let rotationKey2: P256Keypair
19 |   let did: string
20 |   let handle = 'at://alice.example.com'
21 |   let atpPds = 'https://example.com'
22 | 
23 |   let oldRotationKey1: Secp256k1Keypair
24 | 
25 |   beforeAll(async () => {
26 |     signingKey = await Secp256k1Keypair.create()
27 |     rotationKey1 = await Secp256k1Keypair.create()
28 |     rotationKey2 = await P256Keypair.create()
29 |   })
30 | 
31 |   const lastOp = () => {
32 |     const lastOp = ops.at(-1)
33 |     if (!lastOp) {
34 |       throw new Error('expected an op on log')
35 |     }
36 |     return lastOp
37 |   }
38 | 
39 |   const verifyDoc = (doc: t.DocumentData | null) => {
40 |     if (!doc) {
41 |       throw new Error('expected doc')
42 |     }
43 |     expect(doc.did).toEqual(did)
44 |     expect(doc.verificationMethods).toEqual({ atproto: signingKey.did() })
45 |     expect(doc.rotationKeys).toEqual([rotationKey1.did(), rotationKey2.did()])
46 |     expect(doc.alsoKnownAs).toEqual([handle])
47 |     expect(doc.services).toEqual({
48 |       atproto_pds: {
49 |         type: 'AtprotoPersonalDataServer',
50 |         endpoint: atpPds,
51 |       },
52 |     })
53 |   }
54 | 
55 |   it('creates a valid create op', async () => {
56 |     const createOp = await operations.atprotoOp({
57 |       signingKey: signingKey.did(),
58 |       rotationKeys: [rotationKey1.did(), rotationKey2.did()],
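      // rotation keys are ordered by authority: in a recovery, a key earlier in
      // this list can nullify operations signed by a later one (see the
      // morePowerfulKeys logic in packages/lib/src/data.ts)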
59 |       handle,
60 |       pds: atpPds,
61 |       prev: null,
62 |       signer: rotationKey1,
63 |     })
64 |     const isValid = check.is(createOp, t.def.operation)
65 |     expect(isValid).toBeTruthy()
66 |     ops.push(createOp)
67 |     did = await operations.didForCreateOp(createOp)
68 |   })
69 | 
70 |   it('parses an operation log with no updates', async () => {
71 |     const doc = await data.validateOperationLog(did, ops)
72 |     verifyDoc(doc)
73 |   })
74 | 
75 |   it('updates handle', async () => {
76 |     const noPrefix = 'ali.exampl2.com'
77 |     handle = `at://${noPrefix}`
78 |     const op = await operations.updateHandleOp(lastOp(), rotationKey1, noPrefix)
79 |     ops.push(op)
80 | 
81 |     const doc = await data.validateOperationLog(did, ops)
82 |     verifyDoc(doc)
83 |   })
84 | 
85 |   it('updates atpPds', async () => {
86 |     const noPrefix = 'example2.com'
87 |     atpPds = `https://${noPrefix}`
88 |     const op = await operations.updatePdsOp(lastOp(), rotationKey1, noPrefix)
89 |     ops.push(op)
90 | 
91 |     const doc = await data.validateOperationLog(did, ops)
92 |     verifyDoc(doc)
93 |   })
94 | 
95 |   it('rotates signingKey', async () => {
96 |     const newSigningKey = await Secp256k1Keypair.create()
97 |     const op = await operations.updateAtprotoKeyOp(
98 |       lastOp(),
99 |       rotationKey1,
100 |       newSigningKey.did(),
101 |     )
102 |     ops.push(op)
103 | 
104 |     signingKey = newSigningKey
105 | 
106 |     const doc = await data.validateOperationLog(did, ops)
107 |     verifyDoc(doc)
108 |   })
109 | 
110 |   it('rotates rotation keys', async () => {
111 |     const newRotationKey = await Secp256k1Keypair.create()
112 |     const op = await operations.updateRotationKeysOp(lastOp(), rotationKey1, [
113 |       newRotationKey.did(),
114 |       rotationKey2.did(),
115 |     ])
116 |     ops.push(op)
117 | 
118 |     oldRotationKey1 = rotationKey1
119 |     rotationKey1 = newRotationKey
120 | 
121 |     const doc = await data.validateOperationLog(did, ops)
122 |     verifyDoc(doc)
123 |   })
124 | 
125 |   it('no longer allows operations from old rotation key', async () => {
126 |     const op = await operations.updateHandleOp(
127 |       lastOp(),
128 |       oldRotationKey1,
129 |       'at://bob',
130 |     )
131 |     await expect(data.validateOperationLog(did, [...ops, op])).rejects.toThrow(
132 |       InvalidSignatureError,
133 |     )
134 |   })
135 | 
136 |   it('does not allow operations from the signingKey', async () => {
137 |     const op = await operations.updateHandleOp(lastOp(), signingKey, 'at://bob')
138 |     await expect(data.validateOperationLog(did, [...ops, op])).rejects.toThrow(
139 |       InvalidSignatureError,
140 |     )
141 |   })
142 | 
143 |   it('allows for operations from either rotation key', async () => {
144 |     const newHandle = 'at://ali.example.com'
145 |     const op = await operations.updateHandleOp(
146 |       lastOp(),
147 |       rotationKey2,
148 |       newHandle,
149 |     )
150 |     ops.push(op)
151 |     handle = newHandle
152 |     const doc = await data.validateOperationLog(did, ops)
153 |     verifyDoc(doc)
154 |   })
155 | 
156 |   it('allows tombstoning a DID', async () => {
157 |     const last = await data.getLastOpWithCid(ops)
158 |     const op = await operations.tombstoneOp(last.cid, rotationKey1)
159 |     const doc = await data.validateOperationLog(did, [...ops, op])
160 |     expect(doc).toBe(null)
161 |   })
162 | 
163 |   it('requires operations to be in order', async () => {
164 |     const op = await operations.updateHandleOp(
165 |       ops[ops.length - 2],
166 |       rotationKey1,
167 |       'at://bob.test',
168 |     )
169 |     await expect(data.validateOperationLog(did, [...ops, op])).rejects.toThrow(
170 |       MisorderedOperationError,
171 |     )
172 |   })
173 | 
174 |   it('does not allow a create operation in the middle of the log', async () => {
175 |     const op = await operations.atprotoOp({
176 |       signingKey: signingKey.did(),
177 |       rotationKeys: [rotationKey1.did(), rotationKey2.did()],
178 |       handle,
179 |       pds: atpPds,
180 |       prev: null,
181 |       signer: rotationKey1,
182 |     })
183 |     await expect(data.validateOperationLog(did, [...ops, op])).rejects.toThrow(
184 |       MisorderedOperationError,
185 |     )
186 |   })
187 | 
188 |   it('does not allow a tombstone in the middle of the log', async () => {
189 |     const prev = await cidForCbor(ops[ops.length - 2])
190 |     const tombstone = await operations.tombstoneOp(prev, rotationKey1)
191 |     await expect(
192 |       data.validateOperationLog(did, [
193 |         ...ops.slice(0, ops.length - 1),
194 |         tombstone,
195 |         ops[ops.length - 1],
196 |       ]),
197 |     ).rejects.toThrow(MisorderedOperationError)
198 |   })
199 | 
200 |   it('requires that the did is the hash of the genesis op', async () => {
201 |     const rest = ops.slice(1)
202 |     await expect(data.validateOperationLog(did, rest)).rejects.toThrow(
203 |       GenesisHashError,
204 |     )
205 |   })
206 | 
207 |   it('requires that the log starts with a create op (no prev)', async () => {
208 |     const rest = ops.slice(1)
209 |     const expectedDid = await operations.didForCreateOp(rest[0])
210 |     await expect(data.validateOperationLog(expectedDid, rest)).rejects.toThrow(
211 |       ImproperOperationError,
212 |     )
213 |   })
214 | })
215 | 
--------------------------------------------------------------------------------
/packages/server/src/db/index.ts:
--------------------------------------------------------------------------------
1 | import { Kysely, Migrator, PostgresDialect, sql } from 'kysely'
2 | import { Pool as PgPool, types as pgTypes } from 'pg'
3 | import { CID } from 'multiformats/cid'
4 | import { cidForCbor } from '@atproto/common'
5 | import * as plc from '@did-plc/lib'
6 | import { ServerError } from '../error'
7 | import * as migrations from '../migrations'
8 | import { DatabaseSchema, PlcDatabase } from './types'
9 | import MockDatabase from './mock'
10 | 
11 | export * from './mock'
12 | export * from './types'
13 | 
14 | export class Database implements PlcDatabase {
15 |   migrator: Migrator
16 |   constructor(public db: Kysely<DatabaseSchema>, public schema?: string) {
17 |     this.migrator = new Migrator({
18 |       db,
19 |       migrationTableSchema: schema,
20 |       provider: {
21 |         async getMigrations() {
22 |           return migrations
23 |         },
24 |       },
25 |     })
26 |   }
27 | 
28 |   static postgres(opts: { url: string; schema?: string }): Database {
29 |     const { url, schema } = opts
30 |     const pool = new PgPool({ connectionString: url })
31 | 
32 |     // Select count(*) and other pg bigints as js integer
33 |     pgTypes.setTypeParser(pgTypes.builtins.INT8, (n) => parseInt(n, 10))
34 | 
35 |     // Setup schema usage, primarily for test parallelism (each test suite runs in its own pg schema)
36 |     if (schema !== undefined) {
37 |       if (!/^[a-z_]+$/i.test(schema)) {
38 |         throw new Error(
39 |           `Postgres schema must only contain [A-Za-z_]: ${schema}`,
40 |         )
41 |       }
42 |       pool.on('connect', (client) =>
43 |         // Shared objects such as extensions will go in the public schema
44 |         client.query(`SET search_path TO "${schema}",public`),
45 |       )
46 |     }
47 | 
48 |     const db = new Kysely<DatabaseSchema>({
49 |       dialect: new PostgresDialect({ pool }),
50 |     })
51 | 
52 |     return new Database(db, schema)
53 |   }
54 | 
55 |   static mock(): MockDatabase {
56 |     return new MockDatabase()
57 |   }
58 | 
59 |   async close(): Promise<void> {
60 |     await this.db.destroy()
61 |   }
62 | 
63 |   async healthCheck(): Promise<void> {
64 |     await sql`select 1`.execute(this.db)
65 |   }
66 | 
67 |   async migrateToOrThrow(migration: string) {
68 |     if (this.schema !== undefined) {
69 |       await this.db.schema.createSchema(this.schema).ifNotExists().execute()
70 |     }
71 |     const { error, results } = await this.migrator.migrateTo(migration)
72 |     if (error) {
73 |       throw error
74 |     }
75 |     if (!results) {
76 |       throw new Error('An unknown failure occurred while migrating')
77 |     }
78 |     return results
79 |   }
80 | 
81 |   async migrateToLatestOrThrow() {
82 |     if (this.schema !== undefined) {
83 |       await this.db.schema.createSchema(this.schema).ifNotExists().execute()
84 |     }
85 |     const { error, results } = await this.migrator.migrateToLatest()
86 |     if (error) {
87 |       throw error
88 |     }
89 |     if (!results) {
90 |       throw new Error('An unknown failure occurred while migrating')
91 |     }
92 |     return results
93 |   }
94 | 
95 |   async validateAndAddOp(
96 |     did: string,
97 |     proposed: plc.CompatibleOpOrTombstone,
98 |   ): Promise<void> {
99 |     const ops = await this.indexedOpsForDid(did)
100 |     // throws if invalid
101 |     const { nullified, prev } = await plc.assureValidNextOp(did, ops, proposed)
102 |     const cid = await cidForCbor(proposed)
103 | 
104 |     await this.db.transaction().execute(async (tx) => {
105 |       // grab a row lock on user table
106 |       const userLock = await tx
107 |         .selectFrom('dids')
108 |         .forUpdate()
109 |         .selectAll()
110 |         .where('did', '=', did)
111 |         .executeTakeFirst()
112 | 
113 |       if (!userLock) {
114 |         await tx.insertInto('dids').values({ did }).execute()
115 |       }
116 | 
117 |       await tx
118 |         .insertInto('operations')
119 |         .values({
120 |           did,
121 |           operation: proposed,
122 |           cid: cid.toString(),
123 |           nullified: false,
124 |         })
125 |         .execute()
126 | 
127 |       if (nullified.length > 0) {
128 |         const nullifiedStrs = nullified.map((cid) => cid.toString())
129 |         await tx
130 |           .updateTable('operations')
131 |           .set({ nullified: true })
132 |           .where('did', '=', did)
133 |           .where('cid', 'in', nullifiedStrs)
134 |           .execute()
135 |       }
136 | 
137 |       // verify that the 2nd to last tx matches the proposed prev
138 |       // otherwise rollback to prevent forks in history
139 |       const mostRecent = await tx
140 |         .selectFrom('operations')
141 |         .select('cid')
142 |         .where('did', '=', did)
143 |         .where('nullified', '=', false)
144 |         .orderBy('createdAt', 'desc')
145 |         .limit(2)
146 |         .execute()
147 |       const isMatch =
148 |         (prev === null && !mostRecent[1]) ||
149 |         (prev && prev.equals(CID.parse(mostRecent[1].cid)))
150 |       if (!isMatch) {
151 |         throw new ServerError(
152 |           409,
153 |           `Proposed prev does not match the most recent operation: ${mostRecent?.toString()}`,
154 |         )
155 |       }
156 |     })
157 |   }
158 | 
159 |   async mostRecentCid(did: string, notIncluded: CID[]): Promise<CID | null> {
160 |     const notIncludedStr = notIncluded.map((cid) => cid.toString())
161 | 
162 |     const found = await this.db
163 |       .selectFrom('operations')
164 |       .select('cid')
165 |       .where('did', '=', did)
166 |       .where('nullified', '=', false)
167 |       .where('cid', 'not in', notIncludedStr)
168 |       .orderBy('createdAt', 'desc')
169 |       .executeTakeFirst()
170 |     return found ? CID.parse(found.cid) : null
171 |   }
172 | 
173 |   async opsForDid(did: string): Promise<plc.CompatibleOpOrTombstone[]> {
174 |     const ops = await this.indexedOpsForDid(did)
175 |     return ops.map((op) => op.operation)
176 |   }
177 | 
178 |   async indexedOpsForDid(
179 |     did: string,
180 |     includeNullified = false,
181 |   ): Promise<plc.IndexedOperation[]> {
182 |     let builder = this.db
183 |       .selectFrom('operations')
184 |       .selectAll()
185 |       .where('did', '=', did)
186 |       .orderBy('createdAt', 'asc')
187 |     if (!includeNullified) {
188 |       builder = builder.where('nullified', '=', false)
189 |     }
190 |     const res = await builder.execute()
191 |     return res.map((row) => ({
192 |       did: row.did,
193 |       operation: row.operation,
194 |       cid: CID.parse(row.cid),
195 |       nullified: row.nullified,
196 |       createdAt: row.createdAt,
197 |     }))
198 |   }
199 | 
200 |   async lastOpForDid(did: string): Promise<plc.CompatibleOpOrTombstone | null> {
201 |     const res = await this.db
202 |       .selectFrom('operations')
203 |       .selectAll()
204 |       .where('did', '=', did)
205 |       .where('nullified', '=', false)
206 |       .orderBy('createdAt', 'desc')
207 |       .limit(1)
208 |       .executeTakeFirst()
209 |     return res?.operation ?? null
210 |   }
211 | 
212 |   async exportOps(count: number, after?: Date): Promise<plc.ExportedOp[]> {
213 |     let builder = this.db
214 |       .selectFrom('operations')
215 |       .selectAll()
216 |       .orderBy('createdAt', 'asc')
217 |       .limit(count)
218 |     if (after) {
219 |       builder = builder.where('createdAt', '>', after)
220 |     }
221 |     const res = await builder.execute()
222 |     return res.map((row) => ({
223 |       ...row,
224 |       createdAt: row.createdAt.toISOString(),
225 |     }))
226 |   }
227 | }
228 | 
229 | export default Database
230 | 
--------------------------------------------------------------------------------
/packages/server/tests/server.test.ts:
--------------------------------------------------------------------------------
1 | import { P256Keypair } from '@atproto/crypto'
2 | import * as plc from '@did-plc/lib'
3 | import { CloseFn, runTestServer } from './_util'
4 | import { check } from '@atproto/common'
5 | import { Database } from '../src'
6 | import { didForCreateOp, PlcClientError } from '@did-plc/lib'
7 | 
8 | describe('PLC server', () => {
9 |   let handle = 'at://alice.example.com'
10 |   let atpPds = 'https://example.com'
11 | 
12 |   let close: CloseFn
13 |   let db: Database
14 |   let client: plc.Client
15 | 
16 |   let signingKey: P256Keypair
17 |   let rotationKey1: P256Keypair
18 |   let rotationKey2: P256Keypair
19 | 
20 |   let did: string
21 | 
22 |   beforeAll(async () => {
23 |     const server = await runTestServer({
24 |       dbSchema: 'server',
25 |     })
26 | 
27 |     db = server.db
28 |     close = server.close
29 |     client = new plc.Client(server.url)
30 |     signingKey = await P256Keypair.create()
31 |     rotationKey1 = await P256Keypair.create()
32 |     rotationKey2 = await P256Keypair.create()
33 |   })
34 | 
35 |   afterAll(async () => {
36 |     if (close) {
37 |       await close()
38 |     }
39 |   })
40 | 
41 |   const verifyDoc = (doc: plc.DocumentData | null) => {
42 |     if (!doc) {
43 |       throw new Error('expected doc')
44 |     }
45 |     expect(doc.did).toEqual(did)
46 |     expect(doc.verificationMethods).toEqual({ atproto: signingKey.did() })
47 |     expect(doc.rotationKeys).toEqual([rotationKey1.did(), rotationKey2.did()])
48 |     expect(doc.alsoKnownAs).toEqual([handle])
49 |     expect(doc.services).toEqual({
50 |       atproto_pds: {
51 |         type: 'AtprotoPersonalDataServer',
52 |         endpoint: atpPds,
53 |       },
54 |     })
55 |   }
56 | 
57 |   it('registers a did', async () => {
58 |     did = await client.createDid({
59 |       signingKey: signingKey.did(),
60 |       rotationKeys: [rotationKey1.did(), rotationKey2.did()],
61 |       handle,
62 |       pds: atpPds,
63 |       signer: rotationKey1,
64 |     })
65 |   })
66 | 
67 |   it('retrieves did doc data', async () => {
68 |     const doc = await client.getDocumentData(did)
69 |     verifyDoc(doc)
70 |   })
71 | 
72 |   it('can perform some updates', async () => {
73 |     const newRotationKey = await P256Keypair.create()
74 |     signingKey = await P256Keypair.create()
75 |     handle = 'at://ali.example2.com'
76 |     atpPds = 'https://example2.com'
77 | 
78 |     await client.updateAtprotoKey(did, rotationKey1, signingKey.did())
79 |     await client.updateRotationKeys(did, rotationKey1, [
80 |       newRotationKey.did(),
81 |       rotationKey2.did(),
82 |     ])
83 |     rotationKey1 = newRotationKey
84 | 
85 |     await client.updateHandle(did, rotationKey1, handle)
86 |     await client.updatePds(did, rotationKey1, atpPds)
87 | 
88 |     const doc = await client.getDocumentData(did)
89 |     verifyDoc(doc)
90 |   })
91 | 
92 |   it('does not allow key types that we do not support', async () => {
93 |     // an ed25519 key which we don't yet support
94 |     const newSigningKey =
95 |       'did:key:z6MkjwbBXZnFqL8su24wGL2Fdjti6GSLv9SWdYGswfazUPm9'
96 | 
97 |     const promise = client.updateAtprotoKey(did, rotationKey1, newSigningKey)
98 |     await expect(promise).rejects.toThrow(PlcClientError)
99 | 
100 |     const promise2 = client.updateRotationKeys(did, rotationKey1, [
101 |       newSigningKey,
102 |     ])
103 |     await expect(promise2).rejects.toThrow(PlcClientError)
104 |   })
105 | 
106 |   it('retrieves the operation log', async () => {
107 |     const doc = await client.getDocumentData(did)
108 |     const ops = await client.getOperationLog(did)
109 |     const computedDoc = await plc.validateOperationLog(did, ops)
110 |     expect(computedDoc).toEqual(doc)
111 |   })
112 | 
113 |   it('rejects on bad updates', async () => {
114 |     const newKey = await P256Keypair.create()
115 |     const operation = client.updateAtprotoKey(did, newKey, newKey.did())
116 |     await expect(operation).rejects.toThrow()
117 |   })
118 | 
119 |   it('allows for recovery through a forked history', async () => {
120 |     const attackerKey = await P256Keypair.create()
121 |     await client.updateRotationKeys(did, rotationKey2, [attackerKey.did()])
122 | 
123 |     const newKey = await P256Keypair.create()
124 |     const ops = await client.getOperationLog(did)
125 |     const forkPoint = ops.at(-2)
126 |     if (!check.is(forkPoint, plc.def.operation)) {
127 |       throw new Error('Could not find fork point')
128 |     }
129 |     const op = await plc.updateRotationKeysOp(forkPoint, rotationKey1, [
130 |       rotationKey1.did(),
131 |       newKey.did(),
132 |     ])
133 |     await client.sendOperation(did, op)
134 | 
135 |     rotationKey2 = newKey
136 | 
137 |     const doc = await client.getDocumentData(did)
138 |     verifyDoc(doc)
139 |   })
140 | 
141 |   it('retrieves the auditable operation log', async () => {
142 |     const log = await client.getOperationLog(did)
143 |     const auditable = await client.getAuditableLog(did)
144 |     // has one nullified op
145 |     expect(auditable.length).toBe(log.length + 1)
146 |     expect(auditable.filter((op) => op.nullified).length).toBe(1)
147 |     expect(auditable.at(-2)?.nullified).toBe(true)
148 |     expect(
149 |       auditable.every((op) => check.is(op, plc.def.exportedOp)),
150 |     ).toBeTruthy()
151 |   })
152 | 
153 |   it('retrieves the did doc', async () => {
154 |     const data = await client.getDocumentData(did)
155 |     const doc = await client.getDocument(did)
156 |     expect(doc).toEqual(plc.formatDidDoc(data))
157 |   })
158 | 
159 |   it('handles concurrent requests to many docs', async () => {
160 |     const COUNT = 20
161 |     const keys: P256Keypair[] = []
162 |     for (let i = 0; i < COUNT; i++) {
163 |       keys.push(await
P256Keypair.create()) 164 | } 165 | await Promise.all( 166 | keys.map(async (key, index) => { 167 | await client.createDid({ 168 | signingKey: key.did(), 169 | rotationKeys: [key.did()], 170 | handle: `user${index}`, 171 | pds: `example.com`, 172 | signer: key, 173 | }) 174 | }), 175 | ) 176 | }) 177 | 178 | it('resolves races into a coherent history with no forks', async () => { 179 | const COUNT = 20 180 | const keys: P256Keypair[] = [] 181 | for (let i = 0; i < COUNT; i++) { 182 | keys.push(await P256Keypair.create()) 183 | } 184 | // const prev = await client.getPrev(did) 185 | 186 | let successes = 0 187 | let failures = 0 188 | await Promise.all( 189 | keys.map(async (key) => { 190 | try { 191 | await client.updateAtprotoKey(did, rotationKey1, key.did()) 192 | successes++ 193 | } catch (err) { 194 | failures++ 195 | } 196 | }), 197 | ) 198 | expect(successes).toBe(1) 199 | expect(failures).toBe(19) 200 | 201 | const ops = await client.getOperationLog(did) 202 | await plc.validateOperationLog(did, ops) 203 | }) 204 | 205 | it('tombstones the did', async () => { 206 | await client.tombstone(did, rotationKey1) 207 | 208 | const promise = client.getDocument(did) 209 | await expect(promise).rejects.toThrow(PlcClientError) 210 | const promise2 = client.getDocumentData(did) 211 | await expect(promise2).rejects.toThrow(PlcClientError) 212 | }) 213 | 214 | it('exports the data set', async () => { 215 | const data = await client.export() 216 | expect(data.every((row) => check.is(row, plc.def.exportedOp))).toBeTruthy() 217 | expect(data.length).toBe(29) 218 | for (let i = 1; i < data.length; i++) { 219 | expect(data[i].createdAt >= data[i - 1].createdAt).toBeTruthy() 220 | } 221 | }) 222 | 223 | it('still allows create v1s', async () => { 224 | const createV1 = await plc.deprecatedSignCreate( 225 | { 226 | type: 'create', 227 | signingKey: signingKey.did(), 228 | recoveryKey: rotationKey1.did(), 229 | handle, 230 | service: atpPds, 231 | prev: null, 232 | }, 233 | signingKey, 234 | ) 235 | const did = await didForCreateOp(createV1) 236 | await client.sendOperation(did, createV1 as any) 237 | }) 238 | 239 | it('healthcheck succeeds when database is available.', async () => { 240 | const res = await client.health() 241 | expect(res).toEqual({ version: '0.0.0' }) 242 | }) 243 | 244 | it('healthcheck fails when database is unavailable.', async () => { 245 | await db.db.destroy() 246 | let error: PlcClientError 247 | try { 248 | await client.health() 249 | throw new Error('Healthcheck should have failed') 250 | } catch (err) { 251 | if (err instanceof PlcClientError) { 252 | error = err 253 | } else { 254 | throw err 255 | } 256 | } 257 | expect(error.status).toEqual(503) 258 | expect(error.data).toEqual({ 259 | version: '0.0.0', 260 | error: 'Service Unavailable', 261 | }) 262 | }) 263 | }) 264 | -------------------------------------------------------------------------------- /packages/lib/src/operations.ts: -------------------------------------------------------------------------------- 1 | import * as cbor from '@ipld/dag-cbor' 2 | import { CID } from 'multiformats/cid' 3 | import * as uint8arrays from 'uint8arrays' 4 | import { Keypair, parseDidKey, sha256, verifySignature } from '@atproto/crypto' 5 | import { check, cidForCbor } from '@atproto/common' 6 | import * as t from './types' 7 | import { 8 | GenesisHashError, 9 | ImproperOperationError, 10 | InvalidSignatureError, 11 | MisorderedOperationError, 12 | UnsupportedKeyError, 13 | } from './error' 14 | 15 | export const didForCreateOp = 
async (op: t.CompatibleOp) => {
16 |   const hashOfGenesis = await sha256(cbor.encode(op))
17 |   const hashB32 = uint8arrays.toString(hashOfGenesis, 'base32')
18 |   const truncated = hashB32.slice(0, 24)
19 |   return `did:plc:${truncated}`
20 | }
21 | 
22 | // Operations formatting
23 | // ---------------------------
24 | 
25 | export const formatAtprotoOp = (opts: {
26 |   signingKey: string
27 |   handle: string
28 |   pds: string
29 |   rotationKeys: string[]
30 |   prev: CID | null
31 | }): t.UnsignedOperation => {
32 |   return {
33 |     type: 'plc_operation',
34 |     verificationMethods: {
35 |       atproto: opts.signingKey,
36 |     },
37 |     rotationKeys: opts.rotationKeys,
38 |     alsoKnownAs: [ensureAtprotoPrefix(opts.handle)],
39 |     services: {
40 |       atproto_pds: {
41 |         type: 'AtprotoPersonalDataServer',
42 |         endpoint: ensureHttpPrefix(opts.pds),
43 |       },
44 |     },
45 |     prev: opts.prev?.toString() ?? null,
46 |   }
47 | }
48 | 
49 | export const atprotoOp = async (opts: {
50 |   signingKey: string
51 |   handle: string
52 |   pds: string
53 |   rotationKeys: string[]
54 |   prev: CID | null
55 |   signer: Keypair
56 | }) => {
57 |   return addSignature(formatAtprotoOp(opts), opts.signer)
58 | }
59 | 
60 | export const createOp = async (opts: {
61 |   signingKey: string
62 |   handle: string
63 |   pds: string
64 |   rotationKeys: string[]
65 |   signer: Keypair
66 | }): Promise<{ op: t.Operation; did: string }> => {
67 |   const op = await atprotoOp({ ...opts, prev: null })
68 |   const did = await didForCreateOp(op)
69 |   return { op, did }
70 | }
71 | 
72 | export const createUpdateOp = async (
73 |   lastOp: t.CompatibleOp,
74 |   signer: Keypair,
75 |   fn: (normalized: t.UnsignedOperation) => Omit<t.UnsignedOperation, 'prev'>,
76 | ): Promise<t.Operation> => {
77 |   const prev = await cidForCbor(lastOp)
78 |   // omit sig so it doesn't accidentally make its way into the next operation
79 |   // eslint-disable-next-line @typescript-eslint/no-unused-vars
80 |   const { sig, ...normalized } = normalizeOp(lastOp)
81 |   const unsigned = await fn(normalized)
82 |   return addSignature(
83 |     {
84 |       ...unsigned,
85 |       prev: prev.toString(),
86 |     },
87 |     signer,
88 |   )
89 | }
90 | 
91 | export const createAtprotoUpdateOp = async (
92 |   lastOp: t.CompatibleOp,
93 |   signer: Keypair,
94 |   opts: Partial<{
95 |     signingKey: string
96 |     handle: string
97 |     pds: string
98 |     rotationKeys: string[]
99 |   }>,
100 | ) => {
101 |   return createUpdateOp(lastOp, signer, (normalized) => {
102 |     const updated = { ...normalized }
103 |     if (opts.signingKey) {
104 |       updated.verificationMethods = {
105 |         ...normalized.verificationMethods,
106 |         atproto: opts.signingKey,
107 |       }
108 |     }
109 |     if (opts.handle) {
110 |       const formatted = ensureAtprotoPrefix(opts.handle)
111 |       const handleI = normalized.alsoKnownAs.findIndex((h) =>
112 |         h.startsWith('at://'),
113 |       )
114 |       if (handleI < 0) {
115 |         updated.alsoKnownAs = [formatted, ...normalized.alsoKnownAs]
116 |       } else {
117 |         updated.alsoKnownAs = [
118 |           ...normalized.alsoKnownAs.slice(0, handleI),
119 |           formatted,
120 |           ...normalized.alsoKnownAs.slice(handleI + 1),
121 |         ]
122 |       }
123 |     }
124 |     if (opts.pds) {
125 |       const formatted = ensureHttpPrefix(opts.pds)
126 |       updated.services = {
127 |         ...normalized.services,
128 |         atproto_pds: {
129 |           type: 'AtprotoPersonalDataServer',
130 |           endpoint: formatted,
131 |         },
132 |       }
133 |     }
134 |     if (opts.rotationKeys) {
135 |       updated.rotationKeys = opts.rotationKeys
136 |     }
137 |     return updated
138 |   })
139 | }
140 | 
141 | export const updateAtprotoKeyOp = async (
142 |   lastOp: t.CompatibleOp,
143 |   signer: Keypair,
144 |   signingKey: string,
145 | ): Promise<t.Operation> => {
Promise<t.Operation> => { 146 | return createAtprotoUpdateOp(lastOp, signer, { signingKey }) 147 | } 148 | 149 | export const updateHandleOp = async ( 150 | lastOp: t.CompatibleOp, 151 | signer: Keypair, 152 | handle: string, 153 | ): Promise<t.Operation> => { 154 | return createAtprotoUpdateOp(lastOp, signer, { handle }) 155 | } 156 | 157 | export const updatePdsOp = async ( 158 | lastOp: t.CompatibleOp, 159 | signer: Keypair, 160 | pds: string, 161 | ): Promise<t.Operation> => { 162 | return createAtprotoUpdateOp(lastOp, signer, { pds }) 163 | } 164 | 165 | export const updateRotationKeysOp = async ( 166 | lastOp: t.CompatibleOp, 167 | signer: Keypair, 168 | rotationKeys: string[], 169 | ): Promise<t.Operation> => { 170 | return createAtprotoUpdateOp(lastOp, signer, { rotationKeys }) 171 | } 172 | 173 | export const tombstoneOp = async ( 174 | prev: CID, 175 | key: Keypair, 176 | ): Promise<t.Tombstone> => { 177 | return addSignature( 178 | { 179 | type: 'plc_tombstone', 180 | prev: prev.toString(), 181 | }, 182 | key, 183 | ) 184 | } 185 | 186 | // Signing operations 187 | // --------------------------- 188 | 189 | export const addSignature = async <T extends Record<string, unknown>>( 190 | object: T, 191 | key: Keypair, 192 | ): Promise<T & { sig: string }> => { 193 | const data = new Uint8Array(cbor.encode(object)) 194 | const sig = await key.sign(data) 195 | return { 196 | ...object, 197 | sig: uint8arrays.toString(sig, 'base64url'), 198 | } 199 | } 200 | 201 | export const signOperation = async ( 202 | op: t.UnsignedOperation, 203 | signingKey: Keypair, 204 | ): Promise<t.Operation> => { 205 | return addSignature(op, signingKey) 206 | } 207 | 208 | // Backwards compatibility 209 | // --------------------------- 210 | 211 | export const deprecatedSignCreate = async ( 212 | op: t.UnsignedCreateOpV1, 213 | signingKey: Keypair, 214 | ): Promise<t.CreateOpV1> => { 215 | return addSignature(op, signingKey) 216 | } 217 | 218 | export const normalizeOp = (op: t.CompatibleOp): t.Operation => { 219 | if (check.is(op, t.def.operation)) { 220 | return op 221 | } 222 | return { 223 | type: 'plc_operation', 224 | verificationMethods: { 225 | atproto: op.signingKey, 226 | }, 227 | rotationKeys: [op.recoveryKey, op.signingKey], 228 | alsoKnownAs: [ensureAtprotoPrefix(op.handle)], 229 | services: { 230 | atproto_pds: { 231 | type: 'AtprotoPersonalDataServer', 232 | endpoint: ensureHttpPrefix(op.service), 233 | }, 234 | }, 235 | prev: op.prev, 236 | sig: op.sig, 237 | } 238 | } 239 | 240 | // Verifying operations/signatures 241 | // --------------------------- 242 | 243 | export const assureValidOp = async (op: t.OpOrTombstone) => { 244 | if (check.is(op, t.def.tombstone)) { 245 | return true 246 | } 247 | // ensure we support the op's keys 248 | const keys = [...Object.values(op.verificationMethods), ...op.rotationKeys] 249 | await Promise.all( 250 | keys.map(async (k) => { 251 | try { 252 | parseDidKey(k) 253 | } catch (err) { 254 | throw new UnsupportedKeyError(k, err) 255 | } 256 | }), 257 | ) 258 | if (op.rotationKeys.length > 5) { 259 | throw new ImproperOperationError('too many rotation keys', op) 260 | } else if (op.rotationKeys.length < 1) { 261 | throw new ImproperOperationError('need at least one rotation key', op) 262 | } 263 | } 264 | 265 | export const assureValidCreationOp = async ( 266 | did: string, 267 | op: t.CompatibleOpOrTombstone, 268 | ): Promise<t.DocumentData> => { 269 | if (check.is(op, t.def.tombstone)) { 270 | throw new MisorderedOperationError() 271 | } 272 | const normalized = normalizeOp(op) 273 | await assureValidOp(normalized) 274 | await assureValidSig(normalized.rotationKeys, op) 275 | const expectedDid = await 
didForCreateOp(op) 276 | if (expectedDid !== did) { 277 | throw new GenesisHashError(expectedDid) 278 | } 279 | if (op.prev !== null) { 280 | throw new ImproperOperationError('expected null prev on create', op) 281 | } 282 | const { verificationMethods, rotationKeys, alsoKnownAs, services } = 283 | normalized 284 | return { did, verificationMethods, rotationKeys, alsoKnownAs, services } 285 | } 286 | 287 | export const assureValidSig = async ( 288 | allowedDidKeys: string[], 289 | op: t.CompatibleOpOrTombstone, 290 | ): Promise<string> => { 291 | const { sig, ...opData } = op 292 | const sigBytes = uint8arrays.fromString(sig, 'base64url') 293 | const dataBytes = new Uint8Array(cbor.encode(opData)) 294 | for (const didKey of allowedDidKeys) { 295 | const isValid = await verifySignature(didKey, dataBytes, sigBytes) 296 | if (isValid) { 297 | return didKey 298 | } 299 | } 300 | throw new InvalidSignatureError(op) 301 | } 302 | 303 | // Util 304 | // --------------------------- 305 | 306 | export const ensureHttpPrefix = (str: string): string => { 307 | if (str.startsWith('http://') || str.startsWith('https://')) { 308 | return str 309 | } 310 | return `https://${str}` 311 | } 312 | 313 | export const ensureAtprotoPrefix = (str: string): string => { 314 | if (str.startsWith('at://')) { 315 | return str 316 | } 317 | const stripped = str.replace('http://', '').replace('https://', '') 318 | return `at://${stripped}` 319 | } 320 | -------------------------------------------------------------------------------- /invalidated-op-log.txt: -------------------------------------------------------------------------------- 1 | The following operations were invalidated and removed from the PLC database. They will not be returned in any audit logs or dataset exports. 2 | 3 | (6/1/23) 4 | Correcting an exploit & recovery due to flexible DID length: 5 | - {"sig":"hedSBC2Sp-lj6da5sJ1Bbp9zdYxiuH0s-VbIFI90eLEnOKVYuCpNg23-4kzfLfxoP2iGMbgp5kesOMZl0UU5zA","prev":"bafyreihmuvr3frdvd6vmdhucih277prdcfcezf67lasg5oekxoimnunjoq","type":"plc_operation","services":{"atproto_pds":{"type":"AtprotoPersonalDataServer","endpoint":"https://bsky.social"}},"alsoKnownAs":["at://retr0id-was-here.bsky.social"],"rotationKeys":["did:key:zQ3shhCGUqDKjStzuDxPkTxN6ujddP4RkEKJJouJGRRkaLGbg","did:key:zQ3shpKnbdPx3g3CmPf5cRVTPe1HtSwVn5ish3wSnDPQCbLJK"],"verificationMethods":{"atproto":"did:key:zQ3shXjHeiBuRCKmM36cuYnm7YEMzhGnCmCyW92sRJ9pribSF"}} 6 | - {"sig":"qU8Yy7Szhjk2GzF2coWHYmO8VewoPNkRg-2bEHm_Y7Fn0snJnF3YEwyN98gAOLxNKNNv4RnkbCiZMCVjpkJOrw","prev":"bafyreidaxmtdx6pb3up6tznwdbdse53uytfl7laql4cdlig22zhktkhfjy","type":"plc_operation","services":{"atproto_pds":{"type":"AtprotoPersonalDataServer","endpoint":"https://bsky.social"}},"alsoKnownAs":["at://bluesky-app.bsky.social"],"rotationKeys":["did:key:zQ3shhCGUqDKjStzuDxPkTxN6ujddP4RkEKJJouJGRRkaLGbg","did:key:zQ3shpKnbdPx3g3CmPf5cRVTPe1HtSwVn5ish3wSnDPQCbLJK"],"verificationMethods":{"atproto":"did:key:zQ3shXjHeiBuRCKmM36cuYnm7YEMzhGnCmCyW92sRJ9pribSF"}} 7 | - {"sig":"Rh799SDONBpbiUuH1-w7SipE0Ny7ilRS3tq47I9HwU0bgvbIX0lsmGgCfba8rxlkBCMrvFw_iFEK_tiXNMOAgQ","prev":"bafyreidgciciiy7gc44mzzvhisqycd3qyhsh2yhpf4vbvwxmk4ru4ijvaa","type":"plc_operation","services":{"atproto_pds":{"type":"AtprotoPersonalDataServer","endpoint":"https://bsky.social"}},"alsoKnownAs":["at://bsky.app"],"rotationKeys":["did:key:zQ3shhCGUqDKjStzuDxPkTxN6ujddP4RkEKJJouJGRRkaLGbg","did:key:zQ3shpKnbdPx3g3CmPf5cRVTPe1HtSwVn5ish3wSnDPQCbLJK"],"verificationMethods":{"atproto":"did:key:zQ3shXjHeiBuRCKmM36cuYnm7YEMzhGnCmCyW92sRJ9pribSF"}} 8 
| 9 | (6/8/23) 10 | Removing operations for dids with identifier length > 24 11 | Note: this action was verified ahead of time with affected accounts 12 | - {"sig": "uwYlCtorebyjnDM1_AV-MbtlqEvSi4hM2e-tM92_xVtCxhq-hUQypvM_P55j6JM6URHb8K4xDrATor4HUORUcA", "prev": null, "type": "create", "handle": "syui.syui.ai", "service": "https://bsky.syui.ai", "signingKey": "did:key:zDnaeyvyunvK2SqYgq3yWdXHcVuGsKRZRiLR4TSre5hNr6dzo", "recoveryKey": "did:key:zDnaeyvyunvK2SqYgq3yWdXHcVuGsKRZRiLR4TSre5hNr6dzo"} 13 | - {"sig": "txcvJ9Wj2-YAoUVzAIacGAerHqsmSu6aMQnaBF3L49mQs_ZyE5zPRJxkKJKJ5CmjTJodY5erzrHdUSZXorB_KA", "prev": null, "type": "create", "handle": "atproto.forza7.org", "service": "https://atproto.forza7.org", "signingKey": "did:key:zDnaev1CWcwE82K2poDk5Q6vN259e6FQs5cpbDxCLL9LXyp1s", "recoveryKey": "did:key:zDnaev1CWcwE82K2poDk5Q6vN259e6FQs5cpbDxCLL9LXyp1s"} 14 | - {"sig": "AtgsodOZO65kJ1ewh_T7D7MKzMRvmmQVhCEi-XK0GnePseMXE27ZWCrkBq7t14kNWMiLcpo_8EWqEdTi7caMPw", "prev": null, "type": "create", "handle": "forza7.localhost", "service": "http://localhost:2583", "signingKey": "did:key:zDnaesXsY65c58WZmhs3R1s4dHH3euWmtjcrrWmRADywTjtUg", "recoveryKey": "did:key:zDnaesXsY65c58WZmhs3R1s4dHH3euWmtjcrrWmRADywTjtUg"} 15 | - {"sig": "C8juOvb-155ENw-kLrNtU4GjAnsPP37CKakwKAxlSFfPmkXCVLFyfSyaN7X-epW4i9RchrubEY-BCXyG2U2Ndg", "prev": null, "type": "create", "handle": "kingyosun.boobee.blue", "service": "https://boobee.blue", "signingKey": "did:key:zDnaei7mXSmviqCmsy2VrczRo1Ehtfx9dst9uA88Hb84uTVLa", "recoveryKey": "did:key:zDnaei7mXSmviqCmsy2VrczRo1Ehtfx9dst9uA88Hb84uTVLa"} 16 | - {"sig": "D94TDWey271-YwIuYdeGxRF2SdRBBhseOn76kZ-FVMTKopinJTnPLON9VyxciXL7g61pKMrNiOEr_kfief75dA", "prev": null, "type": "create", "handle": "syui.syui.ai", "service": "https://syui.ai", "signingKey": "did:key:zDnaemc7hXjywbxpJZ5G45joTY7K32oCeGkjYWmMUEfvcqGoH", "recoveryKey": "did:key:zDnaemc7hXjywbxpJZ5G45joTY7K32oCeGkjYWmMUEfvcqGoH"} 17 | - {"sig": "jLL1Dld98b2vK4ZLke9ZJuD5ODqzDmwc1elEWWtTPYVW9M3btlk1mzQqmVZe7BCxYUoJ67pOFoKfehd26QwmKw", "prev": null, "type": "create", "handle": "forza7.localhost", "service": "http://localhost:2583", "signingKey": "did:key:zDnaeg9ahcqJSHFGU3RJoFs4ppD263idBt2A1FbZh6dtnhuNn", "recoveryKey": "did:key:zDnaeg9ahcqJSHFGU3RJoFs4ppD263idBt2A1FbZh6dtnhuNn"} 18 | - {"sig": "OgK-TcULPPChzAHTghQH6kG4FTs2JaiwDkc0A4YDg8_NUV9ZTqCwMTUfVSK2hBJmn-BGUIpE9E2tru_cvHoebQ", "prev": null, "type": "create", "handle": "hatf0.s.sock.fund", "service": "https://s.sock.fund", "signingKey": "did:key:zDnaetyB8S8z9USzJCzi76FKmkSJw18FA38afK6H2APDtTfY3", "recoveryKey": "did:key:zDnaetyB8S8z9USzJCzi76FKmkSJw18FA38afK6H2APDtTfY3"} 19 | - {"sig": "SLhY9KGAVjKLTnBX6SlS26DcO9OWpYfLLt9YzpL80Rel0M1pICzFKS39XlqbiLpupDJg1218ZKseWbgkfFWp-Q", "prev": null, "type": "create", "handle": "forza7.forza7.org", "service": "https://atproto.forza7.org", "signingKey": "did:key:zDnaefVpZvdKuzBdC6AzzXmpHuLsRiq59oyJ2wTnEkpfr4ov1", "recoveryKey": "did:key:zDnaefVpZvdKuzBdC6AzzXmpHuLsRiq59oyJ2wTnEkpfr4ov1"} 20 | - {"sig": "HoCq7RGqF4veuh89-kQMItXd0POh88R5fWqyK344AYgL17zBPLZbiE3YXG-prM4ddQ3igL5hbSAbHVuNO79Tpw", "prev": null, "type": "create", "handle": "takenoko.bluesky.nokotaro.com", "service": "https://bluesky.nokotaro.com", "signingKey": "did:key:zDnaeV6N7NqaJ8kzqMivizovkUoq1jzxULrhbZMEMsVjjRdGf", "recoveryKey": "did:key:zDnaeV6N7NqaJ8kzqMivizovkUoq1jzxULrhbZMEMsVjjRdGf"} 21 | - {"sig": "H9OiysVBqUofLOwGMT5tog0hNOHth5_MgqG59J5cORIfiFjfn7PFZudVyHPi9MPUJyp2K7sAb_HL_2iD2FA9JA", "prev": "bafyreifddth26c2ilxqxxye56fnsmv252uhpkkpwvfyvugmifucgrffase", "type": "plc_operation", 
"services": {"atproto_pds": {"type": "AtprotoPersonalDataServer", "endpoint": "https://bsky.social"}}, "alsoKnownAs": ["at://retr0id.bsky.social"], "rotationKeys": ["did:key:zQ3shhCGUqDKjStzuDxPkTxN6ujddP4RkEKJJouJGRRkaLGbg", "did:key:zQ3shpKnbdPx3g3CmPf5cRVTPe1HtSwVn5ish3wSnDPQCbLJK"], "verificationMethods": {"atproto": "did:key:zQ3shXjHeiBuRCKmM36cuYnm7YEMzhGnCmCyW92sRJ9pribSF"}} 22 | - {"sig": "nPKcIReIEVloBq_qmqF_BK8qr1jmqziwaJmDe29KpW89_Uy0eOH60xREaaIj0BsYoFtadntX9JYXeqmx-XIpLw", "prev": null, "type": "plc_operation", "services": {"atproto_pds": {"type": "AtprotoPersonalDataServer", "endpoint": "https://bsky.social"}}, "alsoKnownAs": ["at://retr0id.bsky.social"], "rotationKeys": ["did:key:zQ3shhCGUqDKjStzuDxPkTxN6ujddP4RkEKJJouJGRRkaLGbg", "did:key:zQ3shpKnbdPx3g3CmPf5cRVTPe1HtSwVn5ish3wSnDPQCbLJK"], "verificationMethods": {"atproto": "did:key:zQ3shXjHeiBuRCKmM36cuYnm7YEMzhGnCmCyW92sRJ9pribSF"}} 23 | - {"sig": "-5Jibdx0Qx7LrwoZL9TOQCo5XbL_Ll8DQfJyZytP4a5PJjuSX0bTbw_fJE3Tt9QbUR_0gbdpc6vmdgo6RC0F9A", "prev": null, "type": "plc_operation", "vanity": "BBBBBBBBBBBBBBBB", "services": {}, "alsoKnownAs": ["at://retr0.id"], "rotationKeys": ["did:key:zQ3shQAiUEQyzEUGd8cgKH364977Pbkokqfetph32L3URhyrD", "did:key:zQ3shQAiUEQyzEUGd8cgKH364977Pbkokqfetph32L3URhyrD"], "verificationMethods": {}} 24 | - {"sig": "nPKcIReIEVloBq_qmqF_BK8qr1jmqziwaJmDe29KpW89_Uy0eOH60xREaaIj0BsYoFtadntX9JYXeqmx-XIpLw", "prev": null, "type": "plc_operation", "services": {"atproto_pds": {"type": "AtprotoPersonalDataServer", "endpoint": "https://bsky.social"}}, "alsoKnownAs": ["at://retr0id.bsky.social"], "rotationKeys": ["did:key:zQ3shhCGUqDKjStzuDxPkTxN6ujddP4RkEKJJouJGRRkaLGbg", "did:key:zQ3shpKnbdPx3g3CmPf5cRVTPe1HtSwVn5ish3wSnDPQCbLJK"], "verificationMethods": {"atproto": "did:key:zQ3shXjHeiBuRCKmM36cuYnm7YEMzhGnCmCyW92sRJ9pribSF"}} 25 | - {"sig": "1mEWzRtFOgeRXH-YCSPTxb990JOXxa__n8Qw6BOKl7Ndm6OFFmwYKiiMqMCpAbxpnGjF5abfIsKc7u3a77Cbnw", "prev": "bafyreigp6shzy6dlcxuowwoxz7u5nemdrkad2my5zwzpwilcnhih7bw6zm", "type": "plc_operation", "services": {"atproto_pds": {"type": "AtprotoPersonalDataServer", "endpoint": "https://bsky.social"}}, "alsoKnownAs": ["at://bsky.app"], "rotationKeys": ["did:key:zQ3shhCGUqDKjStzuDxPkTxN6ujddP4RkEKJJouJGRRkaLGbg", "did:key:zQ3shpKnbdPx3g3CmPf5cRVTPe1HtSwVn5ish3wSnDPQCbLJK"], "verificationMethods": {"atproto": "did:key:zQ3shXjHeiBuRCKmM36cuYnm7YEMzhGnCmCyW92sRJ9pribSF"}} 26 | - {"sig": "kgxBlmHwBobgxFodJPdFAYvPKimcxSlj7ec_alzoLHUOkqXgY2U8-VVLXpGSewptg142Gm_NYqeMvWcP0_WSyQ", "prev": "bafyreifnwnrtobqfhza7yp5u7nf76gqzwdfx6zia63pqn22wnfxs6geguu", "type": "plc_operation", "services": {"atproto_pds": {"type": "AtprotoPersonalDataServer", "endpoint": "https://bsky.social"}}, "alsoKnownAs": ["at://testing-temp.bsky.social"], "rotationKeys": ["did:key:zQ3shhCGUqDKjStzuDxPkTxN6ujddP4RkEKJJouJGRRkaLGbg", "did:key:zQ3shpKnbdPx3g3CmPf5cRVTPe1HtSwVn5ish3wSnDPQCbLJK"], "verificationMethods": {"atproto": "did:key:zQ3shXjHeiBuRCKmM36cuYnm7YEMzhGnCmCyW92sRJ9pribSF"}} 27 | - {"sig": "9NuYV7AqwHVTc0YuWzNV3CJafsSZWH7qCxHRUIP2xWlB-YexXC1OaYAnUayiCXLVzRQ8WBXIqF-SvZdNalwcjA", "prev": null, "type": "plc_operation", "services": {"atproto_pds": {"type": "AtprotoPersonalDataServer", "endpoint": "https://bsky.social"}}, "alsoKnownAs": ["at://bluesky-team.bsky.social"], "rotationKeys": ["did:key:zQ3shhCGUqDKjStzuDxPkTxN6ujddP4RkEKJJouJGRRkaLGbg", "did:key:zQ3shpKnbdPx3g3CmPf5cRVTPe1HtSwVn5ish3wSnDPQCbLJK"], "verificationMethods": {"atproto": "did:key:zQ3shXjHeiBuRCKmM36cuYnm7YEMzhGnCmCyW92sRJ9pribSF"}} 28 | - {"sig": 
"9NuYV7AqwHVTc0YuWzNV3CJafsSZWH7qCxHRUIP2xWlB-YexXC1OaYAnUayiCXLVzRQ8WBXIqF-SvZdNalwcjA", "prev": null, "type": "plc_operation", "services": {"atproto_pds": {"type": "AtprotoPersonalDataServer", "endpoint": "https://bsky.social"}}, "alsoKnownAs": ["at://bluesky-team.bsky.social"], "rotationKeys": ["did:key:zQ3shhCGUqDKjStzuDxPkTxN6ujddP4RkEKJJouJGRRkaLGbg", "did:key:zQ3shpKnbdPx3g3CmPf5cRVTPe1HtSwVn5ish3wSnDPQCbLJK"], "verificationMethods": {"atproto": "did:key:zQ3shXjHeiBuRCKmM36cuYnm7YEMzhGnCmCyW92sRJ9pribSF"}} 29 | - {"sig": "1mEWzRtFOgeRXH-YCSPTxb990JOXxa__n8Qw6BOKl7Ndm6OFFmwYKiiMqMCpAbxpnGjF5abfIsKc7u3a77Cbnw", "prev": "bafyreigp6shzy6dlcxuowwoxz7u5nemdrkad2my5zwzpwilcnhih7bw6zm", "type": "plc_operation", "services": {"atproto_pds": {"type": "AtprotoPersonalDataServer", "endpoint": "https://bsky.social"}}, "alsoKnownAs": ["at://bsky.app"], "rotationKeys": ["did:key:zQ3shhCGUqDKjStzuDxPkTxN6ujddP4RkEKJJouJGRRkaLGbg", "did:key:zQ3shpKnbdPx3g3CmPf5cRVTPe1HtSwVn5ish3wSnDPQCbLJK"], "verificationMethods": {"atproto": "did:key:zQ3shXjHeiBuRCKmM36cuYnm7YEMzhGnCmCyW92sRJ9pribSF"}} 30 | - {"sig": "hedSBC2Sp-lj6da5sJ1Bbp9zdYxiuH0s-VbIFI90eLEnOKVYuCpNg23-4kzfLfxoP2iGMbgp5kesOMZl0UU5zA", "prev": "bafyreihmuvr3frdvd6vmdhucih277prdcfcezf67lasg5oekxoimnunjoq", "type": "plc_operation", "services": {"atproto_pds": {"type": "AtprotoPersonalDataServer", "endpoint": "https://bsky.social"}}, "alsoKnownAs": ["at://retr0id-was-here.bsky.social"], "rotationKeys": ["did:key:zQ3shhCGUqDKjStzuDxPkTxN6ujddP4RkEKJJouJGRRkaLGbg", "did:key:zQ3shpKnbdPx3g3CmPf5cRVTPe1HtSwVn5ish3wSnDPQCbLJK"], "verificationMethods": {"atproto": "did:key:zQ3shXjHeiBuRCKmM36cuYnm7YEMzhGnCmCyW92sRJ9pribSF"}} -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 
35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /packages/server/service/yarn.lock: -------------------------------------------------------------------------------- 1 | # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
2 | # yarn lockfile v1 3 | 4 | 5 | "@datadog/native-appsec@2.0.0": 6 | version "2.0.0" 7 | resolved "https://registry.yarnpkg.com/@datadog/native-appsec/-/native-appsec-2.0.0.tgz#ad65ba19bfd68e6b6c6cf64bb8ef55d099af8edc" 8 | integrity sha512-XHARZ6MVgbnfOUO6/F3ZoZ7poXHJCNYFlgcyS2Xetuk9ITA5bfcooX2B2F7tReVB+RLJ+j8bsm0t55SyF04KDw== 9 | dependencies: 10 | node-gyp-build "^3.9.0" 11 | 12 | "@datadog/native-iast-rewriter@1.1.2": 13 | version "1.1.2" 14 | resolved "https://registry.yarnpkg.com/@datadog/native-iast-rewriter/-/native-iast-rewriter-1.1.2.tgz#793cbf92d218ec80d645be0830023656b81018ea" 15 | integrity sha512-pigRfRtAjZjMjqIXyXb98S4aDnuHz/EmqpoxAajFZsNjBLM87YonwSY5zoBdCsOyA46ddKOJRoCQd5ZalpOFMQ== 16 | dependencies: 17 | node-gyp-build "^4.5.0" 18 | 19 | "@datadog/native-iast-taint-tracking@1.1.0": 20 | version "1.1.0" 21 | resolved "https://registry.yarnpkg.com/@datadog/native-iast-taint-tracking/-/native-iast-taint-tracking-1.1.0.tgz#8f7d0016157b32dbf5c01b15b8afb1c4286b4a18" 22 | integrity sha512-TOrngpt6Qh52zWFOz1CkFXw0g43rnuUziFBtIMUsOLGzSHr9wdnTnE6HAyuvKy3f3ecAoZESlMfilGRKP93hXQ== 23 | dependencies: 24 | node-gyp-build "^3.9.0" 25 | 26 | "@datadog/native-metrics@^1.5.0": 27 | version "1.5.0" 28 | resolved "https://registry.yarnpkg.com/@datadog/native-metrics/-/native-metrics-1.5.0.tgz#e71b6b6d65f4bd58dfdffab2737890e8eef34584" 29 | integrity sha512-K63XMDx74RLhOpM8I9GGZR9ft0CNNB/RkjYPLHcVGvVnBR47zmWE2KFa7Yrtzjbk73+88PXI4nzqLyR3PJsaIQ== 30 | dependencies: 31 | node-gyp-build "^3.9.0" 32 | 33 | "@datadog/pprof@^1.1.1": 34 | version "1.1.1" 35 | resolved "https://registry.yarnpkg.com/@datadog/pprof/-/pprof-1.1.1.tgz#17e86035140523ac3a96f3662e5dd29822042d61" 36 | integrity sha512-5lYXUpikQhrJwzODtJ7aFM0oKmPccISnTCecuWhjxIj4/7UJv0DamkLak634bgEW+kiChgkKFDapHSesuXRDXQ== 37 | dependencies: 38 | delay "^5.0.0" 39 | findit2 "^2.2.3" 40 | node-gyp-build "^3.9.0" 41 | p-limit "^3.1.0" 42 | pify "^5.0.0" 43 | protobufjs "^7.0.0" 44 | source-map "^0.7.3" 45 | split "^1.0.1" 46 | 47 | "@datadog/sketches-js@^2.1.0": 48 | version "2.1.0" 49 | resolved "https://registry.yarnpkg.com/@datadog/sketches-js/-/sketches-js-2.1.0.tgz#8c7e8028a5fc22ad102fa542b0a446c956830455" 50 | integrity sha512-smLocSfrt3s53H/XSVP3/1kP42oqvrkjUPtyaFd1F79ux24oE31BKt+q0c6lsa6hOYrFzsIwyc5GXAI5JmfOew== 51 | 52 | "@protobufjs/aspromise@^1.1.1", "@protobufjs/aspromise@^1.1.2": 53 | version "1.1.2" 54 | resolved "https://registry.yarnpkg.com/@protobufjs/aspromise/-/aspromise-1.1.2.tgz#9b8b0cc663d669a7d8f6f5d0893a14d348f30fbf" 55 | integrity sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ== 56 | 57 | "@protobufjs/base64@^1.1.2": 58 | version "1.1.2" 59 | resolved "https://registry.yarnpkg.com/@protobufjs/base64/-/base64-1.1.2.tgz#4c85730e59b9a1f1f349047dbf24296034bb2735" 60 | integrity sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg== 61 | 62 | "@protobufjs/codegen@^2.0.4": 63 | version "2.0.4" 64 | resolved "https://registry.yarnpkg.com/@protobufjs/codegen/-/codegen-2.0.4.tgz#7ef37f0d010fb028ad1ad59722e506d9262815cb" 65 | integrity sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg== 66 | 67 | "@protobufjs/eventemitter@^1.1.0": 68 | version "1.1.0" 69 | resolved "https://registry.yarnpkg.com/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz#355cbc98bafad5978f9ed095f397621f1d066b70" 70 | integrity sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q== 71 | 72 
| "@protobufjs/fetch@^1.1.0": 73 | version "1.1.0" 74 | resolved "https://registry.yarnpkg.com/@protobufjs/fetch/-/fetch-1.1.0.tgz#ba99fb598614af65700c1619ff06d454b0d84c45" 75 | integrity sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ== 76 | dependencies: 77 | "@protobufjs/aspromise" "^1.1.1" 78 | "@protobufjs/inquire" "^1.1.0" 79 | 80 | "@protobufjs/float@^1.0.2": 81 | version "1.0.2" 82 | resolved "https://registry.yarnpkg.com/@protobufjs/float/-/float-1.0.2.tgz#5e9e1abdcb73fc0a7cb8b291df78c8cbd97b87d1" 83 | integrity sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ== 84 | 85 | "@protobufjs/inquire@^1.1.0": 86 | version "1.1.0" 87 | resolved "https://registry.yarnpkg.com/@protobufjs/inquire/-/inquire-1.1.0.tgz#ff200e3e7cf2429e2dcafc1140828e8cc638f089" 88 | integrity sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q== 89 | 90 | "@protobufjs/path@^1.1.2": 91 | version "1.1.2" 92 | resolved "https://registry.yarnpkg.com/@protobufjs/path/-/path-1.1.2.tgz#6cc2b20c5c9ad6ad0dccfd21ca7673d8d7fbf68d" 93 | integrity sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA== 94 | 95 | "@protobufjs/pool@^1.1.0": 96 | version "1.1.0" 97 | resolved "https://registry.yarnpkg.com/@protobufjs/pool/-/pool-1.1.0.tgz#09fd15f2d6d3abfa9b65bc366506d6ad7846ff54" 98 | integrity sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw== 99 | 100 | "@protobufjs/utf8@^1.1.0": 101 | version "1.1.0" 102 | resolved "https://registry.yarnpkg.com/@protobufjs/utf8/-/utf8-1.1.0.tgz#a777360b5b39a1a2e5106f8e858f2fd2d060c570" 103 | integrity sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw== 104 | 105 | "@types/node@>=13.7.0": 106 | version "18.13.0" 107 | resolved "https://registry.yarnpkg.com/@types/node/-/node-18.13.0.tgz#0400d1e6ce87e9d3032c19eb6c58205b0d3f7850" 108 | integrity sha512-gC3TazRzGoOnoKAhUx+Q0t8S9Tzs74z7m0ipwGpSqQrleP14hKxP4/JUeEQcD3W1/aIpnWl8pHowI7WokuZpXg== 109 | 110 | crypto-randomuuid@^1.0.0: 111 | version "1.0.0" 112 | resolved "https://registry.yarnpkg.com/crypto-randomuuid/-/crypto-randomuuid-1.0.0.tgz#acf583e5e085e867ae23e107ff70279024f9e9e7" 113 | integrity sha512-/RC5F4l1SCqD/jazwUF6+t34Cd8zTSAGZ7rvvZu1whZUhD2a5MOGKjSGowoGcpj/cbVZk1ZODIooJEQQq3nNAA== 114 | 115 | dd-trace@^3.8.0: 116 | version "3.13.2" 117 | resolved "https://registry.yarnpkg.com/dd-trace/-/dd-trace-3.13.2.tgz#95b1ec480ab9ac406e1da7591a8c6f678d3799fd" 118 | integrity sha512-POO9nEcAufe5pgp2xV1X3PfWip6wh+6TpEcRSlSgZJCIIMvWVCkcIVL/J2a6KAZq6V3Yjbkl8Ktfe+MOzQf5kw== 119 | dependencies: 120 | "@datadog/native-appsec" "2.0.0" 121 | "@datadog/native-iast-rewriter" "1.1.2" 122 | "@datadog/native-iast-taint-tracking" "1.1.0" 123 | "@datadog/native-metrics" "^1.5.0" 124 | "@datadog/pprof" "^1.1.1" 125 | "@datadog/sketches-js" "^2.1.0" 126 | crypto-randomuuid "^1.0.0" 127 | diagnostics_channel "^1.1.0" 128 | ignore "^5.2.0" 129 | import-in-the-middle "^1.3.4" 130 | ipaddr.js "^2.0.1" 131 | istanbul-lib-coverage "3.2.0" 132 | koalas "^1.0.2" 133 | limiter "^1.1.4" 134 | lodash.kebabcase "^4.1.1" 135 | lodash.pick "^4.4.0" 136 | lodash.sortby "^4.7.0" 137 | lodash.uniq "^4.5.0" 138 | lru-cache "^7.14.0" 139 | methods "^1.1.2" 140 | module-details-from-path "^1.0.3" 141 | node-abort-controller "^3.0.1" 142 | opentracing ">=0.12.1" 143 | path-to-regexp "^0.1.2" 144 | protobufjs "^7.1.2" 145 | retry 
"^0.10.1" 146 | semver "^5.5.0" 147 | 148 | delay@^5.0.0: 149 | version "5.0.0" 150 | resolved "https://registry.yarnpkg.com/delay/-/delay-5.0.0.tgz#137045ef1b96e5071060dd5be60bf9334436bd1d" 151 | integrity sha512-ReEBKkIfe4ya47wlPYf/gu5ib6yUG0/Aez0JQZQz94kiWtRQvZIQbTiehsnwHvLSWJnQdhVeqYue7Id1dKr0qw== 152 | 153 | diagnostics_channel@^1.1.0: 154 | version "1.1.0" 155 | resolved "https://registry.yarnpkg.com/diagnostics_channel/-/diagnostics_channel-1.1.0.tgz#bd66c49124ce3bac697dff57466464487f57cea5" 156 | integrity sha512-OE1ngLDjSBPG6Tx0YATELzYzy3RKHC+7veQ8gLa8yS7AAgw65mFbVdcsu3501abqOZCEZqZyAIemB0zXlqDSuw== 157 | 158 | findit2@^2.2.3: 159 | version "2.2.3" 160 | resolved "https://registry.yarnpkg.com/findit2/-/findit2-2.2.3.tgz#58a466697df8a6205cdfdbf395536b8bd777a5f6" 161 | integrity sha512-lg/Moejf4qXovVutL0Lz4IsaPoNYMuxt4PA0nGqFxnJ1CTTGGlEO2wKgoDpwknhvZ8k4Q2F+eesgkLbG2Mxfog== 162 | 163 | ignore@^5.2.0: 164 | version "5.2.4" 165 | resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.4.tgz#a291c0c6178ff1b960befe47fcdec301674a6324" 166 | integrity sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ== 167 | 168 | import-in-the-middle@^1.3.4: 169 | version "1.3.4" 170 | resolved "https://registry.yarnpkg.com/import-in-the-middle/-/import-in-the-middle-1.3.4.tgz#7074bbd4e84e8cdafd1eae400b04e6fe252a0768" 171 | integrity sha512-TUXqqEFacJ2DWAeYOhHwGZTMJtFxFVw0C1pYA+AXmuWXZGnBqUhHdtVrSkSbW5D7k2yriBG45j23iH9TRtI+bQ== 172 | dependencies: 173 | module-details-from-path "^1.0.3" 174 | 175 | ipaddr.js@^2.0.1: 176 | version "2.0.1" 177 | resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-2.0.1.tgz#eca256a7a877e917aeb368b0a7497ddf42ef81c0" 178 | integrity sha512-1qTgH9NG+IIJ4yfKs2e6Pp1bZg8wbDbKHT21HrLIeYBTRLgMYKnMTPAuI3Lcs61nfx5h1xlXnbJtH1kX5/d/ng== 179 | 180 | istanbul-lib-coverage@3.2.0: 181 | version "3.2.0" 182 | resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" 183 | integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== 184 | 185 | koalas@^1.0.2: 186 | version "1.0.2" 187 | resolved "https://registry.yarnpkg.com/koalas/-/koalas-1.0.2.tgz#318433f074235db78fae5661a02a8ca53ee295cd" 188 | integrity sha512-RYhBbYaTTTHId3l6fnMZc3eGQNW6FVCqMG6AMwA5I1Mafr6AflaXeoi6x3xQuATRotGYRLk6+1ELZH4dstFNOA== 189 | 190 | limiter@^1.1.4: 191 | version "1.1.5" 192 | resolved "https://registry.yarnpkg.com/limiter/-/limiter-1.1.5.tgz#8f92a25b3b16c6131293a0cc834b4a838a2aa7c2" 193 | integrity sha512-FWWMIEOxz3GwUI4Ts/IvgVy6LPvoMPgjMdQ185nN6psJyBJ4yOpzqm695/h5umdLJg2vW3GR5iG11MAkR2AzJA== 194 | 195 | lodash.kebabcase@^4.1.1: 196 | version "4.1.1" 197 | resolved "https://registry.yarnpkg.com/lodash.kebabcase/-/lodash.kebabcase-4.1.1.tgz#8489b1cb0d29ff88195cceca448ff6d6cc295c36" 198 | integrity sha512-N8XRTIMMqqDgSy4VLKPnJ/+hpGZN+PHQiJnSenYqPaVV/NCqEogTnAdZLQiGKhxX+JCs8waWq2t1XHWKOmlY8g== 199 | 200 | lodash.pick@^4.4.0: 201 | version "4.4.0" 202 | resolved "https://registry.yarnpkg.com/lodash.pick/-/lodash.pick-4.4.0.tgz#52f05610fff9ded422611441ed1fc123a03001b3" 203 | integrity sha512-hXt6Ul/5yWjfklSGvLQl8vM//l3FtyHZeuelpzK6mm99pNvN9yTDruNZPEJZD1oWrqo+izBmB7oUfWgcCX7s4Q== 204 | 205 | lodash.sortby@^4.7.0: 206 | version "4.7.0" 207 | resolved "https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" 208 | integrity 
sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA== 209 | 210 | lodash.uniq@^4.5.0: 211 | version "4.5.0" 212 | resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773" 213 | integrity sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ== 214 | 215 | long@^5.0.0: 216 | version "5.2.1" 217 | resolved "https://registry.yarnpkg.com/long/-/long-5.2.1.tgz#e27595d0083d103d2fa2c20c7699f8e0c92b897f" 218 | integrity sha512-GKSNGeNAtw8IryjjkhZxuKB3JzlcLTwjtiQCHKvqQet81I93kXslhDQruGI/QsddO83mcDToBVy7GqGS/zYf/A== 219 | 220 | lru-cache@^7.14.0: 221 | version "7.14.1" 222 | resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-7.14.1.tgz#8da8d2f5f59827edb388e63e459ac23d6d408fea" 223 | integrity sha512-ysxwsnTKdAx96aTRdhDOCQfDgbHnt8SK0KY8SEjO0wHinhWOFTESbjVCMPbU1uGXg/ch4lifqx0wfjOawU2+WA== 224 | 225 | methods@^1.1.2: 226 | version "1.1.2" 227 | resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" 228 | integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== 229 | 230 | module-details-from-path@^1.0.3: 231 | version "1.0.3" 232 | resolved "https://registry.yarnpkg.com/module-details-from-path/-/module-details-from-path-1.0.3.tgz#114c949673e2a8a35e9d35788527aa37b679da2b" 233 | integrity sha512-ySViT69/76t8VhE1xXHK6Ch4NcDd26gx0MzKXLO+F7NOtnqH68d9zF94nT8ZWSxXh8ELOERsnJO/sWt1xZYw5A== 234 | 235 | node-abort-controller@^3.0.1: 236 | version "3.1.1" 237 | resolved "https://registry.yarnpkg.com/node-abort-controller/-/node-abort-controller-3.1.1.tgz#a94377e964a9a37ac3976d848cb5c765833b8548" 238 | integrity sha512-AGK2yQKIjRuqnc6VkX2Xj5d+QW8xZ87pa1UK6yA6ouUyuxfHuMP6umE5QK7UmTeOAymo+Zx1Fxiuw9rVx8taHQ== 239 | 240 | node-gyp-build@^3.9.0: 241 | version "3.9.0" 242 | resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-3.9.0.tgz#53a350187dd4d5276750da21605d1cb681d09e25" 243 | integrity sha512-zLcTg6P4AbcHPq465ZMFNXx7XpKKJh+7kkN699NiQWisR2uWYOWNWqRHAmbnmKiL4e9aLSlmy5U7rEMUXV59+A== 244 | 245 | node-gyp-build@^4.5.0: 246 | version "4.6.0" 247 | resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.6.0.tgz#0c52e4cbf54bbd28b709820ef7b6a3c2d6209055" 248 | integrity sha512-NTZVKn9IylLwUzaKjkas1e4u2DLNcV4rdYagA4PWdPwW87Bi7z+BznyKSRwS/761tV/lzCGXplWsiaMjLqP2zQ== 249 | 250 | opentracing@>=0.12.1: 251 | version "0.14.7" 252 | resolved "https://registry.yarnpkg.com/opentracing/-/opentracing-0.14.7.tgz#25d472bd0296dc0b64d7b94cbc995219031428f5" 253 | integrity sha512-vz9iS7MJ5+Bp1URw8Khvdyw1H/hGvzHWlKQ7eRrQojSCDL1/SrWfrY9QebLw97n2deyRtzHRC3MkQfVNUCo91Q== 254 | 255 | p-limit@^3.1.0: 256 | version "3.1.0" 257 | resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" 258 | integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== 259 | dependencies: 260 | yocto-queue "^0.1.0" 261 | 262 | path-to-regexp@^0.1.2: 263 | version "0.1.7" 264 | resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" 265 | integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== 266 | 267 | pify@^5.0.0: 268 | version "5.0.0" 269 | resolved "https://registry.yarnpkg.com/pify/-/pify-5.0.0.tgz#1f5eca3f5e87ebec28cc6d54a0e4aaf00acc127f" 270 | integrity 
sha512-eW/gHNMlxdSP6dmG6uJip6FXN0EQBwm2clYYd8Wul42Cwu/DK8HEftzsapcNdYe2MfLiIwZqsDk2RDEsTE79hA== 271 | 272 | protobufjs@^7.0.0, protobufjs@^7.1.2: 273 | version "7.2.2" 274 | resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-7.2.2.tgz#2af401d8c547b9476fb37ffc65782cf302342ca3" 275 | integrity sha512-++PrQIjrom+bFDPpfmqXfAGSQs40116JRrqqyf53dymUMvvb5d/LMRyicRoF1AUKoXVS1/IgJXlEgcpr4gTF3Q== 276 | dependencies: 277 | "@protobufjs/aspromise" "^1.1.2" 278 | "@protobufjs/base64" "^1.1.2" 279 | "@protobufjs/codegen" "^2.0.4" 280 | "@protobufjs/eventemitter" "^1.1.0" 281 | "@protobufjs/fetch" "^1.1.0" 282 | "@protobufjs/float" "^1.0.2" 283 | "@protobufjs/inquire" "^1.1.0" 284 | "@protobufjs/path" "^1.1.2" 285 | "@protobufjs/pool" "^1.1.0" 286 | "@protobufjs/utf8" "^1.1.0" 287 | "@types/node" ">=13.7.0" 288 | long "^5.0.0" 289 | 290 | retry@^0.10.1: 291 | version "0.10.1" 292 | resolved "https://registry.yarnpkg.com/retry/-/retry-0.10.1.tgz#e76388d217992c252750241d3d3956fed98d8ff4" 293 | integrity sha512-ZXUSQYTHdl3uS7IuCehYfMzKyIDBNoAuUblvy5oGO5UJSUTmStUUVPXbA9Qxd173Bgre53yCQczQuHgRWAdvJQ== 294 | 295 | semver@^5.5.0: 296 | version "5.7.1" 297 | resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" 298 | integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== 299 | 300 | source-map@^0.7.3: 301 | version "0.7.4" 302 | resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.4.tgz#a9bbe705c9d8846f4e08ff6765acf0f1b0898656" 303 | integrity sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA== 304 | 305 | split@^1.0.1: 306 | version "1.0.1" 307 | resolved "https://registry.yarnpkg.com/split/-/split-1.0.1.tgz#605bd9be303aa59fb35f9229fbea0ddec9ea07d9" 308 | integrity sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg== 309 | dependencies: 310 | through "2" 311 | 312 | through@2: 313 | version "2.3.8" 314 | resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" 315 | integrity sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg== 316 | 317 | yocto-queue@^0.1.0: 318 | version "0.1.0" 319 | resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" 320 | integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== 321 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # DID PLC Method (did:plc) 2 | 3 | DID PLC is a self-authenticating [DID](https://www.w3.org/TR/did-core/) which is strongly-consistent, recoverable, and allows for key rotation. 4 | 5 | An example DID is: `did:plc:yk4dd2qkboz2yv6tpubpc6co` 6 | 7 | Control over a `did:plc` identity rests in a set of re-configurable "rotation" key pairs. These keys can sign update "operations" to mutate the identity (including key rotations), with each operation referencing a prior version of the identity state by hash. Each identity starts from an initial "genesis" operation, and the hash of this initial object is what defines the DID itself (that is, the DID URI "identifier" string). A central "directory" server collects and validates operations, and maintains a transparent log of operations for each DID. 
8 | 9 | ## Motivation 10 | 11 | [Bluesky](https://blueskyweb.xyz/) developed DID PLC when designing the [AT Protocol](https://atproto.com) ("atproto") because we were not satisfied with any of the existing DID methods. 12 | We wanted a strongly consistent, highly available, recoverable, and cryptographically secure method with fast and cheap propagation of updates. 13 | 14 | We originally titled the method "Placeholder", because we didn't want it to stick around forever in its current form. We are actively hoping to replace it with or evolve it into something less centralized - likely a permissioned DID consortium. That being said, we do intend to support `did:plc` in the current form until after any successor is deployed, with a reasonable grace period. We would also provide a migration route to allow continued use of existing `did:plc` identifiers. 15 | 16 | ## How it works 17 | 18 | The core data fields associated with an active `did:plc` identifier at any point in time are listed below. The encoding and structure differ somewhat from DID document formatting and semantics, but this information is sufficient to render a valid DID document. 19 | 20 | - `did` (string): the full DID identifier 21 | - `rotationKeys` (array of strings): priority-ordered list of public keys in `did:key` encoding. must include at least 1 key and at most 5 keys, with no duplication. control of the DID identifier rests in these keys. not included in DID document. 22 | - `verificationMethods` (map with string keys and values): a set of service / public key mappings. the values are public keys in `did:key` encoding; they get re-encoded in "multibase" form when rendered in the DID document. the key strings should not include a `#` prefix; that will be added when rendering the DID document. used to generate the `verificationMethods` of the DID document. these keys do not have control over the DID document 23 | - `alsoKnownAs` (array of strings): priority-ordered list of URIs which indicate other names or aliases associated with the DID identifier 24 | - `services` (map with string keys; values are maps with `type` and `endpoint` string fields): a set of service / URL mappings. the key strings should not include a `#` prefix; that will be added when rendering the DID document. 25 | 26 | Every update "operation" to the DID identifier, including the initial creation operation ("genesis" operation), contains all of the above information, except for the `did` field. The DID itself is generated from a hash of the signed genesis operation (details described below), which makes the DID entirely self-certifying. Updates after initial creation contain a pointer to the most-recent previous operation (by hash). 27 | 28 | "Operations" are signed and submitted to the central PLC directory server over an un-authenticated HTTP request. The PLC server validates operations against any and all existing operations on the DID (including signature validation, recovery time windows, etc), and either rejects the operation or accepts and permanently stores the operation, along with a server-generated timestamp. 29 | 30 | A special operation type is a "tombstone", which clears all of the data fields and permanently "de-activates" the DID. Note that the usual recovery time window applies to "tombstone" operations. 31 | 32 | Note that `rotationKeys` and `verificationMethods` ("signing keys") may have public keys which are re-used across many accounts. There is not necessarily a one-to-one mapping between a DID and either "rotation" keys or "signing" keys. 
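As a rough illustration, the per-DID state described above maps onto a TypeScript shape like the sketch below. The type name is ours, not the library's; the authoritative schema definitions live in `packages/lib/src/types.ts`.

```ts
// Illustrative only: the identity state tracked for each did:plc.
type DidPlcState = {
  did: string // full DID, e.g. 'did:plc:yk4dd2qkboz2yv6tpubpc6co'
  rotationKeys: string[] // 1-5 did:key strings, highest authority first; not in the DID document
  verificationMethods: Record<string, string> // e.g. { atproto: 'did:key:...' }; no '#' prefix on keys
  alsoKnownAs: string[] // priority-ordered URIs, e.g. ['at://alice.test']
  services: Record<string, { type: string; endpoint: string }> // no '#' prefix on keys
}
```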
33 | 34 | Only `secp256k1` ("k256") and NIST P-256 ("p256") keys are currently supported, for both "rotation" and "signing" keys. 35 | 36 | ### Use with AT Protocol 37 | 38 | The following information should be included for use with atproto: 39 | 40 | - `verificationMethods`: an `atproto` entry with a "blessed" public key type, to be used as a "signing key" for authenticating updates to the account's repository. the signing key does not have any control over the DID identity unless also included in the `rotationKeys` list. best practice is to maintain separation between rotation keys and atproto signing keys 41 | - `alsoKnownAs`: should include an `at://` URI indicating a "handle" (hostname) for the account. note that the handle/DID mapping needs to be validated bi-directionally (via handle resolution), and needs to be re-verified periodically 42 | - `services`: an `atproto_pds` entry with an `AtprotoPersonalDataServer` type and http/https URL `endpoint` indicating the account's current PDS hostname. for example, `https://pds.example.com` (no `/xrpc/` suffix needed). 43 | 44 | ### Operation Serialization, Signing, and Validation 45 | 46 | There are a couple of variations on the "operation" data object schema. Operations are serialized both as simple JSON objects and as binary DAG-CBOR encoding, the latter for the purpose of hashing and signing. 47 | 48 | A regular creation or update operation contains the following fields: 49 | 50 | - `type` (string): with fixed value `plc_operation` 51 | - `rotationKeys` (array of strings): as described above 52 | - `verificationMethods` (mapping of string keys and values): as described above 53 | - `alsoKnownAs` (array of strings): as described above 54 | - `services` (mapping of string keys and object values): as described above 55 | - `prev` (string, nullable): a "CID" hash pointer to a previous operation if an update, or `null` for a creation. if `null`, the key should actually be part of the object, with value `null`, not simply omitted. in DAG-CBOR encoding, the CID is string-encoded, not a binary IPLD "Link" 56 | - `sig` (string): signature of the operation in `base64url` encoding 57 | 58 | A tombstone operation contains: 59 | 60 | - `type` (string): with fixed value `plc_tombstone` 61 | - `prev` (string): same as above, but not nullable 62 | - `sig` (string): signature of the operation (same as above) 63 | 64 | There is also a deprecated legacy operation format, supported *only* for creation ("genesis") operations: 65 | 66 | - `type` (string): with fixed value `create` 67 | - `signingKey` (string): single `did:key` value (not an array of strings) 68 | - `recoveryKey` (string): single `did:key` value (not an array of strings); and note "recovery" terminology, not "rotation" 69 | - `handle` (string): single value, indicating atproto handle, instead of `alsoKnownAs`. bare handle, with no `at://` prefix 70 | - `service` (string): single value, http/https URL of atproto PDS 71 | - `prev` (null): always include, but always with value `null` 72 | - `sig` (string): signature of the operation (same as above) 73 | 74 | Legacy `create` operations are stored in the PLC registry and may be returned in responses, so validating software needs to support that format. Conversion of the legacy format to "regular" operation format is relatively straightforward, but there exist many `did:plc` identifiers where the DID identifier itself is based on the hash of the old format, so they will unfortunately be around forever. 
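For orientation, here are the three wire formats sketched as TypeScript types. Field names and nullability follow the lists above; the type names `PlcOperation`, `PlcTombstone`, and `LegacyCreateOp` are illustrative, and the library's own definitions are in `packages/lib/src/types.ts`.

```ts
// A regular creation or update operation.
type PlcOperation = {
  type: 'plc_operation'
  rotationKeys: string[]
  verificationMethods: Record<string, string>
  alsoKnownAs: string[]
  services: Record<string, { type: string; endpoint: string }>
  prev: string | null // CID of the prior operation; explicitly null for a genesis operation
  sig: string // base64url-encoded signature
}

// Permanently deactivates the DID.
type PlcTombstone = {
  type: 'plc_tombstone'
  prev: string // not nullable: a tombstone can never be a genesis operation
  sig: string
}

// Deprecated legacy format, accepted only as a genesis operation.
type LegacyCreateOp = {
  type: 'create'
  signingKey: string // single did:key
  recoveryKey: string // single did:key
  handle: string // bare handle, no at:// prefix
  service: string // http/https URL of the atproto PDS
  prev: null
  sig: string
}
```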
75 | 76 | The process for signing and hashing operation objects is to first encode them in the DAG-CBOR binary serialization format. [DAG-CBOR](https://ipld.io/specs/codecs/dag-cbor/spec/) is a restricted subset of the Concise Binary Object Representation (CBOR), an IETF standard (RFC 8949), with semantics and value types similar to JSON. 77 | 78 | As an anti-abuse mechanism, operations have a maximum size when encoded as DAG-CBOR. The current limit is 7500 bytes. 79 | 80 | For signatures, the object is first encoded as DAG-CBOR *without* the `sig` field at all (as opposed to a `null` value in that field). Those bytes are signed, and then the signature bytes are encoded as a string using `base64url` encoding. The `sig` value is then populated with the string. In strongly typed programming languages it is a best practice to have distinct "signed" and "unsigned" types. 81 | 82 | When working with signatures, note that ECDSA signatures are not necessarily *deterministic* or *unique*. That is, the same key signing the same bytes *might* generate the same signature every time, or it might generate a *different* signature every time, depending on the cryptographic library and configuration. In some cases it is also easy for a third party to take a valid signature and transform it into a new, distinct signature, which also validates. Be sure to always use the "validate signature" routine from a cryptographic library, instead of re-signing bytes and directly comparing the signature bytes. 83 | 84 | For `prev` references, the SHA-256 of the previous operation's bytes is encoded as a "[CID](https://github.com/multiformats/cid)", with the following parameters: 85 | 86 | - CIDv1 87 | - `base32` multibase encoding (prefix: `b`) 88 | - `dag-cbor` multicodec type (code: 0x71) 89 | - `sha-256` multihash (code: 0x12) 90 | 91 | Rotation keys are serialized as strings using [did:key](https://w3c-ccg.github.io/did-method-key/), and only `secp256k1` ("k256") and NIST P-256 ("p256") are currently supported. 92 | 93 | The signing keys (`verificationMethods`) are also serialized using `did:key` in operations. When rendered in a DID document, signing keys are represented as objects, with the actual keys in multibase encoding, as required by the DID Core specification. 94 | 95 | The DID itself is derived from the hash of the first operation in the log, called the "genesis" operation. The signed operation is encoded in DAG-CBOR; the bytes are hashed with SHA-256; the hash bytes are `base32`-encoded (not hex encoded) as a string; and that string is truncated to 24 chars to yield the "identifier" segment of the DID. 96 | 97 | In pseudo-code: 98 | `did:plc:${base32Encode(sha256(createOp)).slice(0,24)}` 99 | 100 | ### Identifier Syntax 101 | 102 | The DID PLC method name is `plc`. The identifier part is 24 characters 103 | long, including only characters from the `base32` encoding set. An example is 104 | `did:plc:yk4dd2qkboz2yv6tpubpc6co`. This means: 105 | 106 | - the overall identifier length is 32 characters 107 | - the entire identifier is lower-case (and should be normalized to lower-case) 108 | - the entire identifier is ASCII, and includes only the characters `a-z`, `0-9`, and `:` (and does not even use digits `0189`) 109 | 110 | 111 | ### Key Rotation & Account Recovery 112 | 113 | Any key specified in `rotationKeys` has the ability to sign operations for the DID document. 114 | 115 | The set of rotation keys for a DID is not included in the DID document. 
They are an internal detail of PLC, and are stored in the operation log. 116 | 117 | Keys are listed in the `rotationKeys` field of operations in order of descending authority. 118 | 119 | The PLC server provides a 72hr window during which a higher authority rotation key can "rewrite" history, clobbering any operations (or chain of operations) signed by a lower-authority rotation key. 120 | 121 | To do so, that key must sign a new operation that points to the CID of the last "valid" operation - ie the fork point. 122 | The PLC server will accept this recovery operation as long as: 123 | 124 | - it is submitted within 72hrs of the referenced operation 125 | - the key used for the signature has a lower index in the `rotationKeys` array than the key that signed the to-be-invalidated operation 126 | 127 | 128 | ### Privacy and Security Concerns 129 | 130 | The full history of DID operations and updates, including timestamps, is permanently publicly accessible. This is true even after DID deactivation. It is important to recognize (and communicate to account holders) that any personally identifiable information (PII) encoded in `alsoKnownAs` URIs will be publicly visible even after DID deactivation, and cannot be redacted or purged. 131 | 132 | In the context of atproto, this includes the full history of handle updates and PDS locations (URLs) over time. To be explicit, it does not include any other account metadata such as email addresses or IP addresses. Handle history could potentially de-anonymize account holders if they switch handles between a known identity and an anonymous or pseudonymous identity. 133 | 134 | The PLC server does not cross-validate `alsoKnownAs` or `service` entries in operations. This means that any DID can "claim" to have any identity, or to have an active account with any service (identified by URL). This data should *not* be trusted without bi-directional verification, for example using handle resolution. 135 | 136 | The timestamp metadata encoded in the PLC audit log could be cross-verified against network traffic or other information to de-anonymize account holders. It also makes the "identity creation date" public. 137 | 138 | If "rotation" and "signing" keys are re-used across multiple accounts, it could reveal non-public identity details or relationships. For example, if two individuals cross-share rotation keys as a trusted backup, that information is public. If device-local recovery or signing keys are uniquely shared by two identifiers, that would indicate that those identities may actually be the same person. 139 | 140 | 141 | #### PLC Server Trust Model 142 | 143 | The PLC server has a public endpoint to receive operation objects from any client (without authentication). The server verifies operations, orders them according to recovery rules, and makes the log of operations publicly available. 144 | 145 | The operation log is self-certifying, and contains all the information needed to construct (or verify) the current state of the DID document. 146 | 147 | Some trust is required in the PLC server. The attacks it could mount are limited to: 148 | 149 | - Denial of service: rejecting valid operations, or refusing to serve some information about the DID 150 | - Misordering: In the event of a fork in DID document history, the server could choose to serve the "wrong" fork 151 | 152 | 
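Because the log is self-certifying, any consumer can re-check the server's work. Below is a minimal sketch of verifying a single operation's signature against a set of rotation keys; it mirrors `assureValidSig` in `packages/lib/src/operations.ts` (the helper name here is our own, and the primitives are the same ones that file imports):

```ts
import * as cbor from '@ipld/dag-cbor'
import * as uint8arrays from 'uint8arrays'
import { verifySignature } from '@atproto/crypto'

// Returns the did:key that produced the signature, or null if none of the
// allowed rotation keys verify. `op` is a signed operation object.
const whichKeySigned = async (
  rotationKeys: string[],
  op: { sig: string } & Record<string, unknown>,
): Promise<string | null> => {
  const { sig, ...unsigned } = op
  // the signed bytes are the DAG-CBOR encoding of the object *without* `sig`
  const dataBytes = new Uint8Array(cbor.encode(unsigned))
  const sigBytes = uint8arrays.fromString(sig, 'base64url')
  for (const didKey of rotationKeys) {
    if (await verifySignature(didKey, dataBytes, sigBytes)) {
      return didKey
    }
  }
  return null
}
```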

### Privacy and Security Concerns

The full history of DID operations and updates, including timestamps, is permanently publicly accessible. This is true even after DID deactivation. It is important to recognize (and communicate to account holders) that any personally identifiable information (PII) encoded in `alsoKnownAs` URIs will be publicly visible even after DID deactivation, and cannot be redacted or purged.

In the context of atproto, this includes the full history of handle updates and PDS locations (URLs) over time. To be explicit, it does not include any other account metadata, such as email addresses or IP addresses. Handle history could potentially de-anonymize account holders if they switch handles between a known identity and an anonymous or pseudonymous identity.

The PLC server does not cross-validate `alsoKnownAs` or `service` entries in operations. This means that any DID can "claim" to have any identity, or to have an active account with any service (identified by URL). This data should *not* be trusted without bi-directional verification, for example using handle resolution.

The timestamp metadata encoded in the PLC audit log could be cross-referenced with network traffic or other information to de-anonymize account holders. It also makes the "identity creation date" public.

If "rotation" or "signing" keys are re-used across multiple accounts, this could reveal non-public identity details or relationships. For example, if two individuals cross-share rotation keys as a trusted backup, that information is public. And if device-local recovery or signing keys are shared by exactly two identifiers, that would indicate that those identities may actually be the same person.


#### PLC Server Trust Model

The PLC server has a public endpoint to receive operation objects from any client (without authentication). The server verifies operations, orders them according to recovery rules, and makes the log of operations publicly available.

The operation log is self-certifying, and contains all the information needed to construct (or verify) the current state of the DID document.

Some trust is required in the PLC server, but the attacks available to a malicious or compromised server are limited to:

- Denial of service: rejecting valid operations, or refusing to serve some information about the DID
- Misordering: in the event of a fork in DID document history, the server could choose to serve the "wrong" fork


### DID Creation

To summarize the process of creating a new `did:plc` identifier (a code sketch follows below):

- collect values for all of the core data fields, including generating new secure key pairs if necessary
- construct an "unsigned" regular operation object. include a `prev` field with a `null` value. do not use the deprecated/legacy operation format for new DID creations
- serialize the "unsigned" operation with DAG-CBOR, and sign the resulting bytes with one of the initial `rotationKeys`. encode the signature as `base64url`, and use that to construct a "signed" operation object
- serialize the "signed" operation with DAG-CBOR, take the SHA-256 hash of those bytes, and encode the hash bytes in `base32`. use the first 24 characters to generate the DID value (prefixed with `did:plc:`)
- serialize the "signed" operation as simple JSON, and submit it via HTTP POST to `https://plc.directory/:did`
- if the HTTP status code is successful, the DID has been registered

When "signing" with a "`rotationKey`", what is meant is signing with the private key associated with the public key listed in the `rotationKeys` array.
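
Condensing those steps into a TypeScript sketch, with `signBytes` as a hypothetical stand-in for a real k256/p256 signing routine, and `didForCreateOp` as sketched in the signing/hashing section above:

```ts
import * as dagCbor from '@ipld/dag-cbor'
import { toString as bytesToString } from 'uint8arrays'

// Hypothetical stand-ins, for illustration only.
declare function signBytes(bytes: Uint8Array, privateKey: Uint8Array): Uint8Array
declare function didForCreateOp(signedOp: unknown): Promise<string>

async function createDid(
  unsignedOp: Record<string, unknown>, // has `prev: null` and no `sig` field
  rotationPrivateKey: Uint8Array,
): Promise<string> {
  // sign the DAG-CBOR bytes of the unsigned op; encode the signature as base64url
  const sigBytes = signBytes(dagCbor.encode(unsignedOp), rotationPrivateKey)
  const signedOp = { ...unsignedOp, sig: bytesToString(sigBytes, 'base64url') }

  // derive the DID from the signed genesis operation
  const did = await didForCreateOp(signedOp)

  // submit the signed operation as simple JSON
  const res = await fetch(`https://plc.directory/${did}`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(signedOp),
  })
  if (!res.ok) throw new Error(`DID registration failed: HTTP ${res.status}`)
  return did
}
```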
### DID Update

To summarize the process of updating an existing `did:plc` identifier:

- if the current DID state isn't known, fetch the current state from `https://plc.directory/:did/data`
- if the most recent valid DID operation CID (hash) isn't known, fetch the audit log from `https://plc.directory/:did/log/audit`, identify the most recent valid operation, and get the `cid` value. if this is a recovery operation, the relevant "valid" operation to fork from may not be the most recent in the audit log
- collect updated values for all of the core data fields, including generating new secure key pairs if necessary (eg, key rotation)
- construct an "unsigned" regular operation object. include a `prev` field with the CID (hash) of the previous valid operation
- serialize the "unsigned" operation with DAG-CBOR, and sign the resulting bytes with one of the previously-existing `rotationKeys`. encode the signature as `base64url`, and use that to construct a "signed" operation object
- serialize the "signed" operation as simple JSON, and submit it via HTTP POST to `https://plc.directory/:did`
- if the HTTP status code is successful, the DID has been updated
- the DID update may be nullified by a "rotation" operation during the recovery window (currently 72 hours)

### DID Deactivation

To summarize the process of deactivating an existing `did:plc` identifier:

- if the most recent valid DID operation CID (hash) isn't known, fetch the audit log from `https://plc.directory/:did/log/audit`, identify the most recent valid operation, and get the `cid` value
- construct an "unsigned" tombstone operation object. include a `prev` field with the CID (hash) of the previous valid operation
- serialize the "unsigned" tombstone operation with DAG-CBOR, and sign the resulting bytes with one of the previously-existing `rotationKeys`. encode the signature as `base64url`, and use that to construct a "signed" tombstone operation object
- serialize the "signed" tombstone operation as simple JSON, and submit it via HTTP POST to `https://plc.directory/:did`
- if the HTTP status code is successful, the DID has been deactivated
- the DID deactivation may be nullified by a "rotation" operation during the recovery window (currently 72 hours)

### DID Resolution

PLC DIDs are resolved to a DID document (JSON) by making a simple HTTP GET request to the PLC server. The resolution endpoint is: `https://plc.directory/:did`

The PLC-specific state data (based on the most recent operation) can be fetched as a JSON object at: `https://plc.directory/:did/data`


### Audit Logs

As an additional check against abuse by the PLC server, and to promote resiliency, the set of all identifiers is enumerable, and the set of all operations for all identifiers (even "nullified" operations) can be enumerated and audited.

The log of currently-valid operations for a given DID, as JSON, can be found at: `https://plc.directory/:did/log`

The full audit history of a given DID (complete with timestamps and invalidated forked histories), as JSON, can be found at: `https://plc.directory/:did/log/audit`

To fully validate a DID document against the operation log (the per-operation signature check is sketched below):

- fetch the full audit log
- for the genesis operation, validate the DID
  - note that the genesis operation may be in the deprecated/legacy format, and should be encoded and verified in that format
  - see the "DID Creation" section above for details
- for each operation in the log, validate signatures:
  - identify the set of valid `rotationKeys` at that point in time: either the initial keys for a "genesis" operation, or the keys in the `prev` operation
  - remove any `sig` field and serialize the "unsigned" operation with DAG-CBOR, yielding bytes
  - decode the `base64url` `sig` field to bytes
  - for each of the `rotationKeys`, attempt to verify the signature against the "unsigned" bytes
  - if no key matches, there has been a trust violation; the PLC server should never have accepted the operation
- verify the correctness of "nullified" operations and the current active operation log using the rules around rotation keys and recovery windows
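
A minimal sketch of that signature check, where `verifySignature` is a hypothetical stand-in for a did:key-aware ECDSA verification routine from a real cryptographic library:

```ts
import * as dagCbor from '@ipld/dag-cbor'
import { fromString as bytesFromString } from 'uint8arrays'

// Hypothetical stand-in: verifies `sig` over `bytes` against the public key
// encoded in a did:key string (k256 or p256).
declare function verifySignature(didKey: string, bytes: Uint8Array, sig: Uint8Array): boolean

// Throws if no allowed rotation key verifies the operation's signature.
function checkOpSignature(op: Record<string, unknown>, allowedRotationKeys: string[]): void {
  // re-serialize the operation *without* the `sig` field
  const { sig, ...unsigned } = op
  const unsignedBytes = dagCbor.encode(unsigned)
  const sigBytes = bytesFromString(sig as string, 'base64url')
  const ok = allowedRotationKeys.some((key) =>
    verifySignature(key, unsignedBytes, sigBytes),
  )
  if (!ok) {
    // trust violation: the PLC server should never have accepted this op
    throw new Error('operation signature does not match any rotation key')
  }
}
```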
The complete log of operations for all DIDs on the PLC server can be enumerated efficiently (see the sketch after this list):

- HTTP endpoint: `https://plc.directory/export`
- output format: [JSON Lines](https://jsonlines.org/)
- `count` query parameter, as an integer, with a maximum of 1000 lines per request
- `after` query parameter, based on the `createdAt` timestamp, for pagination
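
A paginated crawl of this endpoint might look like the following sketch, which assumes each exported JSON line carries the `createdAt` timestamp used as the `after` cursor:

```ts
// Enumerate every exported operation, 1000 lines (the maximum) at a time.
async function* exportAll(): AsyncGenerator<Record<string, unknown>> {
  let after: string | undefined
  while (true) {
    const url = new URL('https://plc.directory/export')
    url.searchParams.set('count', '1000')
    if (after !== undefined) url.searchParams.set('after', after)
    const res = await fetch(url)
    if (!res.ok) throw new Error(`export request failed: HTTP ${res.status}`)
    // JSON Lines: one JSON object per newline-delimited line
    const lines = (await res.text()).split('\n').filter((line) => line !== '')
    for (const line of lines) {
      const entry = JSON.parse(line) as Record<string, unknown>
      after = entry.createdAt as string // cursor for the next page
      yield entry
    }
    if (lines.length < 1000) return // final page reached
  }
}
```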

## Example

```ts
// note: we use shorthand for keys and signatures for ease of reference, but
// consider them valid did:key strings and base64url signatures. CID(op) is
// shorthand for the CID of that operation's signed bytes, as described above.

// Genesis operation
const genesisOp = {
  type: 'plc_operation',
  verificationMethods: {
    atproto: 'did:key:zSigningKey',
  },
  rotationKeys: ['did:key:zRecoveryKey', 'did:key:zRotationKey'],
  alsoKnownAs: ['at://alice.test'],
  services: {
    atproto_pds: {
      type: 'AtprotoPersonalDataServer',
      endpoint: 'https://example.test',
    },
  },
  prev: null,
  sig: 'sig_from_did:key:zRotationKey',
}

// Operation to update the recovery key
const updateKeys = {
  type: 'plc_operation',
  verificationMethods: {
    atproto: 'did:key:zSigningKey',
  },
  rotationKeys: ['did:key:zNewRecoveryKey', 'did:key:zRotationKey'],
  alsoKnownAs: ['at://alice.test'],
  services: {
    atproto_pds: {
      type: 'AtprotoPersonalDataServer',
      endpoint: 'https://example.test',
    },
  },
  prev: CID(genesisOp),
  sig: 'sig_from_did:key:zRotationKey',
}

// Invalid operation that will be rejected, because did:key:zAttackerKey is
// not listed in the rotationKeys of the prev operation (updateKeys)
const invalidUpdate = {
  type: 'plc_operation',
  verificationMethods: {
    atproto: 'did:key:zAttackerKey',
  },
  rotationKeys: ['did:key:zAttackerKey'],
  alsoKnownAs: ['at://bob.test'],
  services: {
    atproto_pds: {
      type: 'AtprotoPersonalDataServer',
      endpoint: 'https://example.test',
    },
  },
  prev: CID(updateKeys),
  sig: 'sig_from_did:key:zAttackerKey',
}

// Valid recovery operation that "undoes" updateKeys: it forks from genesisOp,
// and is signed by did:key:zRecoveryKey, which has a lower index (higher
// authority) in genesisOp's rotationKeys than did:key:zRotationKey, which
// signed updateKeys. It must be submitted within the 72-hour recovery window.
const recoveryOp = {
  type: 'plc_operation',
  verificationMethods: {
    atproto: 'did:key:zSigningKey',
  },
  rotationKeys: ['did:key:zRecoveryKey'],
  alsoKnownAs: ['at://alice.test'],
  services: {
    atproto_pds: {
      type: 'AtprotoPersonalDataServer',
      endpoint: 'https://example.test',
    },
  },
  prev: CID(genesisOp),
  sig: 'sig_from_did:key:zRecoveryKey',
}
```

## Presentation as DID Document

The following PLC state data:

```ts
{
  did: 'did:plc:7iza6de2dwap2sbkpav7c6c6',
  verificationMethods: {
    atproto: 'did:key:zDnaeh9v2RmcMo13Du2d6pjUf5bZwtauYxj3n9dYjw4EZUAR7'
  },
  rotationKeys: [
    'did:key:zDnaedvvAsDE6H3BDdBejpx9ve2Tz95cymyCAKF66JbyMh1Lt',
    'did:key:zDnaeh9v2RmcMo13Du2d6pjUf5bZwtauYxj3n9dYjw4EZUAR7'
  ],
  alsoKnownAs: [
    'at://alice.test'
  ],
  services: {
    atproto_pds: {
      type: 'AtprotoPersonalDataServer',
      endpoint: 'https://example.test'
    }
  }
}
```

will be presented as the following DID document:

```ts
{
  '@context': [
    'https://www.w3.org/ns/did/v1',
    'https://w3id.org/security/suites/ecdsa-2019/v1'
  ],
  id: 'did:plc:7iza6de2dwap2sbkpav7c6c6',
  alsoKnownAs: [ 'at://alice.test' ],
  verificationMethod: [
    {
      id: '#atproto',
      type: 'EcdsaSecp256r1VerificationKey2019',
      controller: 'did:plc:7iza6de2dwap2sbkpav7c6c6',
      publicKeyMultibase: 'zSSa7w8s5aApu6td45gWTAAFkqCnaWY6ZsJ8DpyzDdYmVy4fARKqbn5F1UYBUMeVvYTBsoSoLvZnPdjd3pVHbmAHP'
    }
  ],
  service: [
    {
      id: '#atproto_pds',
      type: 'AtprotoPersonalDataServer',
      serviceEndpoint: 'https://example.test'
    }
  ]
}
```

## Possible Future Changes

The set of allowed ("blessed") public key cryptographic algorithms (aka, curves) may be expanded over time, slowly. Likewise, support for additional "blessed" CID types and parameters may be expanded over time, slowly.

The recovery time window may become configurable, within constraints, as part of the DID metadata itself.

Support for "DID Controller Delegation" could be useful (eg, in the context of atproto PDS hosts), and may be incorporated.

In the context of atproto, support for multiple "handles" for the same DID is being considered, with a single "primary" handle. But no final decision has been made yet.

We welcome proposals for small additions to make `did:plc` more generic and reusable for applications other than atproto. But no promises: atproto will remain the focus for the near future.

Moving governance of the `did:plc` method, and operation of registry servers, out of the sole control of Bluesky PBLLC is something we are enthusiastic about. Audit log snapshots, mirroring, and automated third-party auditing have all been considered as mechanisms to mitigate the centralized nature of the PLC server.

The sizes of the `verificationMethods`, `alsoKnownAs`, and `services` mappings/arrays may be specifically constrained, and the maximum DAG-CBOR operation size may be further constrained.

As an anti-abuse mechanism, the PLC server load balancer restricts the number of HTTP requests per time window. The limits are generous, and operating large services or scraping the operation log should not run into them. Specific per-DID limits on operation rate may be introduced over time: for example, no more than N operations per DID per rotation key per 24-hour window.

A "DID PLC history explorer" web interface would help make the public nature of the DID audit log more widely understood.

It is conceivable that longer DID PLC identifiers, containing more characters of the SHA-256 hash, will be supported in the future. It is also conceivable that a different hash algorithm could be allowed. Any such change would allow existing DIDs, in their existing syntax, to continue being used.

## License

This project is dual-licensed under MIT and Apache 2.0 terms:

- Apache License, Version 2.0, ([LICENSE-APACHE](https://github.com/ipfs/kubo/blob/master/LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
- MIT license ([LICENSE-MIT](https://github.com/ipfs/kubo/blob/master/LICENSE-MIT) or http://opensource.org/licenses/MIT)

Downstream projects and users may choose either license, or both, at their discretion. The motivation for this dual-licensing is the additional software patent assurance provided by Apache 2.0.