├── .npmignore ├── test ├── setup │ └── indexeddb-setup.ts ├── distribution │ ├── distributionDescriptionSpec.ts │ ├── distributionUnitTestSpec.ts │ ├── distributionDebugInfoSpec.ts │ └── distributionEngineDirectSpec.ts ├── storage │ └── referenceSpec.ts ├── utils │ └── async-test-utils.ts ├── orderModel.ts ├── specification │ ├── infiniteLoopSpec.ts │ ├── versioningSpec.ts │ ├── predecessorDuplicationSpec.ts │ └── missingFactSpec.ts ├── single-use │ ├── singleUseStoreSpec.ts │ └── singleUseForkSpec.ts ├── blogModel.ts ├── fact │ ├── factReferenceCompanySpec.ts │ ├── factReferenceSpec.ts │ └── knownHashSpec.ts ├── authorization │ └── authorizationExampleSpec.ts ├── purge │ └── realTimePurgeSpec.ts ├── cryptography │ └── keyPairSpec.ts └── http │ ├── serializerSpec.ts │ └── deserializerSpec.ts ├── .npmrc ├── documentation ├── diagrams │ ├── blog.png │ ├── blogPostRule.png │ ├── blogCommentRule.png │ ├── blogGuestBloggerRule.png │ ├── observerSubsequentLoad.gv │ ├── blog.gv │ ├── observerInitialLoad.gv │ ├── blogGuestBloggerRule.gv │ ├── blogPostRule.gv │ └── blogCommentRule.gv ├── successors.md ├── indexeddb-queue.md └── predecessor.md ├── src ├── user-identity.ts ├── util │ ├── promise.ts │ ├── encoding.ts │ ├── obj.ts │ ├── fn.ts │ └── trace.ts ├── http │ ├── authenticationProvider.ts │ ├── ContentType.ts │ ├── messages.ts │ ├── httpNetwork.ts │ ├── messageParsers.ts │ ├── serializer.ts │ └── deserializer.ts ├── fork │ ├── fork.ts │ ├── serialize.ts │ ├── pass-through-fork.ts │ ├── web-client-saver.ts │ ├── transient-fork.ts │ └── persistent-fork.ts ├── specification │ ├── declaration.ts │ └── feed-cache.ts ├── authentication │ ├── authentication.ts │ ├── authentication-noop.ts │ ├── authentication-web-client.ts │ ├── authentication-offline.ts │ └── authentication-test.ts ├── ws │ ├── types.ts │ ├── bookmark-manager.ts │ ├── inverse-specification-engine.ts │ ├── control-frame-handler.ts │ ├── protocol-router.ts │ └── wsGraphNetwork.ts ├── model │ └── user.ts ├── authorization │ ├── authorization.ts │ └── authorization-noop.ts ├── indexeddb │ ├── indexeddb-login-store.ts │ └── driver.ts ├── purge │ ├── purgeConditions.ts │ └── validate.ts ├── cryptography │ ├── key-pair.ts │ └── verify.ts ├── rules │ └── RuleSet.ts ├── managers │ ├── PurgeManager.ts │ └── QueueProcessor.ts ├── fact │ ├── sorter.ts │ └── hash.ts ├── distribution │ └── distribution-rules.ts ├── observer │ └── subscriber.ts ├── jinaga-test.ts └── storage.ts ├── .gitignore ├── types └── keypair.d.ts ├── eslint.config.mjs ├── examples ├── blog.http └── README.md ├── jest.config.js ├── scripts └── release.sh ├── SECURITY.md ├── .github └── workflows │ ├── publish.yml │ ├── publish-github.yml │ └── main.yml ├── tsconfig.test.json ├── tsconfig.json ├── issues ├── 152.md └── 1.md ├── LICENSE ├── .cursor └── rules │ ├── project-overview.mdc │ ├── network-patterns.mdc │ ├── authentication-patterns.mdc │ ├── development-workflow.mdc │ ├── storage-patterns.mdc │ ├── summary.mdc │ ├── typescript-standards.mdc │ ├── cryptography-patterns.mdc │ ├── fact-patterns.mdc │ └── implement.mdc ├── package.json └── README.md /.npmignore: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /test/setup/indexeddb-setup.ts: -------------------------------------------------------------------------------- 1 | import 'fake-indexeddb/auto'; -------------------------------------------------------------------------------- /.npmrc: 
-------------------------------------------------------------------------------- 1 | @jinaga:registry=https://npm.pkg.github.com 2 | registry=https://registry.npmjs.org 3 | -------------------------------------------------------------------------------- /documentation/diagrams/blog.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jinaga/jinaga.js/HEAD/documentation/diagrams/blog.png -------------------------------------------------------------------------------- /src/user-identity.ts: -------------------------------------------------------------------------------- 1 | 2 | export interface UserIdentity { 3 | provider: string; 4 | id: string; 5 | } 6 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | .idea/ 3 | dist/ 4 | .vscode 5 | npm-debug.log 6 | .DS_Store 7 | .env.local 8 | jinaga-*.tgz 9 | -------------------------------------------------------------------------------- /documentation/diagrams/blogPostRule.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jinaga/jinaga.js/HEAD/documentation/diagrams/blogPostRule.png -------------------------------------------------------------------------------- /documentation/diagrams/blogCommentRule.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jinaga/jinaga.js/HEAD/documentation/diagrams/blogCommentRule.png -------------------------------------------------------------------------------- /documentation/diagrams/blogGuestBloggerRule.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jinaga/jinaga.js/HEAD/documentation/diagrams/blogGuestBloggerRule.png -------------------------------------------------------------------------------- /documentation/diagrams/observerSubsequentLoad.gv: -------------------------------------------------------------------------------- 1 | digraph { 2 | rankdir=TB 3 | 4 | Uninitialized -> Loaded [label=" read"] 5 | Loaded -> Loaded [label=" fetch"] 6 | } -------------------------------------------------------------------------------- /src/util/promise.ts: -------------------------------------------------------------------------------- 1 | export function delay(ms: number) { 2 | return new Promise((resolve, reject) => { 3 | setTimeout(() => resolve(), ms); 4 | }); 5 | } 6 | -------------------------------------------------------------------------------- /types/keypair.d.ts: -------------------------------------------------------------------------------- 1 | declare module 'keypair' { 2 | function Keypair(options: { bits: number }): { private: string, public: string }; 3 | export = Keypair; 4 | } 5 | -------------------------------------------------------------------------------- /documentation/diagrams/blog.gv: -------------------------------------------------------------------------------- 1 | digraph { 2 | rankdir=BT 3 | 4 | Site -> User [label=" creator "] 5 | Post -> Site 6 | Comment -> Post, User [label=" author "] 7 | GuestBlogger -> Site, User 8 | } -------------------------------------------------------------------------------- /documentation/diagrams/observerInitialLoad.gv: -------------------------------------------------------------------------------- 1 | digraph { 2 | rankdir=TB; 3 | 4 | 
Uninitialized -> Loading [label=" immediate"] 5 | Loading -> Loaded [label=" fetch and read"] 6 | Loading -> Error [label=" error"] 7 | }
-------------------------------------------------------------------------------- /src/http/authenticationProvider.ts: --------------------------------------------------------------------------------
1 | export interface HttpHeaders { 2 | "Authorization"?: string; 3 | [key: string]: string | undefined; 4 | } 5 | 6 | export interface AuthenticationProvider { 7 | getHeaders(): Promise<HttpHeaders>; 8 | reauthenticate(): Promise<boolean>; 9 | }
-------------------------------------------------------------------------------- /eslint.config.mjs: --------------------------------------------------------------------------------
1 | import globals from "globals"; 2 | import tseslint from "typescript-eslint"; 3 | 4 | export default [ 5 | { 6 | files: ["**/*.{js,mjs,cjs,ts}"], 7 | languageOptions: { globals: globals.browser }, 8 | ...tseslint.configs.recommended, 9 | }, 10 | ]; 11 |
-------------------------------------------------------------------------------- /src/fork/fork.ts: --------------------------------------------------------------------------------
1 | import { FactEnvelope, FactReference } from "../storage"; 2 | 3 | export interface Fork { 4 | save(envelopes: FactEnvelope[]): Promise<void>; 5 | load(references: FactReference[]): Promise<FactEnvelope[]>; 6 | processQueueNow(): Promise<void>; 7 | close(): Promise<void>; 8 | }
-------------------------------------------------------------------------------- /documentation/diagrams/blogGuestBloggerRule.gv: --------------------------------------------------------------------------------
1 | digraph { 2 | rankdir=BT 3 | 4 | Site -> User [label=" creator " color=gray fontcolor=gray] 5 | Post -> Site 6 | Comment -> Post, User [label=" author " color=gray fontcolor=gray] 7 | GuestBlogger -> Site, User 8 | 9 | Comment [color=gray fontcolor=gray] 10 | }
-------------------------------------------------------------------------------- /src/specification/declaration.ts: --------------------------------------------------------------------------------
1 | import { FactRecord, FactReference } from "../storage"; 2 | 3 | export interface DeclaredFact { 4 | reference: FactReference; 5 | fact: FactRecord | null; 6 | } 7 | 8 | export type Declaration = { 9 | name: string; 10 | declared: DeclaredFact; 11 | }[]; 12 |
-------------------------------------------------------------------------------- /src/authentication/authentication.ts: --------------------------------------------------------------------------------
1 | import { LoginResponse } from '../http/messages'; 2 | import { FactEnvelope, FactRecord } from '../storage'; 3 | 4 | export interface Authentication { 5 | login(): Promise<LoginResponse>; 6 | local(): Promise<FactRecord>; 7 | authorize(envelopes: FactEnvelope[]): Promise<FactEnvelope[]>; 8 | }
-------------------------------------------------------------------------------- /documentation/diagrams/blogPostRule.gv: --------------------------------------------------------------------------------
1 | digraph { 2 | rankdir=BT 3 | 4 | Site -> User [label=" creator "] 5 | Post -> Site 6 | Comment -> Post, User [label=" author " color=gray fontcolor=gray] 7 | GuestBlogger -> Site, User [color=gray] 8 | 9 | Comment [color=gray fontcolor=gray] 10 | GuestBlogger [color=gray fontcolor=gray] 11 | }
-------------------------------------------------------------------------------- /src/http/ContentType.ts: --------------------------------------------------------------------------------
1 | export const ContentTypeText =
"text/plain" as const; 2 | export const ContentTypeJson = "application/json" as const; 3 | export const ContentTypeGraph = "application/x-jinaga-graph-v1" as const; 4 | 5 | export type PostContentType = typeof ContentTypeText | typeof ContentTypeJson | typeof ContentTypeGraph; 6 | export type PostAccept = typeof ContentTypeJson | undefined; 7 | -------------------------------------------------------------------------------- /examples/blog.http: -------------------------------------------------------------------------------- 1 | POST {{replicatorUrl}}/write HTTP/1.1 2 | Authorization: oauth2 authorization_code 3 | Content-Type: text/plain 4 | 5 | let creator: Jinaga.User = me 6 | let site: Blog.Site = {creator, domain: "michaelperry.net"} 7 | 8 | let post: Blog.Post = { site, author: creator, createdAt: "2023-06-22T13:36:00.000Z" } 9 | let publish: Blog.Post.Publish = { post, date: "2023-06-22T13:37:00.000Z" } -------------------------------------------------------------------------------- /src/util/encoding.ts: -------------------------------------------------------------------------------- 1 | import { encode as encodeBase64 } from '@stablelib/base64'; 2 | import { hash } from '@stablelib/sha512'; 3 | import { encode as encodeUTF8 } from '@stablelib/utf8'; 4 | 5 | export function computeStringHash(str: string) { 6 | const bytes = encodeUTF8(str); 7 | const result = hash(bytes); 8 | const b64 = encodeBase64(result); 9 | return b64; 10 | } 11 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | /** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */ 2 | module.exports = { 3 | preset: 'ts-jest', 4 | testEnvironment: 'node', 5 | testMatch: [ 6 | '**/test/**/*Spec.ts', 7 | ], 8 | reporters: ['jest-progress-bar-reporter'], 9 | moduleNameMapper: { 10 | '^@src$': '/src', 11 | }, 12 | setupFilesAfterEnv: ['/test/setup/indexeddb-setup.ts'], 13 | }; -------------------------------------------------------------------------------- /scripts/release.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | # The parameter must be patch, minor or major 5 | if [ "$1" != "patch" ] && [ "$1" != "minor" ] && [ "$1" != "major" ]; then 6 | echo "Usage: $0 [patch|minor|major]" 7 | exit 1 8 | fi 9 | 10 | git c main 11 | git pull 12 | npm version $1 13 | git push --follow-tags 14 | gh release create v$(node -p "require('./package.json').version") --generate-notes --verify-tag 15 | -------------------------------------------------------------------------------- /documentation/diagrams/blogCommentRule.gv: -------------------------------------------------------------------------------- 1 | digraph { 2 | rankdir=BT 3 | 4 | Site -> User [label=" creator " color=gray fontcolor=gray] 5 | Post -> Site [label=" author " color=gray fontcolor=gray] 6 | Comment -> Post [color=gray fontcolor=gray] 7 | Comment -> User [label=" author "] 8 | GuestBlogger -> Site, User [color=gray] 9 | 10 | Post [color=gray fontcolor=gray] 11 | Site [color=gray fontcolor=gray] 12 | GuestBlogger [color=gray fontcolor=gray] 13 | } -------------------------------------------------------------------------------- /src/fork/serialize.ts: -------------------------------------------------------------------------------- 1 | import { LoadMessage, SaveMessage } from '../http/messages'; 2 | import { FactEnvelope, FactReference } from '../storage'; 3 | 4 | 
export function serializeSave(envelopes: FactEnvelope[]) : SaveMessage { 5 | return { 6 | facts: envelopes.map(e => e.fact) 7 | }; 8 | } 9 | 10 | export function serializeLoad(references: FactReference[]) : LoadMessage { 11 | return { 12 | references: references 13 | }; 14 | } 15 |
-------------------------------------------------------------------------------- /src/ws/types.ts: --------------------------------------------------------------------------------
1 | export type ControlKeyword = "BOOK" | "ERR" | "SUB" | "UNSUB" | "ACK"; 2 | 3 | export interface ControlFrame { 4 | keyword: ControlKeyword; 5 | payload: string[]; 6 | } 7 | 8 | export interface ProtocolMessageRouterCallbacks { 9 | onGraphLine: (line: string) => void; 10 | } 11 | 12 | export interface AuthorizationContext { 13 | userIdentity?: { 14 | provider: string; 15 | id: string; 16 | } | null; 17 | metadata?: Record<string, unknown>; 18 | }
-------------------------------------------------------------------------------- /SECURITY.md: --------------------------------------------------------------------------------
1 | # Security Policy 2 | 3 | ## Supported Versions 4 | 5 | | Version | Supported | 6 | | ------- | ------------------ | 7 | | 2.5.x | :white_check_mark: | 8 | | < 2.5 | :x: | 9 | 10 | ## Reporting a Vulnerability 11 | 12 | Please report vulnerabilities to michael@qedcode.com. 13 | Expect a confirmation of receipt within 24 hours, and analysis within 72 hours. 14 | If the issue cannot be resolved within that timeframe, we will coordinate a disclosure schedule. 15 | 16 |
-------------------------------------------------------------------------------- /.github/workflows/publish.yml: --------------------------------------------------------------------------------
1 | name: Node.js Package 2 | on: 3 | release: 4 | types: [created] 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/checkout@v4 10 | # Setup .npmrc file to publish to npm 11 | - uses: actions/setup-node@v4 12 | with: 13 | node-version: '20.x' 14 | registry-url: 'https://registry.npmjs.org' 15 | - run: npm install 16 | - run: npm publish 17 | env: 18 | NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} 19 |
-------------------------------------------------------------------------------- /src/util/obj.ts: --------------------------------------------------------------------------------
1 | export function toJSON(value: any) { 2 | if (hasProperty(value, "toJSON")) { 3 | return value.toJSON(); 4 | } 5 | else { 6 | return value; 7 | } 8 | } 9 | 10 | function hasProperty(value: any, name: string) { 11 | while (value !== null) { 12 | if (typeof(value) !== "object") { 13 | return false; 14 | } 15 | if (value.hasOwnProperty(name)) { 16 | return true; 17 | } 18 | value = Object.getPrototypeOf(value); 19 | } 20 | return false; 21 | }
-------------------------------------------------------------------------------- /src/authentication/authentication-noop.ts: --------------------------------------------------------------------------------
1 | import { LoginResponse } from "../http/messages"; 2 | import { FactEnvelope, FactRecord } from "../storage"; 3 | import { Authentication } from "./authentication"; 4 | 5 | export class AuthenticationNoOp implements Authentication { 6 | login(): Promise<LoginResponse> { 7 | throw new Error('No logged in user.'); 8 | } 9 | local(): Promise<FactRecord> { 10 | throw new Error('No persistent device.'); 11 | } 12 | authorize(envelopes: FactEnvelope[]): Promise<FactEnvelope[]> { 13 | return Promise.resolve(envelopes); 14 | } 15 | }
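// --- Illustrative usage sketch added by the editor; not part of authentication-noop.ts ---
// Shows how code written against the Authentication interface treats AuthenticationNoOp and a
// signing implementation interchangeably. The helper below is hypothetical and assumes only the
// imports already present in this file.
export async function authorizeEnvelopes(
    authentication: Authentication,
    envelopes: FactEnvelope[]
): Promise<FactEnvelope[]> {
    // AuthenticationNoOp resolves with the envelopes unchanged; other implementations
    // may attach signatures or reject before resolving.
    return await authentication.authorize(envelopes);
}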
-------------------------------------------------------------------------------- /src/model/user.ts: -------------------------------------------------------------------------------- 1 | export class User { 2 | static Type = "Jinaga.User" as const; 3 | type = User.Type; 4 | 5 | constructor( 6 | public publicKey: string 7 | ) { } 8 | } 9 | 10 | export class UserName { 11 | static Type = "Jinaga.User.Name" as const; 12 | public type = UserName.Type; 13 | 14 | constructor( 15 | public prior: UserName[], 16 | public user: User, 17 | public value: string 18 | ) { } 19 | } 20 | 21 | export class Device { 22 | static Type = "Jinaga.Device" as const; 23 | public type = Device.Type; 24 | 25 | constructor( 26 | public publicKey: string 27 | ) { } 28 | } -------------------------------------------------------------------------------- /src/authentication/authentication-web-client.ts: -------------------------------------------------------------------------------- 1 | import { WebClient } from '../http/web-client'; 2 | import { FactEnvelope, FactRecord } from '../storage'; 3 | import { Authentication } from './authentication'; 4 | 5 | export class AuthenticationWebClient implements Authentication { 6 | constructor(private client: WebClient) { 7 | } 8 | 9 | login() { 10 | return this.client.login(); 11 | } 12 | 13 | local(): Promise { 14 | throw new Error('Local device has no persistence.'); 15 | } 16 | 17 | authorize(envelopes: FactEnvelope[]): Promise { 18 | return Promise.resolve(envelopes); 19 | } 20 | } -------------------------------------------------------------------------------- /tsconfig.test.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es6", 4 | "module": "commonjs", 5 | "sourceMap": true, 6 | "declaration": true, 7 | "strict": true, 8 | "noImplicitAny": true, 9 | "forceConsistentCasingInFileNames": true, 10 | "skipLibCheck": true, 11 | "esModuleInterop": true, 12 | "outDir": "dist", 13 | "baseUrl": ".", 14 | "paths": { 15 | "@src": ["./src"], 16 | "*": [ 17 | "./node_modules/*", 18 | "./types/*" 19 | ] 20 | } 21 | }, 22 | "include": [ 23 | "test/**/*.ts" 24 | ] 25 | } -------------------------------------------------------------------------------- /src/fork/pass-through-fork.ts: -------------------------------------------------------------------------------- 1 | import { FactEnvelope, FactReference, Storage } from "../storage"; 2 | import { Fork } from "./fork"; 3 | 4 | export class PassThroughFork implements Fork { 5 | constructor( 6 | private storage: Storage 7 | ) { } 8 | 9 | async close(): Promise { 10 | return Promise.resolve(); 11 | } 12 | 13 | save(envelopes: FactEnvelope[]): Promise { 14 | return Promise.resolve(); 15 | } 16 | 17 | load(references: FactReference[]): Promise { 18 | return this.storage.load(references); 19 | } 20 | 21 | processQueueNow(): Promise { 22 | return Promise.resolve(); 23 | } 24 | } -------------------------------------------------------------------------------- /src/http/messages.ts: -------------------------------------------------------------------------------- 1 | import { FactRecord, FactReference } from '../storage'; 2 | 3 | export interface ProfileMessage { 4 | displayName: string; 5 | }; 6 | 7 | export interface LoginResponse { 8 | userFact: FactRecord, 9 | profile: ProfileMessage 10 | }; 11 | 12 | export interface SaveMessage { 13 | facts: FactRecord[] 14 | }; 15 | 16 | export interface LoadMessage { 17 | references: FactReference[] 18 | }; 19 | 20 | export 
interface LoadResponse { 21 | facts: FactRecord[] 22 | }; 23 | 24 | export interface FeedsResponse { 25 | feeds: string[]; 26 | } 27 | 28 | export interface FeedResponse { 29 | references: FactReference[]; 30 | bookmark: string; 31 | } 32 | -------------------------------------------------------------------------------- /examples/README.md: -------------------------------------------------------------------------------- 1 | # Examples 2 | 3 | To run examples, install the [httpYac](https://marketplace.visualstudio.com/items?itemName=anweber.vscode-httpyac) extension. 4 | 5 | Create a replicator. 6 | When you set up authentication, make sure to add the callback URL `http://localhost:3000/callback` 7 | 8 | Create a file called `.env.local` in the `examples` directory. Enter the URL of a replicator. 9 | 10 | ``` 11 | replicatorUrl=https://repdev.jinaga.com/xxxxxxxxxxxxxxx 12 | oauth2_tokenEndpoint=https://repdev.jinaga.com/xxxxxxxxxxxxxxx/auth/token 13 | oauth2_authorizationEndpoint=https://repdev.jinaga.com/xxxxxxxxxxxxxxx/auth/apple 14 | oauth2_clientId=xxxxxxxxxxxxxxx 15 | oauth2_usePkce=true 16 | ``` 17 | 18 | Then choose your favorite example and run it. -------------------------------------------------------------------------------- /test/distribution/distributionDescriptionSpec.ts: -------------------------------------------------------------------------------- 1 | import { DistributionRules, describeDistributionRules } from "@src"; 2 | import { distribution } from "../blogModel"; 3 | 4 | describe("Distribution rules from description", () => { 5 | it("should be able to save distribution rules", () => { 6 | const description = describeDistributionRules(distribution); 7 | expect(description).not.toBeNull(); 8 | }); 9 | 10 | it("should be able to load distribution rules", () => { 11 | const description = describeDistributionRules(distribution); 12 | const loaded = DistributionRules.loadFromDescription(description); 13 | const roundTrip = describeDistributionRules(_ => loaded); 14 | expect(roundTrip).toEqual(description); 15 | }); 16 | }); -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es6", 4 | "module": "CommonJS", 5 | "moduleResolution": "node", 6 | "sourceMap": true, 7 | "declaration": true, 8 | "declarationMap": true, 9 | "strict": true, 10 | "noImplicitAny": true, 11 | "forceConsistentCasingInFileNames": true, 12 | "skipLibCheck": true, 13 | "esModuleInterop": true, 14 | "outDir": "dist", 15 | "rootDir": "src", 16 | "baseUrl": ".", 17 | "paths": { 18 | "@src": ["./src"], 19 | "*": [ 20 | "./node_modules/*", 21 | "./types/*" 22 | ] 23 | } 24 | }, 25 | "include": [ 26 | "src/**/*.ts" 27 | ], 28 | "exclude": [ 29 | "node_modules", 30 | "dist" 31 | ] 32 | } -------------------------------------------------------------------------------- /src/authorization/authorization.ts: -------------------------------------------------------------------------------- 1 | import { Specification } from "../specification/specification"; 2 | import { FactEnvelope, FactFeed, FactRecord, FactReference, ProjectedResult, ReferencesByName } from "../storage"; 3 | import { UserIdentity } from "../user-identity"; 4 | 5 | export interface Authorization { 6 | getOrCreateUserFact(userIdentity: UserIdentity): Promise; 7 | read(userIdentity: UserIdentity | null, start: FactReference[], specification: Specification): Promise; 8 | 
feed(userIdentity: UserIdentity | null, feed: Specification, start: FactReference[], bookmark: string): Promise; 9 | load(userIdentity: UserIdentity | null, references: FactReference[]): Promise; 10 | save(userIdentity: UserIdentity | null, facts: FactEnvelope[]): Promise; 11 | verifyDistribution(userIdentity: UserIdentity | null, feeds: Specification[], namedStart: ReferencesByName): Promise; 12 | } -------------------------------------------------------------------------------- /issues/152.md: -------------------------------------------------------------------------------- 1 | ### Updated Details for Issue #152 2 | 3 | 1. **Client Recovery Process**: 4 | - In case of a mismatch, the client should abandon its current `GraphSerializer` and create a new one. This ensures that the client can recover gracefully from a hash mismatch without requiring a full re-upload of all predecessors. 5 | 6 | 2. **Error Code for Mismatches**: 7 | - The replicator should return a specific HTTP status code (e.g., `409 Conflict`) when the starting hash does not match its stored final hash. This provides clear guidance to the client about the need to reset the stream. 8 | 9 | 3. **Session-Based Hashing Advantages**: 10 | - The state-based hashing mechanism eliminates the need for a session-specific graph ID, streamlining the protocol and reducing reliance on external session management. 11 | 12 | 4. **Streaming Use Case**: 13 | - This protocol aligns well with streaming scenarios, allowing facts to be sent incrementally and processed in real-time by the replicator. -------------------------------------------------------------------------------- /src/fork/web-client-saver.ts: -------------------------------------------------------------------------------- 1 | import { WebClient } from "../http/web-client"; 2 | import { Saver } from "../managers/QueueProcessor"; 3 | import { Queue } from "../storage"; 4 | import { Trace } from "../util/trace"; 5 | 6 | /** 7 | * A Saver implementation that uses a WebClient to save facts. 8 | */ 9 | export class WebClientSaver implements Saver { 10 | constructor( 11 | private readonly client: WebClient, 12 | private readonly queue: Queue 13 | ) { } 14 | 15 | /** 16 | * Saves facts to the server and removes them from the queue. 17 | */ 18 | async save(): Promise { 19 | const envelopes = await this.queue.peek(); 20 | if (envelopes.length > 0) { 21 | try { 22 | await this.client.saveWithRetry(envelopes); 23 | await this.queue.dequeue(envelopes); 24 | } 25 | catch (error) { 26 | Trace.error(error); 27 | } 28 | } 29 | } 30 | } -------------------------------------------------------------------------------- /src/ws/bookmark-manager.ts: -------------------------------------------------------------------------------- 1 | export class BookmarkManager { 2 | private readonly bookmarks = new Map(); 3 | private counter = 0; 4 | 5 | getBookmark(feed: string): string { 6 | return this.bookmarks.get(feed) ?? ""; 7 | } 8 | 9 | setBookmark(feed: string, bookmark: string): void { 10 | this.bookmarks.set(feed, bookmark); 11 | } 12 | 13 | async advanceBookmark(feed: string): Promise { 14 | const next = `${Date.now()}:${this.counter++}`; 15 | this.bookmarks.set(feed, next); 16 | return next; 17 | } 18 | 19 | /** 20 | * If a bookmark is already known for the feed and differs from the provided value, 21 | * return the known bookmark so callers can synchronize the client. 22 | * Returns null if no sync is necessary. 
23 | */ 24 | syncBookmarkIfMismatch(feed: string, provided: string): string | null { 25 | const current = this.bookmarks.get(feed); 26 | if (current && current !== provided) { 27 | return current; 28 | } 29 | return null; 30 | } 31 | } -------------------------------------------------------------------------------- /src/ws/inverse-specification-engine.ts: -------------------------------------------------------------------------------- 1 | import { Specification } from "../specification/specification"; 2 | import { ProjectedResult } from "../storage"; 3 | import { SpecificationListener } from "../observable/observable"; 4 | 5 | export type AddListenerFn = ( 6 | specification: Specification, 7 | onResult: (results: ProjectedResult[]) => Promise 8 | ) => SpecificationListener; 9 | 10 | export type RemoveListenerFn = (listener: SpecificationListener) => void; 11 | 12 | export class InverseSpecificationEngine { 13 | constructor( 14 | private readonly addListener: AddListenerFn, 15 | private readonly removeListener: RemoveListenerFn 16 | ) {} 17 | 18 | addSpecificationListener( 19 | specification: Specification, 20 | onResult: (results: ProjectedResult[]) => Promise 21 | ): SpecificationListener { 22 | return this.addListener(specification, onResult); 23 | } 24 | 25 | removeSpecificationListener(listener: SpecificationListener): void { 26 | this.removeListener(listener); 27 | } 28 | } -------------------------------------------------------------------------------- /.github/workflows/publish-github.yml: -------------------------------------------------------------------------------- 1 | name: Publish to GitHub Registry 2 | on: 3 | workflow_dispatch: 4 | jobs: 5 | publish: 6 | runs-on: ubuntu-latest 7 | steps: 8 | - uses: actions/checkout@v4 9 | with: 10 | fetch-depth: 0 11 | - uses: actions/setup-node@v4 12 | with: 13 | node-version: '20.x' 14 | registry-url: 'https://npm.pkg.github.com' 15 | - run: npm install 16 | - name: Generate Prerelease Version 17 | run: | 18 | CURRENT_VERSION=$(node -p "require('./package.json').version") 19 | LAST_TAG=$(git describe --tags --abbrev=0) 20 | COMMIT_COUNT=$(git rev-list --count ${LAST_TAG}..HEAD) 21 | PRERELEASE_VERSION=$(node -p "require('semver').inc('$CURRENT_VERSION', 'prerelease', 'beta.' 
+ '$COMMIT_COUNT').replace(/\.0$/, '')") 22 | npm version $PRERELEASE_VERSION --no-git-tag-version 23 | - run: npm publish --registry=https://npm.pkg.github.com 24 | env: 25 | NODE_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }} -------------------------------------------------------------------------------- /src/indexeddb/indexeddb-login-store.ts: -------------------------------------------------------------------------------- 1 | import { FactRecord } from '../storage'; 2 | import { execRequest, withDatabase, withTransaction } from './driver'; 3 | 4 | export interface LoginRecord { 5 | userFact: FactRecord; 6 | displayName: string; 7 | } 8 | 9 | export class IndexedDBLoginStore { 10 | constructor ( 11 | private indexName: string 12 | ) { } 13 | 14 | saveLogin(sessionToken: string, userFact: FactRecord, displayName: string) { 15 | return withDatabase(this.indexName, db => { 16 | return withTransaction(db, ['login'], 'readwrite', async tx => { 17 | const loginObjectStore = tx.objectStore('login'); 18 | await execRequest(loginObjectStore.put({ userFact, displayName }, sessionToken)); 19 | }); 20 | }); 21 | } 22 | 23 | loadLogin(sessionToken: string): Promise { 24 | return withDatabase(this.indexName, async db => { 25 | return withTransaction(db, ['login'], 'readonly', tx => { 26 | const loginObjectStore = tx.objectStore('login'); 27 | return execRequest(loginObjectStore.get(sessionToken)); 28 | }); 29 | }); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2022 Michael L Perry 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | 23 | -------------------------------------------------------------------------------- /src/purge/purgeConditions.ts: -------------------------------------------------------------------------------- 1 | import { describeSpecification } from "../specification/description"; 2 | import { SpecificationOf } from "../specification/model"; 3 | import { Specification } from "../specification/specification"; 4 | 5 | export class PurgeConditions { 6 | static empty: PurgeConditions = new PurgeConditions([]); 7 | 8 | constructor( 9 | public specifications: Specification[] 10 | ) { } 11 | 12 | whenExists(specification: SpecificationOf): PurgeConditions { 13 | return new PurgeConditions([ 14 | ...this.specifications, 15 | specification.specification 16 | ]); 17 | } 18 | 19 | with(fn: (p: PurgeConditions) => PurgeConditions): PurgeConditions { 20 | return fn(this); 21 | } 22 | 23 | merge(purgeConditions: PurgeConditions): PurgeConditions { 24 | return new PurgeConditions([ 25 | ...this.specifications, 26 | ...purgeConditions.specifications 27 | ]); 28 | } 29 | 30 | saveToDescription(): string { 31 | const specificationDescriptions = this.specifications.map(s => describeSpecification(s, 1)).join(""); 32 | return `purge {\n${specificationDescriptions}}\n`; 33 | } 34 | } -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | # This is a basic workflow to help you get started with Actions 2 | 3 | name: CI 4 | 5 | # Controls when the workflow will run 6 | on: 7 | # Triggers the workflow on push or pull request events but only for the main branch 8 | push: 9 | branches: [ main ] 10 | pull_request: 11 | branches: [ main ] 12 | 13 | # Allows you to run this workflow manually from the Actions tab 14 | workflow_dispatch: 15 | 16 | # A workflow run is made up of one or more jobs that can run sequentially or in parallel 17 | jobs: 18 | # This workflow contains a single job called "build" 19 | build: 20 | # The type of runner that the job will run on 21 | runs-on: ubuntu-latest 22 | 23 | # Steps represent a sequence of tasks that will be executed as part of the job 24 | steps: 25 | # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it 26 | - uses: actions/checkout@v4 27 | 28 | - uses: actions/setup-node@v4 29 | with: 30 | node-version: '20.x' 31 | registry-url: 'https://registry.npmjs.org' 32 | 33 | - name: Build and Test 34 | run: | 35 | npm ci 36 | npm run build 37 | npm test 38 | -------------------------------------------------------------------------------- /src/purge/validate.ts: -------------------------------------------------------------------------------- 1 | import { Condition, ExistentialCondition, Match, Specification } from "../specification/specification"; 2 | 3 | export function validatePurgeSpecification(specification: Specification): string[] { 4 | // Validate that the specification has only one given. 5 | if (specification.given.length !== 1) { 6 | return ["A purge specification must have exactly one given."]; 7 | } 8 | var purgeRoot = specification.given[0]; 9 | 10 | // Search for negative existential conditions. 11 | // Those indicate that the specification will reverse a purge. 
12 | var failures: string[] = specification.matches.map(match => match.conditions 13 | .filter(isNegativeExistentialCondition) 14 | .map(condition => 15 | `A specified purge condition would reverse the purge of ${purgeRoot.label.type} with ${describeTuple(condition.matches)}.` 16 | ) 17 | ).flat(); 18 | return failures; 19 | } 20 | 21 | function isNegativeExistentialCondition(condition: Condition): condition is ExistentialCondition { 22 | return condition.type === "existential" && !condition.exists; 23 | } 24 | 25 | function describeTuple(matches: Match[]): string { 26 | return matches.map(match => match.unknown.type).join(", "); 27 | } -------------------------------------------------------------------------------- /src/ws/control-frame-handler.ts: -------------------------------------------------------------------------------- 1 | import { ControlFrame } from "./types"; 2 | 3 | export class ControlFrameHandler { 4 | constructor( 5 | private readonly onBookmark: (feed: string, bookmark: string) => void, 6 | private readonly onErrorMessage: (feed: string, message: string) => void 7 | ) {} 8 | 9 | handle(frame: ControlFrame): void { 10 | const { keyword, payload } = frame; 11 | if (keyword === "BOOK") { 12 | if (payload.length !== 2) { 13 | throw new Error(`Invalid BOOK frame payload length: ${payload.length}`); 14 | } 15 | const feed = JSON.parse(payload[0]) as string; 16 | const bookmark = JSON.parse(payload[1]) as string; 17 | this.onBookmark(feed, bookmark); 18 | return; 19 | } 20 | if (keyword === "ERR") { 21 | if (payload.length !== 2) { 22 | throw new Error(`Invalid ERR frame payload length: ${payload.length}`); 23 | } 24 | const feed = JSON.parse(payload[0]) as string; 25 | const message = JSON.parse(payload[1]) as string; 26 | this.onErrorMessage(feed, message); 27 | return; 28 | } 29 | if (keyword === "ACK") { 30 | // ACK confirms subscription is active, payload contains feed 31 | // Currently no action needed, but could be used for tracking 32 | return; 33 | } 34 | // Ignore SUB/UNSUB sent from server (defensive) 35 | } 36 | } -------------------------------------------------------------------------------- /test/storage/referenceSpec.ts: -------------------------------------------------------------------------------- 1 | import 'source-map-support/register'; 2 | 3 | import { uniqueFactReferences } from '@src'; 4 | 5 | describe('Fact reference', () => { 6 | 7 | it('should find unique in empty list', () => { 8 | const unique = uniqueFactReferences([]); 9 | expect(unique.length).toEqual(0); 10 | }); 11 | 12 | it('should find unique in singleton', () => { 13 | const unique = uniqueFactReferences([{type:'', hash:''}]); 14 | expect(unique.length).toEqual(1); 15 | }); 16 | 17 | it('should find unique in double', () => { 18 | const unique = uniqueFactReferences([{type:'', hash:''}, {type:'', hash:''}]); 19 | expect(unique.length).toEqual(1); 20 | }); 21 | 22 | it('should find unique in same type', () => { 23 | const unique = uniqueFactReferences([{type:'a', hash:''}, {type:'a', hash:''}]); 24 | expect(unique.length).toEqual(1); 25 | }); 26 | 27 | it('should find unique in different type', () => { 28 | const unique = uniqueFactReferences([{type:'a', hash:''}, {type:'b', hash:''}]); 29 | expect(unique.length).toEqual(2); 30 | }); 31 | 32 | it('should find unique in same hash', () => { 33 | const unique = uniqueFactReferences([{type:'a', hash:'x'}, {type:'a', hash:'x'}]); 34 | expect(unique.length).toEqual(1); 35 | }); 36 | 37 | it('should find unique in different hash', () => { 38 | const 
unique = uniqueFactReferences([{type:'a', hash:'x'}, {type:'a', hash:'y'}]); 39 | expect(unique.length).toEqual(2); 40 | }); 41 | 42 | });
-------------------------------------------------------------------------------- /src/authentication/authentication-offline.ts: --------------------------------------------------------------------------------
1 | import { LoginResponse } from '../http/messages'; 2 | import { WebClient } from '../http/web-client'; 3 | import { IndexedDBLoginStore } from '../indexeddb/indexeddb-login-store'; 4 | import { FactEnvelope, FactRecord } from '../storage'; 5 | import { Authentication } from './authentication'; 6 | 7 | export class AuthenticationOffline implements Authentication { 8 | constructor( 9 | private store: IndexedDBLoginStore, 10 | private client: WebClient 11 | ) { } 12 | 13 | async login() { 14 | try { 15 | return await this.loginRemote(); 16 | } 17 | catch (err) { 18 | if (err === 'Unauthorized') { 19 | throw err; 20 | } 21 | 22 | try { 23 | return await this.loginLocal(); 24 | } 25 | catch (err2) { 26 | throw err; 27 | } 28 | } 29 | } 30 | 31 | local(): Promise<FactRecord> { 32 | throw new Error('Local device has no persistence.'); 33 | } 34 | 35 | authorize(envelopes: FactEnvelope[]): Promise<FactEnvelope[]> { 36 | return Promise.resolve(envelopes); 37 | } 38 | 39 | private async loginRemote() { 40 | const result = await this.client.login(); 41 | if (result && result.userFact && result.profile) { 42 | await this.store.saveLogin('token', result.userFact, result.profile.displayName); 43 | } 44 | return result; 45 | } 46 | 47 | private async loginLocal(): Promise<LoginResponse> { 48 | const result = await this.store.loadLogin('token'); 49 | return { 50 | userFact: result.userFact, 51 | profile: { 52 | displayName: result.displayName 53 | } 54 | }; 55 | } 56 | }
-------------------------------------------------------------------------------- /src/authorization/authorization-noop.ts: --------------------------------------------------------------------------------
1 | import { FactManager } from "../managers/factManager"; 2 | import { Specification } from "../specification/specification"; 3 | import { FactEnvelope, FactFeed, FactRecord, FactReference, ProjectedResult, ReferencesByName, Storage } from "../storage"; 4 | import { UserIdentity } from "../user-identity"; 5 | import { Authorization } from './authorization'; 6 | import { Forbidden } from './authorization-engine'; 7 | 8 | export class AuthorizationNoOp implements Authorization { 9 | constructor( 10 | private factManager: FactManager, 11 | private store: Storage 12 | ) { } 13 | 14 | getOrCreateUserFact(userIdentity: UserIdentity): Promise<FactRecord> { 15 | throw new Forbidden(); 16 | } 17 | 18 | read(userIdentity: UserIdentity, start: FactReference[], specification: Specification): Promise<ProjectedResult[]> { 19 | return this.factManager.read(start, specification); 20 | } 21 | 22 | load(userIdentity: UserIdentity, references: FactReference[]): Promise<FactEnvelope[]> { 23 | return this.factManager.load(references); 24 | } 25 | 26 | feed(userIdentity: UserIdentity, specification: Specification, start: FactReference[], bookmark: string): Promise<FactFeed> { 27 | return this.store.feed(specification, start, bookmark); 28 | } 29 | 30 | async save(userIdentity: UserIdentity, envelopes: FactEnvelope[]): Promise<FactEnvelope[]> { 31 | return await this.factManager.save(envelopes); 32 | } 33 | 34 | verifyDistribution(userIdentity: UserIdentity, feeds: Specification[], namedStart: ReferencesByName): Promise<void> { 35 | return Promise.resolve(); 36 | } 37 | }
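// --- Illustrative usage sketch added by the editor; not part of authorization-noop.ts ---
// Callers program against the Authorization interface, so the no-op variant can stand in for a
// rule-checking implementation during local development. The helper below is hypothetical and
// assumes only the imports already present in this file.
export async function saveWithAuthorization(
    authorization: Authorization,
    userIdentity: UserIdentity,
    envelopes: FactEnvelope[]
): Promise<FactEnvelope[]> {
    // A rule-checking implementation may throw Forbidden here;
    // AuthorizationNoOp saves the envelopes without performing any checks.
    return await authorization.save(userIdentity, envelopes);
}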
-------------------------------------------------------------------------------- /src/cryptography/key-pair.ts: -------------------------------------------------------------------------------- 1 | import { md, pki, util } from "node-forge"; 2 | import { canonicalizeFact } from "../fact/hash"; 3 | import { FactEnvelope, FactRecord } from "../storage"; 4 | import { Trace } from "../util/trace"; 5 | 6 | export interface KeyPair { 7 | publicPem: string; 8 | privatePem: string; 9 | } 10 | 11 | export function generateKeyPair(): KeyPair { 12 | const keypair = pki.rsa.generateKeyPair({ bits: 2048 }); 13 | const privatePem = pki.privateKeyToPem(keypair.privateKey); 14 | const publicPem = pki.publicKeyToPem(keypair.publicKey); 15 | return { privatePem, publicPem }; 16 | } 17 | 18 | export function signFacts(keyPair: KeyPair, facts: FactRecord[]): FactEnvelope[] { 19 | const privateKey = pki.privateKeyFromPem(keyPair.privatePem); 20 | const envelopes: FactEnvelope[] = facts.map(fact => signFact(fact, keyPair.publicPem, privateKey)); 21 | return envelopes; 22 | } 23 | 24 | function signFact(fact: FactRecord, publicPem: string, privateKey: pki.rsa.PrivateKey): FactEnvelope { 25 | const canonicalString = canonicalizeFact(fact.fields, fact.predecessors); 26 | const encodedString = util.encodeUtf8(canonicalString); 27 | const digest = md.sha512.create().update(encodedString); 28 | const hash = util.encode64(digest.digest().getBytes()); 29 | if (fact.hash !== hash) { 30 | Trace.error(`Hash does not match. "${fact.hash}" !== "${hash}"\nFact: ${canonicalString}`); 31 | return { 32 | fact, 33 | signatures: [] 34 | }; 35 | } 36 | const signature = util.encode64(privateKey.sign(digest)); 37 | return { 38 | fact, 39 | signatures: [{ 40 | signature, 41 | publicKey: publicPem 42 | }] 43 | }; 44 | } -------------------------------------------------------------------------------- /.cursor/rules/project-overview.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | alwaysApply: true 3 | description: Use when starting work on the Jinaga.js project to understand the overall architecture, key concepts, and development guidelines 4 | --- 5 | # Jinaga.js Project Overview 6 | 7 | Jinaga.js is an end-to-end application state management framework written in TypeScript. The main entry point is [src/index.ts](mdc:src/index.ts) which exports all public APIs. 8 | 9 | ## Core Architecture 10 | 11 | The framework is built around the concept of **facts** - immutable data records that form a directed acyclic graph (DAG). Key components include: 12 | 13 | - **Jinaga Class**: Main API entry point in [src/jinaga.ts](mdc:src/jinaga.ts) 14 | - **Specifications**: Query patterns defined in [src/specification/](mdc:src/specification/) 15 | - **Authentication**: User identity management in [src/authentication/](mdc:src/authentication/) 16 | - **Authorization**: Access control rules in [src/authorization/](mdc:src/authorization/) 17 | - **Storage**: Fact persistence in [src/storage.ts](mdc:src/storage.ts) 18 | - **Network**: HTTP communication in [src/http/](mdc:src/http/) 19 | 20 | ## Key Patterns 21 | 22 | 1. **Facts**: All data is represented as immutable facts with type and hash 23 | 2. **Specifications**: Declarative queries that define data relationships 24 | 3. **Observers**: Reactive data watching with automatic updates 25 | 4. **Forks**: Isolated data contexts for testing and transactions 26 | 5. 
**Cryptography**: Digital signatures for fact verification 27 | 28 | ## Development Guidelines 29 | 30 | - Use TypeScript with strict mode enabled 31 | - Follow the existing module structure in [src/](mdc:src/) 32 | - Write comprehensive tests in [test/](mdc:test/) 33 | - Document breaking changes in [README.md](mdc:README.md) 34 | - Use Jest for testing as configured in [jest.config.js](mdc:jest.config.js) 35 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "jinaga", 3 | "version": "6.7.15", 4 | "description": "Data management for web and mobile applications.", 5 | "keywords": [ 6 | "state", 7 | "immutable", 8 | "middleware", 9 | "api", 10 | "offline", 11 | "pwa" 12 | ], 13 | "homepage": "http://jinaga.com", 14 | "bugs": { 15 | "url": "https://github.com/jinaga/jinaga.js/issues" 16 | }, 17 | "repository": { 18 | "type": "git", 19 | "url": "https://github.com/jinaga/jinaga.js.git" 20 | }, 21 | "license": "MIT", 22 | "author": "Michael L Perry", 23 | "main": "dist/index.js", 24 | "types": "dist/index.d.ts", 25 | "scripts": { 26 | "build": "tsc", 27 | "clean": "rimraf dist/ && rimraf integration-test/jinaga-test/jinaga/ && rimraf integration-test/jinaga-test/node_modules/", 28 | "prepack": "npm run clean && npm run build && npm run test", 29 | "prepublishOnly": "npm run clean && npm run build && npm run test", 30 | "test": "npx tsc --noEmit --project tsconfig.test.json && jest", 31 | "test:watch": "jest --watch" 32 | }, 33 | "dependencies": { 34 | "@stablelib/base64": "^1.0.1", 35 | "@stablelib/sha512": "^1.0.1", 36 | "@stablelib/utf8": "^1.0.2", 37 | "node-forge": "^1.3.1" 38 | }, 39 | "devDependencies": { 40 | "@types/jest": "^27.5.1", 41 | "@types/node-forge": "^1.3.11", 42 | "@types/ws": "^8.18.1", 43 | "eslint": "^9.9.1", 44 | "fake-indexeddb": "^6.2.2", 45 | "globals": "^15.9.0", 46 | "jest": "^28.1.0", 47 | "jest-environment-jsdom": "^30.1.2", 48 | "jest-progress-bar-reporter": "^1.0.25", 49 | "rimraf": "^3.0.2", 50 | "source-map-support": "^0.5.21", 51 | "ts-jest": "^28.0.3", 52 | "typescript": "^4.9.5", 53 | "typescript-eslint": "^8.3.0", 54 | "ws": "^8.18.3" 55 | }, 56 | "engines": { 57 | "node": "^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0" 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /src/util/fn.ts: -------------------------------------------------------------------------------- 1 | function safeFlatten(results: U[][]) { 2 | const flat = results.reduce((a, b) => 3 | (!a || a.length === 0) ? b : 4 | (!b || b.length === 0) ? a : 5 | a.concat(b)); 6 | return flat ? 
flat : []; 7 | } 8 | 9 | export async function flattenAsync(collection: T[], selector: (element: T) => Promise) { 10 | if (collection.length === 0) { 11 | return []; 12 | } 13 | else { 14 | const results = await Promise.all(collection.map(selector)); 15 | return safeFlatten(results); 16 | } 17 | } 18 | 19 | export function flatten(collection: T[], selector: (element: T) => U[]) { 20 | if (collection.length === 0) { 21 | return []; 22 | } 23 | else { 24 | return safeFlatten(collection.map(selector)); 25 | } 26 | } 27 | 28 | export async function mapAsync(collection: T[], action: (element: T) => Promise) { 29 | if (collection.length === 0) { 30 | return []; 31 | } 32 | else { 33 | return await Promise.all(collection.map(action)); 34 | } 35 | } 36 | 37 | export async function filterAsync(collection: T[], predicate: (element: T) => Promise) { 38 | if (collection.length === 0) { 39 | return []; 40 | } 41 | else { 42 | const filters = await Promise.all(collection.map(async element => ({ 43 | include: await predicate(element), 44 | element 45 | }))); 46 | 47 | return filters.filter(f => f.include).map(f => f.element); 48 | } 49 | } 50 | 51 | export function findIndex(array: T[], predicate: ((element: T) => boolean)): number { 52 | for (let index = 0; index < array.length; index++) { 53 | if (predicate(array[index])) { 54 | return index; 55 | } 56 | } 57 | 58 | return -1; 59 | } 60 | 61 | export function distinct(value: T, index: number, self: T[]) { 62 | return self.indexOf(value) === index; 63 | } -------------------------------------------------------------------------------- /src/cryptography/verify.ts: -------------------------------------------------------------------------------- 1 | import { md, pki, util } from "node-forge"; 2 | import { canonicalizeFact } from "../fact/hash"; 3 | import { FactEnvelope, FactSignature } from "../storage"; 4 | import { Trace } from "../util/trace"; 5 | 6 | type PublicKeyCache = { [key: string]: pki.rsa.PublicKey }; 7 | 8 | export function verifyEnvelopes(envelopes: FactEnvelope[]): boolean { 9 | // Cache public keys to avoid parsing them multiple times 10 | const publicKeyCache: PublicKeyCache = {}; 11 | 12 | for (const envelope of envelopes) { 13 | for (const signature of envelope.signatures) { 14 | if (!publicKeyCache[signature.publicKey]) { 15 | publicKeyCache[signature.publicKey] = pki.publicKeyFromPem(signature.publicKey); 16 | } 17 | } 18 | } 19 | 20 | return envelopes.every(e => verifySignatures(e, publicKeyCache)); 21 | } 22 | 23 | function verifySignatures(envelope: FactEnvelope, publicKeyCache: PublicKeyCache): boolean { 24 | const canonicalString = canonicalizeFact(envelope.fact.fields, envelope.fact.predecessors); 25 | const encodedString = util.encodeUtf8(canonicalString); 26 | const digest = md.sha512.create().update(encodedString); 27 | const digestBytes = digest.digest().getBytes(); 28 | const hash = util.encode64(digestBytes); 29 | if (envelope.fact.hash !== hash) { 30 | Trace.error(`Hash does not match. 
"${envelope.fact.hash}" !== "${hash}"\nFact: ${canonicalString}`); 31 | return false; 32 | } 33 | return envelope.signatures.every(s => verifySignature(s, digestBytes, publicKeyCache)); 34 | } 35 | 36 | function verifySignature(signature: FactSignature, digestBytes: string, publicKeyCache: PublicKeyCache) { 37 | const publicKey = publicKeyCache[signature.publicKey]; 38 | const signatureBytes = util.decode64(signature.signature); 39 | try { 40 | return publicKey.verify(digestBytes, signatureBytes); 41 | } 42 | catch (e) { 43 | Trace.error(`Failed to verify signature. ${e}`); 44 | return false; 45 | } 46 | } -------------------------------------------------------------------------------- /.cursor/rules/network-patterns.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Use when working with HTTP communication, network requests, synchronization, or offline functionality to implement proper network patterns 3 | --- 4 | # Network and HTTP Patterns 5 | 6 | ## HTTP Client 7 | The HTTP client is implemented in [src/http/](mdc:src/http/): 8 | - `WebClient`: Main HTTP client for browser environments 9 | - `HttpNetwork`: Network layer for fact synchronization 10 | - `FetchConnection`: HTTP connection implementation 11 | 12 | ## Connection Configuration 13 | ```typescript 14 | const j = JinagaBrowser.create({ 15 | httpEndpoint: "http://localhost:8080/jinaga", 16 | authenticationProvider: new AuthenticationProvider() 17 | }); 18 | ``` 19 | 20 | ## Sync Status 21 | Monitor synchronization status using `onSyncStatus()`: 22 | ```typescript 23 | j.onSyncStatus((status) => { 24 | console.log('Sync status:', status); 25 | }); 26 | ``` 27 | 28 | ## Error Handling 29 | Network errors are handled through error callbacks: 30 | ```typescript 31 | j.onError((message) => { 32 | console.error('Jinaga error:', message); 33 | }); 34 | ``` 35 | 36 | ## Offline Support 37 | Jinaga supports offline operation: 38 | - Facts are queued when offline 39 | - Automatic retry when connection is restored 40 | - Progress tracking with `onProgress()` 41 | 42 | ## Message Types 43 | HTTP messages are defined in [src/http/messages.ts](mdc:src/http/messages.ts): 44 | - `LoadMessage`: Request facts from server 45 | - `SaveMessage`: Send facts to server 46 | - `FeedResponse`: Server response with facts 47 | - `LoginResponse`: Authentication response 48 | 49 | ## Serialization 50 | Fact serialization is handled by: 51 | - `GraphSerializer`: Serialize facts for transmission 52 | - `GraphDeserializer`: Deserialize facts from server 53 | - `ContentType`: Define message content types 54 | 55 | ## Best Practices 56 | - Handle network errors gracefully 57 | - Implement retry logic for failed requests 58 | - Monitor sync status for user feedback 59 | - Use appropriate timeouts for requests 60 | - Implement offline-first patterns 61 | -------------------------------------------------------------------------------- /src/specification/feed-cache.ts: -------------------------------------------------------------------------------- 1 | import { computeObjectHash } from "../fact/hash"; 2 | import { FactReference, ReferencesByName } from "../storage"; 3 | import { Skeleton, skeletonOfSpecification } from "./skeleton"; 4 | import { Specification } from "./specification"; 5 | 6 | interface FeedIdentifier { 7 | start: { 8 | factReference: FactReference; 9 | index: number; 10 | }[]; 11 | skeleton: Skeleton; 12 | } 13 | 14 | export interface FeedObject { 15 | namedStart: ReferencesByName; 16 | 
feed: Specification; 17 | } 18 | 19 | type FeedByHash = { 20 | [hash: string]: FeedObject; 21 | }; 22 | 23 | export class FeedCache { 24 | private feedByHash: FeedByHash = {}; 25 | 26 | addFeeds(feeds: Specification[], namedStart: ReferencesByName): string[] { 27 | const feedsByHash = feeds.reduce((map, feed) => { 28 | const skeleton = skeletonOfSpecification(feed); 29 | const indexedStart = skeleton.inputs.map(input => ({ 30 | factReference: namedStart[feed.given[input.inputIndex].label.name], 31 | index: input.inputIndex 32 | })); 33 | const feedIdentifier: FeedIdentifier = { 34 | start: indexedStart, 35 | skeleton 36 | }; 37 | const feedObject: FeedObject = { 38 | namedStart, 39 | feed 40 | }; 41 | const hash = urlSafe(computeObjectHash(feedIdentifier)); 42 | return ({ 43 | ...map, 44 | [hash]: feedObject 45 | }); 46 | }, {} as FeedByHash); 47 | const feedHashes = Object.keys(feedsByHash); 48 | this.feedByHash = { 49 | ...this.feedByHash, 50 | ...feedsByHash 51 | }; 52 | return feedHashes; 53 | } 54 | 55 | getFeed(feed: string): FeedObject | undefined { 56 | return this.feedByHash[feed]; 57 | } 58 | } 59 | 60 | function urlSafe(hash: string): string { 61 | return hash.replace(/\+/g, '-').replace(/\//g, '_').replace(/=/g, ''); 62 | } -------------------------------------------------------------------------------- /.cursor/rules/authentication-patterns.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Use when implementing authentication, authorization, user identity, or security features to follow Jinaga's auth patterns and security best practices 3 | --- 4 | # Authentication and Authorization Patterns 5 | 6 | ## Authentication Providers 7 | Authentication is handled by providers in [src/authentication/](mdc:src/authentication/): 8 | - `AuthenticationWebClient`: Browser-based authentication 9 | - `AuthenticationTest`: Testing authentication 10 | - `AuthenticationNoOp`: No-op authentication for development 11 | 12 | ## User Identity 13 | User identity is managed through the `User` model in [src/model/user.ts](mdc:src/model/user.ts): 14 | - Users are facts with type `"Jinaga.User"` 15 | - User identity is cryptographically verified 16 | - Profile information includes display name 17 | 18 | ## Login Process 19 | ```typescript 20 | const j = JinagaBrowser.create({ 21 | httpEndpoint: "http://localhost:8080/jinaga" 22 | }); 23 | 24 | const { userFact, profile } = await j.login(); 25 | ``` 26 | 27 | ## Authorization Rules 28 | Authorization rules are defined using `describeAuthorizationRules()` in [src/authorization/authorizationRules.ts](mdc:src/authorization/authorizationRules.ts): 29 | - Rules are evaluated before fact creation 30 | - Rules can reference predecessor facts 31 | - Use `Forbidden` error for access violations 32 | - Rules support complex authorization patterns 33 | 34 | ## Example Authorization Rule 35 | ```typescript 36 | export const authorizationRules = describeAuthorizationRules({ 37 | "Blog.Post": (post, facts) => { 38 | const user = facts.ofType("Jinaga.User").withHash(post.author); 39 | return facts.any(user); 40 | } 41 | }); 42 | ``` 43 | 44 | ## Device Identity 45 | For server-side applications, use `j.local()` to access device identity: 46 | ```typescript 47 | const device = await j.local(); 48 | ``` 49 | 50 | ## Security Considerations 51 | - Always validate user permissions before fact creation 52 | - Use cryptographic signatures for fact verification 53 | - Implement proper session management 54 | - Handle 
authentication errors gracefully 55 | -------------------------------------------------------------------------------- /src/http/httpNetwork.ts: -------------------------------------------------------------------------------- 1 | import { Specification } from "../specification/specification"; 2 | import { FactReference, FactEnvelope } from "../storage"; 3 | import { Network } from "../managers/NetworkManager"; 4 | import { FeedResponse, FeedsResponse, LoadResponse } from "./messages"; 5 | import { WebClient } from "./web-client"; 6 | import { describeDeclaration, describeSpecification } from "../specification/description"; 7 | 8 | export class HttpNetwork implements Network { 9 | constructor( 10 | private readonly webClient: WebClient 11 | ) { } 12 | 13 | async feeds(start: FactReference[], specification: Specification): Promise<string[]> { 14 | const declarationString = describeDeclaration(start, specification.given.map(g => g.label)); 15 | const specificationString = describeSpecification(specification, 0); 16 | const request = `${declarationString}\n${specificationString}`; 17 | const response: FeedsResponse = await this.webClient.feeds(request); 18 | return response.feeds; 19 | } 20 | 21 | async fetchFeed(feed: string, bookmark: string): Promise<FeedResponse> { 22 | const response: FeedResponse = await this.webClient.feed(feed, bookmark); 23 | return response; 24 | } 25 | 26 | streamFeed(feed: string, bookmark: string, onResponse: (factReferences: FactReference[], nextBookmark: string) => Promise<void>, onError: (err: Error) => void, feedRefreshIntervalSeconds: number): () => void { 27 | return this.webClient.streamFeed(feed, bookmark, async (response: FeedResponse) => { 28 | await onResponse(response.references, response.bookmark); 29 | }, onError, feedRefreshIntervalSeconds); 30 | } 31 | 32 | async load(factReferences: FactReference[]): Promise<FactEnvelope[]> { 33 | const response: LoadResponse = await this.webClient.load({ 34 | references: factReferences 35 | }); 36 | const envelopes = response.facts.map(fact => ({ 37 | fact, 38 | signatures: [] 39 | })); 40 | return envelopes; 41 | } 42 | 43 | } -------------------------------------------------------------------------------- /.cursor/rules/development-workflow.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Use when setting up the development environment, building the project, running tests, or managing releases to follow the established workflow 3 | --- 4 | # Development Workflow 5 | 6 | ## Build Process 7 | The build process is configured in [package.json](mdc:package.json): 8 | - `npm run build`: Compile TypeScript to JavaScript 9 | - `npm run clean`: Clean build artifacts 10 | - `npm run test`: Run all tests 11 | - `npm run test:watch`: Run tests in watch mode 12 | 13 | ## TypeScript Configuration 14 | TypeScript is configured in [tsconfig.json](mdc:tsconfig.json): 15 | - Target: ES6 16 | - Module: CommonJS 17 | - Strict mode enabled 18 | - Declaration files generated 19 | - Source maps enabled 20 | 21 | ## Testing 22 | Tests use Jest as configured in [jest.config.js](mdc:jest.config.js): 23 | - TypeScript support with ts-jest 24 | - Coverage reporting enabled 25 | - Test files in [test/](mdc:test/) directory 26 | - Integration tests in separate directories 27 | 28 | ## Code Quality 29 | ESLint is configured in [eslint.config.mjs](mdc:eslint.config.mjs): 30 | - TypeScript-aware linting 31 | - Enforce coding standards 32 | - Catch common errors 33 | - Maintain code consistency 34 | 35 | ## Release Process 36 |
Release process is documented in [README.md](mdc:README.md): 37 | 1. Bump version in package.json 38 | 2. Create and push git tag 39 | 3. Create GitHub release 40 | 4. GitHub Actions builds and publishes 41 | 42 | ## Development Commands 43 | ```bash 44 | # Install dependencies 45 | npm ci 46 | 47 | # Build the project 48 | npm run build 49 | 50 | # Run tests 51 | npm test 52 | 53 | # Run tests in watch mode 54 | npm run test:watch 55 | 56 | # Clean build artifacts 57 | npm run clean 58 | ``` 59 | 60 | ## File Organization 61 | - Source code in [src/](mdc:src/) 62 | - Tests in [test/](mdc:test/) 63 | - Documentation in [docs/](mdc:docs/) 64 | - Examples in [examples/](mdc:examples/) 65 | - Build output in `dist/` 66 | 67 | ## Git Workflow 68 | - Use descriptive commit messages 69 | - Test before committing 70 | - Update documentation for breaking changes 71 | - Follow semantic versioning 72 | -------------------------------------------------------------------------------- /.cursor/rules/storage-patterns.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Use when working with data persistence, storage interfaces, fact references, or queue management to implement proper storage patterns 3 | --- 4 | # Storage and Persistence Patterns 5 | 6 | ## Storage Interfaces 7 | Storage is defined in [src/storage.ts](mdc:src/storage.ts): 8 | - `Storage`: Main storage interface 9 | - `FactReference`: Reference to a fact by hash 10 | - `FactEnvelope`: Fact with metadata 11 | - `Queue`: Queue for pending operations 12 | 13 | ## Storage Implementations 14 | - `MemoryStore`: In-memory storage for testing 15 | - `IndexedDBStore`: Browser storage using IndexedDB 16 | - `IndexedDBQueue`: Queue implementation for offline support 17 | 18 | ## Fact References 19 | Facts are referenced by their cryptographic hash: 20 | ```typescript 21 | const factRef: FactReference = { 22 | type: "Blog.Post", 23 | hash: "abc123..." 24 | }; 25 | ``` 26 | 27 | ## Fact Envelopes 28 | Facts are wrapped in envelopes with metadata: 29 | ```typescript 30 | interface FactEnvelope { 31 | fact: Fact; 32 | signatures: FactSignature[]; 33 | hash: string; 34 | } 35 | ``` 36 | 37 | ## Queue Management 38 | The queue system handles offline operations: 39 | - Facts are queued when offline 40 | - Automatic retry when connection is restored 41 | - Progress tracking for user feedback 42 | - Queue processing in [src/managers/QueueProcessor.ts](mdc:src/managers/QueueProcessor.ts) 43 | 44 | ## Purge Operations 45 | Purge functionality is in [src/purge/](mdc:src/purge/): 46 | - `PurgeConditions`: Define when facts can be purged 47 | - `validatePurgeSpecification`: Validate purge rules 48 | - `PurgeManager`: Manage purge operations 49 | 50 | ## IndexedDB Usage 51 | For browser storage, use IndexedDB: 52 | ```typescript 53 | import { IndexedDBStore } from 'jinaga'; 54 | 55 | const store = new IndexedDBStore('jinaga-db'); 56 | ``` 57 | 58 | ## Best Practices 59 | - Use appropriate storage for environment (memory for tests, IndexedDB for browser) 60 | - Handle storage errors gracefully 61 | - Implement proper cleanup for purged facts 62 | - Monitor queue size for performance 63 | - Use transactions for multi-fact operations 64 | -------------------------------------------------------------------------------- /test/utils/async-test-utils.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Observer interface with the processed() method. 
3 | * This is a minimal interface for testing purposes. 4 | */ 5 | interface ObserverWithProcessed { 6 | processed(): Promise; 7 | } 8 | 9 | /** 10 | * Wait for an observer to complete all pending notifications. 11 | * This is useful in tests to ensure all async operations have completed. 12 | * 13 | * @param observer The observer to wait for 14 | */ 15 | export async function waitForObserver(observer: ObserverWithProcessed): Promise { 16 | await observer.processed(); 17 | } 18 | 19 | /** 20 | * Wait for a condition to become true, with timeout. 21 | * Polls the predicate at regular intervals until it returns true or timeout is reached. 22 | * 23 | * @param predicate Function that returns true when the condition is met 24 | * @param timeoutMs Maximum time to wait in milliseconds (default: 2000) 25 | * @param intervalMs Polling interval in milliseconds (default: 20) 26 | */ 27 | export async function waitForCondition( 28 | predicate: () => boolean, 29 | timeoutMs = 2000, 30 | intervalMs = 20 31 | ): Promise { 32 | const start = Date.now(); 33 | return new Promise((resolve, reject) => { 34 | const check = () => { 35 | if (predicate()) return resolve(); 36 | if (Date.now() - start > timeoutMs) { 37 | return reject(new Error(`Timeout waiting for condition after ${timeoutMs}ms`)); 38 | } 39 | setTimeout(check, intervalMs); 40 | }; 41 | check(); 42 | }); 43 | } 44 | 45 | /** 46 | * Wait for a specific number of callbacks to be invoked. 47 | * Useful for testing observer notifications. 48 | * 49 | * @param getCount Function that returns the current count 50 | * @param expectedCount The expected count to wait for 51 | * @param timeoutMs Maximum time to wait in milliseconds (default: 2000) 52 | */ 53 | export async function waitForCallbackCount( 54 | getCount: () => number, 55 | expectedCount: number, 56 | timeoutMs = 2000 57 | ): Promise { 58 | return waitForCondition(() => getCount() >= expectedCount, timeoutMs); 59 | } 60 | 61 | -------------------------------------------------------------------------------- /test/distribution/distributionUnitTestSpec.ts: -------------------------------------------------------------------------------- 1 | import { buildModel, DistributionRules, JinagaTest, LabelOf, User } from "@src"; 2 | 3 | describe("Distribution rules in unit tests", () => { 4 | it("should pass when distribution rule allows", async () => { 5 | const loggedInUser = new User("user1"); 6 | const jinaga = JinagaTest.create({ 7 | model, 8 | user: loggedInUser, 9 | distribution, 10 | initialState: [ 11 | loggedInUser 12 | ] 13 | }); 14 | 15 | const namesSpec = model.given(User).match(user => UserName.current(user)); 16 | const names = await jinaga.query(namesSpec, loggedInUser); 17 | expect(names).toStrictEqual([]); 18 | }); 19 | 20 | it("should throw when querying as a different user", async () => { 21 | const user1 = new User("user1"); 22 | const user2 = new User("user2"); 23 | const jinaga = JinagaTest.create({ 24 | model, 25 | user: user2, 26 | distribution, 27 | initialState: [ 28 | user1, 29 | user2 30 | ] 31 | }); 32 | 33 | const namesSpec = model.given(User).match(user => UserName.current(user)); 34 | await expect(jinaga.query(namesSpec, user1)).rejects.toThrow(); 35 | }); 36 | }); 37 | 38 | class UserName { 39 | static Type = "UserName" as const; 40 | type = UserName.Type; 41 | 42 | constructor( 43 | public user: User, 44 | public value: string, 45 | public prior: UserName[]) {} 46 | 47 | static current(user: LabelOf) { 48 | return user.successors(UserName, userName => userName.user) 49 | 
.notExists(userName => userName.successors(UserName, next => next.prior)); 50 | } 51 | } 52 | 53 | const model = buildModel(b => b 54 | .type(User) 55 | .type(UserName, f => f 56 | .predecessor("user", User) 57 | .predecessor("prior", UserName) 58 | ) 59 | ); 60 | 61 | function distribution(r: DistributionRules) { 62 | return r 63 | .share(model.given(User).match(user => UserName.current(user))) 64 | .with(model.given(User).match(user => user)); 65 | } -------------------------------------------------------------------------------- /src/rules/RuleSet.ts: -------------------------------------------------------------------------------- 1 | import { AuthorizationRules } from "../authorization/authorizationRules"; 2 | import { DistributionRules } from "../distribution/distribution-rules"; 3 | import { PurgeConditions } from "../purge/purgeConditions"; 4 | import { SpecificationParser } from "../specification/specification-parser"; 5 | 6 | export class RuleSet { 7 | static empty: RuleSet = new RuleSet( 8 | AuthorizationRules.empty, 9 | DistributionRules.empty, 10 | PurgeConditions.empty 11 | ); 12 | 13 | constructor( 14 | public authorizationRules: AuthorizationRules, 15 | public distributionRules: DistributionRules, 16 | public purgeConditions: PurgeConditions 17 | ) {} 18 | 19 | public static loadFromDescription(description: string): RuleSet { 20 | const parser = new SpecificationParser(description); 21 | parser.skipWhitespace(); 22 | let authorizationRules: AuthorizationRules = AuthorizationRules.empty; 23 | let distributionRules: DistributionRules = DistributionRules.empty; 24 | let purgeConditions: PurgeConditions = PurgeConditions.empty; 25 | while (!parser.atEnd()) { 26 | if (parser.continues("authorization")) { 27 | authorizationRules = authorizationRules.with(a => parser.parseAuthorizationRules()); 28 | } 29 | else if (parser.continues("distribution")) { 30 | distributionRules = distributionRules.with(d => parser.parseDistributionRules()); 31 | } 32 | else if (parser.continues("purge")) { 33 | purgeConditions = purgeConditions.with(p => parser.parsePurgeConditions()); 34 | } 35 | else { 36 | // Throws an error. 
37 | parser.expectEnd(); 38 | } 39 | } 40 | return new RuleSet(authorizationRules, distributionRules, purgeConditions); 41 | } 42 | 43 | merge(ruleSet2: RuleSet): RuleSet { 44 | return new RuleSet( 45 | this.authorizationRules.merge(ruleSet2.authorizationRules), 46 | this.distributionRules.merge(ruleSet2.distributionRules), 47 | this.purgeConditions.merge(ruleSet2.purgeConditions) 48 | ); 49 | } 50 | } -------------------------------------------------------------------------------- /.cursor/rules/summary.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Use as a quick reference guide to find the appropriate rule for any Jinaga.js development task 3 | alwaysApply: false 4 | --- 5 | # Jinaga.js Development Quick Reference 6 | 7 | ## Core Rules Overview 8 | - **[Project Overview](mdc:.cursor/rules/project-overview.mdc)**: Framework architecture and key concepts 9 | - **[TypeScript Standards](mdc:.cursor/rules/typescript-standards.mdc)**: Coding standards and conventions 10 | - **[Fact Patterns](mdc:.cursor/rules/fact-patterns.mdc)**: Working with facts and specifications 11 | - **[Testing Standards](mdc:.cursor/rules/testing-standards.mdc)**: Testing patterns and requirements 12 | - **[Authentication Patterns](mdc:.cursor/rules/authentication-patterns.mdc)**: Auth and authorization 13 | - **[Network Patterns](mdc:.cursor/rules/network-patterns.mdc)**: HTTP communication 14 | - **[Storage Patterns](mdc:.cursor/rules/storage-patterns.mdc)**: Data persistence 15 | - **[Cryptography Patterns](mdc:.cursor/rules/cryptography-patterns.mdc)**: Security and verification 16 | - **[Development Workflow](mdc:.cursor/rules/development-workflow.mdc)**: Build and release process 17 | 18 | ## Key Files 19 | - [src/index.ts](mdc:src/index.ts): Main exports 20 | - [src/jinaga.ts](mdc:src/jinaga.ts): Core Jinaga class 21 | - [src/storage.ts](mdc:src/storage.ts): Storage interfaces 22 | - [package.json](mdc:package.json): Project configuration 23 | - [tsconfig.json](mdc:tsconfig.json): TypeScript configuration 24 | - [jest.config.js](mdc:jest.config.js): Test configuration 25 | 26 | ## Quick Start 27 | ```typescript 28 | import { JinagaBrowser } from 'jinaga'; 29 | 30 | const j = JinagaBrowser.create({ 31 | httpEndpoint: "http://localhost:8080/jinaga" 32 | }); 33 | 34 | // Create a fact 35 | const user = await j.fact(new User()); 36 | 37 | // Query with specification 38 | const posts = await j.query(blogPosts, user); 39 | 40 | // Watch for changes 41 | const observer = j.watch(blogPosts, user, (posts) => { 42 | console.log('Posts updated:', posts); 43 | }); 44 | ``` 45 | 46 | ## Common Patterns 47 | - Use `j.fact()` to create facts 48 | - Use `j.query()` for one-time queries 49 | - Use `j.watch()` for reactive data 50 | - Use `j.fork()` for isolated contexts 51 | - Use `JinagaTest` for testing 52 | -------------------------------------------------------------------------------- /.cursor/rules/typescript-standards.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Use when writing or modifying TypeScript code to ensure consistent coding standards, type safety, and best practices 3 | globs: *.ts 4 | alwaysApply: false 5 | --- 6 | # TypeScript Coding Standards 7 | 8 | ## Type Safety 9 | - Use strict TypeScript as configured in [tsconfig.json](mdc:tsconfig.json) 10 | - Always define explicit return types for public functions 11 | - Use generic types for reusable components 12 | - Prefer 
`interface` over `type` for object shapes 13 | - Use `Fact` type from [src/storage.ts](mdc:src/storage.ts) for all data records 14 | 15 | ## Naming Conventions 16 | - Use PascalCase for classes and interfaces (e.g., `Jinaga`, `FactManager`) 17 | - Use camelCase for functions and variables 18 | - Use UPPER_SNAKE_CASE for constants 19 | - Prefix private methods with underscore (e.g., `_validateFact`) 20 | 21 | ## Module Structure 22 | - Export public APIs from [src/index.ts](mdc:src/index.ts) 23 | - Use barrel exports for related functionality 24 | - Keep modules focused on single responsibility 25 | - Use relative imports within the project 26 | 27 | ## Error Handling 28 | - Use custom error classes extending `Error` 29 | - Provide meaningful error messages 30 | - Use `try/catch` for async operations 31 | - Validate inputs early and fail fast 32 | 33 | ## Async/Await 34 | - Prefer async/await over Promises 35 | - Handle errors in async functions 36 | - Use proper typing for async return values 37 | - Avoid mixing Promise chains with async/await 38 | 39 | ## Jinaga.js Framework Patterns 40 | 41 | ### Observer Pattern 42 | - Observer callbacks must return `void` 43 | - Use proper typing for fact references 44 | - Handle async operations correctly 45 | 46 | ### Fact Creation 47 | - All facts must have valid predecessor relationships 48 | - Use proper constructors with required parameters 49 | - Validate fact structure before testing 50 | 51 | ### Specification Patterns 52 | - Specifications follow specific builder patterns 53 | - No union operators exist - use separate specifications 54 | - Match patterns to existing test examples 55 | 56 | ### Error Handling 57 | - Understand framework validation constraints 58 | - Test expected errors, not framework violations 59 | - Use proper error boundaries 60 | -------------------------------------------------------------------------------- /.cursor/rules/cryptography-patterns.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Use when implementing cryptographic features, fact signing, verification, or security measures to ensure proper cryptography patterns 3 | --- 4 | # Cryptography and Security Patterns 5 | 6 | ## Key Management 7 | Cryptography is implemented in [src/cryptography/](mdc:src/cryptography/): 8 | - `KeyPair`: Generate and manage cryptographic key pairs 9 | - `generateKeyPair()`: Create new key pairs 10 | - `signFacts()`: Sign facts with private keys 11 | 12 | ## Fact Signatures 13 | Facts are cryptographically signed for verification: 14 | ```typescript 15 | import { generateKeyPair, signFacts } from 'jinaga'; 16 | 17 | const keyPair = generateKeyPair(); 18 | const signedFacts = signFacts(facts, keyPair.privateKey); 19 | ``` 20 | 21 | ## Verification 22 | Fact verification is handled by `verifyEnvelopes()`: 23 | ```typescript 24 | import { verifyEnvelopes } from 'jinaga'; 25 | 26 | const isValid = verifyEnvelopes(factEnvelopes, publicKey); 27 | ``` 28 | 29 | ## Hash Computation 30 | Fact hashing is implemented in [src/fact/hash.ts](mdc:src/fact/hash.ts): 31 | - `computeHash()`: Compute hash of a fact 32 | - `canonicalizeFact()`: Canonicalize fact for consistent hashing 33 | - `computeObjectHash()`: Hash arbitrary objects 34 | 35 | ## Security Best Practices 36 | - Never expose private keys in client code 37 | - Use secure key storage for private keys 38 | - Verify all facts before processing 39 | - Implement proper key rotation 40 | - Use strong cryptographic algorithms 41 | 42 
| ## Fact Integrity 43 | - All facts are cryptographically signed 44 | - Signatures prevent tampering 45 | - Hash-based references ensure integrity 46 | - Canonicalization prevents replay attacks 47 | 48 | ## Key Pair Generation 49 | ```typescript 50 | const keyPair = generateKeyPair(); 51 | console.log('Public key:', keyPair.publicKey); 52 | console.log('Private key:', keyPair.privateKey); 53 | ``` 54 | 55 | ## Verification Process 56 | 1. Extract fact from envelope 57 | 2. Verify signature using public key 58 | 3. Check hash matches computed hash 59 | 4. Validate predecessor references 60 | 61 | ## Error Handling 62 | - Handle signature verification failures 63 | - Implement proper error messages 64 | - Log security-related errors 65 | - Fail fast on verification errors 66 | -------------------------------------------------------------------------------- /test/orderModel.ts: -------------------------------------------------------------------------------- 1 | import { buildModel } from "@src"; 2 | 3 | export function createModel() { 4 | return buildModel(b => b 5 | .type(Store) 6 | .type(Order, x => x 7 | .predecessor("store", Store) 8 | ) 9 | .type(Item, x => x 10 | .predecessor("order", Order) 11 | .predecessor("product", Product) 12 | ) 13 | .type(OrderCancelled, x => x 14 | .predecessor("order", Order) 15 | ) 16 | .type(OrderCancelledReason, x => x 17 | .predecessor("orderCancelled", OrderCancelled) 18 | ) 19 | .type(OrderShipped, x => x 20 | .predecessor("order", Order) 21 | ) 22 | ); 23 | } 24 | export class Store { 25 | static Type = "Store" as const; 26 | type = Store.Type; 27 | 28 | constructor( 29 | public identifier: string 30 | ) {} 31 | } 32 | export class Order { 33 | static Type = "Order" as const; 34 | type = Order.Type; 35 | 36 | constructor( 37 | public store: Store, 38 | public createdAt: Date | string 39 | ) {} 40 | } 41 | export class Product { 42 | static Type = "Product" as const; 43 | type = Product.Type; 44 | 45 | constructor( 46 | public store: Store, 47 | public identifier: string 48 | ) {} 49 | } 50 | export class Item { 51 | static Type = "Order.Item" as const; 52 | type = Item.Type; 53 | 54 | constructor( 55 | public order: Order, 56 | public product: Product, 57 | public quantity: number 58 | ) {} 59 | } 60 | export class OrderCancelled { 61 | static Type = "Order.Cancelled" as const; 62 | type = OrderCancelled.Type; 63 | 64 | constructor( 65 | public order: Order, 66 | public cancelledAt: Date | string 67 | ) {} 68 | } 69 | export class OrderCancelledReason { 70 | static Type = "Order.Cancelled.Reason" as const; 71 | type = OrderCancelledReason.Type; 72 | 73 | constructor( 74 | public orderCancelled: OrderCancelled, 75 | public reason: string 76 | ) {} 77 | } 78 | export class OrderShipped { 79 | static Type = "Order.Shipped" as const; 80 | type = OrderShipped.Type; 81 | 82 | constructor( 83 | public order: Order, 84 | public shippedAt: Date | string 85 | ) {} 86 | } 87 | -------------------------------------------------------------------------------- /src/fork/transient-fork.ts: -------------------------------------------------------------------------------- 1 | import { TopologicalSorter } from '../fact/sorter'; 2 | import { WebClient } from '../http/web-client'; 3 | import { FactEnvelope, factEnvelopeEquals, FactRecord, FactReference, Storage } from '../storage'; 4 | import { Trace } from "../util/trace"; 5 | import { Fork } from "./fork"; 6 | import { serializeLoad } from './serialize'; 7 | 8 | export class TransientFork implements Fork { 9 | 
constructor( 10 | private storage: Storage, 11 | private client: WebClient 12 | ) { 13 | 14 | } 15 | 16 | close() { 17 | return Promise.resolve(); 18 | } 19 | 20 | async save(envelopes: FactEnvelope[]): Promise { 21 | await this.client.save(envelopes); 22 | } 23 | 24 | async load(references: FactReference[]): Promise { 25 | const known = await this.storage.load(references); 26 | const remaining = references.filter(reference => !known.some(factEnvelopeEquals(reference))); 27 | if (remaining.length === 0) { 28 | return known; 29 | } 30 | else { 31 | const records = await this.loadEnvelopes(remaining); 32 | return records.concat(known); 33 | } 34 | } 35 | 36 | private async loadEnvelopes(references: FactReference[]) { 37 | const sorter = new TopologicalSorter(); 38 | let loaded: FactEnvelope[] = []; 39 | for (let start = 0; start < references.length; start += 300) { 40 | const chunk = references.slice(start, start + 300); 41 | const response = await this.client.load(serializeLoad(chunk)); 42 | const facts = sorter.sort(response.facts, (p, f) => f); 43 | const envelopes = facts.map(fact => { 44 | return { 45 | fact: fact, 46 | signatures: [] 47 | }; 48 | }); 49 | const saved = await this.storage.save(envelopes); 50 | if (saved.length > 0) { 51 | Trace.counter("facts_saved", saved.length); 52 | } 53 | loaded = loaded.concat(envelopes); 54 | } 55 | return loaded; 56 | } 57 | 58 | processQueueNow(): Promise { 59 | // No-op for transient fork 60 | return Promise.resolve(); 61 | } 62 | } -------------------------------------------------------------------------------- /src/authentication/authentication-test.ts: -------------------------------------------------------------------------------- 1 | import { Authentication } from '../authentication/authentication'; 2 | import { AuthorizationEngine } from '../authorization/authorization-engine'; 3 | import { AuthorizationRules } from '../authorization/authorizationRules'; 4 | import { LoginResponse } from '../http/messages'; 5 | import { FactEnvelope, FactRecord, Storage, factEnvelopeEquals } from '../storage'; 6 | 7 | export class AuthenticationTest implements Authentication { 8 | private authorizationEngine: AuthorizationEngine | null; 9 | 10 | constructor ( 11 | store: Storage, 12 | authorizationRules: AuthorizationRules | null, 13 | private userFact: FactRecord | null, 14 | private deviceFact: FactRecord | null 15 | ) { 16 | this.authorizationEngine = authorizationRules && 17 | new AuthorizationEngine(authorizationRules, store); 18 | } 19 | 20 | async login() { 21 | if (!this.userFact) { 22 | throw new Error("No logged in user."); 23 | } 24 | 25 | return { 26 | userFact: this.userFact, 27 | profile: { 28 | displayName: "Test user" 29 | } 30 | }; 31 | } 32 | 33 | async local() { 34 | if (!this.deviceFact) { 35 | throw new Error("No persistent device."); 36 | } 37 | 38 | return this.deviceFact; 39 | } 40 | 41 | async authorize(envelopes: FactEnvelope[]): Promise { 42 | if (this.authorizationEngine) { 43 | const results = await this.authorizationEngine.authorizeFacts(envelopes, this.userFact); 44 | const authorizedEnvelopes: FactEnvelope[] = results.map(r => { 45 | const envelope = envelopes.find(factEnvelopeEquals(r.fact)); 46 | if (!envelope) { 47 | throw new Error("Fact not found in envelopes."); 48 | } 49 | if (r.verdict === "Accept") { 50 | return { 51 | fact: r.fact, 52 | signatures: envelope.signatures 53 | .filter(s => r.newPublicKeys.includes(s.publicKey)) 54 | }; 55 | } 56 | else if (r.verdict === "Existing") { 57 | return envelope; 58 | } 59 | 
else { 60 | throw new Error("Unexpected verdict."); 61 | } 62 | }); 63 | return authorizedEnvelopes; 64 | } 65 | else { 66 | return envelopes; 67 | } 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /src/ws/protocol-router.ts: -------------------------------------------------------------------------------- 1 | import { ControlKeyword, ControlFrame, ProtocolMessageRouterCallbacks, AuthorizationContext } from "./types"; 2 | import { ControlFrameHandler } from "./control-frame-handler"; 3 | 4 | const CONTROL_KEYWORDS: ReadonlySet = new Set(["BOOK", "ERR", "SUB", "UNSUB", "ACK"]); 5 | 6 | export class WebSocketMessageRouter { 7 | private buffer: string = ""; 8 | private authorizationContext: AuthorizationContext | undefined; 9 | 10 | constructor( 11 | private readonly callbacks: ProtocolMessageRouterCallbacks, 12 | private readonly controlHandler: ControlFrameHandler, 13 | authorizationContext?: AuthorizationContext 14 | ) { 15 | this.authorizationContext = authorizationContext; 16 | } 17 | 18 | setAuthorizationContext(ctx: AuthorizationContext | undefined) { 19 | this.authorizationContext = ctx; 20 | } 21 | 22 | pushChunk(chunk: string): void { 23 | this.buffer += typeof chunk === "string" ? chunk : String(chunk); 24 | this.flush(); 25 | } 26 | 27 | private flush(): void { 28 | const parts = this.buffer.split(/\r?\n/); 29 | this.buffer = parts.pop() ?? ""; 30 | 31 | for (let i = 0; i < parts.length; i++) { 32 | const line = parts[i]; 33 | if (CONTROL_KEYWORDS.has(line)) { 34 | const keyword = line as ControlKeyword; 35 | const payload: string[] = []; 36 | // Collect until blank terminator; payload lines are JSON-encoded strings 37 | i++; 38 | while (i < parts.length) { 39 | const next = parts[i]; 40 | if (next === "") { 41 | break; 42 | } 43 | payload.push(next); 44 | i++; 45 | } 46 | // If we ran out without a blank line terminator, stash partial back into buffer 47 | if (i >= parts.length || parts[i] !== "") { 48 | // Reconstruct the partial frame back into buffer including keyword and payload 49 | const remainder = [keyword, ...payload].join("\n"); 50 | this.buffer = remainder + (this.buffer ? 
"\n" + this.buffer : ""); 51 | return; 52 | } 53 | // Process complete control frame 54 | const frame: ControlFrame = { keyword, payload }; 55 | try { 56 | this.controlHandler.handle(frame); 57 | } catch (err) { 58 | // Swallow handler errors; protocol continues 59 | } 60 | continue; 61 | } 62 | 63 | // Not a control keyword; forward to graph line consumer 64 | this.callbacks.onGraphLine(line); 65 | } 66 | } 67 | } -------------------------------------------------------------------------------- /test/specification/infiniteLoopSpec.ts: -------------------------------------------------------------------------------- 1 | import { invertSpecification, Specification } from "@src"; 2 | 3 | // Test case to reproduce the infinite loop issue 4 | describe("specification inverse infinite loop bug", () => { 5 | it("should not cause infinite loop with complex match structures", () => { 6 | // Create a specification that previously caused infinite loop 7 | const specification: Specification = { 8 | given: [{ label: { name: "p1", type: "User" }, conditions: [] }], 9 | matches: [ 10 | { 11 | unknown: { name: "u1", type: "GameChallenge" }, 12 | conditions: [ 13 | { 14 | type: "path", 15 | rolesLeft: [{ name: "gameHub", predecessorType: "GameHub" }], 16 | labelRight: "u2", 17 | rolesRight: [] 18 | } 19 | ] 20 | }, 21 | { 22 | unknown: { name: "u2", type: "GameHub" }, 23 | conditions: [] 24 | }, 25 | { 26 | unknown: { name: "u3", type: "Player" }, 27 | conditions: [] 28 | }, 29 | { 30 | unknown: { name: "u4", type: "GameSession" }, 31 | conditions: [ 32 | { 33 | type: "path", 34 | rolesLeft: [{ name: "gameHub", predecessorType: "GameHub" }], 35 | labelRight: "u2", 36 | rolesRight: [] 37 | } 38 | ] 39 | }, 40 | { 41 | unknown: { name: "u5", type: "PlayerMove" }, 42 | conditions: [ 43 | { 44 | type: "path", 45 | rolesLeft: [{ name: "player", predecessorType: "Player" }], 46 | labelRight: "u3", 47 | rolesRight: [] 48 | } 49 | ] 50 | } 51 | ], 52 | projection: { type: "composite", components: [] } 53 | }; 54 | 55 | // This should throw an error (specific labels may vary due to processing order) 56 | expect(() => invertSpecification(specification)) 57 | .toThrow("Disconnected specification detected"); 58 | }); 59 | }); -------------------------------------------------------------------------------- /test/specification/versioningSpec.ts: -------------------------------------------------------------------------------- 1 | import { JinagaTest, buildModel } from "@src"; 2 | 3 | class Parent { 4 | static Type = "Parent"; 5 | type = Parent.Type; 6 | 7 | constructor( 8 | public readonly id: string 9 | ) {} 10 | } 11 | 12 | interface Child { 13 | parent: Parent; 14 | name: string; 15 | } 16 | 17 | class ChildVersion1 implements Child { 18 | static Type = "Child"; 19 | type = ChildVersion1.Type; 20 | 21 | constructor( 22 | public readonly parent: Parent, 23 | public readonly name: string 24 | ) {} 25 | } 26 | 27 | class ChildVersion2 implements Child { 28 | static Type = "Child"; 29 | type = ChildVersion2.Type; 30 | 31 | constructor( 32 | public readonly parent: Parent, 33 | public readonly name: string, 34 | public readonly age: number | undefined 35 | ) {} 36 | } 37 | 38 | const model = buildModel(b => b 39 | .type(Parent) 40 | .type(ChildVersion2, m => m 41 | .predecessor("parent", Parent) 42 | ) 43 | ); 44 | 45 | const childrenOfParentAsFacts = model.given(Parent).match((parent, facts) => 46 | facts.ofType(ChildVersion2) 47 | .join(child => child.parent, parent) 48 | ); 49 | 50 | const childrenOfParentWithFields = 
model.given(Parent).match((parent, facts) => 51 | facts.ofType(ChildVersion2) 52 | .join(child => child.parent, parent) 53 | .select(child => ({ 54 | name: child.name, 55 | age: child.age 56 | })) 57 | ); 58 | 59 | describe("versioning", () => { 60 | it("should read version 1 into version 2", async () => { 61 | const j = JinagaTest.create({ 62 | model, 63 | initialState: [ 64 | new Parent("parent"), 65 | new ChildVersion1(new Parent("parent"), "child") 66 | ] 67 | }); 68 | 69 | const parent = await j.fact(new Parent("parent")); 70 | const children = await j.query(childrenOfParentWithFields, parent); 71 | 72 | expect(children).toHaveLength(1); 73 | expect(children[0].name).toEqual("child"); 74 | expect(children[0].age).toBeUndefined(); 75 | }); 76 | 77 | it("should have the same hash", async () => { 78 | const j = JinagaTest.create({ 79 | model, 80 | initialState: [ 81 | new Parent("parent"), 82 | new ChildVersion1(new Parent("parent"), "child") 83 | ] 84 | }); 85 | 86 | const parent = await j.fact(new Parent("parent")); 87 | const children = await j.query(childrenOfParentAsFacts, parent); 88 | 89 | expect(children).toHaveLength(1); 90 | expect(j.hash(children[0])).toEqual( 91 | j.hash(new ChildVersion1(new Parent("parent"), "child")) 92 | ); 93 | }); 94 | }); -------------------------------------------------------------------------------- /documentation/successors.md: -------------------------------------------------------------------------------- 1 | # Querying for `successors` 2 | 3 | As an alternative to the `join` method, you can use the `successors` method to query for facts that are successors of a given fact. 4 | Rather than joining to `facts.ofType(T)`, you can find `successors(T, ...)` directly. 5 | 6 | When using `successors`, it is often possible to remove the `facts` parameter from the match function. 7 | 8 | ## Example of Using the `successors` Method 9 | 10 | In the context of a company model, you can use the `successors` method to find all offices of a company by specifying the relationship between the company and its offices. Here is an example: 11 | 12 | ```typescript 13 | const specification = model.given(Company).match(company => 14 | company.successors(Office, office => office.company) 15 | ); 16 | 17 | const result = await j.query(specification, company); 18 | ``` 19 | 20 | In this example, the `successors` method is used to find all offices of a company by specifying the relationship between the company and its offices. 21 | 22 | ## Compared to the `join` Method 23 | 24 | The alternative to the `successors` syntax in Jinaga is to use the `join` method. 25 | Here is that same query expressed using the `join` method: 26 | 27 | ```typescript 28 | const specification = model.given(Company).match((company, facts) => 29 | facts.ofType(Office) 30 | .join(office => office.company, company) 31 | ); 32 | 33 | const result = await j.query(specification, company); 34 | ``` 35 | 36 | Notice that we need to pass the `facts` parameter to the match function when using the `join` method. 37 | Then we use `facts.ofType(Office)` to find all offices of the company, and `join` to specify the relationship between the company and its offices. 38 | 39 | ## Using the `successors` Method with Composite Projections 40 | 41 | Composite projections allow you to define a structure for the results of a query. 42 | You can define nested projections and collections. 43 | The `successors` method can be used within composite projections. 
44 | Here is an example: 45 | 46 | ```typescript 47 | const specification = model.given(Company).match(company => 48 | company.successors(Office, office => office.company) 49 | .select(office => ({ 50 | identifier: office.identifier, 51 | employees: office.successors(Employee, employee => employee.office) 52 | })) 53 | ); 54 | 55 | const result = await j.query(specification, company); 56 | ``` 57 | 58 | In this example, the `successors` method is used to find all offices of a company and include additional information about each office, such as its employees, in the projection. 59 | -------------------------------------------------------------------------------- /src/http/messageParsers.ts: -------------------------------------------------------------------------------- 1 | import { FactRecord, FactReference, PredecessorCollection } from "../storage"; 2 | import { LoadMessage, SaveMessage } from "./messages"; 3 | 4 | function parseFactReference(factReference: any): FactReference { 5 | if (typeof factReference !== 'object') throw new Error("Expected FactReference to be an object."); 6 | if (typeof factReference.type !== 'string') throw new Error("Expected a string 'type' property."); 7 | if (typeof factReference.hash !== 'string') throw new Error("Expected a string 'hash' property."); 8 | return { 9 | type: factReference.type, 10 | hash: factReference.hash 11 | }; 12 | } 13 | 14 | function parsePredecessor(predecessor: any): FactReference | FactReference[] { 15 | if (Array.isArray(predecessor)) { 16 | return predecessor.map(parseFactReference); 17 | } 18 | else { 19 | return parseFactReference(predecessor); 20 | } 21 | } 22 | 23 | function parsePredecessorCollection(predecessors: any): PredecessorCollection { 24 | if (typeof predecessors !== 'object') throw new Error("Expected PredecessorCollection to be an object."); 25 | return Object.keys(predecessors).reduce((result, key) => ({ 26 | ...result, 27 | [key]: parsePredecessor(predecessors[key]) 28 | }), {} as PredecessorCollection); 29 | } 30 | 31 | function parseFactRecord(factRecord: any): FactRecord { 32 | if (typeof factRecord !== 'object') throw new Error("Expected FactRecord to be an object."); 33 | if (typeof factRecord.type !== 'string') throw new Error("Expected a string 'type' property."); 34 | if (typeof factRecord.hash !== 'string') throw new Error("Expected a string 'hash' property."); 35 | if (typeof factRecord.fields !== 'object') throw new Error("Expected an object 'fields' property."); 36 | return { 37 | type: factRecord.type, 38 | hash: factRecord.hash, 39 | predecessors: parsePredecessorCollection(factRecord.predecessors), 40 | fields: factRecord.fields 41 | }; 42 | } 43 | 44 | export function parseSaveMessage(message: any): SaveMessage { 45 | if (typeof message !== 'object') throw new Error("Expected an object. Check the content type of the request."); 46 | if (!Array.isArray(message.facts)) throw new Error("Expected an array 'facts' property."); 47 | return { 48 | facts: message.facts.map(parseFactRecord) 49 | }; 50 | } 51 | 52 | export function parseLoadMessage(message: any): LoadMessage { 53 | if (typeof message !== 'object') throw new Error("Expected an object. 
Check the content type of the request."); 54 | if (!Array.isArray(message.references)) throw new Error("Expected an array 'references' property."); 55 | return { 56 | references: message.references.map(parseFactReference) 57 | }; 58 | } -------------------------------------------------------------------------------- /test/single-use/singleUseStoreSpec.ts: -------------------------------------------------------------------------------- 1 | import { AuthenticationTest, FactManager, Jinaga, MemoryStore, ObservableSource, PassThroughFork, User } from '@src'; 2 | 3 | // Define a test fact type that will be owned by the single-use principal 4 | class TestFact { 5 | static Type = "TestFact" as const; 6 | type = TestFact.Type; 7 | 8 | constructor( 9 | public owner: User, 10 | public value: string 11 | ) { } 12 | } 13 | 14 | describe('SingleUse with Store', () => { 15 | it('should create and sign facts with a single-use principal', async () => { 16 | // Arrange 17 | const store = new MemoryStore(); 18 | const fork = new PassThroughFork(store); 19 | const observableSource = new ObservableSource(store); 20 | const authentication = new AuthenticationTest(store, null, null, null); 21 | const factManager = new FactManager(fork, observableSource, store, { 22 | feeds: async () => [], 23 | fetchFeed: async () => ({ references: [], bookmark: '' }), 24 | streamFeed: () => () => {}, 25 | load: async () => [] 26 | }, []); 27 | const j = new Jinaga(authentication, factManager, null); 28 | 29 | // Act 30 | const result = await j.singleUse(async (principal: User) => { 31 | // Create a fact owned by the principal 32 | const fact = await j.fact(new TestFact(principal, 'test value')); 33 | return fact; 34 | }); 35 | 36 | // Assert 37 | expect(result).toBeDefined(); 38 | expect(result.type).toBe('TestFact'); 39 | expect(result.owner.type).toBe('Jinaga.User'); 40 | expect(result.owner.publicKey).toBeDefined(); 41 | expect(result.value).toBe('test value'); 42 | 43 | // Verify that the fact was saved to the store 44 | const facts = await store.load([{ 45 | type: 'TestFact', 46 | hash: Jinaga.hash(result) 47 | }]); 48 | 49 | // Find the TestFact in the returned facts 50 | const testFact = facts.find(f => f.fact.type === 'TestFact'); 51 | expect(testFact).toBeDefined(); 52 | expect(testFact!.fact.fields.value).toBe('test value'); 53 | 54 | // Verify that the fact has a signature 55 | expect(testFact!.signatures.length).toBeGreaterThan(0); 56 | 57 | // Verify that the user fact was saved to the store 58 | const userFacts = await store.load([{ 59 | type: 'Jinaga.User', 60 | hash: Jinaga.hash(result.owner) 61 | }]); 62 | expect(userFacts.length).toBe(1); 63 | expect(userFacts[0].fact.type).toBe('Jinaga.User'); 64 | expect(userFacts[0].fact.fields.publicKey).toBeDefined(); 65 | 66 | // Verify that the user fact has a signature 67 | expect(userFacts[0].signatures.length).toBeGreaterThan(0); 68 | }); 69 | }); 70 | -------------------------------------------------------------------------------- /test/distribution/distributionDebugInfoSpec.ts: -------------------------------------------------------------------------------- 1 | import { JinagaTest, Trace, User } from "@src"; 2 | import { Blog, Comment, Post, distribution, model } from "../blogModel"; 3 | 4 | describe("distribution debug information", () => { 5 | Trace.off(); 6 | 7 | const creator = new User("creator"); 8 | const reader = new User("reader"); 9 | const blog = new Blog(creator, "domain"); 10 | const post = new Post(blog, creator, new Date()); 11 | 12 | it("should 
provide detailed failure information in test mode", async () => { 13 | const specification = model.given(Blog).match((blog, facts) => 14 | facts.ofType(Post) 15 | .join(post => post.blog, blog) 16 | ); 17 | 18 | const j = JinagaTest.create({ 19 | model, 20 | user: reader, 21 | initialState: [ 22 | creator, 23 | reader, 24 | blog, 25 | post 26 | ], 27 | distribution 28 | }); 29 | 30 | try { 31 | await j.query(specification, blog); 32 | fail("Expected query to throw 'Not authorized' error"); 33 | } catch (error: any) { 34 | expect(error.message).toContain("Not authorized"); 35 | expect(error.message).toContain("The user does not match"); 36 | 37 | // Check for enhanced debug information that should be present in test mode 38 | expect(error.message).toContain("Expected hashes:"); 39 | expect(error.message).toContain("User hash:"); 40 | 41 | // Verify the user fact contains the reader's hash 42 | expect(error.message).toContain(j.hash(reader)); 43 | } 44 | }); 45 | 46 | it("should include matching set information when available", async () => { 47 | const specification = model.given(Blog).match((blog, facts) => 48 | facts.ofType(Comment) 49 | .join(comment => comment.post.blog, blog) 50 | ); 51 | 52 | const comment = new Comment(post, reader, "test comment", new Date()); 53 | 54 | const j = JinagaTest.create({ 55 | model, 56 | user: reader, 57 | initialState: [ 58 | creator, 59 | reader, 60 | blog, 61 | post, 62 | comment 63 | ], 64 | distribution 65 | }); 66 | 67 | try { 68 | await j.query(specification, blog); 69 | fail("Expected query to throw 'Not authorized' error"); 70 | } catch (error: any) { 71 | expect(error.message).toContain("Not authorized"); 72 | expect(error.message).toContain("The user does not match"); 73 | 74 | // Verify that detailed debug information is present 75 | expect(error.message).toContain("Expected hashes:"); 76 | expect(error.message).toContain("User hash:"); 77 | 78 | // Verify the user fact contains the reader's information 79 | expect(error.message).toContain(j.hash(reader)); 80 | } 81 | }); 82 | }); -------------------------------------------------------------------------------- /src/managers/PurgeManager.ts: -------------------------------------------------------------------------------- 1 | import { testSpecificationForCompliance } from "../purge/purgeCompliance"; 2 | import { SpecificationInverse, invertSpecification } from "../specification/inverse"; 3 | import { Specification } from "../specification/specification"; 4 | import { FactEnvelope, FactReference, ProjectedResult, Storage } from "../storage"; 5 | import { Trace } from "../util/trace"; 6 | 7 | export class PurgeManager { 8 | private purgeInverses: SpecificationInverse[]; 9 | 10 | constructor(private readonly store: Storage, private readonly purgeConditions: Specification[]) { 11 | this.purgeInverses = purgeConditions.map(pc => invertSpecification(pc)).flat(); 12 | } 13 | 14 | async purge(): Promise { 15 | const count = await this.store.purge(this.purgeConditions); 16 | if (count > 0) { 17 | Trace.counter("facts_purged", count); 18 | } 19 | } 20 | 21 | async triggerPurge(factsAdded: FactEnvelope[]): Promise { 22 | for (const envelope of factsAdded) { 23 | const fact = envelope.fact; 24 | for (const purgeInverse of this.purgeInverses) { 25 | // Only run the purge inverse if the given type matches the fact type 26 | if (purgeInverse.inverseSpecification.given[0].label.type !== fact.type) { 27 | continue; 28 | } 29 | 30 | const givenReference = { 31 | type: fact.type, 32 | hash: fact.hash 33 | }; 34 | 
const results: ProjectedResult[] = await this.store.read([givenReference], purgeInverse.inverseSpecification); 35 | for (const result of results) { 36 | const givenName = purgeInverse.givenSubset[0]; 37 | // The given is the purge root 38 | const purgeRoot: FactReference = result.tuple[givenName]; 39 | // All other members of the result tuple are triggers 40 | const triggers: FactReference[] = Object.keys(result.tuple) 41 | .filter(k => k !== givenName) 42 | .map(k => result.tuple[k]); 43 | 44 | // Purge all descendants of the purge root except for the triggers 45 | const count = await this.store.purgeDescendants(purgeRoot, triggers); 46 | if (count > 0) { 47 | Trace.counter("facts_purged", count); 48 | } 49 | } 50 | } 51 | } 52 | } 53 | 54 | public checkCompliance(specification: Specification): void { 55 | const failures = testSpecificationForCompliance(specification, this.purgeConditions); 56 | if (failures.length > 0) { 57 | const message = failures.join("\n"); 58 | throw new Error(message); 59 | } 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /test/blogModel.ts: -------------------------------------------------------------------------------- 1 | import { DistributionRules, User, buildModel } from "@src"; 2 | 3 | export class Blog { 4 | static Type = "Blog" as const; 5 | type = Blog.Type; 6 | 7 | constructor( 8 | public creator: User, 9 | public domain: string 10 | ) { } 11 | } 12 | 13 | export class Post { 14 | static Type = "Post" as const; 15 | type = Post.Type; 16 | 17 | constructor( 18 | public blog: Blog, 19 | public author: User, 20 | public createdAt: Date | string 21 | ) { } 22 | } 23 | 24 | export class Publish { 25 | static Type = "Publish" as const; 26 | type = Publish.Type; 27 | 28 | constructor( 29 | public post: Post, 30 | public date: Date | string 31 | ) { } 32 | } 33 | 34 | export class Comment { 35 | static Type = "Comment" as const; 36 | type = Comment.Type; 37 | 38 | constructor( 39 | public post: Post, 40 | public author: User, 41 | public text: string, 42 | public createdAt: Date | string 43 | ) { } 44 | } 45 | 46 | export class CommentApproved { 47 | static Type = "CommentApproved" as const; 48 | type = CommentApproved.Type; 49 | 50 | constructor( 51 | public comment: Comment, 52 | public approvedAt: Date | string 53 | ) { } 54 | } 55 | 56 | export const model = buildModel(b => b 57 | .type(User) 58 | .type(Blog, x => x 59 | .predecessor("creator", User) 60 | ) 61 | .type(Post, x => x 62 | .predecessor("blog", Blog) 63 | .predecessor("author", User) 64 | ) 65 | .type(Publish, x => x 66 | .predecessor("post", Post) 67 | ) 68 | .type(Comment, x => x 69 | .predecessor("post", Post) 70 | .predecessor("author", User) 71 | ) 72 | .type(CommentApproved, x => x 73 | .predecessor("comment", Comment) 74 | ) 75 | ); 76 | 77 | export const distribution = (r: DistributionRules) => r 78 | // Everyone can see published posts 79 | .share(model.given(Blog).match(blog => 80 | blog.successors(Post, post => post.blog) 81 | .exists(post => post.successors(Publish, publish => publish.post)) 82 | )).withEveryone() 83 | // The creator can see all posts and comments 84 | .share(model.given(Blog).select(blog => ({ 85 | posts: blog.successors(Post, post => post.blog), 86 | comments: blog.successors(Comment, comment => comment.post.blog) 87 | }))).with(model.given(Blog).match(blog => 88 | blog.creator.predecessor() 89 | )) 90 | // A comment author can see their own comments on published posts 91 | .share(model.given(Blog, 
User).match((blog, author) => 92 | blog.successors(Post, post => post.blog) 93 | .exists(post => post.successors(Publish, publish => publish.post)) 94 | .selectMany(post => post.successors(Comment, comment => comment.post) 95 | .join(comment => comment.author, author) 96 | ) 97 | )).with(model.given(Blog, User).select((blog, author) => 98 | author 99 | )); 100 | -------------------------------------------------------------------------------- /test/fact/factReferenceCompanySpec.ts: -------------------------------------------------------------------------------- 1 | import { Jinaga, JinagaTest, User } from '@src'; 2 | import { Company, model } from '../companyModel'; 3 | 4 | describe('factReference with company model', () => { 5 | let j: Jinaga; 6 | 7 | beforeEach(async () => { 8 | j = JinagaTest.create({}); 9 | }); 10 | 11 | it('should work with real company model facts', async () => { 12 | // Create an actual user fact 13 | const realUser = await j.fact(new User('test-public-key')); 14 | const userHash = j.hash(realUser); 15 | 16 | console.log('Real user:', realUser); 17 | console.log('Real user hash:', userHash); 18 | 19 | // Create a fact reference 20 | const userRef = j.factReference(User, userHash); 21 | console.log('User ref:', userRef); 22 | console.log('User ref hash:', j.hash(userRef)); 23 | 24 | // They should have the same hash and type 25 | expect(j.hash(userRef)).toBe(userHash); 26 | expect(userRef.type).toBe(realUser.type); 27 | }); 28 | 29 | it('should work with company creation queries', async () => { 30 | // Create a user and company 31 | const user = await j.fact(new User('creator-key')); 32 | const company = await j.fact(new Company(user, 'TestCorp')); 33 | 34 | const userHash = j.hash(user); 35 | const userRef = j.factReference(User, userHash); 36 | 37 | // Query for companies created by this user using the reference 38 | const companies = await j.query( 39 | model.given(User).match((u, facts) => 40 | facts.ofType(Company).join(c => c.creator, u) 41 | ), 42 | userRef 43 | ); 44 | 45 | console.log('Companies found:', companies); 46 | expect(companies).toHaveLength(1); 47 | expect(companies[0].identifier).toBe('TestCorp'); 48 | }); 49 | 50 | it('should work for identity queries', async () => { 51 | // Create a user 52 | const user = await j.fact(new User('identity-test-key')); 53 | const userHash = j.hash(user); 54 | const userRef = j.factReference(User, userHash); 55 | 56 | // Simple identity query - just return the user itself 57 | const realUserResult = await j.query( 58 | model.given(User).select(u => u), 59 | user 60 | ); 61 | 62 | const refUserResult = await j.query( 63 | model.given(User).select(u => u), 64 | userRef 65 | ); 66 | 67 | console.log('Real user result:', realUserResult); 68 | console.log('Ref user result:', refUserResult); 69 | 70 | // Both should return the same user 71 | expect(realUserResult).toHaveLength(1); 72 | expect(refUserResult).toHaveLength(1); 73 | expect(j.hash(realUserResult[0])).toBe(j.hash(refUserResult[0])); 74 | }); 75 | }); -------------------------------------------------------------------------------- /test/specification/predecessorDuplicationSpec.ts: -------------------------------------------------------------------------------- 1 | import { Jinaga, JinagaTest, User } from "@src"; 2 | import { Company, Office, model } from "../companyModel"; 3 | 4 | describe("Predecessor Pattern Observer", () => { 5 | let j: Jinaga; 6 | let creator: User; 7 | let company: Company; 8 | 9 | beforeEach(() => { 10 | creator = new User("--- PUBLIC KEY 
GOES HERE ---"); 11 | company = new Company(creator, "TestCo"); 12 | 13 | // Start with company but NO office yet 14 | j = JinagaTest.create({ 15 | initialState: [creator, company] 16 | }); 17 | }); 18 | 19 | it("should invoke callback exactly once when given fact arrives", async () => { 20 | // Specification that navigates from Office (given) to Company (predecessor) 21 | const specification = model.given(Office).match((office, facts) => 22 | facts.ofType(Company) 23 | .join(company => company, office.company) 24 | ); 25 | 26 | // Track callback invocations 27 | const callbacks: Company[] = []; 28 | 29 | // Subscribe before the Office exists 30 | const observer = j.watch(specification, new Office(company, "TestOffice"), companyResult => { 31 | callbacks.push(companyResult); 32 | }); 33 | 34 | await observer.loaded(); 35 | 36 | // At this point, Office doesn't exist, so no callbacks yet 37 | expect(callbacks.length).toBe(0); 38 | 39 | // Now save the Office fact - this triggers the observer 40 | const office = await j.fact(new Office(company, "TestOffice")); 41 | await observer.processed(); 42 | 43 | // Callback should be invoked EXACTLY ONCE, not twice 44 | expect(callbacks.length).toBe(1); 45 | expect(callbacks[0]).toMatchObject({ 46 | type: "Company", 47 | identifier: "TestCo" 48 | }); 49 | 50 | observer.stop(); 51 | }); 52 | 53 | it("should invoke callback exactly once when given fact exists before subscription", async () => { 54 | // Create office before subscribing 55 | const office = await j.fact(new Office(company, "TestOffice")); 56 | 57 | const specification = model.given(Office).match((office, facts) => 58 | facts.ofType(Company) 59 | .join(company => company, office.company) 60 | ); 61 | 62 | const callbacks: Company[] = []; 63 | 64 | const observer = j.watch(specification, office, companyResult => { 65 | callbacks.push(companyResult); 66 | }); 67 | 68 | await observer.loaded(); 69 | 70 | // Callback should be invoked EXACTLY ONCE during initial load 71 | expect(callbacks.length).toBe(1); 72 | expect(callbacks[0]).toMatchObject({ 73 | type: "Company", 74 | identifier: "TestCo" 75 | }); 76 | 77 | observer.stop(); 78 | }); 79 | }); 80 | 81 | -------------------------------------------------------------------------------- /src/util/trace.ts: -------------------------------------------------------------------------------- 1 | export interface Tracer { 2 | info(message: string): void; 3 | warn(message: string): void; 4 | error(error: any): void; 5 | dependency(name: string, data: string, operation: () => Promise): Promise; 6 | metric(message: string, measurements: { [key: string]: number }): void; 7 | counter(name: string, value: number): void; 8 | } 9 | 10 | export class NoOpTracer implements Tracer { 11 | info(message: string): void { 12 | } 13 | warn(message: string): void { 14 | } 15 | error(error: any): void { 16 | } 17 | dependency(name: string, data: string, operation: () => Promise): Promise { 18 | return operation(); 19 | } 20 | metric(message: string, measurements: { [key: string]: number }): void { 21 | } 22 | 23 | counter(name: string, value: number): void { 24 | } 25 | } 26 | 27 | export class ConsoleTracer implements Tracer { 28 | info(message: string): void { 29 | console.log(message); 30 | } 31 | warn(message: string): void { 32 | console.warn(message); 33 | } 34 | error(error: any): void { 35 | console.error(error); 36 | } 37 | async dependency(name: string, data: string, operation: () => Promise): Promise { 38 | const start = new Date().getTime(); 39 | try { 40 | 
return await operation(); 41 | } 42 | finally { 43 | const end = new Date().getTime(); 44 | const duration = end - start; 45 | 46 | // Log the dependency 47 | console.log(`Dependency: ${name} (${data}) took ${duration}ms`); 48 | } 49 | } 50 | 51 | metric(message: string, measurements: { [key: string]: number }): void { 52 | console.log(`Metric: ${message}`, measurements); 53 | } 54 | 55 | counter(name: string, value: number): void { 56 | console.log(`Counter: ${name} = ${value}`); 57 | } 58 | } 59 | 60 | export class Trace { 61 | private static tracer: Tracer = new ConsoleTracer(); 62 | 63 | static configure(tracer: Tracer) { 64 | Trace.tracer = tracer; 65 | } 66 | 67 | static off() { 68 | Trace.tracer = new NoOpTracer(); 69 | } 70 | 71 | static getTracer(): Tracer { 72 | return Trace.tracer; 73 | } 74 | 75 | static info(message: string): void { 76 | this.tracer.info(message); 77 | } 78 | 79 | static warn(message: string): void { 80 | this.tracer.warn(message); 81 | } 82 | 83 | static error(error: any): void { 84 | this.tracer.error(error); 85 | } 86 | 87 | static dependency(name: string, data: string, operation: () => Promise): Promise { 88 | return this.tracer.dependency(name, data, operation); 89 | } 90 | 91 | static metric(message: string, measurements: { [key: string]: number }): void { 92 | this.tracer.metric(message, measurements); 93 | } 94 | 95 | static counter(name: string, value: number): void { 96 | this.tracer.counter(name, value); 97 | } 98 | } -------------------------------------------------------------------------------- /test/specification/missingFactSpec.ts: -------------------------------------------------------------------------------- 1 | import { Jinaga, JinagaTest, User } from "@src"; 2 | import { Company, Office, model } from "../companyModel"; 3 | 4 | describe("missing fact handling", () => { 5 | let creator: User; 6 | let company: Company; 7 | let office: Office; 8 | let j: Jinaga; 9 | 10 | beforeEach(() => { 11 | creator = new User("--- PUBLIC KEY GOES HERE ---"); 12 | company = new Company(creator, "TestCo"); 13 | office = new Office(company, "TestOffice"); 14 | j = JinagaTest.create({ 15 | initialState: [ 16 | creator, 17 | company, 18 | office 19 | ] 20 | }); 21 | }); 22 | 23 | it("should return empty result when querying with non-persisted given", async () => { 24 | // Create a company that is not persisted 25 | const nonPersistedCompany = new Company(creator, "NonPersistedCo"); 26 | 27 | // Create a specification that uses the non-persisted company as given 28 | const specification = model.given(Company).match((company, facts) => 29 | facts.ofType(Office) 30 | .join(office => office.company, company) 31 | ); 32 | 33 | // This should return an empty result instead of throwing an error 34 | const result = await j.query(specification, nonPersistedCompany); 35 | expect(result).toEqual([]); 36 | }); 37 | 38 | it("should return empty result when fact projection references missing fact", async () => { 39 | // Create a company that is not persisted 40 | const nonPersistedCompany = new Company(creator, "NonPersistedCo"); 41 | 42 | // Create a specification that selects the company fact itself 43 | const specification = model.given(Company).select((company, facts) => company); 44 | 45 | // This should return an empty result instead of throwing an error 46 | const result = await j.query(specification, nonPersistedCompany); 47 | expect(result).toEqual([]); 48 | }); 49 | 50 | it("should return empty result when querying with fact that has persisted predecessors", async 
() => { 51 | // Create a specification that looks for offices belonging to a given company 52 | const specification = model.given(Company).match((company, facts) => 53 | facts.ofType(Office) 54 | .join(office => office.company, company) 55 | ); 56 | 57 | // This should work for the persisted company 58 | const persistedResult = await j.query(specification, company); 59 | expect(persistedResult.length).toBe(1); 60 | expect(persistedResult[0].identifier).toBe(office.identifier); 61 | expect(persistedResult[0].type).toBe(office.type); 62 | 63 | // Create a company that was not in initial state 64 | const newCompany = new Company(creator, "NewCo"); 65 | 66 | // Querying with a company that wasn't persisted should return empty result 67 | const newCompanyResult = await j.query(specification, newCompany); 68 | expect(newCompanyResult).toEqual([]); 69 | }); 70 | }); -------------------------------------------------------------------------------- /.cursor/rules/fact-patterns.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Use when working with facts, specifications, queries, or data relationships to follow Jinaga's data patterns and best practices 3 | --- 4 | # Fact and Specification Patterns 5 | 6 | ## Fact Structure 7 | 8 | Facts should be classes with a static `Type` property: 9 | ```typescript 10 | export class BlogPost { 11 | static Type = "Blog.Post" as const; 12 | type = BlogPost.Type; 13 | 14 | constructor( 15 | public author: User, 16 | public title: string, 17 | public content: string 18 | ) { } 19 | } 20 | ``` 21 | 22 | ## Creating Facts 23 | 24 | - Use the `j.fact()` method to create new facts 25 | - Facts are immutable and automatically hashed 26 | - Include all required predecessor references 27 | - Use descriptive type names (e.g., `"Blog.Post"`, `"User.Profile"`) 28 | 29 | ## Model Builder Pattern 30 | 31 | Specifications are created using the model builder pattern: 32 | ```typescript 33 | export const model = buildModel(b => b 34 | .type(User) 35 | .type(BlogPost, x => x 36 | .predecessor("author", User) 37 | ) 38 | ); 39 | ``` 40 | 41 | ## Specifications 42 | 43 | Specifications define query patterns using the model: 44 | - Use `model.given().match()` to create specifications 45 | - Use `facts.ofType()` to find facts by type 46 | - Use `join()` to traverse predecessor relationships 47 | - Use `successors()` to find successor facts 48 | - Use `select()` to project specific fields 49 | 50 | ## Example Specification Pattern 51 | 52 | ```typescript 53 | const blogPostsSpec = model.given(User).match(user => 54 | user.successors(BlogPost, post => post.author) 55 | ); 56 | 57 | const posts = await j.query(blogPostsSpec, user); 58 | ``` 59 | 60 | ## Observers and Watchers 61 | 62 | - Use `j.watch()` for reactive data watching with automatic updates 63 | - Use `j.subscribe()` for server-sent events 64 | - Observers automatically update when facts change 65 | - Handle observer lifecycle properly 66 | 67 | ## Authorization Rules 68 | 69 | Authorization rules are defined using the `AuthorizationRules` class in [src/authorization/authorizationRules.ts](mdc:src/authorization/authorizationRules.ts): 70 | 71 | ```typescript 72 | const authorization = (a: AuthorizationRules) => a 73 | .any(User) // Allow all users 74 | .type(Post, post => post.author) // Allow post authors 75 | .type(Comment, comment => comment.author) // Allow comment authors 76 | .no(PrivatePost); // Deny private posts 77 | ``` 78 | 79 | Authorization rules require 
the model: 80 | ```typescript 81 | const authorizationRules = new AuthorizationRules(model); 82 | const rules = authorization(authorizationRules); 83 | ``` 84 | 85 | Common patterns: 86 | - `.any(Type)` - Allow all facts of a type 87 | - `.no(Type)` - Deny all facts of a type 88 | - `.type(Fact, fact => fact.predecessor)` - Allow based on predecessor 89 | - `.type(Fact, (fact, facts) => facts.ofType(User).join(...))` - Complex rules 90 | 91 | Rules are evaluated on both client and server: 92 | - Tests: Validates facts during creation only when authorization rules are configured in test setup 93 | - Client: Does not validate facts against authorization rules 94 | - Server: Validates facts upon receipt 95 | -------------------------------------------------------------------------------- /src/fork/persistent-fork.ts: -------------------------------------------------------------------------------- 1 | import { TopologicalSorter } from '../fact/sorter'; 2 | import { WebClient } from '../http/web-client'; 3 | import { QueueProcessor } from '../managers/QueueProcessor'; 4 | import { FactEnvelope, factEnvelopeEquals, FactRecord, FactReference, Queue, Storage } from '../storage'; 5 | import { Trace } from "../util/trace"; 6 | import { Fork } from "./fork"; 7 | import { serializeLoad } from './serialize'; 8 | import { WebClientSaver } from './web-client-saver'; 9 | 10 | export class PersistentFork implements Fork { 11 | private queueProcessor: QueueProcessor; 12 | 13 | constructor( 14 | private storage: Storage, 15 | private queue: Queue, 16 | private client: WebClient, 17 | private delayMilliseconds: number 18 | ) { 19 | const saver = new WebClientSaver(client, queue); 20 | this.queueProcessor = new QueueProcessor(saver, delayMilliseconds); 21 | } 22 | 23 | initialize() { 24 | // Schedule processing of any existing items in the queue 25 | this.queueProcessor.scheduleProcessing(); 26 | } 27 | 28 | async close(): Promise { 29 | // Process any pending facts before closing 30 | try { 31 | await this.processQueueNow(); 32 | } catch (error) { 33 | Trace.error(error); 34 | } 35 | this.queueProcessor.dispose(); 36 | return Promise.resolve(); 37 | } 38 | async save(envelopes: FactEnvelope[]): Promise { 39 | await this.queue.enqueue(envelopes); 40 | this.queueProcessor.scheduleProcessing(); 41 | } 42 | 43 | async load(references: FactReference[]): Promise { 44 | const known = await this.storage.load(references); 45 | const remaining = references.filter(reference => !known.some(factEnvelopeEquals(reference))); 46 | if (remaining.length === 0) { 47 | return known; 48 | } 49 | else { 50 | const records = await this.loadEnvelopes(remaining); 51 | return records.concat(known); 52 | } 53 | } 54 | 55 | /** 56 | * Processes the queue immediately, bypassing any delay. 
57 | */ 58 | async processQueueNow(): Promise { 59 | await this.queueProcessor.processQueueNow(); 60 | } 61 | 62 | private async loadEnvelopes(references: FactReference[]) { 63 | const sorter = new TopologicalSorter(); 64 | let loaded: FactEnvelope[] = []; 65 | for (let start = 0; start < references.length; start += 300) { 66 | const chunk = references.slice(start, start + 300); 67 | const response = await this.client.loadWithRetry(serializeLoad(chunk)); 68 | const facts = sorter.sort(response.facts, (p, f) => f); 69 | const envelopes = facts.map(fact => { 70 | return { 71 | fact: fact, 72 | signatures: [] 73 | }; 74 | }); 75 | await this.storage.save(envelopes); 76 | loaded = loaded.concat(envelopes); 77 | } 78 | return loaded; 79 | } 80 | } -------------------------------------------------------------------------------- /src/indexeddb/driver.ts: -------------------------------------------------------------------------------- 1 | import { FactReference } from '../storage'; 2 | 3 | function upgradingToVersion({ newVersion, oldVersion }: IDBVersionChangeEvent, ver: number) { 4 | return newVersion && newVersion >= ver && oldVersion < ver; 5 | } 6 | 7 | function openDatabase(indexName: string): Promise { 8 | return new Promise((resolve, reject) => { 9 | const request = global.indexedDB.open(indexName, 2); 10 | request.onsuccess = _ => resolve(request.result); 11 | request.onerror = _ => reject(`Error opening database ${indexName}: ${JSON.stringify(request.error, null, 2)}.`); 12 | request.onupgradeneeded = ev => { 13 | const db = request.result; 14 | if (upgradingToVersion(ev, 1)) { 15 | db.createObjectStore('login'); 16 | db.createObjectStore('fact'); 17 | db.createObjectStore('ancestor'); 18 | const edgeObjectStore = db.createObjectStore('edge', { 19 | keyPath: ['successor', 'predecessor', 'role'] 20 | }); 21 | edgeObjectStore.createIndex('predecessor', ['predecessor', 'role'], { unique: false }); 22 | edgeObjectStore.createIndex('successor', ['successor', 'role'], { unique: false }); 23 | edgeObjectStore.createIndex('all', 'successor', { unique: false }); 24 | db.createObjectStore('queue'); 25 | } 26 | if (upgradingToVersion(ev, 2)) { 27 | db.createObjectStore('bookmark'); 28 | const specificationObjectStore = db.createObjectStore('specification'); 29 | specificationObjectStore.createIndex('mru', '', { unique: false }); 30 | } 31 | } 32 | }); 33 | } 34 | 35 | export async function withDatabase(indexName: string, action: (db: IDBDatabase) => Promise) { 36 | const db = await openDatabase(indexName); 37 | const result = await action(db); 38 | db.close(); 39 | return result; 40 | } 41 | 42 | export async function withTransaction(db: IDBDatabase, storeNames: string[], mode: IDBTransactionMode, action: (transaction: IDBTransaction) => Promise) { 43 | const transaction = db.transaction(storeNames, mode); 44 | const transactionComplete = new Promise((resolve, reject) => { 45 | transaction.oncomplete = _ => resolve(); 46 | transaction.onerror = _ => reject(`Error executing transaction ${JSON.stringify(transaction.error?.message, null, 2)}`); 47 | }); 48 | const [result, v] = await Promise.all([action(transaction), transactionComplete]); 49 | return result; 50 | } 51 | 52 | export function execRequest(request: IDBRequest) { 53 | return new Promise((resolve, reject) => { 54 | request.onsuccess = (_: Event) => resolve(request.result); 55 | request.onerror = (_: Event) => reject(`Error executing request ${JSON.stringify(request.error?.message, null, 2)}`); 56 | }); 57 | } 58 | 59 | export function 
factKey(fact: FactReference) { 60 | return `${fact.type}:${fact.hash}`; 61 | } 62 | 63 | export function keyToReference(key: string): FactReference { 64 | const regex = /([^:]*):(.*)/; 65 | const match = regex.exec(key); 66 | if (!match) { 67 | throw new Error(`Invalid key ${key}`); 68 | } 69 | const [ _, type, hash ] = match; 70 | return { type, hash }; 71 | } 72 | -------------------------------------------------------------------------------- /src/fact/sorter.ts: -------------------------------------------------------------------------------- 1 | import { FactRecord, FactReference } from '../storage'; 2 | 3 | export class TopologicalSorter { 4 | private factsVisited: { [key: string]: boolean } = {}; 5 | private factsWaiting: { [key: string]: FactRecord[] } = {}; 6 | private factValue: { [key: string]: T } = {}; 7 | 8 | sort(facts: FactRecord[], map: (predecessors: T[], fact: FactRecord) => T): T[] { 9 | const factsReceived: T[] = []; 10 | const factQueue = facts.slice(0); 11 | 12 | while (factQueue.length > 0) { 13 | const fact = factQueue.shift()!; 14 | const predecessorKeys = this.allPredecessors(fact); 15 | const waitingPredecessors = predecessorKeys.filter(key => { 16 | return !this.factsVisited[key]; 17 | }); 18 | if (waitingPredecessors.length === 0) { 19 | const key = this.factKey(fact); 20 | this.factsVisited[key] = true; 21 | const predecessorValues = predecessorKeys.map(k => { 22 | return this.factValue[k]; 23 | }); 24 | const factValue = map(predecessorValues, fact); 25 | this.factValue[key] = factValue; 26 | factsReceived.push(factValue); 27 | const retry = this.factsWaiting[key]; 28 | if (retry) { 29 | retry.forEach(r => { 30 | if (!factQueue.some(f => f.type === r.type && f.hash === r.hash)) { 31 | factQueue.push(r); 32 | } 33 | }); 34 | delete this.factsWaiting[key]; 35 | } 36 | } 37 | else { 38 | waitingPredecessors.forEach(key => { 39 | let list = this.factsWaiting[key]; 40 | if (!list) { 41 | list = []; 42 | this.factsWaiting[key] = list; 43 | } 44 | if (!list.some(f => f.type === fact.type && f.hash === fact.hash)) { 45 | list.push(fact); 46 | } 47 | }); 48 | } 49 | } 50 | 51 | return factsReceived; 52 | } 53 | 54 | finished(): boolean { 55 | for (const key in this.factsWaiting) { 56 | if (this.factsWaiting[key]) { 57 | return false; 58 | } 59 | } 60 | 61 | return true; 62 | } 63 | 64 | private allPredecessors(fact: FactRecord): string[] { 65 | let predecessors: string[] = []; 66 | 67 | for (const role in fact.predecessors) { 68 | const references = fact.predecessors[role]; 69 | if (Array.isArray(references)) { 70 | predecessors = predecessors.concat(references.map(r => this.factKey(r))); 71 | } 72 | else { 73 | predecessors.push(this.factKey(references)); 74 | } 75 | } 76 | 77 | return predecessors; 78 | } 79 | 80 | private factKey(fact: FactReference): string { 81 | return `${fact.type}:${fact.hash}`; 82 | } 83 | } -------------------------------------------------------------------------------- /src/distribution/distribution-rules.ts: -------------------------------------------------------------------------------- 1 | import { User } from "../model/user"; 2 | import { describeSpecification } from "../specification/description"; 3 | import { buildFeeds } from "../specification/feed-builder"; 4 | import { SpecificationOf } from "../specification/model"; 5 | import { Specification } from "../specification/specification"; 6 | import { SpecificationParser } from "../specification/specification-parser"; 7 | 8 | interface DistributionRule { 9 | specification: 
Specification; 10 | feeds: Specification[]; 11 | user: Specification | null; 12 | } 13 | 14 | class ShareTarget { 15 | constructor( 16 | private specification: Specification, 17 | private rules: DistributionRule[] 18 | ) { } 19 | 20 | with(user: SpecificationOf): DistributionRules { 21 | return new DistributionRules([ 22 | ...this.rules, 23 | { 24 | specification: this.specification, 25 | feeds: buildFeeds(this.specification), 26 | user: user.specification 27 | } 28 | ]); 29 | } 30 | 31 | withEveryone(): DistributionRules { 32 | return new DistributionRules([ 33 | ...this.rules, 34 | { 35 | specification: this.specification, 36 | feeds: buildFeeds(this.specification), 37 | user: null 38 | } 39 | ]); 40 | } 41 | } 42 | 43 | export class DistributionRules { 44 | static empty: DistributionRules = new DistributionRules([]); 45 | 46 | constructor( 47 | public rules: DistributionRule[] 48 | ) { } 49 | 50 | with(rules: (r: DistributionRules) => DistributionRules): DistributionRules { 51 | return rules(this); 52 | } 53 | 54 | share(specification: SpecificationOf): ShareTarget { 55 | return new ShareTarget(specification.specification, this.rules); 56 | } 57 | 58 | saveToDescription(): string { 59 | let description = "distribution {\n"; 60 | for (const rule of this.rules) { 61 | const specificationDescription = describeSpecification(rule.specification, 1).trimStart(); 62 | const userDescription = rule.user ? describeSpecification(rule.user, 1).trimStart() : "everyone\n"; 63 | description += ` share ${specificationDescription} with ${userDescription}`; 64 | } 65 | description += "}\n"; 66 | return description; 67 | } 68 | 69 | merge(distributionRules2: DistributionRules): DistributionRules { 70 | return new DistributionRules([ 71 | ...this.rules, 72 | ...distributionRules2.rules 73 | ]); 74 | } 75 | 76 | public static combine(distributionRules: DistributionRules, specification: Specification, user: Specification | null) { 77 | return new DistributionRules([ 78 | ...distributionRules.rules, 79 | { 80 | specification, 81 | feeds: buildFeeds(specification), 82 | user 83 | } 84 | ]); 85 | } 86 | 87 | static loadFromDescription(description: string): DistributionRules { 88 | const parser = new SpecificationParser(description); 89 | parser.skipWhitespace(); 90 | const distributionRules = parser.parseDistributionRules(); 91 | return distributionRules; 92 | } 93 | } 94 | 95 | export function describeDistributionRules(rules: (r: DistributionRules) => DistributionRules): string { 96 | const distributionRules = rules(new DistributionRules([])); 97 | return distributionRules.saveToDescription(); 98 | } -------------------------------------------------------------------------------- /src/observer/subscriber.ts: -------------------------------------------------------------------------------- 1 | import { Network } from "../managers/NetworkManager"; 2 | import { Storage, FactEnvelope, FactReference } from "../storage"; 3 | import { Trace } from "../util/trace"; 4 | 5 | export class Subscriber { 6 | private refCount: number = 0; 7 | private bookmark: string = ""; 8 | private resolved: boolean = false; 9 | private disconnect: (() => void) | undefined; 10 | private timer: NodeJS.Timer | undefined; 11 | 12 | constructor( 13 | private readonly feed: string, 14 | private readonly network: Network, 15 | private readonly store: Storage, 16 | private readonly notifyFactsAdded: (envelopes: FactEnvelope[]) => Promise, 17 | private readonly refreshIntervalSeconds: number 18 | ) {} 19 | 20 | addRef() { 21 | this.refCount++; 22 | 
return this.refCount === 1; 23 | } 24 | 25 | release() { 26 | this.refCount--; 27 | return this.refCount === 0; 28 | } 29 | 30 | async start(): Promise { 31 | this.bookmark = await this.store.loadBookmark(this.feed); 32 | await new Promise((resolve, reject) => { 33 | this.resolved = false; 34 | // Refresh the connection at the configured interval. 35 | this.disconnect = this.connectToFeed(resolve, reject); 36 | this.timer = setInterval(() => { 37 | if (this.disconnect) { 38 | this.disconnect(); 39 | } 40 | this.disconnect = this.connectToFeed(resolve, reject); 41 | }, this.refreshIntervalSeconds * 1000); 42 | }); 43 | } 44 | 45 | stop() { 46 | if (this.timer) { 47 | clearInterval(this.timer); 48 | this.timer = undefined; 49 | } 50 | if (this.disconnect) { 51 | this.disconnect(); 52 | this.disconnect = undefined; 53 | } 54 | } 55 | 56 | private connectToFeed(resolve: (value: void | PromiseLike) => void, reject: (reason?: any) => void) { 57 | return this.network.streamFeed(this.feed, this.bookmark, async (factReferences, nextBookmark) => { 58 | const knownFactReferences: FactReference[] = await this.store.whichExist(factReferences); 59 | const unknownFactReferences: FactReference[] = factReferences.filter(fr => !knownFactReferences.includes(fr)); 60 | if (unknownFactReferences.length > 0) { 61 | const graph = await this.network.load(unknownFactReferences); 62 | await this.store.save(graph); 63 | if (graph.length > 0) { 64 | Trace.counter("facts_saved", graph.length); 65 | } 66 | await this.store.saveBookmark(this.feed, nextBookmark); 67 | this.bookmark = nextBookmark; 68 | await this.notifyFactsAdded(graph); 69 | } else { 70 | // Treat empty-reference responses as bookmark advance from WS graph transport 71 | if (nextBookmark && nextBookmark !== this.bookmark) { 72 | await this.store.saveBookmark(this.feed, nextBookmark); 73 | this.bookmark = nextBookmark; 74 | } 75 | } 76 | if (!this.resolved) { 77 | this.resolved = true; 78 | resolve(); 79 | } 80 | }, err => { 81 | // Do not reject on errors to allow FetchConnection's retry logic to work. 82 | // The promise will resolve when the first successful data is received. 83 | // Don't log AbortError as it's expected during periodic reconnection. 84 | if (err.name !== 'AbortError') { 85 | Trace.warn(`Subscriber connection error: ${err}`); 86 | } 87 | }, this.refreshIntervalSeconds); 88 | } 89 | } -------------------------------------------------------------------------------- /.cursor/rules/implement.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | alwaysApply: false 3 | --- 4 | # Implementation Rule 5 | 6 | ## Requirements 7 | Review the mentioned section of the plan against the current application as implemented. Identify any issues, inconsistencies, or points of confusion. 8 | 9 | ## Clarification Process 10 | If the instructions are unclear: 11 | 1. Respond with "Before we implement the plan" followed by your question or clarification. 12 | 2. If the user insists that we proceed, then proceed with the plan as documented with no further questions. 13 | 3. If the user provides the necessary clarification, update the plan and then proceed. 14 | 15 | ## Scope Management 16 | If you would like to extend or deviate from the plan: 17 | 1. Explain to the user the extension or deviation you would like to take, and ask their advice. 18 | 2. If the user agrees, update the plan and then proceed. 19 | 3. If the user disagrees, proceed with the plan as documented. 
20 | 21 | ## Implementation Steps 22 | If the instructions are clear, or if we are to proceed, then: 23 | 1. Implement the plan faithfully as documented - NO ADDITIONS 24 | 2. Update the plan to indicate progress 25 | 3. Report progress to the user 26 | 4. Suggest a concise commit description for the progress achieved 27 | 28 | ## Progress Tracking Requirements ⭐ **CRITICAL** 29 | **NEVER mark a task as complete without actual implementation:** 30 | 31 | ### Code Implementation Tasks 32 | - ✅ **ONLY mark complete** after the actual code/function is written and saved to the file 33 | - ✅ **ONLY mark complete** after verifying the code compiles/runs without syntax errors 34 | - ✅ **ONLY mark complete** after confirming the function exists in the correct file location 35 | 36 | ### Testing Tasks 37 | - ✅ **ONLY mark complete** after actual test files are created and saved 38 | - ✅ **ONLY mark complete** after test framework is set up and configured 39 | - ✅ **ONLY mark complete** after tests are written and can be executed 40 | - ❌ **NEVER mark complete** based on "planning to test" or "testing approach defined" 41 | 42 | ### Integration Tasks 43 | - ✅ **ONLY mark complete** after actual integration code is implemented 44 | - ✅ **ONLY mark complete** after verifying components work together 45 | - ✅ **ONLY mark complete** after confirming no breaking changes 46 | 47 | ### Documentation Tasks 48 | - ✅ **ONLY mark complete** after actual documentation is written and saved 49 | - ❌ **NEVER mark complete** based on "planning to document" or "documentation approach defined" 50 | 51 | ### Verification Process 52 | Before marking any task complete: 53 | 1. **Verify the actual code/function exists** in the specified file location 54 | 2. **Check that it compiles/runs** without syntax errors 55 | 3. **Confirm it meets the acceptance criteria** specified in the plan 56 | 4. 
**Ensure it's properly integrated** if it's an integration task 57 | 58 | ## Validation (ONLY if specified in plan) 59 | - Run tests ONLY if explicitly documented 60 | - If no tests specified: Skip testing and report "No testing required" 61 | - Testing means: executing functions, running scripts, integration checks 62 | - Testing does NOT mean: print statements, console output, status messages 63 | 64 | ## Success Criteria 65 | - All plan checkboxes marked complete **ONLY after actual implementation** 66 | - All required functions exist and are syntactically correct 67 | - Plan file updated with **accurate** completion status 68 | - No syntax errors in modified files 69 | - **No premature completion marking** - implementation must be real and verifiable 70 | - No syntax errors in modified files -------------------------------------------------------------------------------- /src/http/serializer.ts: -------------------------------------------------------------------------------- 1 | import { FactEnvelope, FactReference, PredecessorCollection } from "../storage"; 2 | 3 | export type IndexPredecessorCollection = { 4 | [role: string]: number | number[]; 5 | }; 6 | 7 | export class GraphSerializer 8 | { 9 | private index = 0; 10 | private indexByFactReference: { [key: string]: number } = {}; 11 | private publicKeys: string[] = []; 12 | 13 | constructor( 14 | private readonly write: (chunk: string) => void 15 | ) {} 16 | 17 | serialize(result: FactEnvelope[]) { 18 | // Write the facts 19 | for (const fact of result) { 20 | // Skip facts that have already been written 21 | const key = fact.fact.type + ":" + fact.fact.hash; 22 | if (this.indexByFactReference.hasOwnProperty(key)) { 23 | continue; 24 | } 25 | 26 | // Write any new public keys 27 | for (const signature of fact.signatures) { 28 | if (!this.publicKeys.includes(signature.publicKey)) { 29 | const pkIndex = this.publicKeys.length; 30 | const publicKey = JSON.stringify(signature.publicKey); 31 | this.write(`PK${pkIndex.toString()}\n${publicKey}\n\n`); 32 | this.publicKeys.push(signature.publicKey); 33 | } 34 | } 35 | 36 | // Write the fact 37 | const factType = JSON.stringify(fact.fact.type); 38 | const predecessorIndexes = JSON.stringify(this.getPredecessorIndexes(fact.fact.predecessors)); 39 | const factFields = JSON.stringify(fact.fact.fields); 40 | 41 | let output = `${factType}\n${predecessorIndexes}\n${factFields}`; 42 | 43 | // Write the signatures 44 | for (const signature of fact.signatures) { 45 | const publicKeyIndex = this.publicKeys.indexOf(signature.publicKey); 46 | const publicKey = `PK${publicKeyIndex.toString()}`; 47 | const signatureString = JSON.stringify(signature.signature); 48 | 49 | output += `\n${publicKey}\n${signatureString}`; 50 | } 51 | 52 | output += "\n\n"; 53 | 54 | this.write(output); 55 | 56 | this.indexByFactReference[key] = this.index; 57 | this.index++; 58 | } 59 | } 60 | 61 | private getPredecessorIndexes(predecessors: PredecessorCollection): IndexPredecessorCollection { 62 | const result: IndexPredecessorCollection = {}; 63 | for (const role in predecessors) { 64 | const reference = predecessors[role]; 65 | if (Array.isArray(reference)) { 66 | result[role] = reference.map(r => this.getFactIndex(r)); 67 | } else { 68 | result[role] = this.getFactIndex(reference); 69 | } 70 | } 71 | return result; 72 | } 73 | 74 | private getFactIndex(reference: FactReference): number { 75 | const key = reference.type + ":" + reference.hash; 76 | if (!this.indexByFactReference.hasOwnProperty(key)) { 77 | throw new 
Error(`Fact reference not found in graph: ${key}`); 78 | } 79 | return this.indexByFactReference[key]; 80 | } 81 | } 82 | 83 | export function serializeGraph(graph: FactEnvelope[]) { 84 | const serializedData: string[] = []; 85 | const serializer = new GraphSerializer(chunk => serializedData.push(chunk)); 86 | serializer.serialize(graph); 87 | const body = serializedData.join(''); 88 | return body; 89 | } 90 | -------------------------------------------------------------------------------- /src/fact/hash.ts: -------------------------------------------------------------------------------- 1 | import { FactRecord, FactReference, PredecessorCollection } from '../storage'; 2 | import { computeStringHash } from '../util/encoding'; 3 | import { HashMap } from './hydrate'; 4 | 5 | export function computeHash(fields: {}, predecessors: PredecessorCollection) { 6 | return computeObjectHash({ 7 | fields: fields, 8 | predecessors: canonicalPredecessors(predecessors) 9 | }); 10 | } 11 | 12 | export function canonicalizeFact(fields: {}, predecessors: PredecessorCollection) { 13 | return canonicalize({ 14 | fields: fields, 15 | predecessors: canonicalPredecessors(predecessors) 16 | }); 17 | } 18 | 19 | export function verifyHash(fact: FactRecord) { 20 | const computedHash = computeHash(fact.fields, fact.predecessors); 21 | return fact.hash === computedHash; 22 | } 23 | 24 | export function canonicalPredecessors(predecessors: PredecessorCollection) { 25 | const result: PredecessorCollection = {}; 26 | for(const role in predecessors) { 27 | const referenceMessages = predecessors[role]; 28 | if (Array.isArray(referenceMessages)) { 29 | result[role] = sortedPredecessors(referenceMessages); 30 | } 31 | else { 32 | result[role] = referenceMessages; 33 | } 34 | } 35 | return result; 36 | } 37 | 38 | function sortedPredecessors(predecessors: FactReference[]) { 39 | return predecessors.slice().sort((a,b) => { 40 | if (a.hash < b.hash) 41 | return -1; 42 | else if (a.hash > b.hash) 43 | return 1; 44 | if (a.type < b.type) 45 | return -1; 46 | else if (a.type > b.type) 47 | return 1; 48 | else 49 | return 0; 50 | }); 51 | } 52 | 53 | export function computeObjectHash(obj: {}) { 54 | if (!obj) 55 | return ''; 56 | 57 | const str = canonicalize(obj); 58 | return computeStringHash(str); 59 | } 60 | 61 | type Pair = { key: string, value: any }; 62 | 63 | function canonicalize(obj: HashMap) { 64 | const pairs: Pair[] = []; 65 | for (const key in obj) { 66 | const value = obj[key]; 67 | pairs.push({ key, value }); 68 | } 69 | pairs.sort((a, b) => { 70 | if (a.key < b.key) 71 | return -1; 72 | else if (a.key > b.key) 73 | return 1; 74 | else 75 | return 0; 76 | }); 77 | const members = pairs.reduce((text, pair) => { 78 | if (text.length > 0) 79 | text += ','; 80 | text += '"' + pair.key + '":' + serialize(pair.value); 81 | return text; 82 | }, ''); 83 | return '{' + members + '}'; 84 | } 85 | 86 | function serialize(value: any) { 87 | if (typeof(value) === 'object') { 88 | if (value instanceof Date) { 89 | return 'Date.parse("' + value.toISOString() + '")'; 90 | } 91 | else if (Array.isArray(value)) { 92 | const values = value.reduce((text, element) => { 93 | if (text.length > 0) 94 | text += ','; 95 | text += serialize(element); 96 | return text; 97 | }, ''); 98 | return '[' + values + ']'; 99 | } 100 | else { 101 | return canonicalize(value); 102 | } 103 | } 104 | else { 105 | return JSON.stringify(value); 106 | } 107 | } -------------------------------------------------------------------------------- 
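The canonicalization above makes a fact's hash independent of field ordering and of the order in which multi-valued predecessors are listed. A minimal sketch of that property, using only `computeHash` and `verifyHash` from `src/fact/hash.ts` — the import path and the sample reference values are illustrative, not taken from the library:

```typescript
import { computeHash, verifyHash } from "./hash"; // path assumed; adjust to your layout

// Sample predecessor reference; the hash value is a placeholder, not a real digest.
const creator = { type: "Jinaga.User", hash: "abc123" };

// Keys are sorted during canonicalization, so field order does not change the hash.
const h1 = computeHash({ identifier: "TestCo", name: "Example" }, { creator });
const h2 = computeHash({ name: "Example", identifier: "TestCo" }, { creator });
console.log(h1 === h2); // true

// verifyHash recomputes the hash from fields and predecessors and compares it
// to the hash recorded on the fact.
const record = {
    type: "Company",
    hash: h1,
    predecessors: { creator },
    fields: { identifier: "TestCo", name: "Example" }
};
console.log(verifyHash(record)); // true
```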
/test/distribution/distributionEngineDirectSpec.ts: -------------------------------------------------------------------------------- 1 | import { DistributionEngine, DistributionRules, MemoryStore, User, dehydrateFact } from "@src"; 2 | import { Blog, Post, distribution, model } from "../blogModel"; 3 | 4 | describe("DistributionEngine direct usage", () => { 5 | const creator = new User("creator"); 6 | const reader = new User("reader"); 7 | const blog = new Blog(creator, "domain"); 8 | const post = new Post(blog, creator, new Date()); 9 | 10 | it("should provide detailed debug info when isTest=true", async () => { 11 | const store = new MemoryStore(); 12 | const distributionRules = distribution(new DistributionRules([])); 13 | 14 | // Create engine with isTest=true 15 | const engine = new DistributionEngine(distributionRules, store, true); 16 | 17 | const specification = model.given(Blog).match((blog, facts) => 18 | facts.ofType(Post) 19 | .join(post => post.blog, blog) 20 | ).specification; 21 | 22 | const namedStart = { "blog": dehydrateFact(blog)[0] }; 23 | const userFact = dehydrateFact(reader)[0]; 24 | 25 | const result = await engine.canDistributeToAll([specification], namedStart, userFact); 26 | 27 | expect(result.type).toBe('failure'); 28 | if (result.type === 'failure') { 29 | expect(result.reason).toContain("The user does not match"); 30 | expect(result.reason).toContain("Expected hashes: []"); 31 | expect(result.reason).toContain("User hash:"); 32 | } 33 | }); 34 | 35 | it("should NOT provide detailed debug info when isTest=false", async () => { 36 | const store = new MemoryStore(); 37 | const distributionRules = distribution(new DistributionRules([])); 38 | 39 | // Create engine with isTest=false (default) 40 | const engine = new DistributionEngine(distributionRules, store, false); 41 | 42 | const specification = model.given(Blog).match((blog, facts) => 43 | facts.ofType(Post) 44 | .join(post => post.blog, blog) 45 | ).specification; 46 | 47 | const namedStart = { "blog": dehydrateFact(blog)[0] }; 48 | const userFact = dehydrateFact(reader)[0]; 49 | 50 | const result = await engine.canDistributeToAll([specification], namedStart, userFact); 51 | 52 | expect(result.type).toBe('failure'); 53 | if (result.type === 'failure') { 54 | expect(result.reason).toContain("The user does not match"); 55 | expect(result.reason).not.toContain("Matching set:"); 56 | expect(result.reason).not.toContain("User fact:"); 57 | } 58 | }); 59 | 60 | it("should NOT provide detailed debug info when isTest is omitted (default behavior)", async () => { 61 | const store = new MemoryStore(); 62 | const distributionRules = distribution(new DistributionRules([])); 63 | 64 | // Create engine without isTest parameter (should default to false) 65 | const engine = new DistributionEngine(distributionRules, store); 66 | 67 | const specification = model.given(Blog).match((blog, facts) => 68 | facts.ofType(Post) 69 | .join(post => post.blog, blog) 70 | ).specification; 71 | 72 | const namedStart = { "blog": dehydrateFact(blog)[0] }; 73 | const userFact = dehydrateFact(reader)[0]; 74 | 75 | const result = await engine.canDistributeToAll([specification], namedStart, userFact); 76 | 77 | expect(result.type).toBe('failure'); 78 | if (result.type === 'failure') { 79 | expect(result.reason).toContain("The user does not match"); 80 | expect(result.reason).not.toContain("Expected hashes:"); 81 | expect(result.reason).not.toContain("User hash:"); 82 | } 83 | }); 84 | }); 
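The specs above build an empty rule set and probe `DistributionEngine.canDistributeToAll` directly. For comparison, a populated rule set uses the same `DistributionRules` fluent API from `src/distribution/distribution-rules.ts`. The sketch below mirrors the comment-author rule shown earlier; it assumes `Blog`, `Post`, `Publish`, `Comment`, and `model` are exported from `test/blogModel.ts`, and it is illustrative rather than the exact `distribution` function defined there.

```typescript
import { DistributionRules, User } from "@src";
import { Blog, Comment, Post, Publish, model } from "../blogModel";

// Illustrative rule set: a blog's posts are shared with everyone;
// comments on published posts are shared only with their authors.
export const exampleDistribution = (r: DistributionRules) => r
    .share(model.given(Blog).match(blog =>
        blog.successors(Post, post => post.blog)
    )).withEveryone()
    .share(model.given(Blog, User).match((blog, author) =>
        blog.successors(Post, post => post.blog)
            .exists(post => post.successors(Publish, publish => publish.post))
            .selectMany(post => post.successors(Comment, comment => comment.post)
                .join(comment => comment.author, author)
            )
    )).with(model.given(Blog, User).select((blog, author) =>
        author
    ));
```

Passing `exampleDistribution` to `describeDistributionRules` renders the rules in the textual `distribution { share … with … }` form produced by `saveToDescription()`.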
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Jinaga 2 | 3 | End-to-end application state management framework. 4 | 5 | Add Jinaga.JS to a client app and point it at a Replicator. 6 | Updates are sent to the Replicator as the user works with the app. 7 | Any changes that the app needs are pulled from the Replicator. 8 | 9 | ## Install 10 | 11 | Install Jinaga.JS from the NPM package. 12 | 13 | ```bash 14 | npm i jinaga 15 | ``` 16 | 17 | This installs just the client side components. 18 | See [jinaga.com](https://jinaga.com) for details on how to use them. 19 | 20 | ## Running a Replicator 21 | 22 | A Jinaga front end connects to a device called a Replicator. 23 | The Jinaga Replicator is a single machine in a network. 24 | It stores and shares facts. 25 | To get started, create a Replicator of your very own using [Docker](https://www.docker.com/products/docker-desktop/). 26 | 27 | ``` 28 | docker pull jinaga/jinaga-replicator 29 | docker run --name my-replicator -p8080:8080 jinaga/jinaga-replicator 30 | ``` 31 | 32 | This creates and starts a new container called `my-replicator`. 33 | The container is listening at port 8080 for commands. 34 | Configure Jinaga to use the replicator: 35 | 36 | ```typescript 37 | import { JinagaBrowser } from "jinaga"; 38 | 39 | export const j = JinagaBrowser.create({ 40 | httpEndpoint: "http://localhost:8080/jinaga" 41 | }); 42 | ``` 43 | 44 | ## Breaking Changes 45 | 46 | If you are upgrading from an older version, you may need to update your code. 47 | 48 | ### Changes in version 4.0.0 49 | 50 | In version 4.0.0, the server side code has been moved to a separate package. 51 | This allows you to build a client using Create React App and connect it to a Replicator. 52 | 53 | When upgrading, take the following steps: 54 | - Install the `jinaga-server` package. 55 | - Remove the 'jinaga' alias from 'webpack.config.js'. 56 | - Import `JinagaServer` from 'jinaga-server'. 57 | - Rename any references of `Specification` to `SpecificationOf`, and `Condition` to `ConditionOf`. These are used as return types of specification functions. It is uncommon to be explicit about them. 58 | 59 | ### Changes in version 3.1.0 60 | 61 | The name of the client-side script changed from `jinaga.js` to `jinaga-client.js`. 62 | In `webpack.config.js`, update the `jinaga` alias from `jinaga/dist/jinaga` to `jinaga/dist/jinaga-client`. 63 | 64 | ### Changes in version 3.0.0 65 | 66 | In version 3 of Jinaga.JS, the `has` function takes two parameters. 67 | The second is the name of the predecessor type. 68 | In version 2, the function took only one parameter: the field name. 69 | 70 | To upgrade, change this: 71 | 72 | ```javascript 73 | function assignmentUser(assignment) { 74 | ensure(assignment).has("user"); 75 | return j.match(assignment.user); 76 | } 77 | ``` 78 | 79 | To this: 80 | 81 | ```javascript 82 | function assignmentUser(assignment) { 83 | ensure(assignment).has("user", "Jinaga.User"); 84 | return j.match(assignment.user); 85 | } 86 | ``` 87 | 88 | ## Build 89 | 90 | To build Jinaga.JS, you will need Node 16. 91 | 92 | ```bash 93 | npm ci 94 | npm run build 95 | npm test 96 | ``` 97 | 98 | ## Release 99 | 100 | To release a new version of Jinaga.JS, bump the version number, create and push a tag, 101 | and create a release. The GitHub Actions workflow will build and publish the package. 
102 | 103 | ```bash 104 | git c main 105 | git pull 106 | npm version patch 107 | git push --follow-tags 108 | gh release create v$(node -p "require('./package.json').version") --generate-notes --verify-tag 109 | ``` -------------------------------------------------------------------------------- /test/authorization/authorizationExampleSpec.ts: -------------------------------------------------------------------------------- 1 | import { AuthorizationRules, buildModel, Jinaga, JinagaTest } from '@src'; 2 | 3 | describe("Feedback authorization", () => { 4 | let j: Jinaga; 5 | let site: Site; 6 | 7 | beforeEach(async () => { 8 | site = new Site(new User("Site creator"), "site identifier"); 9 | 10 | j = JinagaTest.create({ 11 | model, 12 | authorization, 13 | user: new User("Logged in user"), 14 | initialState: [ 15 | site 16 | ] 17 | }); 18 | }); 19 | 20 | it("should have logged in user", async () => { 21 | const { userFact: user } = await j.login(); 22 | 23 | expect(user.publicKey).toEqual("Logged in user"); 24 | }); 25 | 26 | it("should allow a user", async () => { 27 | const creator = await j.fact(new User("Other user")); 28 | 29 | expect(creator.publicKey).toEqual("Other user"); 30 | }); 31 | 32 | it("should not allow site created by a different user", async () => { 33 | const creator = await j.fact(new User("Other user")); 34 | 35 | const promise = j.fact(new Site(creator, "site identifier")); 36 | 37 | await expect(promise).rejects.not.toBeNull(); 38 | }); 39 | 40 | it("should allow a site created by the logged in user", async () => { 41 | const creator = await j.fact(new User("Logged in user")); 42 | 43 | const site = await j.fact(new Site(creator, "site identifier")); 44 | 45 | expect(site.creator.publicKey).toEqual("Logged in user"); 46 | }); 47 | 48 | it("should not allow a comment from another user", async () => { 49 | const user = await j.fact(new User("Another user")); 50 | const content = await j.fact(new Content(site, "/path/to/content")); 51 | 52 | const promise = j.fact(new Comment("comment unique id", content, user)); 53 | 54 | await expect(promise).rejects.not.toBeNull(); 55 | }); 56 | 57 | it("should allow a comment from logged in user", async () => { 58 | const { userFact: user } = await j.login(); 59 | const content = await j.fact(new Content(site, "/path/to/content")); 60 | const comment = await j.fact(new Comment("comment unique id", content, user)); 61 | 62 | expect(comment.author.publicKey).toEqual(user.publicKey); 63 | }); 64 | }); 65 | 66 | const j = Jinaga; 67 | 68 | class User { 69 | static Type = "Jinaga.User" as const; 70 | type = User.Type; 71 | 72 | constructor ( 73 | public publicKey: string 74 | ) { } 75 | } 76 | 77 | class Site { 78 | static Type = "Feedback.Site" as const; 79 | type = Site.Type; 80 | 81 | constructor ( 82 | public creator: User, 83 | public identifier: string 84 | ) { } 85 | } 86 | 87 | class Content { 88 | static Type = "Feedback.Content" as const; 89 | type = Content.Type; 90 | 91 | constructor ( 92 | public site: Site, 93 | public path: string 94 | ) { } 95 | } 96 | 97 | class Comment { 98 | static Type = "Feedback.Comment" as const; 99 | type = Comment.Type; 100 | 101 | constructor ( 102 | public uniqueId: string, 103 | public content: Content, 104 | public author: User 105 | ) { } 106 | } 107 | 108 | const model = buildModel(b => b 109 | .type(User) 110 | .type(Site, m => m 111 | .predecessor("creator", User) 112 | ) 113 | .type(Content, m => m 114 | .predecessor("site", Site) 115 | ) 116 | .type(Comment, m => m 117 | 
.predecessor("content", Content) 118 | .predecessor("author", User) 119 | ) 120 | ); 121 | 122 | function authorization(a: AuthorizationRules) { 123 | return a 124 | .any(User) 125 | .type(Site, site => site.creator) 126 | .any(Content) 127 | .type(Comment, comment => comment.author) 128 | ; 129 | } 130 | -------------------------------------------------------------------------------- /issues/1.md: -------------------------------------------------------------------------------- 1 | ### **Enhancement Proposal** 2 | 3 | Enhance the Jinaga graph protocol to improve stream continuity and version negotiation using state-based hashing and content negotiation mechanisms. 4 | 5 | --- 6 | 7 | ### **Proposed Features** 8 | 9 | 1. **Version Negotiation**: 10 | - Use `Accept` headers and `OPTIONS` requests to negotiate protocol versions between the client and replicator. 11 | - The client sends an `OPTIONS` request to the replicator to determine supported protocol versions for the `/save` endpoint. 12 | - Ensure backward compatibility by allowing clients to adjust to the replicator's supported versions. 13 | 14 | 2. **Hash-Based Stream Continuity**: 15 | - Replace the use of a graph ID with a state-based hashing mechanism: 16 | - The client computes a **starting hash** that represents the state of the graph at the beginning of the `/save` call. 17 | - The replicator computes a **final hash** after processing the payload. 18 | - If the starting hash of the next `/save` call matches the replicator’s final hash from the previous call, the replicator resumes processing from that state. 19 | - In case of a mismatch, the replicator responds with an appropriate HTTP error code, prompting the client to reinitialize its state. 20 | 21 | --- 22 | 23 | ### **Implementation Details** 24 | 25 | #### **Protocol Details** 26 | 27 | - **Client-Side Changes**: 28 | 1. The network manager on the client maintains a single `GraphSerializer` for its communication with the replicator. 29 | 2. The client generates a unique starting hash based on the state of the graph (e.g., using a cryptographic hash of the serialized graph state). 30 | 3. The `/save` endpoint sends the starting hash along with the serialized graph payload. 31 | 4. After receiving a mismatch response from the replicator, the client abandons its current `GraphSerializer` and starts a new one. 32 | 33 | - **Replicator-Side Changes**: 34 | 1. The replicator maintains a `GraphDeserializer` for each active graph stream. 35 | 2. The replicator computes a final hash based on the processed graph state at the end of each `/save` call. 36 | 3. On receiving a new `/save` call, the replicator validates the starting hash against its stored final hash. 37 | 4. If the hashes match, the replicator resumes processing. Otherwise, it returns an HTTP error code (e.g., `409 Conflict`) to signal a mismatch. 38 | 39 | #### **Content Negotiation** 40 | 41 | - Extend the `OPTIONS` handler for the `/save` endpoint to include supported protocol versions in the `Accept` header. 42 | - Update the client to dynamically select the appropriate protocol version based on the replicator’s response. 43 | 44 | --- 45 | 46 | ### **Benefits** 47 | 48 | - **Resilience**: The hash-based mechanism ensures robust recovery from network interruptions, allowing streams to resume seamlessly. 49 | - **Efficiency**: Reduces redundant data transfer by eliminating the need to re-upload predecessors in subsequent `/save` calls. 
50 | - **Compatibility**: The use of version negotiation allows the protocol to evolve without breaking existing implementations. 51 | 52 | --- 53 | 54 | ### **Constraints** 55 | 56 | - **Stream Exclusivity**: 57 | - Ensure that only one active stream exists for a given graph state to prevent parallel processing conflicts. 58 | 59 | - **Hash Validation**: 60 | - Both client and replicator must implement consistent hashing algorithms to avoid mismatches due to serialization differences. 61 | 62 | --- 63 | 64 | ### **Next Steps** 65 | 66 | 1. Define the hashing algorithm and graph serialization format. 67 | 2. Implement the content negotiation mechanism in both the client and replicator. 68 | 3. Update the `/save` endpoint to include starting and final hashes. 69 | 4. Add unit and integration tests to verify protocol behavior under various scenarios. 70 | 71 | --- 72 | 73 | Let me know if you need further clarification or additional details for implementation! -------------------------------------------------------------------------------- /test/single-use/singleUseForkSpec.ts: -------------------------------------------------------------------------------- 1 | import { AuthenticationTest, FactEnvelope, FactManager, FactReference, Fork, Jinaga, MemoryStore, ObservableSource, User } from '@src'; 2 | 3 | // Define a fake Fork implementation that captures saved facts 4 | class FakeFork implements Fork { 5 | public savedEnvelopes: FactEnvelope[] = []; 6 | 7 | async save(envelopes: FactEnvelope[]): Promise { 8 | this.savedEnvelopes = this.savedEnvelopes.concat(envelopes); 9 | return Promise.resolve(); 10 | } 11 | 12 | async load(references: FactReference[]): Promise { 13 | return Promise.resolve([]); 14 | } 15 | 16 | async processQueueNow(): Promise { 17 | return Promise.resolve(); 18 | } 19 | 20 | async close(): Promise { 21 | return Promise.resolve(); 22 | } 23 | } 24 | 25 | // Define an Environment fact type that will be owned by the single-use principal 26 | class Environment { 27 | static Type = "Enterprise.Environment" as const; 28 | type = Environment.Type; 29 | 30 | constructor( 31 | public creator: User, 32 | public identifier: string 33 | ) { } 34 | } 35 | 36 | describe('SingleUse with FakeFork', () => { 37 | it('should create single-use principal', async () => { 38 | // Arrange 39 | const store = new MemoryStore(); 40 | const fakeFork = new FakeFork(); 41 | const observableSource = new ObservableSource(store); 42 | const authentication = new AuthenticationTest(store, null, null, null); 43 | const factManager = new FactManager(fakeFork, observableSource, store, { 44 | feeds: async () => [], 45 | fetchFeed: async () => ({ references: [], bookmark: '' }), 46 | streamFeed: () => () => {}, 47 | load: async () => [] 48 | }, []); 49 | const j = new Jinaga(authentication, factManager, null); 50 | 51 | // Act 52 | await j.singleUse(async (principal: User) => { 53 | // Assert 54 | expect(principal).toBeDefined(); 55 | expect(principal.type).toBe('Jinaga.User'); 56 | expect(principal.publicKey).toContain('-----BEGIN PUBLIC KEY-----'); 57 | return 0; 58 | }); 59 | }); 60 | 61 | it('should sign facts created by single-use principal', async () => { 62 | // Arrange 63 | const store = new MemoryStore(); 64 | const fakeFork = new FakeFork(); 65 | const observableSource = new ObservableSource(store); 66 | const authentication = new AuthenticationTest(store, null, null, null); 67 | const factManager = new FactManager(fakeFork, observableSource, store, { 68 | feeds: async () => [], 69 | fetchFeed: async 
() => ({ references: [], bookmark: '' }), 70 | streamFeed: () => () => {}, 71 | load: async () => [] 72 | }, []); 73 | const j = new Jinaga(authentication, factManager, null); 74 | 75 | // Act 76 | const publicKey = await j.singleUse(async (principal: User) => { 77 | await j.fact(new Environment(principal, "Production")); 78 | return principal.publicKey; 79 | }); 80 | 81 | // Assert 82 | // Find the Environment fact in the saved envelopes 83 | const environmentFact = fakeFork.savedEnvelopes 84 | .filter(envelope => envelope.fact.type === "Enterprise.Environment") 85 | .map(envelope => envelope.fact); 86 | expect(environmentFact.length).toBe(1); 87 | 88 | // Find the signature for the Environment fact 89 | const environmentSignature = fakeFork.savedEnvelopes 90 | .filter(envelope => envelope.fact.type === "Enterprise.Environment") 91 | .flatMap(envelope => envelope.signatures); 92 | expect(environmentSignature.length).toBe(1); 93 | 94 | // Verify the signature uses the principal's public key 95 | expect(environmentSignature[0].publicKey).toBe(publicKey); 96 | }); 97 | }); 98 | -------------------------------------------------------------------------------- /src/jinaga-test.ts: -------------------------------------------------------------------------------- 1 | import { Authentication } from './authentication/authentication'; 2 | import { AuthenticationTest } from './authentication/authentication-test'; 3 | import { AuthorizationRules } from './authorization/authorizationRules'; 4 | import { DistributionEngine } from './distribution/distribution-engine'; 5 | import { DistributionRules } from './distribution/distribution-rules'; 6 | import { dehydrateFact, Dehydration } from './fact/hydrate'; 7 | import { PassThroughFork } from './fork/pass-through-fork'; 8 | import { SyncStatusNotifier } from './http/web-client'; 9 | import { Jinaga } from './jinaga'; 10 | import { FactManager } from './managers/factManager'; 11 | import { Network, NetworkDistribution, NetworkNoOp } from './managers/NetworkManager'; 12 | import { MemoryStore } from './memory/memory-store'; 13 | import { ObservableSource } from './observable/observable'; 14 | import { PurgeConditions } from "./purge/purgeConditions"; 15 | import { Model } from './specification/model'; 16 | import { Specification } from "./specification/specification"; 17 | import { FactEnvelope, Storage } from './storage'; 18 | 19 | export type JinagaTestConfig = { 20 | model?: Model, 21 | authorization?: (a: AuthorizationRules) => AuthorizationRules, 22 | distribution?: (d: DistributionRules) => DistributionRules, 23 | user?: {}, 24 | device?: {}, 25 | initialState?: {}[], 26 | purgeConditions?: (p: PurgeConditions) => PurgeConditions, 27 | feedRefreshIntervalSeconds?: number 28 | } 29 | 30 | export class JinagaTest { 31 | static create(config: JinagaTestConfig) { 32 | const store = new MemoryStore(); 33 | this.saveInitialState(config, store); 34 | const observableSource = new ObservableSource(store); 35 | const syncStatusNotifier = new SyncStatusNotifier(); 36 | const fork = new PassThroughFork(store); 37 | const authentication = this.createAuthentication(config, store); 38 | const network = this.createNetwork(config, store); 39 | const purgeConditions = this.createPurgeConditions(config); 40 | const factManager = new FactManager(fork, observableSource, store, network, purgeConditions, config.feedRefreshIntervalSeconds); 41 | return new Jinaga(authentication, factManager, syncStatusNotifier); 42 | } 43 | 44 | static saveInitialState(config: 
JinagaTestConfig, store: MemoryStore) { 45 | if (config.initialState) { 46 | const dehydrate = new Dehydration(); 47 | config.initialState.forEach(obj => dehydrate.dehydrate(obj)); 48 | store.save(dehydrate.factRecords().map(f => { 49 | fact: f, 50 | signatures: [] 51 | })); 52 | } 53 | } 54 | 55 | static createAuthentication(config: JinagaTestConfig, store: Storage): Authentication { 56 | const authorizationRules = config.authorization ? 57 | config.authorization(new AuthorizationRules(config.model)) : null; 58 | const userFact = JinagaTest.getUserFact(config); 59 | const deviceFact = JinagaTest.getDeviceFact(config); 60 | 61 | return new AuthenticationTest(store, authorizationRules, userFact, deviceFact); 62 | } 63 | 64 | static createNetwork(config: JinagaTestConfig, store: MemoryStore): Network { 65 | if (config.distribution) { 66 | const distributionRules = config.distribution(new DistributionRules([])); 67 | const distributionEngine = new DistributionEngine(distributionRules, store, true); 68 | return new NetworkDistribution(distributionEngine, this.getUserFact(config)); 69 | } 70 | else { 71 | return new NetworkNoOp(); 72 | } 73 | } 74 | 75 | static createPurgeConditions(config: JinagaTestConfig): Specification[] { 76 | if (config.purgeConditions) { 77 | return config.purgeConditions(new PurgeConditions([])).specifications; 78 | } 79 | else { 80 | return []; 81 | } 82 | } 83 | 84 | private static getUserFact(config: JinagaTestConfig) { 85 | return config.user ? dehydrateFact(config.user)[0] : null; 86 | } 87 | 88 | private static getDeviceFact(config: JinagaTestConfig) { 89 | return config.device ? dehydrateFact(config.device)[0] : null; 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /documentation/indexeddb-queue.md: -------------------------------------------------------------------------------- 1 | # IndexedDB Queue 2 | 3 | The IndexedDB Queue is an implementation of the Queue interface that uses the browser's IndexedDB API for storage. It provides methods for enqueueing, dequeueing, and peeking at fact envelopes. 4 | 5 | ## Overview 6 | 7 | The IndexedDB Queue ensures that facts are processed in the correct order by maintaining strict topological ordering of facts. This means that for any two facts where one depends on the other, the prerequisite fact will always appear earlier in the list. 8 | 9 | ## Peek Function 10 | 11 | The `peek` function retrieves all fact envelopes currently in the queue, along with their complete transitive closure of predecessors. This ensures that when facts are processed from the queue, all of their dependencies are available. The function returns the envelopes in strict topological order, guaranteeing that prerequisite facts appear before the facts that depend on them. 12 | 13 | ```typescript 14 | peek(): Promise 15 | ``` 16 | 17 | ### Return Value 18 | 19 | The function returns a Promise that resolves to an array of FactEnvelope objects. This array includes: 20 | 21 | 1. All fact envelopes currently in the queue 22 | 2. All transitive predecessors of those facts (the complete ancestor chain) 23 | 24 | ### Implementation Details 25 | 26 | The function: 27 | 28 | 1. Opens a transaction on the 'queue', 'fact', and 'ancestor' object stores 29 | 2. Retrieves all fact envelopes from the queue 30 | 3. For each envelope, retrieves its ancestors from the ancestor table 31 | 4. For each ancestor, retrieves the corresponding fact from the fact table 32 | 5. Creates fact envelopes for all ancestors 33 | 6. 
Sorts the combined array of envelopes in topological order 34 | 7. Validates the topological ordering to ensure correctness 35 | 8. Returns the sorted array of envelopes 36 | 37 | ### Performance Considerations 38 | 39 | - The function uses the `distinct` utility to remove duplicate ancestors, ensuring that each fact is only included once in the result 40 | - The function uses the `TopologicalSorter` to sort facts in topological order 41 | - The function validates the topological ordering to ensure that prerequisite facts appear before the facts that depend on them 42 | - The function detects and reports circular dependencies and other ordering violations 43 | - Facts that are already in the queue are not duplicated in the ancestor list 44 | - The implementation efficiently handles potentially large ancestor sets 45 | 46 | ### Example Usage 47 | 48 | ```typescript 49 | const queue = new IndexedDBQueue('my-index'); 50 | 51 | // Peek at the queue 52 | const envelopes = await queue.peek(); 53 | 54 | // Process the envelopes (guaranteed to be in topological order) 55 | for (const envelope of envelopes) { 56 | // Process each fact envelope 57 | // All predecessors are guaranteed to be included in the array 58 | // and to appear before the facts that depend on them 59 | } 60 | ``` 61 | 62 | ## Other Queue Methods 63 | 64 | ### Enqueue 65 | 66 | ```typescript 67 | enqueue(envelopes: FactEnvelope[]): Promise<void> 68 | ``` 69 | 70 | Adds fact envelopes to the queue for later processing. 71 | 72 | ## Error Handling 73 | 74 | The `peek` function includes comprehensive validation logic to ensure the correctness of the topological ordering: 75 | 76 | 1. **Circular Dependencies**: If a circular dependency is detected (where fact A depends on fact B, which depends on fact C, which depends on fact A), the function will throw an error with a detailed message. 78 | 79 | 2. **Missing Prerequisites**: If a fact depends on a prerequisite that is not included in the result, the function will throw an error identifying the missing prerequisite. 79 | 80 | 3. **Topological Ordering Violations**: If the topological ordering is violated (where a fact appears before one of its prerequisites), the function will throw an error with details about the specific violation. 81 | 82 | These error messages provide detailed information to help diagnose and fix issues with the fact dependency graph. 83 | 84 | ### Dequeue 85 | 86 | ```typescript 87 | dequeue(envelopes: FactEnvelope[]): Promise<void> 88 | ``` 89 | 90 | Removes fact envelopes from the queue after they have been processed. 
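
To make the ordering guarantee concrete, the following sketch shows how a consumer could re-check it independently. This helper is not part of the library; it relies only on the `FactEnvelope` and predecessor shapes defined in `src/storage.ts`, and the import path is illustrative (the tests in this repository use the `@src` alias instead).

```typescript
import { FactEnvelope } from "jinaga";

// Illustrative only: re-checks the guarantee that every predecessor of a
// fact appears earlier in the array returned by peek().
function assertTopologicalOrder(envelopes: FactEnvelope[]): void {
    const seen = new Set<string>();
    for (const envelope of envelopes) {
        const { type, hash, predecessors } = envelope.fact;
        for (const role of Object.keys(predecessors)) {
            const predecessor = predecessors[role];
            const references = Array.isArray(predecessor) ? predecessor : [predecessor];
            for (const reference of references) {
                if (!seen.has(`${reference.type}:${reference.hash}`)) {
                    throw new Error(
                        `Fact ${type} appears before its predecessor ` +
                        `${reference.type} in role "${role}"`
                    );
                }
            }
        }
        seen.add(`${type}:${hash}`);
    }
}

// Usage with the queue described above:
// const envelopes = await queue.peek();
// assertTopologicalOrder(envelopes);
```

Because `peek` already performs this validation internally, a check like this is only useful as a diagnostic in consumer code or tests.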
-------------------------------------------------------------------------------- /src/storage.ts: -------------------------------------------------------------------------------- 1 | import { computeObjectHash } from "./fact/hash"; 2 | import { Specification } from "./specification/specification"; 3 | import { findIndex } from './util/fn'; 4 | 5 | export type FactReference = { 6 | type: string; 7 | hash: string; 8 | }; 9 | 10 | export interface FactTuple { 11 | facts: FactReference[]; 12 | bookmark: string; 13 | } 14 | 15 | export interface FactFeed { 16 | tuples: FactTuple[]; 17 | bookmark: string; 18 | } 19 | 20 | export type PredecessorCollection = { 21 | [role: string]: FactReference[] | FactReference 22 | }; 23 | 24 | export type FactRecord = { 25 | type: string; 26 | hash: string; 27 | predecessors: PredecessorCollection, 28 | fields: { [field: string]: any }; 29 | }; 30 | 31 | export type FactSignature = { 32 | publicKey: string; 33 | signature: string; 34 | } 35 | 36 | export type FactEnvelope = { 37 | fact: FactRecord; 38 | signatures: FactSignature[]; 39 | } 40 | 41 | export type ReferencesByName = { [name: string]: FactReference }; 42 | 43 | export interface ProjectedResult { 44 | tuple: ReferencesByName; 45 | result: any; 46 | } 47 | 48 | export interface Storage { 49 | close(): Promise; 50 | save(envelopes: FactEnvelope[]): Promise; 51 | read(start: FactReference[], specification: Specification): Promise; 52 | feed(feed: Specification, start: FactReference[], bookmark: string): Promise; 53 | whichExist(references: FactReference[]): Promise; 54 | load(references: FactReference[]): Promise; 55 | purge(purgeConditions: Specification[]): Promise; 56 | purgeDescendants(purgeRoot: FactReference, triggers: FactReference[]): Promise; 57 | 58 | loadBookmark(feed: string): Promise; 59 | saveBookmark(feed: string, bookmark: string): Promise; 60 | 61 | getMruDate(specificationHash: string): Promise; 62 | setMruDate(specificationHash: string, mruDate: Date): Promise; 63 | } 64 | 65 | export interface Queue { 66 | peek(): Promise; 67 | enqueue(envelopes: FactEnvelope[]): Promise; 68 | dequeue(envelopes: FactEnvelope[]): Promise; 69 | } 70 | 71 | export function factReferenceEquals(a: FactReference) { 72 | return (r: FactReference) => r.hash === a.hash && r.type === a.type; 73 | } 74 | 75 | export function factEnvelopeEquals(r: FactReference) { 76 | return (e: FactEnvelope) => e.fact.hash === r.hash && e.fact.type === r.type; 77 | } 78 | 79 | export function uniqueFactReferences(references: FactReference[]): FactReference[] { 80 | return references.filter((value, index, array) => { 81 | return findIndex(array, factReferenceEquals(value)) === index; 82 | }); 83 | } 84 | 85 | export function computeTupleSubsetHash(tuple: ReferencesByName, subset: string[]) { 86 | const parentTuple = Object.getOwnPropertyNames(tuple) 87 | .filter(name => subset.some(s => s === name)) 88 | .reduce((t, name) => ({ 89 | ...t, 90 | [name]: tuple[name] 91 | }), 92 | {} as ReferencesByName); 93 | const parentTupleHash = computeObjectHash(parentTuple); 94 | return parentTupleHash; 95 | } 96 | 97 | export function validateGiven(start: FactReference[], specification: Specification) { 98 | // Verify that the number of start facts equals the number of inputs 99 | if (start.length !== specification.given.length) { 100 | throw new Error(`The number of start facts (${start.length}) does not equal the number of inputs (${specification.given.length})`); 101 | } 102 | // Verify that the input type matches the start fact type 
103 | for (let i = 0; i < start.length; i++) { 104 | if (start[i].type !== specification.given[i].label.type) { 105 | throw new Error(`The type of start fact ${i} (${start[i].type}) does not match the type of input ${i} (${specification.given[i].label.type})`); 106 | } 107 | } 108 | } -------------------------------------------------------------------------------- /test/fact/factReferenceSpec.ts: -------------------------------------------------------------------------------- 1 | import { Jinaga, JinagaTest } from '@src'; 2 | 3 | // Test fact classes 4 | class TestFact { 5 | static Type = "TestFact" as const; 6 | type = TestFact.Type; 7 | 8 | constructor( 9 | public value: string 10 | ) { } 11 | } 12 | 13 | class TestFactWithPredecessor { 14 | static Type = "TestFactWithPredecessor" as const; 15 | type = TestFactWithPredecessor.Type; 16 | 17 | constructor( 18 | public parent: TestFact, 19 | public value: string 20 | ) { } 21 | } 22 | 23 | // Invalid test class without Type property 24 | class InvalidFact { 25 | constructor(public value: string) { } 26 | } 27 | 28 | describe('factReference', () => { 29 | let j: Jinaga; 30 | 31 | beforeEach(() => { 32 | j = JinagaTest.create({}); 33 | }); 34 | 35 | it('should create a fact reference with correct type', () => { 36 | const hash = 'test-hash-123'; 37 | const factRef = Jinaga.factReference(TestFact, hash); 38 | 39 | expect(factRef.type).toBe('TestFact'); 40 | expect(typeof factRef).toBe('object'); 41 | }); 42 | 43 | it('should allow the fact reference to be hashed', () => { 44 | const hash = 'test-hash-123'; 45 | const factRef = Jinaga.factReference(TestFact, hash); 46 | 47 | const retrievedHash = Jinaga.hash(factRef); 48 | expect(retrievedHash).toBe(hash); 49 | }); 50 | 51 | it('should work with instance method', () => { 52 | const hash = 'test-hash-456'; 53 | const factRef = j.factReference(TestFact, hash); 54 | 55 | expect(factRef.type).toBe('TestFact'); 56 | expect(j.hash(factRef)).toBe(hash); 57 | }); 58 | 59 | it('should have proper TypeScript typing', () => { 60 | const hash = 'test-hash-789'; 61 | const factRef = Jinaga.factReference(TestFact, hash); 62 | 63 | // This should compile without type errors 64 | const type: string = factRef.type; 65 | expect(type).toBe('TestFact'); 66 | 67 | // The returned object should be typed as TestFact 68 | // Note: We can't actually access .value since we only created a reference 69 | // but TypeScript should treat it as a TestFact 70 | }); 71 | 72 | it('should work with more complex fact types', () => { 73 | const hash = 'complex-hash-123'; 74 | const factRef = Jinaga.factReference(TestFactWithPredecessor, hash); 75 | 76 | expect(factRef.type).toBe('TestFactWithPredecessor'); 77 | expect(Jinaga.hash(factRef)).toBe(hash); 78 | }); 79 | 80 | it('should throw error for constructor without Type property', () => { 81 | const hash = 'invalid-hash'; 82 | 83 | expect(() => { 84 | Jinaga.factReference(InvalidFact as any, hash); 85 | }).toThrow('Constructor must have a static Type property of type string'); 86 | }); 87 | 88 | it('should throw error for constructor with non-string Type property', () => { 89 | class BadFact { 90 | static Type = 123; // Wrong type 91 | } 92 | 93 | const hash = 'bad-hash'; 94 | 95 | expect(() => { 96 | Jinaga.factReference(BadFact as any, hash); 97 | }).toThrow('Constructor must have a static Type property of type string'); 98 | }); 99 | 100 | it('should work with valid hash string', () => { 101 | const hash = 'validHashExample123=='; 102 | const factRef = 
Jinaga.factReference(TestFact, hash); 103 | 104 | expect(factRef.type).toBe('TestFact'); 105 | expect(Jinaga.hash(factRef)).toBe(hash); 106 | }); 107 | 108 | it('should preserve hash through multiple operations', () => { 109 | const hash = 'persistent-hash-123'; 110 | const factRef = Jinaga.factReference(TestFact, hash); 111 | 112 | // Hash should be consistent 113 | expect(Jinaga.hash(factRef)).toBe(hash); 114 | expect(Jinaga.hash(factRef)).toBe(hash); 115 | 116 | // Should work with instance method too 117 | expect(j.hash(factRef)).toBe(hash); 118 | }); 119 | }); -------------------------------------------------------------------------------- /documentation/predecessor.md: -------------------------------------------------------------------------------- 1 | # Querying for `predecessor` 2 | 3 | The `predecessor()` method allows you to navigate from a fact to its direct predecessor in a specification. This provides a convenient way to traverse relationships in the reverse direction. 4 | 5 | ## Example of Using the `predecessor` Method 6 | 7 | In the context of a company model, you can use the `predecessor()` method to find the company that an office belongs to: 8 | 9 | ```typescript 10 | const specification = model.given(Office).match(office => 11 | office.company.predecessor() 12 | ); 13 | 14 | const result = await j.query(specification, office); 15 | ``` 16 | 17 | In this example, the `predecessor()` method is used to navigate from an office to its company. 18 | 19 | ## Compared to the `join` Method 20 | 21 | The alternative to the `predecessor()` syntax in Jinaga is to use the `join` method with `facts.ofType()`: 22 | 23 | ```typescript 24 | const specification = model.given(Office).match((office, facts) => 25 | facts.ofType(Company) 26 | .join(company => company, office.company) 27 | ); 28 | 29 | const result = await j.query(specification, office); 30 | ``` 31 | 32 | The `predecessor()` method provides a more concise and readable way to express the same query. 33 | 34 | ## Using the `predecessor` Method with Projections 35 | 36 | You can use the `predecessor()` method with projections to select specific fields or create composite results: 37 | 38 | ```typescript 39 | const specification = model.given(Office).match(office => 40 | office.company.predecessor() 41 | .select(company => company.identifier) 42 | ); 43 | 44 | const result = await j.query(specification, office); 45 | ``` 46 | 47 | For composite projections that include predecessor relationships, you need to properly label the predecessor facts: 48 | 49 | ```typescript 50 | const specification = model.given(Office).match((office, facts) => 51 | office.company.predecessor() 52 | .select(company => ({ 53 | identifier: company.identifier, 54 | creator: facts.ofType(User) 55 | .join(user => user, company.creator) 56 | })) 57 | ); 58 | 59 | const result = await j.query(specification, office); 60 | ``` 61 | 62 | ## Chaining Predecessor Calls 63 | 64 | You can chain multiple `predecessor()` calls to navigate through multiple levels of relationships: 65 | 66 | ```typescript 67 | const specification = model.given(President).match(president => 68 | president.office.company.predecessor() 69 | ); 70 | 71 | const result = await j.query(specification, president); 72 | ``` 73 | 74 | This example navigates from a president to their office, and then to the company of that office. 
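
## The Model Behind These Examples

The queries in this document assume a small company model with `Company`, `Office`, `President`, and `User` facts. The classes below are a minimal sketch of what that model could look like, following the fact-class conventions used by the tests in this repository (a static `Type`, a `type` field, and predecessors as constructor parameters). The field names here are assumptions, and the actual test model may declare additional fields and facts; `User` is the library's built-in user fact.

```typescript
import { User } from "jinaga";

// Hypothetical fact classes for illustration only.
class Company {
    static Type = "Company" as const;
    type = Company.Type;
    constructor(
        public creator: User,        // predecessor: the user who created the company
        public identifier: string
    ) { }
}

class Office {
    static Type = "Office" as const;
    type = Office.Type;
    constructor(
        public company: Company,     // predecessor navigated by office.company.predecessor()
        public identifier: string
    ) { }
}

class President {
    static Type = "President" as const;
    type = President.Type;
    constructor(
        public office: Office,       // chained as president.office.company.predecessor()
        public user: User            // predecessor navigated by president.user.predecessor()
    ) { }
}
```

Facts such as `OfficeClosed` and `UserName`, which appear in the later sections, would follow the same pattern.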
75 | 76 | ## Combining with Existential Conditions 77 | 78 | The `predecessor()` method can be used with existential conditions: 79 | 80 | ```typescript 81 | const specification = model.given(OfficeClosed).match(officeClosed => 82 | officeClosed.office.predecessor() 83 | .exists(office => office.company.predecessor()) 84 | ); 85 | 86 | const result = await j.query(specification, officeClosed); 87 | ``` 88 | 89 | ## Combining with Successors 90 | 91 | You can combine `predecessor()` and `successors()` methods in the same query: 92 | 93 | ```typescript 94 | const specification = model.given(Company).match(company => 95 | company.successors(Office, office => office.company) 96 | .select(office => ({ 97 | identifier: office.identifier, 98 | presidents: office.successors(President, president => president.office) 99 | .selectMany(president => president.user.predecessor() 100 | .select(user => ({ 101 | user: user, 102 | names: user.successors(UserName, userName => userName.user) 103 | .select(userName => userName.value) 104 | })) 105 | ) 106 | })) 107 | ); 108 | 109 | const result = await j.query(specification, company); 110 | ``` 111 | 112 | This example shows how to navigate from a company to its offices (using `successors`), then to the presidents of those offices (using `successors`), then to the users who are those presidents (using `predecessor`), and finally to the names of those users (using `successors`). 113 | -------------------------------------------------------------------------------- /test/purge/realTimePurgeSpec.ts: -------------------------------------------------------------------------------- 1 | import { JinagaClient, Model, PurgeConditions } from "@src"; 2 | import { createModel, Item, Order, OrderCancelled, OrderCancelledReason, Product, Store } from "../orderModel"; 3 | 4 | describe("Real-time purge", () => { 5 | it("Should find descendants if purge condition is not met", async () => { 6 | const model = createModel(); 7 | const j = givenClientWithPurgeCondition(model); 8 | 9 | const store = await j.fact(new Store("storeId")); 10 | const order = await j.fact(new Order(store, new Date())); 11 | const item1 = await j.fact(new Item(order, new Product(store, "product1"), 1)); 12 | const item2 = await j.fact(new Item(order, new Product(store, "product2"), 1)); 13 | 14 | const itemsInOrder = model.given(Order).match(order => 15 | order.successors(Item, item => item.order) 16 | ); 17 | 18 | const items = await j.query(itemsInOrder, order); 19 | expect(items).toEqual([item1, item2]); 20 | }); 21 | 22 | it("Should purge successors when condition is met", async () => { 23 | const model = createModel(); 24 | const j = givenClientWithPurgeCondition(model); 25 | 26 | const store = await j.fact(new Store("storeId")); 27 | const order = await j.fact(new Order(store, new Date())); 28 | const item1 = await j.fact(new Item(order, new Product(store, "product1"), 1)); 29 | const item2 = await j.fact(new Item(order, new Product(store, "product2"), 1)); 30 | const orderCancelled = await j.fact(new OrderCancelled(order, new Date())); 31 | 32 | const itemsInOrder = model.given(Order).match(order => 33 | order.successors(Item, item => item.order) 34 | ); 35 | 36 | const items = await j.query(itemsInOrder, order); 37 | expect(items).toEqual([]); 38 | }); 39 | 40 | it("Should not purge the trigger fact", async () => { 41 | const model = createModel(); 42 | const j = givenClientWithPurgeCondition(model); 43 | 44 | const store = await j.fact(new Store("storeId")); 45 | const order = await j.fact(new 
Order(store, new Date())); 46 | const item1 = await j.fact(new Item(order, new Product(store, "product1"), 1)); 47 | const item2 = await j.fact(new Item(order, new Product(store, "product2"), 1)); 48 | const orderCancelled = await j.fact(new OrderCancelled(order, new Date())); 49 | 50 | const cancelOfOrder = model.given(Order).match(order => 51 | order.successors(OrderCancelled, cancelled => cancelled.order) 52 | ); 53 | 54 | const cancels = await j.query(cancelOfOrder, order); 55 | expect(cancels).toEqual([orderCancelled]); 56 | }); 57 | 58 | it("Should not purge ancestors of the trigger fact", async () => { 59 | const model = createModel(); 60 | const j = createJinagaClient(p => p 61 | .whenExists(model.given(Order).match(order => 62 | order.successors(OrderCancelledReason, reason => reason.orderCancelled.order) 63 | )) 64 | ); 65 | 66 | const store = await j.fact(new Store("storeId")); 67 | const order = await j.fact(new Order(store, new Date())); 68 | const item1 = await j.fact(new Item(order, new Product(store, "product1"), 1)); 69 | const item2 = await j.fact(new Item(order, new Product(store, "product2"), 1)); 70 | const orderCancelled = await j.fact(new OrderCancelled(order, new Date())); 71 | const reason = await j.fact(new OrderCancelledReason(orderCancelled, "reason")); 72 | 73 | const cancelOfOrder = model.given(Order).match(order => 74 | order.successors(OrderCancelled, cancelled => cancelled.order) 75 | ); 76 | 77 | const cancels = await j.query(cancelOfOrder, order); 78 | expect(cancels).toEqual([orderCancelled]); 79 | }); 80 | }); 81 | 82 | function givenClientWithPurgeCondition(model: Model) { 83 | return createJinagaClient(p => p 84 | .whenExists(model.given(Order).match((order, facts) => facts.ofType(OrderCancelled) 85 | .join(orderCancelled => orderCancelled.order, order) 86 | )) 87 | ); 88 | } 89 | 90 | function createJinagaClient(purgeConditions: (p: PurgeConditions) => PurgeConditions) { 91 | return JinagaClient.create({ 92 | purgeConditions 93 | }); 94 | } 95 | -------------------------------------------------------------------------------- /test/cryptography/keyPairSpec.ts: -------------------------------------------------------------------------------- 1 | import { FactRecord, KeyPair, dehydrateFact, generateKeyPair, signFacts, verifyEnvelopes } from "@src"; 2 | 3 | describe("keyPair", () => { 4 | it("should generate consistent signature", async () => { 5 | const keyPair = givenKnownKeyPair() 6 | const factEnvelopes = givenSignedFact(keyPair); 7 | 8 | expect(factEnvelopes.length).toBe(1); 9 | expect(factEnvelopes[0].signatures.length).toBe(1); 10 | expect(factEnvelopes[0].signatures[0].publicKey).toBe(keyPair.publicPem); 11 | expect(factEnvelopes[0].signatures[0].signature).toBe("bfbj+2E49gqpL2A3ihvt6ybLJjrgJYCWzhjHb56F9QNLDe+K5h+NGLpCwXKMOI/gQPY7nkRW5snbugvq2C2vTTEpAdE7kEMKsg4fId+ujEwB4w+N9cXAlOr9mLAEDxZ2/pxI+BeF3BZiqnp72AY8VHE/gVMcmUcaIfgFXw7TWKrXUQ9/tJXp5N3Ph8QBH0j9L9+/GFQrquXg8M2MYmkidp+fL8tuiIMQSryCUuX4xMCTmooyTB0o2XJE6KpoJwEBQRv+FhJJGDqdaAoawNIoBEIVn5gwx7UGkJ53KgYQzL4IPSTW9OxiembNc8E7aYfyMhSG1+wFl45xpJThRuFRcA=="); 12 | }); 13 | 14 | it("should verify a signature", async () => { 15 | const keyPair = givenKnownKeyPair() 16 | const factEnvelopes = givenSignedFact(keyPair); 17 | 18 | const verified = verifyEnvelopes(factEnvelopes); 19 | 20 | expect(verified).toBe(true); 21 | }); 22 | 23 | it("should sign with a new key pair", async () => { 24 | const keyPair = generateKeyPair(); 25 | const factEnvelopes = givenSignedFact(keyPair); 26 | 27 | const verified = 
verifyEnvelopes(factEnvelopes); 28 | 29 | expect(verified).toBe(true); 30 | }); 31 | 32 | it("should not verify a signature if the content has been modified", async () => { 33 | const keyPair = generateKeyPair(); 34 | const factEnvelopes = givenSignedFact(keyPair); 35 | 36 | factEnvelopes[0].fact.fields["identifier"] = "staging"; 37 | const verified = verifyEnvelopes(factEnvelopes); 38 | 39 | expect(verified).toBe(false); 40 | }); 41 | }); 42 | 43 | function givenKnownKeyPair(): KeyPair { 44 | return { 45 | publicPem: "-----BEGIN PUBLIC KEY-----\r\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4wP7IyUZICcZ5JC+UUxB\r\nZZOo8mE7R2zj8Zba5krMAqDMFbQ8bWS+nTbFVHgun1Z+5HUCZ9HHv7d7KPLu+zuI\r\nfBi5CuiJy4LJkIUuL2eRBvy8VJPeyDfvRuZ6Dc5r+vp25omx5bWbPtjPczatUphl\r\nQ83GXvITQ4ZQN/C8w7/cewq4/qVrT+TfwvIiynBSFbU5NXE6dmbE1PbJFjtBlJJ+\r\nM2uiTKMKgrC7hpluEdO3oz1itV3CTHo4DGChARLia/ZRGTUlheunbSOnFupl/Rts\r\ny/wfvEh+CBt2MduUFBo2pLCe6NMTlhEpC+/jOhQnIaU8NWy5aUh6D6pIDGwond9Y\r\nCwIDAQAB\r\n-----END PUBLIC KEY-----\r\n", 46 | privatePem: "-----BEGIN RSA PRIVATE KEY-----\r\nMIIEpQIBAAKCAQEA4wP7IyUZICcZ5JC+UUxBZZOo8mE7R2zj8Zba5krMAqDMFbQ8\r\nbWS+nTbFVHgun1Z+5HUCZ9HHv7d7KPLu+zuIfBi5CuiJy4LJkIUuL2eRBvy8VJPe\r\nyDfvRuZ6Dc5r+vp25omx5bWbPtjPczatUphlQ83GXvITQ4ZQN/C8w7/cewq4/qVr\r\nT+TfwvIiynBSFbU5NXE6dmbE1PbJFjtBlJJ+M2uiTKMKgrC7hpluEdO3oz1itV3C\r\nTHo4DGChARLia/ZRGTUlheunbSOnFupl/Rtsy/wfvEh+CBt2MduUFBo2pLCe6NMT\r\nlhEpC+/jOhQnIaU8NWy5aUh6D6pIDGwond9YCwIDAQABAoIBAB8Ei7tdFcZFYW3P\r\n8xkTlLnmx4Y6j8luEOURCh6+KIrRYqEyi7Ecu0iq06J7e09NF7BqZmY+DQ9eaAcL\r\nzmhoVXkzPZFGfZFfcN/8undCrNeqD6d0vtNXhSuIUTPyuOFFeJp+RN7QhgI7yHiD\r\nB4KKDQgLJSgS5lvrDanfDEOowtzSs9Q0TS9dJzzJy4D/UddrSauducdEn2sAbx60\r\nUJ6JcAJcQjIi7T/AJLrkFOMrc23DUDeucR/qgRgx7BadU7TuPpbE1Phtrlryg6pi\r\n95V8fR1qRXVgVDKd69ky8HGWVcGVxyuvhzp2+JLOgjokf8vPtRdxfxlQQVfHzZDt\r\nqpDQDPUCgYEA+LbfXPvKVDj++exLZ/sn+0dPWGIE1IXPAyZNVfVxmOP/Wg3yOuwI\r\nNIYiZruP66ZxFGY7DPO198DRxn4FmSxMeDhdyYHkmJOOuT1f5Q5dJKPBW+oS0Elg\r\nd1EdrYouNNk5L1hN4JJ1CKe8lqnvgo0M7Rt+iAHtr0PKtVuNfTWBnO0CgYEA6apj\r\nOh9C94y42BTMDNxwQAjEFfCxKvEPGPD9MSPSltbbCMfFdJWwecOeY0H0/Q3qe2w0\r\nvUFy1/8yRjvwcei9dWq2vzKfjsvVjR8uE4nHfBAs2IBD9O7im0yOkUHQAjKDuMSE\r\nmfLsKQgZQOiiQ72euuCfDrdocF5Q8m3Dy8yyIdcCgYEA7jS32SaOsfukuVlHH1+8\r\n+z1hERVP6vv9ONcGWr2o/vXfKzEQPr6xXRza9enN/bR7uT3wcIc6UP+r6p/oXLvA\r\nwaO6r7RobHlmyKOvpIINU3pDRvT47+RXL+/QrNUbTCKAUogQjnW3AYMlbGd1rWPK\r\nbY1XsoumSaZ0Dx6QdMs6SEECgYEAl91XtFTRD1b0Y+vQWqOCiPuphaDzZLdbWph1\r\n1lQz8DkgDmrYGFeZZOoQrO4XLci3lxPSixZYb626nQ8jzMS5LfD3aPib3xD3cErN\r\nhYFMl4NjwipLAIup18k/94RQjr0KAzImBHBvsJNE5nzLyT8aRNbsSYJGbJHABm/0\r\niyY0t+0CgYEArmBe1THrvDmhjXsOeLBNX5n+e4lDffQ8CAWqwCthQT0FhQAu1cp9\r\nApgmMSSGjvWEvZDqeLdIXp8mVMoDDQWg38oiWoUCKl4yK87cR6PJcu38eJPixYW3\r\nzBc0D/fIthqccFxz5cKe2WzFbJKQW2q2VtZ35/WTAgeLueR9ewoFY60=\r\n-----END RSA PRIVATE KEY-----\r\n" 47 | }; 48 | } 49 | 50 | function givenSignedFact(keyPair: KeyPair) { 51 | const factRecords: FactRecord[] = dehydrateFact({ 52 | type: "MyApplication.Environment", 53 | identifier: "production" 54 | }); 55 | return signFacts(keyPair, factRecords); 56 | } -------------------------------------------------------------------------------- /test/http/serializerSpec.ts: -------------------------------------------------------------------------------- 1 | import { GraphSerializer } from "@src"; 2 | 3 | describe("GraphSerializer", () => { 4 | it("should write an empty graph", () => { 5 | let output = ""; 6 | const serializer = new GraphSerializer(chunk => { 7 | output += chunk; 8 | }); 9 | 10 | serializer.serialize([]); 11 | 12 | expect(output).toBe(""); 13 | }); 14 
| 15 | it("should write a graph with one fact without signatures", () => { 16 | let output = ""; 17 | const serializer = new GraphSerializer(chunk => { 18 | output += chunk; 19 | }); 20 | 21 | serializer.serialize([{ 22 | fact: { 23 | type: "MyApp.Root", 24 | hash: "roothash", 25 | fields: { 26 | identifier: "root" 27 | }, 28 | predecessors: {} 29 | }, 30 | signatures: [] 31 | }]); 32 | 33 | expect(output).toBe("\"MyApp.Root\"\n{}\n{\"identifier\":\"root\"}\n\n"); 34 | }); 35 | 36 | it("should write a graph with two facts without signatures", () => { 37 | let output = ""; 38 | const serializer = new GraphSerializer(chunk => { 39 | output += chunk; 40 | }); 41 | 42 | serializer.serialize([{ 43 | fact: { 44 | type: "MyApp.Root", 45 | hash: "roothash", 46 | fields: {}, 47 | predecessors: {} 48 | }, 49 | signatures: [] 50 | }, { 51 | fact: { 52 | type: "MyApp.Child", 53 | hash: "childhash", 54 | fields: {}, 55 | predecessors: { 56 | root: { 57 | type: "MyApp.Root", 58 | hash: "roothash" 59 | } 60 | } 61 | }, 62 | signatures: [] 63 | }]); 64 | 65 | expect(output).toBe("\"MyApp.Root\"\n{}\n{}\n\n\"MyApp.Child\"\n{\"root\":0}\n{}\n\n"); 66 | }); 67 | 68 | it("should not repeat a fact", () => { 69 | let output = ""; 70 | const serializer = new GraphSerializer(chunk => { 71 | output += chunk; 72 | }); 73 | 74 | serializer.serialize([{ 75 | fact: { 76 | type: "MyApp.Root", 77 | hash: "roothash", 78 | fields: {}, 79 | predecessors: {} 80 | }, 81 | signatures: [] 82 | }, { 83 | fact: { 84 | type: "MyApp.Root", 85 | hash: "roothash", 86 | fields: {}, 87 | predecessors: {} 88 | }, 89 | signatures: [] 90 | }]); 91 | 92 | expect(output).toBe("\"MyApp.Root\"\n{}\n{}\n\n"); 93 | }); 94 | 95 | it("should write a graph with two facts with signatures", () => { 96 | let output = ""; 97 | const serializer = new GraphSerializer(chunk => { 98 | output += chunk; 99 | }); 100 | 101 | serializer.serialize([{ 102 | fact: { 103 | type: "MyApp.Root", 104 | hash: "roothash", 105 | fields: {}, 106 | predecessors: {} 107 | }, 108 | signatures: [{ 109 | publicKey: "public", 110 | signature: "signature" 111 | }] 112 | }, { 113 | fact: { 114 | type: "MyApp.Child", 115 | hash: "childhash", 116 | fields: {}, 117 | predecessors: { 118 | root: { 119 | type: "MyApp.Root", 120 | hash: "roothash" 121 | } 122 | } 123 | }, 124 | signatures: [{ 125 | publicKey: "public", 126 | signature: "signature1" 127 | }, { 128 | publicKey: "public2", 129 | signature: "signature2" 130 | }] 131 | }]); 132 | 133 | expect(output).toBe( 134 | "PK0\n\"public\"\n\n" + 135 | "\"MyApp.Root\"\n{}\n{}\nPK0\n\"signature\"\n\n" + 136 | "PK1\n\"public2\"\n\n" + 137 | "\"MyApp.Child\"\n{\"root\":0}\n{}\nPK0\n\"signature1\"\nPK1\n\"signature2\"\n\n" 138 | ); 139 | }); 140 | }); -------------------------------------------------------------------------------- /src/managers/QueueProcessor.ts: -------------------------------------------------------------------------------- 1 | import { Trace } from "../util/trace"; 2 | 3 | /** 4 | * Interface for a component that can save data. 5 | */ 6 | export interface Saver { 7 | /** 8 | * Saves data to the network. 
9 | */ 10 | save(): Promise<void>; 11 | } 12 | 13 | class Batch { 14 | private isActive = false; 15 | private hasWork = false; 16 | private isTerminated = false; 17 | private delay: NodeJS.Timeout | null = null; 18 | private nextBatch: Batch | null = null; 19 | private notifyResolver: (() => void) | null = null; 20 | private notifyRejector: ((error: Error) => void) | null = null; 21 | private notifyPromise: Promise<void> | null = null; 22 | 23 | constructor( 24 | private readonly saver: Saver, 25 | private readonly delayMilliseconds: number, 26 | private readonly setBatch: (batch: Batch) => void 27 | ) { 28 | } 29 | 30 | activate() { 31 | this.isActive = true; 32 | this.beginWaiting(); 33 | } 34 | 35 | workArrived() { 36 | this.hasWork = true; 37 | this.beginWaiting(); 38 | } 39 | 40 | runNow(): Promise<void> { 41 | if (this.isTerminated) { 42 | return Promise.resolve(); 43 | } 44 | if (!this.notifyPromise) { 45 | this.notifyPromise = new Promise<void>((resolve, reject) => { 46 | this.notifyResolver = resolve; 47 | this.notifyRejector = reject; 48 | }); 49 | this.beginWorking(); 50 | } 51 | return this.notifyPromise; 52 | } 53 | 54 | terminate() { 55 | this.isTerminated = true; 56 | if (this.delay) { 57 | clearTimeout(this.delay); 58 | this.delay = null; 59 | } 60 | if (this.notifyRejector) { 61 | this.notifyRejector(new Error("QueueProcessor terminated")); 62 | } 63 | } 64 | 65 | private beginWaiting() { 66 | if (this.isTerminated || !this.isActive || !this.hasWork || this.delay) { 67 | return; 68 | } 69 | if (this.delayMilliseconds === 0) { 70 | this.beginWorking(); 71 | } else { 72 | this.delay = setTimeout(() => { 73 | this.beginWorking(); 74 | }, this.delayMilliseconds); 75 | } 76 | } 77 | 78 | private beginWorking() { 79 | if (this.nextBatch) { 80 | return; 81 | } 82 | this.nextBatch = new Batch(this.saver, this.delayMilliseconds, this.setBatch); 83 | this.setBatch(this.nextBatch); 84 | this.saver.save() 85 | .then(() => this.done(null)) 86 | .catch((error) => this.done(error)); 87 | } 88 | 89 | private done(error: Error | null) { 90 | if (this.notifyResolver) { 91 | if (error) { 92 | this.notifyRejector!(error); 93 | } else { 94 | this.notifyResolver!(); 95 | } 96 | } else if (error) { 97 | Trace.error(error); 98 | } 99 | if (this.nextBatch) { 100 | this.nextBatch.activate(); 101 | } 102 | } 103 | } 104 | 105 | /** 106 | * Processes a queue with a debouncing mechanism. 107 | * This improves performance by batching multiple operations together. 108 | */ 109 | export class QueueProcessor { 110 | 111 | private currentBatch: Batch; 112 | 113 | /** 114 | * Creates a new QueueProcessor. 115 | * @param saver The component that will save the data. 116 | * @param delayMilliseconds The delay in milliseconds before processing the queue. 117 | */ 118 | constructor( 119 | saver: Saver, 120 | delayMilliseconds: number 121 | ) { 122 | this.currentBatch = new Batch(saver, delayMilliseconds, (batch) => { 123 | this.currentBatch = batch; 124 | }); 125 | this.currentBatch.activate(); 126 | } 127 | 128 | /** 129 | * Schedules processing of the queue with a delay. 130 | * This allows multiple operations to be batched together. 131 | */ 132 | public scheduleProcessing(): void { 133 | this.currentBatch.workArrived(); 134 | } 135 | 136 | /** 137 | * Processes the queue immediately, bypassing any delay. 138 | */ 139 | public async processQueueNow(): Promise<void> { 140 | await this.currentBatch.runNow(); 141 | } 142 | 143 | /** 144 | * Disposes of the QueueProcessor. 
145 | */ 146 | public dispose() { 147 | this.currentBatch.terminate(); 148 | } 149 | } -------------------------------------------------------------------------------- /src/ws/wsGraphNetwork.ts: -------------------------------------------------------------------------------- 1 | import { Network } from "../managers/NetworkManager"; 2 | import { Specification } from "../specification/specification"; 3 | import { FactEnvelope, FactReference, Storage } from "../storage"; 4 | import { FeedResponse } from "../http/messages"; 5 | import { HttpNetwork } from "../http/httpNetwork"; 6 | import { WsGraphClient } from "./ws-graph-client"; 7 | import { UserIdentity } from "../user-identity"; 8 | 9 | export class WsGraphNetwork implements Network { 10 | private readonly wsClient: WsGraphClient; 11 | private factsAddedListener?: (envelopes: FactEnvelope[]) => Promise; 12 | 13 | constructor( 14 | private readonly httpNetwork: HttpNetwork, 15 | store: Storage, 16 | wsEndpoint: string, 17 | getUserIdentity?: () => Promise, 18 | getAuthorizationHeader?: () => Promise 19 | ) { 20 | const getWsUrl = async () => { 21 | try { 22 | const url = new URL(wsEndpoint); 23 | // Append Authorization token if provided (browsers cannot set custom WS headers) 24 | if (getAuthorizationHeader) { 25 | try { 26 | const auth = await getAuthorizationHeader(); 27 | if (auth) { 28 | url.searchParams.set("authorization", auth); 29 | } 30 | } 31 | catch { 32 | // ignore auth retrieval failures 33 | } 34 | } 35 | // Optionally append user identity 36 | if (getUserIdentity) { 37 | try { 38 | const id = await getUserIdentity(); 39 | if (id) { 40 | url.searchParams.set("uid", `${encodeURIComponent(id.provider)}:${encodeURIComponent(id.id)}`); 41 | } 42 | } 43 | catch { 44 | // ignore identity retrieval failures 45 | } 46 | } 47 | return url.toString(); 48 | } 49 | catch { 50 | return wsEndpoint; 51 | } 52 | }; 53 | this.wsClient = new WsGraphClient( 54 | getWsUrl, 55 | store, 56 | (feed, bookmark) => this.onBookmarkAdvance(feed, bookmark), 57 | (err) => this.onGlobalError(err), 58 | getUserIdentity, 59 | (envelopes) => this.onFactsAdded(envelopes) 60 | ); 61 | } 62 | 63 | feeds(start: FactReference[], specification: Specification): Promise { 64 | return this.httpNetwork.feeds(start, specification); 65 | } 66 | 67 | fetchFeed(feed: string, bookmark: string): Promise { 68 | return this.httpNetwork.fetchFeed(feed, bookmark); 69 | } 70 | 71 | streamFeed( 72 | feed: string, 73 | bookmark: string, 74 | onResponse: (factReferences: FactReference[], nextBookmark: string) => Promise, 75 | onError: (err: Error) => void, 76 | feedRefreshIntervalSeconds: number 77 | ): () => void { 78 | // Register a temporary handler for BOOK events for this feed 79 | this.onResponseHandlers.set(feed, onResponse); 80 | this.onErrorHandlers.set(feed, onError); 81 | const unsubscribe = this.wsClient.subscribe(feed, bookmark, feedRefreshIntervalSeconds); 82 | return () => { 83 | this.onResponseHandlers.delete(feed); 84 | this.onErrorHandlers.delete(feed); 85 | unsubscribe(); 86 | }; 87 | } 88 | 89 | load(factReferences: FactReference[]): Promise { 90 | return this.httpNetwork.load(factReferences); 91 | } 92 | 93 | // Internal per-feed event maps 94 | private readonly onResponseHandlers = new Map Promise>(); 95 | private readonly onErrorHandlers = new Map void>(); 96 | 97 | private async onBookmarkAdvance(feed: string, bookmark: string) { 98 | const handler = this.onResponseHandlers.get(feed); 99 | if (handler) { 100 | // Facts already persisted via graph stream, 
notify empty refs with updated bookmark 101 | await handler([], bookmark); 102 | } 103 | } 104 | 105 | private onGlobalError(err: Error) { 106 | // Broadcast error to all active feeds 107 | for (const h of this.onErrorHandlers.values()) { 108 | h(err); 109 | } 110 | } 111 | 112 | // Phase 3.4: Observer-notification bridge 113 | setFactsAddedListener(listener: (envelopes: FactEnvelope[]) => Promise) { 114 | this.factsAddedListener = listener; 115 | } 116 | 117 | // Called by WsGraphClient when facts are added via WS 118 | async onFactsAdded(envelopes: FactEnvelope[]) { 119 | if (this.factsAddedListener) { 120 | await this.factsAddedListener(envelopes); 121 | } 122 | } 123 | } -------------------------------------------------------------------------------- /test/fact/knownHashSpec.ts: -------------------------------------------------------------------------------- 1 | import { Dehydration, HashMap } from "@src"; 2 | 3 | describe("Known hash", () => { 4 | it("String field", () => { 5 | const hash = hashOf({ 6 | type: "Skylane.Airline", 7 | identifier: "value" 8 | }); 9 | expect(hash).toEqual("uXcsBceLFAkZdRD71Ztvc+QwASayHA0Zg7wC2mc3zl28N1hKTbGBfBA2OnEHAWo+0yYVeUnABMn9MCRH8cRHWg=="); 10 | }); 11 | 12 | it("Predecessor", () => { 13 | const hash = hashOf({ 14 | type: "Skylane.Airline.Day", 15 | airline: { 16 | type: "Skylane.Airline", 17 | identifier: "value" 18 | }, 19 | date: "2021-07-04T00:00:00.000Z" 20 | }); 21 | expect(hash).toEqual("cQaErYsizavFrTIGjD1C0g3shMG/uq+hVUXzs/kCzcvev9gPrVDom3pbrszUsmeRelNv8bRdIvOb6AbaYrVC7w=="); 22 | }); 23 | 24 | it("Integer field", () => { 25 | const hash = hashOf({ 26 | type: "Skylane.Flight", 27 | airlineDay: { 28 | type: "Skylane.Airline.Day", 29 | airline: { 30 | type: "Skylane.Airline", 31 | identifier: "value" 32 | }, 33 | date: "2021-07-04T00:00:00.000Z" 34 | }, 35 | flightNumber: 4247 36 | }); 37 | expect(hash).toEqual("PyXT7pCvBq7Vw63kEZGgbIVJxqA7jhoO+QbmeM3YC9laayG0gjln58khyOd4D/cmxXzocPaIuwXGWusVJxqEjQ=="); 38 | }); 39 | 40 | it("Empty predecessor list", () => { 41 | const hash = hashOf({ 42 | type: "Skylane.Passenger.Name", 43 | passenger: { 44 | type: "Skylane.Passenger", 45 | airline: { 46 | type: "Skylane.Airline", 47 | identifier: "IA" 48 | }, 49 | user: { 50 | type: "Jinaga.User", 51 | publicKey: "---PUBLIC KEY---" 52 | } 53 | }, 54 | value: "Charles Rane", 55 | prior: [] 56 | }); 57 | expect(hash).toEqual("GsMMA/8Nv401P6RXvugFYzYCemGehnXSFZuaKNcoVFoXKmxzMJkpqI9rs/SRlKHZlnRP1QsBxFWKFt6143OpYA=="); 58 | }); 59 | 60 | it("Single predecessor list", () => { 61 | const passenger = { 62 | type: "Skylane.Passenger", 63 | airline: { 64 | type: "Skylane.Airline", 65 | identifier: "IA" 66 | }, 67 | user: { 68 | type: "Jinaga.User", 69 | publicKey: "---PUBLIC KEY---" 70 | } 71 | }; 72 | const first = { 73 | type: "Skylane.Passenger.Name", 74 | passenger, 75 | value: "Charles Rane", 76 | prior: [] 77 | }; 78 | const hash = hashOf({ 79 | type: "Skylane.Passenger.Name", 80 | passenger, 81 | value: "Charley Rane", 82 | prior: [ first ] 83 | }); 84 | expect(hash).toEqual("BYLtR7XddbhchlyBdGdrnRHGkPsDecynDjLHFvqtKH7zug46ymxNDpPC4QNb+T14Bhzs8M1F3VfCnlgzinNHPg=="); 85 | }); 86 | 87 | it("Multiple predecessor list", () => { 88 | const passenger = { 89 | type: "Skylane.Passenger", 90 | airline: { 91 | type: "Skylane.Airline", 92 | identifier: "IA" 93 | }, 94 | user: { 95 | type: "Jinaga.User", 96 | publicKey: "---PUBLIC KEY---" 97 | } 98 | }; 99 | const first = { 100 | type: "Skylane.Passenger.Name", 101 | passenger, 102 | value: "Charles Rane", 103 | prior: [] 
104 | }; 105 | const middle = [1,2,3,4,5,6,7,8,9,10] 106 | .map(id => ({ 107 | type: "Skylane.Passenger.Name", 108 | passenger, 109 | value: `Charley Rane ${id}`, 110 | prior: [ first ] 111 | })); 112 | const hash = hashOf({ 113 | type: "Skylane.Passenger.Name", 114 | passenger, 115 | value: "Charley Rane", 116 | prior: middle 117 | }); 118 | expect(hash).toEqual("4Os8M2Tt7+lCEe6WQ6iAJwQ/wbmK6CTLqwF8DCS6Bc4tgXE268BanI0sHDeSYhbKYbSDAyRzarMkrciveBoDTQ=="); 119 | }); 120 | }); 121 | 122 | function hashOf(fact: HashMap) { 123 | const dehydration = new Dehydration(); 124 | const record = dehydration.dehydrate(fact); 125 | return record.hash; 126 | } -------------------------------------------------------------------------------- /test/http/deserializerSpec.ts: -------------------------------------------------------------------------------- 1 | import { FactEnvelope, GraphDeserializer } from "@src"; 2 | 3 | describe("GraphDeserializer", () => { 4 | it("should read an empty graph", async () => { 5 | const input = ""; 6 | const readLine = createReadLine(input); 7 | const deserializer = new GraphDeserializer(readLine); 8 | const envelopes = await readAll(deserializer); 9 | expect(envelopes).toEqual([]); 10 | }); 11 | 12 | it("should read a graph with one fact without signatures", async () => { 13 | const input = "\"MyApp.Root\"\n{}\n{\"identifier\":\"root\"}\n\n"; 14 | const readLine = createReadLine(input); 15 | const deserializer = new GraphDeserializer(readLine); 16 | const envelopes = await readAll(deserializer); 17 | expect(envelopes).toEqual([{ 18 | fact: { 19 | type: "MyApp.Root", 20 | hash: "2nxJF8sJEFIuY70VLJvhOR+9V28FoH98lLaL3cCXGqpDpX/lYz0mjohvHxvjHBgDAleJ5L2Dq4Qa2ybGE5NNww==", 21 | fields: { 22 | identifier: "root" 23 | }, 24 | predecessors: {} 25 | }, 26 | signatures: [] 27 | }]); 28 | }); 29 | 30 | it("should read a graph with two facts without signatures", async () => { 31 | const input = "\"MyApp.Root\"\n{}\n{}\n\n\"MyApp.Child\"\n{\"root\":0}\n{}\n\n"; 32 | const readLine = createReadLine(input); 33 | const deserializer = new GraphDeserializer(readLine); 34 | const envelopes = await readAll(deserializer); 35 | expect(envelopes).toEqual([{ 36 | fact: { 37 | type: "MyApp.Root", 38 | hash: "fSS1hK7OGAeSX4ocN3acuFF87jvzCdPN3vLFUtcej0lOAsVV859UIYZLRcHUoMbyd/J31TdVn5QuE7094oqUPg==", 39 | fields: {}, 40 | predecessors: {} 41 | }, 42 | signatures: [] 43 | }, { 44 | fact: { 45 | type: "MyApp.Child", 46 | hash: "9m4j5fur76Ofg2PnOxtlufPDKt7DKqqJewylpt0T6HluB5OhyqBaKTtO9SjtkKmI6CxLWmgGdZzdV1Al0YVtRg==", 47 | fields: {}, 48 | predecessors: { 49 | root: { 50 | type: "MyApp.Root", 51 | hash: "fSS1hK7OGAeSX4ocN3acuFF87jvzCdPN3vLFUtcej0lOAsVV859UIYZLRcHUoMbyd/J31TdVn5QuE7094oqUPg==" 52 | } 53 | } 54 | }, 55 | signatures: [] 56 | }]); 57 | }); 58 | 59 | it("should read a graph with two facts with signatures", async () => { 60 | const input = 61 | "PK0\n\"public\"\n\n" + 62 | "\"MyApp.Root\"\n{}\n{}\nPK0\n\"signature\"\n\n" + 63 | "PK1\n\"public2\"\n\n" + 64 | "\"MyApp.Child\"\n{\"root\":0}\n{}\nPK0\n\"signature1\"\nPK1\n\"signature2\"\n\n"; 65 | const readLine = createReadLine(input); 66 | const deserializer = new GraphDeserializer(readLine); 67 | const envelopes = await readAll(deserializer); 68 | expect(envelopes).toEqual([{ 69 | fact: { 70 | type: "MyApp.Root", 71 | hash: "fSS1hK7OGAeSX4ocN3acuFF87jvzCdPN3vLFUtcej0lOAsVV859UIYZLRcHUoMbyd/J31TdVn5QuE7094oqUPg==", 72 | fields: {}, 73 | predecessors: {} 74 | }, 75 | signatures: [{ 76 | publicKey: "public", 77 | signature: "signature" 78 | }] 79 | }, 
{ 80 | fact: { 81 | type: "MyApp.Child", 82 | hash: "9m4j5fur76Ofg2PnOxtlufPDKt7DKqqJewylpt0T6HluB5OhyqBaKTtO9SjtkKmI6CxLWmgGdZzdV1Al0YVtRg==", 83 | fields: {}, 84 | predecessors: { 85 | root: { 86 | type: "MyApp.Root", 87 | hash: "fSS1hK7OGAeSX4ocN3acuFF87jvzCdPN3vLFUtcej0lOAsVV859UIYZLRcHUoMbyd/J31TdVn5QuE7094oqUPg==" 88 | } 89 | } 90 | }, 91 | signatures: [ 92 | { 93 | "publicKey": "public", 94 | "signature": "signature1", 95 | }, { 96 | "publicKey": "public2", 97 | "signature": "signature2", 98 | }, 99 | ], 100 | }]); 101 | }); 102 | }); 103 | 104 | function createReadLine(input: string) { 105 | const lines = input.split("\n"); 106 | if (lines[lines.length - 1] === "") { 107 | lines.pop(); 108 | } 109 | return async () => { 110 | const line = lines.shift(); 111 | return line !== undefined ? line : null; 112 | }; 113 | } 114 | 115 | async function readAll(deserializer: GraphDeserializer) { 116 | const envelopes: FactEnvelope[] = []; 117 | await deserializer.read(async (batch) => { 118 | envelopes.push(...batch); 119 | }); 120 | return envelopes; 121 | } -------------------------------------------------------------------------------- /src/http/deserializer.ts: -------------------------------------------------------------------------------- 1 | import { computeHash } from "../fact/hash"; 2 | import { FactEnvelope, FactReference, FactRecord, PredecessorCollection, FactSignature } from "../storage"; 3 | 4 | export interface GraphSource { 5 | read( 6 | onEnvelopes: (envelopes: FactEnvelope[]) => Promise 7 | ): Promise; 8 | } 9 | 10 | export class GraphDeserializer implements GraphSource { 11 | private factReferences: FactReference[] = []; 12 | private publicKeys: string[] = []; 13 | 14 | constructor( 15 | private readonly readLine: () => Promise, 16 | private readonly flushThreshold: number = 20 17 | ) {} 18 | 19 | async read( 20 | onEnvelopes: (envelopes: FactEnvelope[]) => Promise 21 | ) { 22 | let envelopes: FactEnvelope[] = []; 23 | let line: string | null; 24 | while ((line = await this.readLine()) !== null) { 25 | if (line === "") { 26 | // Skip stray blank lines between blocks 27 | continue; 28 | } 29 | if (line.startsWith("PK")) { 30 | const index = parseInt(line.substring(2)); 31 | await this.readPublicKey(index); 32 | } 33 | else { 34 | const type = JSON.parse(line); 35 | envelopes = await this.readEnvelope(type, envelopes, onEnvelopes); 36 | } 37 | } 38 | if (envelopes.length > 0) { 39 | await onEnvelopes(envelopes); 40 | } 41 | } 42 | 43 | private async readPublicKey(index: number) { 44 | if (index !== this.publicKeys.length) { 45 | throw new Error(`Public key index ${index} is out of order`); 46 | } 47 | const publicKey = await this.parseNextJSONLine(); 48 | const emptyLine = await this.readLine(); 49 | if (emptyLine !== "") { 50 | throw new Error(`Expected empty line after public key, but got "${emptyLine}"`); 51 | } 52 | this.publicKeys.push(publicKey); 53 | } 54 | 55 | private async readEnvelope(type: string, envelopes: FactEnvelope[], onEnvelopes: (envelopes: FactEnvelope[]) => Promise) { 56 | const predecessorIndexes = await this.parseNextJSONLine(); 57 | const fields = await this.parseNextJSONLine(); 58 | 59 | const predecessors = this.getPredecessorReferences(predecessorIndexes); 60 | 61 | const hash = computeHash(fields, predecessors); 62 | this.factReferences.push({ type, hash }); 63 | const fact: FactRecord = { type, hash, predecessors, fields }; 64 | 65 | const signatures = await this.readSignatures(); 66 | 67 | envelopes.push({ fact, signatures }); 68 | 69 | // 
Periodically handle a batch of envelopes 70 | if (envelopes.length >= this.flushThreshold) { 71 | await onEnvelopes(envelopes); 72 | envelopes = []; 73 | } 74 | return envelopes; 75 | } 76 | 77 | private getPredecessorReferences(predecessorIndexes: any) { 78 | const predecessors: PredecessorCollection = {}; 79 | for (const role in predecessorIndexes) { 80 | const index = predecessorIndexes[role]; 81 | if (Array.isArray(index)) { 82 | predecessors[role] = index.map(i => { 83 | if (i >= this.factReferences.length) { 84 | throw new Error(`Predecessor reference ${i} is out of range`); 85 | } 86 | return this.factReferences[i]; 87 | }); 88 | } else { 89 | if (index >= this.factReferences.length) { 90 | throw new Error(`Predecessor reference ${index} is out of range`); 91 | } 92 | predecessors[role] = this.factReferences[index]; 93 | } 94 | } 95 | return predecessors; 96 | } 97 | 98 | private async readSignatures(): Promise { 99 | const signatures: FactSignature[] = []; 100 | let line: string | null; 101 | while ((line = await this.readLine()) !== null && line !== "") { 102 | if (!line.startsWith("PK")) { 103 | throw new Error(`Expected public key reference, but got "${line}"`); 104 | } 105 | const publicKeyIndex = parseInt(line.substring(2)); 106 | if (publicKeyIndex >= this.publicKeys.length) { 107 | throw new Error(`Public key reference ${publicKeyIndex} is out of range`); 108 | } 109 | const publicKey = this.publicKeys[publicKeyIndex]; 110 | const signature = await this.parseNextJSONLine(); 111 | 112 | signatures.push({ publicKey, signature }); 113 | } 114 | return signatures; 115 | } 116 | 117 | private async parseNextJSONLine() { 118 | const line = await this.readLine(); 119 | if (!line) { 120 | throw new Error("Expected JSON line, but got end of file"); 121 | } 122 | return JSON.parse(line); 123 | } 124 | } --------------------------------------------------------------------------------
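
As a closing illustration of how the serializer and deserializer shown above fit together, here is a small round-trip sketch. It uses only the `GraphSerializer`, `GraphDeserializer`, and `FactEnvelope` types that appear in this repository; the package import path is illustrative (the tests use the `@src` alias), and the function itself is not part of the library.

```typescript
import { FactEnvelope, GraphDeserializer, GraphSerializer } from "jinaga";

// Serialize envelopes to the line-oriented graph format, then feed those
// lines back through the deserializer. Hashes in the result are recomputed
// by the deserializer from fields and predecessors, not copied verbatim.
async function roundTrip(envelopes: FactEnvelope[]): Promise<FactEnvelope[]> {
    // Collect the serialized text.
    let text = "";
    const serializer = new GraphSerializer(chunk => {
        text += chunk;
    });
    serializer.serialize(envelopes);

    // Replay the text one line at a time, the way the deserializer expects.
    const lines = text.split("\n");
    if (lines[lines.length - 1] === "") {
        lines.pop();
    }
    const readLine = async () => {
        const line = lines.shift();
        return line !== undefined ? line : null;
    };

    const result: FactEnvelope[] = [];
    const deserializer = new GraphDeserializer(readLine);
    await deserializer.read(async batch => {
        result.push(...batch);
    });
    return result;
}
```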