├── .github └── workflows │ ├── main.yml │ ├── publish-github.yml │ └── publish.yml ├── .gitignore ├── .npmignore ├── .npmrc ├── .roomodes ├── LICENSE ├── README.md ├── SECURITY.md ├── contributing.md ├── documentation ├── authorization.md ├── diagrams │ ├── blog.gv │ ├── blog.png │ ├── blogCommentRule.gv │ ├── blogCommentRule.png │ ├── blogGuestBloggerRule.gv │ ├── blogGuestBloggerRule.png │ ├── blogPostRule.gv │ ├── blogPostRule.png │ ├── observerInitialLoad.gv │ └── observerSubsequentLoad.gv ├── indexeddb-queue.md ├── predecessor.md ├── purge-conditions.md ├── specification.md └── successors.md ├── eslint.config.mjs ├── examples ├── README.md └── blog.http ├── jest.config.cjs ├── package-lock.json ├── package.json ├── scripts └── release.sh ├── src ├── authentication │ ├── authentication-noop.ts │ ├── authentication-offline.ts │ ├── authentication-test.ts │ ├── authentication-web-client.ts │ └── authentication.ts ├── authorization │ ├── authorization-engine.ts │ ├── authorization-noop.ts │ ├── authorization.ts │ └── authorizationRules.ts ├── cryptography │ ├── key-pair.ts │ └── verify.ts ├── distribution │ ├── distribution-engine.ts │ └── distribution-rules.ts ├── fact │ ├── hash.ts │ ├── hydrate.ts │ └── sorter.ts ├── fork │ ├── fork.ts │ ├── pass-through-fork.ts │ ├── persistent-fork.ts │ ├── serialize.ts │ ├── transient-fork.ts │ └── web-client-saver.ts ├── http │ ├── ContentType.ts │ ├── authenticationProvider.ts │ ├── deserializer.ts │ ├── fetch.ts │ ├── httpNetwork.ts │ ├── messageParsers.ts │ ├── messages.ts │ ├── serializer.ts │ └── web-client.ts ├── index.ts ├── indexeddb │ ├── driver.ts │ ├── indexeddb-login-store.ts │ ├── indexeddb-queue.ts │ └── indexeddb-store.ts ├── jinaga-browser.ts ├── jinaga-test.ts ├── jinaga.ts ├── managers │ ├── NetworkManager.ts │ ├── PurgeManager.ts │ ├── QueueProcessor.ts │ └── factManager.ts ├── memory │ └── memory-store.ts ├── model │ └── user.ts ├── observable │ └── observable.ts ├── observer │ ├── observer.ts │ └── subscriber.ts ├── purge │ ├── purgeCompliance.ts │ ├── purgeConditions.ts │ └── validate.ts ├── rules │ └── RuleSet.ts ├── specification │ ├── declaration.ts │ ├── description.ts │ ├── feed-builder.ts │ ├── feed-cache.ts │ ├── inverse.ts │ ├── model.ts │ ├── skeleton.ts │ ├── specification-parser.ts │ ├── specification-runner.ts │ └── specification.ts ├── storage.ts ├── user-identity.ts └── util │ ├── encoding.ts │ ├── fn.ts │ ├── obj.ts │ ├── promise.ts │ └── trace.ts ├── test ├── authorization │ ├── authorizationExampleSpec.ts │ ├── authorizationRulesSpec.ts │ └── authorizationSpecificationSpec.ts ├── blogModel.ts ├── companyModel.ts ├── cryptography │ └── keyPairSpec.ts ├── distribution │ ├── distributionDescriptionSpec.ts │ └── distributionRuleSpec.ts ├── fact │ ├── factSpec.ts │ ├── knownHashSpec.ts │ └── sorterSpec.ts ├── http │ ├── deserializerSpec.ts │ └── serializerSpec.ts ├── indexeddb │ ├── indexeddbQueueSpec.ts │ └── indexeddbQueueTopologicalSpec.ts ├── managers │ └── QueueProcessorSpec.ts ├── orderModel.ts ├── purge │ ├── purgeConditionSpec.ts │ └── realTimePurgeSpec.ts ├── rules │ └── RuleSetSpec.ts ├── single-use │ ├── singleUseForkSpec.ts │ └── singleUseStoreSpec.ts ├── specification │ ├── feedBuilderSpec.ts │ ├── givenSpec.ts │ ├── inverseSpec.ts │ ├── querySpec.ts │ ├── skeletonSpec.ts │ ├── specificationSpec.ts │ ├── splitSpecificationSpec.ts │ ├── versioningSpec.ts │ └── watchSpec.ts └── storage │ └── referenceSpec.ts ├── tsconfig.cjs.json ├── tsconfig.json ├── tsconfig.test.json └── types └── keypair.d.ts 
/.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | # This is a basic workflow to help you get started with Actions 2 | 3 | name: CI 4 | 5 | # Controls when the workflow will run 6 | on: 7 | # Triggers the workflow on push or pull request events but only for the main branch 8 | push: 9 | branches: [ main ] 10 | pull_request: 11 | branches: [ main ] 12 | 13 | # Allows you to run this workflow manually from the Actions tab 14 | workflow_dispatch: 15 | 16 | # A workflow run is made up of one or more jobs that can run sequentially or in parallel 17 | jobs: 18 | # This workflow contains a single job called "build" 19 | build: 20 | # The type of runner that the job will run on 21 | runs-on: ubuntu-latest 22 | 23 | # Steps represent a sequence of tasks that will be executed as part of the job 24 | steps: 25 | # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it 26 | - uses: actions/checkout@v4 27 | 28 | - uses: actions/setup-node@v4 29 | with: 30 | node-version: '20.x' 31 | registry-url: 'https://registry.npmjs.org' 32 | 33 | - name: Build and Test 34 | run: | 35 | npm ci 36 | npm run build 37 | npm test 38 | -------------------------------------------------------------------------------- /.github/workflows/publish-github.yml: -------------------------------------------------------------------------------- 1 | name: Publish to GitHub Registry 2 | on: 3 | workflow_dispatch: 4 | jobs: 5 | publish: 6 | runs-on: ubuntu-latest 7 | steps: 8 | - uses: actions/checkout@v4 9 | with: 10 | fetch-depth: 0 11 | - uses: actions/setup-node@v4 12 | with: 13 | node-version: '20.x' 14 | registry-url: 'https://npm.pkg.github.com' 15 | - run: npm install 16 | - name: Generate Prerelease Version 17 | run: | 18 | CURRENT_VERSION=$(node -p "require('./package.json').version") 19 | LAST_TAG=$(git describe --tags --abbrev=0) 20 | COMMIT_COUNT=$(git rev-list --count ${LAST_TAG}..HEAD) 21 | PRERELEASE_VERSION=$(node -p "require('semver').inc('$CURRENT_VERSION', 'prerelease', 'beta.' 
+ '$COMMIT_COUNT').replace(/\.0$/, '')") 22 | npm version $PRERELEASE_VERSION --no-git-tag-version 23 | - run: npm publish --registry=https://npm.pkg.github.com 24 | env: 25 | NODE_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }} -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: Node.js Package 2 | on: 3 | release: 4 | types: [created] 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/checkout@v4 10 | # Setup .npmrc file to publish to npm 11 | - uses: actions/setup-node@v4 12 | with: 13 | node-version: '20.x' 14 | registry-url: 'https://registry.npmjs.org' 15 | - run: npm install 16 | - run: npm publish 17 | env: 18 | NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} 19 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | .idea/ 3 | dist/ 4 | .vscode 5 | npm-debug.log 6 | .DS_Store 7 | .env.local 8 | jinaga-*.tgz 9 | dist-cjs/ 10 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jinaga/jinaga.js/c66a365d7a65f8d13a28b44363da393e73548902/.npmignore -------------------------------------------------------------------------------- /.npmrc: -------------------------------------------------------------------------------- 1 | @jinaga:registry=https://npm.pkg.github.com 2 | registry=https://registry.npmjs.org 3 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2022 Michael L Perry 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | 23 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Jinaga 2 | 3 | End-to-end application state management framework. 4 | 5 | Add Jinaga.JS to a client app and point it at a Replicator. 6 | Updates are sent to the Replicator as the user works with the app. 7 | Any changes that the app needs are pulled from the Replicator. 
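As a quick taste of that flow, here is a minimal sketch. The `Tag` fact type is a hypothetical placeholder for illustration, and the endpoint matches the Replicator configuration shown under Running a Replicator below.

```typescript
import { JinagaBrowser } from "jinaga";

// Hypothetical fact type, for illustration only.
class Tag {
    static Type = "Example.Tag" as const;
    type = Tag.Type;
    constructor(public name: string) { }
}

const j = JinagaBrowser.create({
    httpEndpoint: "http://localhost:8080/jinaga"
});

// Recording a fact sends the update to the Replicator; queries and
// watches pull back down any facts the app needs.
await j.fact(new Tag("getting-started"));
```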
8 | 9 | ## Install 10 | 11 | Install Jinaga.JS from the NPM package. 12 | 13 | ```bash 14 | npm i jinaga 15 | ``` 16 | 17 | This installs just the client side components. 18 | See [jinaga.com](https://jinaga.com) for details on how to use them. 19 | 20 | ## Running a Replicator 21 | 22 | A Jinaga front end connects to a device called a Replicator. 23 | The Jinaga Replicator is a single machine in a network. 24 | It stores and shares facts. 25 | To get started, create a Replicator of your very own using [Docker](https://www.docker.com/products/docker-desktop/). 26 | 27 | ``` 28 | docker pull jinaga/jinaga-replicator 29 | docker run --name my-replicator -p8080:8080 jinaga/jinaga-replicator 30 | ``` 31 | 32 | This creates and starts a new container called `my-replicator`. 33 | The container is listening at port 8080 for commands. 34 | Configure Jinaga to use the replicator: 35 | 36 | ```typescript 37 | import { JinagaBrowser } from "jinaga"; 38 | 39 | export const j = JinagaBrowser.create({ 40 | httpEndpoint: "http://localhost:8080/jinaga" 41 | }); 42 | ``` 43 | 44 | ## Breaking Changes 45 | 46 | If you are upgrading from an older version, you may need to update your code. 47 | 48 | ### Changes in version 4.0.0 49 | 50 | In version 4.0.0, the server side code has been moved to a separate package. 51 | This allows you to build a client using Create React App and connect it to a Replicator. 52 | 53 | When upgrading, take the following steps: 54 | - Install the `jinaga-server` package. 55 | - Remove the 'jinaga' alias from 'webpack.config.js'. 56 | - Import `JinagaServer` from 'jinaga-server'. 57 | - Rename any references of `Specification` to `SpecificationOf`, and `Condition` to `ConditionOf`. These are used as return types of specification functions. It is uncommon to be explicit about them. 58 | 59 | ### Changes in version 3.1.0 60 | 61 | The name of the client-side script changed from `jinaga.js` to `jinaga-client.js`. 62 | In `webpack.config.js`, update the `jinaga` alias from `jinaga/dist/jinaga` to `jinaga/dist/jinaga-client`. 63 | 64 | ### Changes in version 3.0.0 65 | 66 | In version 3 of Jinaga.JS, the `has` function takes two parameters. 67 | The second is the name of the predecessor type. 68 | In version 2, the function took only one parameter: the field name. 69 | 70 | To upgrade, change this: 71 | 72 | ```javascript 73 | function assignmentUser(assignment) { 74 | ensure(assignment).has("user"); 75 | return j.match(assignment.user); 76 | } 77 | ``` 78 | 79 | To this: 80 | 81 | ```javascript 82 | function assignmentUser(assignment) { 83 | ensure(assignment).has("user", "Jinaga.User"); 84 | return j.match(assignment.user); 85 | } 86 | ``` 87 | 88 | ## Build 89 | 90 | To build Jinaga.JS, you will need Node 16. 91 | 92 | ```bash 93 | npm ci 94 | npm run build 95 | npm test 96 | ``` 97 | 98 | ## Release 99 | 100 | To release a new version of Jinaga.JS, bump the version number, create and push a tag, 101 | and create a release. The GitHub Actions workflow will build and publish the package. 
102 | 103 | ```bash 104 | git c main 105 | git pull 106 | npm version patch 107 | git push --follow-tags 108 | gh release create v$(node -p "require('./package.json').version") --generate-notes --verify-tag 109 | ``` -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Supported Versions 4 | 5 | | Version | Supported | 6 | | ------- | ------------------ | 7 | | 2.5.x | :white_check_mark: | 8 | | < 2.5 | :x: | 9 | 10 | ## Reporting a Vulnerability 11 | 12 | Please report vulnerabilities to michael@qedcode.com. 13 | Expect a confirmation of receipt within 24 hours, and analysis within 72 hours. 14 | If the issue cannot be resolved within that timeframe, we will coordinate a disclosure schedule. 15 | 16 | -------------------------------------------------------------------------------- /contributing.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | I have a vision for historical modeling, but I need your help to realize this vision in JavaScript. Here are some of the ways you can help. 4 | 5 | ## Building 6 | 7 | Clone the repository and build the code. You will need to: 8 | 9 | Once: 10 | 11 | - Install [node](https://nodejs.org/) 12 | 13 | Each time you pull: 14 | 15 | - Run `npm ci` 16 | 17 | As you work: 18 | 19 | - Run `npm run test:watch` to compile and run the tests during development 20 | 21 | ## Testing 22 | 23 | Follow the instructions on [jinaga.com](https://jinaga.com) to create a new app. 24 | Let me know what issues you find. 25 | Open [issues](https://github.com/jinaga/jinaga/issues) in this repository. 26 | 27 | ## Recommendations 28 | 29 | Create [issues](https://github.com/jinaga/jinaga/issues) for anything you would like to see changed. We will discuss it in the public arena. Keep in mind, however, that recommendations for changes are to be defended. Be prepared to provide evidence that it makes the system more resilient, more secure, or easier to use. Personal preference is not evidence. 30 | 31 | ## Sending pull requests 32 | 33 | Before making large changes, please let me know what you are working on. Comment on an issue, or open a new one. 34 | 35 | Clone the repository in GitHub. Create a branch. Make your changes. Submit a pull request from your working branch against master. 36 | 37 | If you prefer some other repository host, please open an issue and paste in your git URL and working branch name. 38 | 39 | ## Asking questions 40 | 41 | Reach out to me on Twitter [@michaellperry](https://twitter.com/michaellperry). Ask me about the library in particular, or historical modeling in general. 42 | 43 | ## Spreading the word 44 | 45 | When you talk about the project, you can send people to http://jinaga.com. That will take them to this repository until I create a home page for the library. Then it will take them to documentation on getting started and a reference manual. 46 | 47 | When you talk about historical modeling in general, you can send people to https://immutablearchitecture.com. The book The Art of Immutable Architecture describes the principles of building distributed systems using immutable data structures. Jinaga is a realization of those principles. 
-------------------------------------------------------------------------------- /documentation/authorization.md: -------------------------------------------------------------------------------- 1 | # Authorization 2 | 3 | Authorization in Jinaga answers the question "can this user assert this fact?" 4 | Given a fact, an authorization rule returns the set of all users who are 5 | permitted to create it. 6 | 7 | ## The Blog Example 8 | 9 | Let's look at an example. Suppose we have a blog application. We want to 10 | allow the creator of the blog site to create posts. We also want to allow 11 | anyone to create comments on posts. A comment is attributed to its author, 12 | so we want assurance that the attributed author is the one who created 13 | the comment. 14 | 15 | ![Blog model](./diagrams/blog.png) 16 | 17 | To express the first rule, we write a specification that takes a Post and 18 | returns the creator of the blog site. 19 | 20 | ``` 21 | (post: Post) { 22 | user: User [ 23 | user = post->site: Site->creator: User 24 | ] 25 | } => user 26 | ``` 27 | 28 | To express the second rule, we write a specification that takes a Comment 29 | and returns the author of the comment. 30 | 31 | ``` 32 | (comment: Comment) { 33 | user: User [ 34 | user = comment->author: User 35 | ] 36 | } => user 37 | ``` 38 | 39 | ## Authorization Rules 40 | 41 | When the server receives a fact, it runs the authorization rules starting 42 | from that fact. If the user is in the set of users returned by any of the 43 | rules, then the user is authorized to create the fact. 44 | 45 | So when the server receives a Post, it runs the first rule. 46 | 47 | ![From a Post to the creator of the blog site](./diagrams/blogPostRule.png) 48 | 49 | If the user who submitted the Post is the creator of the site, then the 50 | fact is accepted. Otherwise, the fact is rejected. 51 | 52 | When the server receives a Comment, it runs the second rule. 53 | 54 | ![From a Comment to the author of the comment](./diagrams/blogCommentRule.png) 55 | 56 | If the user who submitted the Comment is the author of the comment, then 57 | the fact is accepted. Otherwise, the fact is rejected. This rule will 58 | not allow a user to impersonate another user in a comment. 59 | 60 | ## Predecessors and Successors 61 | 62 | The example rules that we examined above both start with a fact and return 63 | predecessors. But some rules can also include successors. For example, we 64 | might want to allow the creator of a site to invite a guest blogger. The 65 | guest blogger would be allowed to create a Post. 66 | 67 | The following rule allows a guest blogger to create a Post. 68 | 69 | ``` 70 | (post: Post) { 71 | guestBlogger: GuestBlogger [ 72 | guestBlogger->site: Site = post->site: Site 73 | ] 74 | user: User [ 75 | user = guestBlogger->user: User 76 | ] 77 | } => user 78 | ``` 79 | 80 | It follows a path from the Post up to the Site, and then from the Site 81 | down to the GuestBlogger. Then it bounces back up to the User. 82 | 83 | ![From a Post to the guest blogger](./diagrams/blogGuestBloggerRule.png) 84 | 85 | The first step of this rule is a predecessor step. It follows the edge 86 | from the Post that was submitted up to the Site. As we generalize over 87 | authorization rules, we find that *all* rules must begin with a predecessor 88 | step. If they started with a successor step, then the rule would be 89 | unsatisfiable. The submitted fact is not yet in the database. It 90 | certainly cannot have any successors yet. 
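For comparison with the specification text above, here is a sketch of the same guest blogger path written against a TypeScript model, using the `given`/`match` API shown in the other documents in this folder. It assumes `Post`, `Site`, `GuestBlogger`, and `User` fact classes and a `model` built with `buildModel`; how such a specification is wired into the authorization rules is not shown here.

```typescript
// Sketch only: assumes fact classes and a model as in the other documentation examples.
const usersAllowedToPost = model.given(Post).match((post, facts) =>
    facts.ofType(GuestBlogger)
        // Successor step: guest bloggers invited to the post's site.
        .join(guestBlogger => guestBlogger.site, post.site)
        // Back up to the invited user.
        .selectMany(guestBlogger => guestBlogger.user.predecessor())
);
```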
91 | 92 | ## Splitting Rules 93 | 94 | Taking advantage of this observation, we define the following algorithm. 95 | The server splits each authorization rule into two halves. The first half 96 | contains only predecessor steps. The second half contains the rest of the 97 | steps. The server runs the first half -- the *head* -- on the transitive 98 | closure of the submitted fact. It runs the second half -- the *tail* -- 99 | on the database. 100 | 101 | The transitive closure of a fact includes its predecessors, all of 102 | *their* predecessors, and so on up to the root. This guarantees that 103 | the head can be executed on that graph. If *all* of the steps in the rule 104 | are predecessor steps, then there is no tail, and we have the set of users with no additional work. 105 | 106 | If there *is* a tail, then the server runs the tail on the database 107 | starting where the head left off. The edges that the tail traverses are 108 | likely not in the transitive closure. It therefore needs to be run on the 109 | database. 110 | 111 | The server cannot start the query on the database. The database does 112 | not yet contain the submitted fact. So the server must first take at least 113 | one predecessor step to find a good starting point. It then runs the tail 114 | on the database starting from that point. 115 | 116 | ## Race Conditions 117 | 118 | Authorization rules with successor steps are the source of the only 119 | race conditions that occur in a historical model. If the server learns 120 | about a fact before it learns about the successors in the authorization 121 | rule, then the server will reject the fact. But if it learns about the 122 | successors before it learns about the fact, then the server will accept 123 | it. 124 | 125 | This problem is compounded when an authorization rule contains a negative 126 | existential condition. More commonly, this is known as revocation. For 127 | example, suppose we want to allow the creator of a site to revoke the 128 | access of a guest blogger. The rule would contain a "not exists" condition. 129 | If the guest blogger got their post in before the revocation, then the 130 | server would accept it. But if they waited until after the revocation, 131 | then the server would reject it. 132 | 133 | Race conditions like these lead to a loss of consistency. Different 134 | replicas may see successor facts in a different order. They will therefore 135 | reach different conclusions about the authorization of a fact. 
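To make the head/tail split described under Splitting Rules concrete, here is a minimal sketch. The `Step` shape is a simplification for illustration only; the actual engine in `src/authorization` and `src/specification` works on full specifications.

```typescript
// Simplified step shape, not the library's actual specification model.
type Step = { kind: "predecessor" | "successor"; role: string };

function splitRule(steps: Step[]): { head: Step[]; tail: Step[] } {
    // The head is the leading run of predecessor steps; it can be evaluated
    // against the transitive closure of the submitted fact. Everything from
    // the first successor step onward is the tail, which must be run against
    // the database starting where the head left off.
    const firstSuccessor = steps.findIndex(step => step.kind === "successor");
    return firstSuccessor < 0
        ? { head: steps, tail: [] }
        : { head: steps.slice(0, firstSuccessor), tail: steps.slice(firstSuccessor) };
}
```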
-------------------------------------------------------------------------------- /documentation/diagrams/blog.gv: -------------------------------------------------------------------------------- 1 | digraph { 2 | rankdir=BT 3 | 4 | Site -> User [label=" creator "] 5 | Post -> Site 6 | Comment -> Post, User [label=" author "] 7 | GuestBlogger -> Site, User 8 | } -------------------------------------------------------------------------------- /documentation/diagrams/blog.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jinaga/jinaga.js/c66a365d7a65f8d13a28b44363da393e73548902/documentation/diagrams/blog.png -------------------------------------------------------------------------------- /documentation/diagrams/blogCommentRule.gv: -------------------------------------------------------------------------------- 1 | digraph { 2 | rankdir=BT 3 | 4 | Site -> User [label=" creator " color=gray fontcolor=gray] 5 | Post -> Site [label=" author " color=gray fontcolor=gray] 6 | Comment -> Post [color=gray fontcolor=gray] 7 | Comment -> User [label=" author "] 8 | GuestBlogger -> Site, User [color=gray] 9 | 10 | Post [color=gray fontcolor=gray] 11 | Site [color=gray fontcolor=gray] 12 | GuestBlogger [color=gray fontcolor=gray] 13 | } -------------------------------------------------------------------------------- /documentation/diagrams/blogCommentRule.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jinaga/jinaga.js/c66a365d7a65f8d13a28b44363da393e73548902/documentation/diagrams/blogCommentRule.png -------------------------------------------------------------------------------- /documentation/diagrams/blogGuestBloggerRule.gv: -------------------------------------------------------------------------------- 1 | digraph { 2 | rankdir=BT 3 | 4 | Site -> User [label=" creator " color=gray fontcolor=gray] 5 | Post -> Site 6 | Comment -> Post, User [label=" author " color=gray fontcolor=gray] 7 | GuestBlogger -> Site, User 8 | 9 | Comment [color=gray fontcolor=gray] 10 | } -------------------------------------------------------------------------------- /documentation/diagrams/blogGuestBloggerRule.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jinaga/jinaga.js/c66a365d7a65f8d13a28b44363da393e73548902/documentation/diagrams/blogGuestBloggerRule.png -------------------------------------------------------------------------------- /documentation/diagrams/blogPostRule.gv: -------------------------------------------------------------------------------- 1 | digraph { 2 | rankdir=BT 3 | 4 | Site -> User [label=" creator "] 5 | Post -> Site 6 | Comment -> Post, User [label=" author " color=gray fontcolor=gray] 7 | GuestBlogger -> Site, User [color=gray] 8 | 9 | Comment [color=gray fontcolor=gray] 10 | GuestBlogger [color=gray fontcolor=gray] 11 | } -------------------------------------------------------------------------------- /documentation/diagrams/blogPostRule.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jinaga/jinaga.js/c66a365d7a65f8d13a28b44363da393e73548902/documentation/diagrams/blogPostRule.png -------------------------------------------------------------------------------- /documentation/diagrams/observerInitialLoad.gv: -------------------------------------------------------------------------------- 1 | digraph { 2 | 
rankdir=TB; 3 | 4 | Uninitialized -> Loading [label=" immediate"] 5 | Loading -> Loaded [label=" fetch and read"] 6 | Loading -> Error [label=" error"] 7 | } -------------------------------------------------------------------------------- /documentation/diagrams/observerSubsequentLoad.gv: -------------------------------------------------------------------------------- 1 | digraph { 2 | rankdir=TB 3 | 4 | Uninitialized -> Loaded [label=" read"] 5 | Loaded -> Loaded [label=" fetch"] 6 | } -------------------------------------------------------------------------------- /documentation/indexeddb-queue.md: -------------------------------------------------------------------------------- 1 | # IndexedDB Queue 2 | 3 | The IndexedDB Queue is an implementation of the Queue interface that uses the browser's IndexedDB API for storage. It provides methods for enqueueing, dequeueing, and peeking at fact envelopes. 4 | 5 | ## Overview 6 | 7 | The IndexedDB Queue ensures that facts are processed in the correct order by maintaining strict topological ordering of facts. This means that for any two facts where one depends on the other, the prerequisite fact will always appear earlier in the list. 8 | 9 | ## Peek Function 10 | 11 | The `peek` function retrieves all fact envelopes currently in the queue, along with their complete transitive closure of predecessors. This ensures that when facts are processed from the queue, all of their dependencies are available. The function returns the envelopes in strict topological order, guaranteeing that prerequisite facts appear before the facts that depend on them. 12 | 13 | ```typescript 14 | peek(): Promise<FactEnvelope[]> 15 | ``` 16 | 17 | ### Return Value 18 | 19 | The function returns a Promise that resolves to an array of FactEnvelope objects. This array includes: 20 | 21 | 1. All fact envelopes currently in the queue 22 | 2. All transitive predecessors of those facts (the complete ancestor chain) 23 | 24 | ### Implementation Details 25 | 26 | The function: 27 | 28 | 1. Opens a transaction on the 'queue', 'fact', and 'ancestor' object stores 29 | 2. Retrieves all fact envelopes from the queue 30 | 3. For each envelope, retrieves its ancestors from the ancestor table 31 | 4. For each ancestor, retrieves the corresponding fact from the fact table 32 | 5. Creates fact envelopes for all ancestors 33 | 6. Sorts the combined array of envelopes in topological order 34 | 7. Validates the topological ordering to ensure correctness 35 | 8.
Returns the sorted array of envelopes 36 | 37 | ### Performance Considerations 38 | 39 | - The function uses the `distinct` utility to remove duplicate ancestors, ensuring that each fact is only included once in the result 40 | - The function uses the `TopologicalSorter` to sort facts in topological order 41 | - The function validates the topological ordering to ensure that prerequisite facts appear before the facts that depend on them 42 | - The function detects and reports circular dependencies and other ordering violations 43 | - Facts that are already in the queue are not duplicated in the ancestor list 44 | - The implementation efficiently handles potentially large ancestor sets 45 | 46 | ### Example Usage 47 | 48 | ```typescript 49 | const queue = new IndexedDBQueue('my-index'); 50 | 51 | // Peek at the queue 52 | const envelopes = await queue.peek(); 53 | 54 | // Process the envelopes (guaranteed to be in topological order) 55 | for (const envelope of envelopes) { 56 | // Process each fact envelope 57 | // All predecessors are guaranteed to be included in the array 58 | // and to appear before the facts that depend on them 59 | } 60 | ``` 61 | 62 | ## Other Queue Methods 63 | 64 | ### Enqueue 65 | 66 | ```typescript 67 | enqueue(envelopes: FactEnvelope[]): Promise 68 | ``` 69 | 70 | Adds fact envelopes to the queue for later processing. 71 | 72 | ## Error Handling 73 | 74 | The `peek` function includes comprehensive validation logic to ensure the correctness of the topological ordering: 75 | 76 | 1. **Circular Dependencies**: If a circular dependency is detected (where fact A depends on fact B, which depends on fact C, which depends on fact A), the function will throw an error with a detailed message. 77 | 78 | 2. **Missing Prerequisites**: If a fact depends on a prerequisite that is not included in the result, the function will throw an error identifying the missing prerequisite. 79 | 80 | 3. **Topological Ordering Violations**: If the topological ordering is violated (where a fact appears before one of its prerequisites), the function will throw an error with details about the specific violation. 81 | 82 | These error messages provide detailed information to help diagnose and fix issues with the fact dependency graph. 83 | 84 | ### Dequeue 85 | 86 | ```typescript 87 | dequeue(envelopes: FactEnvelope[]): Promise 88 | ``` 89 | 90 | Removes fact envelopes from the queue after they have been processed. -------------------------------------------------------------------------------- /documentation/predecessor.md: -------------------------------------------------------------------------------- 1 | # Querying for `predecessor` 2 | 3 | The `predecessor()` method allows you to navigate from a fact to its direct predecessor in a specification. This provides a convenient way to traverse relationships in the reverse direction. 4 | 5 | ## Example of Using the `predecessor` Method 6 | 7 | In the context of a company model, you can use the `predecessor()` method to find the company that an office belongs to: 8 | 9 | ```typescript 10 | const specification = model.given(Office).match(office => 11 | office.company.predecessor() 12 | ); 13 | 14 | const result = await j.query(specification, office); 15 | ``` 16 | 17 | In this example, the `predecessor()` method is used to navigate from an office to its company. 
18 | 19 | ## Compared to the `join` Method 20 | 21 | The alternative to the `predecessor()` syntax in Jinaga is to use the `join` method with `facts.ofType()`: 22 | 23 | ```typescript 24 | const specification = model.given(Office).match((office, facts) => 25 | facts.ofType(Company) 26 | .join(company => company, office.company) 27 | ); 28 | 29 | const result = await j.query(specification, office); 30 | ``` 31 | 32 | The `predecessor()` method provides a more concise and readable way to express the same query. 33 | 34 | ## Using the `predecessor` Method with Projections 35 | 36 | You can use the `predecessor()` method with projections to select specific fields or create composite results: 37 | 38 | ```typescript 39 | const specification = model.given(Office).match(office => 40 | office.company.predecessor() 41 | .select(company => company.identifier) 42 | ); 43 | 44 | const result = await j.query(specification, office); 45 | ``` 46 | 47 | For composite projections that include predecessor relationships, you need to properly label the predecessor facts: 48 | 49 | ```typescript 50 | const specification = model.given(Office).match((office, facts) => 51 | office.company.predecessor() 52 | .select(company => ({ 53 | identifier: company.identifier, 54 | creator: facts.ofType(User) 55 | .join(user => user, company.creator) 56 | })) 57 | ); 58 | 59 | const result = await j.query(specification, office); 60 | ``` 61 | 62 | ## Chaining Predecessor Calls 63 | 64 | You can chain multiple `predecessor()` calls to navigate through multiple levels of relationships: 65 | 66 | ```typescript 67 | const specification = model.given(President).match(president => 68 | president.office.company.predecessor() 69 | ); 70 | 71 | const result = await j.query(specification, president); 72 | ``` 73 | 74 | This example navigates from a president to their office, and then to the company of that office. 75 | 76 | ## Combining with Existential Conditions 77 | 78 | The `predecessor()` method can be used with existential conditions: 79 | 80 | ```typescript 81 | const specification = model.given(OfficeClosed).match(officeClosed => 82 | officeClosed.office.predecessor() 83 | .exists(office => office.company.predecessor()) 84 | ); 85 | 86 | const result = await j.query(specification, officeClosed); 87 | ``` 88 | 89 | ## Combining with Successors 90 | 91 | You can combine `predecessor()` and `successors()` methods in the same query: 92 | 93 | ```typescript 94 | const specification = model.given(Company).match(company => 95 | company.successors(Office, office => office.company) 96 | .select(office => ({ 97 | identifier: office.identifier, 98 | presidents: office.successors(President, president => president.office) 99 | .selectMany(president => president.user.predecessor() 100 | .select(user => ({ 101 | user: user, 102 | names: user.successors(UserName, userName => userName.user) 103 | .select(userName => userName.value) 104 | })) 105 | ) 106 | })) 107 | ); 108 | 109 | const result = await j.query(specification, company); 110 | ``` 111 | 112 | This example shows how to navigate from a company to its offices (using `successors`), then to the presidents of those offices (using `successors`), then to the users who are those presidents (using `predecessor`), and finally to the names of those users (using `successors`). 
113 | -------------------------------------------------------------------------------- /documentation/purge-conditions.md: -------------------------------------------------------------------------------- 1 | # Purge Conditions 2 | 3 | A Jinaga replica is an immutable database. 4 | It typically does not allow for updates or deletions. 5 | 6 | However, there is a way to purge data from a replica. 7 | If you can prove that the data will have no effect on the results of any specification, then the runtime will purge it. 8 | Purge conditions are how you provide that proof. 9 | 10 | ## Example: Contact List 11 | 12 | Declare purge conditions when defining a model. 13 | For example, consider a model that describes a contact list. 14 | 15 | A list belongs to a user. 16 | A list contains contacts. 17 | A contact has a name and an email address. 18 | 19 | ```typescript 20 | export class List { 21 | static Type = "CRM.List" as const; 22 | type = List.Type; 23 | 24 | constructor( 25 | public owner: User, 26 | public uuid: string 27 | ) { } 28 | } 29 | 30 | export class Contact { 31 | static Type = "CRM.Contact" as const; 32 | type = Contact.Type; 33 | 34 | constructor( 35 | public list: List, 36 | public createdAt: Date | string 37 | ) { } 38 | } 39 | 40 | export class Name { 41 | static Type = "CRM.Contact.Name" as const; 42 | type = Name.Type; 43 | 44 | constructor( 45 | public contact: Contact, 46 | public value: string, 47 | public prior: Name[] 48 | ) { } 49 | } 50 | 51 | export class Email { 52 | static Type = "CRM.Contact.Email" as const; 53 | type = Email.Type; 54 | 55 | constructor( 56 | public contact: Contact, 57 | public value: string, 58 | public prior: Email[] 59 | ) { } 60 | } 61 | ``` 62 | 63 | Suppose we wanted to delete a contact from the list. 64 | We could express that as a `Contact.Deleted` fact. 65 | 66 | ```typescript 67 | export class ContactDeleted { 68 | static Type = "CRM.Contact.Deleted" as const; 69 | type = ContactDeleted.Type; 70 | 71 | constructor( 72 | public contact: Contact 73 | ) { } 74 | } 75 | ``` 76 | 77 | ### Build a Model 78 | 79 | With all of the fact types defined, we can build a model that lets us write specifications. 80 | 81 | ```typescript 82 | const model = buildModel(m => m 83 | .type(List, x => x 84 | .predecessor("owner", User) 85 | ) 86 | .type(Contact, x => x 87 | .predecessor("list", List) 88 | ) 89 | .type(ContactDeleted, x => x 90 | .predecessor("contact", Contact) 91 | ) 92 | .type(Name, x => x 93 | .predecessor("contact", Contact) 94 | .predecessor("prior", Name) 95 | ) 96 | .type(Email, x => x 97 | .predecessor("contact", Contact) 98 | .predecessor("prior", Email) 99 | ) 100 | ); 101 | ``` 102 | 103 | ### Write a Specification 104 | 105 | To show all of the contacts in a list, we would write a specification.
106 | 107 | ```typescript 108 | const contactsInList = model.given(List).match((list, facts) => 109 | facts.ofType(Contact) 110 | .join(contact => contact.list, list) 111 | .notExists(contact => 112 | facts.ofType(ContactDeleted) 113 | .join(contactDeleted => contactDeleted.contact, contact)) 114 | .select(contact => ({ 115 | contact, 116 | name: facts.ofType(Name) 117 | .join(name => name.contact, contact) 118 | .notExists(name => 119 | facts.ofType(Name) 120 | .join(next => next.prior, name) 121 | ) 122 | .select(name => name.value), 123 | email: facts.ofType(Email) 124 | .join(email => email.contact, contact) 125 | .notExists(email => 126 | facts.ofType(Email) 127 | .join(next => next.prior, email) 128 | ) 129 | .select(email => email.value) 130 | })) 131 | ); 132 | ``` 133 | 134 | Notice how the specification excludes contacts that have been deleted. 135 | If all specifications did so, then we could safely purge information about deleted contacts from the replica. 136 | 137 | ### Declare Purge Conditions 138 | 139 | To declare purge conditions, write a function that takes a `PurgeConditions` object and adds conditions to it. 140 | 141 | ```typescript 142 | const purgeConditions = (p: PurgeConditions) => p 143 | .whenExists(model.given(Contact).match((contact, facts) => 144 | facts.ofType(ContactDeleted) 145 | .join(contactDeleted => contactDeleted.contact, contact) 146 | )); 147 | ``` 148 | 149 | Use that function when initializing the Jinaga client. 150 | 151 | ```typescript 152 | const j = JinagaClient.create({ 153 | purgeConditions 154 | }); 155 | ``` 156 | 157 | The effect of this declaration is that when the application uses this Jinaga client to query, watch, or subscribe to a specification, the runtime will verify that the purge conditions are included. 158 | If a specification matches a `Contact` and does not include `notExists` for `ContactDeleted`, then the runtime will throw an exception. 159 | This proves that no specification will return information about a deleted contact. 160 | 161 | ### Purge the Data 162 | 163 | To purge the data, call the `purge` method on the Jinaga client. 164 | 165 | ```typescript 166 | await j.purge(); 167 | ``` 168 | 169 | This will remove all successors of `Contact` facts when a `ContactDeleted` fact exists for that contact. 170 | The runtime must keep the `Contact` and the `ContactDeleted` facts to ensure that the replica doesn't later learn about the deleted contact. 171 | But it can remove the `Name` and `Email` facts for that contact. -------------------------------------------------------------------------------- /documentation/successors.md: -------------------------------------------------------------------------------- 1 | # Querying for `successors` 2 | 3 | As an alternative to the `join` method, you can use the `successors` method to query for facts that are successors of a given fact. 4 | Rather than joining to `facts.ofType(T)`, you can find `successors(T, ...)` directly. 5 | 6 | When using `successors`, it is often possible to remove the `facts` parameter from the match function. 7 | 8 | ## Example of Using the `successors` Method 9 | 10 | In the context of a company model, you can use the `successors` method to find all offices of a company by specifying the relationship between the company and its offices.
Here is an example: 11 | 12 | ```typescript 13 | const specification = model.given(Company).match(company => 14 | company.successors(Office, office => office.company) 15 | ); 16 | 17 | const result = await j.query(specification, company); 18 | ``` 19 | 20 | In this example, the `successors` method is used to find all offices of a company by specifying the relationship between the company and its offices. 21 | 22 | ## Compared to the `join` Method 23 | 24 | The alternative to the `successors` syntax in Jinaga is to use the `join` method. 25 | Here is that same query expressed using the `join` method: 26 | 27 | ```typescript 28 | const specification = model.given(Company).match((company, facts) => 29 | facts.ofType(Office) 30 | .join(office => office.company, company) 31 | ); 32 | 33 | const result = await j.query(specification, company); 34 | ``` 35 | 36 | Notice that we need to pass the `facts` parameter to the match function when using the `join` method. 37 | Then we use `facts.ofType(Office)` to find all offices of the company, and `join` to specify the relationship between the company and its offices. 38 | 39 | ## Using the `successors` Method with Composite Projections 40 | 41 | Composite projections allow you to define a structure for the results of a query. 42 | You can define nested projections and collections. 43 | The `successors` method can be used within composite projections. 44 | Here is an example: 45 | 46 | ```typescript 47 | const specification = model.given(Company).match(company => 48 | company.successors(Office, office => office.company) 49 | .select(office => ({ 50 | identifier: office.identifier, 51 | employees: office.successors(Employee, employee => employee.office) 52 | })) 53 | ); 54 | 55 | const result = await j.query(specification, company); 56 | ``` 57 | 58 | In this example, the `successors` method is used to find all offices of a company and include additional information about each office, such as its employees, in the projection. 59 | -------------------------------------------------------------------------------- /eslint.config.mjs: -------------------------------------------------------------------------------- 1 | import globals from "globals"; 2 | import tseslint from "typescript-eslint"; 3 | 4 | export default [ 5 | { 6 | files: ["**/*.{js,mjs,cjs,ts}"], 7 | languageOptions: { globals: globals.browser }, 8 | ...tseslint.configs.recommended, 9 | }, 10 | ]; 11 | -------------------------------------------------------------------------------- /examples/README.md: -------------------------------------------------------------------------------- 1 | # Examples 2 | 3 | To run examples, install the [httpYac](https://marketplace.visualstudio.com/items?itemName=anweber.vscode-httpyac) extension. 4 | 5 | Create a replicator. 6 | When you set up authentication, make sure to add the callback URL `http://localhost:3000/callback` 7 | 8 | Create a file called `.env.local` in the `examples` directory. Enter the URL of a replicator. 9 | 10 | ``` 11 | replicatorUrl=https://repdev.jinaga.com/xxxxxxxxxxxxxxx 12 | oauth2_tokenEndpoint=https://repdev.jinaga.com/xxxxxxxxxxxxxxx/auth/token 13 | oauth2_authorizationEndpoint=https://repdev.jinaga.com/xxxxxxxxxxxxxxx/auth/apple 14 | oauth2_clientId=xxxxxxxxxxxxxxx 15 | oauth2_usePkce=true 16 | ``` 17 | 18 | Then choose your favorite example and run it. 
-------------------------------------------------------------------------------- /examples/blog.http: -------------------------------------------------------------------------------- 1 | POST {{replicatorUrl}}/write HTTP/1.1 2 | Authorization: oauth2 authorization_code 3 | Content-Type: text/plain 4 | 5 | let creator: Jinaga.User = me 6 | let site: Blog.Site = {creator, domain: "michaelperry.net"} 7 | 8 | let post: Blog.Post = { site, author: creator, createdAt: "2023-06-22T13:36:00.000Z" } 9 | let publish: Blog.Post.Publish = { post, date: "2023-06-22T13:37:00.000Z" } -------------------------------------------------------------------------------- /jest.config.cjs: -------------------------------------------------------------------------------- 1 | /** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */ 2 | module.exports = { 3 | preset: 'ts-jest', 4 | testEnvironment: 'node', 5 | testMatch: [ 6 | '**/test/**/*Spec.ts', 7 | ], 8 | reporters: ['jest-progress-bar-reporter'], 9 | }; -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "jinaga", 3 | "version": "6.7.6", 4 | "description": "Data management for web and mobile applications.", 5 | "keywords": [ 6 | "state", 7 | "immutable", 8 | "middleware", 9 | "api", 10 | "offline", 11 | "pwa" 12 | ], 13 | "homepage": "http://jinaga.com", 14 | "bugs": { 15 | "url": "https://github.com/jinaga/jinaga.js/issues" 16 | }, 17 | "repository": { 18 | "type": "git", 19 | "url": "https://github.com/jinaga/jinaga.js.git" 20 | }, 21 | "license": "MIT", 22 | "author": "Michael L Perry", 23 | "main": "dist/index.cjs", 24 | "module": "dist/index.js", 25 | "types": "dist/index.d.ts", 26 | "type": "module", 27 | "exports": { 28 | ".": { 29 | "import": "./dist/index.js", 30 | "require": "./dist/index.cjs" 31 | } 32 | }, 33 | "scripts": { 34 | "build": "npm run build:esm && npm run build:cjs && npm run build:finalize", 35 | "build:esm": "tsc", 36 | "build:cjs": "tsc -p tsconfig.cjs.json", 37 | "build:finalize": "shx cp dist-cjs/index.js dist/index.cjs && shx rm -rf dist-cjs", 38 | "clean": "shx rm -rf dist/ dist-cjs/ integration-test/jinaga-test/jinaga/ integration-test/jinaga-test/node_modules/", 39 | "prepack": "npm run clean && npm run build && npm run test", 40 | "prepublishOnly": "npm run clean && npm run build && npm run test", 41 | "test": "npx tsc --noEmit --project tsconfig.test.json && jest --config jest.config.cjs", 42 | "test:watch": "jest --watch --config jest.config.cjs" 43 | }, 44 | "files": [ 45 | "dist/", 46 | "src/", 47 | "types/" 48 | ], 49 | "dependencies": { 50 | "@stablelib/base64": "^1.0.1", 51 | "@stablelib/sha512": "^1.0.1", 52 | "@stablelib/utf8": "^1.0.2", 53 | "node-forge": "^1.3.1" 54 | }, 55 | "devDependencies": { 56 | "@types/jest": "^27.5.1", 57 | "@types/node-forge": "^1.3.11", 58 | "eslint": "^9.9.1", 59 | "globals": "^15.9.0", 60 | "jest": "^28.1.0", 61 | "jest-progress-bar-reporter": "^1.0.25", 62 | "shx": "^0.4.0", 63 | "source-map-support": "^0.5.21", 64 | "ts-jest": "^28.0.3", 65 | "typescript": "^4.7.2", 66 | "typescript-eslint": "^8.3.0" 67 | }, 68 | "engines": { 69 | "node": "^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0" 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /scripts/release.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | # The parameter must 
be patch, minor or major 5 | if [ "$1" != "patch" ] && [ "$1" != "minor" ] && [ "$1" != "major" ]; then 6 | echo "Usage: $0 [patch|minor|major]" 7 | exit 1 8 | fi 9 | 10 | git c main 11 | git pull 12 | npm version $1 13 | git push --follow-tags 14 | gh release create v$(node -p "require('./package.json').version") --generate-notes --verify-tag 15 | -------------------------------------------------------------------------------- /src/authentication/authentication-noop.ts: -------------------------------------------------------------------------------- 1 | import { LoginResponse } from "../http/messages"; 2 | import { FactEnvelope, FactRecord } from "../storage"; 3 | import { Authentication } from "./authentication"; 4 | 5 | export class AuthenticationNoOp implements Authentication { 6 | login(): Promise { 7 | throw new Error('No logged in user.'); 8 | } 9 | local(): Promise { 10 | throw new Error('No persistent device.'); 11 | } 12 | authorize(envelopes: FactEnvelope[]): Promise { 13 | return Promise.resolve(envelopes); 14 | } 15 | } -------------------------------------------------------------------------------- /src/authentication/authentication-offline.ts: -------------------------------------------------------------------------------- 1 | import { LoginResponse } from '../http/messages'; 2 | import { WebClient } from '../http/web-client'; 3 | import { IndexedDBLoginStore } from '../indexeddb/indexeddb-login-store'; 4 | import { FactEnvelope, FactRecord } from '../storage'; 5 | import { Authentication } from './authentication'; 6 | 7 | export class AuthenticationOffline implements Authentication { 8 | constructor( 9 | private store: IndexedDBLoginStore, 10 | private client: WebClient 11 | ) { } 12 | 13 | async login() { 14 | try { 15 | return await this.loginRemote(); 16 | } 17 | catch (err) { 18 | if (err === 'Unauthorized') { 19 | throw err; 20 | } 21 | 22 | try { 23 | return await this.loginLocal(); 24 | } 25 | catch (err2) { 26 | throw err; 27 | } 28 | } 29 | } 30 | 31 | local(): Promise { 32 | throw new Error('Local device has no persistence.'); 33 | } 34 | 35 | authorize(envelopes: FactEnvelope[]): Promise { 36 | return Promise.resolve(envelopes); 37 | } 38 | 39 | private async loginRemote() { 40 | const result = await this.client.login(); 41 | if (result && result.userFact && result.profile) { 42 | await this.store.saveLogin('token', result.userFact, result.profile.displayName); 43 | } 44 | return result; 45 | } 46 | 47 | private async loginLocal(): Promise { 48 | const result = await this.store.loadLogin('token'); 49 | return { 50 | userFact: result.userFact, 51 | profile: { 52 | displayName: result.displayName 53 | } 54 | }; 55 | } 56 | } -------------------------------------------------------------------------------- /src/authentication/authentication-test.ts: -------------------------------------------------------------------------------- 1 | import { Authentication } from '../authentication/authentication'; 2 | import { AuthorizationEngine } from '../authorization/authorization-engine'; 3 | import { AuthorizationRules } from '../authorization/authorizationRules'; 4 | import { LoginResponse } from '../http/messages'; 5 | import { FactEnvelope, FactRecord, Storage, factEnvelopeEquals } from '../storage'; 6 | 7 | export class AuthenticationTest implements Authentication { 8 | private authorizationEngine: AuthorizationEngine | null; 9 | 10 | constructor ( 11 | store: Storage, 12 | authorizationRules: AuthorizationRules | null, 13 | private userFact: FactRecord | null, 14 | 
private deviceFact: FactRecord | null 15 | ) { 16 | this.authorizationEngine = authorizationRules && 17 | new AuthorizationEngine(authorizationRules, store); 18 | } 19 | 20 | async login() { 21 | if (!this.userFact) { 22 | throw new Error("No logged in user."); 23 | } 24 | 25 | return { 26 | userFact: this.userFact, 27 | profile: { 28 | displayName: "Test user" 29 | } 30 | }; 31 | } 32 | 33 | async local() { 34 | if (!this.deviceFact) { 35 | throw new Error("No persistent device."); 36 | } 37 | 38 | return this.deviceFact; 39 | } 40 | 41 | async authorize(envelopes: FactEnvelope[]): Promise { 42 | if (this.authorizationEngine) { 43 | const results = await this.authorizationEngine.authorizeFacts(envelopes, this.userFact); 44 | const authorizedEnvelopes: FactEnvelope[] = results.map(r => { 45 | const envelope = envelopes.find(factEnvelopeEquals(r.fact)); 46 | if (!envelope) { 47 | throw new Error("Fact not found in envelopes."); 48 | } 49 | if (r.verdict === "Accept") { 50 | return { 51 | fact: r.fact, 52 | signatures: envelope.signatures 53 | .filter(s => r.newPublicKeys.includes(s.publicKey)) 54 | }; 55 | } 56 | else if (r.verdict === "Existing") { 57 | return envelope; 58 | } 59 | else { 60 | throw new Error("Unexpected verdict."); 61 | } 62 | }); 63 | return authorizedEnvelopes; 64 | } 65 | else { 66 | return envelopes; 67 | } 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /src/authentication/authentication-web-client.ts: -------------------------------------------------------------------------------- 1 | import { WebClient } from '../http/web-client'; 2 | import { FactEnvelope, FactRecord } from '../storage'; 3 | import { Authentication } from './authentication'; 4 | 5 | export class AuthenticationWebClient implements Authentication { 6 | constructor(private client: WebClient) { 7 | } 8 | 9 | login() { 10 | return this.client.login(); 11 | } 12 | 13 | local(): Promise { 14 | throw new Error('Local device has no persistence.'); 15 | } 16 | 17 | authorize(envelopes: FactEnvelope[]): Promise { 18 | return Promise.resolve(envelopes); 19 | } 20 | } -------------------------------------------------------------------------------- /src/authentication/authentication.ts: -------------------------------------------------------------------------------- 1 | import { LoginResponse } from '../http/messages'; 2 | import { FactEnvelope, FactRecord } from '../storage'; 3 | 4 | export interface Authentication { 5 | login(): Promise; 6 | local(): Promise; 7 | authorize(envelopes: FactEnvelope[]): Promise; 8 | } -------------------------------------------------------------------------------- /src/authorization/authorization-engine.ts: -------------------------------------------------------------------------------- 1 | import { computeHash, verifyHash } from '../fact/hash'; 2 | import { TopologicalSorter } from '../fact/sorter'; 3 | import { FactEnvelope, FactRecord, FactReference, Storage, factEnvelopeEquals, factReferenceEquals } from '../storage'; 4 | import { distinct, mapAsync } from '../util/fn'; 5 | import { Trace } from '../util/trace'; 6 | import { AuthorizationRules } from './authorizationRules'; 7 | 8 | export class Forbidden extends Error { 9 | __proto__: Error; 10 | constructor(message?: string) { 11 | const trueProto = new.target.prototype; 12 | super(message); 13 | 14 | this.__proto__ = trueProto; 15 | } 16 | } 17 | 18 | type AuthorizationResultReject = { 19 | verdict: "Reject"; 20 | }; 21 | 22 | type AuthorizationResultAccept = { 23 | 
verdict: "Accept"; 24 | newPublicKeys: string[]; 25 | }; 26 | 27 | type AuthorizationResultExisting = { 28 | verdict: "Existing"; 29 | }; 30 | 31 | export type AuthorizationResult = { 32 | fact: FactRecord; 33 | } & (AuthorizationResultReject | AuthorizationResultAccept | AuthorizationResultExisting); 34 | 35 | export class AuthorizationEngine { 36 | constructor( 37 | private authorizationRules: AuthorizationRules, 38 | private store: Storage 39 | ) { } 40 | 41 | async authorizeFacts(factEnvelopes: FactEnvelope[], userFact: FactRecord | null): Promise { 42 | const facts = factEnvelopes.map(e => e.fact); 43 | const existing = await this.store.whichExist(facts); 44 | const sorter = new TopologicalSorter>(); 45 | const userKeys : string[] = (userFact && userFact.fields.hasOwnProperty("publicKey")) 46 | ? [ userFact.fields.publicKey ] 47 | : []; 48 | const results = await mapAsync(sorter.sort(facts, (p, f) => this.visit(p, f, userKeys, facts, factEnvelopes, existing)), x => x); 49 | const rejected = results.filter(r => r.verdict === "Reject"); 50 | if (rejected.length > 0) { 51 | const distinctTypes = rejected 52 | .map(r => r.fact.type) 53 | .filter(distinct) 54 | .join(", "); 55 | const count = rejected.length === 1 ? "1 fact" : `${rejected.length} facts`; 56 | const message = `Rejected ${count} of type ${distinctTypes}.`; 57 | throw new Forbidden(message); 58 | } 59 | return results; 60 | } 61 | 62 | private async visit(predecessors: Promise[], fact: FactRecord, userKeys: string[], factRecords: FactRecord[], factEnvelopes: FactEnvelope[], existing: FactReference[]): Promise { 63 | const predecessorResults = await mapAsync(predecessors, p => p); 64 | if (predecessorResults.some(p => p.verdict === "Reject")) { 65 | const predecessor = predecessorResults 66 | .filter(p => p.verdict === "Reject") 67 | .map(p => p.fact.type) 68 | .join(', '); 69 | Trace.warn(`The fact ${fact.type} cannot be authorized because its predecessor ${predecessor} is not authorized.`); 70 | return { fact, verdict: "Reject" }; 71 | } 72 | 73 | if (!verifyHash(fact)) { 74 | const computedHash = computeHash(fact.fields, fact.predecessors); 75 | Trace.warn(`The hash of ${fact.type} does not match: computed ${computedHash}, provided ${fact.hash}.`); 76 | return { fact, verdict: "Reject" }; 77 | } 78 | 79 | if (existing.some(factReferenceEquals(fact))) { 80 | return { fact, verdict: "Existing" }; 81 | } 82 | 83 | const envelope = factEnvelopes.find(factEnvelopeEquals(fact)); 84 | const envelopeKeys = envelope ? 
envelope.signatures.map(s => s.publicKey) : []; 85 | const candidateKeys = envelopeKeys.concat(userKeys); 86 | 87 | const population = await this.authorizationRules.getAuthorizedPopulation(candidateKeys, fact, factRecords, this.store); 88 | if (population.quantifier === "none") { 89 | if (this.authorizationRules.hasRule(fact.type)) { 90 | Trace.warn(`The user is not authorized to create a fact of type ${fact.type}.`); 91 | } else { 92 | Trace.warn(`The fact ${fact.type} has no authorization rules.`); 93 | } 94 | return { fact, verdict: "Reject" }; 95 | } 96 | else if (population.quantifier === "some") { 97 | if (population.authorizedKeys.length === 0) { 98 | Trace.warn(`The user is not authorized to create a fact of type ${fact.type}.`); 99 | return { fact, verdict: "Reject" }; 100 | } 101 | return { fact, verdict: "Accept", newPublicKeys: population.authorizedKeys }; 102 | } 103 | else if (population.quantifier === "everyone") { 104 | return { fact, verdict: "Accept", newPublicKeys: [] }; 105 | } 106 | else { 107 | const _exhaustiveCheck: never = population; 108 | throw new Error(`Unknown quantifier ${(_exhaustiveCheck as any).quantifier}.`); 109 | } 110 | } 111 | } 112 | -------------------------------------------------------------------------------- /src/authorization/authorization-noop.ts: -------------------------------------------------------------------------------- 1 | import { FactManager } from "../managers/factManager"; 2 | import { Specification } from "../specification/specification"; 3 | import { FactEnvelope, FactFeed, FactRecord, FactReference, ProjectedResult, ReferencesByName, Storage } from "../storage"; 4 | import { UserIdentity } from "../user-identity"; 5 | import { Authorization } from './authorization'; 6 | import { Forbidden } from './authorization-engine'; 7 | 8 | export class AuthorizationNoOp implements Authorization { 9 | constructor( 10 | private factManager: FactManager, 11 | private store: Storage 12 | ) { } 13 | 14 | getOrCreateUserFact(userIdentity: UserIdentity): Promise { 15 | throw new Forbidden(); 16 | } 17 | 18 | read(userIdentity: UserIdentity, start: FactReference[], specification: Specification): Promise { 19 | return this.factManager.read(start, specification); 20 | } 21 | 22 | load(userIdentity: UserIdentity, references: FactReference[]): Promise { 23 | return this.factManager.load(references); 24 | } 25 | 26 | feed(userIdentity: UserIdentity, specification: Specification, start: FactReference[], bookmark: string): Promise { 27 | return this.store.feed(specification, start, bookmark); 28 | } 29 | 30 | async save(userIdentity: UserIdentity, envelopes: FactEnvelope[]): Promise { 31 | return await this.factManager.save(envelopes); 32 | } 33 | 34 | verifyDistribution(userIdentity: UserIdentity, feeds: Specification[], namedStart: ReferencesByName): Promise { 35 | return Promise.resolve(); 36 | } 37 | } -------------------------------------------------------------------------------- /src/authorization/authorization.ts: -------------------------------------------------------------------------------- 1 | import { Specification } from "../specification/specification"; 2 | import { FactEnvelope, FactFeed, FactRecord, FactReference, ProjectedResult, ReferencesByName } from "../storage"; 3 | import { UserIdentity } from "../user-identity"; 4 | 5 | export interface Authorization { 6 | getOrCreateUserFact(userIdentity: UserIdentity): Promise; 7 | read(userIdentity: UserIdentity | null, start: FactReference[], specification: Specification): Promise; 8 | 
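    // Reads one page of the feed described by the specification, starting from the given bookmark.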
feed(userIdentity: UserIdentity | null, feed: Specification, start: FactReference[], bookmark: string): Promise; 9 | load(userIdentity: UserIdentity | null, references: FactReference[]): Promise; 10 | save(userIdentity: UserIdentity | null, facts: FactEnvelope[]): Promise; 11 | verifyDistribution(userIdentity: UserIdentity | null, feeds: Specification[], namedStart: ReferencesByName): Promise; 12 | } -------------------------------------------------------------------------------- /src/cryptography/key-pair.ts: -------------------------------------------------------------------------------- 1 | import { md, pki, util } from "node-forge"; 2 | import { canonicalizeFact } from "../fact/hash"; 3 | import { FactEnvelope, FactRecord } from "../storage"; 4 | import { Trace } from "../util/trace"; 5 | 6 | export interface KeyPair { 7 | publicPem: string; 8 | privatePem: string; 9 | } 10 | 11 | export function generateKeyPair(): KeyPair { 12 | const keypair = pki.rsa.generateKeyPair({ bits: 2048 }); 13 | const privatePem = pki.privateKeyToPem(keypair.privateKey); 14 | const publicPem = pki.publicKeyToPem(keypair.publicKey); 15 | return { privatePem, publicPem }; 16 | } 17 | 18 | export function signFacts(keyPair: KeyPair, facts: FactRecord[]): FactEnvelope[] { 19 | const privateKey = pki.privateKeyFromPem(keyPair.privatePem); 20 | const envelopes: FactEnvelope[] = facts.map(fact => signFact(fact, keyPair.publicPem, privateKey)); 21 | return envelopes; 22 | } 23 | 24 | function signFact(fact: FactRecord, publicPem: string, privateKey: pki.rsa.PrivateKey): FactEnvelope { 25 | const canonicalString = canonicalizeFact(fact.fields, fact.predecessors); 26 | const encodedString = util.encodeUtf8(canonicalString); 27 | const digest = md.sha512.create().update(encodedString); 28 | const hash = util.encode64(digest.digest().getBytes()); 29 | if (fact.hash !== hash) { 30 | Trace.error(`Hash does not match. 
"${fact.hash}" !== "${hash}"\nFact: ${canonicalString}`); 31 | return { 32 | fact, 33 | signatures: [] 34 | }; 35 | } 36 | const signature = util.encode64(privateKey.sign(digest)); 37 | return { 38 | fact, 39 | signatures: [{ 40 | signature, 41 | publicKey: publicPem 42 | }] 43 | }; 44 | } -------------------------------------------------------------------------------- /src/cryptography/verify.ts: -------------------------------------------------------------------------------- 1 | import { md, pki, util } from "node-forge"; 2 | import { canonicalizeFact } from "../fact/hash"; 3 | import { FactEnvelope, FactSignature } from "../storage"; 4 | import { Trace } from "../util/trace"; 5 | 6 | type PublicKeyCache = { [key: string]: pki.rsa.PublicKey }; 7 | 8 | export function verifyEnvelopes(envelopes: FactEnvelope[]): boolean { 9 | // Cache public keys to avoid parsing them multiple times 10 | const publicKeyCache: PublicKeyCache = {}; 11 | 12 | for (const envelope of envelopes) { 13 | for (const signature of envelope.signatures) { 14 | if (!publicKeyCache[signature.publicKey]) { 15 | publicKeyCache[signature.publicKey] = pki.publicKeyFromPem(signature.publicKey); 16 | } 17 | } 18 | } 19 | 20 | return envelopes.every(e => verifySignatures(e, publicKeyCache)); 21 | } 22 | 23 | function verifySignatures(envelope: FactEnvelope, publicKeyCache: PublicKeyCache): boolean { 24 | const canonicalString = canonicalizeFact(envelope.fact.fields, envelope.fact.predecessors); 25 | const encodedString = util.encodeUtf8(canonicalString); 26 | const digest = md.sha512.create().update(encodedString); 27 | const digestBytes = digest.digest().getBytes(); 28 | const hash = util.encode64(digestBytes); 29 | if (envelope.fact.hash !== hash) { 30 | Trace.error(`Hash does not match. "${envelope.fact.hash}" !== "${hash}"\nFact: ${canonicalString}`); 31 | return false; 32 | } 33 | return envelope.signatures.every(s => verifySignature(s, digestBytes, publicKeyCache)); 34 | } 35 | 36 | function verifySignature(signature: FactSignature, digestBytes: string, publicKeyCache: PublicKeyCache) { 37 | const publicKey = publicKeyCache[signature.publicKey]; 38 | const signatureBytes = util.decode64(signature.signature); 39 | try { 40 | return publicKey.verify(digestBytes, signatureBytes); 41 | } 42 | catch (e) { 43 | Trace.error(`Failed to verify signature. 
${e}`); 44 | return false; 45 | } 46 | } -------------------------------------------------------------------------------- /src/distribution/distribution-rules.ts: -------------------------------------------------------------------------------- 1 | import { User } from "../model/user"; 2 | import { describeSpecification } from "../specification/description"; 3 | import { buildFeeds } from "../specification/feed-builder"; 4 | import { SpecificationOf } from "../specification/model"; 5 | import { Specification } from "../specification/specification"; 6 | import { SpecificationParser } from "../specification/specification-parser"; 7 | 8 | interface DistributionRule { 9 | specification: Specification; 10 | feeds: Specification[]; 11 | user: Specification | null; 12 | } 13 | 14 | class ShareTarget { 15 | constructor( 16 | private specification: Specification, 17 | private rules: DistributionRule[] 18 | ) { } 19 | 20 | with(user: SpecificationOf): DistributionRules { 21 | return new DistributionRules([ 22 | ...this.rules, 23 | { 24 | specification: this.specification, 25 | feeds: buildFeeds(this.specification), 26 | user: user.specification 27 | } 28 | ]); 29 | } 30 | 31 | withEveryone(): DistributionRules { 32 | return new DistributionRules([ 33 | ...this.rules, 34 | { 35 | specification: this.specification, 36 | feeds: buildFeeds(this.specification), 37 | user: null 38 | } 39 | ]); 40 | } 41 | } 42 | 43 | export class DistributionRules { 44 | static empty: DistributionRules = new DistributionRules([]); 45 | 46 | constructor( 47 | public rules: DistributionRule[] 48 | ) { } 49 | 50 | with(rules: (r: DistributionRules) => DistributionRules): DistributionRules { 51 | return rules(this); 52 | } 53 | 54 | share(specification: SpecificationOf): ShareTarget { 55 | return new ShareTarget(specification.specification, this.rules); 56 | } 57 | 58 | saveToDescription(): string { 59 | let description = "distribution {\n"; 60 | for (const rule of this.rules) { 61 | const specificationDescription = describeSpecification(rule.specification, 1).trimStart(); 62 | const userDescription = rule.user ? 
describeSpecification(rule.user, 1).trimStart() : "everyone\n"; 63 | description += ` share ${specificationDescription} with ${userDescription}`; 64 | } 65 | description += "}\n"; 66 | return description; 67 | } 68 | 69 | merge(distributionRules2: DistributionRules): DistributionRules { 70 | return new DistributionRules([ 71 | ...this.rules, 72 | ...distributionRules2.rules 73 | ]); 74 | } 75 | 76 | public static combine(distributionRules: DistributionRules, specification: Specification, user: Specification | null) { 77 | return new DistributionRules([ 78 | ...distributionRules.rules, 79 | { 80 | specification, 81 | feeds: buildFeeds(specification), 82 | user 83 | } 84 | ]); 85 | } 86 | 87 | static loadFromDescription(description: string): DistributionRules { 88 | const parser = new SpecificationParser(description); 89 | parser.skipWhitespace(); 90 | const distributionRules = parser.parseDistributionRules(); 91 | return distributionRules; 92 | } 93 | } 94 | 95 | export function describeDistributionRules(rules: (r: DistributionRules) => DistributionRules): string { 96 | const distributionRules = rules(new DistributionRules([])); 97 | return distributionRules.saveToDescription(); 98 | } -------------------------------------------------------------------------------- /src/fact/hash.ts: -------------------------------------------------------------------------------- 1 | import { FactRecord, FactReference, PredecessorCollection } from '../storage'; 2 | import { computeStringHash } from '../util/encoding'; 3 | import { HashMap } from './hydrate'; 4 | 5 | export function computeHash(fields: {}, predecessors: PredecessorCollection) { 6 | return computeObjectHash({ 7 | fields: fields, 8 | predecessors: canonicalPredecessors(predecessors) 9 | }); 10 | } 11 | 12 | export function canonicalizeFact(fields: {}, predecessors: PredecessorCollection) { 13 | return canonicalize({ 14 | fields: fields, 15 | predecessors: canonicalPredecessors(predecessors) 16 | }); 17 | } 18 | 19 | export function verifyHash(fact: FactRecord) { 20 | const computedHash = computeHash(fact.fields, fact.predecessors); 21 | return fact.hash === computedHash; 22 | } 23 | 24 | export function canonicalPredecessors(predecessors: PredecessorCollection) { 25 | const result: PredecessorCollection = {}; 26 | for(const role in predecessors) { 27 | const referenceMessages = predecessors[role]; 28 | if (Array.isArray(referenceMessages)) { 29 | result[role] = sortedPredecessors(referenceMessages); 30 | } 31 | else { 32 | result[role] = referenceMessages; 33 | } 34 | } 35 | return result; 36 | } 37 | 38 | function sortedPredecessors(predecessors: FactReference[]) { 39 | return predecessors.slice().sort((a,b) => { 40 | if (a.hash < b.hash) 41 | return -1; 42 | else if (a.hash > b.hash) 43 | return 1; 44 | if (a.type < b.type) 45 | return -1; 46 | else if (a.type > b.type) 47 | return 1; 48 | else 49 | return 0; 50 | }); 51 | } 52 | 53 | export function computeObjectHash(obj: {}) { 54 | if (!obj) 55 | return ''; 56 | 57 | const str = canonicalize(obj); 58 | return computeStringHash(str); 59 | } 60 | 61 | type Pair = { key: string, value: any }; 62 | 63 | function canonicalize(obj: HashMap) { 64 | const pairs: Pair[] = []; 65 | for (const key in obj) { 66 | const value = obj[key]; 67 | pairs.push({ key, value }); 68 | } 69 | pairs.sort((a, b) => { 70 | if (a.key < b.key) 71 | return -1; 72 | else if (a.key > b.key) 73 | return 1; 74 | else 75 | return 0; 76 | }); 77 | const members = pairs.reduce((text, pair) => { 78 | if (text.length > 0) 79 | 
text += ','; 80 | text += '"' + pair.key + '":' + serialize(pair.value); 81 | return text; 82 | }, ''); 83 | return '{' + members + '}'; 84 | } 85 | 86 | function serialize(value: any) { 87 | if (typeof(value) === 'object') { 88 | if (value instanceof Date) { 89 | return 'Date.parse("' + value.toISOString() + '")'; 90 | } 91 | else if (Array.isArray(value)) { 92 | const values = value.reduce((text, element) => { 93 | if (text.length > 0) 94 | text += ','; 95 | text += serialize(element); 96 | return text; 97 | }, ''); 98 | return '[' + values + ']'; 99 | } 100 | else { 101 | return canonicalize(value); 102 | } 103 | } 104 | else { 105 | return JSON.stringify(value); 106 | } 107 | } -------------------------------------------------------------------------------- /src/fact/hydrate.ts: -------------------------------------------------------------------------------- 1 | import { FactRecord, FactReference, PredecessorCollection } from '../storage'; 2 | import { toJSON } from '../util/obj'; 3 | import { computeHash } from './hash'; 4 | 5 | export type HashMap = { [key: string]: any }; 6 | 7 | type DehydrationEntry = { 8 | fact: HashMap, 9 | record: FactRecord, 10 | reference: FactReference 11 | }; 12 | 13 | export class Dehydration { 14 | private entries : DehydrationEntry[] = []; 15 | 16 | factRecords() { 17 | return this.entries.map(entry => entry.record); 18 | } 19 | 20 | dehydrate(fact: HashMap) { 21 | const entry = this.entries.find(entry => { 22 | return entry.fact === fact; 23 | }); 24 | 25 | if (entry) { 26 | return entry.reference; 27 | } 28 | 29 | const record = this.createFactRecord(fact); 30 | const reference = { 31 | type: record.type, 32 | hash: record.hash 33 | }; 34 | if (!this.entries.find(entry => { 35 | return entry.reference.hash === reference.hash && 36 | entry.reference.type === reference.type; 37 | })) { 38 | this.entries.push({ fact, record, reference }); 39 | } 40 | 41 | return reference; 42 | } 43 | 44 | private createFactRecord(fact: HashMap): FactRecord { 45 | let type: string | null = null; 46 | const fields: HashMap = {}; 47 | const predecessors: PredecessorCollection = {}; 48 | for (const field in fact) { 49 | const value = toJSON(fact[field]); 50 | if (value === null || value === undefined) { 51 | // Skip 52 | } 53 | else if (field === 'type' && typeof(value) === 'string') { 54 | type = value; 55 | } 56 | else if (typeof(value) === 'object') { 57 | if (Array.isArray(value)) { 58 | predecessors[field] = value 59 | .filter(element => element) 60 | .map(element => this.dehydrate(element)); 61 | } 62 | else { 63 | predecessors[field] = this.dehydrate(value); 64 | } 65 | } 66 | else { 67 | fields[field] = value; 68 | } 69 | } 70 | const hash = computeHash(fields, predecessors); 71 | if (!type) { 72 | throw new Error('Specify the type of the fact and all of its predecessors.'); 73 | } 74 | return { type, hash, predecessors, fields }; 75 | } 76 | } 77 | 78 | type HydrationEntry = { 79 | record: FactRecord, 80 | fact: HashMap | null 81 | } 82 | 83 | export const hashSymbol = typeof(Symbol) === "undefined" ? 
null : Symbol("hash"); 84 | 85 | export class Hydration { 86 | private entries: HydrationEntry[]; 87 | 88 | constructor(records: FactRecord[]) { 89 | this.entries = records.map(r => { 90 | return { 91 | record: r, 92 | fact: null 93 | }; 94 | }); 95 | } 96 | 97 | hydrate(reference: FactReference): HashMap { 98 | const entry = this.entries.find(r => r.record.hash === reference.hash && r.record.type === reference.type); 99 | if (!entry) { 100 | throw new Error('Referenced fact not found in tree'); 101 | } 102 | 103 | if (entry.fact) { 104 | return entry.fact; 105 | } 106 | 107 | const fields: HashMap = entry.record.fields; 108 | const fact: HashMap = {}; 109 | for (const field in fields) { 110 | fact[field] = fields[field]; 111 | } 112 | fact.type = entry.record.type; 113 | 114 | for (const role in entry.record.predecessors) { 115 | const value = entry.record.predecessors[role]; 116 | fact[role] = this.hydratePredecessors(value); 117 | } 118 | 119 | entry.fact = fact; 120 | 121 | if (hashSymbol) { 122 | (fact as any)[hashSymbol] = reference.hash; 123 | } 124 | return fact; 125 | } 126 | 127 | private hydratePredecessors(references: FactReference | FactReference[]): HashMap | HashMap[] { 128 | if (Array.isArray(references)) { 129 | return references.map(p => this.hydrate(p)); 130 | } 131 | else { 132 | return this.hydrate(references); 133 | } 134 | } 135 | } 136 | 137 | export function lookupHash(fact: T) { 138 | return hashSymbol && (fact as any)[hashSymbol] as string; 139 | } 140 | 141 | export function hydrate(record: FactRecord) { 142 | const fact: any = { 143 | ...record.fields, 144 | type: record.type 145 | }; 146 | return fact; 147 | } 148 | 149 | export function hydrateFromTree(references: FactReference[], records: FactRecord[]) { 150 | const hydration = new Hydration(records); 151 | return references.map(r => { 152 | try { 153 | return hydration.hydrate(r); 154 | } 155 | catch (e) { 156 | return null; 157 | } 158 | }).filter(f => f) as T[]; 159 | } 160 | 161 | export function dehydrateFact(fact: HashMap): FactRecord[] { 162 | const dehydration = new Dehydration(); 163 | dehydration.dehydrate(fact); 164 | return dehydration.factRecords(); 165 | } 166 | 167 | export function dehydrateReference(fact: HashMap): FactReference { 168 | const dehydration = new Dehydration(); 169 | return dehydration.dehydrate(fact); 170 | } -------------------------------------------------------------------------------- /src/fact/sorter.ts: -------------------------------------------------------------------------------- 1 | import { FactRecord, FactReference } from '../storage'; 2 | 3 | export class TopologicalSorter { 4 | private factsVisited: { [key: string]: boolean } = {}; 5 | private factsWaiting: { [key: string]: FactRecord[] } = {}; 6 | private factValue: { [key: string]: T } = {}; 7 | 8 | sort(facts: FactRecord[], map: (predecessors: T[], fact: FactRecord) => T): T[] { 9 | const factsReceived: T[] = []; 10 | const factQueue = facts.slice(0); 11 | 12 | while (factQueue.length > 0) { 13 | const fact = factQueue.shift()!; 14 | const predecessorKeys = this.allPredecessors(fact); 15 | const waitingPredecessors = predecessorKeys.filter(key => { 16 | return !this.factsVisited[key]; 17 | }); 18 | if (waitingPredecessors.length === 0) { 19 | const key = this.factKey(fact); 20 | this.factsVisited[key] = true; 21 | const predecessorValues = predecessorKeys.map(k => { 22 | return this.factValue[k]; 23 | }); 24 | const factValue = map(predecessorValues, fact); 25 | this.factValue[key] = factValue; 26 | 
factsReceived.push(factValue); 27 | const retry = this.factsWaiting[key]; 28 | if (retry) { 29 | retry.forEach(r => { 30 | if (!factQueue.some(f => f.type === r.type && f.hash === r.hash)) { 31 | factQueue.push(r); 32 | } 33 | }); 34 | delete this.factsWaiting[key]; 35 | } 36 | } 37 | else { 38 | waitingPredecessors.forEach(key => { 39 | let list = this.factsWaiting[key]; 40 | if (!list) { 41 | list = []; 42 | this.factsWaiting[key] = list; 43 | } 44 | if (!list.some(f => f.type === fact.type && f.hash === fact.hash)) { 45 | list.push(fact); 46 | } 47 | }); 48 | } 49 | } 50 | 51 | return factsReceived; 52 | } 53 | 54 | finished(): boolean { 55 | for (const key in this.factsWaiting) { 56 | if (this.factsWaiting[key]) { 57 | return false; 58 | } 59 | } 60 | 61 | return true; 62 | } 63 | 64 | private allPredecessors(fact: FactRecord): string[] { 65 | let predecessors: string[] = []; 66 | 67 | for (const role in fact.predecessors) { 68 | const references = fact.predecessors[role]; 69 | if (Array.isArray(references)) { 70 | predecessors = predecessors.concat(references.map(r => this.factKey(r))); 71 | } 72 | else { 73 | predecessors.push(this.factKey(references)); 74 | } 75 | } 76 | 77 | return predecessors; 78 | } 79 | 80 | private factKey(fact: FactReference): string { 81 | return `${fact.type}:${fact.hash}`; 82 | } 83 | } -------------------------------------------------------------------------------- /src/fork/fork.ts: -------------------------------------------------------------------------------- 1 | import { FactEnvelope, FactReference } from "../storage"; 2 | 3 | export interface Fork { 4 | save(envelopes: FactEnvelope[]): Promise; 5 | load(references: FactReference[]): Promise; 6 | processQueueNow(): Promise; 7 | close(): Promise; 8 | } -------------------------------------------------------------------------------- /src/fork/pass-through-fork.ts: -------------------------------------------------------------------------------- 1 | import { FactEnvelope, FactReference, Storage } from "../storage"; 2 | import { Fork } from "./fork"; 3 | 4 | export class PassThroughFork implements Fork { 5 | constructor( 6 | private storage: Storage 7 | ) { } 8 | 9 | async close(): Promise { 10 | return Promise.resolve(); 11 | } 12 | 13 | save(envelopes: FactEnvelope[]): Promise { 14 | return Promise.resolve(); 15 | } 16 | 17 | load(references: FactReference[]): Promise { 18 | return this.storage.load(references); 19 | } 20 | 21 | processQueueNow(): Promise { 22 | return Promise.resolve(); 23 | } 24 | } -------------------------------------------------------------------------------- /src/fork/persistent-fork.ts: -------------------------------------------------------------------------------- 1 | import { TopologicalSorter } from '../fact/sorter'; 2 | import { WebClient } from '../http/web-client'; 3 | import { QueueProcessor } from '../managers/QueueProcessor'; 4 | import { FactEnvelope, factEnvelopeEquals, FactRecord, FactReference, Queue, Storage } from '../storage'; 5 | import { Trace } from "../util/trace"; 6 | import { Fork } from "./fork"; 7 | import { serializeLoad } from './serialize'; 8 | import { WebClientSaver } from './web-client-saver'; 9 | 10 | export class PersistentFork implements Fork { 11 | private queueProcessor: QueueProcessor; 12 | 13 | constructor( 14 | private storage: Storage, 15 | private queue: Queue, 16 | private client: WebClient, 17 | private delayMilliseconds: number 18 | ) { 19 | const saver = new WebClientSaver(client, queue); 20 | this.queueProcessor = new 
QueueProcessor(saver, delayMilliseconds); 21 | } 22 | 23 | initialize() { 24 | // Schedule processing of any existing items in the queue 25 | this.queueProcessor.scheduleProcessing(); 26 | } 27 | 28 | async close(): Promise { 29 | // Process any pending facts before closing 30 | try { 31 | await this.processQueueNow(); 32 | } catch (error) { 33 | Trace.error(error); 34 | } 35 | this.queueProcessor.dispose(); 36 | return Promise.resolve(); 37 | } 38 | async save(envelopes: FactEnvelope[]): Promise { 39 | await this.queue.enqueue(envelopes); 40 | this.queueProcessor.scheduleProcessing(); 41 | } 42 | 43 | async load(references: FactReference[]): Promise { 44 | const known = await this.storage.load(references); 45 | const remaining = references.filter(reference => !known.some(factEnvelopeEquals(reference))); 46 | if (remaining.length === 0) { 47 | return known; 48 | } 49 | else { 50 | const records = await this.loadEnvelopes(remaining); 51 | return records.concat(known); 52 | } 53 | } 54 | 55 | /** 56 | * Processes the queue immediately, bypassing any delay. 57 | */ 58 | async processQueueNow(): Promise { 59 | await this.queueProcessor.processQueueNow(); 60 | } 61 | 62 | private async loadEnvelopes(references: FactReference[]) { 63 | const sorter = new TopologicalSorter(); 64 | let loaded: FactEnvelope[] = []; 65 | for (let start = 0; start < references.length; start += 300) { 66 | const chunk = references.slice(start, start + 300); 67 | const response = await this.client.loadWithRetry(serializeLoad(chunk)); 68 | const facts = sorter.sort(response.facts, (p, f) => f); 69 | const envelopes = facts.map(fact => { 70 | return { 71 | fact: fact, 72 | signatures: [] 73 | }; 74 | }); 75 | await this.storage.save(envelopes); 76 | loaded = loaded.concat(envelopes); 77 | } 78 | return loaded; 79 | } 80 | } -------------------------------------------------------------------------------- /src/fork/serialize.ts: -------------------------------------------------------------------------------- 1 | import { LoadMessage, SaveMessage } from '../http/messages'; 2 | import { FactEnvelope, FactReference } from '../storage'; 3 | 4 | export function serializeSave(envelopes: FactEnvelope[]) : SaveMessage { 5 | return { 6 | facts: envelopes.map(e => e.fact) 7 | }; 8 | } 9 | 10 | export function serializeLoad(references: FactReference[]) : LoadMessage { 11 | return { 12 | references: references 13 | }; 14 | } 15 | -------------------------------------------------------------------------------- /src/fork/transient-fork.ts: -------------------------------------------------------------------------------- 1 | import { TopologicalSorter } from '../fact/sorter'; 2 | import { WebClient } from '../http/web-client'; 3 | import { FactEnvelope, factEnvelopeEquals, FactRecord, FactReference, Storage } from '../storage'; 4 | import { Trace } from "../util/trace"; 5 | import { Fork } from "./fork"; 6 | import { serializeLoad } from './serialize'; 7 | 8 | export class TransientFork implements Fork { 9 | constructor( 10 | private storage: Storage, 11 | private client: WebClient 12 | ) { 13 | 14 | } 15 | 16 | close() { 17 | return Promise.resolve(); 18 | } 19 | 20 | async save(envelopes: FactEnvelope[]): Promise { 21 | await this.client.save(envelopes); 22 | } 23 | 24 | async load(references: FactReference[]): Promise { 25 | const known = await this.storage.load(references); 26 | const remaining = references.filter(reference => !known.some(factEnvelopeEquals(reference))); 27 | if (remaining.length === 0) { 28 | return known; 29 | } 30 
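        // Facts missing from local storage are fetched from the server and cached before returning.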
| else { 31 | const records = await this.loadEnvelopes(remaining); 32 | return records.concat(known); 33 | } 34 | } 35 | 36 | private async loadEnvelopes(references: FactReference[]) { 37 | const sorter = new TopologicalSorter(); 38 | let loaded: FactEnvelope[] = []; 39 | for (let start = 0; start < references.length; start += 300) { 40 | const chunk = references.slice(start, start + 300); 41 | const response = await this.client.load(serializeLoad(chunk)); 42 | const facts = sorter.sort(response.facts, (p, f) => f); 43 | const envelopes = facts.map(fact => { 44 | return { 45 | fact: fact, 46 | signatures: [] 47 | }; 48 | }); 49 | const saved = await this.storage.save(envelopes); 50 | if (saved.length > 0) { 51 | Trace.counter("facts_saved", saved.length); 52 | } 53 | loaded = loaded.concat(envelopes); 54 | } 55 | return loaded; 56 | } 57 | 58 | processQueueNow(): Promise { 59 | // No-op for transient fork 60 | return Promise.resolve(); 61 | } 62 | } -------------------------------------------------------------------------------- /src/fork/web-client-saver.ts: -------------------------------------------------------------------------------- 1 | import { WebClient } from "../http/web-client"; 2 | import { Saver } from "../managers/QueueProcessor"; 3 | import { Queue } from "../storage"; 4 | import { Trace } from "../util/trace"; 5 | 6 | /** 7 | * A Saver implementation that uses a WebClient to save facts. 8 | */ 9 | export class WebClientSaver implements Saver { 10 | constructor( 11 | private readonly client: WebClient, 12 | private readonly queue: Queue 13 | ) { } 14 | 15 | /** 16 | * Saves facts to the server and removes them from the queue. 17 | */ 18 | async save(): Promise { 19 | const envelopes = await this.queue.peek(); 20 | if (envelopes.length > 0) { 21 | try { 22 | await this.client.saveWithRetry(envelopes); 23 | await this.queue.dequeue(envelopes); 24 | } 25 | catch (error) { 26 | Trace.error(error); 27 | } 28 | } 29 | } 30 | } -------------------------------------------------------------------------------- /src/http/ContentType.ts: -------------------------------------------------------------------------------- 1 | export const ContentTypeText = "text/plain" as const; 2 | export const ContentTypeJson = "application/json" as const; 3 | export const ContentTypeGraph = "application/x-jinaga-graph-v1" as const; 4 | 5 | export type PostContentType = typeof ContentTypeText | typeof ContentTypeJson | typeof ContentTypeGraph; 6 | export type PostAccept = typeof ContentTypeJson | undefined; 7 | -------------------------------------------------------------------------------- /src/http/authenticationProvider.ts: -------------------------------------------------------------------------------- 1 | export interface HttpHeaders { 2 | "Authorization"?: string; 3 | [key: string]: string | undefined; 4 | } 5 | 6 | export interface AuthenticationProvider { 7 | getHeaders(): Promise; 8 | reauthenticate(): Promise; 9 | } -------------------------------------------------------------------------------- /src/http/deserializer.ts: -------------------------------------------------------------------------------- 1 | import { computeHash } from "../fact/hash"; 2 | import { FactEnvelope, FactReference, FactRecord, PredecessorCollection, FactSignature } from "../storage"; 3 | 4 | export interface GraphSource { 5 | read( 6 | onEnvelopes: (envelopes: FactEnvelope[]) => Promise 7 | ): Promise; 8 | } 9 | 10 | export class GraphDeserializer implements GraphSource { 11 | private factReferences: FactReference[] = []; 
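    // Both facts and public keys are identified by their position in the stream:
    // predecessor indexes refer back into factReferences, and PK<n> lines refer into publicKeys.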
12 | private publicKeys: string[] = []; 13 | 14 | constructor( 15 | private readonly readLine: () => Promise 16 | ) {} 17 | 18 | async read( 19 | onEnvelopes: (envelopes: FactEnvelope[]) => Promise 20 | ) { 21 | let envelopes: FactEnvelope[] = []; 22 | let line: string | null; 23 | while ((line = await this.readLine()) !== null) { 24 | if (line.startsWith("PK")) { 25 | const index = parseInt(line.substring(2)); 26 | await this.readPublicKey(index); 27 | } 28 | else { 29 | const type = JSON.parse(line); 30 | envelopes = await this.readEnvelope(type, envelopes, onEnvelopes); 31 | } 32 | } 33 | if (envelopes.length > 0) { 34 | await onEnvelopes(envelopes); 35 | } 36 | } 37 | 38 | private async readPublicKey(index: number) { 39 | if (index !== this.publicKeys.length) { 40 | throw new Error(`Public key index ${index} is out of order`); 41 | } 42 | const publicKey = await this.parseNextJSONLine(); 43 | const emptyLine = await this.readLine(); 44 | if (emptyLine !== "") { 45 | throw new Error(`Expected empty line after public key, but got "${emptyLine}"`); 46 | } 47 | this.publicKeys.push(publicKey); 48 | } 49 | 50 | private async readEnvelope(type: string, envelopes: FactEnvelope[], onEnvelopes: (envelopes: FactEnvelope[]) => Promise) { 51 | const predecessorIndexes = await this.parseNextJSONLine(); 52 | const fields = await this.parseNextJSONLine(); 53 | 54 | const predecessors = this.getPredecessorReferences(predecessorIndexes); 55 | 56 | const hash = computeHash(fields, predecessors); 57 | this.factReferences.push({ type, hash }); 58 | const fact: FactRecord = { type, hash, predecessors, fields }; 59 | 60 | const signatures = await this.readSignatures(); 61 | 62 | envelopes.push({ fact, signatures }); 63 | 64 | // Periodically handle a batch of envelopes 65 | if (envelopes.length >= 20) { 66 | await onEnvelopes(envelopes); 67 | envelopes = []; 68 | } 69 | return envelopes; 70 | } 71 | 72 | private getPredecessorReferences(predecessorIndexes: any) { 73 | const predecessors: PredecessorCollection = {}; 74 | for (const role in predecessorIndexes) { 75 | const index = predecessorIndexes[role]; 76 | if (Array.isArray(index)) { 77 | predecessors[role] = index.map(i => { 78 | if (i >= this.factReferences.length) { 79 | throw new Error(`Predecessor reference ${i} is out of range`); 80 | } 81 | return this.factReferences[i]; 82 | }); 83 | } else { 84 | if (index >= this.factReferences.length) { 85 | throw new Error(`Predecessor reference ${index} is out of range`); 86 | } 87 | predecessors[role] = this.factReferences[index]; 88 | } 89 | } 90 | return predecessors; 91 | } 92 | 93 | private async readSignatures(): Promise { 94 | const signatures: FactSignature[] = []; 95 | let line: string | null; 96 | while ((line = await this.readLine()) !== null && line !== "") { 97 | if (!line.startsWith("PK")) { 98 | throw new Error(`Expected public key reference, but got "${line}"`); 99 | } 100 | const publicKeyIndex = parseInt(line.substring(2)); 101 | if (publicKeyIndex >= this.publicKeys.length) { 102 | throw new Error(`Public key reference ${publicKeyIndex} is out of range`); 103 | } 104 | const publicKey = this.publicKeys[publicKeyIndex]; 105 | const signature = await this.parseNextJSONLine(); 106 | 107 | signatures.push({ publicKey, signature }); 108 | } 109 | return signatures; 110 | } 111 | 112 | private async parseNextJSONLine() { 113 | const line = await this.readLine(); 114 | if (!line) { 115 | throw new Error("Expected JSON line, but got end of file"); 116 | } 117 | return JSON.parse(line); 118 | } 119 
| }
-------------------------------------------------------------------------------- /src/http/httpNetwork.ts: --------------------------------------------------------------------------------
1 | import { Specification } from "../specification/specification";
2 | import { FactReference, FactEnvelope } from "../storage";
3 | import { Network } from "../managers/NetworkManager";
4 | import { FeedResponse, FeedsResponse, LoadResponse } from "./messages";
5 | import { WebClient } from "./web-client";
6 | import { describeDeclaration, describeSpecification } from "../specification/description";
7 | 
8 | export class HttpNetwork implements Network {
9 |     constructor(
10 |         private readonly webClient: WebClient
11 |     ) { }
12 | 
13 |     async feeds(start: FactReference[], specification: Specification): Promise<string[]> {
14 |         const declarationString = describeDeclaration(start, specification.given);
15 |         const specificationString = describeSpecification(specification, 0);
16 |         const request = `${declarationString}\n${specificationString}`;
17 |         const response: FeedsResponse = await this.webClient.feeds(request);
18 |         return response.feeds;
19 |     }
20 | 
21 |     async fetchFeed(feed: string, bookmark: string): Promise<FeedResponse> {
22 |         const response: FeedResponse = await this.webClient.feed(feed, bookmark);
23 |         return response;
24 |     }
25 | 
26 |     streamFeed(feed: string, bookmark: string, onResponse: (factReferences: FactReference[], nextBookmark: string) => Promise<void>, onError: (err: Error) => void): () => void {
27 |         return this.webClient.streamFeed(feed, bookmark, async (response: FeedResponse) => {
28 |             await onResponse(response.references, response.bookmark);
29 |         }, onError);
30 |     }
31 | 
32 |     async load(factReferences: FactReference[]): Promise<FactEnvelope[]> {
33 |         const response: LoadResponse = await this.webClient.load({
34 |             references: factReferences
35 |         });
36 |         const envelopes = response.facts.map(fact => ({
37 |             fact,
38 |             signatures: []
39 |         }));
40 |         return envelopes;
41 |     }
42 | 
43 | }
-------------------------------------------------------------------------------- /src/http/messageParsers.ts: --------------------------------------------------------------------------------
1 | import { FactRecord, FactReference, PredecessorCollection } from "../storage";
2 | import { LoadMessage, SaveMessage } from "./messages";
3 | 
4 | function parseFactReference(factReference: any): FactReference {
5 |     if (typeof factReference !== 'object') throw new Error("Expected FactReference to be an object.");
6 |     if (typeof factReference.type !== 'string') throw new Error("Expected a string 'type' property.");
7 |     if (typeof factReference.hash !== 'string') throw new Error("Expected a string 'hash' property.");
8 |     return {
9 |         type: factReference.type,
10 |         hash: factReference.hash
11 |     };
12 | }
13 | 
14 | function parsePredecessor(predecessor: any): FactReference | FactReference[] {
15 |     if (Array.isArray(predecessor)) {
16 |         return predecessor.map(parseFactReference);
17 |     }
18 |     else {
19 |         return parseFactReference(predecessor);
20 |     }
21 | }
22 | 
23 | function parsePredecessorCollection(predecessors: any): PredecessorCollection {
24 |     if (typeof predecessors !== 'object') throw new Error("Expected PredecessorCollection to be an object.");
25 |     return Object.keys(predecessors).reduce((result, key) => ({
26 |         ...result,
27 |         [key]: parsePredecessor(predecessors[key])
28 |     }), {} as PredecessorCollection);
29 | }
30 | 
31 | function parseFactRecord(factRecord: any): FactRecord {
32 |     if (typeof factRecord !== 'object') throw new Error("Expected 
FactRecord to be an object."); 33 | if (typeof factRecord.type !== 'string') throw new Error("Expected a string 'type' property."); 34 | if (typeof factRecord.hash !== 'string') throw new Error("Expected a string 'hash' property."); 35 | if (typeof factRecord.fields !== 'object') throw new Error("Expected an object 'fields' property."); 36 | return { 37 | type: factRecord.type, 38 | hash: factRecord.hash, 39 | predecessors: parsePredecessorCollection(factRecord.predecessors), 40 | fields: factRecord.fields 41 | }; 42 | } 43 | 44 | export function parseSaveMessage(message: any): SaveMessage { 45 | if (typeof message !== 'object') throw new Error("Expected an object. Check the content type of the request."); 46 | if (!Array.isArray(message.facts)) throw new Error("Expected an array 'facts' property."); 47 | return { 48 | facts: message.facts.map(parseFactRecord) 49 | }; 50 | } 51 | 52 | export function parseLoadMessage(message: any): LoadMessage { 53 | if (typeof message !== 'object') throw new Error("Expected an object. Check the content type of the request."); 54 | if (!Array.isArray(message.references)) throw new Error("Expected an array 'references' property."); 55 | return { 56 | references: message.references.map(parseFactReference) 57 | }; 58 | } -------------------------------------------------------------------------------- /src/http/messages.ts: -------------------------------------------------------------------------------- 1 | import { FactRecord, FactReference } from '../storage'; 2 | 3 | export interface ProfileMessage { 4 | displayName: string; 5 | }; 6 | 7 | export interface LoginResponse { 8 | userFact: FactRecord, 9 | profile: ProfileMessage 10 | }; 11 | 12 | export interface SaveMessage { 13 | facts: FactRecord[] 14 | }; 15 | 16 | export interface LoadMessage { 17 | references: FactReference[] 18 | }; 19 | 20 | export interface LoadResponse { 21 | facts: FactRecord[] 22 | }; 23 | 24 | export interface FeedsResponse { 25 | feeds: string[]; 26 | } 27 | 28 | export interface FeedResponse { 29 | references: FactReference[]; 30 | bookmark: string; 31 | } 32 | -------------------------------------------------------------------------------- /src/http/serializer.ts: -------------------------------------------------------------------------------- 1 | import { FactEnvelope, FactReference, PredecessorCollection } from "../storage"; 2 | 3 | export type IndexPredecessorCollection = { 4 | [role: string]: number | number[]; 5 | }; 6 | 7 | export class GraphSerializer 8 | { 9 | private index = 0; 10 | private indexByFactReference: { [key: string]: number } = {}; 11 | private publicKeys: string[] = []; 12 | 13 | constructor( 14 | private readonly write: (chunk: string) => void 15 | ) {} 16 | 17 | serialize(result: FactEnvelope[]) { 18 | // Write the facts 19 | for (const fact of result) { 20 | // Skip facts that have already been written 21 | const key = fact.fact.type + ":" + fact.fact.hash; 22 | if (this.indexByFactReference.hasOwnProperty(key)) { 23 | continue; 24 | } 25 | 26 | // Write any new public keys 27 | for (const signature of fact.signatures) { 28 | if (!this.publicKeys.includes(signature.publicKey)) { 29 | const pkIndex = this.publicKeys.length; 30 | const publicKey = JSON.stringify(signature.publicKey); 31 | this.write(`PK${pkIndex.toString()}\n${publicKey}\n\n`); 32 | this.publicKeys.push(signature.publicKey); 33 | } 34 | } 35 | 36 | // Write the fact 37 | const factType = JSON.stringify(fact.fact.type); 38 | const predecessorIndexes = 
JSON.stringify(this.getPredecessorIndexes(fact.fact.predecessors)); 39 | const factFields = JSON.stringify(fact.fact.fields); 40 | 41 | let output = `${factType}\n${predecessorIndexes}\n${factFields}`; 42 | 43 | // Write the signatures 44 | for (const signature of fact.signatures) { 45 | const publicKeyIndex = this.publicKeys.indexOf(signature.publicKey); 46 | const publicKey = `PK${publicKeyIndex.toString()}`; 47 | const signatureString = JSON.stringify(signature.signature); 48 | 49 | output += `\n${publicKey}\n${signatureString}`; 50 | } 51 | 52 | output += "\n\n"; 53 | 54 | this.write(output); 55 | 56 | this.indexByFactReference[key] = this.index; 57 | this.index++; 58 | } 59 | } 60 | 61 | private getPredecessorIndexes(predecessors: PredecessorCollection): IndexPredecessorCollection { 62 | const result: IndexPredecessorCollection = {}; 63 | for (const role in predecessors) { 64 | const reference = predecessors[role]; 65 | if (Array.isArray(reference)) { 66 | result[role] = reference.map(r => this.getFactIndex(r)); 67 | } else { 68 | result[role] = this.getFactIndex(reference); 69 | } 70 | } 71 | return result; 72 | } 73 | 74 | private getFactIndex(reference: FactReference): number { 75 | const key = reference.type + ":" + reference.hash; 76 | if (!this.indexByFactReference.hasOwnProperty(key)) { 77 | throw new Error(`Fact reference not found in graph: ${key}`); 78 | } 79 | return this.indexByFactReference[key]; 80 | } 81 | } 82 | 83 | export function serializeGraph(graph: FactEnvelope[]) { 84 | const serializedData: string[] = []; 85 | const serializer = new GraphSerializer(chunk => serializedData.push(chunk)); 86 | serializer.serialize(graph); 87 | const body = serializedData.join(''); 88 | return body; 89 | } 90 | -------------------------------------------------------------------------------- /src/http/web-client.ts: -------------------------------------------------------------------------------- 1 | import { serializeSave } from "../fork/serialize"; 2 | import { FactEnvelope } from "../storage"; 3 | import { Trace } from "../util/trace"; 4 | import { ContentTypeGraph, ContentTypeJson, ContentTypeText, PostAccept, PostContentType } from "./ContentType"; 5 | import { FeedResponse, FeedsResponse, LoadMessage, LoadResponse, LoginResponse } from "./messages"; 6 | import { serializeGraph } from "./serializer"; 7 | 8 | export type SyncStatus = { 9 | sending: boolean; 10 | retrying: boolean; 11 | retryInSeconds: number; 12 | warning: string; 13 | } 14 | 15 | export class SyncStatusNotifier { 16 | private syncStatusHandlers: ((status: SyncStatus) => void)[] = []; 17 | 18 | onSyncStatus(handler: (status: SyncStatus) => void) { 19 | this.syncStatusHandlers.push(handler); 20 | } 21 | 22 | notify(status: SyncStatus) { 23 | this.syncStatusHandlers.forEach(handler => { 24 | handler(status); 25 | }); 26 | } 27 | } 28 | 29 | export interface HttpSuccess { 30 | result: "success"; 31 | response: {} 32 | } 33 | 34 | export interface HttpFailure { 35 | result: "failure"; 36 | error: string; 37 | } 38 | 39 | export interface HttpRetry { 40 | result: "retry"; 41 | error: string 42 | } 43 | 44 | export type HttpResponse = HttpSuccess | HttpFailure | HttpRetry; 45 | 46 | export interface HttpConnection { 47 | get(path: string): Promise<{}>; 48 | getStream(path: string, onResponse: (response: {}) => Promise, onError: (err: Error) => void): () => void; 49 | post(path: string, contentType: PostContentType, accept: PostAccept, body: string, timeoutSeconds: number): Promise; 50 | getAcceptedContentTypes(path: 
string): Promise; 51 | } 52 | 53 | function delay(timeSeconds: number): Promise { 54 | return new Promise((resolve, reject) => { 55 | setTimeout(resolve, timeSeconds * 1000); 56 | }); 57 | } 58 | 59 | export interface WebClientConfig { 60 | timeoutSeconds: number; 61 | } 62 | 63 | export class WebClient { 64 | private saveContentTypes: string[] | null = null; 65 | 66 | constructor( 67 | private httpConnection: HttpConnection, 68 | private syncStatusNotifier: SyncStatusNotifier, 69 | private config: WebClientConfig) { 70 | } 71 | 72 | async login() { 73 | return await this.httpConnection.get('/login'); 74 | } 75 | 76 | async save(envelopes: FactEnvelope[]) { 77 | if (this.saveContentTypes === null) { 78 | this.saveContentTypes = await this.httpConnection.getAcceptedContentTypes('/save'); 79 | } 80 | 81 | if (this.saveContentTypes.includes(ContentTypeGraph)) { 82 | await this.post('/save', ContentTypeGraph, undefined, serializeGraph(envelopes)); 83 | } else { 84 | await this.post('/save', ContentTypeJson, ContentTypeJson, JSON.stringify(serializeSave(envelopes))); 85 | } 86 | } 87 | 88 | async saveWithRetry(envelopes: FactEnvelope[]) { 89 | if (this.saveContentTypes === null) { 90 | this.saveContentTypes = await this.httpConnection.getAcceptedContentTypes('/save'); 91 | } 92 | 93 | if (this.saveContentTypes.includes(ContentTypeGraph)) { 94 | await this.postWithLimitedRetry('/save', ContentTypeGraph, undefined, serializeGraph(envelopes)); 95 | } else { 96 | await this.postWithLimitedRetry('/save', ContentTypeJson, ContentTypeJson, JSON.stringify(serializeSave(envelopes))); 97 | } 98 | } 99 | 100 | async load(load: LoadMessage) { 101 | return await this.post('/load', ContentTypeJson, ContentTypeJson, JSON.stringify(load)); 102 | } 103 | 104 | async loadWithRetry(load: LoadMessage) { 105 | return await this.postWithLimitedRetry('/load', ContentTypeJson, ContentTypeJson, JSON.stringify(load)); 106 | } 107 | 108 | async feeds(request: string): Promise { 109 | return await this.post('/feeds', ContentTypeText, ContentTypeJson, request); 110 | } 111 | 112 | async feed(feed: string, bookmark: string): Promise { 113 | return await this.httpConnection.get(`/feeds/${feed}?b=${bookmark}`); 114 | } 115 | 116 | streamFeed(feed: string, bookmark: string, onResponse: (response: FeedResponse) => Promise, onError: (err: Error) => void): () => void { 117 | return this.httpConnection.getStream(`/feeds/${feed}?b=${bookmark}`, r => onResponse(r as FeedResponse), onError); 118 | } 119 | 120 | private async post(path: string, contentType: PostContentType, accept: PostAccept, body: string) { 121 | const response = await this.httpConnection.post(path, contentType, accept, body, this.config.timeoutSeconds); 122 | if (response.result === 'success') { 123 | return response.response; 124 | } 125 | else { 126 | throw new Error(response.error); 127 | } 128 | } 129 | 130 | private async postWithLimitedRetry(path: string, contentType: PostContentType, accept: PostAccept, body: string) { 131 | let timeoutSeconds = this.config.timeoutSeconds; 132 | let retrySeconds = 1; 133 | 134 | while (true) { 135 | const response = await this.httpConnection.post(path, contentType, accept, body, this.config.timeoutSeconds); 136 | if (response.result === 'success') { 137 | return response.response; 138 | } 139 | else if (response.result === 'failure') { 140 | throw new Error(response.error); 141 | } 142 | else { 143 | if (retrySeconds <= 4) { 144 | Trace.warn(`Retrying in ${retrySeconds} seconds: ${response.error}`); 145 | await 
delay(retrySeconds + Math.random()); 146 | timeoutSeconds = Math.min(timeoutSeconds * 2, 60); 147 | retrySeconds = retrySeconds * 2; 148 | } 149 | else { 150 | throw new Error(response.error); 151 | } 152 | } 153 | } 154 | } 155 | } 156 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | export { Authentication } from './authentication/authentication'; 2 | export { AuthenticationNoOp } from './authentication/authentication-noop'; 3 | export { AuthenticationTest } from './authentication/authentication-test'; 4 | export { Authorization } from './authorization/authorization'; 5 | export { AuthorizationEngine, Forbidden } from './authorization/authorization-engine'; 6 | export { AuthorizationNoOp } from "./authorization/authorization-noop"; 7 | export { AuthorizationRules, describeAuthorizationRules } from "./authorization/authorizationRules"; 8 | export { generateKeyPair, KeyPair, signFacts } from "./cryptography/key-pair"; 9 | export { verifyEnvelopes } from "./cryptography/verify"; 10 | export { DistributionEngine } from './distribution/distribution-engine'; 11 | export { describeDistributionRules, DistributionRules } from './distribution/distribution-rules'; 12 | export { canonicalizeFact, canonicalPredecessors, computeHash, computeObjectHash } from './fact/hash'; 13 | export { dehydrateFact, dehydrateReference, hydrate, hydrateFromTree } from "./fact/hydrate"; 14 | export { TopologicalSorter } from './fact/sorter'; 15 | export { Fork } from "./fork/fork"; 16 | export { PassThroughFork } from "./fork/pass-through-fork"; 17 | export { PersistentFork } from "./fork/persistent-fork"; 18 | export { TransientFork } from './fork/transient-fork'; 19 | export { AuthenticationProvider, HttpHeaders } from "./http/authenticationProvider"; 20 | export { GraphDeserializer, GraphSource } from "./http/deserializer"; 21 | export { FetchConnection } from "./http/fetch"; 22 | export { HttpNetwork } from "./http/httpNetwork"; 23 | export { parseLoadMessage, parseSaveMessage } from './http/messageParsers'; 24 | export { 25 | FeedResponse, 26 | FeedsResponse, 27 | LoadMessage, 28 | LoadResponse, 29 | LoginResponse, 30 | ProfileMessage, 31 | SaveMessage 32 | } from './http/messages'; 33 | export { GraphSerializer } from "./http/serializer"; 34 | export { HttpConnection, HttpResponse, SyncStatus, SyncStatusNotifier, WebClient } from "./http/web-client"; 35 | export { Fact, Jinaga, MakeObservable, Profile } from './jinaga'; 36 | export { JinagaBrowser, JinagaBrowserConfig } from "./jinaga-browser"; 37 | export { JinagaTest, JinagaTestConfig } from "./jinaga-test"; 38 | export { FactManager } from "./managers/factManager"; 39 | export { Network, NetworkManager, NetworkNoOp } from "./managers/NetworkManager"; 40 | export { MemoryStore } from './memory/memory-store'; 41 | export { Device, User, UserName } from "./model/user"; 42 | export { ObservableSource, ObservableSource as ObservableSourceImpl, SpecificationListener } from './observable/observable'; 43 | export { ObservableCollection } from './observer/observer'; 44 | export { PurgeConditions } from './purge/purgeConditions'; 45 | export { validatePurgeSpecification } from './purge/validate'; 46 | export { RuleSet } from './rules/RuleSet'; 47 | export { Declaration } from './specification/declaration'; 48 | export { describeDeclaration, describeSpecification } from './specification/description'; 49 | export { buildFeeds } from 
'./specification/feed-builder'; 50 | export { FeedCache, FeedObject } from "./specification/feed-cache"; 51 | export { invertSpecification, SpecificationInverse } from "./specification/inverse"; 52 | export { buildModel, FactRepository, LabelOf, Model, ModelBuilder, ProjectionOf, SpecificationOf } from './specification/model'; 53 | export { EdgeDescription, emptySkeleton, FactDescription, InputDescription, NotExistsConditionDescription, OutputDescription, Skeleton, skeletonOfSpecification } from './specification/skeleton'; 54 | export { ComponentProjection, CompositeProjection, FactProjection, FieldProjection, getAllFactTypes, getAllRoles, HashProjection, Label, Match, PathCondition, Projection, SingularProjection, Specification, specificationIsDeterministic, specificationIsNotDeterministic, SpecificationProjection, splitBeforeFirstSuccessor } from './specification/specification'; 55 | export { Invalid, SpecificationParser } from './specification/specification-parser'; 56 | export { computeTupleSubsetHash, FactEnvelope, factEnvelopeEquals, FactFeed, FactRecord, FactReference, factReferenceEquals, FactSignature, FactTuple, PredecessorCollection, ProjectedResult, Queue, ReferencesByName, Storage, validateGiven } from './storage'; 57 | export { UserIdentity } from './user-identity'; 58 | export { ConsoleTracer, NoOpTracer, Trace, Tracer } from './util/trace'; 59 | 60 | // Export the JinagaBrowser class using the alias JinagaClient 61 | export { JinagaBrowser as JinagaClient } from "./jinaga-browser"; 62 | -------------------------------------------------------------------------------- /src/indexeddb/driver.ts: -------------------------------------------------------------------------------- 1 | import { FactReference } from '../storage'; 2 | 3 | function upgradingToVersion({ newVersion, oldVersion }: IDBVersionChangeEvent, ver: number) { 4 | return newVersion && newVersion >= ver && oldVersion < ver; 5 | } 6 | 7 | function openDatabase(indexName: string): Promise { 8 | return new Promise((resolve, reject) => { 9 | const request = self.indexedDB.open(indexName, 2); 10 | request.onsuccess = _ => resolve(request.result); 11 | request.onerror = _ => reject(`Error opening database ${indexName}: ${JSON.stringify(request.error, null, 2)}.`); 12 | request.onupgradeneeded = ev => { 13 | const db = request.result; 14 | if (upgradingToVersion(ev, 1)) { 15 | db.createObjectStore('login'); 16 | db.createObjectStore('fact'); 17 | db.createObjectStore('ancestor'); 18 | const edgeObjectStore = db.createObjectStore('edge', { 19 | keyPath: ['successor', 'predecessor', 'role'] 20 | }); 21 | edgeObjectStore.createIndex('predecessor', ['predecessor', 'role'], { unique: false }); 22 | edgeObjectStore.createIndex('successor', ['successor', 'role'], { unique: false }); 23 | edgeObjectStore.createIndex('all', 'successor', { unique: false }); 24 | db.createObjectStore('queue'); 25 | } 26 | if (upgradingToVersion(ev, 2)) { 27 | db.createObjectStore('bookmark'); 28 | const specificationObjectStore = db.createObjectStore('specification'); 29 | specificationObjectStore.createIndex('mru', '', { unique: false }); 30 | } 31 | } 32 | }); 33 | } 34 | 35 | export async function withDatabase(indexName: string, action: (db: IDBDatabase) => Promise) { 36 | const db = await openDatabase(indexName); 37 | const result = await action(db); 38 | db.close(); 39 | return result; 40 | } 41 | 42 | export async function withTransaction(db: IDBDatabase, storeNames: string[], mode: IDBTransactionMode, action: (transaction: IDBTransaction) => 
Promise) { 43 | const transaction = db.transaction(storeNames, mode); 44 | const transactionComplete = new Promise((resolve, reject) => { 45 | transaction.oncomplete = _ => resolve(); 46 | transaction.onerror = _ => reject(`Error executing transaction ${JSON.stringify(transaction.error?.message, null, 2)}`); 47 | }); 48 | const [result, v] = await Promise.all([action(transaction), transactionComplete]); 49 | return result; 50 | } 51 | 52 | export function execRequest(request: IDBRequest) { 53 | return new Promise((resolve, reject) => { 54 | request.onsuccess = (_: Event) => resolve(request.result); 55 | request.onerror = (_: Event) => reject(`Error executing request ${JSON.stringify(request.error?.message, null, 2)}`); 56 | }); 57 | } 58 | 59 | export function factKey(fact: FactReference) { 60 | return `${fact.type}:${fact.hash}`; 61 | } 62 | 63 | export function keyToReference(key: string): FactReference { 64 | const regex = /([^:]*):(.*)/; 65 | const match = regex.exec(key); 66 | if (!match) { 67 | throw new Error(`Invalid key ${key}`); 68 | } 69 | const [ _, type, hash ] = match; 70 | return { type, hash }; 71 | } 72 | -------------------------------------------------------------------------------- /src/indexeddb/indexeddb-login-store.ts: -------------------------------------------------------------------------------- 1 | import { FactRecord } from '../storage'; 2 | import { execRequest, withDatabase, withTransaction } from './driver'; 3 | 4 | export interface LoginRecord { 5 | userFact: FactRecord; 6 | displayName: string; 7 | } 8 | 9 | export class IndexedDBLoginStore { 10 | constructor ( 11 | private indexName: string 12 | ) { } 13 | 14 | saveLogin(sessionToken: string, userFact: FactRecord, displayName: string) { 15 | return withDatabase(this.indexName, db => { 16 | return withTransaction(db, ['login'], 'readwrite', async tx => { 17 | const loginObjectStore = tx.objectStore('login'); 18 | await execRequest(loginObjectStore.put({ userFact, displayName }, sessionToken)); 19 | }); 20 | }); 21 | } 22 | 23 | loadLogin(sessionToken: string): Promise { 24 | return withDatabase(this.indexName, async db => { 25 | return withTransaction(db, ['login'], 'readonly', tx => { 26 | const loginObjectStore = tx.objectStore('login'); 27 | return execRequest(loginObjectStore.get(sessionToken)); 28 | }); 29 | }); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/jinaga-browser.ts: -------------------------------------------------------------------------------- 1 | import { Authentication } from "./authentication/authentication"; 2 | import { AuthenticationNoOp } from "./authentication/authentication-noop"; 3 | import { AuthenticationOffline } from "./authentication/authentication-offline"; 4 | import { AuthenticationWebClient } from "./authentication/authentication-web-client"; 5 | import { Fork } from "./fork/fork"; 6 | import { PassThroughFork } from "./fork/pass-through-fork"; 7 | import { PersistentFork } from "./fork/persistent-fork"; 8 | import { TransientFork } from "./fork/transient-fork"; 9 | import { AuthenticationProvider } from "./http/authenticationProvider"; 10 | import { FetchConnection } from "./http/fetch"; 11 | import { HttpNetwork } from "./http/httpNetwork"; 12 | import { SyncStatusNotifier, WebClient } from "./http/web-client"; 13 | import { IndexedDBLoginStore } from "./indexeddb/indexeddb-login-store"; 14 | import { IndexedDBQueue } from "./indexeddb/indexeddb-queue"; 15 | import { IndexedDBStore } from 
"./indexeddb/indexeddb-store"; 16 | import { Jinaga } from "./jinaga"; 17 | import { FactManager } from "./managers/factManager"; 18 | import { Network, NetworkNoOp } from "./managers/NetworkManager"; 19 | import { MemoryStore } from "./memory/memory-store"; 20 | import { ObservableSource } from "./observable/observable"; 21 | import { PurgeConditions } from "./purge/purgeConditions"; 22 | import { validatePurgeSpecification } from "./purge/validate"; 23 | import { Specification } from "./specification/specification"; 24 | import { Storage } from "./storage"; 25 | 26 | export type JinagaBrowserConfig = { 27 | httpEndpoint?: string, 28 | wsEndpoint?: string, 29 | indexedDb?: string, 30 | httpTimeoutSeconds?: number, 31 | httpAuthenticationProvider?: AuthenticationProvider, 32 | queueProcessingDelayMs?: number, 33 | purgeConditions?: (p: PurgeConditions) => PurgeConditions 34 | } 35 | 36 | export class JinagaBrowser { 37 | static create(config: JinagaBrowserConfig) { 38 | const store = createStore(config); 39 | const observableSource = new ObservableSource(store); 40 | const syncStatusNotifier = new SyncStatusNotifier(); 41 | const webClient = createWebClient(config, syncStatusNotifier); 42 | const fork = createFork(config, store, webClient); 43 | const authentication = createAuthentication(config, webClient); 44 | const network = createNetwork(webClient); 45 | const purgeConditions = createPurgeConditions(config); 46 | const factManager = new FactManager(fork, observableSource, store, network, purgeConditions); 47 | return new Jinaga(authentication, factManager, syncStatusNotifier); 48 | } 49 | } 50 | 51 | function createStore(config: JinagaBrowserConfig): Storage { 52 | if (config.indexedDb) { 53 | return new IndexedDBStore(config.indexedDb); 54 | } 55 | else { 56 | return new MemoryStore(); 57 | } 58 | } 59 | 60 | function createWebClient( 61 | config: JinagaBrowserConfig, 62 | syncStatusNotifier: SyncStatusNotifier 63 | ): WebClient | null { 64 | if (config.httpEndpoint) { 65 | const provider = config.httpAuthenticationProvider; 66 | const getHeaders = provider 67 | ? () => provider.getHeaders() 68 | : () => Promise.resolve({}); 69 | const reauthenticate = provider 70 | ? 
() => provider.reauthenticate() 71 | : () => Promise.resolve(false); 72 | const httpConnection = new FetchConnection(config.httpEndpoint, getHeaders, reauthenticate); 73 | const httpTimeoutSeconds = config.httpTimeoutSeconds || 30; 74 | const webClient = new WebClient(httpConnection, syncStatusNotifier, { 75 | timeoutSeconds: httpTimeoutSeconds 76 | }); 77 | return webClient; 78 | } 79 | else { 80 | return null; 81 | } 82 | } 83 | 84 | function createFork( 85 | config: JinagaBrowserConfig, 86 | store: Storage, 87 | webClient: WebClient | null 88 | ): Fork { 89 | if (webClient) { 90 | if (config.indexedDb) { 91 | const queue = new IndexedDBQueue(config.indexedDb); 92 | const queueProcessingDelay = config.queueProcessingDelayMs || 100; 93 | const fork = new PersistentFork(store, queue, webClient, queueProcessingDelay); 94 | fork.initialize(); 95 | return fork; 96 | } 97 | else { 98 | const fork = new TransientFork(store, webClient); 99 | return fork; 100 | } 101 | } 102 | else { 103 | const fork = new PassThroughFork(store); 104 | return fork; 105 | } 106 | } 107 | 108 | function createAuthentication( 109 | config: JinagaBrowserConfig, 110 | webClient: WebClient | null 111 | ): Authentication { 112 | if (webClient) { 113 | if (config.indexedDb) { 114 | const loginStore = new IndexedDBLoginStore(config.indexedDb); 115 | const authentication = new AuthenticationOffline(loginStore, webClient); 116 | return authentication; 117 | } 118 | else { 119 | const authentication = new AuthenticationWebClient(webClient); 120 | return authentication; 121 | } 122 | } 123 | else { 124 | const authentication = new AuthenticationNoOp(); 125 | return authentication; 126 | } 127 | } 128 | 129 | function createNetwork( 130 | webClient: WebClient | null 131 | ): Network { 132 | if (webClient) { 133 | const network = new HttpNetwork(webClient); 134 | return network; 135 | } 136 | else { 137 | return new NetworkNoOp(); 138 | } 139 | } 140 | 141 | function createPurgeConditions( 142 | config: JinagaBrowserConfig 143 | ): Specification[] { 144 | if (config.purgeConditions) { 145 | var specifications = config.purgeConditions(new PurgeConditions([])).specifications; 146 | var validationFailures: string[] = specifications.map(specification => 147 | validatePurgeSpecification(specification)).flat(); 148 | if (validationFailures.length > 0) { 149 | throw new Error(validationFailures.join("\n")); 150 | } 151 | return specifications; 152 | } 153 | else { 154 | return []; 155 | } 156 | } -------------------------------------------------------------------------------- /src/jinaga-test.ts: -------------------------------------------------------------------------------- 1 | import { Authentication } from './authentication/authentication'; 2 | import { AuthenticationTest } from './authentication/authentication-test'; 3 | import { AuthorizationRules } from './authorization/authorizationRules'; 4 | import { DistributionEngine } from './distribution/distribution-engine'; 5 | import { DistributionRules } from './distribution/distribution-rules'; 6 | import { dehydrateFact, Dehydration } from './fact/hydrate'; 7 | import { PassThroughFork } from './fork/pass-through-fork'; 8 | import { SyncStatusNotifier } from './http/web-client'; 9 | import { Jinaga } from './jinaga'; 10 | import { FactManager } from './managers/factManager'; 11 | import { Network, NetworkDistribution, NetworkNoOp } from './managers/NetworkManager'; 12 | import { MemoryStore } from './memory/memory-store'; 13 | import { ObservableSource } from 
'./observable/observable'; 14 | import { PurgeConditions } from "./purge/purgeConditions"; 15 | import { Model } from './specification/model'; 16 | import { Specification } from "./specification/specification"; 17 | import { FactEnvelope, Storage } from './storage'; 18 | 19 | export type JinagaTestConfig = { 20 | model?: Model, 21 | authorization?: (a: AuthorizationRules) => AuthorizationRules, 22 | distribution?: (d: DistributionRules) => DistributionRules, 23 | user?: {}, 24 | device?: {}, 25 | initialState?: {}[], 26 | purgeConditions?: (p: PurgeConditions) => PurgeConditions 27 | } 28 | 29 | export class JinagaTest { 30 | static create(config: JinagaTestConfig) { 31 | const store = new MemoryStore(); 32 | this.saveInitialState(config, store); 33 | const observableSource = new ObservableSource(store); 34 | const syncStatusNotifier = new SyncStatusNotifier(); 35 | const fork = new PassThroughFork(store); 36 | const authentication = this.createAuthentication(config, store); 37 | const network = this.createNetwork(config, store); 38 | const purgeConditions = this.createPurgeConditions(config); 39 | const factManager = new FactManager(fork, observableSource, store, network, purgeConditions); 40 | return new Jinaga(authentication, factManager, syncStatusNotifier); 41 | } 42 | 43 | static saveInitialState(config: JinagaTestConfig, store: MemoryStore) { 44 | if (config.initialState) { 45 | const dehydrate = new Dehydration(); 46 | config.initialState.forEach(obj => dehydrate.dehydrate(obj)); 47 | store.save(dehydrate.factRecords().map(f => <FactEnvelope>{ 48 | fact: f, 49 | signatures: [] 50 | })); 51 | } 52 | } 53 | 54 | static createAuthentication(config: JinagaTestConfig, store: Storage): Authentication { 55 | const authorizationRules = config.authorization ? 56 | config.authorization(new AuthorizationRules(config.model)) : null; 57 | const userFact = JinagaTest.getUserFact(config); 58 | const deviceFact = JinagaTest.getDeviceFact(config); 59 | 60 | return new AuthenticationTest(store, authorizationRules, userFact, deviceFact); 61 | } 62 | 63 | static createNetwork(config: JinagaTestConfig, store: MemoryStore): Network { 64 | if (config.distribution) { 65 | const distributionRules = config.distribution(new DistributionRules([])); 66 | const distributionEngine = new DistributionEngine(distributionRules, store); 67 | return new NetworkDistribution(distributionEngine, this.getUserFact(config)); 68 | } 69 | else { 70 | return new NetworkNoOp(); 71 | } 72 | } 73 | 74 | static createPurgeConditions(config: JinagaTestConfig): Specification[] { 75 | if (config.purgeConditions) { 76 | return config.purgeConditions(new PurgeConditions([])).specifications; 77 | } 78 | else { 79 | return []; 80 | } 81 | } 82 | 83 | private static getUserFact(config: JinagaTestConfig) { 84 | return config.user ? dehydrateFact(config.user)[0] : null; 85 | } 86 | 87 | private static getDeviceFact(config: JinagaTestConfig) { 88 | return config.device ?
dehydrateFact(config.device)[0] : null; 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /src/managers/PurgeManager.ts: -------------------------------------------------------------------------------- 1 | import { testSpecificationForCompliance } from "../purge/purgeCompliance"; 2 | import { SpecificationInverse, invertSpecification } from "../specification/inverse"; 3 | import { Specification } from "../specification/specification"; 4 | import { FactEnvelope, FactReference, ProjectedResult, Storage } from "../storage"; 5 | import { Trace } from "../util/trace"; 6 | 7 | export class PurgeManager { 8 | private purgeInverses: SpecificationInverse[]; 9 | 10 | constructor(private readonly store: Storage, private readonly purgeConditions: Specification[]) { 11 | this.purgeInverses = purgeConditions.map(pc => invertSpecification(pc)).flat(); 12 | } 13 | 14 | async purge(): Promise { 15 | const count = await this.store.purge(this.purgeConditions); 16 | if (count > 0) { 17 | Trace.counter("facts_purged", count); 18 | } 19 | } 20 | 21 | async triggerPurge(factsAdded: FactEnvelope[]): Promise { 22 | for (const envelope of factsAdded) { 23 | const fact = envelope.fact; 24 | for (const purgeInverse of this.purgeInverses) { 25 | // Only run the purge inverse if the given type matches the fact type 26 | if (purgeInverse.inverseSpecification.given[0].type !== fact.type) { 27 | continue; 28 | } 29 | 30 | const givenReference = { 31 | type: fact.type, 32 | hash: fact.hash 33 | }; 34 | const results: ProjectedResult[] = await this.store.read([givenReference], purgeInverse.inverseSpecification); 35 | for (const result of results) { 36 | const givenName = purgeInverse.givenSubset[0]; 37 | // The given is the purge root 38 | const purgeRoot: FactReference = result.tuple[givenName]; 39 | // All other members of the result tuple are triggers 40 | const triggers: FactReference[] = Object.keys(result.tuple) 41 | .filter(k => k !== givenName) 42 | .map(k => result.tuple[k]); 43 | 44 | // Purge all descendants of the purge root except for the triggers 45 | const count = await this.store.purgeDescendants(purgeRoot, triggers); 46 | if (count > 0) { 47 | Trace.counter("facts_purged", count); 48 | } 49 | } 50 | } 51 | } 52 | } 53 | 54 | public checkCompliance(specification: Specification): void { 55 | const failures = testSpecificationForCompliance(specification, this.purgeConditions); 56 | if (failures.length > 0) { 57 | const message = failures.join("\n"); 58 | throw new Error(message); 59 | } 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /src/managers/QueueProcessor.ts: -------------------------------------------------------------------------------- 1 | import { Trace } from "../util/trace"; 2 | 3 | /** 4 | * Interface for a component that can save data. 5 | */ 6 | export interface Saver { 7 | /** 8 | * Saves data to the network. 
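 * The queue processor invokes save() once per batch; the promise should resolve
 * once the queued work has been handled, or reject so that the failure reaches
 * callers of processQueueNow() (background failures are only traced).
 *
 * A minimal sketch of an implementation (hypothetical helper name):
 * @example
 * const saver: Saver = {
 *     async save() {
 *         await sendPendingFacts(); // assumed application-specific upload
 *     }
 * };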
9 | */ 10 | save(): Promise; 11 | } 12 | 13 | class Batch { 14 | private isActive = false; 15 | private hasWork = false; 16 | private isTerminated = false; 17 | private delay: NodeJS.Timeout | null = null; 18 | private nextBatch: Batch | null = null; 19 | private notifyResolver: (() => void) | null = null; 20 | private notifyRejector: ((error: Error) => void) | null = null; 21 | private notifyPromise: Promise | null = null; 22 | 23 | constructor( 24 | private readonly saver: Saver, 25 | private readonly delayMilliseconds: number, 26 | private readonly setBatch: (batch: Batch) => void 27 | ) { 28 | } 29 | 30 | activate() { 31 | this.isActive = true; 32 | this.beginWaiting(); 33 | } 34 | 35 | workArrived() { 36 | this.hasWork = true; 37 | this.beginWaiting(); 38 | } 39 | 40 | runNow(): Promise { 41 | if (this.isTerminated) { 42 | return Promise.resolve(); 43 | } 44 | if (!this.notifyPromise) { 45 | this.notifyPromise = new Promise((resolve, reject) => { 46 | this.notifyResolver = resolve; 47 | this.notifyRejector = reject; 48 | }); 49 | this.beginWorking(); 50 | } 51 | return this.notifyPromise; 52 | } 53 | 54 | terminate() { 55 | this.isTerminated = true; 56 | if (this.delay) { 57 | clearTimeout(this.delay); 58 | this.delay = null; 59 | } 60 | if (this.notifyRejector) { 61 | this.notifyRejector(new Error("QueueProcessor terminated")); 62 | } 63 | } 64 | 65 | private beginWaiting() { 66 | if (this.isTerminated || !this.isActive || !this.hasWork || this.delay) { 67 | return; 68 | } 69 | if (this.delayMilliseconds === 0) { 70 | this.beginWorking(); 71 | } else { 72 | this.delay = setTimeout(() => { 73 | this.beginWorking(); 74 | }, this.delayMilliseconds); 75 | } 76 | } 77 | 78 | private beginWorking() { 79 | if (this.nextBatch) { 80 | return; 81 | } 82 | this.nextBatch = new Batch(this.saver, this.delayMilliseconds, this.setBatch); 83 | this.setBatch(this.nextBatch); 84 | this.saver.save() 85 | .then(() => this.done(null)) 86 | .catch((error) => this.done(error)); 87 | } 88 | 89 | private done(error: Error | null) { 90 | if (this.notifyResolver) { 91 | if (error) { 92 | this.notifyRejector!(error); 93 | } else { 94 | this.notifyResolver!(); 95 | } 96 | } else if (error) { 97 | Trace.error(error); 98 | } 99 | if (this.nextBatch) { 100 | this.nextBatch.activate(); 101 | } 102 | } 103 | } 104 | 105 | /** 106 | * Processes a queue with a debouncing mechanism. 107 | * This improves performance by batching multiple operations together. 108 | */ 109 | export class QueueProcessor { 110 | 111 | private currentBatch: Batch; 112 | 113 | /** 114 | * Creates a new QueueProcessor. 115 | * @param saver The component that will save the data. 116 | * @param delayMilliseconds The delay in milliseconds before processing the queue. 117 | */ 118 | constructor( 119 | saver: Saver, 120 | delayMilliseconds: number 121 | ) { 122 | this.currentBatch = new Batch(saver, delayMilliseconds, (batch) => { 123 | this.currentBatch = batch; 124 | }); 125 | this.currentBatch.activate(); 126 | } 127 | 128 | /** 129 | * Schedules processing of the queue with a delay. 130 | * This allows multiple operations to be batched together. 131 | */ 132 | public scheduleProcessing(): void { 133 | this.currentBatch.workArrived(); 134 | } 135 | 136 | /** 137 | * Processes the queue immediately, bypassing any delay. 138 | */ 139 | public async processQueueNow(): Promise { 140 | await this.currentBatch.runNow(); 141 | } 142 | 143 | /** 144 | * Disposes of the QueueProcessor. 
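 * Terminates the current batch: any pending delay timer is cleared, and callers
 * awaiting processQueueNow() are rejected with a "QueueProcessor terminated" error.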
145 | */ 146 | public dispose() { 147 | this.currentBatch.terminate(); 148 | } 149 | } -------------------------------------------------------------------------------- /src/model/user.ts: -------------------------------------------------------------------------------- 1 | export class User { 2 | static Type = "Jinaga.User" as const; 3 | type = User.Type; 4 | 5 | constructor( 6 | public publicKey: string 7 | ) { } 8 | } 9 | 10 | export class UserName { 11 | static Type = "Jinaga.User.Name" as const; 12 | public type = UserName.Type; 13 | 14 | constructor( 15 | public prior: UserName[], 16 | public user: User, 17 | public value: string 18 | ) { } 19 | } 20 | 21 | export class Device { 22 | static Type = "Jinaga.Device" as const; 23 | public type = Device.Type; 24 | 25 | constructor( 26 | public publicKey: string 27 | ) { } 28 | } -------------------------------------------------------------------------------- /src/observable/observable.ts: -------------------------------------------------------------------------------- 1 | import { describeSpecification } from '../specification/description'; 2 | import { Specification } from "../specification/specification"; 3 | import { FactEnvelope, FactRecord, ProjectedResult, Storage } from '../storage'; 4 | import { computeStringHash } from '../util/encoding'; 5 | 6 | export interface SpecificationListener { 7 | onResult(results: ProjectedResult[]): Promise<void>; 8 | } 9 | 10 | export class ObservableSource { 11 | private listenersByTypeAndSpecification: Map<string, Map<string, { 12 | specification: Specification, 13 | listeners: SpecificationListener[] 14 | }>> = new Map(); 15 | 16 | constructor(private store: Storage) { 17 | } 18 | 19 | async notify(saved: FactEnvelope[]): Promise<void> { 20 | for (let index = 0; index < saved.length; index++) { 21 | const envelope = saved[index]; 22 | await this.notifyFactSaved(envelope.fact); 23 | } 24 | } 25 | 26 | public addSpecificationListener(specification: Specification, onResult: (results: ProjectedResult[]) => Promise<void>): SpecificationListener { 27 | if (specification.given.length !== 1) { 28 | throw new Error("Specification must have exactly one given fact"); 29 | } 30 | const givenType = specification.given[0].type; 31 | const specificationKey = computeStringHash(describeSpecification(specification, 0)); 32 | 33 | let listenersBySpecification = this.listenersByTypeAndSpecification.get(givenType); 34 | if (!listenersBySpecification) { 35 | listenersBySpecification = new Map(); 36 | this.listenersByTypeAndSpecification.set(givenType, listenersBySpecification); 37 | } 38 | 39 | let listeners = listenersBySpecification.get(specificationKey); 40 | if (!listeners) { 41 | listeners = { 42 | specification, 43 | listeners: [] 44 | }; 45 | listenersBySpecification.set(specificationKey, listeners); 46 | } 47 | 48 | const specificationListener = { 49 | onResult 50 | }; 51 | listeners.listeners.push(specificationListener); 52 | return specificationListener; 53 | } 54 | 55 | public removeSpecificationListener(specificationListener: SpecificationListener) { 56 | for (const [givenType, listenersBySpecification] of this.listenersByTypeAndSpecification) { 57 | for (const [specificationKey, listeners] of listenersBySpecification) { 58 | const index = listeners.listeners.indexOf(specificationListener); 59 | if (index >= 0) { 60 | listeners.listeners.splice(index, 1); 61 | 62 | if (listeners.listeners.length === 0) { 63 | listenersBySpecification.delete(specificationKey); 64 | 65 | if (listenersBySpecification.size === 0) { 66 | this.listenersByTypeAndSpecification.delete(givenType); 67 | } 68 | } 69 | } 70 | } 71 | } 72 | } 73 
| 74 | private async notifyFactSaved(fact: FactRecord) { 75 | const listenersBySpecification = this.listenersByTypeAndSpecification.get(fact.type); 76 | if (listenersBySpecification) { 77 | for (const [specificationKey, listeners] of listenersBySpecification) { 78 | if (listeners && listeners.listeners.length > 0) { 79 | const specification = listeners.specification; 80 | const givenReference = { 81 | type: fact.type, 82 | hash: fact.hash 83 | }; 84 | const results = await this.store.read([givenReference], specification); 85 | for (const specificationListener of listeners.listeners) { 86 | await specificationListener.onResult(results); 87 | } 88 | } 89 | } 90 | } 91 | } 92 | } -------------------------------------------------------------------------------- /src/observer/subscriber.ts: -------------------------------------------------------------------------------- 1 | import { Network } from "../managers/NetworkManager"; 2 | import { Storage, FactEnvelope, FactReference } from "../storage"; 3 | import { Trace } from "../util/trace"; 4 | 5 | export class Subscriber { 6 | private refCount: number = 0; 7 | private bookmark: string = ""; 8 | private resolved: boolean = false; 9 | private disconnect: (() => void) | undefined; 10 | private timer: NodeJS.Timer | undefined; 11 | 12 | constructor( 13 | private readonly feed: string, 14 | private readonly network: Network, 15 | private readonly store: Storage, 16 | private readonly notifyFactsAdded: (envelopes: FactEnvelope[]) => Promise 17 | ) {} 18 | 19 | addRef() { 20 | this.refCount++; 21 | return this.refCount === 1; 22 | } 23 | 24 | release() { 25 | this.refCount--; 26 | return this.refCount === 0; 27 | } 28 | 29 | async start(): Promise { 30 | this.bookmark = await this.store.loadBookmark(this.feed); 31 | await new Promise((resolve, reject) => { 32 | this.resolved = false; 33 | // Refresh the connection every 4 minutes. 
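                // Long-lived feed streams are assumed to be dropped by intermediaries after a
                // period of inactivity; reconnecting from the current bookmark resumes the feed
                // without re-fetching facts that have already been saved locally.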
34 | this.disconnect = this.connectToFeed(resolve, reject); 35 | this.timer = setInterval(() => { 36 | if (this.disconnect) { 37 | this.disconnect(); 38 | } 39 | this.disconnect = this.connectToFeed(resolve, reject); 40 | }, 4 * 60 * 1000); 41 | }); 42 | } 43 | 44 | stop() { 45 | if (this.timer) { 46 | clearInterval(this.timer); 47 | this.timer = undefined; 48 | } 49 | if (this.disconnect) { 50 | this.disconnect(); 51 | this.disconnect = undefined; 52 | } 53 | } 54 | 55 | private connectToFeed(resolve: (value: void | PromiseLike) => void, reject: (reason?: any) => void) { 56 | return this.network.streamFeed(this.feed, this.bookmark, async (factReferences, nextBookmark) => { 57 | const knownFactReferences: FactReference[] = await this.store.whichExist(factReferences); 58 | const unknownFactReferences: FactReference[] = factReferences.filter(fr => !knownFactReferences.includes(fr)); 59 | if (unknownFactReferences.length > 0) { 60 | const graph = await this.network.load(unknownFactReferences); 61 | await this.store.save(graph); 62 | if (graph.length > 0) { 63 | Trace.counter("facts_saved", graph.length); 64 | } 65 | await this.store.saveBookmark(this.feed, nextBookmark); 66 | this.bookmark = nextBookmark; 67 | await this.notifyFactsAdded(graph); 68 | } 69 | if (!this.resolved) { 70 | this.resolved = true; 71 | resolve(); 72 | } 73 | }, err => { 74 | if (!this.resolved) { 75 | this.resolved = true; 76 | reject(err); 77 | } 78 | }); 79 | } 80 | } -------------------------------------------------------------------------------- /src/purge/purgeCompliance.ts: -------------------------------------------------------------------------------- 1 | import { describeSpecification } from "../specification/description"; 2 | import { Condition, Match, Role, Specification } from "../specification/specification"; 3 | 4 | export function testSpecificationForCompliance(specification: Specification, purgeConditions: Specification[]): string[] { 5 | return specification.matches.map(m => testMatchForCompliance(m, purgeConditions)).flat(); 6 | } 7 | 8 | function testMatchForCompliance(match: Match, purgeConditions: Specification[]): string[] { 9 | var failedUnknownConditions = purgeConditions.filter(pc => 10 | pc.given[0].type === match.unknown.type && 11 | !hasCondition(match.conditions, pc) 12 | ); 13 | if (failedUnknownConditions.length > 0) { 14 | const specificationDescriptions = failedUnknownConditions.map(pc => describePurgeCondition(pc)).join(""); 15 | return [`The match for ${match.unknown.type} is missing purge conditions:\n${specificationDescriptions}`]; 16 | } 17 | 18 | var failedIntermediateConditions = purgeConditions.filter(pc => 19 | match.conditions.some(c => hasIntermediateType(c, pc.given[0].type)) 20 | ) 21 | if (failedIntermediateConditions.length > 0) { 22 | const specificationDescriptions = failedIntermediateConditions.map(pc => describePurgeCondition(pc)).join(""); 23 | return [`The match for ${match.unknown.type} passes through types that should have purge conditions:\n${specificationDescriptions}`]; 24 | } 25 | 26 | return []; 27 | } 28 | 29 | function hasCondition(conditions: Condition[], purgeCondition: Specification) { 30 | return conditions.some(c => conditionMatches(c, purgeCondition)); 31 | } 32 | 33 | function conditionMatches(condition: Condition, purgeCondition: Specification) { 34 | if (condition.type === "existential") { 35 | if (condition.exists) { 36 | // We only match negative existential conditions. 
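            // A purge condition names the successors that make a fact purgeable; a client
            // specification complies by excluding such facts with an equivalent !E (not exists)
            // guard, so a positive existential condition can never satisfy it.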
37 | return false; 38 | } 39 | // Compare the matches of the condition with the matches of the purge condition. 40 | if (condition.matches.length !== purgeCondition.matches.length) { 41 | return false; 42 | } 43 | return condition.matches.every((m, i) => matchesAreEquivalent(m, purgeCondition.matches[i])); 44 | } 45 | } 46 | 47 | function matchesAreEquivalent(match: Match, purgeMatch: Match): boolean { 48 | if (match.unknown.type !== purgeMatch.unknown.type) { 49 | return false; 50 | } 51 | if (match.conditions.length !== purgeMatch.conditions.length) { 52 | return false; 53 | } 54 | return match.conditions.every((c, i) => conditionsAreEquivalent(c, purgeMatch.conditions[i])); 55 | } 56 | 57 | function conditionsAreEquivalent(condition: Condition, purgeCondition: Condition) { 58 | if (condition.type === "path") { 59 | if (purgeCondition.type !== "path") { 60 | return false; 61 | } 62 | if (condition.rolesLeft.length !== purgeCondition.rolesLeft.length) { 63 | return false; 64 | } 65 | if (condition.rolesRight.length !== purgeCondition.rolesRight.length) { 66 | return false; 67 | } 68 | return condition.rolesLeft.every((r, i) => rolesAreEquivalent(r, purgeCondition.rolesLeft[i])) 69 | && condition.rolesRight.every((r, i) => rolesAreEquivalent(r, purgeCondition.rolesRight[i])); 70 | } 71 | else if (condition.type === "existential") { 72 | if (purgeCondition.type !== "existential") { 73 | return false; 74 | } 75 | if (condition.exists !== purgeCondition.exists) { 76 | return false; 77 | } 78 | if (condition.matches.length !== purgeCondition.matches.length) { 79 | return false; 80 | } 81 | return condition.matches.every((m, i) => matchesAreEquivalent(m, purgeCondition.matches[i])); 82 | } 83 | } 84 | 85 | function rolesAreEquivalent(role: Role, purgeRole: Role) { 86 | return role.predecessorType === purgeRole.predecessorType && 87 | role.name === purgeRole.name; 88 | } 89 | 90 | function hasIntermediateType(condition: Condition, type: string) { 91 | if (condition.type === "path") { 92 | var leftOnly = condition.rolesRight.length === 0; 93 | var rightOnly = condition.rolesLeft.length === 0; 94 | 95 | // If we only have left roles, then ignore the last role on the right. 96 | // If any of the roles is the type we're looking for, then we have an intermediate type. 97 | if (leftOnly) { 98 | var found = condition.rolesLeft.some((r, i) => 99 | r.predecessorType === type && 100 | i < condition.rolesLeft.length - 1); 101 | if (found) { 102 | return true; 103 | } 104 | } 105 | else { 106 | var found = condition.rolesLeft.some(r => r.predecessorType === type); 107 | if (found) { 108 | return true; 109 | } 110 | } 111 | 112 | // If we only have right roles, then ignore the last role on the left. 113 | // If any of the roles is the type we're looking for, then we have an intermediate type. 
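        // (The final role on a side that reaches the other label is the endpoint of the
        // path rather than a type the path merely passes through, so it is not counted.)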
114 | if (rightOnly) { 115 | var found = condition.rolesRight.some((r, i) => 116 | r.predecessorType === type && 117 | i < condition.rolesRight.length - 1); 118 | if (found) { 119 | return true; 120 | } 121 | } 122 | else { 123 | var found = condition.rolesRight.some(r => r.predecessorType === type); 124 | if (found) { 125 | return true; 126 | } 127 | } 128 | } 129 | return false; 130 | } 131 | 132 | function describePurgeCondition(specification: Specification): string { 133 | var specificationWithoutProjection: Specification = { 134 | ...specification, 135 | projection: { 136 | type: "composite", 137 | components: [] 138 | } 139 | }; 140 | var description = describeSpecification(specificationWithoutProjection, 0); 141 | return `!E ${description}`; 142 | } 143 | 144 | -------------------------------------------------------------------------------- /src/purge/purgeConditions.ts: -------------------------------------------------------------------------------- 1 | import { describeSpecification } from "../specification/description"; 2 | import { SpecificationOf } from "../specification/model"; 3 | import { Specification } from "../specification/specification"; 4 | 5 | export class PurgeConditions { 6 | static empty: PurgeConditions = new PurgeConditions([]); 7 | 8 | constructor( 9 | public specifications: Specification[] 10 | ) { } 11 | 12 | whenExists(specification: SpecificationOf): PurgeConditions { 13 | return new PurgeConditions([ 14 | ...this.specifications, 15 | specification.specification 16 | ]); 17 | } 18 | 19 | with(fn: (p: PurgeConditions) => PurgeConditions): PurgeConditions { 20 | return fn(this); 21 | } 22 | 23 | merge(purgeConditions: PurgeConditions): PurgeConditions { 24 | return new PurgeConditions([ 25 | ...this.specifications, 26 | ...purgeConditions.specifications 27 | ]); 28 | } 29 | 30 | saveToDescription(): string { 31 | const specificationDescriptions = this.specifications.map(s => describeSpecification(s, 1)).join(""); 32 | return `purge {\n${specificationDescriptions}}\n`; 33 | } 34 | } -------------------------------------------------------------------------------- /src/purge/validate.ts: -------------------------------------------------------------------------------- 1 | import { Condition, ExistentialCondition, Match, Specification } from "../specification/specification"; 2 | 3 | export function validatePurgeSpecification(specification: Specification): string[] { 4 | // Validate that the specification has only one given. 5 | if (specification.given.length !== 1) { 6 | return ["A purge specification must have exactly one given."]; 7 | } 8 | var purgeRoot = specification.given[0]; 9 | 10 | // Search for negative existential conditions. 11 | // Those indicate that the specification will reverse a purge. 
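    // Once a fact's descendants have been purged they cannot be restored, so a purge
    // condition that a later fact could invalidate is rejected up front.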
12 | var failures: string[] = specification.matches.map(match => match.conditions 13 | .filter(isNegativeExistentialCondition) 14 | .map(condition => 15 | `A specified purge condition would reverse the purge of ${purgeRoot.type} with ${describeTuple(condition.matches)}.` 16 | ) 17 | ).flat(); 18 | return failures; 19 | } 20 | 21 | function isNegativeExistentialCondition(condition: Condition): condition is ExistentialCondition { 22 | return condition.type === "existential" && !condition.exists; 23 | } 24 | 25 | function describeTuple(matches: Match[]): string { 26 | return matches.map(match => match.unknown.type).join(", "); 27 | } -------------------------------------------------------------------------------- /src/rules/RuleSet.ts: -------------------------------------------------------------------------------- 1 | import { AuthorizationRules } from "../authorization/authorizationRules"; 2 | import { DistributionRules } from "../distribution/distribution-rules"; 3 | import { PurgeConditions } from "../purge/purgeConditions"; 4 | import { SpecificationParser } from "../specification/specification-parser"; 5 | 6 | export class RuleSet { 7 | static empty: RuleSet = new RuleSet( 8 | AuthorizationRules.empty, 9 | DistributionRules.empty, 10 | PurgeConditions.empty 11 | ); 12 | 13 | constructor( 14 | public authorizationRules: AuthorizationRules, 15 | public distributionRules: DistributionRules, 16 | public purgeConditions: PurgeConditions 17 | ) {} 18 | 19 | public static loadFromDescription(description: string): RuleSet { 20 | const parser = new SpecificationParser(description); 21 | parser.skipWhitespace(); 22 | let authorizationRules: AuthorizationRules = AuthorizationRules.empty; 23 | let distributionRules: DistributionRules = DistributionRules.empty; 24 | let purgeConditions: PurgeConditions = PurgeConditions.empty; 25 | while (!parser.atEnd()) { 26 | if (parser.continues("authorization")) { 27 | authorizationRules = authorizationRules.with(a => parser.parseAuthorizationRules()); 28 | } 29 | else if (parser.continues("distribution")) { 30 | distributionRules = distributionRules.with(d => parser.parseDistributionRules()); 31 | } 32 | else if (parser.continues("purge")) { 33 | purgeConditions = purgeConditions.with(p => parser.parsePurgeConditions()); 34 | } 35 | else { 36 | // Throws an error. 
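                // Reaching this branch means the description contains a section other than
                // authorization, distribution, or purge; expectEnd() is called only to throw,
                // because the parser is not actually at the end of the input.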
37 | parser.expectEnd(); 38 | } 39 | } 40 | return new RuleSet(authorizationRules, distributionRules, purgeConditions); 41 | } 42 | 43 | merge(ruleSet2: RuleSet): RuleSet { 44 | return new RuleSet( 45 | this.authorizationRules.merge(ruleSet2.authorizationRules), 46 | this.distributionRules.merge(ruleSet2.distributionRules), 47 | this.purgeConditions.merge(ruleSet2.purgeConditions) 48 | ); 49 | } 50 | } -------------------------------------------------------------------------------- /src/specification/declaration.ts: -------------------------------------------------------------------------------- 1 | import { FactRecord, FactReference } from "../storage"; 2 | 3 | export interface DeclaredFact { 4 | reference: FactReference; 5 | fact: FactRecord | null; 6 | } 7 | 8 | export type Declaration = { 9 | name: string; 10 | declared: DeclaredFact; 11 | }[]; 12 | -------------------------------------------------------------------------------- /src/specification/description.ts: -------------------------------------------------------------------------------- 1 | import { ComponentProjection, Condition, Label, Match, Projection, Specification, SpecificationProjection } from "../../src/specification/specification"; 2 | import { FactReference } from "../storage"; 3 | 4 | export function describeDeclaration(references: FactReference[], labels: Label[]) { 5 | const declaration = references.map((reference, index) => { 6 | const label = labels[index]; 7 | return `let ${label.name}: ${label.type} = #${reference.hash}\n`; 8 | }).join(""); 9 | return declaration; 10 | } 11 | 12 | export function describeSpecification(specification: Specification, depth: number) { 13 | const indent = " ".repeat(depth); 14 | const given = specification.given.map(given => describeGiven(given)).join(", "); 15 | const matches = specification.matches.map(match => describeMatch(match, depth + 1)).join(""); 16 | const projection = (specification.projection.type === "composite" && specification.projection.components.length === 0) ? "" : 17 | " => " + describeProjection(specification.projection, depth); 18 | 19 | return `${indent}(${given}) {\n${matches}${indent}}${projection}\n`; 20 | } 21 | 22 | function describeGiven(given: Label) { 23 | return `${given.name}: ${given.type}`; 24 | } 25 | 26 | function describeMatch(match: Match, depth: number) { 27 | const indent = " ".repeat(depth); 28 | const conditions = match.conditions.map(condition => describeCondition(condition, match.unknown.name, depth + 1)).join(""); 29 | 30 | return `${indent}${match.unknown.name}: ${match.unknown.type} [\n${conditions}${indent}]\n`; 31 | } 32 | 33 | function describeCondition(condition: Condition, unknown: string, depth: number): string { 34 | const indent = " ".repeat(depth); 35 | if (condition.type === "path") { 36 | const rolesLeft = condition.rolesLeft.map(r => `->${r.name}: ${r.predecessorType}`).join(""); 37 | const rolesRight = condition.rolesRight.map(r => `->${r.name}: ${r.predecessorType}`).join(""); 38 | return `${indent}${unknown}${rolesLeft} = ${condition.labelRight}${rolesRight}\n`; 39 | } 40 | else if (condition.type === "existential") { 41 | const matches = condition.matches.map(match => describeMatch(match, depth + 1)).join(""); 42 | const op = condition.exists ? 
"" : "!"; 43 | return `${indent}${op}E {\n${matches}${indent}}\n`; 44 | } 45 | else { 46 | throw new Error("Not implemented"); 47 | } 48 | } 49 | 50 | function describeProjection(projection: Projection, depth: number): string { 51 | if (projection.type === "composite") { 52 | const indent = " ".repeat(depth); 53 | const orderedProjections = projection.components.sort((a, b) => a.name.localeCompare(b.name)); 54 | const projectionDescriptions = orderedProjections.map(projection => ` ${indent}${projection.name} = ${describeComponentProjection(projection, depth + 1)}\n`).join(""); 55 | return `{\n${projectionDescriptions}${indent}}`; 56 | } 57 | else if (projection.type === "field") { 58 | return `${projection.label}.${projection.field}`; 59 | } 60 | else if (projection.type === "fact") { 61 | return projection.label; 62 | } 63 | else if (projection.type === "hash") { 64 | return `#${projection.label}`; 65 | } 66 | else { 67 | const _exhaustiveCheck: never = projection; 68 | throw new Error(`Unknown projection type: ${(_exhaustiveCheck as any).type}`); 69 | } 70 | } 71 | 72 | function describeComponentProjection(projection: ComponentProjection, depth: number): string { 73 | if (projection.type === "specification") { 74 | return describeChildSpecification(projection, depth); 75 | } 76 | else if (projection.type === "field") { 77 | return `${projection.label}.${projection.field}`; 78 | } 79 | else if (projection.type === "fact") { 80 | return projection.label; 81 | } 82 | else if (projection.type === "hash") { 83 | return `#${projection.label}`; 84 | } 85 | else { 86 | const _exhaustiveCheck: never = projection; 87 | throw new Error(`Unknown projection type: ${(_exhaustiveCheck as any).type}`); 88 | } 89 | } 90 | 91 | function describeChildSpecification(specification: SpecificationProjection, depth: number) { 92 | const indent = " ".repeat(depth); 93 | const matches = specification.matches.map(match => describeMatch(match, depth + 1)).join(""); 94 | const projection = (specification.projection.type === "composite" && specification.projection.components.length === 0) ? 
"" : 95 | " => " + describeProjection(specification.projection, depth); 96 | 97 | return `{\n${matches}${indent}}${projection}`; 98 | } 99 | -------------------------------------------------------------------------------- /src/specification/feed-cache.ts: -------------------------------------------------------------------------------- 1 | import { computeObjectHash } from "../fact/hash"; 2 | import { FactReference, ReferencesByName } from "../storage"; 3 | import { Skeleton, skeletonOfSpecification } from "./skeleton"; 4 | import { Specification } from "./specification"; 5 | 6 | interface FeedIdentifier { 7 | start: { 8 | factReference: FactReference; 9 | index: number; 10 | }[]; 11 | skeleton: Skeleton; 12 | } 13 | 14 | export interface FeedObject { 15 | namedStart: ReferencesByName; 16 | feed: Specification; 17 | } 18 | 19 | type FeedByHash = { 20 | [hash: string]: FeedObject; 21 | }; 22 | 23 | export class FeedCache { 24 | private feedByHash: FeedByHash = {}; 25 | 26 | addFeeds(feeds: Specification[], namedStart: ReferencesByName): string[] { 27 | const feedsByHash = feeds.reduce((map, feed) => { 28 | const skeleton = skeletonOfSpecification(feed); 29 | const indexedStart = skeleton.inputs.map(input => ({ 30 | factReference: namedStart[feed.given[input.inputIndex].name], 31 | index: input.inputIndex 32 | })); 33 | const feedIdentifier: FeedIdentifier = { 34 | start: indexedStart, 35 | skeleton 36 | }; 37 | const feedObject: FeedObject = { 38 | namedStart, 39 | feed 40 | }; 41 | const hash = urlSafe(computeObjectHash(feedIdentifier)); 42 | return ({ 43 | ...map, 44 | [hash]: feedObject 45 | }); 46 | }, {} as FeedByHash); 47 | const feedHashes = Object.keys(feedsByHash); 48 | this.feedByHash = { 49 | ...this.feedByHash, 50 | ...feedsByHash 51 | }; 52 | return feedHashes; 53 | } 54 | 55 | getFeed(feed: string): FeedObject | undefined { 56 | return this.feedByHash[feed]; 57 | } 58 | } 59 | 60 | function urlSafe(hash: string): string { 61 | return hash.replace(/\+/g, '-').replace(/\//g, '_').replace(/=/g, ''); 62 | } -------------------------------------------------------------------------------- /src/storage.ts: -------------------------------------------------------------------------------- 1 | import { computeObjectHash } from "./fact/hash"; 2 | import { Specification } from "./specification/specification"; 3 | import { findIndex } from './util/fn'; 4 | 5 | export type FactReference = { 6 | type: string; 7 | hash: string; 8 | }; 9 | 10 | export interface FactTuple { 11 | facts: FactReference[]; 12 | bookmark: string; 13 | } 14 | 15 | export interface FactFeed { 16 | tuples: FactTuple[]; 17 | bookmark: string; 18 | } 19 | 20 | export type PredecessorCollection = { 21 | [role: string]: FactReference[] | FactReference 22 | }; 23 | 24 | export type FactRecord = { 25 | type: string; 26 | hash: string; 27 | predecessors: PredecessorCollection, 28 | fields: { [field: string]: any }; 29 | }; 30 | 31 | export type FactSignature = { 32 | publicKey: string; 33 | signature: string; 34 | } 35 | 36 | export type FactEnvelope = { 37 | fact: FactRecord; 38 | signatures: FactSignature[]; 39 | } 40 | 41 | export type ReferencesByName = { [name: string]: FactReference }; 42 | 43 | export interface ProjectedResult { 44 | tuple: ReferencesByName; 45 | result: any; 46 | } 47 | 48 | export interface Storage { 49 | close(): Promise; 50 | save(envelopes: FactEnvelope[]): Promise; 51 | read(start: FactReference[], specification: Specification): Promise; 52 | feed(feed: Specification, start: FactReference[], 
bookmark: string): Promise; 53 | whichExist(references: FactReference[]): Promise; 54 | load(references: FactReference[]): Promise; 55 | purge(purgeConditions: Specification[]): Promise; 56 | purgeDescendants(purgeRoot: FactReference, triggers: FactReference[]): Promise; 57 | 58 | loadBookmark(feed: string): Promise; 59 | saveBookmark(feed: string, bookmark: string): Promise; 60 | 61 | getMruDate(specificationHash: string): Promise; 62 | setMruDate(specificationHash: string, mruDate: Date): Promise; 63 | } 64 | 65 | export interface Queue { 66 | peek(): Promise; 67 | enqueue(envelopes: FactEnvelope[]): Promise; 68 | dequeue(envelopes: FactEnvelope[]): Promise; 69 | } 70 | 71 | export function factReferenceEquals(a: FactReference) { 72 | return (r: FactReference) => r.hash === a.hash && r.type === a.type; 73 | } 74 | 75 | export function factEnvelopeEquals(r: FactReference) { 76 | return (e: FactEnvelope) => e.fact.hash === r.hash && e.fact.type === r.type; 77 | } 78 | 79 | export function uniqueFactReferences(references: FactReference[]): FactReference[] { 80 | return references.filter((value, index, array) => { 81 | return findIndex(array, factReferenceEquals(value)) === index; 82 | }); 83 | } 84 | 85 | export function computeTupleSubsetHash(tuple: ReferencesByName, subset: string[]) { 86 | const parentTuple = Object.getOwnPropertyNames(tuple) 87 | .filter(name => subset.some(s => s === name)) 88 | .reduce((t, name) => ({ 89 | ...t, 90 | [name]: tuple[name] 91 | }), 92 | {} as ReferencesByName); 93 | const parentTupleHash = computeObjectHash(parentTuple); 94 | return parentTupleHash; 95 | } 96 | 97 | export function validateGiven(start: FactReference[], specification: Specification) { 98 | // Verify that the number of start facts equals the number of inputs 99 | if (start.length !== specification.given.length) { 100 | throw new Error(`The number of start facts (${start.length}) does not equal the number of inputs (${specification.given.length})`); 101 | } 102 | // Verify that the input type matches the start fact type 103 | for (let i = 0; i < start.length; i++) { 104 | if (start[i].type !== specification.given[i].type) { 105 | throw new Error(`The type of start fact ${i} (${start[i].type}) does not match the type of input ${i} (${specification.given[i].type})`); 106 | } 107 | } 108 | } -------------------------------------------------------------------------------- /src/user-identity.ts: -------------------------------------------------------------------------------- 1 | 2 | export interface UserIdentity { 3 | provider: string; 4 | id: string; 5 | } 6 | -------------------------------------------------------------------------------- /src/util/encoding.ts: -------------------------------------------------------------------------------- 1 | import { encode as encodeBase64 } from '@stablelib/base64'; 2 | import { hash } from '@stablelib/sha512'; 3 | import { encode as encodeUTF8 } from '@stablelib/utf8'; 4 | 5 | export function computeStringHash(str: string) { 6 | const bytes = encodeUTF8(str); 7 | const result = hash(bytes); 8 | const b64 = encodeBase64(result); 9 | return b64; 10 | } 11 | -------------------------------------------------------------------------------- /src/util/fn.ts: -------------------------------------------------------------------------------- 1 | function safeFlatten(results: U[][]) { 2 | const flat = results.reduce((a, b) => 3 | (!a || a.length === 0) ? b : 4 | (!b || b.length === 0) ? a : 5 | a.concat(b)); 6 | return flat ? 
flat : []; 7 | } 8 | 9 | export async function flattenAsync(collection: T[], selector: (element: T) => Promise) { 10 | if (collection.length === 0) { 11 | return []; 12 | } 13 | else { 14 | const results = await Promise.all(collection.map(selector)); 15 | return safeFlatten(results); 16 | } 17 | } 18 | 19 | export function flatten(collection: T[], selector: (element: T) => U[]) { 20 | if (collection.length === 0) { 21 | return []; 22 | } 23 | else { 24 | return safeFlatten(collection.map(selector)); 25 | } 26 | } 27 | 28 | export async function mapAsync(collection: T[], action: (element: T) => Promise) { 29 | if (collection.length === 0) { 30 | return []; 31 | } 32 | else { 33 | return await Promise.all(collection.map(action)); 34 | } 35 | } 36 | 37 | export async function filterAsync(collection: T[], predicate: (element: T) => Promise) { 38 | if (collection.length === 0) { 39 | return []; 40 | } 41 | else { 42 | const filters = await Promise.all(collection.map(async element => ({ 43 | include: await predicate(element), 44 | element 45 | }))); 46 | 47 | return filters.filter(f => f.include).map(f => f.element); 48 | } 49 | } 50 | 51 | export function findIndex(array: T[], predicate: ((element: T) => boolean)): number { 52 | for (let index = 0; index < array.length; index++) { 53 | if (predicate(array[index])) { 54 | return index; 55 | } 56 | } 57 | 58 | return -1; 59 | } 60 | 61 | export function distinct(value: T, index: number, self: T[]) { 62 | return self.indexOf(value) === index; 63 | } -------------------------------------------------------------------------------- /src/util/obj.ts: -------------------------------------------------------------------------------- 1 | export function toJSON(value: any) { 2 | if (hasProperty(value, "toJSON")) { 3 | return value.toJSON(); 4 | } 5 | else { 6 | return value; 7 | } 8 | } 9 | 10 | function hasProperty(value: any, name: string) { 11 | while (value !== null) { 12 | if (typeof(value) !== "object") { 13 | return false; 14 | } 15 | if (value.hasOwnProperty(name)) { 16 | return true; 17 | } 18 | value = Object.getPrototypeOf(value); 19 | } 20 | return false; 21 | } -------------------------------------------------------------------------------- /src/util/promise.ts: -------------------------------------------------------------------------------- 1 | export function delay(ms: number) { 2 | return new Promise((resolve, reject) => { 3 | setTimeout(() => resolve(), ms); 4 | }); 5 | } 6 | -------------------------------------------------------------------------------- /src/util/trace.ts: -------------------------------------------------------------------------------- 1 | export interface Tracer { 2 | info(message: string): void; 3 | warn(message: string): void; 4 | error(error: any): void; 5 | dependency(name: string, data: string, operation: () => Promise): Promise; 6 | metric(message: string, measurements: { [key: string]: number }): void; 7 | counter(name: string, value: number): void; 8 | } 9 | 10 | export class NoOpTracer implements Tracer { 11 | info(message: string): void { 12 | } 13 | warn(message: string): void { 14 | } 15 | error(error: any): void { 16 | } 17 | dependency(name: string, data: string, operation: () => Promise): Promise { 18 | return operation(); 19 | } 20 | metric(message: string, measurements: { [key: string]: number }): void { 21 | } 22 | 23 | counter(name: string, value: number): void { 24 | } 25 | } 26 | 27 | export class ConsoleTracer implements Tracer { 28 | info(message: string): void { 29 | console.log(message); 30 | } 
31 | warn(message: string): void { 32 | console.warn(message); 33 | } 34 | error(error: any): void { 35 | console.error(error); 36 | } 37 | async dependency(name: string, data: string, operation: () => Promise): Promise { 38 | const start = new Date().getTime(); 39 | try { 40 | return await operation(); 41 | } 42 | finally { 43 | const end = new Date().getTime(); 44 | const duration = end - start; 45 | 46 | // Log the dependency 47 | console.log(`Dependency: ${name} (${data}) took ${duration}ms`); 48 | } 49 | } 50 | 51 | metric(message: string, measurements: { [key: string]: number }): void { 52 | console.log(`Metric: ${message}`, measurements); 53 | } 54 | 55 | counter(name: string, value: number): void { 56 | console.log(`Counter: ${name} = ${value}`); 57 | } 58 | } 59 | 60 | export class Trace { 61 | private static tracer: Tracer = new ConsoleTracer(); 62 | 63 | static configure(tracer: Tracer) { 64 | Trace.tracer = tracer; 65 | } 66 | 67 | static off() { 68 | Trace.tracer = new NoOpTracer(); 69 | } 70 | 71 | static info(message: string): void { 72 | this.tracer.info(message); 73 | } 74 | 75 | static warn(message: string): void { 76 | this.tracer.warn(message); 77 | } 78 | 79 | static error(error: any): void { 80 | this.tracer.error(error); 81 | } 82 | 83 | static dependency(name: string, data: string, operation: () => Promise): Promise { 84 | return this.tracer.dependency(name, data, operation); 85 | } 86 | 87 | static metric(message: string, measurements: { [key: string]: number }): void { 88 | this.tracer.metric(message, measurements); 89 | } 90 | 91 | static counter(name: string, value: number): void { 92 | this.tracer.counter(name, value); 93 | } 94 | } -------------------------------------------------------------------------------- /test/authorization/authorizationExampleSpec.ts: -------------------------------------------------------------------------------- 1 | import { AuthorizationRules, buildModel, Jinaga, JinagaTest } from '../../src'; 2 | 3 | describe("Feedback authorization", () => { 4 | let j: Jinaga; 5 | let site: Site; 6 | 7 | beforeEach(async () => { 8 | site = new Site(new User("Site creator"), "site identifier"); 9 | 10 | j = JinagaTest.create({ 11 | model, 12 | authorization, 13 | user: new User("Logged in user"), 14 | initialState: [ 15 | site 16 | ] 17 | }); 18 | }); 19 | 20 | it("should have logged in user", async () => { 21 | const { userFact: user } = await j.login(); 22 | 23 | expect(user.publicKey).toEqual("Logged in user"); 24 | }); 25 | 26 | it("should allow a user", async () => { 27 | const creator = await j.fact(new User("Other user")); 28 | 29 | expect(creator.publicKey).toEqual("Other user"); 30 | }); 31 | 32 | it("should not allow site created by a different user", async () => { 33 | const creator = await j.fact(new User("Other user")); 34 | 35 | const promise = j.fact(new Site(creator, "site identifier")); 36 | 37 | await expect(promise).rejects.not.toBeNull(); 38 | }); 39 | 40 | it("should allow a site created by the logged in user", async () => { 41 | const creator = await j.fact(new User("Logged in user")); 42 | 43 | const site = await j.fact(new Site(creator, "site identifier")); 44 | 45 | expect(site.creator.publicKey).toEqual("Logged in user"); 46 | }); 47 | 48 | it("should not allow a comment from another user", async () => { 49 | const user = await j.fact(new User("Another user")); 50 | const content = await j.fact(new Content(site, "/path/to/content")); 51 | 52 | const promise = j.fact(new Comment("comment unique id", content, user)); 53 | 54 
| await expect(promise).rejects.not.toBeNull(); 55 | }); 56 | 57 | it("should allow a comment from logged in user", async () => { 58 | const { userFact: user } = await j.login(); 59 | const content = await j.fact(new Content(site, "/path/to/content")); 60 | const comment = await j.fact(new Comment("comment unique id", content, user)); 61 | 62 | expect(comment.author.publicKey).toEqual(user.publicKey); 63 | }); 64 | }); 65 | 66 | const j = Jinaga; 67 | 68 | class User { 69 | static Type = "Jinaga.User" as const; 70 | type = User.Type; 71 | 72 | constructor ( 73 | public publicKey: string 74 | ) { } 75 | } 76 | 77 | class Site { 78 | static Type = "Feedback.Site" as const; 79 | type = Site.Type; 80 | 81 | constructor ( 82 | public creator: User, 83 | public identifier: string 84 | ) { } 85 | } 86 | 87 | class Content { 88 | static Type = "Feedback.Content" as const; 89 | type = Content.Type; 90 | 91 | constructor ( 92 | public site: Site, 93 | public path: string 94 | ) { } 95 | } 96 | 97 | class Comment { 98 | static Type = "Feedback.Comment" as const; 99 | type = Comment.Type; 100 | 101 | constructor ( 102 | public uniqueId: string, 103 | public content: Content, 104 | public author: User 105 | ) { } 106 | } 107 | 108 | const model = buildModel(b => b 109 | .type(User) 110 | .type(Site, m => m 111 | .predecessor("creator", User) 112 | ) 113 | .type(Content, m => m 114 | .predecessor("site", Site) 115 | ) 116 | .type(Comment, m => m 117 | .predecessor("content", Content) 118 | .predecessor("author", User) 119 | ) 120 | ); 121 | 122 | function authorization(a: AuthorizationRules) { 123 | return a 124 | .any(User) 125 | .type(Site, site => site.creator) 126 | .any(Content) 127 | .type(Comment, comment => comment.author) 128 | ; 129 | } 130 | -------------------------------------------------------------------------------- /test/blogModel.ts: -------------------------------------------------------------------------------- 1 | import { DistributionRules, User, buildModel } from "../src"; 2 | 3 | export class Blog { 4 | static Type = "Blog" as const; 5 | type = Blog.Type; 6 | 7 | constructor( 8 | public creator: User, 9 | public domain: string 10 | ) { } 11 | } 12 | 13 | export class Post { 14 | static Type = "Post" as const; 15 | type = Post.Type; 16 | 17 | constructor( 18 | public blog: Blog, 19 | public author: User, 20 | public createdAt: Date | string 21 | ) { } 22 | } 23 | 24 | export class Publish { 25 | static Type = "Publish" as const; 26 | type = Publish.Type; 27 | 28 | constructor( 29 | public post: Post, 30 | public date: Date | string 31 | ) { } 32 | } 33 | 34 | export class Comment { 35 | static Type = "Comment" as const; 36 | type = Comment.Type; 37 | 38 | constructor( 39 | public post: Post, 40 | public author: User, 41 | public text: string, 42 | public createdAt: Date | string 43 | ) { } 44 | } 45 | 46 | export class CommentApproved { 47 | static Type = "CommentApproved" as const; 48 | type = CommentApproved.Type; 49 | 50 | constructor( 51 | public comment: Comment, 52 | public approvedAt: Date | string 53 | ) { } 54 | } 55 | 56 | export const model = buildModel(b => b 57 | .type(User) 58 | .type(Blog, x => x 59 | .predecessor("creator", User) 60 | ) 61 | .type(Post, x => x 62 | .predecessor("blog", Blog) 63 | .predecessor("author", User) 64 | ) 65 | .type(Publish, x => x 66 | .predecessor("post", Post) 67 | ) 68 | .type(Comment, x => x 69 | .predecessor("post", Post) 70 | .predecessor("author", User) 71 | ) 72 | .type(CommentApproved, x => x 73 | .predecessor("comment", 
Comment) 74 | ) 75 | ); 76 | 77 | export const distribution = (r: DistributionRules) => r 78 | // Everyone can see published posts 79 | .share(model.given(Blog).match(blog => 80 | blog.successors(Post, post => post.blog) 81 | .exists(post => post.successors(Publish, publish => publish.post)) 82 | )).withEveryone() 83 | // The creator can see all posts and comments 84 | .share(model.given(Blog).select(blog => ({ 85 | posts: blog.successors(Post, post => post.blog), 86 | comments: blog.successors(Comment, comment => comment.post.blog) 87 | }))).with(model.given(Blog).match(blog => 88 | blog.creator.predecessor() 89 | )) 90 | // A comment author can see their own comments on published posts 91 | .share(model.given(Blog, User).match((blog, author) => 92 | blog.successors(Post, post => post.blog) 93 | .exists(post => post.successors(Publish, publish => publish.post)) 94 | .selectMany(post => post.successors(Comment, comment => comment.post) 95 | .join(comment => comment.author, author) 96 | ) 97 | )).with(model.given(Blog, User).select((blog, author) => 98 | author 99 | )); 100 | -------------------------------------------------------------------------------- /test/companyModel.ts: -------------------------------------------------------------------------------- 1 | import { buildModel, FactRepository, LabelOf, ModelBuilder } from "../src/specification/model"; 2 | 3 | export class User { 4 | static Type = "User" as const; 5 | type = User.Type; 6 | constructor( 7 | public publicKey: string 8 | ) { } 9 | } 10 | 11 | export class UserName { 12 | static Type = "User.Name" as const; 13 | type = UserName.Type; 14 | constructor( 15 | public user: User, 16 | public value: string, 17 | public prior: UserName[] 18 | ) { } 19 | } 20 | 21 | export class Company { 22 | static Type = "Company" as const; 23 | type = Company.Type; 24 | constructor( 25 | public creator: User, 26 | public identifier: string 27 | ) { } 28 | } 29 | 30 | export class Administrator { 31 | static Type = "Administrator" as const; 32 | type = Administrator.Type; 33 | constructor( 34 | public company: Company, 35 | public user: User, 36 | public date: Date | string 37 | ) { } 38 | } 39 | 40 | export class AdministratorRevoked { 41 | static Type = "Administrator.Revoked" as const; 42 | type = AdministratorRevoked.Type; 43 | constructor( 44 | public administrator: Administrator 45 | ) { } 46 | } 47 | 48 | export class Office { 49 | static Type = "Office" as const; 50 | type = Office.Type; 51 | constructor( 52 | public company: Company, 53 | public identifier: string 54 | ) { } 55 | 56 | static inCompany(facts: FactRepository, company: LabelOf) { 57 | return facts.ofType(Office) 58 | .join(office => office.company, company) 59 | .notExists(office => facts.ofType(OfficeClosed) 60 | .join(officeClosed => officeClosed.office, office) 61 | .notExists(officeClosed => facts.ofType(OfficeReopened) 62 | .join(officeReopened => officeReopened.officeClosed, officeClosed) 63 | ) 64 | ); 65 | } 66 | } 67 | 68 | export class OfficeClosed { 69 | static Type = "Office.Closed" as const; 70 | type = OfficeClosed.Type; 71 | constructor( 72 | public office: Office, 73 | public date: Date | string 74 | ) { } 75 | } 76 | 77 | export class OfficeReopened { 78 | static Type = "Office.Reopened" as const; 79 | type = OfficeReopened.Type; 80 | constructor( 81 | public officeClosed: OfficeClosed 82 | ) { } 83 | } 84 | 85 | export class President { 86 | static Type = "President" as const; 87 | type = President.Type; 88 | constructor( 89 | public office: Office, 90 | 
public user: User 91 | ) { } 92 | } 93 | 94 | export class Manager { 95 | static Type = "Manager" as const; 96 | type = Manager.Type; 97 | constructor( 98 | public office: Office, 99 | public employeeNumber: number 100 | ) { } 101 | } 102 | 103 | export class ManagerName { 104 | static Type = "Manager.Name" as const; 105 | type = ManagerName.Type; 106 | constructor( 107 | public manager: Manager, 108 | public value: string, 109 | public prior: ManagerName[] 110 | ) { } 111 | } 112 | 113 | export class ManagerTerminated { 114 | static Type = "Manager.Terminated" as const; 115 | type = ManagerTerminated.Type; 116 | constructor( 117 | public manager: Manager, 118 | public date: Date | string 119 | ) { } 120 | } 121 | 122 | export class Employee { 123 | static Type = "Employee" as const; 124 | type = Employee.Type; 125 | constructor( 126 | public office: Office, 127 | public user: User 128 | ) { } 129 | } 130 | 131 | const officeFacts = (m: ModelBuilder) => m 132 | .type(User) 133 | .type(UserName, f => f 134 | .predecessor("user", User) 135 | .predecessor("prior", UserName) 136 | ) 137 | .type(Company, f => f 138 | .predecessor("creator", User) 139 | ) 140 | .type(Administrator, f => f 141 | .predecessor("company", Company) 142 | .predecessor("user", User) 143 | ) 144 | .type(AdministratorRevoked, f => f 145 | .predecessor("administrator", Administrator) 146 | ) 147 | .type(Office, f => f 148 | .predecessor("company", Company) 149 | ) 150 | .type(OfficeClosed, f => f 151 | .predecessor("office", Office) 152 | ) 153 | .type(OfficeReopened, f => f 154 | .predecessor("officeClosed", OfficeClosed) 155 | ) 156 | .type(President, f => f 157 | .predecessor("office", Office) 158 | .predecessor("user", User) 159 | ) 160 | .type(Manager, f => f 161 | .predecessor("office", Office) 162 | ) 163 | .type(ManagerName, f => f 164 | .predecessor("manager", Manager) 165 | .predecessor("prior", ManagerName) 166 | ) 167 | .type(ManagerTerminated, f => f 168 | .predecessor("manager", Manager) 169 | ) 170 | .type(Employee, f => f 171 | .predecessor("office", Office) 172 | .predecessor("user", User) 173 | ); 174 | 175 | export const model = buildModel(officeFacts); 176 | -------------------------------------------------------------------------------- /test/cryptography/keyPairSpec.ts: -------------------------------------------------------------------------------- 1 | import { FactRecord, KeyPair, dehydrateFact, generateKeyPair, signFacts, verifyEnvelopes } from "../../src"; 2 | 3 | describe("keyPair", () => { 4 | it("should generate consistent signature", async () => { 5 | const keyPair = givenKnownKeyPair() 6 | const factEnvelopes = givenSignedFact(keyPair); 7 | 8 | expect(factEnvelopes.length).toBe(1); 9 | expect(factEnvelopes[0].signatures.length).toBe(1); 10 | expect(factEnvelopes[0].signatures[0].publicKey).toBe(keyPair.publicPem); 11 | expect(factEnvelopes[0].signatures[0].signature).toBe("bfbj+2E49gqpL2A3ihvt6ybLJjrgJYCWzhjHb56F9QNLDe+K5h+NGLpCwXKMOI/gQPY7nkRW5snbugvq2C2vTTEpAdE7kEMKsg4fId+ujEwB4w+N9cXAlOr9mLAEDxZ2/pxI+BeF3BZiqnp72AY8VHE/gVMcmUcaIfgFXw7TWKrXUQ9/tJXp5N3Ph8QBH0j9L9+/GFQrquXg8M2MYmkidp+fL8tuiIMQSryCUuX4xMCTmooyTB0o2XJE6KpoJwEBQRv+FhJJGDqdaAoawNIoBEIVn5gwx7UGkJ53KgYQzL4IPSTW9OxiembNc8E7aYfyMhSG1+wFl45xpJThRuFRcA=="); 12 | }); 13 | 14 | it("should verify a signature", async () => { 15 | const keyPair = givenKnownKeyPair() 16 | const factEnvelopes = givenSignedFact(keyPair); 17 | 18 | const verified = verifyEnvelopes(factEnvelopes); 19 | 20 | expect(verified).toBe(true); 21 | }); 22 | 23 | 
it("should sign with a new key pair", async () => { 24 | const keyPair = generateKeyPair(); 25 | const factEnvelopes = givenSignedFact(keyPair); 26 | 27 | const verified = verifyEnvelopes(factEnvelopes); 28 | 29 | expect(verified).toBe(true); 30 | }); 31 | 32 | it("should not verify a signature if the content has been modified", async () => { 33 | const keyPair = generateKeyPair(); 34 | const factEnvelopes = givenSignedFact(keyPair); 35 | 36 | factEnvelopes[0].fact.fields["identifier"] = "staging"; 37 | const verified = verifyEnvelopes(factEnvelopes); 38 | 39 | expect(verified).toBe(false); 40 | }); 41 | }); 42 | 43 | function givenKnownKeyPair(): KeyPair { 44 | return { 45 | publicPem: "-----BEGIN PUBLIC KEY-----\r\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4wP7IyUZICcZ5JC+UUxB\r\nZZOo8mE7R2zj8Zba5krMAqDMFbQ8bWS+nTbFVHgun1Z+5HUCZ9HHv7d7KPLu+zuI\r\nfBi5CuiJy4LJkIUuL2eRBvy8VJPeyDfvRuZ6Dc5r+vp25omx5bWbPtjPczatUphl\r\nQ83GXvITQ4ZQN/C8w7/cewq4/qVrT+TfwvIiynBSFbU5NXE6dmbE1PbJFjtBlJJ+\r\nM2uiTKMKgrC7hpluEdO3oz1itV3CTHo4DGChARLia/ZRGTUlheunbSOnFupl/Rts\r\ny/wfvEh+CBt2MduUFBo2pLCe6NMTlhEpC+/jOhQnIaU8NWy5aUh6D6pIDGwond9Y\r\nCwIDAQAB\r\n-----END PUBLIC KEY-----\r\n", 46 | privatePem: "-----BEGIN RSA PRIVATE KEY-----\r\nMIIEpQIBAAKCAQEA4wP7IyUZICcZ5JC+UUxBZZOo8mE7R2zj8Zba5krMAqDMFbQ8\r\nbWS+nTbFVHgun1Z+5HUCZ9HHv7d7KPLu+zuIfBi5CuiJy4LJkIUuL2eRBvy8VJPe\r\nyDfvRuZ6Dc5r+vp25omx5bWbPtjPczatUphlQ83GXvITQ4ZQN/C8w7/cewq4/qVr\r\nT+TfwvIiynBSFbU5NXE6dmbE1PbJFjtBlJJ+M2uiTKMKgrC7hpluEdO3oz1itV3C\r\nTHo4DGChARLia/ZRGTUlheunbSOnFupl/Rtsy/wfvEh+CBt2MduUFBo2pLCe6NMT\r\nlhEpC+/jOhQnIaU8NWy5aUh6D6pIDGwond9YCwIDAQABAoIBAB8Ei7tdFcZFYW3P\r\n8xkTlLnmx4Y6j8luEOURCh6+KIrRYqEyi7Ecu0iq06J7e09NF7BqZmY+DQ9eaAcL\r\nzmhoVXkzPZFGfZFfcN/8undCrNeqD6d0vtNXhSuIUTPyuOFFeJp+RN7QhgI7yHiD\r\nB4KKDQgLJSgS5lvrDanfDEOowtzSs9Q0TS9dJzzJy4D/UddrSauducdEn2sAbx60\r\nUJ6JcAJcQjIi7T/AJLrkFOMrc23DUDeucR/qgRgx7BadU7TuPpbE1Phtrlryg6pi\r\n95V8fR1qRXVgVDKd69ky8HGWVcGVxyuvhzp2+JLOgjokf8vPtRdxfxlQQVfHzZDt\r\nqpDQDPUCgYEA+LbfXPvKVDj++exLZ/sn+0dPWGIE1IXPAyZNVfVxmOP/Wg3yOuwI\r\nNIYiZruP66ZxFGY7DPO198DRxn4FmSxMeDhdyYHkmJOOuT1f5Q5dJKPBW+oS0Elg\r\nd1EdrYouNNk5L1hN4JJ1CKe8lqnvgo0M7Rt+iAHtr0PKtVuNfTWBnO0CgYEA6apj\r\nOh9C94y42BTMDNxwQAjEFfCxKvEPGPD9MSPSltbbCMfFdJWwecOeY0H0/Q3qe2w0\r\nvUFy1/8yRjvwcei9dWq2vzKfjsvVjR8uE4nHfBAs2IBD9O7im0yOkUHQAjKDuMSE\r\nmfLsKQgZQOiiQ72euuCfDrdocF5Q8m3Dy8yyIdcCgYEA7jS32SaOsfukuVlHH1+8\r\n+z1hERVP6vv9ONcGWr2o/vXfKzEQPr6xXRza9enN/bR7uT3wcIc6UP+r6p/oXLvA\r\nwaO6r7RobHlmyKOvpIINU3pDRvT47+RXL+/QrNUbTCKAUogQjnW3AYMlbGd1rWPK\r\nbY1XsoumSaZ0Dx6QdMs6SEECgYEAl91XtFTRD1b0Y+vQWqOCiPuphaDzZLdbWph1\r\n1lQz8DkgDmrYGFeZZOoQrO4XLci3lxPSixZYb626nQ8jzMS5LfD3aPib3xD3cErN\r\nhYFMl4NjwipLAIup18k/94RQjr0KAzImBHBvsJNE5nzLyT8aRNbsSYJGbJHABm/0\r\niyY0t+0CgYEArmBe1THrvDmhjXsOeLBNX5n+e4lDffQ8CAWqwCthQT0FhQAu1cp9\r\nApgmMSSGjvWEvZDqeLdIXp8mVMoDDQWg38oiWoUCKl4yK87cR6PJcu38eJPixYW3\r\nzBc0D/fIthqccFxz5cKe2WzFbJKQW2q2VtZ35/WTAgeLueR9ewoFY60=\r\n-----END RSA PRIVATE KEY-----\r\n" 47 | }; 48 | } 49 | 50 | function givenSignedFact(keyPair: KeyPair) { 51 | const factRecords: FactRecord[] = dehydrateFact({ 52 | type: "MyApplication.Environment", 53 | identifier: "production" 54 | }); 55 | return signFacts(keyPair, factRecords); 56 | } -------------------------------------------------------------------------------- /test/distribution/distributionDescriptionSpec.ts: -------------------------------------------------------------------------------- 1 | import { DistributionRules, describeDistributionRules } from "../../src"; 2 | import { distribution } from "../blogModel"; 
3 | 4 | describe("Distribution rules from description", () => { 5 | it("should be able to save distribution rules", () => { 6 | const description = describeDistributionRules(distribution); 7 | expect(description).not.toBeNull(); 8 | }); 9 | 10 | it("should be able to load distribution rules", () => { 11 | const description = describeDistributionRules(distribution); 12 | const loaded = DistributionRules.loadFromDescription(description); 13 | const roundTrip = describeDistributionRules(_ => loaded); 14 | expect(roundTrip).toEqual(description); 15 | }); 16 | }); -------------------------------------------------------------------------------- /test/fact/knownHashSpec.ts: -------------------------------------------------------------------------------- 1 | import { Dehydration, HashMap } from "../../src/fact/hydrate"; 2 | 3 | describe("Known hash", () => { 4 | it("String field", () => { 5 | const hash = hashOf({ 6 | type: "Skylane.Airline", 7 | identifier: "value" 8 | }); 9 | expect(hash).toEqual("uXcsBceLFAkZdRD71Ztvc+QwASayHA0Zg7wC2mc3zl28N1hKTbGBfBA2OnEHAWo+0yYVeUnABMn9MCRH8cRHWg=="); 10 | }); 11 | 12 | it("Predecessor", () => { 13 | const hash = hashOf({ 14 | type: "Skylane.Airline.Day", 15 | airline: { 16 | type: "Skylane.Airline", 17 | identifier: "value" 18 | }, 19 | date: "2021-07-04T00:00:00.000Z" 20 | }); 21 | expect(hash).toEqual("cQaErYsizavFrTIGjD1C0g3shMG/uq+hVUXzs/kCzcvev9gPrVDom3pbrszUsmeRelNv8bRdIvOb6AbaYrVC7w=="); 22 | }); 23 | 24 | it("Integer field", () => { 25 | const hash = hashOf({ 26 | type: "Skylane.Flight", 27 | airlineDay: { 28 | type: "Skylane.Airline.Day", 29 | airline: { 30 | type: "Skylane.Airline", 31 | identifier: "value" 32 | }, 33 | date: "2021-07-04T00:00:00.000Z" 34 | }, 35 | flightNumber: 4247 36 | }); 37 | expect(hash).toEqual("PyXT7pCvBq7Vw63kEZGgbIVJxqA7jhoO+QbmeM3YC9laayG0gjln58khyOd4D/cmxXzocPaIuwXGWusVJxqEjQ=="); 38 | }); 39 | 40 | it("Empty predecessor list", () => { 41 | const hash = hashOf({ 42 | type: "Skylane.Passenger.Name", 43 | passenger: { 44 | type: "Skylane.Passenger", 45 | airline: { 46 | type: "Skylane.Airline", 47 | identifier: "IA" 48 | }, 49 | user: { 50 | type: "Jinaga.User", 51 | publicKey: "---PUBLIC KEY---" 52 | } 53 | }, 54 | value: "Charles Rane", 55 | prior: [] 56 | }); 57 | expect(hash).toEqual("GsMMA/8Nv401P6RXvugFYzYCemGehnXSFZuaKNcoVFoXKmxzMJkpqI9rs/SRlKHZlnRP1QsBxFWKFt6143OpYA=="); 58 | }); 59 | 60 | it("Single predecessor list", () => { 61 | const passenger = { 62 | type: "Skylane.Passenger", 63 | airline: { 64 | type: "Skylane.Airline", 65 | identifier: "IA" 66 | }, 67 | user: { 68 | type: "Jinaga.User", 69 | publicKey: "---PUBLIC KEY---" 70 | } 71 | }; 72 | const first = { 73 | type: "Skylane.Passenger.Name", 74 | passenger, 75 | value: "Charles Rane", 76 | prior: [] 77 | }; 78 | const hash = hashOf({ 79 | type: "Skylane.Passenger.Name", 80 | passenger, 81 | value: "Charley Rane", 82 | prior: [ first ] 83 | }); 84 | expect(hash).toEqual("BYLtR7XddbhchlyBdGdrnRHGkPsDecynDjLHFvqtKH7zug46ymxNDpPC4QNb+T14Bhzs8M1F3VfCnlgzinNHPg=="); 85 | }); 86 | 87 | it("Multiple predecessor list", () => { 88 | const passenger = { 89 | type: "Skylane.Passenger", 90 | airline: { 91 | type: "Skylane.Airline", 92 | identifier: "IA" 93 | }, 94 | user: { 95 | type: "Jinaga.User", 96 | publicKey: "---PUBLIC KEY---" 97 | } 98 | }; 99 | const first = { 100 | type: "Skylane.Passenger.Name", 101 | passenger, 102 | value: "Charles Rane", 103 | prior: [] 104 | }; 105 | const middle = [1,2,3,4,5,6,7,8,9,10] 106 | .map(id => ({ 107 | type: 
"Skylane.Passenger.Name", 108 | passenger, 109 | value: `Charley Rane ${id}`, 110 | prior: [ first ] 111 | })); 112 | const hash = hashOf({ 113 | type: "Skylane.Passenger.Name", 114 | passenger, 115 | value: "Charley Rane", 116 | prior: middle 117 | }); 118 | expect(hash).toEqual("4Os8M2Tt7+lCEe6WQ6iAJwQ/wbmK6CTLqwF8DCS6Bc4tgXE268BanI0sHDeSYhbKYbSDAyRzarMkrciveBoDTQ=="); 119 | }); 120 | }); 121 | 122 | function hashOf(fact: HashMap) { 123 | const dehydration = new Dehydration(); 124 | const record = dehydration.dehydrate(fact); 125 | return record.hash; 126 | } -------------------------------------------------------------------------------- /test/http/deserializerSpec.ts: -------------------------------------------------------------------------------- 1 | import { FactEnvelope, GraphDeserializer } from "../../src"; 2 | 3 | describe("GraphDeserializer", () => { 4 | it("should read an empty graph", async () => { 5 | const input = ""; 6 | const readLine = createReadLine(input); 7 | const deserializer = new GraphDeserializer(readLine); 8 | const envelopes = await readAll(deserializer); 9 | expect(envelopes).toEqual([]); 10 | }); 11 | 12 | it("should read a graph with one fact without signatures", async () => { 13 | const input = "\"MyApp.Root\"\n{}\n{\"identifier\":\"root\"}\n\n"; 14 | const readLine = createReadLine(input); 15 | const deserializer = new GraphDeserializer(readLine); 16 | const envelopes = await readAll(deserializer); 17 | expect(envelopes).toEqual([{ 18 | fact: { 19 | type: "MyApp.Root", 20 | hash: "2nxJF8sJEFIuY70VLJvhOR+9V28FoH98lLaL3cCXGqpDpX/lYz0mjohvHxvjHBgDAleJ5L2Dq4Qa2ybGE5NNww==", 21 | fields: { 22 | identifier: "root" 23 | }, 24 | predecessors: {} 25 | }, 26 | signatures: [] 27 | }]); 28 | }); 29 | 30 | it("should read a graph with two facts without signatures", async () => { 31 | const input = "\"MyApp.Root\"\n{}\n{}\n\n\"MyApp.Child\"\n{\"root\":0}\n{}\n\n"; 32 | const readLine = createReadLine(input); 33 | const deserializer = new GraphDeserializer(readLine); 34 | const envelopes = await readAll(deserializer); 35 | expect(envelopes).toEqual([{ 36 | fact: { 37 | type: "MyApp.Root", 38 | hash: "fSS1hK7OGAeSX4ocN3acuFF87jvzCdPN3vLFUtcej0lOAsVV859UIYZLRcHUoMbyd/J31TdVn5QuE7094oqUPg==", 39 | fields: {}, 40 | predecessors: {} 41 | }, 42 | signatures: [] 43 | }, { 44 | fact: { 45 | type: "MyApp.Child", 46 | hash: "9m4j5fur76Ofg2PnOxtlufPDKt7DKqqJewylpt0T6HluB5OhyqBaKTtO9SjtkKmI6CxLWmgGdZzdV1Al0YVtRg==", 47 | fields: {}, 48 | predecessors: { 49 | root: { 50 | type: "MyApp.Root", 51 | hash: "fSS1hK7OGAeSX4ocN3acuFF87jvzCdPN3vLFUtcej0lOAsVV859UIYZLRcHUoMbyd/J31TdVn5QuE7094oqUPg==" 52 | } 53 | } 54 | }, 55 | signatures: [] 56 | }]); 57 | }); 58 | 59 | it("should read a graph with two facts with signatures", async () => { 60 | const input = 61 | "PK0\n\"public\"\n\n" + 62 | "\"MyApp.Root\"\n{}\n{}\nPK0\n\"signature\"\n\n" + 63 | "PK1\n\"public2\"\n\n" + 64 | "\"MyApp.Child\"\n{\"root\":0}\n{}\nPK0\n\"signature1\"\nPK1\n\"signature2\"\n\n"; 65 | const readLine = createReadLine(input); 66 | const deserializer = new GraphDeserializer(readLine); 67 | const envelopes = await readAll(deserializer); 68 | expect(envelopes).toEqual([{ 69 | fact: { 70 | type: "MyApp.Root", 71 | hash: "fSS1hK7OGAeSX4ocN3acuFF87jvzCdPN3vLFUtcej0lOAsVV859UIYZLRcHUoMbyd/J31TdVn5QuE7094oqUPg==", 72 | fields: {}, 73 | predecessors: {} 74 | }, 75 | signatures: [{ 76 | publicKey: "public", 77 | signature: "signature" 78 | }] 79 | }, { 80 | fact: { 81 | type: "MyApp.Child", 82 | hash: 
"9m4j5fur76Ofg2PnOxtlufPDKt7DKqqJewylpt0T6HluB5OhyqBaKTtO9SjtkKmI6CxLWmgGdZzdV1Al0YVtRg==", 83 | fields: {}, 84 | predecessors: { 85 | root: { 86 | type: "MyApp.Root", 87 | hash: "fSS1hK7OGAeSX4ocN3acuFF87jvzCdPN3vLFUtcej0lOAsVV859UIYZLRcHUoMbyd/J31TdVn5QuE7094oqUPg==" 88 | } 89 | } 90 | }, 91 | signatures: [ 92 | { 93 | "publicKey": "public", 94 | "signature": "signature1", 95 | }, { 96 | "publicKey": "public2", 97 | "signature": "signature2", 98 | }, 99 | ], 100 | }]); 101 | }); 102 | }); 103 | 104 | function createReadLine(input: string) { 105 | const lines = input.split("\n"); 106 | if (lines[lines.length - 1] === "") { 107 | lines.pop(); 108 | } 109 | return async () => { 110 | const line = lines.shift(); 111 | return line !== undefined ? line : null; 112 | }; 113 | } 114 | 115 | async function readAll(deserializer: GraphDeserializer) { 116 | const envelopes: FactEnvelope[] = []; 117 | await deserializer.read(async (batch) => { 118 | envelopes.push(...batch); 119 | }); 120 | return envelopes; 121 | } -------------------------------------------------------------------------------- /test/http/serializerSpec.ts: -------------------------------------------------------------------------------- 1 | import { GraphSerializer } from "../../src"; 2 | 3 | describe("GraphSerializer", () => { 4 | it("should write an empty graph", () => { 5 | let output = ""; 6 | const serializer = new GraphSerializer(chunk => { 7 | output += chunk; 8 | }); 9 | 10 | serializer.serialize([]); 11 | 12 | expect(output).toBe(""); 13 | }); 14 | 15 | it("should write a graph with one fact without signatures", () => { 16 | let output = ""; 17 | const serializer = new GraphSerializer(chunk => { 18 | output += chunk; 19 | }); 20 | 21 | serializer.serialize([{ 22 | fact: { 23 | type: "MyApp.Root", 24 | hash: "roothash", 25 | fields: { 26 | identifier: "root" 27 | }, 28 | predecessors: {} 29 | }, 30 | signatures: [] 31 | }]); 32 | 33 | expect(output).toBe("\"MyApp.Root\"\n{}\n{\"identifier\":\"root\"}\n\n"); 34 | }); 35 | 36 | it("should write a graph with two facts without signatures", () => { 37 | let output = ""; 38 | const serializer = new GraphSerializer(chunk => { 39 | output += chunk; 40 | }); 41 | 42 | serializer.serialize([{ 43 | fact: { 44 | type: "MyApp.Root", 45 | hash: "roothash", 46 | fields: {}, 47 | predecessors: {} 48 | }, 49 | signatures: [] 50 | }, { 51 | fact: { 52 | type: "MyApp.Child", 53 | hash: "childhash", 54 | fields: {}, 55 | predecessors: { 56 | root: { 57 | type: "MyApp.Root", 58 | hash: "roothash" 59 | } 60 | } 61 | }, 62 | signatures: [] 63 | }]); 64 | 65 | expect(output).toBe("\"MyApp.Root\"\n{}\n{}\n\n\"MyApp.Child\"\n{\"root\":0}\n{}\n\n"); 66 | }); 67 | 68 | it("should not repeat a fact", () => { 69 | let output = ""; 70 | const serializer = new GraphSerializer(chunk => { 71 | output += chunk; 72 | }); 73 | 74 | serializer.serialize([{ 75 | fact: { 76 | type: "MyApp.Root", 77 | hash: "roothash", 78 | fields: {}, 79 | predecessors: {} 80 | }, 81 | signatures: [] 82 | }, { 83 | fact: { 84 | type: "MyApp.Root", 85 | hash: "roothash", 86 | fields: {}, 87 | predecessors: {} 88 | }, 89 | signatures: [] 90 | }]); 91 | 92 | expect(output).toBe("\"MyApp.Root\"\n{}\n{}\n\n"); 93 | }); 94 | 95 | it("should write a graph with two facts with signatures", () => { 96 | let output = ""; 97 | const serializer = new GraphSerializer(chunk => { 98 | output += chunk; 99 | }); 100 | 101 | serializer.serialize([{ 102 | fact: { 103 | type: "MyApp.Root", 104 | hash: "roothash", 105 | fields: {}, 106 | predecessors: 
{} 107 | }, 108 | signatures: [{ 109 | publicKey: "public", 110 | signature: "signature" 111 | }] 112 | }, { 113 | fact: { 114 | type: "MyApp.Child", 115 | hash: "childhash", 116 | fields: {}, 117 | predecessors: { 118 | root: { 119 | type: "MyApp.Root", 120 | hash: "roothash" 121 | } 122 | } 123 | }, 124 | signatures: [{ 125 | publicKey: "public", 126 | signature: "signature1" 127 | }, { 128 | publicKey: "public2", 129 | signature: "signature2" 130 | }] 131 | }]); 132 | 133 | expect(output).toBe( 134 | "PK0\n\"public\"\n\n" + 135 | "\"MyApp.Root\"\n{}\n{}\nPK0\n\"signature\"\n\n" + 136 | "PK1\n\"public2\"\n\n" + 137 | "\"MyApp.Child\"\n{\"root\":0}\n{}\nPK0\n\"signature1\"\nPK1\n\"signature2\"\n\n" 138 | ); 139 | }); 140 | }); -------------------------------------------------------------------------------- /test/managers/QueueProcessorSpec.ts: -------------------------------------------------------------------------------- 1 | import { QueueProcessor, Saver } from "../../src/managers/QueueProcessor"; 2 | import { delay } from "../../src/util/promise"; 3 | 4 | describe("QueueProcessor", () => { 5 | // Mock implementation of the Saver interface 6 | class MockSaver implements Saver { 7 | public saveCount = 0; 8 | public lastSaveTime = 0; 9 | public saveTimes: number[] = []; 10 | public savePromise: Promise<void> | null = null; 11 | public delayMs = 0; 12 | 13 | async save(): Promise<void> { 14 | this.saveCount++; 15 | this.lastSaveTime = Date.now(); 16 | this.saveTimes.push(this.lastSaveTime); 17 | 18 | if (this.delayMs > 0) { 19 | await delay(this.delayMs); 20 | } 21 | 22 | if (this.savePromise) { 23 | return this.savePromise; 24 | } 25 | 26 | return Promise.resolve(); 27 | } 28 | 29 | reset(): void { 30 | this.saveCount = 0; 31 | this.lastSaveTime = 0; 32 | this.saveTimes = []; 33 | this.savePromise = null; 34 | } 35 | } 36 | 37 | let saver: MockSaver; 38 | let queueProcessor: QueueProcessor; 39 | 40 | beforeEach(() => { 41 | saver = new MockSaver(); 42 | }); 43 | 44 | afterEach(async () => { 45 | if (queueProcessor) { 46 | await queueProcessor.dispose(); 47 | } 48 | }); 49 | 50 | it("should process the queue immediately when delay is 0", async () => { 51 | // Arrange 52 | queueProcessor = new QueueProcessor(saver, 0); 53 | 54 | // Act 55 | queueProcessor.scheduleProcessing(); 56 | 57 | // Assert 58 | expect(saver.saveCount).toBe(1); 59 | }, 1000); 60 | 61 | it("should debounce multiple calls when delay is greater than 0", async () => { 62 | // Arrange 63 | const delayMs = 100; 64 | queueProcessor = new QueueProcessor(saver, delayMs); 65 | 66 | // Act 67 | queueProcessor.scheduleProcessing(); 68 | queueProcessor.scheduleProcessing(); 69 | queueProcessor.scheduleProcessing(); 70 | 71 | // Wait for less than the delay 72 | await delay(50); 73 | 74 | // Assert - should not have processed yet 75 | expect(saver.saveCount).toBe(0); 76 | 77 | // Wait for the delay to complete 78 | await delay(delayMs + 50); 79 | 80 | // Assert - should have processed once 81 | expect(saver.saveCount).toBe(1); 82 | }, 1000); 83 | 84 | it("should process immediately when processQueueNow is called", async () => { 85 | // Arrange 86 | const delayMs = 1000; // Long delay 87 | queueProcessor = new QueueProcessor(saver, delayMs); 88 | 89 | // Act 90 | queueProcessor.scheduleProcessing(); 91 | 92 | // Wait a bit, but less than the delay 93 | await delay(50); 94 | 95 | // Assert - should not have processed yet 96 | expect(saver.saveCount).toBe(0); 97 | 98 | // Act - process immediately 99 | await queueProcessor.processQueueNow(); 100
| 101 | // Assert - should have processed once 102 | expect(saver.saveCount).toBe(1); 103 | }, 10000); 104 | 105 | it("should batch multiple operations into a single save", async () => { 106 | // Arrange 107 | const delayMs = 100; 108 | queueProcessor = new QueueProcessor(saver, delayMs); 109 | 110 | // Act - schedule multiple times in quick succession 111 | queueProcessor.scheduleProcessing(); 112 | await delay(10); 113 | queueProcessor.scheduleProcessing(); 114 | await delay(10); 115 | queueProcessor.scheduleProcessing(); 116 | 117 | // Wait for the delay to complete 118 | await delay(delayMs + 50); 119 | 120 | // Assert - should have processed only once 121 | expect(saver.saveCount).toBe(1); 122 | }, 1000); 123 | 124 | it("should process multiple times when calls are spaced out", async () => { 125 | // Arrange 126 | const delayMs = 100; 127 | queueProcessor = new QueueProcessor(saver, delayMs); 128 | 129 | // Act - schedule with delays longer than the debounce period 130 | queueProcessor.scheduleProcessing(); 131 | 132 | // Wait for the first processing to complete 133 | await delay(delayMs + 50); 134 | 135 | // Schedule again 136 | queueProcessor.scheduleProcessing(); 137 | 138 | // Wait for the second processing to complete 139 | await delay(delayMs + 50); 140 | 141 | // Assert - should have processed twice 142 | expect(saver.saveCount).toBe(2); 143 | }, 1000); 144 | 145 | it("should handle errors during save", async () => { 146 | // Arrange 147 | queueProcessor = new QueueProcessor(saver, 0); 148 | saver.savePromise = Promise.reject(new Error("Test error")); 149 | 150 | // Spy on Trace.error 151 | const originalTraceError = global.console.error; 152 | const mockTraceError = jest.fn(); 153 | global.console.error = mockTraceError; 154 | 155 | try { 156 | // Act 157 | queueProcessor.scheduleProcessing(); 158 | 159 | // Wait a bit to ensure processing completes 160 | await delay(50); 161 | 162 | // Assert 163 | expect(mockTraceError).toHaveBeenCalled(); 164 | } finally { 165 | // Restore original Trace.error 166 | global.console.error = originalTraceError; 167 | } 168 | }, 1000); 169 | 170 | it("should stop processing when disposed", async () => { 171 | // Arrange 172 | queueProcessor = new QueueProcessor(saver, 100); 173 | 174 | // Act 175 | await queueProcessor.dispose(); 176 | 177 | // Try to run a process immediately 178 | await queueProcessor.processQueueNow(); 179 | 180 | // Assert - should not have processed 181 | expect(saver.saveCount).toBe(0); 182 | }, 1000); 183 | }); -------------------------------------------------------------------------------- /test/orderModel.ts: -------------------------------------------------------------------------------- 1 | import { buildModel } from "../src"; 2 | 3 | export function createModel() { 4 | return buildModel(b => b 5 | .type(Store) 6 | .type(Order, x => x 7 | .predecessor("store", Store) 8 | ) 9 | .type(Item, x => x 10 | .predecessor("order", Order) 11 | .predecessor("product", Product) 12 | ) 13 | .type(OrderCancelled, x => x 14 | .predecessor("order", Order) 15 | ) 16 | .type(OrderCancelledReason, x => x 17 | .predecessor("orderCancelled", OrderCancelled) 18 | ) 19 | .type(OrderShipped, x => x 20 | .predecessor("order", Order) 21 | ) 22 | ); 23 | } 24 | export class Store { 25 | static Type = "Store" as const; 26 | type = Store.Type; 27 | 28 | constructor( 29 | public identifier: string 30 | ) {} 31 | } 32 | export class Order { 33 | static Type = "Order" as const; 34 | type = Order.Type; 35 | 36 | constructor( 37 | public store: 
Store, 38 | public createdAt: Date | string 39 | ) {} 40 | } 41 | export class Product { 42 | static Type = "Product" as const; 43 | type = Product.Type; 44 | 45 | constructor( 46 | public store: Store, 47 | public identifier: string 48 | ) {} 49 | } 50 | export class Item { 51 | static Type = "Order.Item" as const; 52 | type = Item.Type; 53 | 54 | constructor( 55 | public order: Order, 56 | public product: Product, 57 | public quantity: number 58 | ) {} 59 | } 60 | export class OrderCancelled { 61 | static Type = "Order.Cancelled" as const; 62 | type = OrderCancelled.Type; 63 | 64 | constructor( 65 | public order: Order, 66 | public cancelledAt: Date | string 67 | ) {} 68 | } 69 | export class OrderCancelledReason { 70 | static Type = "Order.Cancelled.Reason" as const; 71 | type = OrderCancelledReason.Type; 72 | 73 | constructor( 74 | public orderCancelled: OrderCancelled, 75 | public reason: string 76 | ) {} 77 | } 78 | export class OrderShipped { 79 | static Type = "Order.Shipped" as const; 80 | type = OrderShipped.Type; 81 | 82 | constructor( 83 | public order: Order, 84 | public shippedAt: Date | string 85 | ) {} 86 | } 87 | -------------------------------------------------------------------------------- /test/purge/realTimePurgeSpec.ts: -------------------------------------------------------------------------------- 1 | import { JinagaClient, Model, PurgeConditions } from "../../src"; 2 | import { createModel, Item, Order, OrderCancelled, OrderCancelledReason, Product, Store } from "../orderModel"; 3 | 4 | describe("Real-time purge", () => { 5 | it("Should find descendants if purge condition is not met", async () => { 6 | const model = createModel(); 7 | const j = givenClientWithPurgeCondition(model); 8 | 9 | const store = await j.fact(new Store("storeId")); 10 | const order = await j.fact(new Order(store, new Date())); 11 | const item1 = await j.fact(new Item(order, new Product(store, "product1"), 1)); 12 | const item2 = await j.fact(new Item(order, new Product(store, "product2"), 1)); 13 | 14 | const itemsInOrder = model.given(Order).match(order => 15 | order.successors(Item, item => item.order) 16 | ); 17 | 18 | const items = await j.query(itemsInOrder, order); 19 | expect(items).toEqual([item1, item2]); 20 | }); 21 | 22 | it("Should purge successors when condition is met", async () => { 23 | const model = createModel(); 24 | const j = givenClientWithPurgeCondition(model); 25 | 26 | const store = await j.fact(new Store("storeId")); 27 | const order = await j.fact(new Order(store, new Date())); 28 | const item1 = await j.fact(new Item(order, new Product(store, "product1"), 1)); 29 | const item2 = await j.fact(new Item(order, new Product(store, "product2"), 1)); 30 | const orderCancelled = await j.fact(new OrderCancelled(order, new Date())); 31 | 32 | const itemsInOrder = model.given(Order).match(order => 33 | order.successors(Item, item => item.order) 34 | ); 35 | 36 | const items = await j.query(itemsInOrder, order); 37 | expect(items).toEqual([]); 38 | }); 39 | 40 | it("Should not purge the trigger fact", async () => { 41 | const model = createModel(); 42 | const j = givenClientWithPurgeCondition(model); 43 | 44 | const store = await j.fact(new Store("storeId")); 45 | const order = await j.fact(new Order(store, new Date())); 46 | const item1 = await j.fact(new Item(order, new Product(store, "product1"), 1)); 47 | const item2 = await j.fact(new Item(order, new Product(store, "product2"), 1)); 48 | const orderCancelled = await j.fact(new OrderCancelled(order, new Date())); 49 | 50 
| const cancelOfOrder = model.given(Order).match(order => 51 | order.successors(OrderCancelled, cancelled => cancelled.order) 52 | ); 53 | 54 | const cancels = await j.query(cancelOfOrder, order); 55 | expect(cancels).toEqual([orderCancelled]); 56 | }); 57 | 58 | it("Should not purge ancestors of the trigger fact", async () => { 59 | const model = createModel(); 60 | const j = createJinagaClient(p => p 61 | .whenExists(model.given(Order).match(order => 62 | order.successors(OrderCancelledReason, reason => reason.orderCancelled.order) 63 | )) 64 | ); 65 | 66 | const store = await j.fact(new Store("storeId")); 67 | const order = await j.fact(new Order(store, new Date())); 68 | const item1 = await j.fact(new Item(order, new Product(store, "product1"), 1)); 69 | const item2 = await j.fact(new Item(order, new Product(store, "product2"), 1)); 70 | const orderCancelled = await j.fact(new OrderCancelled(order, new Date())); 71 | const reason = await j.fact(new OrderCancelledReason(orderCancelled, "reason")); 72 | 73 | const cancelOfOrder = model.given(Order).match(order => 74 | order.successors(OrderCancelled, cancelled => cancelled.order) 75 | ); 76 | 77 | const cancels = await j.query(cancelOfOrder, order); 78 | expect(cancels).toEqual([orderCancelled]); 79 | }); 80 | }); 81 | 82 | function givenClientWithPurgeCondition(model: Model) { 83 | return createJinagaClient(p => p 84 | .whenExists(model.given(Order).match((order, facts) => facts.ofType(OrderCancelled) 85 | .join(orderCancelled => orderCancelled.order, order) 86 | )) 87 | ); 88 | } 89 | 90 | function createJinagaClient(purgeConditions: (p: PurgeConditions) => PurgeConditions) { 91 | return JinagaClient.create({ 92 | purgeConditions 93 | }); 94 | } 95 | -------------------------------------------------------------------------------- /test/single-use/singleUseForkSpec.ts: -------------------------------------------------------------------------------- 1 | import { AuthenticationTest, FactEnvelope, FactManager, FactReference, Fork, Jinaga, MemoryStore, ObservableSource, User } from '../../src'; 2 | 3 | // Define a fake Fork implementation that captures saved facts 4 | class FakeFork implements Fork { 5 | public savedEnvelopes: FactEnvelope[] = []; 6 | 7 | async save(envelopes: FactEnvelope[]): Promise<void> { 8 | this.savedEnvelopes = this.savedEnvelopes.concat(envelopes); 9 | return Promise.resolve(); 10 | } 11 | 12 | async load(references: FactReference[]): Promise<FactEnvelope[]> { 13 | return Promise.resolve([]); 14 | } 15 | 16 | async processQueueNow(): Promise<void> { 17 | return Promise.resolve(); 18 | } 19 | 20 | async close(): Promise<void> { 21 | return Promise.resolve(); 22 | } 23 | } 24 | 25 | // Define an Environment fact type that will be owned by the single-use principal 26 | class Environment { 27 | static Type = "Enterprise.Environment" as const; 28 | type = Environment.Type; 29 | 30 | constructor( 31 | public creator: User, 32 | public identifier: string 33 | ) { } 34 | } 35 | 36 | describe('SingleUse with FakeFork', () => { 37 | it('should create single-use principal', async () => { 38 | // Arrange 39 | const store = new MemoryStore(); 40 | const fakeFork = new FakeFork(); 41 | const observableSource = new ObservableSource(store); 42 | const authentication = new AuthenticationTest(store, null, null, null); 43 | const factManager = new FactManager(fakeFork, observableSource, store, { 44 | feeds: async () => [], 45 | fetchFeed: async () => ({ references: [], bookmark: '' }), 46 | streamFeed: () => () => {}, 47 | load: async () => [] 48 | }, []); 49 | const
j = new Jinaga(authentication, factManager, null); 50 | 51 | // Act 52 | await j.singleUse(async (principal: User) => { 53 | // Assert 54 | expect(principal).toBeDefined(); 55 | expect(principal.type).toBe('Jinaga.User'); 56 | expect(principal.publicKey).toContain('-----BEGIN PUBLIC KEY-----'); 57 | return 0; 58 | }); 59 | }); 60 | 61 | it('should sign facts created by single-use principal', async () => { 62 | // Arrange 63 | const store = new MemoryStore(); 64 | const fakeFork = new FakeFork(); 65 | const observableSource = new ObservableSource(store); 66 | const authentication = new AuthenticationTest(store, null, null, null); 67 | const factManager = new FactManager(fakeFork, observableSource, store, { 68 | feeds: async () => [], 69 | fetchFeed: async () => ({ references: [], bookmark: '' }), 70 | streamFeed: () => () => {}, 71 | load: async () => [] 72 | }, []); 73 | const j = new Jinaga(authentication, factManager, null); 74 | 75 | // Act 76 | const publicKey = await j.singleUse(async (principal: User) => { 77 | await j.fact(new Environment(principal, "Production")); 78 | return principal.publicKey; 79 | }); 80 | 81 | // Assert 82 | // Find the Environment fact in the saved envelopes 83 | const environmentFact = fakeFork.savedEnvelopes 84 | .filter(envelope => envelope.fact.type === "Enterprise.Environment") 85 | .map(envelope => envelope.fact); 86 | expect(environmentFact.length).toBe(1); 87 | 88 | // Find the signature for the Environment fact 89 | const environmentSignature = fakeFork.savedEnvelopes 90 | .filter(envelope => envelope.fact.type === "Enterprise.Environment") 91 | .flatMap(envelope => envelope.signatures); 92 | expect(environmentSignature.length).toBe(1); 93 | 94 | // Verify the signature uses the principal's public key 95 | expect(environmentSignature[0].publicKey).toBe(publicKey); 96 | }); 97 | }); 98 | -------------------------------------------------------------------------------- /test/single-use/singleUseStoreSpec.ts: -------------------------------------------------------------------------------- 1 | import { AuthenticationTest, FactManager, Jinaga, MemoryStore, ObservableSource, PassThroughFork, User } from '../../src'; 2 | 3 | // Define a test fact type that will be owned by the single-use principal 4 | class TestFact { 5 | static Type = "TestFact" as const; 6 | type = TestFact.Type; 7 | 8 | constructor( 9 | public owner: User, 10 | public value: string 11 | ) { } 12 | } 13 | 14 | describe('SingleUse with Store', () => { 15 | it('should create and sign facts with a single-use principal', async () => { 16 | // Arrange 17 | const store = new MemoryStore(); 18 | const fork = new PassThroughFork(store); 19 | const observableSource = new ObservableSource(store); 20 | const authentication = new AuthenticationTest(store, null, null, null); 21 | const factManager = new FactManager(fork, observableSource, store, { 22 | feeds: async () => [], 23 | fetchFeed: async () => ({ references: [], bookmark: '' }), 24 | streamFeed: () => () => {}, 25 | load: async () => [] 26 | }, []); 27 | const j = new Jinaga(authentication, factManager, null); 28 | 29 | // Act 30 | const result = await j.singleUse(async (principal: User) => { 31 | // Create a fact owned by the principal 32 | const fact = await j.fact(new TestFact(principal, 'test value')); 33 | return fact; 34 | }); 35 | 36 | // Assert 37 | expect(result).toBeDefined(); 38 | expect(result.type).toBe('TestFact'); 39 | expect(result.owner.type).toBe('Jinaga.User'); 40 | expect(result.owner.publicKey).toBeDefined(); 41 | 
expect(result.value).toBe('test value'); 42 | 43 | // Verify that the fact was saved to the store 44 | const facts = await store.load([{ 45 | type: 'TestFact', 46 | hash: Jinaga.hash(result) 47 | }]); 48 | 49 | // Find the TestFact in the returned facts 50 | const testFact = facts.find(f => f.fact.type === 'TestFact'); 51 | expect(testFact).toBeDefined(); 52 | expect(testFact!.fact.fields.value).toBe('test value'); 53 | 54 | // Verify that the fact has a signature 55 | expect(testFact!.signatures.length).toBeGreaterThan(0); 56 | 57 | // Verify that the user fact was saved to the store 58 | const userFacts = await store.load([{ 59 | type: 'Jinaga.User', 60 | hash: Jinaga.hash(result.owner) 61 | }]); 62 | expect(userFacts.length).toBe(1); 63 | expect(userFacts[0].fact.type).toBe('Jinaga.User'); 64 | expect(userFacts[0].fact.fields.publicKey).toBeDefined(); 65 | 66 | // Verify that the user fact has a signature 67 | expect(userFacts[0].signatures.length).toBeGreaterThan(0); 68 | }); 69 | }); 70 | -------------------------------------------------------------------------------- /test/specification/versioningSpec.ts: -------------------------------------------------------------------------------- 1 | import { JinagaTest, buildModel } from "../../src"; 2 | 3 | class Parent { 4 | static Type = "Parent"; 5 | type = Parent.Type; 6 | 7 | constructor( 8 | public readonly id: string 9 | ) {} 10 | } 11 | 12 | interface Child { 13 | parent: Parent; 14 | name: string; 15 | } 16 | 17 | class ChildVersion1 implements Child { 18 | static Type = "Child"; 19 | type = ChildVersion1.Type; 20 | 21 | constructor( 22 | public readonly parent: Parent, 23 | public readonly name: string 24 | ) {} 25 | } 26 | 27 | class ChildVersion2 implements Child { 28 | static Type = "Child"; 29 | type = ChildVersion2.Type; 30 | 31 | constructor( 32 | public readonly parent: Parent, 33 | public readonly name: string, 34 | public readonly age: number | undefined 35 | ) {} 36 | } 37 | 38 | const model = buildModel(b => b 39 | .type(Parent) 40 | .type(ChildVersion2, m => m 41 | .predecessor("parent", Parent) 42 | ) 43 | ); 44 | 45 | const childrenOfParentAsFacts = model.given(Parent).match((parent, facts) => 46 | facts.ofType(ChildVersion2) 47 | .join(child => child.parent, parent) 48 | ); 49 | 50 | const childrenOfParentWithFields = model.given(Parent).match((parent, facts) => 51 | facts.ofType(ChildVersion2) 52 | .join(child => child.parent, parent) 53 | .select(child => ({ 54 | name: child.name, 55 | age: child.age 56 | })) 57 | ); 58 | 59 | describe("versioning", () => { 60 | it("should read version 1 into version 2", async () => { 61 | const j = JinagaTest.create({ 62 | model, 63 | initialState: [ 64 | new Parent("parent"), 65 | new ChildVersion1(new Parent("parent"), "child") 66 | ] 67 | }); 68 | 69 | const parent = await j.fact(new Parent("parent")); 70 | const children = await j.query(childrenOfParentWithFields, parent); 71 | 72 | expect(children).toHaveLength(1); 73 | expect(children[0].name).toEqual("child"); 74 | expect(children[0].age).toBeUndefined(); 75 | }); 76 | 77 | it("should have the same hash", async () => { 78 | const j = JinagaTest.create({ 79 | model, 80 | initialState: [ 81 | new Parent("parent"), 82 | new ChildVersion1(new Parent("parent"), "child") 83 | ] 84 | }); 85 | 86 | const parent = await j.fact(new Parent("parent")); 87 | const children = await j.query(childrenOfParentAsFacts, parent); 88 | 89 | expect(children).toHaveLength(1); 90 | expect(j.hash(children[0])).toEqual( 91 | j.hash(new 
ChildVersion1(new Parent("parent"), "child")) 92 | ); 93 | }); 94 | }); -------------------------------------------------------------------------------- /test/storage/referenceSpec.ts: -------------------------------------------------------------------------------- 1 | import 'source-map-support/register'; 2 | 3 | import { uniqueFactReferences } from '../../src/storage'; 4 | 5 | describe('Fact reference', () => { 6 | 7 | it('should find unique in empty list', () => { 8 | const unique = uniqueFactReferences([]); 9 | expect(unique.length).toEqual(0); 10 | }); 11 | 12 | it('should find unique in singleton', () => { 13 | const unique = uniqueFactReferences([{type:'', hash:''}]); 14 | expect(unique.length).toEqual(1); 15 | }); 16 | 17 | it('should find unique in double', () => { 18 | const unique = uniqueFactReferences([{type:'', hash:''}, {type:'', hash:''}]); 19 | expect(unique.length).toEqual(1); 20 | }); 21 | 22 | it('should find unique in same type', () => { 23 | const unique = uniqueFactReferences([{type:'a', hash:''}, {type:'a', hash:''}]); 24 | expect(unique.length).toEqual(1); 25 | }); 26 | 27 | it('should find unique in different type', () => { 28 | const unique = uniqueFactReferences([{type:'a', hash:''}, {type:'b', hash:''}]); 29 | expect(unique.length).toEqual(2); 30 | }); 31 | 32 | it('should find unique in same hash', () => { 33 | const unique = uniqueFactReferences([{type:'a', hash:'x'}, {type:'a', hash:'x'}]); 34 | expect(unique.length).toEqual(1); 35 | }); 36 | 37 | it('should find unique in different hash', () => { 38 | const unique = uniqueFactReferences([{type:'a', hash:'x'}, {type:'a', hash:'y'}]); 39 | expect(unique.length).toEqual(2); 40 | }); 41 | 42 | }); -------------------------------------------------------------------------------- /tsconfig.cjs.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig.json", 3 | "compilerOptions": { 4 | "module": "CommonJS", 5 | "outDir": "dist-cjs", 6 | "declaration": false, 7 | "declarationMap": false 8 | } 9 | } -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es6", 4 | "module": "ESNext", 5 | "moduleResolution": "node", 6 | "sourceMap": true, 7 | "declaration": true, 8 | "declarationMap": true, 9 | "strict": true, 10 | "noImplicitAny": true, 11 | "forceConsistentCasingInFileNames": true, 12 | "skipLibCheck": true, 13 | "esModuleInterop": true, 14 | "outDir": "dist", 15 | "baseUrl": ".", 16 | "paths": { 17 | "*": [ 18 | "./node_modules/*", 19 | "./types/*" 20 | ] 21 | } 22 | }, 23 | "include": [ 24 | "src/**/*.ts" 25 | ] 26 | } -------------------------------------------------------------------------------- /tsconfig.test.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es6", 4 | "module": "commonjs", 5 | "sourceMap": true, 6 | "declaration": true, 7 | "strict": true, 8 | "noImplicitAny": true, 9 | "forceConsistentCasingInFileNames": true, 10 | "skipLibCheck": true, 11 | "esModuleInterop": true, 12 | "outDir": "dist", 13 | "baseUrl": ".", 14 | "paths": { 15 | "*": [ 16 | "./node_modules/*", 17 | "./types/*" 18 | ] 19 | } 20 | }, 21 | "include": [ 22 | "test/**/*.ts" 23 | ] 24 | } -------------------------------------------------------------------------------- /types/keypair.d.ts: 
-------------------------------------------------------------------------------- 1 | declare module 'keypair' { 2 | function Keypair(options: { bits: number }): { private: string, public: string }; 3 | export = Keypair; 4 | } 5 | --------------------------------------------------------------------------------