├── .envrc ├── .github └── workflows │ ├── combine-prs.yml │ └── main.yml ├── .gitignore ├── .gitlab-ci.yml ├── .vscode └── settings.json ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── babel.config.js ├── flake.lock ├── flake.nix ├── package.json ├── src ├── datalog.test.ts ├── datalog.ts ├── index.ts ├── view-ext.test.ts └── view-ext.ts ├── tsconfig.json └── yarn.lock /.envrc: -------------------------------------------------------------------------------- 1 | if ! has nix_direnv_version || ! nix_direnv_version 1.4.0; then 2 | source_url "https://raw.githubusercontent.com/nix-community/nix-direnv/1.4.0/direnvrc" "sha256-4XfVDjv75eHMWN4G725VW7BoOV4Vl3vAabK4YXIfPyE=" 3 | fi 4 | 5 | use flake 6 | -------------------------------------------------------------------------------- /.github/workflows/combine-prs.yml: -------------------------------------------------------------------------------- 1 | name: 'Combine PRs' 2 | 3 | # Controls when the action will run - in this case triggered manually 4 | on: 5 | workflow_dispatch: 6 | inputs: 7 | branchPrefix: 8 | description: 'Branch prefix to find combinable PRs based on' 9 | required: true 10 | default: 'dependabot' 11 | mustBeGreen: 12 | description: 'Only combine PRs that are green (status is success)' 13 | required: true 14 | default: true 15 | combineBranchName: 16 | description: 'Name of the branch to combine PRs into' 17 | required: true 18 | default: 'combine-prs-branch' 19 | ignoreLabel: 20 | description: 'Exclude PRs with this label' 21 | required: true 22 | default: 'nocombine' 23 | 24 | # A workflow run is made up of one or more jobs that can run sequentially or in parallel 25 | jobs: 26 | # This workflow contains a single job called "combine-prs" 27 | combine-prs: 28 | # The type of runner that the job will run on 29 | runs-on: ubuntu-latest 30 | 31 | # Steps represent a sequence of tasks that will be executed as part of the job 32 | steps: 33 | - uses: actions/github-script@v3 34 | id: fetch-branch-names 35 | name: Fetch branch names 36 | with: 37 | github-token: ${{secrets.GITHUB_TOKEN}} 38 | script: | 39 | const pulls = await github.paginate('GET /repos/:owner/:repo/pulls', { 40 | owner: context.repo.owner, 41 | repo: context.repo.repo 42 | }); 43 | branches = []; 44 | prs = []; 45 | base_branch = null; 46 | for (const pull of pulls) { 47 | const branch = pull['head']['ref']; 48 | console.log('Pull for branch: ' + branch); 49 | if (branch.startsWith('${{ github.event.inputs.branchPrefix }}')) { 50 | console.log('Branch matched: ' + branch); 51 | statusOK = true; 52 | if(${{ github.event.inputs.mustBeGreen }}) { 53 | console.log('Checking green status: ' + branch); 54 | const statuses = await github.paginate('GET /repos/{owner}/{repo}/commits/{ref}/status', { 55 | owner: context.repo.owner, 56 | repo: context.repo.repo, 57 | ref: branch 58 | }); 59 | if(statuses.length > 0) { 60 | const latest_status = statuses[0]['state']; 61 | console.log('Validating status: ' + latest_status); 62 | if(latest_status != 'success') { 63 | console.log('Discarding ' + branch + ' with status ' + latest_status); 64 | statusOK = false; 65 | } 66 | } 67 | } 68 | console.log('Checking labels: ' + branch); 69 | const labels = pull['labels']; 70 | for(const label of labels) { 71 | const labelName = label['name']; 72 | console.log('Checking label: ' + labelName); 73 | if(labelName == '${{ github.event.inputs.ignoreLabel }}') { 74 | console.log('Discarding ' + branch + ' with label ' + labelName); 75 | statusOK = false; 76 | } 77 | } 78 | if (statusOK) { 
79 | console.log('Adding branch to array: ' + branch); 80 | branches.push(branch); 81 | prs.push('#' + pull['number'] + ' ' + pull['title']); 82 | base_branch = pull['base']['ref']; 83 | } 84 | } 85 | } 86 | if (branches.length == 0) { 87 | core.setFailed('No PRs/branches matched criteria'); 88 | return; 89 | } 90 | core.setOutput('base-branch', base_branch); 91 | core.setOutput('prs-string', prs.join('\n')); 92 | 93 | combined = branches.join(' ') 94 | console.log('Combined: ' + combined); 95 | return combined 96 | # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it 97 | - uses: actions/checkout@v2.3.3 98 | with: 99 | fetch-depth: 0 100 | # Creates a branch with other PR branches merged together 101 | - name: Created combined branch 102 | env: 103 | BASE_BRANCH: ${{ steps.fetch-branch-names.outputs.base-branch }} 104 | BRANCHES_TO_COMBINE: ${{ steps.fetch-branch-names.outputs.result }} 105 | COMBINE_BRANCH_NAME: ${{ github.event.inputs.combineBranchName }} 106 | run: | 107 | echo "$BRANCHES_TO_COMBINE" 108 | sourcebranches="${BRANCHES_TO_COMBINE%\"}" 109 | sourcebranches="${sourcebranches#\"}" 110 | 111 | basebranch="${BASE_BRANCH%\"}" 112 | basebranch="${basebranch#\"}" 113 | 114 | git config pull.rebase false 115 | git config user.name github-actions 116 | git config user.email github-actions@github.com 117 | 118 | git branch $COMBINE_BRANCH_NAME $basebranch 119 | git checkout $COMBINE_BRANCH_NAME 120 | git pull origin $sourcebranches --no-edit 121 | git push origin $COMBINE_BRANCH_NAME 122 | # Creates a PR with the new combined branch 123 | - uses: actions/github-script@v3 124 | name: Create Combined Pull Request 125 | env: 126 | PRS_STRING: ${{ steps.fetch-branch-names.outputs.prs-string }} 127 | with: 128 | github-token: ${{secrets.GITHUB_TOKEN}} 129 | script: | 130 | const prString = process.env.PRS_STRING; 131 | const body = 'This PR was created by the Combine PRs action by combining the following PRs:\n' + prString; 132 | await github.pulls.create({ 133 | owner: context.repo.owner, 134 | repo: context.repo.repo, 135 | title: 'Combined PR', 136 | head: '${{ github.event.inputs.combineBranchName }}', 137 | base: '${{ steps.fetch-branch-names.outputs.base-branch }}', 138 | body: body 139 | }); -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | # This is a basic workflow to help you get started with Actions 2 | 3 | name: CI 4 | 5 | # Controls when the action will run. 
Triggers the workflow on push or pull request 6 | # events but only for the master branch 7 | on: 8 | push: 9 | branches: [ master ] 10 | pull_request: 11 | branches: [ master ] 12 | 13 | # A workflow run is made up of one or more jobs that can run sequentially or in parallel 14 | jobs: 15 | # This workflow contains a single job called "build" 16 | build: 17 | # The type of runner that the job will run on 18 | runs-on: ubuntu-latest 19 | 20 | # Steps represent a sequence of tasks that will be executed as part of the job 21 | steps: 22 | # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it 23 | - uses: actions/checkout@v2 24 | - run: yarn install --frozen-lockfile 25 | - run: yarn jest --ci -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | yarn_error.log 3 | dist 4 | -------------------------------------------------------------------------------- /.gitlab-ci.yml: -------------------------------------------------------------------------------- 1 | # .gitlab-ci.yml 2 | image: node:12.16.2 3 | 4 | before_script: 5 | - yarn install --frozen-lockfile 6 | 7 | test: 8 | stage: test 9 | script: yarn jest --ci 10 | cache: 11 | paths: 12 | - node_modules/ 13 | - .yarn -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "editor.formatOnSave": true, 3 | "editor.defaultFormatter": "vscode.typescript-language-features", 4 | "eslint.format.enable": false, 5 | "[typescript]": { 6 | "editor.defaultFormatter": "vscode.typescript-language-features" 7 | } 8 | } -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | Permission is hereby granted, free of charge, to any 2 | person obtaining a copy of this software and associated 3 | documentation files (the "Software"), to deal in the 4 | Software without restriction, including without 5 | limitation the rights to use, copy, modify, merge, 6 | publish, distribute, sublicense, and/or sell copies of 7 | the Software, and to permit persons to whom the Software 8 | is furnished to do so, subject to the following 9 | conditions: 10 | 11 | The above copyright notice and this permission notice 12 | shall be included in all copies or substantial portions 13 | of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 16 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 17 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 18 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT 19 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 20 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 21 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 22 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 23 | DEALINGS IN THE SOFTWARE. 24 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## A Differential Datalog Implementation in JS 2 | 3 | An implementation of Datalog with a focus on managing UIs & UI state. 4 | 5 | ## Features 6 | 7 | * Expressive and simple querying syntax 8 | * Differential updates. 9 | * Only run queries on the differences in data. Don't run the query on everything every time. 10 | * Query your Queries 11 | * Run queries on the results of your queries. It's queries all the way down. 12 | * Typed schema and types. 13 | * Works with [React](https://gitlab.com/datalogui/react). 14 | 15 | ## Examples 16 | 17 | Who is the parent of Alice? 18 | 19 | ```ts 20 | import * as datalog from '@datalogui/datalog' 21 | 22 | // First we create our Datalog Table. This is what holds our data 23 | const People = datalog.newTable<{ id: number, name: string }>({ 24 | id: datalog.NumberType, 25 | name: datalog.StringType, 26 | }) 27 | 28 | // Add some data 29 | People.assert({id: 0, name: "Alice"}) 30 | People.assert({id: 1, name: "Charles"}) 31 | People.assert({id: 2, name: "Helen"}) 32 | 33 | // Define a new table for the ParentOf Relation 34 | const ParentOf = datalog.newTable<{ parentID: number, childID: number }>({ 35 | parentID: datalog.NumberType, 36 | childID: datalog.NumberType, 37 | }) 38 | 39 | ParentOf.assert({parentID: 1, childID: 0}) 40 | ParentOf.assert({parentID: 2, childID: 0}) 41 | 42 | // Our query. 
You can think of this as saying: 43 | // Find me a parentName, parentID, and childID such that 44 | // There is a person named "Alice" and their id is childID 45 | // The parent of childID should be parentID 46 | // and the name of parentID should be parentName 47 | const Query = datalog.query<{parentName: string, parentID: number, childID: number}>(({parentName, parentID, childID}) => { 48 | People({name: "Alice", id: childID}) 49 | ParentOf({childID, parentID}) 50 | People({id: parentID, name: parentName}) 51 | }) 52 | 53 | // See the results of the query: 54 | Query.view().readAllData() 55 | // => [{childID: 0, parentID: 1, parentName: "Charles"}, {childID: 0, parentID: 2, parentName: "Helen"}] 56 | ``` 57 | 58 | And what if we wanted to query those results? 59 | 60 | ```ts 61 | // Give me the ID of anyone named "Helen" from the query above. 62 | const QueryQuery = datalog.query<{parentID: number}>(({parentID}) => { 63 | Query({parentID, parentName: "Helen"}) 64 | }) 65 | 66 | QueryQuery.view().readAllData() 67 | ``` 68 | Play with this example [here](https://runkit.com/marcopolo/5ea20f05b9b04d001a07291a). 69 | 70 | 71 | -------------------------------------------------------------------------------- /babel.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | presets: [ 3 | ['@babel/preset-env', { targets: { node: 'current' } }], 4 | '@babel/preset-typescript', 5 | ], 6 | }; 7 | -------------------------------------------------------------------------------- /flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "flake-utils": { 4 | "locked": { 5 | "lastModified": 1648297722, 6 | "narHash": "sha256-W+qlPsiZd8F3XkzXOzAoR+mpFqzm3ekQkJNa+PIh1BQ=", 7 | "owner": "numtide", 8 | "repo": "flake-utils", 9 | "rev": "0f8662f1319ad6abf89b3380dd2722369fc51ade", 10 | "type": "github" 11 | }, 12 | "original": { 13 | "owner": "numtide", 14 | "repo": "flake-utils", 15 | "type": "github" 16 | } 17 | }, 18 | "nixpkgs": { 19 | "locked": { 20 | "lastModified": 1648553562, 21 | "narHash": "sha256-xQhRKu6h0phd56oCzGjkhHkY4eDI1XKedGqkFtlXapk=", 22 | "owner": "NixOS", 23 | "repo": "nixpkgs", 24 | "rev": "9b168e5e62406fa2e55e132f390379a6ba22b402", 25 | "type": "github" 26 | }, 27 | "original": { 28 | "id": "nixpkgs", 29 | "type": "indirect" 30 | } 31 | }, 32 | "root": { 33 | "inputs": { 34 | "flake-utils": "flake-utils", 35 | "nixpkgs": "nixpkgs" 36 | } 37 | } 38 | }, 39 | "root": "root", 40 | "version": 7 41 | } 42 | -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | description = "Basic typescript environment. 
Run `setup`."; 3 | inputs.flake-utils.url = "github:numtide/flake-utils"; 4 | 5 | outputs = { self, nixpkgs, flake-utils }: 6 | flake-utils.lib.eachDefaultSystem (system: 7 | let 8 | pkgs = import nixpkgs { system = system; }; 9 | setupScript = pkgs.writeScriptBin "setup" '' 10 | #!/usr/bin/env bash 11 | 12 | yarn init 13 | 14 | # Install dependencies 15 | yarn add --dev typescript jest @babel/preset-typescript @types/jest babel-jest @babel/core @babel/preset-env 16 | 17 | mkdir src || true 18 | 19 | cat > src/sum.ts < src/sum.test.ts < { 29 | expect(sum(1, 2)).toBe(3); 30 | }); 31 | EOF 32 | 33 | cat > babel.config.js < .gitignore <(v: Array): Array> { 5 | return v.map(datum => ({ kind: datalog.Added, datum })) 6 | } 7 | 8 | type PersonID = number 9 | 10 | describe('Relation', () => { 11 | const newPerson = () => new datalog.RelationIndex<"id", PersonID, { name: string }>([], ["id", "name"]) 12 | 13 | test('Inserts something in the correct place', () => { 14 | // { id: PersonID, name: String} 15 | const person = newPerson() 16 | 17 | expect(person.elements).toEqual([]) 18 | person.assert({ id: 0, name: "marco" }) 19 | expect(person.elements).toEqual([[0, "marco"]]) 20 | person.assert({ id: 1, name: "daiyi" }) 21 | expect(person.elements).toEqual([[0, "marco"], [1, "daiyi"]]) 22 | }); 23 | 24 | test('Can filter by constants', () => { 25 | // { id: PersonID, name: String} 26 | const person = newPerson() 27 | 28 | person.assert({ id: 0, name: "marco" }) 29 | person.assert({ id: 1, name: "daiyi" }) 30 | const filteredPeople = person.filterElements({ name: "marco" }) 31 | expect(filteredPeople.elements).toEqual([[0, "marco"]]) 32 | }); 33 | 34 | test('indexBy', () => { 35 | const A = new datalog.RelationIndex<"a", number, { b: number }>([], ["a", "b"]) 36 | A.assert({ a: 1, b: 2 }) 37 | 38 | const B = A.indexBy(['b', 'a']) 39 | expect(B.elements).toEqual([[2, 1]]) 40 | }) 41 | 42 | test("join key ordering", () => { 43 | const out = datalog.joinKeyOrdering([ 44 | ["a", "b"], 45 | ["b", "c"], 46 | ["a", "c", "d"] 47 | ]) 48 | 49 | expect(out).toEqual(["a", "b", "c", "d"]) 50 | }) 51 | 52 | test("join key ordering 2", () => { 53 | const out = datalog.joinKeyOrdering([ 54 | ["a", "b"], 55 | ["b", "a"], 56 | ]) 57 | 58 | expect(out).toEqual(["a", "b"]) 59 | }) 60 | 61 | test('Using the Unconstrained symbol in joins to specify columns that are not constrained', () => { 62 | const A = new datalog.RelationIndex<"a", number, { b: number }>([], ["a", "b"]) 63 | const B = new datalog.RelationIndex<"b", number, { c: number }>([], ["b", "c"]) 64 | const C = new datalog.RelationIndex<"a", number, { c: number, d: number }>([], ["a", "c", "d"]) 65 | 66 | A.assert({ a: 1, b: 2 }) 67 | B.assert({ b: 2, c: 3 }) 68 | B.assert({ b: 2, c: 4 }) 69 | C.assert({ a: 1, c: 3, d: 0 }) 70 | C.assert({ a: 1, c: 3, d: 2 }) 71 | C.assert({ a: 1, c: 3, d: 4 }) 72 | 73 | // const BLeaper = new datalog.ExtendWithUnconstrained( 74 | // ([_a, b]: [number, number]) => b, 75 | // ["c", "d"], 76 | // B 77 | // ) 78 | // expect(BLeaper.outputTupleFunc([3])).toEqual([3, datalog.Unconstrained]) 79 | 80 | const out: Array<[number, number, ...(number | symbol)[]]> = [] 81 | 82 | datalog.leapJoinHelper(A, [ 83 | new datalog.ExtendWithUnconstrained( 84 | ([_a, b]) => [b], 85 | 1, 86 | ["c", "d"], 87 | B, 88 | ["b", "c"] 89 | ), 90 | new datalog.ExtendWithUnconstrained( 91 | ([a, _b]) => [a], 92 | 1, 93 | ["c", "d"], 94 | C, 95 | ["a", "c", "d"] 96 | ), 97 | ], ([a, b], rest) => { 98 | out.push([a, b, ...rest]) 99 | }) 100 | 
expect(out).toEqual([[1, 2, 3, 0], [1, 2, 3, 2], [1, 2, 3, 4]]) 101 | 102 | expect(datalog.sortTuple([datalog.Unconstrained, 1], [2, 1])).toEqual(0) 103 | }) 104 | 105 | test('Test AntiExtendWithUnconstrained', () => { 106 | const A = new datalog.RelationIndex<"a", number, { b: number }>([], ["a", "b"]) 107 | const B = new datalog.RelationIndex<"b", number, { c: number }>([], ["b", "c"]) 108 | const C = new datalog.RelationIndex<"a", number, { c: number }>([], ["a", "c"]) 109 | const Cneg = new datalog.RelationIndex<"a", number, { c: number }>([], ["a", "c"]) 110 | 111 | A.assert({ a: 1, b: 2 }) 112 | B.assert({ b: 2, c: 3 }) 113 | B.assert({ b: 2, c: 4 }) 114 | C.assert({ a: 1, c: 3 }) 115 | C.assert({ a: 1, c: 4 }) 116 | Cneg.assert({ a: 1, c: 3 }) 117 | 118 | // Cneg.assert({ a: 1, c: 3 }) 119 | 120 | // const BLeaper = new datalog.ExtendWithUnconstrained( 121 | // ([_a, b]: [number, number]) => b, 122 | // ["c", "d"], 123 | // B 124 | // ) 125 | // expect(BLeaper.outputTupleFunc([3])).toEqual([3, datalog.Unconstrained]) 126 | 127 | const out: Array<[number, number, ...(number | symbol)[]]> = [] 128 | 129 | datalog.leapJoinHelper(A, [ 130 | new datalog.ExtendWithUnconstrained( 131 | ([_a, b]) => [b], 132 | 1, 133 | ["c"], 134 | B, 135 | ["b", "c"] 136 | ), 137 | new datalog.ExtendWithUnconstrained( 138 | ([a, _b]) => [a], 139 | 1, 140 | ["c"], 141 | C, 142 | ["a", "c"] 143 | ), 144 | new datalog.ExtendWithUnconstrained( 145 | ([a, _b]) => [a], 146 | 1, 147 | ["c"], 148 | Cneg, 149 | ["a", "c"], 150 | true 151 | ), 152 | ], ([a, b], rest) => { 153 | out.push([a, b, ...rest]) 154 | }) 155 | expect(out).toEqual([[1, 2, 4]]) 156 | 157 | expect(datalog.sortTuple([datalog.Unconstrained, 1], [2, 1])).toEqual(0) 158 | }) 159 | 160 | test('Filter out missing keys', () => { 161 | // Say relation B has keys ['c', 'b'] 162 | // and our output tuple key order is ['a', 'b', 'c', 'd'] 163 | // We should index B by ['b', 'c'] 164 | 165 | expect( 166 | datalog.filterKeys(['b', 'c'], ['a', 'b', 'c', 'd']) 167 | ).toEqual(['b', 'c']) 168 | 169 | 170 | 171 | }) 172 | 173 | test('Order leaper keys', () => { 174 | let restKeys = [ 175 | ['c', 'e'], 176 | ['c', 'd'], 177 | ['c', 'd', 'e'], 178 | ] 179 | 180 | let minIdx = 0 181 | restKeys.forEach((ks, i) => { 182 | if (ks.length < restKeys[minIdx].length) { 183 | minIdx = i 184 | } 185 | }) 186 | 187 | let setWithLeastKeys = new Set(restKeys[minIdx]) 188 | restKeys = restKeys.filter((_, i) => i !== minIdx) 189 | 190 | let currentIdx = 0 191 | 192 | // Split between common keys and rest of the keys 193 | const restKeysSplit: Array<[Array, Array]> = restKeys.map(() => [[], []]) 194 | 195 | restKeys.forEach((ks, j) => { 196 | ks.forEach(rest_k => { 197 | if (setWithLeastKeys.has(rest_k)) { 198 | restKeysSplit[j][0].push(rest_k) 199 | } else { 200 | restKeysSplit[j][1].push(rest_k) 201 | } 202 | }) 203 | }) 204 | 205 | restKeys = restKeysSplit.map(([common, rest]) => common.concat(rest)) 206 | // while (currentIdx < setWithLeastKeys.length) { 207 | // const k = setWithLeastKeys[currentIdx] 208 | // restKeys.forEach(ks => { 209 | // ks 210 | // }) 211 | // } 212 | }) 213 | }) 214 | 215 | describe("MultiIndexRelations", () => { 216 | test("MultiIndex can indexBy", () => { 217 | const A = new datalog.Relation<{ a: number, b: number }>() 218 | A.assert({ a: 1, b: 2 }) 219 | A.indexBy(['a', 'b']) 220 | expect(A.relations[0].elements).toEqual([[1, 2]]) 221 | A.indexBy(['b', 'a']) 222 | expect(A.relations[0].elements).toEqual([[1, 2]]) 223 | 
expect(A.relations[1].elements).toEqual([[2, 1]]) 224 | }) 225 | }) 226 | 227 | describe("Variables", () => { 228 | test("Duplicates should not be in stable", () => { 229 | const v = new datalog.Variable() 230 | v.assert({ a: 1, b: 2 }) 231 | v.assert({ a: 1, b: 2 }) 232 | while (v.changed()) { } 233 | expect(v.readAllData()).toEqual([{ a: 1, b: 2 }]) 234 | }) 235 | 236 | test("Variables can be told stuff", () => { 237 | const A = new datalog.Variable<{ a: number, b: number }>() 238 | A.assert({ a: 1, b: 2 }) 239 | expect(A.changed()).toEqual(true) 240 | expect(A.changed()).toEqual(false) 241 | expect(A.stable.relations[0].keyOrdering).toEqual(["a", "b"]) 242 | expect(A.stable.relations[0].elements).toEqual([[1, 2]]) 243 | expect(A.changed()).toEqual(false) 244 | 245 | A.assert({ a: 1, b: 2 }) 246 | expect(A.changed()).toEqual(false) 247 | expect(A.stable.relations[0].elements).toEqual([[1, 2]]) 248 | 249 | A.assert({ a: 2, b: 3 }) 250 | expect(A.changed()).toEqual(true) 251 | expect(A.changed()).toEqual(false) 252 | expect(A.stable.relations[0].elements).toEqual([[1, 2], [2, 3]]) 253 | }) 254 | 255 | test("Joining Variables", () => { 256 | const A = new datalog.Variable<{ a: number, b: number }>() 257 | const B = new datalog.Variable<{ b: number, c: number }>() 258 | const C = new datalog.Variable<{ c: number, a: number, d: number }>() 259 | A.assert({ a: 1, b: 2 }) 260 | B.assert({ b: 2, c: 3 }) 261 | B.assert({ b: 2, c: 4 }) 262 | C.assert({ a: 1, c: 3, d: 5 }) 263 | C.assert({ a: 1, c: 3, d: 7 }) 264 | 265 | let out: Array<{ a: number, b: number, c: number, d: number }> = [] 266 | datalog.variableJoinHelper(join => out.push(join), [A, B, C], [{ a: 'a', b: 'b' }, { b: 'b', c: 'c' }, { a: 'a', c: 'c', d: 'd' }], [{}, {}, {}]) 267 | 268 | expect(out).toEqual([ 269 | { a: 1, b: 2, c: 3, d: 5 }, 270 | { a: 1, b: 2, c: 3, d: 7 } 271 | ]) 272 | }) 273 | 274 | test("Joining Empty Variables", () => { 275 | const A = new datalog.Variable<{ a: number, b: number }>() 276 | const B = new datalog.Variable<{ b: number, c: number }>() 277 | const C = new datalog.Variable<{ c: number, a: number, d: number }>() 278 | 279 | let out: Array<{ a: number, b: number, c: number, d: number }> = [...datalog.variableJoinHelperGen([A, B, C], [{ a: 'a', b: 'b' }, { b: 'b', c: 'c' }, { a: 'a', c: 'c', d: 'd' }], [{}, {}, {}])] 280 | 281 | expect(out).toEqual([]) 282 | }) 283 | 284 | test("Joining 1 Variable", () => { 285 | const A = new datalog.Variable<{ a: number, b: number }>() 286 | 287 | let out: Array<{ a: number, b: number }> = [...datalog.variableJoinHelperGen([A], [{ a: 'a', b: 'b' }], [{}])] 288 | expect(out).toEqual([]) 289 | 290 | A.assert({ a: 1, b: 2 }) 291 | out = [...datalog.variableJoinHelperGen([A], [{ a: 'a', b: 'b' }], [{}])] 292 | expect(out).toEqual([{ a: 1, b: 2 }]) 293 | }) 294 | 295 | test("Joining 1 Variable with constants", () => { 296 | const A = new datalog.Variable<{ a: number, b: number }>() 297 | 298 | let out: Array<{ a: number, b: number }> = [...datalog.variableJoinHelperGen([A], [{ a: 'a', b: 'b' }], [{}])] 299 | expect(out).toEqual([]) 300 | 301 | A.assert({ a: 1, b: 2 }) 302 | A.assert({ a: 2, b: 3 }) 303 | out = [...datalog.variableJoinHelperGen([A], [{ a: 'a', b: 'b' }], [{ a: 1 }])] 304 | expect(out).toEqual([{ a: 1, b: 2 }]) 305 | }) 306 | 307 | test("Joining 2 Variables", () => { 308 | const A = new datalog.Variable<{ a: number, b: number }>() 309 | const B = new datalog.Variable<{ b: number, c: number }>() 310 | 311 | let out: Array<{ a: number, b: number, c: number }> = 
[...datalog.variableJoinHelperGen([A, B], [{ a: 'a', b: 'b' }, { b: 'b', c: 'c' }], [{}, {}])] 312 | expect(out).toEqual([]) 313 | 314 | A.assert({ a: 1, b: 2 }) 315 | A.assert({ a: 1, b: 4 }) 316 | B.assert({ b: 2, c: 3 }) 317 | out = [...datalog.variableJoinHelperGen([A, B], [{ a: 'a', b: 'b' }, { b: 'b', c: 'c' }], [{}, {}])] 318 | expect(out).toEqual([{ a: 1, b: 2, c: 3 }]) 319 | }) 320 | 321 | 322 | test("Joining 2 Variables with constants", () => { 323 | const A = new datalog.Variable<{ a: number, b: number }>() 324 | const B = new datalog.Variable<{ b: number, c: number }>() 325 | 326 | let out: Array<{ a: number, b: number, c: number }> = [...datalog.variableJoinHelperGen([A, B], [{ a: 'a', b: 'b' }, { b: 'b', c: 'c' }], [{}, {}])] 327 | expect(out).toEqual([]) 328 | 329 | A.assert({ a: 1, b: 2 }) 330 | A.assert({ a: 1, b: 4 }) 331 | A.assert({ a: 2, b: 2 }) 332 | A.assert({ a: 3, b: 2 }) 333 | B.assert({ b: 2, c: 3 }) 334 | out = [...datalog.variableJoinHelperGen([A, B], [{ a: 'a', b: 'b' }, { b: 'b', c: 'c' }], [{ a: 1 }, {}])] 335 | expect(out).toEqual([{ a: 1, b: 2, c: 3 }]) 336 | 337 | }) 338 | 339 | test("Joining 2 Variables with constants 2", () => { 340 | const A = new datalog.Variable<{ a: number, b: number }>() 341 | const B = new datalog.Variable<{ b: number, c: number }>() 342 | 343 | let out: Array<{ a: number, b: number, c: number }> = [...datalog.variableJoinHelperGen([A, B], [{ a: 'a', b: 'b' }, { b: 'b', c: 'c' }], [{}, {}])] 344 | expect(out).toEqual([]) 345 | 346 | A.assert({ a: 1, b: 2 }) 347 | A.assert({ a: 1, b: 4 }) 348 | A.assert({ a: 2, b: 5 }) 349 | A.assert({ a: 3, b: 4 }) 350 | B.assert({ b: 2, c: 3 }) 351 | 352 | out = [...datalog.variableJoinHelperGen([A, B], [{ a: 'a', b: 'b' }, { b: 'b', c: 'c' }], [{}, { c: 3 }])] 353 | expect(out).toEqual([{ a: 1, b: 2, c: 3 }]) 354 | }) 355 | 356 | test("Joining 1 Variable with remapped keys", () => { 357 | const A = new datalog.Variable<{ a: number, b: number }>() 358 | 359 | // let out: Array<{ a2: number, b2: number }> = [...datalog.variableJoinHelperGen([A], [{ a: 'a', b: 'b' }])] 360 | // expect(out).toEqual([]) 361 | 362 | A.assert({ a: 1, b: 2 }) 363 | // @ts-ignore 364 | let out = [...datalog.variableJoinHelperGen<{ a: number, b: number }, { a2: number, b2: number }>([A], [{ a: 'a2', b: 'b2' }], [{}])] 365 | expect(out).toEqual([{ a2: 1, b2: 2 }]) 366 | }) 367 | 368 | test("Joining same Variable with itself with remapped keys", () => { 369 | const A = new datalog.Variable<{ a: number, b: number }>() 370 | 371 | A.assert({ a: 1, b: 2 }) 372 | A.assert({ a: 2, b: 1 }) 373 | A.assert({ a: 3, b: 1 }) 374 | A.assert({ a: 5, b: 1 }) 375 | A.assert({ a: 3, b: 2 }) 376 | // @ts-ignore 377 | let out = [...datalog.variableJoinHelperGen<{ a: number, b: number }, { b: number, a: number }>([A, A], [{ a: 'a', b: 'b' }, { a: 'b', b: 'a' }], [{}, {}])] 378 | expect(out).toEqual([{ a: 1, b: 2 }, { a: 2, b: 1 }]) 379 | }) 380 | 381 | test("Joining same Variable with itself with remapped keys", () => { 382 | const A = new datalog.Variable<{ a: number, b: number }>() 383 | 384 | A.assert({ a: 3, b: 4 }) 385 | A.assert({ a: 1, b: 2 }) 386 | A.assert({ a: 1, b: 1 }) 387 | // @ts-ignore 388 | let out = [...datalog.variableJoinHelperGen([A, A], [{ a: 'a' }, { b: 'a' }], [{}, {}])] 389 | // TODO this should be just one value 390 | expect(out).toEqual([{ a: 1 }, { a: 1 }]) 391 | }) 392 | 393 | test("Joining non-overlapping relations", () => { 394 | const A = new datalog.Variable<{ a: number, b: number }>() 395 | const B = new 
datalog.Variable<{ c: number, d: number }>() 396 | 397 | A.assert({ a: 1, b: 2 }) 398 | A.assert({ a: 3, b: 4 }) 399 | B.assert({ c: 5, d: 6 }) 400 | B.assert({ c: 7, d: 8 }) 401 | 402 | // @ts-ignore 403 | let out = [...datalog.variableJoinHelperGen<{ a: number, b: number }, { c: number, d: number }>([A, B], [{ a: 'a', b: 'b' }, { c: 'c', d: 'd' }], [{}, {}])] 404 | expect(out).toEqual([ 405 | { 406 | "a": 1, 407 | "b": 2, 408 | "c": 5, 409 | "d": 6, 410 | }, 411 | { 412 | "a": 1, 413 | "b": 2, 414 | "c": 7, 415 | "d": 8, 416 | }, 417 | { 418 | "a": 3, 419 | "b": 4, 420 | "c": 5, 421 | "d": 6, 422 | }, 423 | { 424 | "a": 3, 425 | "b": 4, 426 | "c": 7, 427 | "d": 8, 428 | } 429 | ]) 430 | }) 431 | }) 432 | 433 | describe("recursiveForLoopJoin", () => { 434 | const A = [{ a: 1, b: 2 }] 435 | const B = [{ b: 2, c: 3 }] 436 | const C = [{ a: 1, c: 3 }] 437 | 438 | // resultSoFar is an array of datums i.e.: [{a: 1, b: 2}, {b: 2, c: 3}] 439 | const mockJoinerHelper = function* (rels: Array, resultSoFar: any): Generator { 440 | if (rels.length === 0) { 441 | const allKeys = new Set(resultSoFar.map((datum: any) => Object.keys(datum)).flat()) 442 | const keysPerDatum = resultSoFar.map((datum: any) => new Set(Object.keys(datum))) 443 | const commonKeys = [...allKeys].filter(k => keysPerDatum.every((s: any) => s.has(k))) 444 | const areCommonKeysTheSame = commonKeys.every((k: any) => { 445 | const s = new Set(resultSoFar.map((datum: any) => datum[k])) 446 | return s.size === 1 447 | }) 448 | if (areCommonKeysTheSame) { 449 | yield resultSoFar.reduce((acc: any, o: any) => ({ ...acc, ...o }), {}) 450 | } 451 | } else { 452 | const [head, ...tail] = rels 453 | for (let item of head) { 454 | yield* mockJoinerHelper(tail, resultSoFar.concat([item])) 455 | } 456 | } 457 | } 458 | 459 | 460 | const mockJoiner = function* (...rels: Array) { 461 | yield* mockJoinerHelper(rels, []) 462 | } 463 | 464 | 465 | const mockRemapKeys = (rel: Array, keyMap: { [key: string]: string }): Array & { [key: string]: any }> => { 466 | return rel.map(datum => Object.keys(datum).reduce((acc, k) => { 467 | const newK = keyMap[k] 468 | if (newK) { 469 | acc[newK] = datum[k] 470 | } 471 | 472 | return acc 473 | }, {} as any)) 474 | 475 | } 476 | 477 | 478 | test("Mock joiner works", () => { 479 | expect([...mockJoiner(A, B)]).toEqual([{ a: 1, b: 2, c: 3 }]) 480 | expect([...mockJoiner(A, B, C)]).toEqual([{ a: 1, b: 2, c: 3 }]); 481 | { 482 | const C = [{ a: 1, c: 3 }, { a: 1, c: 4 }] 483 | expect([...mockJoiner(A, B, C)]).toEqual([{ a: 1, b: 2, c: 3 }, { a: 1, b: 2, c: 4 }]) 484 | } 485 | }) 486 | 487 | test("Mock remap keys works", () => { 488 | expect(mockRemapKeys(A, { a: 'a2', b: 'b' })).toEqual([{ a2: 1, b: 2 }]) 489 | }) 490 | }) 491 | 492 | describe("Helpers", () => { 493 | test("Remap keys", () => { 494 | const inKeys = ["a", "b", "c", "d"] 495 | const mapping = { a: "a2", c: "c4", d: "d" } 496 | const out = datalog.remapKeys(inKeys, mapping) 497 | expect(out).toEqual(["a2", "b", "c4", "d"]) 498 | expect(datalog.reverseRemapKeys(out, mapping)).toEqual(inKeys) 499 | }) 500 | }) 501 | 502 | describe("Query", () => { 503 | test("Hello World join", () => { 504 | const A = datalog._newTable<{ a: number, b: number }>() 505 | const B = datalog._newTable<{ b: number, c: number }>() 506 | A.assert({ a: 1, b: 2 }) 507 | B.assert({ b: 2, c: 3 }) 508 | 509 | const queryResult = datalog.query(({ a, b, c }: any) => { 510 | A({ a, b }) 511 | B({ b, c }) 512 | }) 513 | 
expect([...queryResult.view().recentData()]).toEqual(intoAddedDatums([{ a: 1, b: 2, c: 3 }])) 514 | }) 515 | 516 | test("People Example", () => { 517 | type ID = number 518 | const People = datalog._newTable<{ name: string, id: ID }>() 519 | const ParentOf = datalog._newTable<{ parentID: ID, childID: ID }>() 520 | 521 | let ids = 0 522 | 523 | People.assert({ name: "FooChild", id: ids++ }) 524 | People.assert({ name: "FooDad", id: ids++ }) 525 | People.assert({ name: "FooMom", id: ids++ }) 526 | 527 | People.assert({ name: "BarChild", id: ids++ }) 528 | People.assert({ name: "BarDad", id: ids++ }) 529 | People.assert({ name: "BarMom", id: ids++ }) 530 | 531 | ParentOf.assert({ parentID: 1, childID: 0 }) // 1 = FooDad, 0 = FooChild 532 | ParentOf.assert({ parentID: 2, childID: 0 }) // 2 = FooMom, 0 = FooChild 533 | 534 | ParentOf.assert({ parentID: 4, childID: 3 }) // 4 = BarDad, 3 = BarChild 535 | ParentOf.assert({ parentID: 5, childID: 3 }) // 5 = BarMom, 3 = BarChild 536 | 537 | // Find every parent 538 | let queryResult = datalog.query<{ parentName: string, parentID: number }>(({ parentName, parentID }) => { 539 | ParentOf({ parentID }) 540 | People({ id: parentID, name: parentName }) 541 | }) 542 | 543 | expect([...queryResult.view().recentData()]).toEqual(intoAddedDatums([{ parentID: 1, parentName: "FooDad" }, { parentID: 2, parentName: "FooMom" }, { parentID: 4, parentName: "BarDad" }, { parentID: 5, parentName: "BarMom" }])) 544 | }) 545 | 546 | test("People Example", () => { 547 | type ID = number 548 | const People = datalog._newTable<{ name: string, id: ID }>() 549 | const ParentOf = datalog._newTable<{ parentID: ID, childID: ID }>() 550 | 551 | let ids = 0 552 | 553 | People.assert({ name: "FooChild", id: ids++ }) 554 | People.assert({ name: "FooDad", id: ids++ }) 555 | People.assert({ name: "FooMom", id: ids++ }) 556 | 557 | ParentOf.assert({ parentID: 1, childID: 0 }) // 1 = FooDad, 0 = FooChild 558 | ParentOf.assert({ parentID: 2, childID: 0 }) // 2 = FooMom, 0 = FooChild 559 | 560 | // Who's FooChild's parent? 561 | let queryResult = datalog.query<{ parentName: string, parentID: number }>(({ parentName, childID, parentID }: any) => { 562 | People({ name: "FooChild", id: childID }) 563 | ParentOf({ childID, parentID }) 564 | People({ id: parentID, name: parentName }) 565 | }) 566 | // Equivalent SQL query: (https://www.db-fiddle.com/f/t1TA5umdcoBuG8ZPcyMWTx/1) 567 | // select child.name as childName, ParentOf.childID, ParentOf.parentID, parent.name as parentName 568 | // from People child, People parent, ParentOf 569 | // where child.name = 'FooChild' and child.id = childID and parent.id = parentID 570 | 571 | 572 | expect([...queryResult.view().recentData()]).toEqual(intoAddedDatums([{ parentID: 1, parentName: "FooDad", childID: 0 }, { parentID: 2, parentName: "FooMom", childID: 0 }])) 573 | }) 574 | 575 | test("People Example. Then query result", () => { 576 | type ID = number 577 | const People = datalog._newTable<{ name: string, id: ID }>() 578 | const ParentOf = datalog._newTable<{ parentID: ID, childID: ID }>() 579 | 580 | let ids = 0 581 | 582 | People.assert({ name: "FooChild", id: ids++ }) 583 | People.assert({ name: "FooDad", id: ids++ }) 584 | People.assert({ name: "FooMom", id: ids++ }) 585 | 586 | ParentOf.assert({ parentID: 1, childID: 0 }) // 1 = FooDad, 0 = FooChild 587 | ParentOf.assert({ parentID: 2, childID: 0 }) // 2 = FooMom, 0 = FooChild 588 | 589 | // Who's FooChild's parent? 
590 | let QueryResult = datalog.query<{ parentName: string, parentID: number }>(({ parentName, childID, parentID }: any) => { 591 | People({ name: "FooChild", id: childID }) 592 | ParentOf({ childID, parentID }) 593 | People({ id: parentID, name: parentName }) 594 | }) 595 | 596 | let QueryResult2 = datalog.query(({ parentID }: { parentID: number }) => { 597 | QueryResult({ parentName: "FooMom", parentID }) 598 | }) 599 | 600 | expect([...QueryResult2.view().recentData()]).toEqual(intoAddedDatums([{ parentID: 2 }])) 601 | }) 602 | 603 | test("People Example 3 joins", () => { 604 | type ID = number 605 | const People = datalog._newTable<{ name: string, id: ID }>() 606 | const ParentOf = datalog._newTable<{ parentID: ID, childID: ID }>() 607 | const A = datalog._newTable<{ a: number, b: number }>() 608 | const B = datalog._newTable<{ b: number, c: number }>() 609 | 610 | let ids = 0 611 | 612 | People.assert({ name: "FooChild", id: ids++ }) 613 | People.assert({ name: "FooDad", id: ids++ }) 614 | People.assert({ name: "FooMom", id: ids++ }) 615 | 616 | ParentOf.assert({ parentID: 1, childID: 0 }) // 1 = FooDad, 0 = FooChild 617 | ParentOf.assert({ parentID: 2, childID: 0 }) // 2 = FooMom, 0 = FooChild 618 | 619 | A.assert({ a: 1, b: 2 }) 620 | B.assert({ b: 2, c: 3 }) 621 | 622 | // Who's FooChild's parent? 623 | let queryResult = datalog.query<{ id: number, parentName: string, parentID: number, a: number, b: number, c: number }>(({ parentName, childID, parentID, a, b, c }: any) => { 624 | People({ name: "FooChild", id: childID }) 625 | ParentOf({ childID, parentID }) 626 | People({ id: parentID, name: parentName }) 627 | A({ a, b }) 628 | B({ b, c }) 629 | }) 630 | // Equivalent SQL query: (https://www.db-fiddle.com/f/t1TA5umdcoBuG8ZPcyMWTx/1) 631 | // select child.name as childName, ParentOf.childID, ParentOf.parentID, parent.name as parentName 632 | // from People child, People parent, ParentOf 633 | // where child.name = 'FooChild' and child.id = childID and parent.id = parentID 634 | 635 | // console.log("data", queryResult) 636 | const data = queryResult.view().recentData() 637 | 638 | expect(data).toEqual(intoAddedDatums([{ parentID: 1, parentName: "FooDad", childID: 0, a: 1, b: 2, c: 3 }, { parentID: 2, parentName: "FooMom", childID: 0, a: 1, b: 2, c: 3 }])) 639 | }) 640 | 641 | test("Chain Queries", () => { 642 | type ID = number 643 | const People = datalog._newTable<{ name: string, id: ID }>() 644 | const ParentOf = datalog._newTable<{ parentID: ID, childID: ID }>() 645 | let ids = 0 646 | 647 | People.assert({ name: "FooChild", id: ids++ }) 648 | People.assert({ name: "FooDad", id: ids++ }) 649 | People.assert({ name: "FooMom", id: ids++ }) 650 | People.assert({ name: "BarDad", id: ids++ }) 651 | 652 | ParentOf.assert({ parentID: 1, childID: 0 }) // 1 = FooDad, 0 = FooChild 653 | ParentOf.assert({ parentID: 2, childID: 0 }) // 2 = FooMom, 0 = FooChild 654 | 655 | // Who are the children of FooMom? 656 | const QueryResult = datalog.query<{ childName: string, childID: number, parentID: number }>(({ childName, childID, parentID }) => { 657 | People({ name: "FooMom", id: parentID }) 658 | ParentOf({ parentID, childID }) 659 | People({ id: childID, name: childName, }) 660 | }) 661 | 662 | // console.log("data", queryResult) 663 | // Query the result 664 | let QueryView = QueryResult.view() 665 | expect(QueryView.recentData()).toEqual(intoAddedDatums([{ parentID: 2, childName: "FooChild", childID: 0 }])) 666 | 667 | // Who are the parents of these children? 
668 | const QueryResult2 = datalog.query<{ childID: number, parentID: number, parentName: string }>(({ childID, parentID, parentName }) => { 669 | QueryResult({ childID }) 670 | ParentOf({ childID, parentID }) 671 | People({ id: parentID, name: parentName }) 672 | }) 673 | 674 | let QueryView2 = QueryResult2.view() 675 | expect(QueryView2.recentData()).toEqual(intoAddedDatums([{ parentID: 1, childID: 0, parentName: "FooDad" }, { parentID: 2, childID: 0, parentName: "FooMom" }])) 676 | 677 | // Now add new data to the Table and see how the queries change 678 | // Note that FooBrother's dad is BarDad 679 | // Foo and FooBrother are half brothers. 680 | People.assert({ name: "FooBrother", id: ids++ }) 681 | ParentOf.assert({ parentID: 3, childID: ids - 1 }) // 3 = BarDad 682 | ParentOf.assert({ parentID: 2, childID: ids - 1 }) // 2 = FooMom 683 | 684 | // Run the second query again 685 | QueryResult2.runQuery() 686 | // Nothing new, because there's nothing new from the first QueryView. 687 | expect(QueryView2.recentData()).toEqual(null) 688 | 689 | // Run the first query again. 690 | // Note we are asking the query to run again. This is to prevent the case where a change in the Table will cause unnecessary work. 691 | // For example: If QueryView2 was offscreen, we wouldn't want to waste work updating its state. Better to do that when necessary. 692 | QueryResult.runQuery() 693 | expect(QueryView.recentData()).toEqual(intoAddedDatums([{ parentID: 2, childName: "FooBrother", childID: 4 }])) 694 | 695 | // Now see the results of the second query 696 | // Note that FooMom appeared again. This is because the query runs on each child from QueryResult 697 | QueryResult2.runQuery() 698 | expect(QueryView2.recentData()).toEqual(intoAddedDatums([{ parentID: 2, childID: 4, parentName: "FooMom" }, { parentID: 3, childID: 4, parentName: "BarDad" }])) 699 | }) 700 | 701 | test("Not Queries", () => { 702 | type ID = number 703 | const People = datalog._newTable<{ name: string, id: ID }>() 704 | const PeopleNeg = datalog._newTable<{ name: string, id: ID }>() 705 | const ParentOf = datalog._newTable<{ parentID: ID, childID: ID }>() 706 | let ids = 0 707 | 708 | People.assert({ name: "FooChild", id: ids++ }) 709 | People.assert({ name: "FooDad", id: ids++ }) 710 | People.assert({ name: "FooMom", id: ids++ }) 711 | People.assert({ name: "BarDad", id: ids++ }) 712 | PeopleNeg.assert({ name: "FooMom", id: 2 }) 713 | 714 | ParentOf.assert({ parentID: 1, childID: 0 }) // 1 = FooDad, 0 = FooChild 715 | ParentOf.assert({ parentID: 2, childID: 0 }) // 2 = FooMom, 0 = FooChild 716 | 717 | // Which parents aren't FooMom? 718 | const QueryResult = datalog.query<{ parentName: string, parentID: number }>(({ parentName, parentID }) => { 719 | People({ id: parentID, name: parentName }) 720 | ParentOf({ parentID }) 721 | People.not({ id: parentID, name: "FooMom" }) 722 | }) 723 | 724 | // // console.log("data", queryResult) 725 | // // Query the result 726 | let QueryView = QueryResult.view() 727 | let recentData = QueryView.recentData() 728 | expect(recentData?.filter(v => v.kind === datalog.Removed).length).toBe(0) 729 | 730 | expect( 731 | recentData?.map(v => v.datum).map(({ parentName }) => parentName) 732 | ).toEqual(["FooDad"]) 733 | 734 | // // Who are the parents of these children? 
735 | // const QueryResult2 = datalog.query<{ childID: number, parentID: number, parentName: string }>(({ childID, parentID, parentName }) => { 736 | // QueryResult({ childID }) 737 | // ParentOf({ childID, parentID }) 738 | // // TODO it would be cool to use a not here. To not have Foo Mom appear 739 | // People({ id: parentID, name: parentName }) 740 | // People.not({ id: parentID, name: "FooMom" }) 741 | // }) 742 | 743 | // let QueryView2 = QueryResult2.view() 744 | // expect(QueryView2.recentData()).toEqual([{ parentID: 1, childID: 0, parentName: "FooDad" }]) 745 | 746 | // Now add new data to the Table and see how the queries change 747 | // Note that FooBrother's dad is BarDad 748 | // Foo and FooBrother are half brothers. 749 | // People.assert({ name: "FooBrother", id: ids++ }) 750 | // ParentOf.assert({ parentID: 3, childID: ids - 1 }) // 3 = BarDad 751 | // ParentOf.assert({ parentID: 2, childID: ids - 1 }) // 2 = FooMom 752 | 753 | // // Run the second query again 754 | // QueryResult2.runQuery() 755 | // // Nothing new, because there's nothing new from the first QueryView. 756 | // expect(QueryView2.recentData()).toEqual(null) 757 | 758 | // Run the first query again. 759 | // Note we are asking the query to run again. This is to prevent the case where a change in the Table will cause unnecessary work. 760 | // For example: If QueryView2 was offscreen, we wouldn't want to waste work updating its state. Better to do that when necessary. 761 | // QueryResult.runQuery() 762 | // expect(QueryView.recentData()).toEqual([{ parentID: 2, childName: "FooBrother", childID: 4 }]) 763 | 764 | // // Now see the results of the second query 765 | // // Note that FooMom appeared again. This is because the query runs on each child from QueryResult 766 | // QueryResult2.runQuery() 767 | // expect(QueryView2.recentData()).toEqual([{ parentID: 2, childID: 4, parentName: "FooMom" }, { parentID: 3, childID: 4, parentName: "BarDad" }]) 768 | }) 769 | }) 770 | 771 | describe("Retractions", () => { 772 | test("Datum Counts", () => { 773 | type ID = number 774 | const People = datalog._newTable<{ name: string, id: ID }>() 775 | let ids = 0 776 | 777 | People.assert({ name: "FooChild", id: ids++ }) 778 | People.assert({ name: "FooDad", id: ids++ }) 779 | 780 | // @ts-ignore 781 | const PeopleInnerVar: datalog.Variable<{ name: string, id: ID }> = People._innerVar 782 | 783 | expect(PeopleInnerVar.counts).toEqual(new Map([ 784 | ["[[\"id\",0],[\"name\",\"FooChild\"]]", 1], 785 | ["[[\"id\",1],[\"name\",\"FooDad\"]]", 1] 786 | ])) 787 | 788 | People.assert({ name: "FooDad", id: 1 }) 789 | expect(PeopleInnerVar.counts).toEqual(new Map([ 790 | ["[[\"id\",0],[\"name\",\"FooChild\"]]", 1], 791 | ["[[\"id\",1],[\"name\",\"FooDad\"]]", 2] 792 | ])) 793 | 794 | People.retract({ name: "FooChild", id: 0 }) 795 | expect(PeopleInnerVar.counts).toEqual(new Map([ 796 | ["[[\"id\",0],[\"name\",\"FooChild\"]]", 0], 797 | ["[[\"id\",1],[\"name\",\"FooDad\"]]", 2] 798 | ])) 799 | }) 800 | 801 | test("Retractions in Variable's toAdd cancel each other out", () => { 802 | const v = new datalog.Variable() 803 | v.assert({ a: "1", b: 1 }) 804 | while (v.changed()) { } 805 | expect(v.readAllData()).toEqual([ 806 | { a: "1", b: 1 } 807 | ]) 808 | v.retract({ a: "1", b: 1 }) 809 | v.assert({ a: "1", b: 1 }) 810 | expect(v.readAllData()).toEqual([ 811 | { a: "1", b: 1 } 812 | ]) 813 | 814 | 815 | }) 816 | 817 | test("Relations get updated", () => { 818 | type ID = number 819 | const People = datalog._newTable<{ name: string, id: 
ID }>() 820 | let ids = 0 821 | 822 | People.assert({ name: "FooChild", id: ids++ }) 823 | People.assert({ name: "FooDad", id: ids++ }) 824 | 825 | // @ts-ignore 826 | const PeopleInnerVar: datalog.Variable<{ name: string, id: ID }> = People._innerVar 827 | 828 | while (PeopleInnerVar.changed()) { } 829 | 830 | expect(PeopleInnerVar.stable.relations[0].elements).toEqual([["FooChild", 0], ["FooDad", 1]]) 831 | 832 | People.assert({ name: "FooDad", id: 1 }) 833 | while (PeopleInnerVar.changed()) { } 834 | expect(PeopleInnerVar.stable.relations[0].elements).toEqual([["FooChild", 0], ["FooDad", 1]]) 835 | 836 | People.retract({ name: "FooChild", id: 0 }) 837 | while (PeopleInnerVar.changed()) { } 838 | expect(PeopleInnerVar.stable.relations[0].elements).toEqual([["FooDad", 1]]) 839 | }) 840 | 841 | test("Relations get updated", () => { 842 | type ID = number 843 | const People = datalog._newTable<{ name: string, id: ID }>() 844 | let ids = 0 845 | 846 | People.assert({ name: "FooChild", id: ids++ }) 847 | People.assert({ name: "FooDad", id: ids++ }) 848 | 849 | // @ts-ignore 850 | const PeopleInnerVar: datalog.Variable<{ name: string, id: ID }> = People._innerVar 851 | 852 | while (PeopleInnerVar.changed()) { } 853 | 854 | expect(PeopleInnerVar.stable.relations[0].elements).toEqual([["FooChild", 0], ["FooDad", 1]]) 855 | 856 | People.assert({ name: "FooDad", id: 1 }) 857 | while (PeopleInnerVar.changed()) { } 858 | expect(PeopleInnerVar.stable.relations[0].elements).toEqual([["FooChild", 0], ["FooDad", 1]]) 859 | 860 | People.retract({ name: "FooChild", id: 0 }) 861 | while (PeopleInnerVar.changed()) { } 862 | expect(PeopleInnerVar.stable.relations[0].elements).toEqual([["FooDad", 1]]) 863 | }) 864 | 865 | test("Retractions propagate through a join", () => { 866 | const A = datalog._newTable<{ a: number, b: number }>() 867 | const B = datalog._newTable<{ b: number, c: number }>() 868 | const C = datalog._newTable<{ a: number, c: number }>() 869 | A.assert({ a: 1, b: 2 }) 870 | B.assert({ b: 2, c: 3 }) 871 | B.assert({ b: 2, c: 4 }) 872 | C.assert({ a: 1, c: 3 }) 873 | C.assert({ a: 1, c: 4 }) 874 | 875 | const QueryResult = datalog.query<{ a: number, b: number, c: number }>(({ a, b, c }) => { 876 | A({ a, b }) 877 | B({ b, c }) 878 | C({ a, c }) 879 | }) 880 | 881 | const QueryView = QueryResult.view() 882 | expect(QueryView.recentData()).toEqual(intoAddedDatums([ 883 | { 884 | "a": 1, 885 | "b": 2, 886 | "c": 3, 887 | }, { 888 | "a": 1, 889 | "b": 2, 890 | "c": 4, 891 | } 892 | ])) 893 | C.retract({ a: 1, c: 3 }) 894 | QueryResult.runQuery() 895 | let recent = QueryView.recentData() 896 | expect(recent).toEqual([ 897 | { 898 | kind: datalog.Removed, 899 | datum: { 900 | "a": 1, 901 | "b": 2, 902 | "c": 3, 903 | } 904 | } 905 | ]) 906 | }) 907 | }) 908 | 909 | describe("Reading Data", () => { 910 | test("Read all Data", () => { 911 | const A = datalog._newTable<{ a: number, b: number }>() 912 | const B = datalog._newTable<{ b: number, c: number }>() 913 | const C = datalog._newTable<{ a: number, c: number }>() 914 | A.assert({ a: 1, b: 2 }) 915 | B.assert({ b: 2, c: 3 }) 916 | B.assert({ b: 2, c: 4 }) 917 | C.assert({ a: 1, c: 3 }) 918 | C.assert({ a: 1, c: 4 }) 919 | 920 | const QueryResult = datalog.query<{ a: number, b: number, c: number }>(({ a, b, c }) => { 921 | A({ a, b }) 922 | B({ b, c }) 923 | C({ a, c }) 924 | }) 925 | 926 | const QueryView = QueryResult.view() 927 | expect(QueryView.readAllData()).toEqual([ 928 | { 929 | "a": 1, 930 | "b": 2, 931 | "c": 3, 932 | }, { 933 | "a": 1, 
934 | "b": 2, 935 | "c": 4, 936 | } 937 | ]) 938 | }) 939 | }) 940 | 941 | // describe.only("Leaper tests", () => { 942 | // test("Intersection test with unconstrained", () => { 943 | // const NextActorLeaper = new datalog.ExtendWithUnconstrained( 944 | // () => ['Edward Asner', 'Change of Habit'].slice(1), 945 | // 2, 946 | // ["NextActor", "BaconNumber", "CurrentBaconNumber", "___number1edgxy6u"], 947 | // new datalog.RelationIndex() 948 | // ) 949 | // }) 950 | // }) 951 | 952 | describe("Examples from docs", () => { 953 | test("Simple Retraction implication", () => { 954 | const A = datalog.intoTable([{ a: 1, b: 2 }]) 955 | const B = datalog.intoTable([ 956 | { b: 2, c: 3, }, 957 | { b: 2, c: 4 } 958 | ]) 959 | 960 | const retractedState = {} 961 | datalog.query(({ a, b, c }) => { 962 | A({ a, b }) 963 | B({ b, c }) 964 | }).implies(({ a, b }, kind) => { 965 | if (kind === datalog.Added && !(retractedState[a]?.[b])) { 966 | retractedState[a] = retractedState[a] || {} 967 | retractedState[a][b] = true 968 | A.retract({ a, b }) 969 | } 970 | }) 971 | 972 | expect(A.view().readAllData()).toEqual([]) 973 | }) 974 | test("Home page demo", () => { 975 | // console.time("Bacon Number") 976 | const InMovie = datalog.intoTable([ 977 | { MovieName: "Change of Habit", Actor: "Elvis Presley" }, 978 | { MovieName: "Foo", Actor: "A" }, 979 | { MovieName: "Foo", Actor: "B" }, 980 | { MovieName: "Bar", Actor: "B" }, 981 | { MovieName: "Bar", Actor: "C" }, 982 | { MovieName: "JFK", Actor: "C" }, 983 | { MovieName: "Change of Habit", Actor: "Edward Asner" }, 984 | { MovieName: "JFK", Actor: "Edward Asner" }, 985 | { MovieName: "JFK", Actor: "Kevin Bacon" }, 986 | // ... More Movies 987 | ]) 988 | const BaconNumbers = datalog.intoTable([ 989 | { Actor: "Kevin Bacon", number: 0 }, 990 | ]) 991 | 992 | datalog.query<{ BaconNumber: number, Actor: string, NextActor: string, MovieName: string }>(({ BaconNumber, Actor, NextActor, MovieName }) => { 993 | InMovie({ Actor }) 994 | BaconNumbers.not({ Actor }) 995 | }).view().readAllData().map(({ Actor }) => { 996 | BaconNumbers.assert({ Actor, number: Infinity }) 997 | }) 998 | 999 | 1000 | const BaconNumberQuery = datalog.query<{ BaconNumber: number, Actor: string, NextActor: string, CurrentBaconNumber: number, MovieName: string }>(({ BaconNumber, Actor, NextActor, CurrentBaconNumber, MovieName }) => { 1001 | InMovie({ Actor, MovieName }) 1002 | InMovie({ MovieName, Actor: NextActor }) 1003 | BaconNumbers({ Actor, number: BaconNumber }) 1004 | BaconNumbers({ Actor: NextActor, number: CurrentBaconNumber }) 1005 | BaconNumbers.not({ Actor: Actor, number: Infinity }) 1006 | }) 1007 | 1008 | 1009 | BaconNumberQuery.viewExt() 1010 | .mapEffect((recentDatum) => { 1011 | // If it's a join on the same actor, we'll pass 1012 | if (recentDatum.datum.Actor === recentDatum.datum.NextActor) { 1013 | return 1014 | } 1015 | switch (recentDatum.kind) { 1016 | case datalog.Added: { 1017 | const { NextActor: Actor, BaconNumber, CurrentBaconNumber } = recentDatum.datum 1018 | if (CurrentBaconNumber > BaconNumber + 1) { 1019 | // Swap the old bacon number (CurrentBaconNumber with the new one) 1020 | BaconNumbers.retract({ Actor, number: CurrentBaconNumber }) 1021 | BaconNumbers.assert({ Actor, number: BaconNumber + 1 }) 1022 | } 1023 | break 1024 | } 1025 | case datalog.Removed: { 1026 | const { NextActor: Actor, BaconNumber, CurrentBaconNumber } = recentDatum.datum 1027 | if (CurrentBaconNumber === BaconNumber + 1) { 1028 | console.log("Removing: ", Actor, BaconNumber, 
CurrentBaconNumber) 1029 | BaconNumbers.retract({ Actor, number: BaconNumber + 1 }) 1030 | } else { 1031 | // console.log("Not Removing: ", Actor, BaconNumber, CurrentBaconNumber) 1032 | } 1033 | break 1034 | } 1035 | case datalog.Modified: 1036 | throw new Error("Unhandled. We don't expect queries to give us a modified change.") 1037 | } 1038 | }).onChange(() => { 1039 | // After we've mapped the effect, we'll run the query again to 1040 | // update our results 1041 | BaconNumberQuery.runQuery() 1042 | 1043 | }) 1044 | // BaconNumberQuery.runQuery() 1045 | 1046 | // console.timeEnd("Bacon Number") 1047 | // console.log("Query:", BaconNumberQuery.view().readAllData()) 1048 | expect(BaconNumbers.view().readAllData()).toEqual([ 1049 | { Actor: "A", number: 3 }, 1050 | { Actor: "B", number: 2 }, 1051 | { Actor: "C", number: 1 }, 1052 | { Actor: "Edward Asner", number: 1 }, 1053 | { Actor: "Elvis Presley", number: 2 }, 1054 | { Actor: "Kevin Bacon", number: 0 }, 1055 | ]) 1056 | }) 1057 | 1058 | test("Hello World Example", () => { 1059 | const Greetings = datalog.intoTable([ 1060 | { language: "en", greeting: "Hello" }, 1061 | { language: "es", greeting: "Hola" } 1062 | // ... 1063 | ]) 1064 | const Nouns = datalog.intoTable([ 1065 | { language: "en", noun: "world" }, 1066 | { language: "es", noun: "todos" } 1067 | // ... 1068 | ]) 1069 | 1070 | const GreetingQueryFn = (language: 'en' | 'es') => datalog.query(({ greeting, noun }) => { 1071 | Greetings({ language, greeting }) 1072 | Nouns({ language, noun }) 1073 | }) 1074 | expect(GreetingQueryFn('en').view().readAllData()).toEqual([{ greeting: "Hello", noun: "world" }]) 1075 | expect(GreetingQueryFn('en').view().readAllData().map(({ greeting, noun }) => `${greeting} ${noun}`)[0]).toEqual("Hello world") 1076 | expect(GreetingQueryFn('es').view().readAllData().map(({ greeting, noun }) => `${greeting} ${noun}`)[0]).toEqual("Hola todos") 1077 | 1078 | }) 1079 | 1080 | describe("Usage", () => { 1081 | const People = datalog.newTable<{ id: number, name: string }>({ 1082 | id: datalog.NumberType, 1083 | name: datalog.StringType, 1084 | }) 1085 | 1086 | test("Add Data", () => { 1087 | People.assert({ id: 0, name: "Alice" }) 1088 | People.assert({ id: 1, name: "Bob" }) 1089 | }) 1090 | 1091 | test("Retract data", () => { 1092 | People.retract({ id: 1, name: "Bob" }) 1093 | }) 1094 | test("Find everyone's name", () => { 1095 | // Returns all the names of everyone in the People database 1096 | const Query = datalog.query<{ name: string }>(({ name }) => { 1097 | People({ name }) 1098 | }) 1099 | 1100 | // Read the QueryResult 1101 | expect( 1102 | Query.view().readAllData() 1103 | ).toEqual([{ name: "Alice" }]) 1104 | }) 1105 | 1106 | describe("Querying Data with joins", () => { 1107 | const People = datalog.newTable<{ id: number, name: string }>({ 1108 | id: datalog.NumberType, 1109 | name: datalog.StringType, 1110 | }) 1111 | People.assert({ id: 0, name: "Alice" }) 1112 | People.assert({ id: 1, name: "Bob" }) 1113 | 1114 | type ID = number 1115 | const Manages = datalog.newTable<{ manager: ID, managee: ID }>({ 1116 | manager: datalog.NumberType, 1117 | managee: datalog.NumberType, 1118 | }) 1119 | 1120 | // Alice manages Bob 1121 | Manages.assert({ manager: 0, managee: 1 }) 1122 | 1123 | test("Everyone with a manager", () => { 1124 | const Query = datalog.query<{ managerName: string, personName: string, personID, managerID }>(({ managerName, personName, managerID, personID }) => { 1125 | People({ id: personID, name: personName }) 1126 | Manages({ 
managee: personID, manager: managerID }) 1127 | People({ id: managerID, name: managerName }) 1128 | }) 1129 | 1130 | expect( 1131 | Query.view().readAllData() 1132 | ).toEqual([{ 1133 | managerID: 0, 1134 | managerName: "Alice", 1135 | personID: 1, 1136 | personName: "Bob", 1137 | }]) 1138 | }) 1139 | 1140 | test("Everyone without a manager", () => { 1141 | const Query = datalog.query<{ personID: number, personName: string }>(({ personName, personID }) => { 1142 | People({ id: personID, name: personName }) 1143 | Manages.not({ managee: personID }) 1144 | }) 1145 | 1146 | expect( 1147 | Query.view().readAllData() 1148 | ).toEqual([{ 1149 | personID: 0, 1150 | personName: "Alice", 1151 | }]) 1152 | }) 1153 | }) 1154 | 1155 | 1156 | test("Differential Updates", () => { 1157 | const People = datalog.newTable<{ id: number, name: string }>({ 1158 | id: datalog.NumberType, 1159 | name: datalog.StringType, 1160 | }) 1161 | People.assert({ id: 0, name: "Alice" }) 1162 | const Query = datalog.query<{ name: string }>(({ name }) => { 1163 | People({ name }) 1164 | }) 1165 | 1166 | const queryView = Query.view() 1167 | 1168 | expect( 1169 | queryView.recentData() 1170 | ).toEqual( 1171 | [{ 1172 | kind: datalog.Added, 1173 | datum: { name: "Alice" } 1174 | }] 1175 | ) 1176 | 1177 | expect( 1178 | queryView.recentData() 1179 | ).toEqual(null) 1180 | 1181 | 1182 | People.assert({ id: 2, name: "Eve" }) 1183 | expect( 1184 | queryView.recentData() 1185 | ).toEqual(null) 1186 | // We changed the data, but go nothing back! what gives? 1187 | // Queries won't run themselves (except the first time when created). 1188 | // so we have to ask the query to run itself to see the updates 1189 | Query.runQuery() 1190 | 1191 | // This is an optimization so you don't pay for queries you aren't using. 
1192 | // If you do want to run a query every time you can hook up the runQuery function to happen on dependency change 1193 | Query.onDependencyChange(() => Query.runQuery()) 1194 | 1195 | expect( 1196 | queryView.recentData() 1197 | ).toEqual([{ 1198 | kind: datalog.Added, 1199 | datum: { name: "Eve" } 1200 | }]) 1201 | 1202 | People.retract({ id: 2, name: "Eve" }) 1203 | 1204 | expect( 1205 | queryView.recentData() 1206 | ).toEqual([{ 1207 | kind: datalog.Removed, 1208 | datum: { name: "Eve" } 1209 | }]) 1210 | }) 1211 | }) 1212 | 1213 | describe("Usage", () => { 1214 | test("Update Data", () => { 1215 | const People = datalog.newTable<{ id: number, name: string }>({ 1216 | id: datalog.NumberType, 1217 | name: datalog.StringType, 1218 | }) 1219 | 1220 | People.assert({ id: 0, name: "Alice" }) 1221 | People.assert({ id: 1, name: "Bob" }) 1222 | People.assert({ id: 2, name: "Eve" }) 1223 | const Query = datalog.query(({ id, name }) => { 1224 | People({ id, name }) 1225 | }) 1226 | 1227 | 1228 | expect(Query.view().readAllData()).toEqual([ 1229 | { name: 'Alice', id: 0 }, 1230 | { id: 1, name: "Bob" }, 1231 | { id: 2, name: "Eve" } 1232 | ]) 1233 | 1234 | People.update({ id: 0 }, { name: "Alice 2" }) 1235 | Query.runQuery() 1236 | expect(Query.view().readAllData()).toEqual([ 1237 | { name: 'Alice 2', id: 0 }, 1238 | { id: 1, name: "Bob" }, 1239 | { id: 2, name: "Eve" } 1240 | ]) 1241 | }) 1242 | 1243 | test("Update Data with string id", () => { 1244 | const Todos = datalog.newTable<{ id: string, text: string }>({ 1245 | id: datalog.StringType, 1246 | text: datalog.StringType, 1247 | }) 1248 | 1249 | // const randomId = () => Math.random().toString(36).substring(2, 15) 1250 | // Generated with randomId above, but hardcoded for testing 1251 | const idA = "gqsosf5i7zm" 1252 | const idB = "p7dpx04wmu8" 1253 | const idC = "2zxs51rq6ph" 1254 | 1255 | 1256 | Todos.assert({ id: idA, text: "Alice" }) 1257 | Todos.assert({ id: idB, text: "Bob" }) 1258 | Todos.assert({ id: idC, text: "Eve" }) 1259 | const Query = datalog.query(({ id, text }) => { 1260 | Todos({ id, text }) 1261 | }) 1262 | 1263 | 1264 | expect(Query.view().readAllData()).toEqual([ 1265 | { id: idC, text: "Eve" }, 1266 | { id: idA, text: "Alice" }, 1267 | { id: idB, text: "Bob" }, 1268 | ]) 1269 | 1270 | Todos.update({ id: idA }, { text: "Alice 2" }) 1271 | Query.runQuery() 1272 | expect(Query.view().readAllData()).toEqual([ 1273 | { id: idC, text: "Eve" }, 1274 | { text: 'Alice 2', id: idA }, 1275 | { id: idB, text: "Bob" }, 1276 | ]) 1277 | }) 1278 | 1279 | test("Update Data and then query", () => { 1280 | const People = datalog.newTable<{ id: number, name: string }>({ 1281 | id: datalog.NumberType, 1282 | name: datalog.StringType, 1283 | }) 1284 | 1285 | People.assert({ id: 0, name: "Alice" }) 1286 | People.assert({ id: 1, name: "Bob" }) 1287 | People.assert({ id: 2, name: "Eve" }) 1288 | const Query = datalog.query(({ id, name }) => { 1289 | People({ id, name }) 1290 | }) 1291 | 1292 | 1293 | expect(Query.view().readAllData()).toEqual([ 1294 | { name: 'Alice', id: 0 }, 1295 | { id: 1, name: "Bob" }, 1296 | { id: 2, name: "Eve" } 1297 | ]) 1298 | 1299 | People.update({ id: 0 }, { name: "Alice 2" }) 1300 | 1301 | const Query2 = datalog.query(({ id, name }) => { 1302 | People({ id, name }) 1303 | }) 1304 | 1305 | expect(Query2.view().readAllData()).toEqual([ 1306 | { name: 'Alice 2', id: 0 }, 1307 | { id: 1, name: "Bob" }, 1308 | { id: 2, name: "Eve" } 1309 | ]) 1310 | }) 1311 | }) 1312 | 1313 | }) 1314 | 1315 | 
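// Illustrative sketch (an addition, not part of the original suite): it wires
// `onDependencyChange` to `runQuery` up front so the view stays current without
// manual reruns. The expected datums assume the behaviour demonstrated in the
// "Differential Updates" test above.
test("Auto rerun via onDependencyChange (illustrative sketch)", () => {
  const People = datalog.newTable<{ id: number, name: string }>({
    id: datalog.NumberType,
    name: datalog.StringType,
  })
  People.assert({ id: 0, name: "Alice" })

  const Query = datalog.query<{ name: string }>(({ name }) => {
    People({ name })
  })
  const queryView = Query.view()
  // Rerun the query whenever one of its source tables changes
  const unsubscribe = Query.onDependencyChange(() => Query.runQuery())

  // The query runs once on creation, so Alice shows up immediately
  expect(queryView.recentData()).toEqual([
    { kind: datalog.Added, datum: { name: "Alice" } }
  ])

  // With the hook installed, new facts should be picked up without a manual runQuery()
  People.assert({ id: 1, name: "Bob" })
  expect(queryView.recentData()).toEqual([
    { kind: datalog.Added, datum: { name: "Bob" } }
  ])

  unsubscribe()
})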
describe("Implications", () => { 1316 | test("Basic Recursion", () => { 1317 | const Nodes: datalog.Table<{ from: number, to: number }> = datalog.newTable({ 1318 | from: datalog.NumberType, 1319 | to: datalog.NumberType, 1320 | }) 1321 | 1322 | const initialData = [ 1323 | [1, 2], 1324 | [2, 3], 1325 | [3, 4], 1326 | [4, 5] 1327 | ] 1328 | 1329 | initialData.forEach(([from, to]) => { 1330 | Nodes.assert({ from, to }) 1331 | }) 1332 | 1333 | const Query = datalog.query(({ from, to, nextTo }) => { 1334 | Nodes({ from, to }) 1335 | Nodes({ from: to, to: nextTo }) 1336 | }).implies(({ from, nextTo }) => { 1337 | Nodes({ from, to: nextTo }) 1338 | }) 1339 | 1340 | expect(Nodes.view().readAllData().map(({ from, to }) => [from, to])).toEqual([ 1341 | [1, 2], 1342 | [1, 3], 1343 | [1, 4], 1344 | [1, 5], 1345 | [2, 3], 1346 | [2, 4], 1347 | [2, 5], 1348 | [3, 4], 1349 | [3, 5], 1350 | [4, 5], 1351 | ]) 1352 | }) 1353 | 1354 | test("Removing edges", () => { 1355 | const Nodes = datalog.newTable({ 1356 | node: datalog.NumberType, 1357 | }) 1358 | 1359 | const Edges: datalog.Table<{ from: number, to: number }> = datalog.newTable({ 1360 | from: datalog.NumberType, 1361 | to: datalog.NumberType, 1362 | }) 1363 | 1364 | const initialEdgesData = [ 1365 | [1, 2], 1366 | [2, 3], 1367 | [3, 4], 1368 | [4, 5] 1369 | ] 1370 | initialEdgesData.forEach(([from, to]) => { 1371 | Edges.assert({ from, to }) 1372 | }) 1373 | 1374 | const initialNodesData = [ 1375 | [1], 1376 | ] 1377 | initialNodesData.forEach(([node]) => { 1378 | Nodes.assert({ node }) 1379 | }) 1380 | 1381 | const Query = datalog.query(({ node, to }) => { 1382 | Nodes({ node }) 1383 | Edges({ from: node, to }) 1384 | }).implies(({ to }) => { 1385 | Nodes({ node: to }) 1386 | }) 1387 | 1388 | expect(Nodes.view().readAllData().map(({ node }) => node)).toEqual([ 1389 | 1, 2, 3, 4, 5 1390 | ]) 1391 | 1392 | // We remove the edge from 2 to 3 1393 | Edges.retract({ from: 2, to: 3 }) 1394 | Query.runQuery() 1395 | 1396 | expect(Nodes.view().readAllData().map(({ node }) => node)).toEqual([ 1397 | 1, 2 1398 | ]) 1399 | }) 1400 | }) 1401 | 1402 | describe("Into Table", () => { 1403 | test("Should be able turn an array of objects into a table", () => { 1404 | const People = datalog.intoTable([ 1405 | { name: "Alice" }, 1406 | { name: "Bob" }, 1407 | { name: "Carol" }, 1408 | ]) 1409 | 1410 | const Parents = datalog.intoTable([ 1411 | { parent: "Alice", child: "Carol" } 1412 | ]) 1413 | 1414 | // Find parents 1415 | expect( 1416 | datalog.query(({ parent, child }) => { 1417 | People({ name: parent }) 1418 | Parents({ parent, child }) 1419 | }).view().readAllData().map(({ parent }) => parent) 1420 | ).toEqual(["Alice"]) 1421 | }) 1422 | }) -------------------------------------------------------------------------------- /src/datalog.ts: -------------------------------------------------------------------------------- 1 | import { ViewExt, Impl as ViewExtImpl } from "./view-ext"; 2 | 3 | export const Unconstrained = Symbol('Unconstrained') 4 | 5 | type ValueOf = T[keyof T]; 6 | 7 | type Tupleized = Array> 8 | type TupleizedUnconstrained = Array | typeof Unconstrained> 9 | 10 | type DEBUG_LEVEL_ENUM = 0 | 1 | 2 11 | const DEBUG_LEVEL = 0 12 | 13 | /** 14 | * Finds the first index for which predicate is false. 
Returns an index of 15 | * array.length if it will never be false 16 | * predFn takes the form of (tuple) => boolean 17 | * @param array 18 | * @param predFnx 19 | * @param startIdx 20 | */ 21 | export function gallop(array: Array, predFn: (tuple: T) => boolean, startIdx = 0): number { 22 | if (array.length - startIdx <= 0 || !predFn(array[startIdx])) { 23 | return startIdx; 24 | } 25 | 26 | let step = 1; 27 | 28 | // Step up until we've seen a false result from predFn 29 | while (startIdx + step < array.length && predFn(array[startIdx + step])) { 30 | startIdx += step; 31 | step = step << 1; 32 | } 33 | 34 | // Now step down until we get a false result 35 | step = step >> 1; 36 | while (step > 0) { 37 | if (startIdx + step < array.length && predFn(array[startIdx + step])) { 38 | startIdx += step; 39 | } 40 | step = step >> 1; 41 | } 42 | 43 | return startIdx + 1; 44 | } 45 | 46 | function debugPrintElements(elements: any) { 47 | return `\n${elements.map((el: any) => (hasRetractionMeta(el) ? "RETRACTION" : "") + JSON.stringify(el.map((v: any) => v === Unconstrained ? "¿¿" : v))).join("\n").toString()}` 48 | } 49 | 50 | // Mutates the input array! 51 | // See https://doc.rust-lang.org/1.40.0/src/core/slice/mod.rs.html#1891 for a 52 | // great explanation of this algorithm. 53 | // Basically we bubble duplicates to the end of the array, then split the array 54 | // to before dupes and after dupes. O(n) 55 | // If the array is sorted, this will remove all duplicates. 56 | // comparatorFn should return true if the items are the same. 57 | function dedupBy(array: Array, comparatorFn: (a: T, b: T) => boolean) { 58 | let w = 1 59 | for (let r = 1; r < array.length; r++) { 60 | const rElement = array[r]; 61 | const wElementPrev = array[w - 1]; 62 | if (comparatorFn(rElement, wElementPrev)) { 63 | // The same so we keep `w` where it is 64 | } else { 65 | // We need to swap the elements 66 | // But only swap if their indices are different (otherwise it's no-op) 67 | if (r !== w) { 68 | array[r] = array[w] 69 | array[w] = rElement 70 | } 71 | w++ 72 | } 73 | } 74 | array.splice(w) 75 | } 76 | 77 | 78 | export const sortTuple = (a: any, b: any): -1 | 0 | 1 => { 79 | if (a === Unconstrained || b === Unconstrained) { 80 | return 0 81 | } 82 | 83 | if (typeof a !== "object" && typeof b !== "object") { 84 | return a < b ? -1 : a === b ? 0 : 1 85 | } 86 | 87 | if (a.length != b.length) { 88 | throw new Error('Can\'t sort different sized tuples. Tuples are not the same length') 89 | } 90 | 91 | for (let index = 0; index < a.length; index++) { 92 | const elementA = a[index]; 93 | const elementB = b[index]; 94 | 95 | if (elementA === Unconstrained || elementB === Unconstrained) { 96 | continue 97 | } 98 | 99 | if (elementA === elementB) { 100 | continue 101 | } 102 | 103 | if (Array.isArray(elementA)) { 104 | return sortTuple(elementA, elementB) 105 | } 106 | 107 | if (typeof elementA == 'string') { 108 | return elementA < elementB ? -1 : 1 109 | } 110 | 111 | return elementA < elementB ? 
-1 : 1 112 | } 113 | 114 | return 0 115 | }; 116 | 117 | 118 | interface Leaper { 119 | count: (prefix: P, isAntiFilterOnly?: boolean) => number 120 | propose: (prefix: P) => Array 121 | // Could be faster if we mutate vals 122 | intersect: (prefix: P, vals: Array) => Array 123 | } 124 | 125 | type OutputKeys = Array 126 | // Like extend but supports output tuples with unconstrained values that may be resolved by other leapers 127 | export class ExtendWithUnconstrained implements Leaper> { 128 | keyFunc: (P: P) => Array 129 | outputKeys: OutputKeys 130 | relation: RelationIndex 131 | outputTupleFunc: (relationVals: Tupleized) => TupleizedUnconstrained 132 | isAnti: boolean 133 | 134 | startIdx: number = 0 135 | endIdx: number = 0 136 | // DEBUG only! 137 | __cachedKeyLen = 0 138 | 139 | constructor(keyFunc: (P: P) => Array, keyLength: number, outputKeys: OutputKeys, relation: RelationIndex, relationKeyOrder: any, isAnti: boolean = false) { 140 | this.keyFunc = keyFunc 141 | this.outputKeys = outputKeys 142 | this.relation = relation 143 | this.isAnti = isAnti 144 | const myKs = relationKeyOrder.slice(keyLength) 145 | const mapping = myKs.reduce((acc: { [key: number]: number }, k: any, i: number) => { 146 | acc[i] = outputKeys.indexOf(k) 147 | return acc 148 | }, {}) 149 | this.outputTupleFunc = (relationVals) => { 150 | const out = new Array(outputKeys.length) 151 | out.fill(Unconstrained) 152 | relationVals.map((val, i) => { 153 | if (mapping[i] > -1) { 154 | out[mapping[i]] = val 155 | } 156 | }) 157 | return out 158 | } 159 | } 160 | 161 | toString() { 162 | return ` 163 | ${JSON.stringify(this.relation.keyOrdering)} 164 | ${JSON.stringify(this.outputKeys)} 165 | isAnti:${this.isAnti}:\n${debugPrintElements(this.relation.elements.map(el => this._reshape(el as any, this.__cachedKeyLen)))} 166 | ` 167 | } 168 | 169 | /** 170 | * 171 | * @param prefix 172 | * @param isAntiFilterOnly Should this act as a filter. In the case of only anti leapers 173 | */ 174 | count(prefix: P, isAntiFilterOnly?: boolean): number { 175 | const key = this.keyFunc(prefix) 176 | this.__cachedKeyLen = key.length 177 | if (this.relation.elements.length === 0) { 178 | return 0 179 | } 180 | 181 | // First check if our first item is past the key. This means this row doesn't exist here 182 | if (sortTuple(this.relation.elements[0].slice(0, key.length), key) === 1) { 183 | this.startIdx = this.relation.elements.length 184 | } else { 185 | this.startIdx = gallop(this.relation.elements, (row: any) => sortTuple(row.slice(0, key.length), key) === -1) 186 | } 187 | 188 | if (DEBUG_LEVEL > 1) { 189 | if (this.isAnti) { 190 | console.log("Anti count:", prefix, key, this.relation.elements, this.startIdx) 191 | } 192 | } 193 | 194 | // Nothing found 195 | if (this.startIdx === this.relation.elements.length) { 196 | this.endIdx = this.startIdx 197 | if (this.isAnti) { 198 | return 1e12 199 | } 200 | return 0 201 | } 202 | 203 | this.endIdx = gallop(this.relation.elements, (row: any) => sortTuple(row.slice(0, key.length), key) === 0, this.startIdx) 204 | const count = this.endIdx - this.startIdx 205 | if (this.isAnti) { 206 | if (isAntiFilterOnly) { 207 | return count > 0 ? 
0 : 1e12 208 | } 209 | return 1e12 210 | } 211 | return count 212 | } 213 | 214 | isRetraction(tuple: any): boolean { 215 | return hasRetractionMeta(tuple) 216 | } 217 | 218 | _reshape(tuple: Tupleized, keyLen: number) { 219 | const outputTuple = this.outputTupleFunc(tuple.slice(keyLen) as any) 220 | if (this.isRetraction(tuple)) { 221 | // @ts-ignore 222 | outputTuple[MetaSymbol] = { isRetraction: true } 223 | } 224 | return outputTuple 225 | } 226 | 227 | propose(prefix: P): Array> { 228 | const keyLen = this.keyFunc(prefix).length 229 | if (this.isAnti) { 230 | throw new Error("Antis shouldn't propose") 231 | } 232 | // console.log("In propose", this.relation.elements[0]) 233 | return this.relation.elements.slice(this.startIdx, this.endIdx).map((tuple) => this._reshape(tuple as any, keyLen)) 234 | } 235 | 236 | // Could be faster if we mutate vals 237 | // TODO rewrite this. It's gotten very messy 238 | intersect(prefix: P, vals: Array>): Array> { 239 | const keyLen = this.keyFunc(prefix).length 240 | if (DEBUG_LEVEL > 1 && this.isAnti) { 241 | console.log("INTERSECTION", this) 242 | console.log("key is", prefix, this.keyFunc(prefix)) 243 | console.log("output key is", this.outputKeys) 244 | console.log("Vals is", vals) 245 | // @ts-ignore 246 | console.log("My elements are", this.relation.elements.map(e => this._reshape(e, keyLen))) 247 | console.log("My start/end", this.startIdx, this.endIdx) 248 | } 249 | let startIdx = this.startIdx; 250 | const out: Array> = [] 251 | 252 | let valIndex = 0 253 | while (valIndex < vals.length && startIdx < this.endIdx) { 254 | const val = vals[valIndex] 255 | // If the input was a retraction it should taint the derived values 256 | const valIsRetraction = hasRetractionMeta(val) 257 | 258 | // @ts-ignore 259 | const output = this._reshape(this.relation.elements[startIdx], keyLen) 260 | const outputisRetraction = hasRetractionMeta(output) 261 | const ordResult = sortTuple(output, val) 262 | 263 | // No more results for this val 264 | if (ordResult > 0) { 265 | // If there are any unconstrained in our tuple, we have to reset 266 | // the start idx. I'm not sure if there's a way around this. 267 | // TODO 268 | if (output.some((item: any) => item === Unconstrained)) { 269 | startIdx = this.startIdx 270 | } 271 | if (this.isAnti) { 272 | out.push(val) 273 | } 274 | 275 | valIndex++ 276 | continue 277 | } 278 | 279 | const hasMatch = ordResult === 0 280 | if (hasMatch && this.isAnti) { 281 | valIndex++ 282 | 283 | // If there are any unconstrained in our tuple, we have to reset 284 | // the start idx. I'm not sure if there's a way around this. 
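// (Likely because Unconstrained compares equal to everything in sortTuple, so once
// the reshaped output contains Unconstrained columns it is no longer totally ordered
// against the sorted `vals`, and the scan position cannot safely stay monotone.)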
285 | // TODO 286 | if (output.some((item: any) => item === Unconstrained)) { 287 | startIdx = this.startIdx 288 | } 289 | 290 | continue 291 | } 292 | 293 | if (!hasMatch) { 294 | startIdx = gallop(this.relation.elements, (tuple: any) => { 295 | const output = this._reshape(tuple, keyLen) 296 | return sortTuple(output, val) === -1 297 | }, startIdx) 298 | continue 299 | } 300 | startIdx++ 301 | 302 | // @ts-ignore 303 | if (DEBUG_LEVEL > 1) { 304 | console.log("Comparing my output:", output, "val", val, this.relation, hasMatch) 305 | } 306 | 307 | // If this is anti, we don't add it 308 | if (hasMatch && !this.isAnti) { 309 | // Check for unconstrained 310 | let filledInUnconstrained: null | TupleizedUnconstrained = null 311 | val.forEach((column, i) => { 312 | if (column === Unconstrained && output[i] !== Unconstrained) { 313 | if (filledInUnconstrained == null) { 314 | filledInUnconstrained = [...val] 315 | } 316 | filledInUnconstrained[i] = output[i] 317 | } 318 | }) 319 | 320 | if (filledInUnconstrained) { 321 | if (valIsRetraction || outputisRetraction) { 322 | setRetractionMeta(filledInUnconstrained, true) 323 | } 324 | out.push(filledInUnconstrained) 325 | 326 | // If there are any unconstrained in our tuple, we have to reset 327 | // the start idx. I'm not sure if there's a way around this. 328 | // TODO 329 | if (startIdx === this.endIdx && output.some((item: any) => item === Unconstrained)) { 330 | valIndex++ 331 | startIdx = this.startIdx 332 | } 333 | continue 334 | } 335 | out.push(val) 336 | } 337 | 338 | valIndex++ 339 | } 340 | 341 | // If it's an anti query then add the rest of the vals because we add what didn't match. 342 | if (this.isAnti) { 343 | for (let index = valIndex; index < vals.length; index++) { 344 | out.push(vals[index]) 345 | } 346 | } 347 | if (DEBUG_LEVEL > 1) { 348 | console.log("Returning: ", out) 349 | } 350 | 351 | 352 | return out 353 | } 354 | } 355 | 356 | /** 357 | * Returns a keyOrdering representing the joined key ordering 358 | * @param keyOrderings An array of keyOrderings 359 | */ 360 | export function joinKeyOrdering(keyOrderings: Array>): Array { 361 | if (keyOrderings.length === 0) { 362 | return [] 363 | } else if (keyOrderings.length === 1) { 364 | return keyOrderings[0] 365 | } 366 | 367 | let set = new Set([]) 368 | keyOrderings.forEach((keyOrdering) => { 369 | keyOrdering.forEach(k => set.add(k)) 370 | }) 371 | return [...set] 372 | } 373 | 374 | export function filterKeys(keysToKeep: Array, keyOrder: Array) { 375 | const set = new Set(keysToKeep) 376 | return keyOrder.filter(k => set.has(k)) 377 | } 378 | 379 | type LeapJoinLogicFn = (sourceRow: [KVal, ...Array>], extension: Extension, isRetraction: boolean) => void 380 | export function leapJoinHelper(source: RelationIndex, leapers: Array>], Extension> & { isAnti?: boolean }>, logic: LeapJoinLogicFn) { 381 | // Special case: no leapers 382 | if (leapers.length === 0) { 383 | source.elements.forEach(row => { 384 | const rowIsRetraction = hasRetractionMeta(row) 385 | // @ts-ignore 386 | logic(row, [], rowIsRetraction) 387 | }) 388 | return 389 | } 390 | // Special case: only anti-leapers 391 | if (leapers.every(l => l.isAnti)) { 392 | source.elements.forEach(row => { 393 | // Do any leaper reject this row? 
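// For an anti leaper, count(prefix, /* isAntiFilterOnly */ true) acts as a pure
// filter: it returns 0 when the relation holds a matching tuple (so the row must
// be dropped) and a large sentinel count (1e12) otherwise; see
// ExtendWithUnconstrained.count above.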
394 | if (leapers.some(l => l.count(row, true) === 0)) { 395 | return 396 | } 397 | 398 | const rowIsRetraction = hasRetractionMeta(row) 399 | // @ts-ignore 400 | logic(row, [], rowIsRetraction) 401 | }) 402 | return 403 | } 404 | 405 | for (const row of source.elements) { 406 | // 1. Determine which leaper would propose the fewest values. 407 | let minIndex = Infinity; 408 | let minCount = Infinity; 409 | 410 | for (let index = 0; index < leapers.length; index++) { 411 | const leaper = leapers[index]; 412 | if (!leaper.count) { 413 | console.warn('!!!!! - No leapers!', leaper) 414 | } 415 | const count = leaper.count(row) 416 | if (DEBUG_LEVEL > 1) { 417 | console.log("Leaper", leaper, "is proposing", count, "vals") 418 | } 419 | if (count < minCount) { 420 | minCount = count 421 | minIndex = index 422 | } 423 | } 424 | 425 | // 2. Have the least-proposing leaper propose their values. 426 | if (minCount > 0) { 427 | let vals = leapers[minIndex].propose(row) 428 | if (DEBUG_LEVEL > 1) { 429 | console.log("Leaper", leapers[minIndex], "proposed", vals) 430 | } 431 | // 3. Have the other leapers restrict the proposals. 432 | 433 | for (let index = 0; index < leapers.length; index++) { 434 | if (index !== minIndex) { 435 | const leaper = leapers[index]; 436 | vals = leaper.intersect(row, vals) 437 | } 438 | } 439 | 440 | if (DEBUG_LEVEL > 0) { 441 | console.log("Joining Src:\n" + source.toString()) 442 | let i = 0 443 | for (const leaper of leapers) { 444 | console.log(` Leaper[${i++}]${leapers[minIndex] === leaper ? "*" : ""} = ${leaper.toString()}`) 445 | } 446 | 447 | console.log("Src row is:", hasRetractionMeta(row) ? "RETRACTION:" : "", row) 448 | let proposals = leapers[minIndex].propose(row) 449 | console.log(`Leaper[${minIndex}] proposes: ${debugPrintElements(leapers[minIndex].propose(row))}`) 450 | i = 0 451 | for (const leaper of leapers) { 452 | if (i !== minIndex) { 453 | proposals = leaper.intersect(row, proposals) 454 | console.log(` Leaper[${i}] intersection result: ${debugPrintElements(proposals)}`) 455 | } 456 | i++ 457 | } 458 | if (vals.length === 0) { 459 | console.log(`With no join`) 460 | } else { 461 | console.log(`With join results of: ${debugPrintElements(vals)}`) 462 | } 463 | } 464 | 465 | // @ts-ignore 466 | const rowIsRetraction = hasRetractionMeta(row) 467 | // 4. Call `logic` on each value 468 | for (const val of vals) { 469 | // @ts-ignore 470 | const valIsRetraction = hasRetractionMeta(val) 471 | logic(row, val, rowIsRetraction || valIsRetraction || false) 472 | } 473 | } 474 | 475 | } 476 | } 477 | 478 | const MetaSymbol: symbol = Symbol('meta') 479 | 480 | function hasRetractionMeta(v: any): boolean { 481 | return !!v[MetaSymbol]?.isRetraction 482 | } 483 | 484 | function setRetractionMeta(v: any, isRetraction: boolean) { 485 | v[MetaSymbol] = { isRetraction } 486 | } 487 | 488 | export class RelationIndex { 489 | // Array of tuples 490 | elements: Array<[K, ...Array>]> 491 | keyOrdering: [KName, ...Array] 492 | 493 | constructor(elements: Array<[K, ...Array>]>, keyOrdering: [KName, ...Array]) { 494 | this.elements = elements 495 | this.keyOrdering = keyOrdering 496 | } 497 | 498 | toString() { 499 | return this.elements.map(el => `${hasRetractionMeta(el) ? 
"RETRACTION:" : ""}${JSON.stringify(el)}`).join("\n") 500 | } 501 | 502 | clone(): RelationIndex { 503 | // @ts-ignore 504 | const cloned = new RelationIndex([...this.elements], [...this.keyOrdering]) 505 | return cloned 506 | } 507 | 508 | dedup() { 509 | dedupBy(this.elements, (a, b) => sortTuple(a, b) === 0) 510 | } 511 | 512 | indexBy | K, NewVal extends { [NewKeyName in KName | keyof Val]: ValueOf | K }>(newkeyOrdering: [NewKName, ...Array]): RelationIndex { 513 | const keyMapping = this.keyOrdering.reduce((acc: { [key: string]: number }, k, idx) => { 514 | acc[k as string] = idx 515 | return acc 516 | }, {}) 517 | 518 | const newData = this.elements.map(row => { 519 | const newRow = newkeyOrdering.map(k => row[keyMapping[k as string]]) 520 | if (hasRetractionMeta(row)) { 521 | setRetractionMeta(newRow, true) 522 | } 523 | return newRow 524 | }) as Array<[NewK, ...Array>]> 525 | newData.sort((rowA, rowB) => sortTuple(rowA, rowB)) 526 | return new RelationIndex(newData, newkeyOrdering) 527 | } 528 | 529 | filterElements(constants: Partial<{ [KeyName in KName]: K } & Val>, isAnti: boolean = false): RelationIndex { 530 | if (isEmptyObj(constants)) { 531 | return this 532 | } 533 | 534 | const a = new RelationIndex( 535 | this.elements.filter(row => { 536 | return row.every((v, i) => { 537 | const constantVal = constants[this.keyOrdering[i]] 538 | if (constantVal !== undefined) { 539 | const isEqual = (v === constantVal) 540 | return isAnti ? !isEqual : isEqual 541 | } 542 | return true 543 | }) 544 | }), 545 | this.keyOrdering 546 | ) 547 | return a 548 | 549 | } 550 | 551 | assert(element: { [KeyName in KName]: K } & Val) { 552 | // @ts-ignore 553 | this.insertRow(this.keyOrdering.map(k => element[k])) 554 | } 555 | 556 | retract(element: { [KeyName in KName]: K } & Val) { 557 | // @ts-ignore 558 | const newRow = this.keyOrdering.map(k => element[k]) 559 | newRow[MetaSymbol] = { isRetraction: true } 560 | this.insertRow(newRow, true) 561 | } 562 | 563 | insertRow(newRow: [K, ...Array>], isRetraction: boolean = false) { 564 | const insertIdx = gallop(this.elements, (row: any) => sortTuple(row, newRow) === -1) 565 | // Check if this is a retraction and if we have a matching datum to retract. 566 | // If so, we will remove the positive match and clear the slate. 567 | const nextDatum = this.elements[insertIdx] 568 | if (isRetraction) { 569 | if (nextDatum && !hasRetractionMeta(nextDatum) && sortTuple(nextDatum, newRow) === 0) { 570 | // We have a match, remove nextDatum from our elements 571 | this.elements.splice(insertIdx, 1) 572 | return 573 | } 574 | } else { 575 | // Check the opposite too. 
We added a retraction, and now we add an assertion 576 | if (nextDatum && hasRetractionMeta(nextDatum) && sortTuple(nextDatum, newRow) === 0) { 577 | // We have a match, remove nextDatum from our elements 578 | this.elements.splice(insertIdx, 1) 579 | return 580 | } 581 | 582 | } 583 | this.elements.splice(insertIdx, 0, newRow) 584 | } 585 | } 586 | 587 | 588 | interface Tell { 589 | assert: (v: Val) => void 590 | } 591 | interface Retract { 592 | retract: (v: Val) => void 593 | } 594 | 595 | interface MultiIndexRelation { 596 | keys: () => Array 597 | isIndexedBy: (k: [keyof T, ...Array]) => number 598 | indexByK: (k: keyof T) => void 599 | indexBy: (ks: [keyof T, ...Array]) => RelationIndex 600 | } 601 | 602 | 603 | export class Relation implements MultiIndexRelation, Tell, Retract { 604 | relations: Array, { [K in keyof T]: T[K] }>> = [] 605 | constructor() { 606 | this.relations = [] 607 | } 608 | toString(): string { 609 | if (!this.relations[0]) { 610 | return "" 611 | } 612 | return "[" + this.relations[0]?.elements.map(el => hasRetractionMeta(el) ? `RETRACTION: ${JSON.stringify(el)}` : JSON.stringify(el)).join(", ") + "]" 613 | } 614 | 615 | dedup() { 616 | this.relations.forEach(relation => relation.dedup()) 617 | } 618 | 619 | public get length(): number { 620 | if (this.relations.length === 0) { 621 | return 0 622 | } 623 | 624 | return this.relations[0].elements.length 625 | } 626 | 627 | clone(): Relation { 628 | const cloned = new Relation() 629 | cloned.relations = this.relations.map(relation => relation.clone()) 630 | return cloned 631 | } 632 | 633 | merge(otherRelation: Relation) { 634 | // TODO this can be faster if we remember the indices we started at. But easy just to do the simple thing for now 635 | const otherkeyOrdering = otherRelation.keys() 636 | otherRelation.relations[0].elements.forEach(row => { 637 | const isRetraction = hasRetractionMeta(row) 638 | const datum = otherkeyOrdering.reduce((acc, key, index) => { 639 | // @ts-ignore 640 | acc[key] = row[index] 641 | return acc 642 | }, {}) 643 | if (isRetraction) { 644 | this.retract(datum as T) 645 | } else { 646 | this.assert(datum as T) 647 | } 648 | }); 649 | } 650 | 651 | createInitialRelation(datum: T, isRetraction: boolean = false) { 652 | const entries = Object.entries(datum) 653 | const ks = entries.map(([k]) => k) 654 | const vals = entries.map(([, val]) => val) 655 | if (isRetraction) { 656 | // @ts-ignore 657 | vals[MetaSymbol] = { isRetraction: true } 658 | } 659 | const rel = new RelationIndex([vals] as any, ks as any) as any 660 | this.relations.push(rel) 661 | } 662 | 663 | assert(v: T) { 664 | if (this.relations.length === 0) { 665 | this.createInitialRelation(v) 666 | return 667 | } 668 | 669 | this.relations.forEach(relation => relation.assert(v)) 670 | } 671 | 672 | retract(v: T) { 673 | if (this.relations.length === 0) { 674 | this.createInitialRelation(v, true) 675 | return 676 | } 677 | 678 | this.relations.forEach(relation => relation.retract(v)) 679 | } 680 | 681 | 682 | _isIndexedByK(k: keyof T) { 683 | return this.relations.findIndex(relation => { 684 | return relation.keyOrdering[0] === k 685 | }) 686 | } 687 | 688 | _isIndexedBy(ks: [keyof T, ...Array]) { 689 | return this.relations.findIndex(relation => { 690 | return relation.keyOrdering.every((k, i) => k === ks[i]) 691 | }) 692 | } 693 | 694 | isIndexedByK(k: keyof T) { 695 | return this._isIndexedByK(k) 696 | } 697 | 698 | isIndexedBy(ks: [keyof T, ...Array]) { 699 | return this._isIndexedBy(ks) 700 | } 701 | 702 | keys() { 703 | 
return this.relations[0].keyOrdering 704 | } 705 | 706 | indexByK(k: keyof T) { 707 | if (this.relations.length === 0) { 708 | console.warn("No Data to index by") 709 | const relation = new RelationIndex([], [k]) 710 | this.relations.push(relation) 711 | return relation 712 | } 713 | 714 | const indexedRelationIdx = this._isIndexedByK(k) 715 | if (indexedRelationIdx !== -1) { 716 | return this.relations[indexedRelationIdx] 717 | } 718 | 719 | const currentKeys: Array = this.keys() 720 | const newKeyOrdering = [k, ...currentKeys.filter(k2 => k2 !== k)] 721 | const newIndexedRelation = this.relations[0].indexBy(newKeyOrdering as any) 722 | // @ts-ignore 723 | this.relations.push(newIndexedRelation) 724 | return newIndexedRelation 725 | } 726 | 727 | indexBy(ks: [keyof T, ...Array]): RelationIndex { 728 | if (this.relations.length === 0) { 729 | // console.warn("No Data to index by") 730 | return new RelationIndex([], ks) 731 | } 732 | 733 | const indexedRelationIdx = this._isIndexedBy(ks) 734 | if (indexedRelationIdx !== -1) { 735 | // @ts-ignore 736 | return this.relations[indexedRelationIdx] 737 | } 738 | 739 | const newIndexedRelation = this.relations[0].indexBy(ks) 740 | // @ts-ignore 741 | this.relations.push(newIndexedRelation) 742 | // @ts-ignore 743 | return newIndexedRelation 744 | } 745 | } 746 | 747 | export const Added = Symbol("DatumAdded") 748 | export const Removed = Symbol("DatumRemoved") 749 | export const Modified = Symbol("DatumModified") 750 | type DiffKind = typeof Added | typeof Removed | typeof Modified 751 | 752 | export type RecentDatum = { 753 | kind: typeof Added | typeof Removed 754 | datum: T 755 | } | { 756 | kind: typeof Modified 757 | datum: T 758 | oldDatum: T 759 | } 760 | 761 | export class Variable implements Tell, Retract { 762 | stable: Relation = new Relation() 763 | recent: Relation = new Relation() 764 | toAdd: Array> = [] 765 | _recentChanges: Array> = [] 766 | _subscribers: Array<(v: T, isRetraction: boolean) => void> = [] 767 | meta: { 768 | isAnti: boolean 769 | } = { isAnti: false } 770 | // Keep track of the count of datums we've seen 771 | counts: Map = new Map() 772 | name: string = "" 773 | 774 | setName(name: string) { 775 | this.name = name 776 | } 777 | 778 | toString(): string { 779 | return ` 780 | Variable ${this.name}: 781 | Counts: ${JSON.stringify([...this.counts])} 782 | Stable: ${this.stable.toString()} 783 | Recent: ${this.recent.toString()} 784 | ToAdd: ${this.toAdd.map(r => r.toString())} 785 | 786 | ` 787 | } 788 | 789 | clone(): Variable { 790 | const cloned = new Variable() 791 | cloned.stable = this.stable.clone() 792 | cloned.recent = this.recent.clone() 793 | cloned.toAdd = this.toAdd.map(toAdd => toAdd.clone()) 794 | cloned.counts = new Map([...this.counts]) 795 | return cloned 796 | } 797 | 798 | isEmpty(): boolean { 799 | return this.stable.length === 0 && this.recent.length === 0 && this.toAdd.length === 0 800 | } 801 | 802 | cloneAndTrack(): Variable { 803 | const cloned = this.clone() 804 | this.onAssert((v, isRetraction) => isRetraction ? cloned.retract(v) : cloned.assert(v)) 805 | return cloned 806 | } 807 | 808 | keys(): Array { 809 | if (this.stable.length) { 810 | return this.stable.keys() 811 | } else if (this.recent.length) { 812 | return this.recent.keys() 813 | } else if (this.toAdd.length && this.toAdd[0].length) { 814 | return this.toAdd[0].keys() 815 | } 816 | throw new Error("Relation doesn't have any data. 
Can't infer schema") 817 | } 818 | 819 | recentData(): Array> | null { 820 | if (!this.changed()) { 821 | return null 822 | } 823 | return this.recent.relations[0].elements.map(row => { 824 | // @ts-ignore 825 | const datum = fromEntries(row.map((v, i) => [this.recent.relations[0].keyOrdering[i], v])) 826 | return { 827 | kind: hasRetractionMeta(row) ? Removed : Added, 828 | datum: datum, 829 | } 830 | }) 831 | } 832 | 833 | private lastReadAllData: Array | null = null 834 | readAllData(): Array { 835 | if (!this.changed() && this.lastReadAllData !== null) { 836 | return this.lastReadAllData 837 | } 838 | while (this.changed()) { } 839 | if (!this.stable.relations[0]) { 840 | return [] 841 | } 842 | this.lastReadAllData = this.stable.relations[0].elements 843 | .filter(el => { 844 | return !hasRetractionMeta(el) 845 | }) 846 | .map(row => { 847 | // @ts-ignore 848 | const datum = fromEntries(row.map((v, i) => [this.stable.relations[0].keyOrdering[i], v])) 849 | return datum 850 | }) 851 | return this.lastReadAllData 852 | } 853 | 854 | _remapKeys(newKeyOrdering: { [K in keyof In]: keyof Out }): Variable { 855 | const out = new Variable() 856 | out.stable = this.stable as any 857 | out.recent = this.recent as any 858 | out.toAdd = this.toAdd as any 859 | return out 860 | } 861 | 862 | 863 | assert(v: T) { 864 | if (this.toAdd.length === 0) { 865 | this.toAdd.push(new Relation()) 866 | } 867 | const nextCount = this.updateCount(v, true) 868 | if (nextCount === 1) { 869 | this.toAdd[0].assert(v) 870 | this._subscribers.forEach(s => s(v, false)) 871 | } 872 | } 873 | 874 | retract(v: T) { 875 | if (this.toAdd.length === 0) { 876 | this.toAdd.push(new Relation()) 877 | } 878 | const nextCount = this.updateCount(v, false) 879 | if (nextCount === 0) { 880 | this.toAdd[0].retract(v) 881 | this._subscribers.forEach(s => s(v, true)) 882 | } 883 | } 884 | 885 | /** 886 | * Returns next count 887 | */ 888 | updateCount(v: T, increment: boolean): number { 889 | // Sort the keys 890 | const sortedEntries = Object.entries(v).sort(([k1], [k2]) => k1 === k2 ? 0 : k1 < k2 ? -1 : 1) 891 | const sortedEntriesKey = JSON.stringify(sortedEntries) 892 | const existingCount = this.counts.get(sortedEntriesKey) ?? 0 893 | const nextCount = existingCount + (increment ? 1 : -1) 894 | this.counts.set(sortedEntriesKey, nextCount) 895 | return nextCount 896 | } 897 | 898 | getCount(v: T) { 899 | // Sort the keys 900 | const sortedEntries = Object.entries(v).sort(([k1], [k2]) => k1 === k2 ? 0 : k1 < k2 ? -1 : 1) 901 | const sortedEntriesKey = JSON.stringify(sortedEntries) 902 | return this.counts.get(sortedEntriesKey) ?? 0 903 | } 904 | 905 | onAssert(f: (v: T, isRetraction: boolean) => void) { 906 | this._subscribers.push(f) 907 | } 908 | 909 | removeOnAssert(f: (v: T, isRetraction: boolean) => void) { 910 | this._subscribers = this._subscribers.filter(onAssertFn => f !== onAssertFn) 911 | } 912 | 913 | onChange(f: () => void) { 914 | const subscribeFn = (datum: T, isRetraction: boolean) => { 915 | f() 916 | } 917 | if (!this.recent.length) { 918 | f() 919 | } 920 | this.onAssert(subscribeFn); 921 | return () => { this.removeOnAssert(subscribeFn) } 922 | } 923 | onNewDatum(f: (d: RecentDatum) => void) { 924 | const subscribeFn = (datum: T, isRetraction: boolean) => { 925 | f({ kind: isRetraction ? 
Removed : Added, datum }) 926 | } 927 | this.onAssert(subscribeFn); 928 | return () => { this.removeOnAssert(subscribeFn) } 929 | } 930 | 931 | // recentChanges(): Generator{ 932 | // function* () { 933 | 934 | // this._recentChanges.map(relation => { 935 | // const indexedRelation = relation.relations[0] 936 | // const keyOrder = indexedRelation.keyOrdering 937 | // indexedRelation.elements.map((e, i) => [keyOrder[i], e]) 938 | // }) 939 | // } 940 | // } 941 | 942 | changed() { 943 | // 1. Merge this.recent into this.stable. 944 | if (this.recent.length > 0) { 945 | let recent = this.recent; 946 | this.recent = new Relation(); 947 | 948 | if (this.stable.relations.length === 0) { 949 | // There is no relation, so let's just use the toAdd as our relation 950 | this.stable = recent 951 | } else { 952 | this.stable.merge(recent) 953 | } 954 | } 955 | 956 | // 2. Move this.toAdd into this.recent. 957 | if (this.toAdd.length > 0) { 958 | // 2a. Restrict `toAdd` to tuples not in `this.stable`. 959 | for (let toAddIndex = 0; toAddIndex < this.toAdd.length; toAddIndex++) { 960 | const toAdd = this.toAdd[toAddIndex] 961 | if (this.stable.relations.length === 0) { 962 | // There is no relation, so let's just use the toAdd as our relation 963 | toAdd.dedup() 964 | this.recent = toAdd 965 | continue 966 | } 967 | 968 | let indexedToAddIdx = -1 969 | const indexedStableIdx = this.stable.relations.findIndex((relation) => { 970 | indexedToAddIdx = toAdd.isIndexedBy(relation.keyOrdering) 971 | return indexedToAddIdx 972 | }) 973 | 974 | let indexedToAdd 975 | let indexedStable: any 976 | 977 | if (indexedStableIdx !== -1 && indexedToAddIdx !== -1) { 978 | indexedStable = this.stable.relations[indexedStableIdx] 979 | indexedToAdd = toAdd.relations[indexedToAddIdx] 980 | } else { 981 | indexedStable = this.stable.indexBy(this.stable.keys()) 982 | indexedToAdd = toAdd.indexBy(this.stable.keys()) 983 | } 984 | 985 | 986 | if (indexedToAdd === undefined || indexedStable === undefined) { 987 | // Shouldn't happen 988 | throw new Error("Shouldn't happen") 989 | } 990 | 991 | // Filter out elements we've already seen in our relation 992 | // @ts-ignore 993 | indexedToAdd.elements = indexedToAdd.elements.filter(elem => { 994 | let searchIdx = gallop( 995 | indexedStable.elements, 996 | (row: any) => sortTuple(row, elem) < 0); 997 | if (searchIdx < indexedStable.elements.length && 998 | sortTuple( 999 | indexedStable.elements[searchIdx], elem) === 0) { 1000 | // Check if this is a retraction, if so let it through 1001 | if (hasRetractionMeta(elem)) { 1002 | return true 1003 | } 1004 | return false 1005 | } 1006 | 1007 | return true; 1008 | }); 1009 | } 1010 | 1011 | // 2b. Merge all newly added relations. 1012 | let toAdd = this.toAdd.pop(); 1013 | while (!!toAdd && this.toAdd.length > 0) { 1014 | toAdd.merge(this.toAdd.pop() as any); 1015 | } 1016 | 1017 | if (toAdd) { 1018 | this.recent = toAdd; 1019 | } 1020 | } 1021 | 1022 | // Return true iff recent is non-empty. 1023 | return !!this.recent.length; 1024 | } 1025 | 1026 | } 1027 | 1028 | // (logicFn: (joined: V1 & V2 & V3) => void, ...variables: [Variable, Variable, Variable]): void 1029 | // } 1030 | 1031 | export function remapKeys(keyOrder: Array, keyMap: { [key: string]: string }): Array { 1032 | return keyOrder.map(k => k in keyMap ? 
keyMap[k] : k) 1033 | } 1034 | 1035 | export function reverseRemapKeys(keyOrder: Array, keyMap: { [key: string]: string }): Array { 1036 | const reversekeyMap = Object.entries(keyMap).map(([k, v]) => [v, k]).reduce((acc: any, [k, v]) => { acc[k] = v; return acc }, {}) 1037 | return remapKeys(keyOrder, reversekeyMap) 1038 | } 1039 | 1040 | type RemapKeys = { [K in keyof In]: keyof Out } 1041 | export function variableJoinHelper(logicFn: (joined: V1, isRetraction: boolean) => void, variables: [Variable], remapKeys: [RemapKeys], constants: [Partial]): void; 1042 | export function variableJoinHelper(logicFn: (joined: V1 & V2, isRetraction: boolean) => void, variables: [Variable, Variable], remapKeys: [RemapKeys, RemapKeys], constants: [Partial, Partial]): void; 1043 | export function variableJoinHelper(logicFn: (joined: V1 & V2 & V3, isRetraction: boolean) => void, variables: [Variable, Variable, Variable], remapKeys: [RemapKeys, RemapKeys, RemapKeys], constants: [Partial, Partial, Partial]): void; 1044 | 1045 | export function variableJoinHelper(logicFn: (source: any, isRetraction: boolean) => void, variables: Array, remapKeys: Array, constants: Array = []) { 1046 | while (variables.some(v => v.changed())) { 1047 | innerVariableJoinHelper(logicFn, variables, remapKeys, constants, false) 1048 | } 1049 | } 1050 | 1051 | export function* variableJoinHelperGen(variables: Array, remapKeys: Array, constants: Array = []): Generator { 1052 | const out: any[] = [] 1053 | // @ts-ignore 1054 | variableJoinHelper(joint => out.push(joint), variables, remapKeys, constants) 1055 | for (const i of out) { 1056 | yield i 1057 | } 1058 | } 1059 | 1060 | export function innerVariableJoinHelper(logicFn: (source: any, isRetraction: boolean) => void, variables: Array, remapKeyMetas: Array, constants: Array = [], stableOnly: boolean) { 1061 | // We have to compare: 1062 | // All the recents 1063 | // every stable against every other recent 1064 | // every 2 stables from the reset of the recents 1065 | // and so on... 1066 | // Except we don't need to check all stables against each other 1067 | // This looks a lot like a permutation of 1's and 0's. 1068 | // Where 1 is stable, and 0 is recent. 1069 | // Example with 3 variables 1070 | // 0 0 0 // Check all recents against each other 1071 | // 0 0 1 // Check the rightmost stable against the other recents 1072 | // 0 1 0 1073 | // 0 1 1 1074 | // ... 1075 | // 1 1 1 // Don't check all stables 1076 | // 1077 | // That looks a lot like counting. 
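//
// Concretely, the loop below counts currentIteration from 0 up to
// 2**variables.length - 2 and treats bit `i` of the counter as the
// stable/recent switch for variables[i]: `(currentIteration >> i) & 1`
// picks the stable relation when the bit is set and the recent one
// otherwise. The all-stable pattern is skipped by setting
// totalIterations = 2**variables.length - 1.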
1078 | 1079 | if (variables[0].meta.isAnti) { 1080 | throw new Error("First Table in Query cannot be an anti (.not) query.") 1081 | } else if (variables.length === 2 && variables[1].meta.isAnti) { 1082 | // throw new Error("Query must have more than 2 tables if one is an anti (.not) query") 1083 | } 1084 | 1085 | // TODO order keys by remapKeyMetas 1086 | const srckeyOrder: Array = Object.values(remapKeyMetas[0]) 1087 | 1088 | const fullOutputKeyOrder = joinKeyOrdering(variables.map((_, i) => Object.values(remapKeyMetas[i]))) 1089 | 1090 | const outputKeyOrder = fullOutputKeyOrder.slice(srckeyOrder.length) 1091 | const restKeyOrders = remapKeyMetas.slice(1).map((remapKeyMeta, i) => { 1092 | // @ts-ignore 1093 | const indexByKeyOrder = filterKeys(Object.values(remapKeyMeta), fullOutputKeyOrder) 1094 | return indexByKeyOrder 1095 | }) 1096 | 1097 | if (DEBUG_LEVEL > 0) { 1098 | console.log("Output key order is", outputKeyOrder) 1099 | } 1100 | 1101 | const restKeyOrderSets = restKeyOrders.map(keyOrder => new Set(keyOrder)) 1102 | const restkeyLengths = restKeyOrderSets.map(keyOrderSet => srckeyOrder.filter(k => keyOrderSet.has(k)).length) 1103 | 1104 | let totalIterations = (2 ** variables.length) - 1 // minus 1 since we don't need to check the final state of all stable 1105 | let currentIteration = 0 1106 | 1107 | // If we only want to query the stable relations. This will pick only the stable relations from all the variables and only run once 1108 | if (stableOnly) { 1109 | totalIterations += 1 1110 | currentIteration = totalIterations - 1 1111 | } 1112 | 1113 | // let currentIteration = totalIterations - 1 1114 | while (currentIteration < totalIterations) { 1115 | const anyUndefinedRelation = variables.some((v, i) => (currentIteration >> i) & 1 ? v.stable.relations.length === 0 : v.recent.relations.length === 0) 1116 | if (anyUndefinedRelation) { 1117 | currentIteration++ 1118 | continue 1119 | } 1120 | if (DEBUG_LEVEL > 0) { 1121 | console.log("Comparing:\n" + variables.map((v, i) => (currentIteration >> i) & 1 ? `${i}: Stable. ${v.stable.toString()}` : `${i}: Recent. ${v.recent.toString()}`).join("\n")) 1122 | } 1123 | const indexedRelations = variables.map((variable, index) => { 1124 | // check if we should return a recent or stable for this relation 1125 | const relation = ((currentIteration >> index) & 1) ? variable.stable : variable.recent 1126 | if (index !== 0) { 1127 | const relationKeyOrder = restKeyOrders[index - 1] 1128 | const relationKeyOrderSet = restKeyOrderSets[index - 1] 1129 | const keyLength = restkeyLengths[index - 1] 1130 | let indexedRelation = relation.indexBy(reverseRemapKeys(relationKeyOrder, remapKeyMetas[index])) 1131 | // Filter the relation with known constants. 
Could make joins faster 1132 | if (constants[index] !== undefined && constants[index] !== EmptyObj) { 1133 | indexedRelation = indexedRelation.filterElements(constants[index]) 1134 | } 1135 | 1136 | // @ts-ignore 1137 | return new ExtendWithUnconstrained( 1138 | (src: any) => { 1139 | const keyTuple = src.filter((_: any, i: number) => relationKeyOrderSet.has(srckeyOrder[i])) 1140 | return keyTuple 1141 | // @ts-ignore 1142 | // return src[srckeyOrder.indexOf(relationKeyOrder[0])] 1143 | }, 1144 | keyLength, 1145 | outputKeyOrder, 1146 | // @ts-ignore 1147 | indexedRelation, 1148 | relationKeyOrder, 1149 | variables[index].meta.isAnti 1150 | ) 1151 | } 1152 | 1153 | // if (DEBUG_LEVEL > 0) { 1154 | // console.log("Src Key Order is:", srckeyOrder) 1155 | // console.log("Remap Key meta", srckeyOrder) 1156 | // } 1157 | let indexedRelation = relation.indexBy(reverseRemapKeys(srckeyOrder, remapKeyMetas[index])) 1158 | // Filter the relation with known constants. Could make joins faster 1159 | if (constants[index] !== undefined && constants[index] !== EmptyObj) { 1160 | indexedRelation = indexedRelation.filterElements(constants[index]) 1161 | } 1162 | 1163 | return indexedRelation 1164 | }) 1165 | leapJoinHelper(indexedRelations[0] as any, indexedRelations.slice(1) as any, (sourceRow, extension: any, isRetraction: boolean) => { 1166 | const out: any = {} 1167 | srckeyOrder.reduce((acc: any, k: any, i: any) => { 1168 | if (isAutoKey(k)) { 1169 | return acc 1170 | } 1171 | acc[k] = sourceRow[i] 1172 | return acc 1173 | }, out) 1174 | outputKeyOrder.reduce((acc, k, i) => { 1175 | if (isAutoKey(k)) { 1176 | return acc 1177 | } 1178 | // @ts-ignore 1179 | acc[k] = extension[i] 1180 | return acc 1181 | }, out) 1182 | logicFn(out, isRetraction) 1183 | }) 1184 | 1185 | currentIteration++ 1186 | } 1187 | } 1188 | 1189 | function isEmptyObj(obj: {}) { 1190 | if (obj === EmptyObj) { 1191 | return true 1192 | } 1193 | 1194 | for (var prop in obj) { 1195 | if (obj.hasOwnProperty(prop)) { 1196 | return false; 1197 | } 1198 | } 1199 | 1200 | return true; 1201 | } 1202 | 1203 | interface Queryable { 1204 | (keyMap: Partial): void 1205 | } 1206 | interface AntiQueryable { 1207 | not(keyMap: Partial): void 1208 | } 1209 | 1210 | export interface View { 1211 | recentData(): null | Array> 1212 | readAllData(): Array 1213 | // Returns unsubscribe fn 1214 | onChange(subscriber: () => void): () => void 1215 | // Returns unsubscribe fn. Doesn't affect recentData calls 1216 | onNewDatum(subscriber: (datum: RecentDatum) => void): () => void 1217 | // Clones recentData view state, but should preserve pointer to original data 1218 | copy(): View 1219 | } 1220 | 1221 | interface Viewable { 1222 | view(): View 1223 | } 1224 | 1225 | export interface Table extends Tell, Retract, Queryable, AntiQueryable, Viewable { 1226 | update(lookupArgs: Partial, mergeWith: Partial): void 1227 | } 1228 | 1229 | type UnsubscribeFn = () => void 1230 | export interface MaterializedTable extends Queryable, AntiQueryable, Viewable { 1231 | // Rerun the query to see the latest changes 1232 | runQuery: () => void 1233 | /** 1234 | * Add implications to the query. This allows for recursion. The results of a query can feed back to the inputs of the query. 1235 | */ 1236 | implies: (f: (datum: T, kind: typeof Added | typeof Removed) => void) => MaterializedTable 1237 | toString: () => string 1238 | /** 1239 | * Subscribe to when a dependency of this MaterializedTable has change. 
1240 | * Useful if you want to know when you should rerurn the query 1241 | */ 1242 | onDependencyChange: (subscriber: () => void) => UnsubscribeFn 1243 | viewExt: () => ViewExt 1244 | // queryVariables: null | Array> 1245 | } 1246 | 1247 | type PrivateMaterializeTableState = {} 1248 | function newMaterializedTable(v: Variable, runQuery: () => void, onDependencyChange: (s: () => void) => UnsubscribeFn): MaterializedTable { 1249 | const outVar = new Variable() 1250 | const innerTable = _newTable(v, true) 1251 | const materializedTable = innerTable as unknown as MaterializedTable & PrivateMaterializeTableState 1252 | materializedTable.runQuery = runQuery 1253 | 1254 | materializedTable.implies = (f: (datum: T, kind: typeof Added | typeof Removed) => void) => { 1255 | const implicationView = materializedTable.view() 1256 | const implicationContext = new QueryContext() 1257 | // const scheduledRunQuery = false 1258 | const onRecentDatum = ({ kind, datum }: RecentDatum) => { 1259 | queryContext = implicationContext 1260 | if (kind === Added) { 1261 | queryContext.implicationState = { isRetraction: false } 1262 | f(datum, kind) 1263 | } else if (kind === Removed) { 1264 | queryContext.implicationState = { isRetraction: true } 1265 | f(datum, kind) 1266 | } else { 1267 | throw new Error("Unhandle modification ??") 1268 | } 1269 | queryContext = emptyQueryContext 1270 | 1271 | runQuery() 1272 | // if (!scheduledRunQuery) { 1273 | // runQuery() 1274 | // hmm should we schedule this to run on the next tick? 1275 | // setTimeout(() => runQuery(), 0) 1276 | // } 1277 | } 1278 | implicationView.onChange(() => { 1279 | implicationView.recentData()?.map(onRecentDatum); 1280 | }) 1281 | implicationView.recentData()?.map(onRecentDatum); 1282 | return materializedTable 1283 | } 1284 | materializedTable.onDependencyChange = onDependencyChange 1285 | return materializedTable 1286 | 1287 | } 1288 | 1289 | 1290 | class QueryContext { 1291 | implicationState: null | { 1292 | isRetraction: boolean 1293 | } = null; 1294 | variables: Array = [] 1295 | remapKeys: Array = [] 1296 | constants: Array = [] 1297 | // Indices into which variales are anti 1298 | antiVariablesIndices: Set = new Set() 1299 | 1300 | addVariable(v: Variable, remapKeys: any, constantVals: any) { 1301 | this.variables.push(v) 1302 | this.remapKeys.push(remapKeys) 1303 | this.constants.push(constantVals) 1304 | } 1305 | 1306 | } 1307 | 1308 | function fromEntries(entries: Array<[string, V]>): Object { 1309 | let nonEmpty = false 1310 | const o = {} 1311 | for (let [k, v] of entries) { 1312 | nonEmpty = true 1313 | // @ts-ignore 1314 | o[k] = v 1315 | } 1316 | return nonEmpty ? 
o : EmptyObj 1317 | } 1318 | 1319 | const emptyQueryContext = new QueryContext() 1320 | let queryContext = emptyQueryContext 1321 | /** 1322 | * Returns a auto generated suffix key (for when a key needs to be defined, but isn't from the user) 1323 | * @param keyName 1324 | */ 1325 | function autoKey(keyName: string) { 1326 | return '___' + keyName + Math.random().toString(36).substring(2, 10) 1327 | } 1328 | 1329 | function isAutoKey(k: string): boolean { 1330 | return k.startsWith('___') 1331 | } 1332 | 1333 | 1334 | // TODO these validators are not used 1335 | // type TypeValidator = { typeName: string, validate: (t: any) => boolean } 1336 | type TypeValidator = { typeName: string, validate: (t: any) => t is T } 1337 | // type TableSchema = { [K in Keys]: TypeValidator } 1338 | type TableSchema = { [K in keyof T]: TypeValidator } 1339 | export function newTable(schema: TableSchema): Table { 1340 | return _newTable(undefined, false, schema) 1341 | } 1342 | 1343 | export const StringType = { 1344 | typeName: 'string', 1345 | validate: (t: any): t is string => typeof t === 'string' 1346 | } 1347 | 1348 | export const NumberType = { 1349 | typeName: 'number', 1350 | validate: (t: any): t is number => typeof t === 'number' 1351 | } 1352 | 1353 | export const BoolType = { 1354 | typeName: 'boolean', 1355 | validate: (t: any): t is boolean => typeof t === 'boolean' 1356 | } 1357 | 1358 | export const ObjectType = { 1359 | typeName: 'object', 1360 | validate: (t: any): t is Object => typeof t === 'object' 1361 | } 1362 | 1363 | export const ArrayType = { 1364 | typeName: 'object', 1365 | validate: (t: any): t is Array => Array.isArray(t) 1366 | } 1367 | 1368 | export function _newTable(existingVar?: Variable, isDerived?: boolean, schema?: TableSchema): Table { 1369 | const variable = existingVar || new Variable() 1370 | const table = (keymap: any) => { 1371 | // We are in an implication clause here, so the keymap is actually datum 1372 | if (!!queryContext.implicationState) { 1373 | const datum: T = keymap 1374 | if (queryContext.implicationState.isRetraction) { 1375 | variable.retract(datum) 1376 | } else { 1377 | variable.assert(datum) 1378 | } 1379 | return 1380 | } 1381 | 1382 | const constants = fromEntries(Object.entries(keymap).filter(([k, v]: any) => { 1383 | if (typeof v === 'object' && v && 'ns' in v && v.ns === FreeVarNS) { 1384 | return false 1385 | } 1386 | return true 1387 | })) 1388 | 1389 | // If there's a schema, use that. Otherwise attempt to infer from the variable 1390 | const inferredKeys = schema ? 
Object.keys(schema) : variable.keys() 1391 | const remapKeys = fromEntries(Object.entries(keymap).map(([k, v]: any) => { 1392 | if (typeof v === 'object' && v && 'ns' in v && v.ns === FreeVarNS) { 1393 | return [k, v.k] 1394 | } 1395 | return [k, autoKey(k)] 1396 | })) 1397 | // fill in missing keys 1398 | inferredKeys.forEach(k => { 1399 | if (!(k in remapKeys)) { 1400 | // @ts-ignore 1401 | remapKeys[k] = autoKey(k) 1402 | } 1403 | }) 1404 | 1405 | queryContext.addVariable(variable, remapKeys, constants) 1406 | } 1407 | 1408 | const antiQuery = (keymap: any) => { 1409 | // Adds the variable 1410 | table(keymap) 1411 | queryContext.antiVariablesIndices.add(queryContext.variables.length - 1) 1412 | } 1413 | 1414 | table.not = antiQuery 1415 | 1416 | table._innerVar = variable 1417 | 1418 | if (!isDerived) { 1419 | table.assert = (args: T) => { 1420 | // Trick where we reverse the semantics if we are in a retraction 1421 | if (queryContext.implicationState?.isRetraction) { 1422 | variable.retract(args) 1423 | } else { 1424 | variable.assert(args) 1425 | } 1426 | } 1427 | table.retract = (args: T) => { 1428 | // Same trick as above 1429 | if (queryContext.implicationState?.isRetraction) { 1430 | variable.assert(args) 1431 | } else { 1432 | variable.retract(args) 1433 | } 1434 | } 1435 | 1436 | // Sugar to find a value and retract/assert an updated value 1437 | table.update = (lookupArgs: Partial, mergeWith: Partial) => { 1438 | const variableClone = variable.cloneAndTrack() 1439 | // Move everything to stable relation 1440 | while (variableClone.changed()) { } 1441 | 1442 | // Look for items in stable first: 1443 | const lookupKs = Object.keys(lookupArgs) 1444 | // Look for a relation that has a compatible index 1445 | const findCompatibleIndexedRelation = (relation: RelationIndex) => { 1446 | return lookupKs.every((k, i) => { 1447 | return k === relation.keyOrdering[i] 1448 | }) 1449 | } 1450 | // Look for a pre-existing index pattern 1451 | let indexedRelation = variableClone.stable.relations.find(findCompatibleIndexedRelation) 1452 | 1453 | // No pre-existing index, so we'll just make one 1454 | if (indexedRelation === undefined) { 1455 | const index = variableClone.stable.relations[0]?.keyOrdering 1456 | if (index === undefined) { 1457 | throw new Error("Tried to update a table with no data!") 1458 | } 1459 | const keySet = new Set(index) as Set 1460 | lookupKs.forEach(k => keySet.delete(k)) 1461 | const newKeyOrdering = lookupKs.concat([...keySet]) 1462 | indexedRelation = variableClone.stable.indexBy(newKeyOrdering as any) as any 1463 | } 1464 | 1465 | if (indexedRelation === undefined) { 1466 | throw new Error("Shouldn't happen") 1467 | } 1468 | 1469 | const retractions = [] 1470 | const assertions = [] 1471 | 1472 | const unconstrainedParts = new Array(indexedRelation.keyOrdering.length - lookupKs.length).fill(Unconstrained) 1473 | const lookupVals = lookupKs.map(k => lookupArgs[k as keyof T]).concat(unconstrainedParts) 1474 | const startIdx = gallop(indexedRelation.elements as any, element => { 1475 | return sortTuple(element, lookupVals) === -1 1476 | }) 1477 | const endIdx = gallop(indexedRelation.elements as any, element => { 1478 | return sortTuple(element, lookupVals) === 0 1479 | }, startIdx) 1480 | 1481 | if (startIdx === indexedRelation.elements.length || endIdx === startIdx) { 1482 | throw new Error("No data found to update!") 1483 | } 1484 | 1485 | const keyOrdering = indexedRelation.keyOrdering 1486 | for (let i = startIdx; i < endIdx; i++) { 1487 | const element =
indexedRelation.elements[i] 1488 | const asObj = element.reduce((acc, v, i) => { 1489 | // @ts-ignore 1490 | acc[keyOrdering[i]] = v 1491 | return acc 1492 | }, {}) 1493 | retractions.push(asObj) 1494 | assertions.push({ ...asObj, ...mergeWith }) 1495 | } 1496 | 1497 | for (let i = 0; i < retractions.length; i++) { 1498 | variable.retract(retractions[i] as T) 1499 | variable.assert(assertions[i] as T) 1500 | } 1501 | } 1502 | } 1503 | 1504 | // table.clone = () => variable.clone() 1505 | table.view = (): View => { 1506 | // It would be nice to use something like a weakref here 1507 | // TODO: This is a potential source of memory leaks since the view can never 1508 | // be reclaimed unless the table also gets reclaimed. 1509 | let cloned = variable.cloneAndTrack() as unknown as View 1510 | const copyFn = () => { const cloned = variable.cloneAndTrack() as unknown as View; cloned.copy = copyFn; return cloned } 1511 | cloned.copy = copyFn 1512 | return cloned 1513 | } 1514 | 1515 | table.viewExt = (): ViewExt => { 1516 | return new ViewExtImpl(table.view()) 1517 | } 1518 | 1519 | // queryableVariable.changed = variable.changed 1520 | 1521 | table.toString = () => variable.toString() 1522 | return table 1523 | } 1524 | 1525 | const FreeVarNS = Symbol("FreeVariable") 1526 | const FreeVarGenerator: any = new Proxy({}, { 1527 | get: function (obj, prop) { 1528 | return { ns: FreeVarNS, k: prop } 1529 | } 1530 | }); 1531 | 1532 | export type SchemaOf = V extends Table ? T : never 1533 | 1534 | export const EmptyObj = {} 1535 | 1536 | export type QueryFn = (freeVars: Out) => void 1537 | export function query(queryFn: QueryFn): MaterializedTable { 1538 | queryContext = new QueryContext() 1539 | // @ts-ignore – a trick 1540 | queryFn(FreeVarGenerator) 1541 | const savedQueryContext = queryContext 1542 | queryContext = new QueryContext() 1543 | // Split variables into parts 1544 | const parts: any = [[]] 1545 | let keySetSeen = new Set(Object.values(savedQueryContext.remapKeys[0])) 1546 | // Clone the variables so each query has it's own notions of stable/recent 1547 | let queryVariables = savedQueryContext.variables.map((v: Variable, i: number) => { 1548 | const cloned = v.cloneAndTrack() 1549 | const isAnti = savedQueryContext.antiVariablesIndices.has(i) 1550 | 1551 | if (isAnti) { 1552 | cloned.meta.isAnti = true 1553 | cloned.onAssert(() => { 1554 | // Move everything to the stable relation 1555 | while (cloned.changed()) { } 1556 | }) 1557 | // Move everything to the stable relation 1558 | while (cloned.changed()) { } 1559 | } 1560 | 1561 | return cloned 1562 | }) 1563 | 1564 | savedQueryContext.remapKeys.forEach((remapKeys, i) => { 1565 | if (i === 0) { 1566 | return parts[0] = [[queryVariables[0]], [remapKeys], [savedQueryContext.constants[0]]] 1567 | } 1568 | 1569 | const lastPart = parts[parts.length - 1] 1570 | const vals = Object.values(remapKeys) 1571 | if (vals.some(k => keySetSeen.has(k))) { 1572 | lastPart[0].push(queryVariables[i]) 1573 | lastPart[1].push(savedQueryContext.remapKeys[i]) 1574 | lastPart[2].push(savedQueryContext.constants[i]) 1575 | vals.forEach(k => { keySetSeen.add(k) }) 1576 | } else { 1577 | keySetSeen = new Set(vals) 1578 | const newPart = [ 1579 | [queryVariables[i]], 1580 | [savedQueryContext.remapKeys[i]], 1581 | [savedQueryContext.constants[i]] 1582 | ] 1583 | parts.push(newPart) 1584 | } 1585 | }) 1586 | 1587 | // @ts-ignore 1588 | const variableParts = parts.map(([variables, remapKeys, constants]) => { 1589 | const outVar = new Variable() 1590 | const 
runInnerQuery = () => { 1591 | variableJoinHelper((join, isRetraction) => { isRetraction ? outVar.retract(join) : outVar.assert(join) }, variables, remapKeys, constants) 1592 | } 1593 | return [outVar, runInnerQuery] 1594 | }) 1595 | 1596 | const outVar = new Variable() 1597 | // @ts-ignore 1598 | const joinFn = (join, isRetraction) => { 1599 | isRetraction ? outVar.retract(join) : outVar.assert(join) 1600 | } 1601 | const innerVars = variableParts.map(([v]: any) => v) 1602 | 1603 | const constantParts = variableParts.map(() => EmptyObj) 1604 | 1605 | const runQuery = () => { 1606 | variableParts.forEach(([_v, runInnerQuery]: any) => { 1607 | runInnerQuery() 1608 | }) 1609 | 1610 | // If some output from the parts is empty, the whole query will be empty 1611 | if (variableParts.some(([v]: [Variable]) => v.isEmpty())) { 1612 | return 1613 | } 1614 | 1615 | // TODO this seems buggy. Queries should work even if there isn't data available 1616 | const remapKeysPart = variableParts.map(([v, _runQuery]: any) => fromEntries(v.keys().map((k: any) => [k, k]))) 1617 | 1618 | variableJoinHelper(joinFn, innerVars, remapKeysPart, constantParts) 1619 | } 1620 | 1621 | 1622 | let dependencyChangeSubscribers: Array<() => void> = [] 1623 | queryVariables.forEach(v => { 1624 | v.onAssert(() => { 1625 | dependencyChangeSubscribers.forEach(f => f()) 1626 | }) 1627 | }) 1628 | const onDependencyChange = (subscriber: () => void) => { 1629 | dependencyChangeSubscribers.push(subscriber) 1630 | return () => { 1631 | dependencyChangeSubscribers = dependencyChangeSubscribers.filter(f => f !== subscriber) 1632 | } 1633 | } 1634 | const outMaterializedTable = newMaterializedTable(outVar, runQuery, onDependencyChange) 1635 | 1636 | // Run query once 1637 | outMaterializedTable.runQuery() 1638 | 1639 | // @ts-ignore 1640 | outMaterializedTable.queryVariables = queryVariables 1641 | 1642 | outVar.setName("Query") 1643 | // @ts-ignore 1644 | outMaterializedTable.toString = () => outVar.toString() 1645 | 1646 | return outMaterializedTable 1647 | } 1648 | 1649 | export function intoTable(data: Array) { 1650 | const table = _newTable(undefined, false, undefined) 1651 | for (const datum of data) { 1652 | table.assert(datum) 1653 | } 1654 | return table 1655 | } -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | export { 2 | Added, 3 | ArrayType, 4 | BoolType, 5 | MaterializedTable, 6 | Modified, 7 | NumberType, 8 | ObjectType, 9 | QueryFn, 10 | RecentDatum, 11 | Removed, 12 | StringType, 13 | Table, 14 | View, 15 | intoTable, 16 | newTable, 17 | query, 18 | } from './datalog' 19 | 20 | export { 21 | Impl, 22 | Indexed, 23 | IndexedImpl, 24 | IndexedViewExt, 25 | SingleItemView, 26 | ViewExt, 27 | } from './view-ext' -------------------------------------------------------------------------------- /src/view-ext.test.ts: -------------------------------------------------------------------------------- 1 | import * as datalog from './datalog' 2 | import { Added, Modified, Removed } from './datalog' 3 | import * as ViewExt from './view-ext' 4 | 5 | describe("Map", () => { 6 | test("maps simple values", () => { 7 | const table = datalog.intoTable([ 8 | { n: 0 }, 9 | { n: 1 }, 10 | { n: 2 }, 11 | ]) 12 | const view = table.view() 13 | const viewExt = new ViewExt.Impl(view) 14 | const mappedView = viewExt.map(({ n }) => ({ n: n + 1 })) 15 | expect(mappedView.readAllData()).toEqual([ 16 | { n: 1 }, 17 | { n: 
2 }, 18 | { n: 3 }, 19 | ]) 20 | 21 | table.retract({ n: 2 }) 22 | expect(mappedView.readAllData()).toEqual([ 23 | { n: 1 }, 24 | { n: 2 }, 25 | ]) 26 | }) 27 | 28 | test("dedups in map", () => { 29 | const table = datalog.intoTable([ 30 | { a: 0, b: 1 }, 31 | { a: 1, b: 1 }, 32 | { a: 1, b: 2 }, 33 | ]) 34 | const view = table.view() 35 | const viewExt = new ViewExt.Impl(view) 36 | const mappedView = viewExt.map(({ a }) => ({ a })) 37 | while (mappedView.recentData()) { } 38 | expect(mappedView.readAllData()).toEqual([ 39 | { a: 0 }, 40 | { a: 1 }, 41 | ]) 42 | 43 | table.retract({ a: 1, b: 2 }) 44 | expect(mappedView.readAllData()).toEqual([ 45 | { a: 0 }, 46 | { a: 1 }, 47 | ]) 48 | }) 49 | }) 50 | 51 | describe("Reduce", () => { 52 | test("Reduces simple cases", () => { 53 | const table = datalog.intoTable([ 54 | { n: 0 }, 55 | { n: 1 }, 56 | { n: 2 }, 57 | ]) 58 | const view = table.view() 59 | const viewExt = new ViewExt.Impl(view) 60 | const reducedView = viewExt.reduce((acc, { kind, datum: { n } }) => { 61 | switch (kind) { 62 | case datalog.Added: 63 | return acc + n 64 | case datalog.Removed: 65 | return acc - n 66 | case datalog.Modified: 67 | throw new Error("Wasn't expected a modification") 68 | } 69 | }, 0) 70 | 71 | expect(reducedView.readAllData()).toEqual([ 72 | 3, 73 | ]) 74 | 75 | table.retract({ n: 2 }) 76 | expect(reducedView.recentData()).toEqual([ 77 | { kind: datalog.Modified, datum: 1, oldDatum: 3 } 78 | ]) 79 | }) 80 | }) 81 | 82 | 83 | describe("SortBy", () => { 84 | test("Refresh datalog.gallop memory", () => { 85 | expect(datalog.gallop([0, 1, 2, 4], (n: number) => n < 3)).toBe(3) 86 | expect(datalog.gallop([0, 1, 2, 4], (n: number) => n < -1)).toBe(0) 87 | expect(datalog.gallop([0, 1, 2, 4], (n: number) => n < 5)).toBe(4) 88 | expect(datalog.gallop([0, 1, 2, 4], (n: number) => n < 2)).toBe(2) 89 | }) 90 | 91 | test("Sort tuple on Objects", () => { 92 | expect(datalog.sortTuple({ a: 1 }, { a: 1, b: 2 })).toBe(0) 93 | expect(datalog.sortTuple({ a: 1 }, { a: 2, b: 2 })).toBe(0) 94 | }) 95 | 96 | test("Simple Sort", () => { 97 | const table = datalog.intoTable([ 98 | { n: 1 }, 99 | { n: 6 }, 100 | { n: 2 }, 101 | { n: 5 }, 102 | { n: 0 }, 103 | ]) 104 | 105 | const view = table.view() 106 | const viewExt = new ViewExt.Impl(view) 107 | const sortedView = viewExt.sortBy(({ n: a }, { n: b }) => a < b ? -1 : a > b ? 1 : 0) 108 | 109 | expect(sortedView.readAllData()).toEqual([ 110 | { datum: { n: 0 }, index: 0 }, 111 | { datum: { n: 1 }, index: 1 }, 112 | { datum: { n: 2 }, index: 2 }, 113 | { datum: { n: 5 }, index: 3 }, 114 | { datum: { n: 6 }, index: 4 }, 115 | ]) 116 | 117 | table.assert({ n: 3 }) 118 | expect(sortedView.recentData()).toEqual( 119 | [ 120 | { kind: Added, datum: { index: 3, datum: { n: 3 } } }, 121 | ] 122 | ) 123 | 124 | table.retract({ n: 2 }) 125 | expect(sortedView.recentData()).toEqual( 126 | [ 127 | { kind: Removed, datum: { index: 2, datum: { n: 2 } } }, 128 | ] 129 | ) 130 | }) 131 | 132 | test("Take from sorted", () => { 133 | const table = datalog.intoTable([ 134 | { n: 1 }, 135 | { n: 6 }, 136 | { n: 2 }, 137 | { n: 5 }, 138 | { n: 0 }, 139 | ]) 140 | 141 | const view = table.view() 142 | const viewExt = new ViewExt.Impl(view) 143 | const sortedView = viewExt.sortBy(({ n: a }, { n: b }) => a < b ? -1 : a > b ? 
1 : 0) 144 | const takedView = sortedView.take(3) 145 | 146 | expect(takedView.readAllData()).toEqual([ 147 | { datum: { n: 0 }, index: 0 }, 148 | { datum: { n: 1 }, index: 1 }, 149 | { datum: { n: 2 }, index: 2 }, 150 | ]) 151 | 152 | const dropedView = sortedView.drop(2) 153 | 154 | expect(dropedView.readAllData()).toEqual([ 155 | { datum: { n: 2 }, index: 2 }, 156 | { datum: { n: 5 }, index: 3 }, 157 | { datum: { n: 6 }, index: 4 }, 158 | ]) 159 | 160 | const dropedViewAgain = dropedView.drop(1) 161 | 162 | expect(dropedViewAgain.readAllData()).toEqual([ 163 | { datum: { n: 5 }, index: 3 }, 164 | { datum: { n: 6 }, index: 4 }, 165 | ]) 166 | 167 | const takedView2 = dropedViewAgain.take(1) 168 | 169 | expect(takedView2.readAllData()).toEqual([ 170 | { datum: { n: 5 }, index: 3 }, 171 | ]) 172 | const dropped3 = takedView.drop(1) 173 | 174 | expect(dropped3.readAllData()).toEqual([ 175 | { datum: { n: 1 }, index: 1 }, 176 | { datum: { n: 2 }, index: 2 }, 177 | ]) 178 | 179 | table.retract({ n: 2 }) 180 | 181 | expect(dropped3.readAllData()).toEqual([ 182 | { datum: { n: 1 }, index: 1 }, 183 | { datum: { n: 5 }, index: 2 }, 184 | ]) 185 | }) 186 | test("OrderBy", () => { 187 | const table = datalog.intoTable([ 188 | { n: 1 }, 189 | { n: 6 }, 190 | { n: 2 }, 191 | { n: 5 }, 192 | { n: 0 }, 193 | ]) 194 | 195 | const view = table.view() 196 | const viewExt = new ViewExt.Impl(view) 197 | const sortedView = viewExt.orderBy('n', true) 198 | 199 | expect(sortedView.readAllData()).toEqual([ 200 | { datum: { n: 0 }, index: 0 }, 201 | { datum: { n: 1 }, index: 1 }, 202 | { datum: { n: 2 }, index: 2 }, 203 | { datum: { n: 5 }, index: 3 }, 204 | { datum: { n: 6 }, index: 4 }, 205 | ]) 206 | 207 | const sortedOppositeView = viewExt.orderBy('n', false) 208 | 209 | expect(sortedOppositeView.readAllData()).toEqual([ 210 | { datum: { n: 6 }, index: 0 }, 211 | { datum: { n: 5 }, index: 1 }, 212 | { datum: { n: 2 }, index: 2 }, 213 | { datum: { n: 1 }, index: 3 }, 214 | { datum: { n: 0 }, index: 4 }, 215 | ]) 216 | }) 217 | }) -------------------------------------------------------------------------------- /src/view-ext.ts: -------------------------------------------------------------------------------- 1 | import { View, RecentDatum } from './datalog' 2 | import * as datalog from './datalog' 3 | 4 | /** 5 | * A function that does some effect.
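 * For example, an EffectFn might append a DOM node (or log a line) for each Added datum.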
It's up to you to undo that effect when you 6 | * get a Removed kind of RecentDatum 7 | */ 8 | type EffectFn = (t: RecentDatum) => void 9 | 10 | export type Indexed = { index: number, datum: T } 11 | 12 | export interface ViewExt extends datalog.View { 13 | map(f: (t: T) => O): ViewExt; 14 | mapEffect>(f: F): SingleItemView<{}>; 15 | 16 | reduce(reducer: (accumulator: Acc, recentVal: datalog.RecentDatum) => Acc, initialVal: Acc): ViewExt 17 | 18 | sortBy(sortFn: (a: T, b: T) => -1 | 0 | 1): IndexedViewExt 19 | orderBy(key: keyof T, ascending?: boolean): IndexedViewExt 20 | } 21 | 22 | export interface IndexedViewExt extends ViewExt> { 23 | take(n: number): IndexedViewExt & ViewExt> 24 | drop(n: number): IndexedViewExt & ViewExt> 25 | mapIndexed(f: (t: T) => O): IndexedViewExt 26 | } 27 | 28 | export class SingleItemView implements datalog.View { 29 | lastValSincePoll: T | null 30 | currentVal: T 31 | subscribers: Array<() => void> = [] 32 | newDatumSubscribers: Array<(d: RecentDatum) => void> = [] 33 | modifiedSinceLastPolled = true 34 | 35 | constructor(initialVal: T) { 36 | this.currentVal = initialVal 37 | this.lastValSincePoll = null 38 | } 39 | 40 | copy(): View { 41 | const copied = new SingleItemView(this.currentVal) 42 | copied.lastValSincePoll = this.lastValSincePoll 43 | 44 | return copied 45 | } 46 | 47 | _setValue(v: T) { 48 | this.modifiedSinceLastPolled = true 49 | this.currentVal = v 50 | this.newDatumSubscribers.forEach(subscriber => subscriber({ kind: datalog.Modified, datum: v, oldDatum: this.lastValSincePoll! })) 51 | this.subscribers.map(s => s()) 52 | } 53 | 54 | recentData(): null | Array> { 55 | const lastVal = this.lastValSincePoll 56 | const modifiedSinceLastPolled = this.modifiedSinceLastPolled 57 | // Reset vals 58 | this.modifiedSinceLastPolled = false 59 | this.lastValSincePoll = this.currentVal 60 | if (lastVal === null) { 61 | return [{ kind: datalog.Added, datum: this.currentVal }] 62 | } 63 | 64 | return modifiedSinceLastPolled ? 
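// Report a Modified datum only if _setValue has run since the last poll; otherwise there is nothing new.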
[{ kind: datalog.Modified, datum: this.currentVal, oldDatum: lastVal }] : null 65 | } 66 | 67 | readAllData(): Array { 68 | this.modifiedSinceLastPolled = false 69 | this.lastValSincePoll = this.currentVal 70 | return [this.currentVal] 71 | } 72 | 73 | onChange(subscriber: () => void): () => void { 74 | this.subscribers.push(subscriber) 75 | if (this.modifiedSinceLastPolled) { 76 | subscriber() 77 | } 78 | return () => this.subscribers = this.subscribers.filter(s => s !== subscriber) 79 | } 80 | 81 | onNewDatum(subscriber: (d: RecentDatum) => void): () => void { 82 | this.newDatumSubscribers.push(subscriber) 83 | return () => this.newDatumSubscribers = this.newDatumSubscribers.filter(s => s !== subscriber) 84 | } 85 | } 86 | 87 | function isPlainObj(o: any): boolean { 88 | return typeof o == 'object' && o.constructor == Object; 89 | } 90 | 91 | class MappedIndexedView implements View> { 92 | innerView: View> 93 | // innerArray: Array = [] 94 | changes: Array>> = [] 95 | subscribers: Array<() => void> = [] 96 | newDatumSubscribers: Array<(d: RecentDatum>) => void> = [] 97 | mapFn: (t: T) => O 98 | 99 | constructor(fromView: View>, mapFn: (t: T) => O) { 100 | this.innerView = fromView 101 | this.mapFn = mapFn 102 | // this.innerArray = this.innerView.readAllData() 103 | this.changes = this.innerView.readAllData().map((d) => ({ kind: datalog.Added, datum: { index: d.index, datum: mapFn(d.datum) } })) 104 | 105 | const onChange = (recentDatum: RecentDatum>) => { 106 | const scopeChanges: Array>> = [] 107 | 108 | switch (recentDatum.kind) { 109 | case datalog.Added: 110 | scopeChanges.push({ kind: datalog.Added, datum: { index: recentDatum.datum.index, datum: mapFn(recentDatum.datum.datum) } }) 111 | break; 112 | case datalog.Removed: 113 | scopeChanges.push({ kind: datalog.Removed, datum: { index: recentDatum.datum.index, datum: mapFn(recentDatum.datum.datum) } }) 114 | break; 115 | case datalog.Modified: 116 | scopeChanges.push({ kind: datalog.Modified, datum: { index: recentDatum.datum.index, datum: mapFn(recentDatum.datum.datum) }, oldDatum: { index: recentDatum.oldDatum.index, datum: mapFn(recentDatum.oldDatum.datum) } }) 117 | break; 118 | } 119 | 120 | this.changes = this.changes.concat(scopeChanges) 121 | this.newDatumSubscribers.forEach(subscriber => { 122 | scopeChanges.forEach(change => subscriber(change)) 123 | }) 124 | this.subscribers.forEach(subscriber => { 125 | subscriber() 126 | }) 127 | } 128 | 129 | this.innerView.onChange(() => { 130 | const recentData = this.innerView.recentData() 131 | recentData?.map(onChange) 132 | }) 133 | } 134 | 135 | recentData() { 136 | if (this.changes.length > 0) { 137 | const recentChanges = this.changes 138 | this.changes = [] 139 | return recentChanges 140 | } 141 | return null 142 | } 143 | 144 | readAllData() { 145 | this.changes = [] 146 | return this.innerView.readAllData().map(({ index, datum }) => ({ index, datum: this.mapFn(datum) })) 147 | } 148 | 149 | onChange(f: () => void) { 150 | this.subscribers.push(f) 151 | return () => { this.subscribers = this.subscribers.filter(s => s !== f) } 152 | } 153 | 154 | onNewDatum(f: (d: RecentDatum>) => void) { 155 | this.newDatumSubscribers.push(f) 156 | return () => { 157 | this.newDatumSubscribers = this.newDatumSubscribers.filter(subscriber => { 158 | subscriber !== f 159 | }) 160 | } 161 | } 162 | 163 | copy() { 164 | const copied = new MappedIndexedView(this.innerView, this.mapFn) 165 | return copied 166 | } 167 | } 168 | 169 | 170 | 171 | class SortedView implements View> { 172 | innerView: 
View 173 | sorted: Array = [] 174 | changes: Array>> = [] 175 | subscribers: Array<() => void> = [] 176 | newDatumSubscribers: Array<(d: RecentDatum>) => void> = [] 177 | sortFn: (a: T, b: T) => -1 | 0 | 1 178 | 179 | constructor(fromView: View, sortFn: (a: T, b: T) => -1 | 0 | 1) { 180 | this.innerView = fromView 181 | this.sorted = this.innerView.readAllData().sort(sortFn) 182 | this.changes = this.sorted.map((datum, i) => ({ kind: datalog.Added, datum: { index: i, datum } })) 183 | this.sortFn = sortFn 184 | 185 | const onChange = ({ kind, datum }: RecentDatum) => { 186 | const scopeChanges: Array>> = [] 187 | // const stableRelation = innerVar.stable.relations[0] 188 | // if (!stableRelation || stableRelation.elements.length === 0) { 189 | if (this.sorted.length === 0) { 190 | if (kind !== datalog.Added) { 191 | throw new Error("Unexpected! We don't have anything to remove or modify") 192 | } 193 | 194 | this.changes.push({ kind: datalog.Added, datum: { index: 0, datum } }) 195 | this.sorted.push(datum) 196 | return 197 | } 198 | 199 | switch (kind) { 200 | case datalog.Added: 201 | // A new datum was added, let's find out where it goes in the list 202 | // const positionToInsert = datalog.gallop(elements, (tuple) => sortFn(toObj(tuple), datum) === -1) 203 | const positionToInsert = datalog.gallop(this.sorted, (d) => sortFn(d, datum) === -1) 204 | this.sorted.splice(positionToInsert, 0, datum) 205 | scopeChanges.push({ kind: datalog.Added, datum: { index: positionToInsert, datum } }) 206 | 207 | // This will push Modified changes on index positions, but it's not 208 | // needed. Since callers will end up with the same thing if they just 209 | // follow the Added/Removed 210 | // for (let i = positionToInsert + 1; i < this.sorted.length; i++) { 211 | // scopeChanges.push({ 212 | // kind: datalog.Modified, 213 | // datum: { index: i, datum: this.sorted[i] }, 214 | // oldDatum: { index: i - 1, datum: this.sorted[i] } 215 | // }) 216 | // } 217 | break; 218 | case datalog.Removed: 219 | // A new datum was removed, let's find out where it was in the list 220 | const positionToRemove = datalog.gallop(this.sorted, (d) => sortFn(d, datum) === -1) 221 | // Check if the item is there 222 | if (datalog.sortTuple(this.sorted[positionToRemove], datum) !== 0) { 223 | throw new Error("Tried to remove a value that doesn't exist!!") 224 | } 225 | // Remove from our array 226 | this.sorted.splice(positionToRemove, 1) 227 | scopeChanges.push({ kind: datalog.Removed, datum: { index: positionToRemove, datum } }) 228 | 229 | // This will push Modified changes on index positions, but it's not 230 | // needed. Since callers will end up with the same thing if they just 231 | // follow the Added/Removed 232 | // for (let i = positionToRemove; i < this.sorted.length; i++) { 233 | // scopeChanges.push({ 234 | // kind: datalog.Modified, 235 | // datum: { index: i, datum: this.sorted[i] }, 236 | // oldDatum: { index: i + 1, datum: this.sorted[i] } 237 | // }) 238 | // } 239 | break; 240 | case datalog.Modified: 241 | throw new Error("Not implemented! – Modification datum on indexed View") 242 | // Tricky case, we have to see where the item was, and where it should 243 | // go. I don't think it's possible unless we add a new field to 244 | // modified... 
(old value) 245 | } 246 | 247 | this.changes = this.changes.concat(scopeChanges) 248 | this.newDatumSubscribers.forEach(subscriber => { 249 | scopeChanges.forEach(change => subscriber(change)) 250 | }) 251 | this.subscribers.forEach(subscriber => { 252 | subscriber() 253 | }) 254 | } 255 | 256 | this.innerView.onChange(() => { 257 | // this.innerView.recentData()?.map(onChange) 258 | const recentData = this.innerView.recentData() 259 | recentData?.map(onChange) 260 | }) 261 | // this.innerView.recentData()?.map(onChange) 262 | 263 | 264 | 265 | 266 | } 267 | recentData() { 268 | if (this.changes.length > 0) { 269 | const recentChanges = this.changes 270 | this.changes = [] 271 | return recentChanges 272 | } 273 | return null 274 | } 275 | 276 | readAllData() { 277 | this.changes = [] 278 | return this.sorted.map((datum, index) => ({ index, datum })) 279 | } 280 | 281 | onChange(f: () => void) { 282 | this.subscribers.push(f) 283 | return () => { this.subscribers = this.subscribers.filter(s => s !== f) } 284 | } 285 | 286 | onNewDatum(f: (d: RecentDatum>) => void) { 287 | this.newDatumSubscribers.push(f) 288 | return () => { 289 | this.newDatumSubscribers = this.newDatumSubscribers.filter(subscriber => { 290 | subscriber !== f 291 | }) 292 | } 293 | } 294 | 295 | copy() { 296 | const copied = new SortedView(this.innerView, this.sortFn) 297 | return copied 298 | } 299 | } 300 | 301 | 302 | export class Impl implements ViewExt { 303 | innerView: View 304 | constructor(v: View) { 305 | this.innerView = v 306 | } 307 | recentData(): null | Array> { 308 | return this.innerView.recentData() 309 | } 310 | readAllData(): Array { 311 | return this.innerView.readAllData() 312 | } 313 | 314 | copy(): View { 315 | return this.innerView.copy() 316 | } 317 | 318 | onChange(subscriber: () => void): () => void { 319 | return this.innerView.onChange(subscriber) 320 | } 321 | 322 | onNewDatum(subscriber: (d: RecentDatum) => void): () => void { 323 | return this.innerView.onNewDatum(subscriber) 324 | } 325 | 326 | map(f: (t: T) => O): ViewExt { 327 | const out = datalog._newTable() 328 | const onChange = ({ kind, datum }: RecentDatum) => { 329 | if (kind === datalog.Added) { 330 | out.assert(f(datum)) 331 | } else { 332 | out.retract(f(datum)) 333 | } 334 | } 335 | this.innerView.onChange(() => { 336 | this.innerView.recentData()?.map(onChange) 337 | }) 338 | // this.innerView.recentData()?.map(onChange) 339 | 340 | return new Impl(out.view()) 341 | } 342 | 343 | mapEffect>(f: F): SingleItemView<{}> { 344 | const out = new SingleItemView({}) 345 | const onChange = (r: RecentDatum) => { 346 | f(r) 347 | } 348 | this.innerView.onChange(() => { 349 | this.innerView.recentData()?.map(onChange) 350 | out._setValue({}) 351 | }) 352 | // this.innerView.recentData()?.map(onChange) 353 | out._setValue({}) 354 | return out 355 | } 356 | 357 | reduce(reducer: (accumulator: Acc, recentVal: datalog.RecentDatum) => Acc, initalVal: Acc): ViewExt { 358 | let acc = new SingleItemView(initalVal) 359 | const onChange = (r: RecentDatum) => { 360 | const lastAcc = acc.currentVal 361 | const nextVal = reducer(acc.currentVal, r) 362 | if (lastAcc !== nextVal) { 363 | acc._setValue(nextVal) 364 | } 365 | } 366 | this.innerView.onChange(() => { 367 | this.innerView.recentData()?.map(onChange) 368 | }) 369 | // this.innerView.recentData()?.map(onChange) 370 | return new Impl(acc) 371 | } 372 | 373 | sortBy(sortFn: (a: T, b: T) => -1 | 0 | 1): IndexedViewExt { 374 | return new IndexedImpl(new SortedView(this.innerView, sortFn)) 375 | } 
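  // A minimal usage sketch, grounded in the tests (assuming rows shaped like { n: number }):
  //   const ext = new Impl(table.view())
  //   const byN = ext.orderBy('n')   // ascending by default
  //   const top3 = byN.take(3)       // IndexedViewExt limited to indices 0..2
  //   top3.readAllData()             // => [{ index: 0, datum: { n: ... } }, ...]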
376 | 377 | orderBy(key: keyof T, ascending: boolean = true): IndexedViewExt { 378 | const sortFn = (a: T, b: T): -1 | 0 | 1 => { 379 | const aEl = a[key] 380 | const bEl = b[key] 381 | const result = (aEl < bEl ? -1 : aEl > bEl ? 1 : 0) 382 | return result * (ascending ? 1 : -1) as -1 | 0 | 1 383 | } 384 | return this.sortBy(sortFn) 385 | } 386 | } 387 | 388 | export class IndexedImpl implements IndexedViewExt { 389 | innerView: ViewExt> 390 | filters: { take?: number, drop?: number } = {} 391 | constructor(fromView: View>) { 392 | this.innerView = new Impl(fromView) 393 | } 394 | 395 | take(n: number): IndexedViewExt { 396 | const nextView = this.copy() 397 | nextView.filters.take = n 398 | nextView.filters.drop = this.filters.drop 399 | return nextView 400 | } 401 | 402 | drop(n: number): IndexedViewExt { 403 | const nextView = this.copy() 404 | nextView.filters.drop = (this.filters.drop || 0) + n 405 | nextView.filters.take = this.filters.take ? Math.max(this.filters.take - n, 0) : this.filters.take 406 | return nextView 407 | } 408 | 409 | private passThruDatum(datum: Indexed): boolean { 410 | if (!this.filters.take && !this.filters.drop) { 411 | return true 412 | } 413 | 414 | return (datum.index >= (this.filters.drop || 0) && datum.index < ((this.filters.take || Infinity) + (this.filters.drop || 0))) 415 | } 416 | 417 | recentData(): null | Array>> { 418 | return this.innerView.recentData()?.filter((recentDatum) => { 419 | if (recentDatum.kind === datalog.Modified) { 420 | return this.passThruDatum(recentDatum.datum) || this.passThruDatum(recentDatum.oldDatum) 421 | } 422 | return this.passThruDatum(recentDatum.datum) 423 | }) || null 424 | } 425 | 426 | readAllData(): Array> { 427 | return this.innerView.readAllData().filter((datum) => this.passThruDatum(datum)) 428 | } 429 | // Returns unsubscribe fn 430 | onChange(subscriber: () => void): () => void { 431 | return this.innerView.onChange(subscriber) 432 | } 433 | 434 | // Returns unsubscribe fn 435 | onNewDatum(subscriber: (d: RecentDatum>) => void): () => void { 436 | return this.innerView.onNewDatum(subscriber) 437 | } 438 | 439 | copy() { 440 | return new IndexedImpl(this.innerView.copy()) 441 | } 442 | 443 | map(f: (t: Indexed) => O): ViewExt { 444 | return this.innerView.map(f) 445 | } 446 | 447 | mapIndexed(f: (t: T) => O): IndexedViewExt { 448 | return new IndexedImpl(new MappedIndexedView(this.innerView, f)) 449 | } 450 | 451 | mapEffect>>(f: F): SingleItemView<{}> { 452 | return this.innerView.mapEffect(f) 453 | } 454 | 455 | reduce(reducer: (accumulator: Acc, recentVal: datalog.RecentDatum>) => Acc, initialVal: Acc): ViewExt { 456 | return this.innerView.reduce(reducer, initialVal) 457 | } 458 | 459 | sortBy(sortFn: (a: Indexed, b: Indexed) => -1 | 0 | 1): IndexedViewExt> { 460 | return this.innerView.sortBy(sortFn) 461 | } 462 | 463 | orderBy(key: keyof Indexed, ascending: boolean = true): IndexedViewExt> { 464 | return this.innerView.orderBy(key, ascending) 465 | } 466 | } -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | // /* Basic Options */ 4 | // "incremental": true, /* Enable incremental compilation */ 5 | "target": "es5", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', or 'ESNEXT'. 
*/ 6 | "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */ 7 | "lib": [ 8 | "ES2015", 9 | "ES2017", 10 | "dom" 11 | ], /* Specify library files to be included in the compilation. */ 12 | "allowJs": true, /* Allow javascript files to be compiled. */ 13 | // "checkJs": true, /* Report errors in .js files. */ 14 | // "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */ 15 | "declaration": true, /* Generates corresponding '.d.ts' file. */ 16 | // "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */ 17 | // "sourceMap": true, /* Generates corresponding '.map' file. */ 18 | // "outFile": "./", /* Concatenate and emit output to single file. */ 19 | "outDir": "./dist", /* Redirect output structure to the directory. */ 20 | // "rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */ 21 | // "composite": true, /* Enable project compilation */ 22 | // "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */ 23 | // "removeComments": true, /* Do not emit comments to output. */ 24 | // "noEmit": true, /* Do not emit outputs. */ 25 | // "importHelpers": true, /* Import emit helpers from 'tslib'. */ 26 | "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */ 27 | // "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */ 28 | /* Strict Type-Checking Options */ 29 | "strict": true, /* Enable all strict type-checking options. */ 30 | // "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */ 31 | // "strictNullChecks": true, /* Enable strict null checks. */ 32 | // "strictFunctionTypes": true, /* Enable strict checking of function types. */ 33 | // "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */ 34 | // "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */ 35 | // "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */ 36 | // "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */ 37 | /* Additional Checks */ 38 | // "noUnusedLocals": true, /* Report errors on unused locals. */ 39 | // "noUnusedParameters": true, /* Report errors on unused parameters. */ 40 | // "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */ 41 | // "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */ 42 | /* Module Resolution Options */ 43 | // "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */ 44 | // "baseUrl": "./", /* Base directory to resolve non-absolute module names. */ 45 | // "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */ 46 | // "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */ 47 | // "typeRoots": [], /* List of folders to include type definitions from. */ 48 | // "types": [], /* Type declaration files to be included in compilation. 
*/ 49 | // "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */ 50 | "esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */ 51 | // "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */ 52 | // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ 53 | /* Source Map Options */ 54 | // "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */ 55 | // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ 56 | // "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */ 57 | // "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */ 58 | /* Experimental Options */ 59 | // "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */ 60 | // "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */ 61 | /* Advanced Options */ 62 | "forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. */ 63 | }, 64 | "exclude": [ 65 | "dist/**", 66 | "src/**/*.test.ts", 67 | "src/**/*.test.js", 68 | "babel.config.js" 69 | ] 70 | } --------------------------------------------------------------------------------
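A minimal end-to-end sketch of the public API re-exported from src/index.ts. The names below (People, Query) and the import path are illustrative, not part of the repo:

    import { newTable, query, NumberType, StringType } from './src'  // illustrative path; the entry point is src/index.ts

    // Declare a table with the runtime validators from src/datalog.ts
    const People = newTable<{ id: number, name: string }>({ id: NumberType, name: StringType })
    People.assert({ id: 0, name: 'Ada' })
    People.assert({ id: 1, name: 'Grace' })

    // query() hands the callback a proxy of free variables; calling People({ id, name })
    // with those free variables adds the table as a constraint of the join.
    const Query = query<{ id: number, name: string }>(({ id, name }) => {
      People({ id, name })
    })

    Query.view().readAllData()  // => both rows, e.g. [{ id: 0, name: 'Ada' }, { id: 1, name: 'Grace' }]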