├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ ├── feature_request.md │ └── rfc.md ├── dependabot.yml ├── pull_request_template.md └── workflows │ ├── docs.yml │ ├── review.yml │ └── test.yml ├── .gitignore ├── .vscode └── settings.json ├── CHANGELOG.md ├── LICENSE ├── README.md ├── package-lock.json ├── package.json ├── rollup.config.js ├── src ├── index.ts ├── local │ ├── collection.spec.ts │ ├── collection.ts │ ├── db.spec.ts │ ├── db.ts │ ├── document.spec.ts │ └── document.ts ├── middleware │ ├── changes │ │ ├── changes.spec.ts │ │ └── index.ts │ ├── mongo │ │ ├── index.ts │ │ ├── mongo.spec.ts │ │ └── query.ts │ ├── overrides.ts │ ├── schemas │ │ ├── index.ts │ │ └── schemas.spec.ts │ └── ulid │ │ ├── index.ts │ │ └── ulid.spec.ts ├── remote │ ├── db.spec.ts │ ├── grpc.spec.ts │ ├── grpc.ts │ ├── index.ts │ └── remote.spec.ts ├── types.d.ts └── utils │ ├── index.ts │ ├── spec.utils.ts │ └── utils.spec.ts ├── tsconfig.json ├── typedoc.json └── webpack.test.js /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Go to '...' 16 | 2. Click on '....' 17 | 3. Scroll down to '....' 18 | 4. See error 19 | 20 | **Expected behavior** 21 | A clear and concise description of what you expected to happen. 22 | 23 | **Screenshots** 24 | If applicable, add screenshots to help explain your problem. 25 | 26 | **Desktop (please complete the following information):** 27 | - OS: [e.g. iOS] 28 | - Browser [e.g. chrome, safari] 29 | - Version [e.g. 22] 30 | 31 | **Smartphone (please complete the following information):** 32 | - Device: [e.g. iPhone6] 33 | - OS: [e.g. iOS8.1] 34 | - Browser [e.g. stock browser, safari] 35 | - Version [e.g. 22] 36 | 37 | **Additional context** 38 | Add any other context about the problem here. 39 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 21 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/rfc.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: RFC 3 | about: Request for comment 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | - Feature Name: (fill me in with a unique ident, `my_awesome_feature`) 11 | - Start Date: (fill me in with today's date, YYYY-MM-DD) 12 | - RFC PR: [textileio/js-threads#0000](https://github.com/textileio/js-threads/pull) 13 | - Relevant Issue: [textileio/js-threads#0000](https://github.com/textileio/js-threads/issue) 14 | 15 | # Summary 16 | [summary]: #summary 17 | 18 | One paragraph explanation of the feature. 19 | 20 | # Motivation 21 | [motivation]: #motivation 22 | 23 | Why are we doing this? What use cases does it support? What is the expected outcome? 24 | 25 | # Guide-level explanation 26 | [guide-level-explanation]: #guide-level-explanation 27 | 28 | Explain the proposal as if it was already included in the library and you were teaching it to another community member. That generally means: 29 | 30 | - Introducing new named concepts. 31 | - Explaining the feature largely in terms of examples. 32 | - Explaining how community members should *think* about the feature, and how it should impact the way they use this library. It should explain the impact as concretely as possible. 33 | - If applicable, provide sample error messages, deprecation warnings, or migration guidance. 34 | - If applicable, describe the differences between teaching this to existing developers and new developers. 35 | 36 | For implementation-oriented RFCs (e.g. for library internals), this section should focus on how library contributors should think about the change, and give examples of its concrete impact. For policy RFCs, this section should provide an example-driven introduction to the policy, and explain its impact in concrete terms. 37 | 38 | # Reference-level explanation 39 | [reference-level-explanation]: #reference-level-explanation 40 | 41 | This is the technical portion of the RFC. Explain the design in sufficient detail that: 42 | 43 | - Its interaction with other features is clear. 44 | - It is reasonably clear how the feature would be implemented. 45 | - Corner cases are dissected by example. 46 | 47 | The section should return to the examples given in the previous section, and explain more fully how the detailed proposal makes those examples work. 48 | 49 | # Drawbacks 50 | [drawbacks]: #drawbacks 51 | 52 | Why should we *not* do this? 53 | 54 | # Rationale and alternatives 55 | [rationale-and-alternatives]: #rationale-and-alternatives 56 | 57 | - Why is this design the best in the space of possible designs? 58 | - What other designs have been considered and what is the rationale for not choosing them? 59 | - What is the impact of not doing this? 60 | 61 | # Prior art 62 | [prior-art]: #prior-art 63 | 64 | Discuss prior art, both the good and the bad, in relation to this proposal. 65 | A few examples of what this can include are: 66 | 67 | - For code-driven proposals: Does this feature exist in other similar libraries languages and what experience have their community had? 68 | - Is this done by some other community and what were their experiences with it? 69 | - For other teams: What lessons can we learn from what other communities have done here? 70 | - Papers: Are there any published papers or great posts that discuss this? If you have some relevant papers to refer to, this can serve as a more detailed theoretical background. 71 | 72 | This section is intended to encourage you as an author to think about the lessons from other libraries, provide readers of your RFC with a fuller picture. 73 | If there is no prior art, that is fine - your ideas are interesting to us whether they are brand new or if it is an adaptation from other libraries. 74 | 75 | Note that while precedent set by other libraries is some motivation, it does not on its own motivate an RFC. 76 | Please also take into consideration that this library might sometimes intentionally diverge from common patterns. 77 | 78 | # Unresolved questions 79 | [unresolved-questions]: #unresolved-questions 80 | 81 | - What parts of the design do you expect to resolve through the RFC process before this gets merged? 82 | - What parts of the design do you expect to resolve through the implementation of this feature before stabilization? 83 | - What related issues do you consider out of scope for this RFC that could be addressed in the future independently of the solution that comes out of this RFC? 84 | 85 | # Future possibilities 86 | [future-possibilities]: #future-possibilities 87 | 88 | Think about what the natural extension and evolution of your proposal would 89 | be and how it would affect the library and project as a whole in a holistic 90 | way. Try to use this section as a tool to more fully consider all possible 91 | interactions with the project and library in your proposal. 92 | Also consider how the this all fits into the roadmap for the project 93 | and of the relevant sub-team. 94 | 95 | This is also a good place to "dump ideas", if they are out of scope for the 96 | RFC you are writing but otherwise related. 97 | 98 | If you have tried and cannot think of any future possibilities, 99 | you may simply state that you cannot think of anything. 100 | 101 | Note that having something written down in the future-possibilities section 102 | is not a reason to accept the current or a future RFC; such notes should be 103 | in the section on motivation or rationale in this or subsequent RFCs. 104 | The section merely provides additional information. 105 | 106 | > This PR template comes from https://github.com/rust-lang/rfcs/blob/master/0000-template.md 107 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # Basic dependabot.yml file for npm packages 2 | version: 2 3 | updates: 4 | # Enable version updates for npm 5 | - package-ecosystem: "npm" 6 | # Look for `package.json` and `lock` files in the `root` directory 7 | directory: "/" 8 | # Check the npm registry for updates weekly 9 | schedule: 10 | interval: "weekly" 11 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | ## Description 2 | 3 | Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change. 4 | 5 | Fixes # (issue) 6 | 7 | ## Type of change 8 | 9 | Please delete options that are not relevant. 10 | 11 | - [ ] Bug fix (non-breaking change which fixes an issue) 12 | - [ ] New feature (non-breaking change which adds functionality) 13 | - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected) 14 | - [ ] This change requires a documentation update 15 | 16 | ## How Has This Been Tested? 17 | 18 | Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration 19 | 20 | - [ ] Test A 21 | - [ ] Test B 22 | 23 | **Test Configuration**: 24 | * Firmware version: 25 | * Hardware: 26 | * Toolchain: 27 | * SDK: 28 | 29 | ## Checklist: 30 | 31 | - [ ] My code follows the style guidelines of this project 32 | - [ ] I have performed a self-review of my own code 33 | - [ ] I have commented my code, particularly in hard-to-understand areas 34 | - [ ] I have made corresponding changes to the documentation 35 | - [ ] My changes generate no new warnings 36 | - [ ] I have added tests that prove my fix is effective or that my feature works 37 | - [ ] New and existing unit tests pass locally with my changes 38 | - [ ] Any dependent changes have been merged and published in downstream modules 39 | 40 | > This PR template comes from https://github.com/embeddedartistry/templates 41 | -------------------------------------------------------------------------------- /.github/workflows/docs.yml: -------------------------------------------------------------------------------- 1 | name: Docs 2 | on: 3 | push: 4 | branches: 5 | - main 6 | 7 | jobs: 8 | deploy: 9 | name: Docs 10 | runs-on: ubuntu-latest 11 | if: "!contains(github.event.head_commit.message, 'skip-ci')" 12 | 13 | steps: 14 | - name: Checkout 15 | uses: actions/checkout@v1 16 | 17 | - name: Cache 18 | uses: actions/cache@v1 19 | with: 20 | path: ~/.npm # npm cache files are stored in `~/.npm` on Linux/macOS 21 | key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} 22 | restore-keys: | 23 | ${{ runner.os }}-build-${{ env.cache-name }}- 24 | ${{ runner.os }}-build- 25 | ${{ runner.os }}- 26 | - name: Setup 27 | uses: actions/setup-node@v1 28 | with: 29 | node-version: 12 30 | registry-url: https://registry.npmjs.org/ 31 | 32 | - name: Install 33 | run: npm install 34 | 35 | - name: Docs 36 | run: npm run docs 37 | 38 | - name: Deploy 39 | uses: peaceiris/actions-gh-pages@v3 40 | with: 41 | github_token: ${{ secrets.GITHUB_TOKEN }} 42 | PUBLISH_BRANCH: gh-pages 43 | PUBLISH_DIR: docs 44 | -------------------------------------------------------------------------------- /.github/workflows/review.yml: -------------------------------------------------------------------------------- 1 | name: Review 2 | on: 3 | pull_request: 4 | branches: 5 | - main 6 | 7 | jobs: 8 | lint: 9 | name: Lint 10 | runs-on: ubuntu-latest 11 | if: "!contains(github.event.head_commit.message, 'skip-ci')" 12 | 13 | steps: 14 | - name: Checkout 15 | uses: actions/checkout@v1 16 | - name: Cache 17 | uses: actions/cache@v1 18 | with: 19 | path: ~/.npm # npm cache files are stored in `~/.npm` on Linux/macOS 20 | key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} 21 | restore-keys: | 22 | ${{ runner.os }}-build-${{ env.cache-name }}- 23 | ${{ runner.os }}-build- 24 | ${{ runner.os }}- 25 | - name: Setup 26 | uses: actions/setup-node@v1 27 | with: 28 | node-version: 12 29 | - name: Install 30 | run: npm install 31 | - name: Lint 32 | run: npm run lint 33 | 34 | spelling: 35 | name: Spelling 36 | runs-on: ubuntu-latest 37 | steps: 38 | - uses: actions/checkout@v1 39 | - uses: reviewdog/action-misspell@v1 40 | with: 41 | reporter: github-pr-review 42 | github_token: ${{ secrets.github_token }} 43 | locale: "US" 44 | 45 | validate: 46 | name: Validate 47 | runs-on: ubuntu-latest 48 | steps: 49 | - name: Checkout 50 | uses: actions/checkout@v1 51 | 52 | - name: Run Commitsar 53 | uses: docker://aevea/commitsar 54 | packages: 55 | name: Textile packages 56 | runs-on: ubuntu-latest 57 | if: "!contains(github.event.head_commit.message, 'skip-ci')" 58 | 59 | steps: 60 | - name: Checkout 61 | uses: actions/checkout@v1 62 | - name: Cache 63 | uses: actions/cache@v1 64 | with: 65 | path: ~/.npm # npm cache files are stored in `~/.npm` on Linux/macOS 66 | key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} 67 | restore-keys: | 68 | ${{ runner.os }}-build-${{ env.cache-name }}- 69 | ${{ runner.os }}-build- 70 | ${{ runner.os }}- 71 | - name: Setup 72 | uses: actions/setup-node@v1 73 | with: 74 | node-version: 12 75 | - name: Install dependencies 76 | run: npm i 77 | - name: Install npm-check-updates 78 | run: npm i -g npm-check-updates 79 | - name: Display 80 | run: npm run textile:check 81 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test 2 | on: 3 | push: 4 | branches: 5 | - main 6 | pull_request: 7 | branches: 8 | - main 9 | 10 | jobs: 11 | test: 12 | name: Test 13 | runs-on: ubuntu-latest 14 | if: "!contains(github.event.head_commit.message, 'skip-ci')" 15 | 16 | services: 17 | threads1: 18 | image: textile/go-threads:latest 19 | env: 20 | THRDS_HOSTADDR: /ip4/0.0.0.0/tcp/4006 21 | THRDS_APIADDR: /ip4/0.0.0.0/tcp/6006 22 | THRDS_APIPROXYADDR: /ip4/0.0.0.0/tcp/6007 23 | THRDS_DEBUG: true 24 | ports: 25 | - "4006:4006" 26 | - "127.0.0.1:6006:6006" 27 | - "127.0.0.1:6007:6007" 28 | threads2: 29 | image: textile/go-threads:latest 30 | env: 31 | THRDS_HOSTADDR: /ip4/0.0.0.0/tcp/4006 32 | THRDS_APIADDR: /ip4/0.0.0.0/tcp/6006 33 | THRDS_APIPROXYADDR: /ip4/0.0.0.0/tcp/6007 34 | THRDS_DEBUG: true 35 | ports: 36 | - "4206:4006" 37 | - "127.0.0.1:6206:6006" 38 | - "127.0.0.1:6207:6007" 39 | 40 | steps: 41 | - name: Checkout 42 | uses: actions/checkout@v1 43 | 44 | - name: Cache 45 | uses: actions/cache@v1 46 | with: 47 | path: ~/.npm # npm cache files are stored in `~/.npm` on Linux/macOS 48 | key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} 49 | restore-keys: | 50 | ${{ runner.os }}-build-${{ env.cache-name }}- 51 | ${{ runner.os }}-build- 52 | ${{ runner.os }}- 53 | 54 | - name: Setup 55 | uses: actions/setup-node@v1 56 | with: 57 | node-version: 12 58 | 59 | - name: Install 60 | run: npm install 61 | 62 | - name: Test Node 63 | run: npm run test:node 64 | 65 | - name: Test Browser 66 | run: npm run test:browser 67 | 68 | - name: Compile Test 69 | run: npm run build 70 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # Created by https://www.toptal.com/developers/gitignore/api/code,node 3 | # Edit at https://www.toptal.com/developers/gitignore?templates=code,node 4 | 5 | ### Code ### 6 | .vscode 7 | !.vscode/settings.json 8 | !.vscode/tasks.json 9 | !.vscode/launch.json 10 | !.vscode/extensions.json 11 | *.code-workspace 12 | 13 | ### Node ### 14 | # Logs 15 | logs 16 | *.log 17 | npm-debug.log* 18 | yarn-debug.log* 19 | yarn-error.log* 20 | 21 | # Diagnostic reports (https://nodejs.org/api/report.html) 22 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 23 | 24 | # Runtime data 25 | pids 26 | *.pid 27 | *.seed 28 | *.pid.lock 29 | 30 | # Directory for instrumented libs generated by jscoverage/JSCover 31 | lib-cov 32 | 33 | # Coverage directory used by tools like istanbul 34 | coverage 35 | *.lcov 36 | 37 | # nyc test coverage 38 | .nyc_output 39 | 40 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 41 | .grunt 42 | 43 | # Bower dependency directory (https://bower.io/) 44 | bower_components 45 | 46 | # node-waf configuration 47 | .lock-wscript 48 | 49 | # Compiled binary addons (https://nodejs.org/api/addons.html) 50 | build/Release 51 | 52 | # Dependency directories 53 | node_modules/ 54 | jspm_packages/ 55 | 56 | # TypeScript v1 declaration files 57 | typings/ 58 | 59 | # TypeScript cache 60 | *.tsbuildinfo 61 | 62 | # Optional npm cache directory 63 | .npm 64 | 65 | # Optional eslint cache 66 | .eslintcache 67 | 68 | # Microbundle cache 69 | .rpt2_cache/ 70 | .rts2_cache_cjs/ 71 | .rts2_cache_es/ 72 | .rts2_cache_umd/ 73 | 74 | # Optional REPL history 75 | .node_repl_history 76 | 77 | # Output of 'npm pack' 78 | *.tgz 79 | 80 | # Yarn Integrity file 81 | .yarn-integrity 82 | 83 | # dotenv environment variables file 84 | .env 85 | .env.test 86 | 87 | # local build folders 88 | build 89 | 90 | # local dbs for testing 91 | **.sqlite 92 | 93 | # parcel-bundler cache (https://parceljs.org/) 94 | .cache 95 | 96 | # Next.js build output 97 | .next 98 | 99 | # Nuxt.js build / generate output 100 | .nuxt 101 | dist 102 | 103 | # Gatsby files 104 | .cache/ 105 | # Comment in the public line in if your project uses Gatsby and not Next.js 106 | # https://nextjs.org/blog/next-9-1#public-directory-support 107 | # public 108 | 109 | # vuepress build output 110 | .vuepress/dist 111 | 112 | # Serverless directories 113 | .serverless/ 114 | 115 | # FuseBox cache 116 | .fusebox/ 117 | 118 | # DynamoDB Local files 119 | .dynamodb/ 120 | 121 | # TernJS port file 122 | .tern-port 123 | 124 | # Stores VSCode versions used for testing VSCode extensions 125 | .vscode-test 126 | 127 | # End of https://www.toptal.com/developers/gitignore/api/code,node 128 | 129 | docs/ 130 | repo/ 131 | docker-compose*.yml 132 | **/.DS_Store -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "typescript.tsdk": "node_modules/typescript/lib", 3 | "mochaExplorer.files": "src/**/*.spec.ts", 4 | "mochaExplorer.esmLoader": true, 5 | "mochaExplorer.exit": true, 6 | "mochaExplorer.require": [ 7 | "ts-node/register", 8 | "source-map-support/register" 9 | ], 10 | "mochaExplorer.launcherScript": "node_modules/mocha-explorer-launcher-scripts/nyc", 11 | "mochaExplorer.env": { 12 | "TS_NODE_FILES": "true", 13 | "TS_NODE_COMPILER_OPTIONS": "{\"module\": \"commonjs\" }" 14 | } 15 | } -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 4 | 5 | ### [0.0.5](https://github.com/textileio/thread-db/compare/v0.0.4...v0.0.5) (2020-12-04) 6 | 7 | ### [0.0.4](https://github.com/textileio/thread-db/compare/v0.0.3...v0.0.4) (2020-11-24) 8 | 9 | ### [0.0.3](https://github.com/textileio/thread-db/compare/v0.0.2...v0.0.3) (2020-11-24) 10 | 11 | 12 | ### Features 13 | 14 | * esm builds for node and browser/bundle ([125d1bf](https://github.com/textileio/thread-db/commit/125d1bf163719faab7064dbb9b65f8fd4041b5d9)) 15 | 16 | ### 0.0.2 (2020-11-06) 17 | 18 | 19 | ### Features 20 | 21 | * adds docs building and config ([ae2749c](https://github.com/textileio/thread-db/commit/ae2749cd06797167e6f04ad64a7642f0002c112a)) 22 | * enable dependabot ([760d30e](https://github.com/textileio/thread-db/commit/760d30e2a6f9bddaf671f5f9976e98b4a1e00a54)) 23 | * remote working against hub + test ([b631450](https://github.com/textileio/thread-db/commit/b631450fdebfbab414d44ed88136f8d857d366c4)) 24 | * revert to not using txn for now ([e4d6a55](https://github.com/textileio/thread-db/commit/e4d6a551010f2ef4028f74d9df524115e447deb6)) 25 | * use correct id + revert package-lock ([84fd648](https://github.com/textileio/thread-db/commit/84fd648b27da3566420732e62c1959a06afc9906)) 26 | 27 | 28 | ### Bug Fixes 29 | 30 | * extra error ([d0008f6](https://github.com/textileio/thread-db/commit/d0008f6182f5406a7ca9388b7c00bab2b3f09344)) 31 | * point remote to localhost + error updates ([6b04605](https://github.com/textileio/thread-db/commit/6b04605dacea6bd826011a8f2f68ed97a56319b2)) 32 | * proper json output + remote deps ([a6b42d6](https://github.com/textileio/thread-db/commit/a6b42d6069853eded0851908a85abfb961bc0980)) 33 | * uses transactions to hack around temp hub blocker ([8ed9511](https://github.com/textileio/thread-db/commit/8ed9511889855c4c49b84eea44e8d5bf3486e60d)) 34 | * workaround for diff hub/remote behavior ([3567822](https://github.com/textileio/thread-db/commit/35678220361176d8395bf7174ce9711f041e441b)) 35 | 36 | ### 0.0.1 (2020-10-29) 37 | 38 | 39 | ### Features 40 | 41 | * adds docs building and config ([ae2749c](https://github.com/textileio/thread-db/commit/ae2749cd06797167e6f04ad64a7642f0002c112a)) 42 | * enable dependabot ([760d30e](https://github.com/textileio/thread-db/commit/760d30e2a6f9bddaf671f5f9976e98b4a1e00a54)) 43 | * remote working against hub + test ([b631450](https://github.com/textileio/thread-db/commit/b631450fdebfbab414d44ed88136f8d857d366c4)) 44 | * revert to not using txn for now ([e4d6a55](https://github.com/textileio/thread-db/commit/e4d6a551010f2ef4028f74d9df524115e447deb6)) 45 | * use correct id + revert package-lock ([84fd648](https://github.com/textileio/thread-db/commit/84fd648b27da3566420732e62c1959a06afc9906)) 46 | 47 | 48 | ### Bug Fixes 49 | 50 | * extra error ([d0008f6](https://github.com/textileio/thread-db/commit/d0008f6182f5406a7ca9388b7c00bab2b3f09344)) 51 | * point remote to localhost + error updates ([6b04605](https://github.com/textileio/thread-db/commit/6b04605dacea6bd826011a8f2f68ed97a56319b2)) 52 | * proper json output + remote deps ([a6b42d6](https://github.com/textileio/thread-db/commit/a6b42d6069853eded0851908a85abfb961bc0980)) 53 | * workaround for diff hub/remote behavior ([3567822](https://github.com/textileio/thread-db/commit/35678220361176d8395bf7174ce9711f041e441b)) 54 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2020 Textile 2 | 3 | MIT License 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Javascript implementation of Textile's ThreadDB 2 | 3 | [![Textile](https://img.shields.io/badge/made%20by-Textile-informational.svg)](https://textile.io) 4 | [![Slack](https://img.shields.io/badge/slack-slack.textile.io-informational.svg)](https://slack.textile.io) 5 | [![License](https://img.shields.io/github/license/textileio/js-threaddb.svg)](./LICENSE) 6 | [![Release](https://img.shields.io/npm/v/@textile/threaddb.svg)](https://www.npmjs.com/package/@textile/threaddb) 7 | 8 | ![Test](https://github.com/textileio/js-threaddb/workflows/Test/badge.svg) 9 | [![Docs](https://github.com/textileio/js-threaddb/workflows/Docs/badge.svg)](https://textileio.github.io/js-threaddb) 10 | 11 | **This project has been moved to https://github.com/textileio/js-textile.** 12 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@textile/threaddb", 3 | "version": "0.0.5", 4 | "description": "ThreadDB Javascript implementation", 5 | "main": "dist/cjs/index.js", 6 | "module": "dist/esm/index.js", 7 | "bundle": "dist/index.esm.js", 8 | "types": "dist/esm/index.d.ts", 9 | "files": [ 10 | "dist/**/!(*.spec).js?(.map)", 11 | "dist/**/!(*.spec).d.ts" 12 | ], 13 | "scripts": { 14 | "prepublishOnly": "npm run build", 15 | "build": "npm run build:cjs && npm run build:esm && npm run build:bundle", 16 | "build:cjs": "tsc --outDir dist/cjs --declaration false --module CommonJS", 17 | "build:esm": "tsc --outDir dist/esm --declaration --module ES6", 18 | "build:bundle": "rollup -c", 19 | "clean": "rimraf ./dist ./coverage ./build ./.nyc_output ./tsconfig.tsbuildinfo", 20 | "test": "npm run test:node && npm run test:browser", 21 | "test:browser": "polendina src/**/*.spec.ts --webpack-config=./webpack.test.js", 22 | "test:node": "env TS_NODE_FILES=true TS_NODE_COMPILER_OPTIONS='{\"module\":\"commonjs\"}' nyc --reporter=lcov mocha", 23 | "lint": "eslint", 24 | "docs": "typedoc", 25 | "version": "standard-version", 26 | "textile:check": "npx ncu '/^@textile/.*$/'", 27 | "textile:update": "npx ncu -u '/^@textile/.*$/'" 28 | }, 29 | "keywords": [ 30 | "database", 31 | "ipfs", 32 | "textile", 33 | "local-first", 34 | "p2p" 35 | ], 36 | "author": "Textile ", 37 | "license": "MIT", 38 | "devDependencies": { 39 | "@babel/polyfill": "^7.12.1", 40 | "@commitlint/cli": "^11.0.0", 41 | "@commitlint/config-conventional": "^11.0.0", 42 | "@istanbuljs/nyc-config-typescript": "^1.0.1", 43 | "@rollup/plugin-commonjs": "^16.0.0", 44 | "@rollup/plugin-inject": "^4.0.2", 45 | "@rollup/plugin-json": "^4.1.0", 46 | "@rollup/plugin-node-resolve": "^10.0.0", 47 | "@types/chai": "^4.2.14", 48 | "@types/mocha": "^8.0.3", 49 | "@typescript-eslint/eslint-plugin": "^4.5.0", 50 | "@typescript-eslint/parser": "^4.5.0", 51 | "@wessberg/rollup-plugin-ts": "^1.3.8", 52 | "chai": "^4.2.0", 53 | "eslint": "^7.11.0", 54 | "eslint-config-prettier": "^7.0.0", 55 | "eslint-plugin-import": "^2.22.1", 56 | "eslint-plugin-node": "^11.1.0", 57 | "eslint-plugin-prettier": "^3.1.4", 58 | "eslint-plugin-promise": "^4.2.1", 59 | "esm": "^3.2.25", 60 | "husky": "^4.3.0", 61 | "mocha": "^8.2.0", 62 | "mocha-explorer-launcher-scripts": "^0.3.0", 63 | "nyc": "^15.1.0", 64 | "polendina": "^1.1.0", 65 | "prettier": "^2.1.2", 66 | "rimraf": "^3.0.2", 67 | "rollup": "^2.33.3", 68 | "rollup-plugin-terser": "^7.0.2", 69 | "standard-version": "^9.0.0", 70 | "ts-loader": "^8.0.6", 71 | "ts-node": "^9.0.0", 72 | "typedoc": "^0.19.2", 73 | "typescript": "^4.0.3" 74 | }, 75 | "dependencies": { 76 | "@improbable-eng/grpc-web": "^0.13.0", 77 | "@textile/context": "^0.9.2", 78 | "@textile/crypto": "^2.0.0", 79 | "@textile/grpc-transport": "0.2.1", 80 | "@textile/security": "^0.6.2", 81 | "@textile/threads-client": "^1.3.2", 82 | "@textile/threads-client-grpc": "^1.0.2", 83 | "@textile/threads-id": "^0.3.1", 84 | "@types/json-schema": "^7.0.6", 85 | "@types/to-json-schema": "^0.2.0", 86 | "ajv": "^6.12.6", 87 | "buffer": "^6.0.3", 88 | "dexie": "^3.0.2", 89 | "dexie-mongoify": "^1.3.0", 90 | "esbuild": "^0.8.13", 91 | "fast-json-patch": "^3.0.0-1", 92 | "indexeddbshim": "^7.0.0", 93 | "json-schema": "^0.2.5", 94 | "regenerator-runtime": "^0.13.7", 95 | "to-json-schema": "^0.2.5", 96 | "ulid": "^2.3.0" 97 | }, 98 | "commitlint": { 99 | "extends": [ 100 | "@commitlint/config-conventional" 101 | ] 102 | }, 103 | "husky": { 104 | "hooks": { 105 | "commit-msg": "commitlint -E HUSKY_GIT_PARAMS" 106 | } 107 | }, 108 | "mocha": { 109 | "spec": "src/**/*.spec.ts", 110 | "reporter": "spec", 111 | "recursive": true, 112 | "require": [ 113 | "ts-node/register" 114 | ], 115 | "exit": true, 116 | "esmLoader": true 117 | }, 118 | "nyc": { 119 | "extends": "@istanbuljs/nyc-config-typescript" 120 | }, 121 | "eslintConfig": { 122 | "env": { 123 | "browser": true, 124 | "es2020": true 125 | }, 126 | "extends": [ 127 | "plugin:@typescript-eslint/recommended", 128 | "prettier/@typescript-eslint", 129 | "plugin:prettier/recommended" 130 | ], 131 | "ignorePatterns": [ 132 | "node_modules", 133 | "dist" 134 | ], 135 | "parser": "@typescript-eslint/parser", 136 | "parserOptions": { 137 | "ecmaVersion": 11, 138 | "sourceType": "module" 139 | }, 140 | "plugins": [ 141 | "@typescript-eslint", 142 | "prettier" 143 | ], 144 | "rules": { 145 | "prettier/prettier": "error" 146 | } 147 | } 148 | } 149 | -------------------------------------------------------------------------------- /rollup.config.js: -------------------------------------------------------------------------------- 1 | import typescript from "@wessberg/rollup-plugin-ts"; 2 | import commonjs from "@rollup/plugin-commonjs"; 3 | import resolve from "@rollup/plugin-node-resolve"; 4 | import json from "@rollup/plugin-json"; 5 | import inject from "@rollup/plugin-inject"; 6 | import { terser } from "rollup-plugin-terser"; 7 | import path from "path"; 8 | 9 | import pkg from "./package.json"; 10 | 11 | export default { 12 | input: "src/index.ts", 13 | output: [ 14 | { 15 | exports: "named", 16 | file: path.resolve(pkg.bundle), 17 | format: "es", 18 | }, 19 | ], 20 | external: ["stream"], 21 | plugins: [ 22 | json(), 23 | resolve({ 24 | preferBuiltins: false, 25 | browser: true, 26 | }), 27 | commonjs(), 28 | inject({ 29 | Buffer: ["buffer/", "Buffer"], 30 | }), 31 | typescript(), 32 | terser(), 33 | ], 34 | // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types 35 | onwarn(warning, warn) { 36 | // suppress eval warnings 37 | if (warning.code === "EVAL") return; 38 | warn(warning); 39 | }, 40 | }; 41 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | import "regenerator-runtime/runtime"; 2 | export { Collection } from "./local/collection"; 3 | export { Database } from "./local/db"; 4 | export { PrivateKey, PublicKey } from "@textile/crypto"; 5 | export { ThreadID } from "@textile/threads-id"; 6 | export { Client } from "@textile/threads-client"; 7 | export { Remote } from "./remote"; 8 | export type { JSONSchema } from "./middleware/schemas"; 9 | -------------------------------------------------------------------------------- /src/local/collection.spec.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/no-non-null-assertion */ 2 | import { expect } from "chai"; 3 | import { ulid } from "ulid"; 4 | import { Collection } from "./collection"; 5 | import { NewDexie } from "../utils"; 6 | import { shouldHaveThrown } from "../utils/spec.utils"; 7 | import { Query } from "../middleware/mongo"; 8 | // import { ChangeTableName } from "../middleware/changes"; 9 | 10 | const databaseName = "collection"; 11 | 12 | describe("collection", function () { 13 | const db = NewDexie(databaseName); 14 | 15 | after(async function () { 16 | // Cleanup time! 17 | db.close(); 18 | await db.delete(); 19 | }); 20 | describe("workflows", async function () { 21 | before(async function () { 22 | // Super low-level access 23 | db.version(1).stores({ things: "++_id,thing" }); 24 | }); 25 | it("should handle a normal db workflow", async function () { 26 | interface Info { 27 | _id?: string; 28 | other?: number; 29 | thing: string; 30 | } 31 | const Thing = new Collection(db.table("things")); 32 | const data: Info = { _id: ulid(), thing: "one" }; 33 | const thing1 = data; 34 | expect(thing1.thing).to.equal("one"); 35 | thing1.other = 1; 36 | // Won't compile because typed instances can't have extra properties, 37 | // which is exactly what we want! 38 | // thing1.more = 'something' 39 | expect(thing1.other).to.equal(1); 40 | expect(await Thing.find({}).count()).to.equal(0); 41 | await Thing.save(thing1); 42 | expect(await Thing.find({}).count()).to.equal(1); 43 | await Thing.save(data); 44 | try { 45 | await Thing.insert(data); 46 | throw shouldHaveThrown; 47 | // TODO: Better error reporting to mask out dexie stuff 48 | } catch (err) { 49 | expect(err).to.not.equal(shouldHaveThrown); 50 | } 51 | await Thing.insert( 52 | { other: -1, thing: "five" }, 53 | { other: 2, thing: "two" }, 54 | { other: 3, thing: "three" }, 55 | { other: 4, thing: "four" } 56 | ); 57 | const all = await Thing.find({ 58 | $or: [{ other: { $gt: 1 } }, { thing: { $eq: "one" } }], 59 | }).sortBy("_id"); 60 | const last = all[0]; 61 | expect(last).to.have.haveOwnProperty("other", 1); 62 | }); 63 | }); 64 | 65 | describe("units", () => { 66 | // Default Person interface to work with types 67 | type Person = { 68 | name: string; 69 | age: number; 70 | }; 71 | 72 | // Default person data, frozen to keep from modifying directly 73 | const defaultPerson: Person = Object.freeze({ 74 | name: "Lucas", 75 | age: 7, 76 | }); 77 | 78 | // Function to create a copy of person, rather than mutate 79 | const copyPerson = ( 80 | person: Person = defaultPerson, 81 | _id?: string 82 | ): Person & { _id?: string } => { 83 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 84 | const out: any = { ...person }; 85 | if (_id) out._id = _id; 86 | return out; 87 | }; 88 | 89 | // Function to setup a new default collection based on the person interface 90 | const setupCollection = () => { 91 | return new Collection(db.table("Person")); 92 | }; 93 | 94 | before(async function () { 95 | // Super low-level access 96 | db.close(); 97 | // Create Person store with indexes on _id, name, and age 98 | db.version(2).stores({ Person: "++_id,name,age" }); 99 | await db.open(); 100 | }); 101 | 102 | beforeEach(() => { 103 | // Clear out the store before each run 104 | db.tables.forEach((table) => table.clear()); 105 | }); 106 | 107 | describe("top-level instance", () => { 108 | it("should have a name property", function () { 109 | const Person = setupCollection(); 110 | expect(Person.name).to.equal("Person"); 111 | }); 112 | 113 | it("should handle multiple write operations", async function () { 114 | const Person = setupCollection(); 115 | // const test = copyPerson(undefined, ulid()); 116 | await Person.insert(copyPerson(), copyPerson(), copyPerson()); 117 | const person = copyPerson(); 118 | const [id] = await Person.insert(person); 119 | await Person.delete(id); 120 | expect(await Person.find({}).count()).to.equal(3); 121 | }); 122 | }); 123 | 124 | describe("creating entities", () => { 125 | it("should create single entity from data", async function () { 126 | const Person = setupCollection(); 127 | const person = copyPerson(); 128 | expect(person).to.not.have.ownProperty("_id"); 129 | // Should create entity with proper id 130 | const entity = Person.create(person); 131 | expect(entity).to.have.ownProperty("_id"); 132 | 133 | // Should not exist in underlying storage yet 134 | expect(await entity.exists()).to.equal(false); 135 | expect(await Person.has(entity._id)).to.equal(false); 136 | 137 | // Now save it 138 | await entity.save(); 139 | 140 | // Now it should exist in underlying storage 141 | expect(await entity.exists()).to.equal(true); 142 | expect(await Person.has(entity._id)).to.equal(true); 143 | }); 144 | 145 | it("should create entities with ulid ids by default", async function () { 146 | const Person = setupCollection(); 147 | const person1 = copyPerson(); 148 | const [id] = await Person.insert(person1); 149 | // Should update person in place 150 | expect(person1._id).to.equal(id); 151 | const obj = await Person.findById(id); 152 | expect(obj).to.have.ownProperty("_id"); 153 | expect(obj?._id).to.equal(id); 154 | expect(obj?._id).to.have.length(26); 155 | }); 156 | 157 | it("should be able to override ulid id", async function () { 158 | const Person = setupCollection(); 159 | const person1 = copyPerson(); 160 | person1._id = "override"; 161 | const [id] = await Person.insert(person1); 162 | // Should update person in place 163 | expect(person1._id).to.equal("override"); 164 | const obj = await Person.findById(id); 165 | expect(obj).to.have.ownProperty("_id"); 166 | expect(obj?._id).to.equal(id); 167 | }); 168 | 169 | it("should create a single entity (w/ type checking) at a time", async function () { 170 | const Person = setupCollection(); 171 | const person1 = copyPerson(); 172 | const [id] = await Person.insert(person1); 173 | const exists = await Person.has(id); 174 | const obj = await Person.findById(id); 175 | expect(exists).to.be.true; 176 | expect(obj).to.have.ownProperty("_id", id); 177 | expect(obj?.save).to.not.be.undefined; 178 | const person2 = copyPerson(undefined, ulid()); 179 | let has = await Person.has(person2._id!); 180 | expect(has).to.equal(false); 181 | await Person.insert(person2); 182 | has = await Person.has(person2._id!); 183 | expect(has).to.equal(true); 184 | }); 185 | 186 | it("should create multiple entities (variadic arguments w/ type checking) at once", async function () { 187 | const Person = setupCollection(); 188 | const person = copyPerson(); 189 | const [id] = await Person.insert(person, copyPerson(), copyPerson()); 190 | expect(await Person.find({}).count()).to.equal(3); 191 | expect(await Person.has(id)).to.be.true; 192 | }); 193 | 194 | it("should create an entity with a predefined id", async function () { 195 | const Person = setupCollection(); 196 | const _id = ulid(); 197 | const person = { _id, name: "Hans", age: 12 }; 198 | const [id_] = await Person.insert(person); 199 | expect(id_).to.equal(_id); 200 | }); 201 | 202 | it("should not overwrite an existing entity", async function () { 203 | const Person = setupCollection(); 204 | const _id = ulid(); 205 | try { 206 | await Person.insert({ _id, name: "Hans", age: 12 } as Person); 207 | const person = { _id, name: "Hans", age: 12 }; 208 | await Person.insert(person); 209 | throw shouldHaveThrown; 210 | } catch (err) { 211 | expect(err).to.not.equal(shouldHaveThrown); 212 | } 213 | }); 214 | }); 215 | 216 | describe("creating transactions", () => { 217 | it("should create a readonly transaction", async function () { 218 | const Person = setupCollection(); 219 | const person = copyPerson(); 220 | const [id] = await Person.insert(person); 221 | Person.readTransaction(async function () { 222 | // await Person.insert(person); 223 | expect(await Person.has(id)).to.be.true; 224 | }); 225 | }); 226 | 227 | it("should create a write transaction", async function () { 228 | const Person = setupCollection(); 229 | const person = copyPerson(); 230 | await Person.writeTransaction(async function () { 231 | const [id] = await Person.insert(person); 232 | expect(await Person.has(id)).to.be.true; 233 | }); 234 | }); 235 | }); 236 | 237 | describe("checking for entities", () => { 238 | it("should test for existing entity", async function () { 239 | const Person = setupCollection(); 240 | const person = copyPerson(); 241 | const [id1] = await Person.insert(person); 242 | expect(await Person.has(id1)).to.be.true; 243 | expect(await Person.has("blah")).to.be.false; 244 | 245 | const person2 = copyPerson(); 246 | const [id2] = await Person.insert(person2); 247 | expect(await Person.has(id2)).to.be.true; 248 | await Person.delete(id2); 249 | expect(await Person.has(id2)).to.be.false; 250 | 251 | // Test exists from instance 252 | const [id3] = await Person.insert(person2); 253 | const personInstance = await Person.findById(id3); 254 | if (personInstance) { 255 | expect(await personInstance.exists()).to.equal(true); 256 | await personInstance.remove(); 257 | expect(await personInstance.exists()).to.equal(false); 258 | } else { 259 | throw new Error("should not be undefined"); 260 | } 261 | }); 262 | 263 | it("should test for multiple entities", async function () { 264 | const Person = setupCollection(); 265 | const persons = [copyPerson(), copyPerson(), copyPerson()]; 266 | const ids = await Person.insert(...persons); 267 | expect( 268 | await Promise.all(ids.map((id) => Person.has(id))) 269 | ).to.deep.equal([true, true, true]); 270 | expect( 271 | await Promise.all(["foo", "bar", "baz"].map((p) => Person.has(p))) 272 | ).to.deep.equal([false, false, false]); 273 | }); 274 | }); 275 | 276 | describe("returning entities", () => { 277 | it("should get existing entity", async function () { 278 | const Person = setupCollection(); 279 | const person = copyPerson(); 280 | const [id] = await Person.insert(person); 281 | let found = await Person.findById(id); 282 | expect(found).to.deep.equal(person); 283 | found = await Person.findById("blah"); 284 | expect(found).to.be.undefined; 285 | }); 286 | 287 | it("should get multiple entities", async function () { 288 | const Person = setupCollection(); 289 | const persons = [copyPerson(), copyPerson(), copyPerson()]; 290 | const ids = await Person.insert(...persons); 291 | expect( 292 | await Promise.all(ids.map((id) => Person.findById(id))) 293 | ).to.deep.equal(persons); 294 | const founds = await Promise.all( 295 | ["foo", "bar", "baz"].map((p) => Person.findById(p)) 296 | ); 297 | expect(founds.every((found) => found === undefined)).to.be.true; 298 | }); 299 | }); 300 | 301 | describe("type checking entities", () => { 302 | it("should correctly handle typed entities", async function () { 303 | const Person = new Collection(db.table("Person")); 304 | const person = copyPerson(); 305 | const typed = Person.create(person); 306 | expect(typed.age).to.equal(person.age); 307 | expect(typed.name).to.equal(person.name); 308 | expect(typed._id).to.not.be.undefined; 309 | // Also works with unknown 310 | const Unknown = new Collection( 311 | db.table("Person") // Reuse Person table... 312 | ); 313 | // We can create an empty object of unknown type 314 | const empty = Unknown.create(); 315 | // This is a method from Instance 316 | expect(empty.exists).to.not.be.undefined; 317 | // This is a default _id 318 | expect(empty._id).to.not.be.undefined; 319 | }); 320 | }); 321 | 322 | describe("exporting entities", () => { 323 | it("should export entity to JSON string", async function () { 324 | const Person = setupCollection(); 325 | const person = copyPerson(); 326 | const [id] = await Person.insert(person); 327 | expect(await Person.has(id)).to.be.true; 328 | const personInstance = await Person.findById(id); 329 | // Should be an actual class instance, that we can export to JSON 330 | if (personInstance) { 331 | const json = personInstance.toJSON(); 332 | expect(json).to.deep.equal(person); 333 | } else { 334 | throw new Error("should not be undefined"); 335 | } 336 | }); 337 | }); 338 | 339 | describe("deleting entities", () => { 340 | it("should delete existing entity", async function () { 341 | const Person = setupCollection(); 342 | const person = copyPerson(); 343 | let [id] = await Person.insert(person); 344 | expect(await Person.has(id)).to.be.true; 345 | await Person.delete(id); 346 | expect(await Person.has(id)).to.be.false; 347 | await Person.delete("blah"); // Should not throw here, fails gracefully 348 | await Person.save(person); 349 | expect(await Person.has(id)).to.be.true; 350 | await Person.delete(id); 351 | expect(await Person.has(id)).to.be.false; 352 | 353 | // Test remove from instance 354 | [id] = await Person.insert(person); 355 | expect(await Person.has(id)).to.equal(true); 356 | const personInstance = await Person.findById(id); 357 | if (personInstance) { 358 | await personInstance.remove(); 359 | expect(await Person.has(id)).to.equal(false); 360 | } else { 361 | throw new Error("should not be undefined"); 362 | } 363 | }); 364 | 365 | it("should delete multiple entities", async function () { 366 | const Person = setupCollection(); 367 | const persons = [copyPerson(), copyPerson(), copyPerson()]; 368 | const ids = await Person.insert(...persons); 369 | expect( 370 | await Promise.all(ids.map((id) => Person.has(id))) 371 | ).to.deep.equal([true, true, true]); 372 | await Person.delete(...ids); 373 | expect(await Promise.all(ids.map((p) => Person.has(p)))).to.deep.equal([ 374 | false, 375 | false, 376 | false, 377 | ]); 378 | await Person.delete("foo", "bar", "baz"); // Should not error 379 | }); 380 | 381 | it("should delete all entities", async function () { 382 | // TODO: We don't support deleteRange for our track changes yet... 383 | const Person = setupCollection(); 384 | await Person.writeTransaction(async function () { 385 | const person = copyPerson(); 386 | await Person.insert(person); 387 | }); 388 | expect(await Person.find().count()).to.equal(1); 389 | // Closer checking into deleting things 390 | // const changes = db.table(ChangeTableName); 391 | // const beforeClear = await changes.count(); 392 | // Delete all entities from Person collection 393 | await Person.clear(); 394 | // Alternative to find() above... directly counting all instances 395 | expect(await Person.count()).to.equal(0); 396 | // Buuuut, this doesn't yet lead to any changes being recorded 397 | // expect(await changes.count()).to.be.greaterThan(beforeClear); 398 | }); 399 | }); 400 | 401 | describe("saving entities", () => { 402 | it("should save/update existing entity", async function () { 403 | const Person = setupCollection(); 404 | const person = copyPerson(); 405 | const [id] = await Person.save(person); 406 | person.name = "Mod"; 407 | await Person.save(person); 408 | 409 | expect(await Person.findById(id)).to.haveOwnProperty("name", "Mod"); 410 | 411 | // Test save from instance 412 | const personInstance = await Person.findById(id); 413 | if (personInstance) { 414 | personInstance.age = 99; 415 | await personInstance.save(); 416 | expect(await Person.findById(id)).to.haveOwnProperty("age", 99); 417 | } else { 418 | throw new Error("should not be undefined"); 419 | } 420 | }); 421 | 422 | it("should save/update multiple entities", async function () { 423 | const Person = setupCollection(); 424 | const persons = [copyPerson(), copyPerson(), copyPerson()]; 425 | await Person.insert(...persons); 426 | persons.forEach((p) => p.age++); 427 | await Person.save(...persons); 428 | const array = await Person.find({}).toArray((data) => 429 | data.map(({ age }) => age) 430 | ); 431 | expect(array).to.deep.equal([8, 8, 8]); 432 | }); 433 | 434 | it("should also save/update a non-existent entity", async function () { 435 | const Person = setupCollection(); 436 | await Person.save({ name: "nothing", age: 55 }); 437 | expect(await Person.find().count()).to.equal(1); 438 | }); 439 | }); 440 | 441 | describe("find/search", () => { 442 | it("should support finding one result at a time", async function () { 443 | const Person = setupCollection(); 444 | const people: Person[] = [ 445 | { name: "Lucas", age: 7 }, 446 | { name: "Clyde", age: 99 }, 447 | { name: "Duke", age: 2 }, 448 | ]; 449 | await Person.insert(...people); 450 | const query = { 451 | // Query for everyone over the age of 5 452 | age: { $gt: 5 }, 453 | }; 454 | // But only "find" one of them... 455 | const result = await Person.findOne(query); 456 | 457 | expect(result).to.not.be.undefined; 458 | expect(result).to.have.ownProperty("age"); 459 | expect(result?.age).to.be.greaterThan(5); 460 | }); 461 | it("should support simple queries", async function () { 462 | const Person = setupCollection(); 463 | const people: Person[] = [ 464 | { name: "Lucas", age: 7 }, 465 | { name: "Clyde", age: 99 }, 466 | { name: "Duke", age: 2 }, 467 | ]; 468 | await Person.insert(...people); 469 | const query = { 470 | // Find everyone over the age of 5 471 | age: { $gt: 5 }, 472 | }; 473 | const results = Person.find(query); 474 | 475 | expect(await results.count()).to.equal(2); 476 | const last = await results.last(); 477 | // Should we 'unravel' the key/value pairs here? 478 | expect(last).to.have.ownProperty("age"); 479 | expect(last?.age).to.be.greaterThan(5); 480 | }); 481 | 482 | it("should support complex queries", async function () { 483 | const Person = setupCollection(); 484 | const people: Person[] = [ 485 | { name: "Lucas", age: 56 }, 486 | { name: "Clyde", age: 55 }, 487 | { name: "Mike", age: 52 }, 488 | { name: "Micheal", age: 52 }, 489 | { name: "Duke", age: 2 }, 490 | { name: "Michelle", age: 2 }, 491 | { name: "Michelangelo", age: 55 }, 492 | ]; 493 | await Person.insert(...people); 494 | const query: Query = { 495 | // Find people who are older than 5, and younger than 56, ... 496 | // but don't include Michael, he's a jerk... 497 | $and: [ 498 | { age: { $gt: 5 } }, 499 | { age: { $lt: 56 } }, 500 | { name: { $not: { $eq: "Micheal" } } }, 501 | ], 502 | }; 503 | const results = Person.find(query); 504 | expect(await results.count()).to.equal(3); 505 | const last = await results.last(); 506 | expect(last).to.have.ownProperty("age"); 507 | expect(last?.age).to.be.greaterThan(5); 508 | expect(last?.age).to.be.lessThan(56); 509 | expect(await Person.find().count()).to.equal(7); 510 | }); 511 | }); 512 | 513 | describe("read transaction", () => { 514 | it("should test for existing entity", async function () { 515 | const Person = setupCollection(); 516 | const person = copyPerson(); 517 | const [id] = await Person.save(person); 518 | await Person.readTransaction(async function () { 519 | expect(await Person.has(id)).to.be.true; 520 | }); 521 | }); 522 | 523 | it("should return existing entity", async function () { 524 | const Person = setupCollection(); 525 | const person = copyPerson(); 526 | const [id] = await Person.save(person); 527 | await Person.readTransaction(async function () { 528 | const found = await Person.findById(id); 529 | expect(found).to.deep.equal(person); 530 | // await Person.insert(person); // Compiler won't let us! 531 | }); 532 | }); 533 | 534 | it("should support nested transactions, but no writes inside a read transaction", async function () { 535 | const Person = setupCollection(); 536 | const person = copyPerson(); 537 | await Person.save(person); 538 | try { 539 | await Person.readTransaction(async function () { 540 | // Note that dexie actually console.logs the exception here, which is annoying :shrug: 541 | // But the test is still "passing"... 542 | await Person.writeTransaction(async function () { 543 | return Person.insert(person); 544 | }); 545 | throw shouldHaveThrown; 546 | }); 547 | } catch (err) { 548 | expect(err).to.not.equal(shouldHaveThrown); 549 | } 550 | }); 551 | }); 552 | 553 | describe("write transaction", () => { 554 | it("should perform normal write operations", async function () { 555 | const Person = setupCollection(); 556 | await Person.writeTransaction(async function () { 557 | const person = copyPerson(); 558 | const [id] = await Person.insert(person); 559 | expect(await Person.find().count()).to.equal(1); 560 | return Person.delete(id); 561 | }); 562 | expect(await Person.find().count()).to.equal(0); 563 | }); 564 | 565 | it("should allow read transactions inside write transactions", (done) => { 566 | const Person = setupCollection(); 567 | const person = copyPerson(); 568 | Person.save(person).then(([id]) => { 569 | Person.writeTransaction(async function () { 570 | const found = await Person.readTransaction(async function () { 571 | return Person.findById(id); 572 | }); 573 | expect(found).to.not.be.undefined; 574 | }).then(done); 575 | }); 576 | }).timeout(5000); 577 | }); 578 | }); 579 | }); 580 | -------------------------------------------------------------------------------- /src/local/collection.ts: -------------------------------------------------------------------------------- 1 | import { Query } from "../middleware/mongo"; 2 | import { Document, DocumentInstanceClassFactory, Instance } from "./document"; 3 | import { JSONSchema } from "../middleware/schemas"; 4 | import { PromiseExtended, Collection as Result, Table } from "dexie"; 5 | 6 | /** 7 | * Index defines an index. 8 | */ 9 | 10 | export interface Index { 11 | /** 12 | * Path to the field to index in dot syntax, e.g., "name.last" or "age". 13 | */ 14 | path: string; 15 | /** 16 | * Unique indicates that only one instance should exist per field value. 17 | */ 18 | unique?: boolean; 19 | } 20 | 21 | /** 22 | * CollectionConfig describes a new Collection. 23 | */ 24 | export interface CollectionConfig { 25 | /** 26 | * The name for the collection 27 | */ 28 | name: string; 29 | /** 30 | * The JSON Schema definition for instance validation 31 | */ 32 | schema?: JSONSchema; 33 | /** 34 | * A set of fields to use for indexing 35 | */ 36 | indexes?: Index[]; 37 | /** 38 | * A validator function for writes 39 | */ 40 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 41 | writeValidator?: (author: string, event: any, instance: any) => boolean; 42 | /** 43 | * A filter function for reads 44 | */ 45 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 46 | readFilter?: (reader: string, instance: any) => any; 47 | } 48 | 49 | /** 50 | * Collection is a group of instances sharing a schema. 51 | * Collections are like db tables. They can only exist in a db. 52 | */ 53 | export class Collection { 54 | constructor(private table: Table) { 55 | // When we update things, validate the input 56 | this.table.mapToClass(DocumentInstanceClassFactory(table)); 57 | } 58 | 59 | /** 60 | * A name for the collection. 61 | */ 62 | get name(): string { 63 | return this.table.name; 64 | } 65 | 66 | /** 67 | * Delete all instances in the collection. 68 | */ 69 | async clear(): Promise { 70 | const { result } = await this.table.drop(); 71 | return result.ok > 0; 72 | } 73 | 74 | /** 75 | * Lock the collection for readonly operations. 76 | * @param cb A callback that takes a readonly collection. 77 | * @param timeout How long to wait to obtain the read lock. If after timeout seconds the lock 78 | * is not obtained, the method will error. 79 | * @note Provides no serialize-able isolation guarantees. 80 | * @note In practice, this will return a readonly Collection, which disables write operations. 81 | */ 82 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 83 | readTransaction any>(cb: T): ReturnType { 84 | return this.table.db.transaction( 85 | "readonly", 86 | [this.table], 87 | cb 88 | ) as ReturnType; 89 | } 90 | 91 | /** 92 | * Lock the collection for exclusive write operations. 93 | * @param cb A callback that takes a collection. 94 | * @param timeout How long to wait to obtain the write lock. If after timeout seconds the lock 95 | * is not obtained, the method will error. 96 | * @note Provides no serialize-able isolation guarantees. 97 | */ 98 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 99 | writeTransaction any>(cb: T): ReturnType { 100 | return this.table.db.transaction( 101 | "readwrite", 102 | [this.table], 103 | cb 104 | ) as ReturnType; 105 | } 106 | 107 | /** 108 | * Find an instance by id. 109 | * @param id The instance id. 110 | */ 111 | findById(id: string): PromiseExtended<(Document & Instance) | undefined> { 112 | // TODO: Fix up these messy types 113 | return (this.table.get(id) as unknown) as PromiseExtended< 114 | (Document & Instance) | undefined 115 | >; 116 | } 117 | 118 | /** 119 | * Insert (multiple) new instance(s). 120 | * @note Insert is similar to save, except it will not allow saving/overwriting existing instances. 121 | * @note This is the same as `create` on the Go API. 122 | * @param instances A variadic array of instances. 123 | */ 124 | async insert(...instances: T[]): Promise { 125 | return this.table.bulkAdd(instances, { allKeys: true }); 126 | } 127 | 128 | /** 129 | * Create a new instance document that can be added or operated on. 130 | * This does not automatically commit the instance to the collection. 131 | * @param data The input data to use when initializing the document. 132 | */ 133 | create(data?: T): Document & Instance { 134 | // TODO: Create is actually used differently in the Go clients, should we rename this? 135 | const cls = this.table.schema.mappedClass; 136 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 137 | return new (cls as any)(data); 138 | } 139 | 140 | /** 141 | * Remove (multiple) instance(s) by id. 142 | * @note It doesn't fail if the ID doesn't exist. 143 | * @param ids A variadic array of instance ids. 144 | */ 145 | async delete(...ids: string[]): Promise { 146 | await this.table.bulkDelete(ids); 147 | return; 148 | } 149 | 150 | /** 151 | * Save updates to (multiple) instance(s). 152 | * @note Save is similar to insert, except it allows saving/overwriting existing instances. 153 | * @param instances A variadic array of instances. 154 | */ 155 | save(...instances: T[]): Promise { 156 | return this.table.bulkPut(instances, { allKeys: true }); 157 | } 158 | 159 | /** 160 | * Check that (all) instance(s) exists. 161 | * @param id A variadic array of instance ids. 162 | */ 163 | async has(...ids: string[]): Promise { 164 | const instances = await this.table.bulkGet(ids); 165 | return Boolean(instances.length) && instances[0] !== undefined; 166 | } 167 | 168 | /** 169 | * Find all instances matching the query. 170 | * @param query Mongodb-style filter query. 171 | * @param options Additional options to control query operation. 172 | */ 173 | find( 174 | query?: Query & Instance> 175 | ): Result & Instance, string> { 176 | // TODO: Fix up these messy types 177 | return this.table.find(query) as Result & Instance, string>; 178 | } 179 | 180 | /** 181 | * Find the first instance matching the query 182 | * @param query Mongodb-style filter query. 183 | * @param options Additional search options. 184 | * @note This is not available on the Go API. 185 | */ 186 | findOne( 187 | query?: Query & Instance> 188 | ): Promise<(Document & Instance) | undefined> { 189 | // TODO: Fix up these messy types 190 | // FIXME: We don't have tests for this method yet 191 | return this.table.findOne(query) as Promise< 192 | (Document & Instance) | undefined 193 | >; 194 | } 195 | 196 | /** 197 | * Count all instances matching the query. 198 | * @param query Mongodb-style filter query. 199 | * @param options Additional search options. 200 | * @note This is not available on the Go API. 201 | */ 202 | count(query?: Query>): Promise { 203 | return this.table.count(query); 204 | } 205 | } 206 | -------------------------------------------------------------------------------- /src/local/db.spec.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/no-non-null-assertion */ 2 | import { expect } from "chai"; 3 | import { Database } from "./db"; 4 | import { ulid } from "ulid"; 5 | import Dexie from "dexie"; 6 | import { personSchema, shouldHaveThrown } from "../utils/spec.utils"; 7 | 8 | const databaseName = "database"; 9 | 10 | describe("database", function () { 11 | describe("construction and init", async function () { 12 | let db: Database; 13 | 14 | afterEach(function () { 15 | // Expect db.close to close the db 16 | db.close(); 17 | }); 18 | 19 | after(async function () { 20 | // Expect db.delete to delete the db 21 | await db.delete(); 22 | }); 23 | 24 | it("should only allow version incrementing", async function () { 25 | // Create basic db with two collections, one being empty 26 | db = new Database(databaseName); 27 | await db.open(2); 28 | expect(db.verno).to.equal(2); 29 | db.close(); 30 | db = new Database( 31 | databaseName, 32 | { 33 | name: "Person", 34 | schema: personSchema, 35 | indexes: [ 36 | { path: "name", unique: true }, 37 | { path: "age", unique: false }, 38 | ], 39 | }, 40 | { 41 | name: "Empty", 42 | } 43 | ); 44 | try { 45 | await db.open(1); 46 | throw shouldHaveThrown; 47 | } catch (err) { 48 | // TODO: Return a nicer error here 49 | expect(err.toString()).to.include("VersionError"); 50 | } 51 | db.close(); 52 | // We delete it here because otherwise, we'll have issues with our version numbers later 53 | await db.delete(); 54 | }); 55 | 56 | it("should not have a db id until the remote has been set and initialized", function () { 57 | // Create basic db with two collections, one being empty 58 | db = new Database( 59 | databaseName, 60 | { 61 | name: "Person", 62 | schema: personSchema, 63 | indexes: [ 64 | { path: "name", unique: true }, 65 | { path: "age", unique: false }, 66 | ], 67 | }, 68 | { 69 | name: "Empty", 70 | } 71 | ); 72 | // Don't open it yet! 73 | expect(db.id).to.be.undefined; 74 | }); 75 | 76 | it("should be able to pre-define collections", async function () { 77 | // Create basic db with two collections, one being empty 78 | db = new Database( 79 | databaseName, 80 | { 81 | name: "Person", 82 | schema: personSchema, 83 | indexes: [ 84 | { path: "name", unique: true }, 85 | { path: "age", unique: false }, 86 | ], 87 | }, 88 | { 89 | name: "Empty", 90 | } 91 | ); 92 | await db.open(); 93 | expect(db.collections().size).to.equal(2); 94 | const collection = db.collection("Person"); 95 | expect(collection).to.not.be.undefined; 96 | // Low level check 97 | expect(db.dexie.table("Person").schema.indexes).to.have.lengthOf(2); 98 | expect(db.dexie.table("Person").schema.primKey).to.have.ownProperty( 99 | "name", 100 | "_id" 101 | ); 102 | }); 103 | 104 | it("should be able to define collection configs prior to opening the db", async function () { 105 | // Create basic db with two collections, one being empty 106 | db = new Database(databaseName); 107 | // Chain the collection config calls... 108 | db.collectionConfig({ 109 | name: "Person", 110 | schema: personSchema, 111 | indexes: [ 112 | { path: "name", unique: true }, 113 | { path: "age", unique: false }, 114 | ], 115 | }).collectionConfig({ 116 | name: "Empty", 117 | }); 118 | //Now we open it and check 119 | await db.open(); 120 | expect(db.collections().size).to.equal(2); 121 | const collection = db.collection("Person"); 122 | expect(collection).to.not.be.undefined; 123 | // Actually write something to cause data to be persisted 124 | await collection?.insert({ name: "toddler", age: 4 }); 125 | expect(await collection?.count()).to.equal(1); 126 | // Low level check 127 | expect(db.dexie.table("Person").schema.indexes).to.have.lengthOf(2); 128 | expect(db.dexie.table("Person").schema.primKey).to.have.ownProperty( 129 | "name", 130 | "_id" 131 | ); 132 | }); 133 | }); 134 | 135 | describe("methods", async function () { 136 | let db: Database; 137 | 138 | before(async function () { 139 | // Create basic db with two collections, one being empty 140 | db = new Database( 141 | databaseName, 142 | { 143 | name: "Person", 144 | schema: personSchema, 145 | indexes: [ 146 | { path: "name", unique: true }, 147 | { path: "age", unique: false }, 148 | ], 149 | }, 150 | { 151 | name: "Empty", 152 | } 153 | ); 154 | await db.open(); 155 | }); 156 | 157 | after(async function () { 158 | // Expect db.delete to delete the db 159 | db.close(); 160 | await db.delete(); 161 | }); 162 | 163 | it("should have monotonically increasing ulid ids", async function () { 164 | const collection = db.collection("Person"); 165 | const first = ulid(); 166 | // Now just wait a sec here... 167 | await new Promise((resolve) => setTimeout(resolve, 100)); 168 | const obj = collection?.create({ name: "baby", age: 2 }); 169 | // base32-encoded 26-character string representing 128 bytes 170 | expect(obj?._id).to.have.lengthOf(26); 171 | // Should work down the the millisecond... 172 | expect(first <= obj!._id).to.equal(true); 173 | }); 174 | 175 | it("should be able to list local collections", function () { 176 | const collections = db.collections(); 177 | expect(collections.size).to.equal(2); 178 | expect([...collections.keys()]).to.deep.equal(["Person", "Empty"]); 179 | }); 180 | 181 | it("should be able to list local collections even if we lose them", function () { 182 | (db as any).collectionMap.clear(); 183 | const collections = db.collections(); 184 | expect(collections.size).to.equal(2); 185 | expect([...collections.keys()]).to.deep.equal(["Person", "Empty"]); 186 | }); 187 | 188 | it("should be able to get a specific collection by name", function () { 189 | const collection = db.collection("Person"); 190 | expect(collection?.name).to.equal("Person"); 191 | // expect(collection?.schema).to.deep.equal(personSchema); 192 | // Should throw on missing collections 193 | try { 194 | // eslint-disable-next-line @typescript-eslint/no-unused-vars 195 | db.collection("Missing"); 196 | throw shouldHaveThrown; 197 | } catch (err) { 198 | expect(err).to.be.instanceOf(Dexie.InvalidTableError); 199 | } 200 | }); 201 | 202 | it("should be able to get specific collection, even by re-building cache", function () { 203 | (db as any).collectionMap.clear(); 204 | const collection = db.collection("Person"); 205 | expect(collection?.name).to.equal("Person"); 206 | }); 207 | }); 208 | }); 209 | -------------------------------------------------------------------------------- /src/local/db.ts: -------------------------------------------------------------------------------- 1 | import { Collection, CollectionConfig } from "./collection"; 2 | import { createIndexString, NewDexie } from "../utils"; 3 | import { Remote } from "../remote"; 4 | import { Dexie, Table } from "dexie"; 5 | 6 | export class Database { 7 | readonly dexie: Dexie; 8 | readonly remote: Remote; 9 | private collectionMap: Map = new Map(); 10 | private pendingSchemas: CollectionConfig[] = []; 11 | 12 | /** 13 | * Create a new local db instance. 14 | * @param name The name for db persistence. 15 | * @param collections A (variadic) list of collection configs. 16 | * @see {@link CollectionConfig } for details on collection configuration options. 17 | */ 18 | constructor(name: string, ...collections: CollectionConfig[]) { 19 | this.dexie = NewDexie(name); 20 | this.remote = new Remote(this.dexie); // Always start with defaults 21 | collections.forEach((collection) => this.pendingSchemas.push(collection)); 22 | } 23 | 24 | /** 25 | * Base32-encoded string representation of the db's thread id. 26 | */ 27 | get id(): string | undefined { 28 | return this.remote.id; 29 | } 30 | 31 | /** 32 | * Open the local db for reads/writes. 33 | */ 34 | async open(version = 1): Promise { 35 | if (!this.dexie.isOpen()) { 36 | // First, define our stores/indexes 37 | let stores = []; 38 | if (this.pendingSchemas.length) { 39 | const specs = this.pendingSchemas.map((config) => { 40 | const indexes = [ 41 | { path: "_id", uuid: false, auto: true }, // Always include _id as uuid 42 | ...(config.indexes ?? []), 43 | ] 44 | .map((index) => createIndexString(index)) 45 | .join(","); 46 | return [config.name, indexes]; 47 | }); 48 | stores = Object.fromEntries(specs); 49 | } 50 | // TODO: Can we "skip" the version thing? 51 | this.dexie.version(version).stores(stores); 52 | // Try to open the dexie store, if we don't have the right version here, consider version++ 53 | await this.dexie.open(); 54 | 55 | // Now we have our table specs... time to populate our collections 56 | for (const collection of this.pendingSchemas) { 57 | // Should always be the case 58 | if (!this.collectionMap.has(collection.name)) { 59 | // If we didn't create this table yet, this will throw 60 | const table = this.dexie.table(collection.name); 61 | // Set the internal schema for this table, which will be used elsewhere 62 | await table.setSchema(collection.schema); 63 | // Add it to our collections map for faster reference 64 | this.collectionMap.set(collection.name, new Collection(table)); 65 | } 66 | } 67 | this.pendingSchemas = []; 68 | } 69 | return this; 70 | } 71 | 72 | /** 73 | * Close the local db to reads/writes. 74 | */ 75 | close(): void { 76 | return this.dexie.close(); 77 | } 78 | 79 | /** 80 | * Delete the local db and its persistent storage. 81 | */ 82 | delete(): Promise { 83 | return this.dexie.delete(); 84 | } 85 | 86 | /** 87 | * Get the current local db version number. 88 | * This is a non-ordered integer hash of the stringified input indexes. 89 | * It is used for uniqueness. 90 | */ 91 | get verno(): number { 92 | return this.dexie.verno; 93 | } 94 | 95 | /** 96 | * Helper method to push additional collection configs to pending schemas list. 97 | * This may be called multiple times, but _must_ be called _before_ opening the db. 98 | * @param config A collection config to add to the internal list. 99 | */ 100 | collectionConfig(config: CollectionConfig): this { 101 | this.pendingSchemas.push(config); 102 | return this; 103 | } 104 | 105 | /** 106 | * Get an existing local collection. 107 | * @param name The name of the collection. 108 | */ 109 | collection(name: string): Collection | undefined { 110 | let collection = this.collectionMap.get(name); 111 | if (collection !== undefined) { 112 | return collection as Collection; 113 | } 114 | const table: Dexie.Table = this.dexie.table(name); 115 | collection = new Collection(table); 116 | this.collectionMap.set(name, collection); 117 | return collection as Collection; 118 | } 119 | 120 | /** 121 | * Returns all local collections by name. 122 | */ 123 | collections(): Map { 124 | const tables: Table[] = this.dexie.tables.filter( 125 | (table) => !table.name.startsWith("_") 126 | ); 127 | for (const table of tables) { 128 | if (!this.collectionMap.has(table.name)) { 129 | this.collectionMap.set(table.name, new Collection(table)); 130 | } 131 | } 132 | return this.collectionMap; 133 | } 134 | } 135 | -------------------------------------------------------------------------------- /src/local/document.spec.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/no-non-null-assertion */ 2 | import { expect } from "chai"; 3 | import { NewDexie } from "../utils"; 4 | import { DocumentInstanceClassFactory } from "./document"; 5 | 6 | const databaseName = "document"; 7 | 8 | describe("document", function () { 9 | const dexie = NewDexie(databaseName); 10 | 11 | after(async function () { 12 | dexie.close(); 13 | await dexie.delete(); 14 | }); 15 | 16 | describe("basic", async function () { 17 | before(async function () { 18 | // Super low-level access 19 | dexie.version(1).stores({ things: "++_id,thing" }); 20 | }); 21 | 22 | it("should create a (constructable) document class", async function () { 23 | const Cls = DocumentInstanceClassFactory(dexie.table("things")); 24 | try { 25 | new Cls(); 26 | } catch (err) { 27 | throw new Error("should be constructable"); 28 | } 29 | }); 30 | 31 | it("should create a valid class instance with core methods and expected properties", async function () { 32 | const Cls = DocumentInstanceClassFactory(dexie.table("things")); 33 | const instance = new Cls<{ name: string; age: number }>({ 34 | name: "Lucas", 35 | age: 99, 36 | }); 37 | expect(instance.age).to.equal(99); 38 | expect(instance.name).to.equal("Lucas"); 39 | expect(instance._id).to.not.be.undefined; 40 | expect(instance.exists).to.not.be.undefined; 41 | }); 42 | }); 43 | }); 44 | -------------------------------------------------------------------------------- /src/local/document.ts: -------------------------------------------------------------------------------- 1 | import { Table } from "dexie"; 2 | import { JSONType } from "../middleware/schemas"; 3 | import { ulid } from "ulid"; 4 | 5 | /** 6 | * Document is any JSON object with an _id field. 7 | * It can be operated on directly, and its updates should be reflected in its saved state. 8 | */ 9 | export type Document = T & { 10 | _id: string; 11 | }; 12 | 13 | /** 14 | * Instance is a document with methods on it. 15 | */ 16 | export interface Instance { 17 | save(): Promise; 18 | remove(): Promise; 19 | exists(): Promise; 20 | toJSON(): JSONType; 21 | } 22 | 23 | /** 24 | * DocumentInstanceConstructor is an object that can be used to create new DocumentInstances. 25 | */ 26 | export interface DocumentInstanceConstructor { 27 | new (data?: Partial): Document & Instance; 28 | } 29 | 30 | /** 31 | * Create new DocumentInstances within a given collection/table. 32 | * @param table Input dexie-compatible table. 33 | */ 34 | export function DocumentInstanceClassFactory( 35 | table: Table 36 | ): DocumentInstanceConstructor { 37 | /** 38 | * DocumentInstance is a document and a reference to its underlying collection. 39 | */ 40 | const cls = class DocumentInstance { 41 | _id!: string; 42 | 43 | constructor(data: Partial = {}) { 44 | // Spread on data should override existing _id if provided 45 | return Object.assign(this, { _id: ulid(), ...data }); 46 | } 47 | 48 | /** 49 | * Save this instance to its parent collection. 50 | */ 51 | save(): Promise { 52 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 53 | return table.put({ ...this } as any); 54 | } 55 | 56 | /** 57 | * Remove this instance (by id) from its parent collection. 58 | */ 59 | remove(): Promise { 60 | return table.delete(this._id); 61 | } 62 | 63 | /** 64 | * Check if this instance (by id) exists in its parent collection. 65 | */ 66 | async exists(): Promise { 67 | return (await table.get(this._id)) !== undefined; 68 | } 69 | 70 | /** 71 | * Get a JSON representation of this instance. 72 | */ 73 | toJSON(): JSONType { 74 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 75 | return { ...(this as any) }; 76 | } 77 | }; 78 | return cls as DocumentInstanceConstructor; 79 | } 80 | -------------------------------------------------------------------------------- /src/middleware/changes/changes.spec.ts: -------------------------------------------------------------------------------- 1 | import Dexie from "dexie"; 2 | import setGlobalVars from "indexeddbshim"; 3 | const { indexedDB, IDBKeyRange } = setGlobalVars({}, { checkOrigin: false }); 4 | import { changesAddon, ChangeTableName } from "."; 5 | import { expect } from "chai"; 6 | 7 | const databaseName = "changes"; 8 | 9 | describe("changes middleware", function () { 10 | let db: Dexie; 11 | before(async function () { 12 | db = new Dexie(databaseName, { 13 | indexedDB, 14 | IDBKeyRange, 15 | addons: [...Dexie.addons, changesAddon], 16 | }); 17 | 18 | db.version(1).stores({ 19 | friends: "++id,name,shoeSize,address.city", 20 | other: "++id", 21 | }); 22 | 23 | expect(db.table(ChangeTableName)).to.not.be.undefined; 24 | 25 | await db.open(); 26 | }); 27 | 28 | after(async function () { 29 | db.close(); 30 | await db.delete(); 31 | }); 32 | 33 | it("should work", async function () { 34 | const friends = db.table("friends"); 35 | // Change 1 36 | await friends.put({ 37 | id: "test", 38 | name: "dev", 39 | shoeSize: 1, 40 | address: { 41 | city: "victoria", 42 | }, 43 | }); 44 | 45 | expect(await friends.count()).to.equal(1); 46 | 47 | await db.transaction("readwrite", ["friends"], async (tx) => { 48 | // Mask out reference to friends above 49 | const friends = tx.table("friends"); 50 | const friend = await friends.get({ name: "dev" }); 51 | ++friend.shoeSize; 52 | // Change 2 53 | await friends.put(friend); 54 | await db.transaction("readwrite", friends, async (tx) => { 55 | // Change 3 & 4 56 | // id is the id of the last add ("blah") 57 | const id = await friends.bulkAdd([ 58 | { 59 | id: "steve", 60 | name: "steve", 61 | shoeSize: 99, 62 | address: { 63 | city: "nothing", 64 | }, 65 | }, 66 | { 67 | id: "blah", 68 | name: "guy", 69 | shoeSize: 88, 70 | address: { 71 | city: "unknown", 72 | }, 73 | }, 74 | ]); 75 | expect(await friends.count()).to.equal(3); 76 | const friend = await friends.get(id); 77 | friend.name = "other"; 78 | // Change 5 79 | await friends.put(friend); 80 | // Still 3 because we're just updating 81 | expect(await friends.count()).to.equal(3); 82 | // Change 6 83 | await friends.delete(id); 84 | }); 85 | }); 86 | 87 | // Should be back down to 2 again 88 | expect(await friends.count()).to.equal(2); 89 | // Low level access to changes, which was automatically added to the 90 | // above transactions behind the scenes 91 | const changes = db.table(ChangeTableName); 92 | const array = await changes.find().toArray(); 93 | expect(array).to.have.lengthOf(6); 94 | }); 95 | }); 96 | -------------------------------------------------------------------------------- /src/middleware/changes/index.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Dexie add-on that provides DB-level change tracking. 3 | * https://gist.github.com/medmunds/17c331c694ae00ce072ff642619f473b 4 | */ 5 | import jsonpatch, { Operation } from "fast-json-patch"; 6 | import Dexie, { 7 | DBCoreAddRequest, 8 | DBCoreDeleteRequest, 9 | DBCoreIndex, 10 | DBCoreMutateRequest, 11 | DBCoreMutateResponse, 12 | DBCorePutRequest, 13 | DBCoreTable, 14 | Middleware, 15 | DBCore, 16 | } from "dexie"; 17 | import { 18 | initOverrideCreateTransaction, 19 | initOverrideParseStoreSpec, 20 | } from "../overrides"; 21 | 22 | export const ChangeTableName = "_changes"; 23 | export const StashTableName = "_stash"; 24 | export const MetaTableName = "_meta"; 25 | 26 | export interface Change { 27 | name: string; 28 | key: string; 29 | type: "put" | "delete" | "add"; 30 | ops: Operation[]; 31 | before: T | undefined; 32 | after: T | undefined; 33 | } 34 | 35 | const isEmpty = (obj: any) => 36 | Object.keys(obj).length === 0 && obj.constructor === Object; 37 | 38 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 39 | const zip = (arr1: T[], arr2: R[], fill: any = undefined) => { 40 | // Missing values should be undefined 41 | return arr1.map((k, i) => [k, arr2[i] ?? fill]); 42 | }; 43 | 44 | export function createTrackedChangesMiddleware(core: DBCore): DBCore { 45 | return { 46 | ...core, 47 | // transaction(req: DBCoreTransactionRequest): DBCoreTransaction { 48 | // return dbCore.transaction(req); 49 | // }, 50 | table(name: string): DBCoreTable { 51 | const table = core.table(name); 52 | // Utility tables start with _ 53 | // TODO: Is this too simplistic a rule? 54 | if (name.startsWith("_")) return table; 55 | return { 56 | ...table, 57 | async mutate(req: DBCoreMutateRequest): Promise { 58 | // Shortcut for utility tables 59 | if (name.startsWith("_")) return table.mutate(req); 60 | if (req.type === "deleteRange") { 61 | // The only type we don't handle right now is "deleteRange". 62 | // If we needed "deleteRange" tracking, we could probably enumerate 63 | // the existing keys in the range, and then continue as below? 64 | // For now, we just don't track this change. So we'll never send this information 65 | // to the remote... it is always considered a "local" operation. 66 | return table.mutate(req); 67 | // throw new Error(`Cannot handle ${req.type} operation`); 68 | } 69 | // Extract primary key to check for auto-incrementing 70 | const { primaryKey } = table.schema; 71 | // Things change slightly if we're auto-incrementing 72 | // TODO: We might not need to worry about this given our use of ulid 73 | const autoIncrement = Boolean(primaryKey.autoIncrement); 74 | // TODO: Does this need to run in a special promise (like hooks middleware uses)? 75 | let keys = req.keys || getEffectiveKeys(primaryKey, req); 76 | // Extract the state of things _before_ this mutation 77 | let before = await getExistingValues(table, req, keys); 78 | // If we're auto-incrementing and adding things, we want results 79 | if (autoIncrement && (req.type === "add" || req.type === "put")) { 80 | req = { ...req, wantResults: true }; 81 | } 82 | // Do the default mutation 83 | const response = await table.mutate(req); 84 | // Check that we do indeed have our results 85 | if (autoIncrement && (req.type === "add" || req.type === "put")) { 86 | if (response.results === undefined) { 87 | throw new Error( 88 | `autoIncrement keys not returned from ${req.type} mutation` 89 | ); 90 | } 91 | // Keys can be derived from results if we're auto-incrementing 92 | keys = response.results; 93 | } 94 | // TODO: This should be optimized (just use undefined[] for deletes, 95 | // use req.values for add/put if not auto-incrementing, etc.) 96 | // NOTE: The auto-increment thing might not be an issue, in which 97 | // case, we could just compute these after the fact using 98 | // `jsonpatch.applyPatch` 99 | let after = await table.getMany({ trans: req.trans, keys }); 100 | // Compute json-patch ops... 101 | // We pair up before with our requested values, this is either 102 | // nothing (delete) or the new state 103 | let ops = zip( 104 | before, 105 | req.type === "delete" ? [] : req.values, 106 | {} // Fill with {} if missing 107 | ).map(([prev, next]) => jsonpatch.compare(prev, next)); 108 | // Check for any failures 109 | const { failures, numFailures } = response; 110 | // If we have some, we need to filter out the results 111 | // TODO: Clean all this up to avoid the multiple loops etc 112 | if (numFailures > 0) { 113 | // Filter out failed items 114 | before = before.filter((_obj, i) => !failures[i]); 115 | keys = keys.filter((_key, i) => !failures[i]); 116 | ops = ops.filter((_obj, i) => !failures[i]); 117 | after = after.filter((_obj, i) => !failures[i]); 118 | } 119 | // If we still have some changes, we'll want to compute the diffs 120 | if (keys.length > 0) { 121 | // Create the changes values, which will always be a list of 1 122 | const values: Change[] = []; 123 | for (let i = 0; i < keys.length; i++) { 124 | const b = before[0]; 125 | // If "putting" here, but is new object, should actually "add" it, otherwise, leave 126 | const type = req.type === "put" && isEmpty(b) ? "add" : req.type; 127 | values.push({ 128 | name, 129 | type, 130 | key: keys[i], 131 | before: b, 132 | ops: ops[i], 133 | after: after[i], 134 | }); 135 | } 136 | // Grab a reference to our change table for updating 137 | const changes = core.table(ChangeTableName); 138 | // Create the update object, which will always be an "add" op 139 | const update: DBCoreAddRequest = { 140 | type: "add", 141 | trans: req.trans, 142 | values, 143 | }; 144 | // Mutate the changes table (within the same transaction) 145 | await changes.mutate(update); 146 | } 147 | // This contains our modified response (mostly un-touched) 148 | return response; 149 | }, 150 | }; 151 | }, 152 | }; 153 | } 154 | 155 | // These DBCore helpers aren't exported by Dexie; borrowed directly from: 156 | // https://github.com/dfahlander/Dexie.js/blob/v3.0.1/src/dbcore/get-effective-keys.ts 157 | 158 | function getEffectiveKeys( 159 | primaryKey: DBCoreIndex, 160 | req: 161 | | (Pick & { 162 | keys?: any[]; 163 | }) 164 | | Pick 165 | ) { 166 | if (req.type === "delete") return req.keys; 167 | return req.keys || req.values.map(primaryKey.extractKey); 168 | } 169 | 170 | function getExistingValues( 171 | table: DBCoreTable, 172 | req: DBCoreAddRequest | DBCorePutRequest | DBCoreDeleteRequest, 173 | effectiveKeys: unknown[] 174 | ) { 175 | return req.type === "add" 176 | ? Promise.resolve(new Array(req.values.length).fill({})) 177 | : table 178 | .getMany({ trans: req.trans, keys: effectiveKeys }) 179 | .then((values) => values.map((obj) => (obj === undefined ? {} : obj))); 180 | } 181 | 182 | /** 183 | * ChangesMiddleware object 184 | */ 185 | export const changesMiddleware: Middleware = { 186 | stack: "dbcore", 187 | name: "ChangesMiddleware", 188 | create: createTrackedChangesMiddleware, 189 | }; 190 | 191 | /** 192 | * ChangesAddon function 193 | */ 194 | export function changesAddon(db: Dexie): void { 195 | // Override creating a new transaction. This adds the changes table 196 | // to all transactions 197 | const overrideCreateTransaction = initOverrideCreateTransaction( 198 | db, 199 | ChangeTableName 200 | ); 201 | db._createTransaction = Dexie.override( 202 | db._createTransaction, 203 | overrideCreateTransaction 204 | ); 205 | 206 | // Override parsing the stores to add changes, stash, and meta data tables. 207 | const overrideParseStoresSpec = initOverrideParseStoreSpec({ 208 | [ChangeTableName]: "++id,name", 209 | [StashTableName]: "++id,name", 210 | [MetaTableName]: "&key", 211 | }); 212 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 213 | (db.Version.prototype as any)._parseStoresSpec = Dexie.override( 214 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 215 | (db.Version.prototype as any)._parseStoresSpec, 216 | overrideParseStoresSpec 217 | ); 218 | // Enable the middleware so we don't have to do it manually 219 | db.use(changesMiddleware); 220 | } 221 | -------------------------------------------------------------------------------- /src/middleware/mongo/index.ts: -------------------------------------------------------------------------------- 1 | // This is a pure side-effects addon :( 2 | import "dexie-mongoify"; 3 | import { FilterQuery as Query } from "./query"; 4 | 5 | // Export here for callers 6 | export { Query }; 7 | 8 | // Module augmentation to add methods to Dexie's default instance 9 | declare module "dexie" { 10 | interface DeleteResult { 11 | result: { 12 | ok: number; 13 | n: number; 14 | }; 15 | deletedCount: number; 16 | } 17 | 18 | interface InsertResult { 19 | insertedCount: number; 20 | insertedId: string; 21 | ops: T[]; 22 | result: { 23 | ok: number; 24 | n: number; 25 | }; 26 | } 27 | 28 | interface UpdateResult { 29 | result: { 30 | ok: 1; 31 | nModified: number; 32 | }; 33 | modifiedCount: number; 34 | upsertedCount: number; 35 | upsertedId: string | null; 36 | } 37 | 38 | interface Table { 39 | count(query?: Query): PromiseExtended; 40 | find(query?: Query): Collection; 41 | findOne(query?: Query): PromiseExtended; 42 | insert(item: T): PromiseExtended | never>; 43 | remove(query?: Query): PromiseExtended; 44 | drop(): PromiseExtended; 45 | update( 46 | query: Query, 47 | update: T, 48 | options?: { 49 | upsert?: boolean; 50 | $set?: any; 51 | $addToSet?: any; 52 | $push?: any; 53 | } 54 | ): PromiseExtended; 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /src/middleware/mongo/mongo.spec.ts: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/textileio/js-threaddb/59cf70f593f9d5266cd4aca1e4655952aef78569/src/middleware/mongo/mongo.spec.ts -------------------------------------------------------------------------------- /src/middleware/mongo/query.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/no-explicit-any */ 2 | // From https://github.com/DefinitelyTyped/DefinitelyTyped/blob/master/types/mongodb/index.d.ts 3 | 4 | /** Update Query */ 5 | type KeysOfAType = { 6 | [key in keyof TSchema]: NonNullable extends Type ? key : never; 7 | }[keyof TSchema]; 8 | type KeysOfOtherType = { 9 | [key in keyof TSchema]: NonNullable extends Type ? never : key; 10 | }[keyof TSchema]; 11 | 12 | type AcceptedFields = { 13 | readonly [key in KeysOfAType]?: AssignableType; 14 | }; 15 | 16 | /** It avoid uses fields of non Type */ 17 | type NotAcceptedFields = { 18 | readonly [key in KeysOfOtherType]?: never; 19 | }; 20 | 21 | type DotAndArrayNotation = { 22 | readonly [key: string]: AssignableType; 23 | }; 24 | 25 | type ReadonlyPartial = { 26 | readonly [key in keyof TSchema]?: TSchema[key]; 27 | }; 28 | 29 | export type OnlyFieldsOfType< 30 | TSchema, 31 | FieldType = any, 32 | AssignableType = FieldType 33 | > = AcceptedFields & 34 | NotAcceptedFields & 35 | DotAndArrayNotation; 36 | 37 | export type MatchKeysAndValues = ReadonlyPartial & 38 | DotAndArrayNotation; 39 | 40 | type Unpacked = Type extends Array ? Element : Type; 41 | 42 | // type UpdateOptionalId = T extends { _id?: any } ? OptionalId : T; 43 | type UpdateOptionalId = T extends { _id?: any } ? T : T; 44 | 45 | export type SortValues = -1 | 1; 46 | 47 | export type AddToSetOperators = { 48 | $each: Type; 49 | }; 50 | 51 | export type ArrayOperator = { 52 | // $each: Type; 53 | $slice?: number; 54 | // $position?: number; 55 | $sort?: SortValues | Record; 56 | }; 57 | 58 | export type SetFields = ({ 59 | readonly [key in KeysOfAType]?: 60 | | UpdateOptionalId> 61 | | AddToSetOperators>>>; 62 | } & 63 | NotAcceptedFields) & { 64 | readonly [key: string]: AddToSetOperators | any; 65 | }; 66 | 67 | export type PushOperator = ({ 68 | readonly [key in KeysOfAType]?: 69 | | UpdateOptionalId> 70 | | ArrayOperator>>>; 71 | } & 72 | NotAcceptedFields) & { 73 | readonly [key: string]: ArrayOperator | any; 74 | }; 75 | 76 | export type PullOperator = ({ 77 | readonly [key in KeysOfAType]?: 78 | | Partial> 79 | | ObjectQuerySelector>; 80 | } & 81 | NotAcceptedFields) & { 82 | readonly [key: string]: QuerySelector | any; 83 | }; 84 | 85 | export type PullAllOperator = ({ 86 | readonly [key in KeysOfAType]?: TSchema[key]; 87 | } & 88 | NotAcceptedFields) & { 89 | readonly [key: string]: any[]; 90 | }; 91 | 92 | /** https://docs.mongodb.com/manual/reference/operator/update */ 93 | export type UpdateQuery = { 94 | /** https://docs.mongodb.com/manual/reference/operator/update-field/ */ 95 | // $currentDate?: OnlyFieldsOfType< 96 | // TSchema, 97 | // Date, 98 | // true | { $type: "date" | "timestamp" } 99 | // >; 100 | $inc?: OnlyFieldsOfType; 101 | $mul?: OnlyFieldsOfType; 102 | $rename?: { [key: string]: string }; 103 | $set?: MatchKeysAndValues; 104 | $unset?: OnlyFieldsOfType; 105 | $min?: MatchKeysAndValues; 106 | $max?: MatchKeysAndValues; 107 | /** https://docs.mongodb.com/manual/reference/operator/update-array/ */ 108 | $addToSet?: SetFields; 109 | $pop?: OnlyFieldsOfType; 110 | $push?: PushOperator; 111 | $pull?: PullOperator; 112 | $pullAll?: PullAllOperator; 113 | }; 114 | 115 | // we can search using alternative types in mongodb e.g. 116 | // string types can be searched using a regex in mongo 117 | // array types can be searched using their element type 118 | type RegExpForString = T extends string ? RegExp | T : T; 119 | type MongoAltQuery = T extends Array 120 | ? T | RegExpForString 121 | : RegExpForString; 122 | 123 | /** https://docs.mongodb.com/manual/reference/operator/query/#query-selectors */ 124 | export type QuerySelector = { 125 | // Comparison 126 | $eq?: T; 127 | $gt?: T; 128 | $gte?: T; 129 | $in?: T[]; 130 | $lt?: T; 131 | $lte?: T; 132 | $ne?: T; 133 | $nin?: T[]; 134 | // Logical 135 | $not?: T extends string ? QuerySelector | RegExp : QuerySelector; 136 | // Element 137 | /** 138 | * When `true`, `$exists` matches the documents that contain the field, 139 | * including documents where the field value is null. 140 | */ 141 | $exists?: boolean; 142 | // $type?: BSONType | BSONTypeAlias; 143 | // Evaluation 144 | // $expr?: any; 145 | // $jsonSchema?: any; 146 | // $mod?: T extends number ? [number, number] : never; 147 | // $regex?: T extends string ? RegExp | string : never; 148 | // $options?: T extends string ? string : never; 149 | // // Array 150 | // // TODO: define better types for $all and $elemMatch 151 | $all?: T extends Array ? any[] : never; 152 | $elemMatch?: T extends Array ? Record : never; 153 | $size?: T extends Array ? number : never; 154 | }; 155 | 156 | export type RootQuerySelector = { 157 | /** https://docs.mongodb.com/manual/reference/operator/query/and/#op._S_and */ 158 | $and?: Array>; 159 | /** https://docs.mongodb.com/manual/reference/operator/query/nor/#op._S_nor */ 160 | $nor?: Array>; 161 | /** https://docs.mongodb.com/manual/reference/operator/query/or/#op._S_or */ 162 | $or?: Array>; 163 | /** https://docs.mongodb.com/manual/reference/operator/query/text */ 164 | // $text?: { 165 | // $search: string; 166 | // $language?: string; 167 | // $caseSensitive?: boolean; 168 | // $diacraticSensitive?: boolean; 169 | // }; 170 | // /** https://docs.mongodb.com/manual/reference/operator/query/where/#op._S_where */ 171 | // $where?: string | Function; 172 | // /** https://docs.mongodb.com/manual/reference/operator/query/comment/#op._S_comment */ 173 | // $comment?: string; 174 | // we could not find a proper TypeScript generic to support nested queries e.g. 'user.friends.name' 175 | // this will mark all unrecognized properties as any (including nested queries) 176 | // [key: string]: any; 177 | }; 178 | 179 | export type ObjectQuerySelector = T extends Record 180 | ? { [key in keyof T]?: QuerySelector } 181 | : QuerySelector; 182 | 183 | export type Condition = MongoAltQuery | QuerySelector>; 184 | 185 | export type FilterQuery = { 186 | [P in keyof T]?: Condition; 187 | } & 188 | RootQuerySelector; 189 | -------------------------------------------------------------------------------- /src/middleware/overrides.ts: -------------------------------------------------------------------------------- 1 | import type { Dexie, DbSchema, Transaction, Version } from "dexie"; 2 | 3 | export function initOverrideCreateTransaction(db: Dexie, ...names: string[]) { 4 | return function overrideCreateTransaction( 5 | origFunc: typeof db._createTransaction 6 | ) { 7 | return function ( 8 | this: Dexie, 9 | mode: IDBTransactionMode, 10 | storenames: Array, 11 | dbschema: DbSchema, 12 | parent?: Transaction 13 | ): ReturnType { 14 | // Don't observe dynamically opened databases. 15 | if (db.dynamicallyOpened()) 16 | // eslint-disable-next-line prefer-rest-params,@typescript-eslint/no-explicit-any 17 | return origFunc.apply(this, (arguments as unknown) as any); 18 | if ( 19 | mode === "readwrite" && 20 | // Lame, but it should be fine for our purposes 21 | storenames.some((name) => !name.startsWith("_")) 22 | ) { 23 | // Make sure to also include the changes store. 24 | storenames = storenames.slice(0); // Clone 25 | // Otherwise, firefox will hang... 26 | for (const name of names) { 27 | if (storenames.indexOf("name") === -1) storenames.push(name); 28 | } 29 | } 30 | // Call original db._createTransaction() 31 | const trans = origFunc.call(this, mode, storenames, dbschema, parent); 32 | return trans; 33 | }; 34 | }; 35 | } 36 | 37 | export function initOverrideParseStoreSpec(extras: { 38 | [tableName: string]: string | null; 39 | }) { 40 | return function overrideParseStoresSpec( 41 | origFunc: ( 42 | stores: { [tableName: string]: string | null }, 43 | outSchema: DbSchema 44 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 45 | ) => any 46 | ) { 47 | return function ( 48 | this: Version, 49 | stores: { [tableName: string]: string | null }, 50 | outSchema: DbSchema 51 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 52 | ): any { 53 | stores = { ...stores, ...extras }; 54 | // Call default implementation. Will populate the dbSchema structures. 55 | origFunc.call(this, stores, outSchema); 56 | }; 57 | }; 58 | } 59 | -------------------------------------------------------------------------------- /src/middleware/schemas/index.ts: -------------------------------------------------------------------------------- 1 | import Ajv, { ValidationError } from "ajv"; 2 | import Dexie, { 3 | DBCore, 4 | DBCoreMutateRequest, 5 | DBCoreTable, 6 | Middleware, 7 | } from "dexie"; 8 | import { JSONSchema4, JSONSchema4Type } from "json-schema"; 9 | import { 10 | initOverrideCreateTransaction, 11 | initOverrideParseStoreSpec, 12 | } from "../overrides"; 13 | 14 | export const SchemasTableName = "_schemas"; 15 | 16 | export type JSONSchema = JSONSchema4; // | JSONSchema6 | JSONSchema7; 17 | export type JSONType = JSONSchema4Type; // | JSONSchema6Type | JSONSchema7Type; 18 | 19 | declare module "dexie" { 20 | export interface Table { 21 | setSchema(schema?: JSONSchema): Promise; 22 | getSchema(): Promise; 23 | } 24 | } 25 | 26 | // TODO: Make this configurable 27 | const defaultSchema: JSONSchema = { 28 | properties: { 29 | _id: { 30 | type: "string", 31 | }, 32 | }, 33 | }; 34 | 35 | export function createSchemaMiddleware(core: DBCore): DBCore { 36 | return { 37 | ...core, 38 | table(tableName: string): DBCoreTable { 39 | const table = core.table(tableName); 40 | // Utility tables start with _ 41 | // TODO: Is this too simplistic a rule? 42 | if (tableName.startsWith("_")) return table; 43 | return { 44 | ...table, 45 | async mutate(req: DBCoreMutateRequest) { 46 | const pair = await core 47 | .table(SchemasTableName) 48 | .get({ key: tableName, trans: req.trans }); 49 | const schema: JSONSchema = pair?.schema ?? defaultSchema; 50 | const validator = new Ajv({ useDefaults: true }).compile(schema); 51 | // We only need to worry about validation when mutating data 52 | try { 53 | switch (req.type) { 54 | case "add": 55 | case "put": 56 | // Clone values to avoid mutating input request values 57 | const values = Dexie.deepClone(req.values); 58 | values.forEach((value) => { 59 | if (validator(value) === false && validator.errors) { 60 | throw new ValidationError(validator.errors); 61 | } 62 | }); 63 | // Clone request and replace with updated values 64 | req = { ...req, values }; 65 | } 66 | } catch (err) { 67 | throw err; 68 | } 69 | return table.mutate(req); 70 | }, 71 | }; 72 | }, 73 | }; 74 | } 75 | 76 | export const schemaMiddleware: Middleware = { 77 | stack: "dbcore", 78 | name: "SchemaMiddleware", 79 | create: createSchemaMiddleware, 80 | }; 81 | 82 | /** 83 | * SchemaAddon function 84 | */ 85 | export function schemaAddon(db: Dexie): void { 86 | // Enable the middleware so we don't have to do it manually 87 | db.use(schemaMiddleware); 88 | // Override creating a new transaction. This adds the schemas table 89 | // to all transactions 90 | const overrideCreateTransaction = initOverrideCreateTransaction( 91 | db, 92 | SchemasTableName 93 | ); 94 | db._createTransaction = Dexie.override( 95 | db._createTransaction, 96 | overrideCreateTransaction 97 | ); 98 | // Override parsing the stores to add schemas table. 99 | const overrideParseStoresSpec = initOverrideParseStoreSpec({ 100 | [SchemasTableName]: "&name", 101 | }); 102 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 103 | (db.Version.prototype as any)._parseStoresSpec = Dexie.override( 104 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 105 | (db.Version.prototype as any)._parseStoresSpec, 106 | overrideParseStoresSpec 107 | ); 108 | // setSchema will default to setting a wide open schema 109 | db.Table.prototype.setSchema = async function ( 110 | schema: JSONSchema = defaultSchema 111 | ) { 112 | await this.db.table(SchemasTableName).put({ name: this.name, schema }); 113 | }; 114 | // getSchema will always return a default schema, even if one doesn't exist 115 | db.Table.prototype.getSchema = async function () { 116 | const pair = await this.db.table(SchemasTableName).get({ name: this.name }); 117 | const obj = pair?.schema; 118 | if ( 119 | obj === undefined || 120 | (Object.keys(obj).length === 0 && obj.constructor === Object) 121 | ) { 122 | // Empty or undefined object 123 | return defaultSchema; 124 | } 125 | return obj; 126 | }; 127 | } 128 | -------------------------------------------------------------------------------- /src/middleware/schemas/schemas.spec.ts: -------------------------------------------------------------------------------- 1 | import Dexie from "dexie"; 2 | import setGlobalVars from "indexeddbshim"; 3 | const { indexedDB, IDBKeyRange } = setGlobalVars({}, { checkOrigin: false }); 4 | import { schemaAddon, SchemasTableName } from "."; 5 | import { personSchema, shouldHaveThrown } from "../../utils/spec.utils"; 6 | import { expect } from "chai"; 7 | 8 | const databaseName = "schema"; 9 | 10 | describe("schema middleware", function () { 11 | let db: Dexie; 12 | before(async function () { 13 | db = new Dexie(databaseName, { 14 | indexedDB, 15 | IDBKeyRange, 16 | addons: [...Dexie.addons, schemaAddon], 17 | }); 18 | 19 | db.version(1).stores({ 20 | person: "++id,name,age", 21 | }); 22 | 23 | expect(db.table(SchemasTableName)).to.not.be.undefined; 24 | 25 | await db.open(); 26 | }); 27 | 28 | after(async function () { 29 | db.close(); 30 | await db.delete(); 31 | }); 32 | 33 | it("should work", async function () { 34 | this.timeout(100000); 35 | const person = db.table("person"); 36 | // No schema yet! 37 | // Count = 1 38 | await person.put({ 39 | _id: "test", 40 | name: "dev", 41 | // age: 2, // Don't include age... 42 | extra: "would throw", // But doesn't because we haven't set schema yet 43 | }); 44 | expect(await person.count()).to.equal(1); 45 | // Now we set the schema 46 | await person.setSchema(personSchema); 47 | expect(await db.table(SchemasTableName).count()).to.equal(1); 48 | try { 49 | await db.transaction("readwrite", ["person"], async (tx) => { 50 | // Mask out reference to friends above 51 | const person = tx.table("person"); 52 | const friend = await person.get({ name: "dev" }); 53 | friend.age = undefined; // Invalid 54 | // Should not work 55 | await person.put(friend); 56 | }); 57 | throw shouldHaveThrown; 58 | } catch (err) { 59 | expect(err.toString()).to.include("validation failed"); 60 | } 61 | 62 | // Low level access to schemas 63 | const schemas = db.table(SchemasTableName); 64 | const array = await schemas.find().toArray(); 65 | expect(array).to.have.lengthOf(1); 66 | }); 67 | }); 68 | -------------------------------------------------------------------------------- /src/middleware/ulid/index.ts: -------------------------------------------------------------------------------- 1 | import { ulid } from "ulid"; 2 | import { DBCore, DBCoreMutateRequest, DBCoreTable, Middleware } from "dexie"; 3 | 4 | export function createUlidMiddleware(core: DBCore): DBCore { 5 | return { 6 | ...core, 7 | table(tableName: string): DBCoreTable { 8 | const table = core.table(tableName); 9 | // Utility tables start with _ 10 | // TODO: Is this too simplistic a rule? 11 | if (tableName.startsWith("_")) return table; 12 | return { 13 | ...table, 14 | async mutate(req: DBCoreMutateRequest) { 15 | switch (req.type) { 16 | case "add": 17 | case "put": 18 | const { primaryKey } = table.schema; 19 | // Assume "_id" should be a ulid if its autoIncrementing 20 | if (primaryKey.keyPath === "_id" && primaryKey.autoIncrement) { 21 | req.values.forEach( 22 | (value) => (value._id = value._id || ulid()) 23 | ); 24 | } 25 | break; 26 | } 27 | const res = await table.mutate(req); 28 | return res; 29 | }, 30 | }; 31 | }, 32 | }; 33 | } 34 | 35 | export const ulidMiddleware: Middleware = { 36 | stack: "dbcore", 37 | name: "UlidMiddleware", 38 | create: createUlidMiddleware, 39 | }; 40 | -------------------------------------------------------------------------------- /src/middleware/ulid/ulid.spec.ts: -------------------------------------------------------------------------------- 1 | import Dexie from "dexie"; 2 | import setGlobalVars from "indexeddbshim"; 3 | const { indexedDB, IDBKeyRange } = setGlobalVars({}, { checkOrigin: false }); 4 | import { ulidMiddleware } from "."; 5 | import { expect } from "chai"; 6 | 7 | const databaseName = "ulid"; 8 | 9 | describe("ulid middleware", function () { 10 | let db: Dexie; 11 | 12 | before(async function () { 13 | db = new Dexie(databaseName, { 14 | indexedDB, 15 | IDBKeyRange, 16 | }); 17 | db.use(ulidMiddleware); 18 | 19 | db.version(1).stores({ 20 | // Assumes auto-incrementing primary keys named _id should be ulids 21 | friends: "++_id,name,shoeSize,address.city", 22 | // But ++id (no underscore) should _not_ be a ulid 23 | others: "++id, name", 24 | }); 25 | 26 | await db.open(); 27 | }); 28 | 29 | after(async function () { 30 | db.close(); 31 | await db.delete(); 32 | }); 33 | 34 | it("should automatically create ulid ids", async function () { 35 | const [_id, id] = await db.transaction( 36 | "readwrite", 37 | ["friends", "others"], 38 | async () => { 39 | const _id = await db.table("friends").put({ 40 | name: "steve", 41 | shoeSize: 99, 42 | address: { 43 | city: "nowhere", 44 | }, 45 | }); 46 | 47 | const id = await db.table("others").put({ 48 | name: "steve", 49 | shoeSize: 0, 50 | address: { 51 | city: "somewhere", 52 | }, 53 | }); 54 | return [_id, id]; 55 | } 56 | ); 57 | 58 | const obj1 = await db.table("friends").get(_id); 59 | expect(obj1).to.not.be.undefined; 60 | expect(obj1).to.have.ownProperty("_id", _id); 61 | // base32-encoded 26-character string representing 128 bytes 62 | expect(obj1?._id).to.have.length(26); 63 | 64 | const obj2 = await db.table("others").get(id); 65 | expect(obj2.id).to.equal(1); 66 | }); 67 | }); 68 | -------------------------------------------------------------------------------- /src/remote/db.spec.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai"; 2 | import { PrivateKey } from "@textile/crypto"; 3 | import { Database } from "../local/db"; 4 | import { Errors } from "./index"; 5 | import { createDbClient } from "./grpc"; 6 | import { personSchema, shouldHaveThrown } from "../utils/spec.utils"; 7 | import ThreadID from "@textile/threads-id"; 8 | 9 | const databaseName = "remote-db"; 10 | const serviceHost = "http://localhost:6007"; 11 | 12 | describe("remote + db", function () { 13 | const privateKey = PrivateKey.fromRandom(); 14 | context("schemas and pushing to remote", async function () { 15 | this.timeout(30000); 16 | let db: Database; 17 | let id: string | undefined; 18 | 19 | after(async function () { 20 | db.close(); 21 | // Expect db.delete to delete the db 22 | await db.delete(); 23 | }); 24 | 25 | /** 26 | * Function to create a set of pre-defined udpates/changes 27 | */ 28 | async function createChanges() { 29 | const collection = db.collection("Person"); 30 | if (collection === undefined) throw new Error("should be defined"); 31 | // Create some updates 32 | await collection.insert({ name: "child", age: 4 }); 33 | await collection.insert({ name: "kid", age: 8 }); 34 | await collection.insert({ name: "teen", age: 16 }); 35 | } 36 | 37 | it("should be able to push local schemas to remote on push", async function () { 38 | this.timeout(30000); 39 | db = new Database(databaseName); 40 | // db.collectionConfig({ name: "Person", schema: personSchema }); 41 | db.remote.set({ serviceHost }); 42 | // We always need to authorize first... 43 | const token = await db.remote.authorize(privateKey); 44 | // Now open the db with NO collections 45 | await db.open(1); // First version 46 | // We'll initialize before we actually push anything 47 | id = await db.remote.initialize(); 48 | 49 | // Before we push anything, let's just check that we don't already have remote collections 50 | // Low level checks 51 | const client = createDbClient(db.remote.config); 52 | try { 53 | await client.getCollectionInfo( 54 | // eslint-disable-next-line @typescript-eslint/no-non-null-assertion 55 | ThreadID.fromString(id!), 56 | "Person" 57 | ); 58 | throw shouldHaveThrown; 59 | } catch (err) { 60 | expect(err.toString()).to.include("collection not found"); 61 | } 62 | 63 | // Now we close the db for some reason 64 | db.close(); 65 | // Now let's open it with some new collection configs 66 | db = new Database(databaseName, { 67 | name: "Person", 68 | schema: personSchema, 69 | indexes: [ 70 | { path: "name", unique: true }, 71 | { path: "age", unique: false }, 72 | ], 73 | }); 74 | // Set our thread id and auth token directly 75 | // This is just syntactic sugar over more direct setting 76 | db.remote.set({ serviceHost, id, token }); 77 | // Now let's open the db again 78 | // This internally updates the db version, because the collection set is different 79 | // We should already be authorized because we saved our token from before 80 | await db.open(2); // Version 2 81 | id = await db.remote.initialize(); 82 | // Now finally, we push said changes 83 | await db.remote.push("Person"); 84 | 85 | // These pushes should include the schemas/collections, so let's check that they're there 86 | const info = await client.getCollectionInfo( 87 | // eslint-disable-next-line @typescript-eslint/no-non-null-assertion 88 | ThreadID.fromString(db.id!), 89 | "Person" 90 | ); 91 | expect(info.schema).to.deep.equal(personSchema); 92 | db.close(); 93 | }); 94 | 95 | it("should be able to push local schemas to remote on init", async function () { 96 | db = new Database( 97 | databaseName, 98 | { 99 | name: "Person", 100 | schema: personSchema, 101 | indexes: [ 102 | { path: "name", unique: true }, 103 | { path: "age", unique: false }, 104 | ], 105 | }, 106 | { 107 | name: "Empty", 108 | } 109 | ); 110 | await db.open(2); 111 | // Create some default changes to push 112 | // These will already have been "checked" for schema compliance locally, but they'll 113 | // get checked remotely as well 114 | // Note: We haven't actually touched anything "remote" yet 115 | await createChanges(); 116 | // Now for the remote stuff 117 | // We always need to authorize first... 118 | db.remote.set({ serviceHost }); 119 | await db.remote.authorize(privateKey); 120 | // Do we have a remote table yet? Let's just push and see! 121 | try { 122 | await db.remote.push("Person"); 123 | } catch (err) { 124 | expect(err).to.equal(Errors.ThreadIDError); 125 | // Opps, I didn't create the remote one yet, let's initialize 126 | // Use id from before, or if this is a fresh test, create a new one 127 | await db.remote.initialize(id); 128 | } 129 | await db.remote.push("Person"); 130 | 131 | // Low level checks 132 | const client = createDbClient(db.remote.config); 133 | const info = await client.getCollectionInfo( 134 | // eslint-disable-next-line @typescript-eslint/no-non-null-assertion 135 | ThreadID.fromString(db.id!), 136 | "Person" 137 | ); 138 | expect(info.schema).to.deep.equal(personSchema); 139 | }); 140 | }); 141 | }); 142 | -------------------------------------------------------------------------------- /src/remote/grpc.spec.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/no-non-null-assertion */ 2 | import { expect } from "chai"; 3 | import { getToken, getTokenChallenge, newDB } from "./grpc"; 4 | import { PrivateKey } from "@textile/crypto"; 5 | import ThreadID from "@textile/threads-id"; 6 | import { grpc } from "@improbable-eng/grpc-web"; 7 | 8 | const opts = { 9 | serviceHost: "http://127.0.0.1:6007", 10 | }; 11 | 12 | describe("grpc", function () { 13 | describe("authenticate", async function () { 14 | it("should return a valid token from getToken", async function () { 15 | const privateKey = PrivateKey.fromRandom(); 16 | const token = await getToken(privateKey, opts); 17 | expect(token).to.not.be.undefined; 18 | }); 19 | 20 | it("should return a valid token from getTokenChallenge", async function () { 21 | const privateKey = PrivateKey.fromRandom(); 22 | const token = await getTokenChallenge( 23 | privateKey.public.toString(), 24 | async (challenge: Uint8Array) => { 25 | return privateKey.sign(challenge); 26 | }, 27 | opts 28 | ); 29 | expect(token).to.not.be.undefined; 30 | }); 31 | 32 | it("should be able to create a remote db", async function () { 33 | // First, authenticate 34 | const privateKey = PrivateKey.fromRandom(); 35 | const token = await getToken(privateKey, opts); 36 | // Next, create! 37 | const metadata = new grpc.Metadata({ 38 | authentication: `bearer ${token}`, 39 | }); 40 | const threadID = ThreadID.fromRandom(); 41 | const id = await newDB("test", threadID, [], { 42 | ...opts, 43 | metadata, 44 | }); 45 | expect(id).to.not.be.undefined; 46 | expect(id).to.equal(threadID.toString()); 47 | }); 48 | }); 49 | }); 50 | -------------------------------------------------------------------------------- /src/remote/grpc.ts: -------------------------------------------------------------------------------- 1 | import { grpc } from "@improbable-eng/grpc-web"; 2 | import type { ThreadID } from "@textile/threads-id"; 3 | import { WebsocketTransport } from "@textile/grpc-transport"; 4 | import * as pb from "@textile/threads-client-grpc/threads_pb"; 5 | import * as api from "@textile/threads-client-grpc/threads_pb_service"; 6 | import { Client, CollectionConfig } from "@textile/threads-client"; 7 | import { Context, defaultHost } from "@textile/context"; 8 | import { Identity } from "@textile/crypto"; 9 | 10 | export interface GrpcConfig { 11 | serviceHost: string; 12 | transport: grpc.TransportFactory; 13 | debug: boolean; 14 | metadata: grpc.Metadata; 15 | } 16 | 17 | export { CollectionConfig }; 18 | 19 | export const defaults: GrpcConfig = { 20 | serviceHost: defaultHost, 21 | transport: WebsocketTransport(), 22 | debug: false, 23 | metadata: new grpc.Metadata(), 24 | }; 25 | 26 | export function createClient(opts: Partial = {}): api.APIClient { 27 | const config = { ...defaults, ...opts }; 28 | return new api.APIClient(config.serviceHost, { 29 | transport: config.transport, 30 | debug: config.debug, 31 | }); 32 | } 33 | 34 | export function createDbClient(config: Partial = {}): Client { 35 | // Create a new remote client instance 36 | // TODO: This is not be the best way to do this... 37 | // Pull in any existing headers that may have already been set 38 | const json: Record = {}; 39 | config.metadata?.forEach((key, values) => (json[key] = values)); 40 | const context = 41 | Object.keys(json).length > 0 42 | ? // eslint-disable-next-line @typescript-eslint/no-explicit-any 43 | Context.fromJSON(json, config.serviceHost) 44 | : new Context(config.serviceHost); 45 | const client = new Client(context); 46 | return client; 47 | } 48 | 49 | /** 50 | * Obtain a token per user (identity) for interacting with the remote API. 51 | * @param identity Complete identity object. Useful in tests or where the 52 | * developer has complete control over the key generation process. 53 | * @param opts Options for controlling communication with the remote gRPC 54 | * endpoint. 55 | * @example 56 | * ```@typescript 57 | * import { grpc } from '@textile/threads' 58 | * 59 | * async function example (identity: PrivateKey) { 60 | * const token = await grpc.getToken(identity) 61 | * return token 62 | * } 63 | * ``` 64 | */ 65 | export async function getToken( 66 | identity: Identity, 67 | config?: Partial 68 | ): Promise { 69 | const opts = { ...defaults, ...config }; 70 | return getTokenChallenge( 71 | identity.public.toString(), 72 | async (challenge: Uint8Array) => { 73 | return identity.sign(challenge); 74 | }, 75 | opts 76 | ); 77 | } 78 | 79 | /** 80 | * Obtain a token per user (identity) for interacting with the remote API. 81 | * @param publicKey The public key of a user identity to use for creating 82 | * records in the database. Must be the corresponding public key of the 83 | * private key used in `callback`. 84 | * @param callback A callback function that takes a `challenge` argument and 85 | * returns a signed message using the input challenge and the private key 86 | * associated with `publicKey`. 87 | * @param config Options for controlling communication with the remote gRPC 88 | * endpoint. 89 | * @example 90 | * ```typescript 91 | * import { grpc } from '@textile/threads' 92 | * 93 | * async function example (identity: PrivateKey) { 94 | * const token = await grpc.getTokenChallenge( 95 | * identity.public.toString(), 96 | * (challenge: Uint8Array) => { 97 | * return new Promise((resolve, reject) => { 98 | * // This is where you should program PrivateKey to respond to challenge 99 | * // Read more here: https://docs.textile.io/tutorials/hub/production-auth/ 100 | * }) 101 | * } 102 | * ) 103 | * return token 104 | * } 105 | * ``` 106 | */ 107 | export async function getTokenChallenge( 108 | publicKey: string, 109 | callback: (challenge: Uint8Array) => Uint8Array | Promise, 110 | config?: Partial 111 | ): Promise { 112 | const opts = { ...defaults, ...config }; 113 | const client = createClient(opts); 114 | const bidi = client.getToken(opts.metadata); 115 | return new Promise((resolve, reject) => { 116 | let token = ""; 117 | bidi.on("data", async (message: pb.GetTokenReply) => { 118 | if (message.hasChallenge()) { 119 | const challenge = message.getChallenge_asU8(); 120 | const signature = await callback(challenge); 121 | const req = new pb.GetTokenRequest(); 122 | req.setSignature(signature); 123 | bidi.write(req); 124 | } else if (message.hasToken()) { 125 | token = message.getToken(); 126 | bidi.end(); 127 | } 128 | }); 129 | bidi.on("end", (status) => { 130 | if (status?.code === grpc.Code.OK) { 131 | resolve(token); 132 | } else { 133 | reject(new Error(status?.details)); 134 | } 135 | }); 136 | const req = new pb.GetTokenRequest(); 137 | req.setKey(publicKey); 138 | bidi.write(req); 139 | }); 140 | } 141 | 142 | export async function newDB( 143 | name: string, 144 | threadID: ThreadID, 145 | collections: CollectionConfig[], 146 | config?: Partial 147 | ): Promise { 148 | const opts = { ...defaults, ...config }; 149 | const client = createClient(opts); 150 | const requestMessage = new pb.NewDBRequest(); 151 | requestMessage.setDbid(threadID.toBytes()); 152 | requestMessage.setName(name); 153 | const collectionsList: pb.CollectionConfig[] = []; 154 | for (const collection of collections) { 155 | const config = new pb.CollectionConfig(); 156 | config.setName(collection.name); 157 | config.setSchema(collection.schema); 158 | const indexesList: pb.Index[] = []; 159 | for (const index of collection.indexes ?? []) { 160 | const idx = new pb.Index(); 161 | idx.setPath(index.path); 162 | idx.setUnique(index.unique); 163 | indexesList.push(idx); 164 | } 165 | config.setIndexesList(indexesList); 166 | collectionsList.push(config); 167 | } 168 | requestMessage.setCollectionsList(collectionsList); 169 | return new Promise((resolve, reject) => { 170 | client.newDB( 171 | requestMessage, 172 | opts.metadata ?? new grpc.Metadata(), 173 | ( 174 | error: api.ServiceError | null, 175 | responseMessage: pb.NewDBReply | null 176 | ) => { 177 | if (error) reject(new Error(error.message)); 178 | // Should just be an empty object, which we return as a boolean 179 | const success = Boolean(responseMessage?.toObject()); 180 | const id = threadID.toString(); 181 | if (success) resolve(id); 182 | else reject(new Error(`Unable to create thread with id = ${id}`)); 183 | } 184 | ); 185 | }); 186 | } 187 | -------------------------------------------------------------------------------- /src/remote/index.ts: -------------------------------------------------------------------------------- 1 | import type { Dexie } from "dexie"; 2 | import { Context } from "@textile/context"; 3 | import { KeyInfo, UserAuth } from "@textile/security"; 4 | import jsonpatch from "fast-json-patch"; 5 | import { 6 | GrpcConfig, 7 | getToken, 8 | newDB, 9 | defaults, 10 | createDbClient, 11 | getTokenChallenge, 12 | CollectionConfig, 13 | } from "./grpc"; 14 | import type { Identity } from "@textile/crypto"; 15 | import ThreadID from "@textile/threads-id"; 16 | import { DBInfo, WriteTransaction } from "@textile/threads-client"; 17 | import { grpc } from "@improbable-eng/grpc-web"; 18 | import { 19 | Change, 20 | ChangeTableName, 21 | StashTableName, 22 | MetaTableName, 23 | } from "../middleware/changes"; 24 | 25 | export const Errors = { 26 | ThreadIDError: new Error("Missing/invalid thread id"), 27 | MissingDbError: new Error("Missing remote db/thread"), 28 | NoRemoteError: new Error("No remote service host specified"), 29 | RemoteError: new Error("Remote operation failed"), 30 | ChangeError: new Error("Unable to process db change(s)"), 31 | LocalChangesError: new Error("Unpushed local changes"), 32 | ThreadExists: new Error("Remote db/thread already exists"), 33 | }; 34 | 35 | export const ThreadIDName = "thread-id"; 36 | 37 | const encoder = new TextEncoder(); 38 | const decoder = new TextDecoder(); 39 | 40 | /** 41 | * Config specifies the configuration options for remote sync. 42 | */ 43 | export interface RemoteConfig extends GrpcConfig { 44 | /** 45 | * Thread id 46 | */ 47 | id: string; 48 | /** 49 | * Authorization token. This is here for ease of use. 50 | */ 51 | token: string; 52 | } 53 | 54 | export class Remote { 55 | /** 56 | * Database identity on remote peer. String encoding of a thread id. 57 | */ 58 | public id?: string; 59 | /** 60 | * Set of configuration options for remote sync. 61 | */ 62 | public config: Partial; 63 | 64 | /** 65 | * Create a new Remote instance. 66 | * @param storage The (private) storage provider. This is a pretty generic 67 | * interface that can be satisfied by a basic object for tests, or a full- 68 | * blown Dexie db for real usage. 69 | * @param config A set of configuration options for remote sync. 70 | */ 71 | constructor( 72 | private storage: Dexie, 73 | config: Partial = defaults 74 | ) { 75 | // Pull apart config into id, and config components and assign them 76 | const { id, token, ...rest } = config; 77 | // Set config to shallow, single-depth copy 78 | // Metadata will be overwritten if it exists 79 | this.config = { metadata: new grpc.Metadata(), ...rest }; 80 | if (token) { 81 | this.config.metadata?.set("authorization", `bearer ${token}`); 82 | } 83 | // Specifies the thread id to use when making updates 84 | this.id = id; 85 | } 86 | 87 | /** 88 | * Create a new gRPC client instance from a supplied user auth object. 89 | * Assumes all default gRPC settlings. For customization options, use a context object directly. 90 | * The callback method will automatically refresh expiring credentials. 91 | * @param auth The user auth object or an async callback that returns a user auth object. 92 | * @example 93 | * ```typescript 94 | * import {UserAuth, Client} from '@textile/threads' 95 | * 96 | * function create (auth: UserAuth) { 97 | * return Client.withUserAuth(auth) 98 | * } 99 | * ``` 100 | * @example 101 | * ```typescript 102 | * import {UserAuth, Client} from '@textile/threads' 103 | * 104 | * function setCallback (callback: () => Promise) { 105 | * return Client.withUserAuth(callback) 106 | * } 107 | * ``` 108 | */ 109 | async setUserAuth( 110 | auth: UserAuth | (() => Promise) 111 | ): Promise { 112 | const init = 113 | typeof auth === "object" 114 | ? Context.fromUserAuth(auth, this.config.serviceHost) 115 | : Context.fromUserAuthCallback(auth, this.config.serviceHost); 116 | // Pull in any existing headers that may have already been set 117 | const json: Record = {}; 118 | this.config.metadata?.forEach((key, values) => (json[key] = values)); 119 | const metadata = await Context.fromJSON({ 120 | ...json, 121 | ...init.toJSON(), 122 | }).toMetadata(); 123 | this.config.metadata = metadata; 124 | return this; 125 | } 126 | 127 | /** 128 | * Create a new gRPC client instance from a supplied key and secret 129 | * @param key The KeyInfo object containing {key: string, secret: string, type: 0}. 0 === User Group Key, 1 === Account Key 130 | * @param host The remote gRPC host to connect with. Should be left as default. 131 | * @param debug Whether to run in debug mode. Defaults to false. 132 | * @example 133 | * ```typescript 134 | * import {KeyInfo, Client} from '@textile/threads' 135 | * 136 | * async function create (keyInfo: KeyInfo) { 137 | * return await Client.withKeyInfo(keyInfo) 138 | * } 139 | * ``` 140 | */ 141 | async setKeyInfo(key: KeyInfo): Promise { 142 | const init = await new Context(this.config.serviceHost).withKeyInfo(key); 143 | // Pull in any existing headers that may have already been set 144 | const json: Record = {}; 145 | this.config.metadata?.forEach((key, values) => (json[key] = values)); 146 | const metadata = await Context.fromJSON({ 147 | ...json, 148 | ...init.toJSON(), 149 | }).toMetadata(); 150 | this.config.metadata = metadata; 151 | return this; 152 | return this; 153 | } 154 | 155 | /** 156 | * Set the remote configuration options. 157 | * @param config The configuration options to use. All are optional. 158 | * @example 159 | * ```@typescript 160 | * import type { Remote } from '@textile/threads' 161 | * 162 | * function example (remote: Remote) { 163 | * remote.set({ serviceHost: "http://example.com:6007" }) 164 | * console.log(remote.get()) 165 | * // { serviceHost: "http://example.com:6007", ... } 166 | * } 167 | * ``` 168 | */ 169 | set(config: Partial): this { 170 | // Replace current values with config options, otherwise stick to existing 171 | const { id, ...rest } = config; 172 | // Update config with shallow, single-depth copy 173 | this.config = { ...this.config, ...rest }; 174 | // Update id if we specified a new one, this enables switching between dbs 175 | this.id = id ?? this.id; 176 | // Return this so we can chain updates/changes 177 | return this; 178 | } 179 | 180 | /** 181 | * Get the remote configuration options. 182 | * @example 183 | * ```@typescript 184 | * import type { Remote } from '@textile/threads' 185 | * 186 | * function example (remote: Remote) { 187 | * remote.set({ serviceHost: "http://example.com:6007" }) 188 | * console.log(remote.get()) 189 | * // { serviceHost: "http://example.com:6007", ... } 190 | * } 191 | * ``` 192 | */ 193 | get(): Partial { 194 | // Pull apart this into id, and config components 195 | const { id, config } = this; 196 | // This essentially provides a shallow, single-depth copy of the properties 197 | return { id, ...config }; 198 | } 199 | 200 | async info(): Promise { 201 | // Check that we have a valid thread id 202 | if (this.id === undefined) throw Errors.ThreadIDError; 203 | const threadID = ThreadID.fromString(this.id); 204 | const client = createDbClient(this.config); 205 | return client.getDBInfo(threadID); 206 | } 207 | 208 | /** 209 | * Authorize with a remote. 210 | * @param identity The identity to use for authorization, or the public key 211 | * of an identity. 212 | * @param callback A callback to use as part of the identity challenge. If 213 | * identity is a public key string, then a callback is required. 214 | * @see {@link getToken} or {@link getTokenChallenge} for lower-level access. 215 | * @note This is an online-only operation (i.e., can only be done when the 216 | * peer is able to connect with the remote). 217 | * @example 218 | * ```@typescript 219 | * import type { PrivateKey } from "@textile/crypto"; 220 | * import type { Remote } from '@textile/threads' 221 | * 222 | * async function example (remote: Remote, identity: PrivateKey) { 223 | * const token = await remote.authorize(identity) 224 | * // The token is also automatically added to remote's `config.metadata` 225 | * const { metadata } = remote.config 226 | * console.log(metadata?.get("authorization")) 227 | * // ... 228 | * return token 229 | * } 230 | * ``` 231 | */ 232 | async authorize(identity: Identity): Promise; 233 | async authorize( 234 | identity: string, 235 | callback: (challenge: Uint8Array) => Uint8Array | Promise 236 | ): Promise; 237 | async authorize( 238 | identity: Identity | string, 239 | callback?: (challenge: Uint8Array) => Uint8Array | Promise 240 | ): Promise { 241 | // If we already have what we need, we don't really need to do the remote auth 242 | // const [localKey, localAuth] = await this.storage 243 | // .table(MetaTableName) 244 | // .bulkGet(["public-key", "authorization"]); 245 | // const publicKey = 246 | // typeof identity === "string" ? identity : identity.public.toString(); 247 | // TODO: Decide if we want to persist either the auth or the public key in the future? 248 | // if (localKey === publicKey && localAuth !== undefined) { 249 | // return localAuth; // No need to hit the remote APIs 250 | // } 251 | if (!this.config.serviceHost) { 252 | throw Errors.NoRemoteError; 253 | } 254 | // Fetch token for this identity from remote (must be online) 255 | let token = ""; 256 | if (typeof identity === "string") { 257 | if (callback === undefined) { 258 | throw new Error("Callback required for public key challenge"); 259 | } 260 | token = await getTokenChallenge(identity, callback, this.config); 261 | } else { 262 | token = await getToken(identity, this.config); 263 | } 264 | // Update in-memory config metadata for interacting with remote APIs 265 | this.config.metadata?.set("authorization", `bearer ${token}`); 266 | // TODO: Should we update authorization metadata for later re-hydration? 267 | // await this.storage.table(MetaTableName).bulkPut([ 268 | // { key: "public-key", value: publicKey }, // We're just storing this in case we need it 269 | // { key: "authorization", value: `bearer ${token}` }, 270 | // ]); 271 | // Return the token in case caller wants to storage themselves 272 | return token; 273 | } 274 | 275 | /** 276 | * Initialize a new remote db/thread. 277 | * Should only be done once, but only after opening the db. 278 | * @note This is an online-only operation (i.e., can only be done when the 279 | * peer is able to connect with the remote). 280 | * @param name The name for the new db. 281 | * @param id The thread id for the new db. Will default to random thread id. 282 | * Can be a thread id object or base32-encode string (the default). 283 | */ 284 | async initialize( 285 | id: ThreadID | string | undefined = this.id 286 | ): Promise { 287 | if (!this.config.serviceHost) { 288 | throw Errors.NoRemoteError; 289 | } 290 | // Meta is where we store metadata about this db... thread id, schemas, etc 291 | const meta = this.storage.table(MetaTableName); 292 | // Defaults new random id... 293 | let threadID = ThreadID.fromRandom(); 294 | // But if we specified a valid id... 295 | if (id !== undefined) { 296 | // If we have a string, convert it... 297 | if (typeof id === "string") { 298 | threadID = ThreadID.fromString(id); 299 | } else { 300 | // Keep is "as is" 301 | threadID = id; 302 | } 303 | } else { 304 | // If it was undefined, try to extract from existing storage 305 | const { value } = (await meta.get(ThreadIDName)) ?? {}; 306 | if (value !== undefined) { 307 | // If valid, use it 308 | threadID = ThreadID.fromString(value); 309 | } 310 | // Otherwise, stick to the random one we created 311 | } 312 | // Extract schema information from existing local dbs, and push to remote 313 | const schemas: CollectionConfig[] = await Promise.all( 314 | this.storage.tables 315 | .filter((table) => !table.name.startsWith("_")) 316 | .map(async (table) => ({ 317 | name: table.name, 318 | schema: encoder.encode(JSON.stringify(await table.getSchema())), 319 | indexesList: [], 320 | writevalidator: "", // TODO: Update this once we support validators/filters 321 | readfilter: "", 322 | })) 323 | ); 324 | // This will throw if we get some error, but the "backup" is success 325 | let idString = ""; 326 | try { 327 | idString = await newDB(this.storage.name, threadID, schemas, this.config); 328 | } catch (err) { 329 | if (err.toString().includes("db already exists")) { 330 | idString = threadID.toString(); 331 | // If it already exists, maybe we just need to create/update the schemas? 332 | const client = createDbClient(this.config); 333 | for (const schema of schemas) { 334 | schema.schema = JSON.parse(decoder.decode(schema.schema)); 335 | try { 336 | await client.newCollection(threadID, schema); 337 | } catch (err) { 338 | if (!err.message.includes("collection already registered")) 339 | throw err; 340 | } 341 | } 342 | } else { 343 | // Otherwise, just throw it 344 | throw err; 345 | } 346 | } 347 | 348 | // Otherwise throw a generic remote error :( 349 | if (!idString) throw Errors.RemoteError; 350 | // Reset id in case we've updated or created a new random one 351 | this.set({ id: idString }); 352 | // Update metadata table with ThreadIDName 353 | await meta.put({ key: ThreadIDName, value: idString }); 354 | // Return id in case we created a new random one 355 | return idString; 356 | } 357 | 358 | async push(...collections: string[]): Promise { 359 | if (!this.config.serviceHost) { 360 | throw Errors.NoRemoteError; 361 | } 362 | // Check that we have a valid thread id 363 | if (this.id === undefined) throw Errors.ThreadIDError; 364 | const threadID = ThreadID.fromString(this.id); 365 | const localChanges = this.storage.table(ChangeTableName); 366 | const client = createDbClient(this.config); 367 | 368 | // Blast thru provided collection names... 369 | for (const collectionName of collections) { 370 | // Check that table exists locally... 371 | this.storage.table(collectionName); 372 | // Filter changes by collection 373 | const filtered = localChanges.where("name").equals(collectionName); 374 | if ((await filtered.count()) < 1) { 375 | return; // Early out if no changes 376 | } 377 | // For each change, create transaction item and switch on type 378 | // TODO: Currently, go-threads underlying db doesn't support isolation in transactions 379 | // so we have to do these as one-off transactions for now so that queries reflect reality 380 | // this is **not** ideal, as we lose the atomicity of pushes... 381 | let trans: WriteTransaction | undefined; 382 | try { 383 | // TODO:See above, we need to actually materialize the array it seems? 384 | const changes = await filtered.toArray(); 385 | let count = 0; 386 | 387 | for (const obj of changes) { 388 | trans = client.writeTransaction(threadID, collectionName); 389 | await trans.start(); 390 | switch (obj.type) { 391 | case "put": { 392 | // FIXME: https://github.com/textileio/go-threads/issues/440 393 | // TODO: https://github.com/textileio/go-threads/pull/450 394 | try { 395 | await trans.save([obj.after]); 396 | // await client.save(threadID, collectionName, [obj.after]); 397 | break; 398 | } catch (err) { 399 | throw err; 400 | } 401 | } 402 | case "add": { 403 | try { 404 | await trans.create([obj.after]); 405 | // await client.create(threadID, collectionName, [obj.after]); 406 | break; 407 | } catch (err) { 408 | throw err; 409 | } 410 | } 411 | case "delete": { 412 | try { 413 | await trans.delete([obj.key]); 414 | // await client.delete(threadID, collectionName, [obj.key]); 415 | break; 416 | } catch (err) { 417 | // TODO: https://github.com/textileio/go-threads/pull/450 418 | // console.error(err); // We'll ignore this though 419 | throw err; 420 | } 421 | } 422 | } 423 | // FIXME: We close out the transaction on each loop :( 424 | await trans.end(); 425 | // We track count to make sure we're processed them all later 426 | count++; 427 | } 428 | // Assuming all good, we'll delete our local changes 429 | const deleted = await filtered.delete(); 430 | // Make sure we deleted just as much as we were expecting 431 | // Won't know why we made it this far, so just use a generic error 432 | if (count !== deleted) throw Errors.ChangeError; 433 | } catch (err) { 434 | // In theory, err will be due to remote transaction calls... abort! 435 | try { 436 | await trans?.discard(); 437 | } catch (err) { 438 | // Nothing more we can do here 439 | } 440 | throw err; // Rethrow for external consumers 441 | } 442 | // TODO: Maybe return updated hash of head update. 443 | } 444 | } 445 | 446 | /** 447 | * Stash local changes. 448 | * This moves local changes out of the local staging table into a stashed table. Making it 449 | * possible to pull remote changes and re-apply local changes on top (like a git rebase). 450 | * @see {@link Remote.applyStash}. 451 | */ 452 | async createStash(): Promise { 453 | // Grab a reference to our changes table 454 | const changes = this.storage.table(ChangeTableName); 455 | // If we don't have any, all good, just return 456 | if (!(await changes.count())) return; 457 | // Get a reference to stash table for storing changes 458 | const stash = this.storage.table(StashTableName); 459 | const array = await changes.toArray(); 460 | // Move change set to stash table, useful for rebasing later 461 | await stash.bulkPut(array); 462 | // Clear out local changes 463 | return changes.clear(); 464 | } 465 | 466 | /** 467 | * Clear the local stash. 468 | * @see {@link Remote.createStash}. 469 | */ 470 | async clearStash(): Promise { 471 | // Grab a reference to our stash table 472 | const stash = this.storage.table(StashTableName); 473 | // Clear it out, dropping all changes, whether we've allied them or not! 474 | return stash.clear(); 475 | } 476 | 477 | /** 478 | * Apply the local stash back on top of the local changes. 479 | * @param collections The set of collections to filter on. This means you can fine-tune which 480 | * changes are apoplied. 481 | * @see {@link Remote.createStash}. 482 | */ 483 | async applyStash(...collections: string[]): Promise { 484 | for (const collectionName of collections) { 485 | await this.storage.transaction( 486 | "rw", 487 | [collectionName, StashTableName], 488 | async (tx) => { 489 | const stash = tx.table(StashTableName); 490 | const table = tx.table(collectionName); 491 | const filtered = stash.where("name").equals(collectionName); 492 | if ((await filtered.count()) < 1) { 493 | return; // Early out if no changes 494 | } 495 | // TODO: This can be optimized big time! 496 | for (const obj of await filtered.toArray()) { 497 | if (obj.type === "delete") { 498 | table.delete(obj.key); 499 | } else { 500 | const value = (await table.get(obj.key)) ?? {}; 501 | jsonpatch.applyPatch(value, obj.ops, false, true); 502 | await table.put(value); 503 | } 504 | } 505 | await filtered.delete(); 506 | } 507 | ); 508 | } 509 | } 510 | 511 | /** 512 | * Pull remote changes into local db. 513 | * Attempts to force pull remote updates. This will automatically request updates from the remote. 514 | * @note This is an online-only operation (i.e., can only be done when the 515 | * peer is able to connect with the remote). 516 | * @param collections A (possibly empty) variadic set of collections to pull. 517 | * @returns A promise that resolves to the set of modified keys. 518 | */ 519 | async pull(...collections: string[]): Promise { 520 | // Simple to start: 521 | // Fetch all records for the given collection(s) 522 | // For each remote record, check against local, if diff, record it, otherwise, ignore 523 | // For each local remote that isn't in above set, setup for delete 524 | // Ideally, we'd have some new APIs on the remote side that allow us to track revisions 525 | // But this is a much longer conversation about appropriate approach etc. 526 | if (!this.config.serviceHost) { 527 | throw Errors.NoRemoteError; 528 | } 529 | // Check that we have a valid thread id 530 | if (this.id === undefined) throw Errors.ThreadIDError; 531 | const threadID = ThreadID.fromString(this.id); 532 | const localChanges = this.storage.table(ChangeTableName); 533 | if (await localChanges.count()) throw Errors.LocalChangesError; 534 | const client = createDbClient(this.config); 535 | // Blast thru provided collection names... 536 | // TODO: Yes, I know this is all extremely sub-optimal! 537 | for (const collectionName of collections) { 538 | const instances = await client.find(threadID, collectionName, {}); 539 | const table = this.storage.table(collectionName); 540 | // Remote is our source of thruth, we completely overwrite anything local that is different 541 | const keys = await table.bulkPut(instances, { allKeys: true }); 542 | // Now we also need to drop anything locally that wasn't in our remote 543 | await table.filter((obj) => !keys.includes(obj._id)).delete(); 544 | } 545 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 546 | const isModUpdate = (ops: any[]) => { 547 | const [op0] = ops; 548 | return op0.op === "add" && op0.path == "/_mod"; 549 | }; 550 | // TODO: Maybe return the ids of modified/deleted instances? 551 | const changes = this.storage.table(ChangeTableName); 552 | const values = await changes 553 | .filter((change) => change.ops.length > 0 && !isModUpdate(change.ops)) 554 | .toArray(); 555 | // Drop these "fake" changes 556 | await changes.clear(); 557 | // Return the mutated keys 558 | // TODO: This currently ignores collection name, which is potentially confusing! 559 | return values.map((change) => change.key); 560 | } 561 | } 562 | -------------------------------------------------------------------------------- /src/remote/remote.spec.ts: -------------------------------------------------------------------------------- 1 | import { PrivateKey } from "@textile/crypto"; 2 | import { ThreadID } from "@textile/threads-id"; 3 | import { shouldHaveThrown } from "../utils/spec.utils"; 4 | import { expect } from "chai"; 5 | import { ChangeTableName, StashTableName } from "../middleware/changes"; 6 | import { Remote, Errors } from "."; 7 | import { createDbClient } from "./grpc"; 8 | import { NewDexie } from "../utils"; 9 | import { grpc } from "@improbable-eng/grpc-web"; 10 | import { Where } from "@textile/threads-client"; 11 | 12 | const databaseName = "remote"; 13 | const serviceHost = "http://localhost:6007"; 14 | 15 | describe("remote", function () { 16 | // Default Dog interface to work with types 17 | class Dog { 18 | constructor( 19 | public _id?: string, 20 | public name: string = "Lucas", 21 | public age: number = 7 22 | ) {} 23 | } 24 | const privateKey = PrivateKey.fromRandom(); 25 | // NewDexie adds the tables we need automatically... 26 | const dexie = NewDexie(databaseName); 27 | // But we still need a version 28 | dexie.version(1).stores({ 29 | dogs: "++_id,name,age", // ulid-based id, with indexes on name and age 30 | }); 31 | dexie.table("dogs").mapToClass(Dog); 32 | // Create a single remote to use for all tests 33 | const remote: Remote = new Remote(dexie); 34 | 35 | before(async function () { 36 | // Make sure we're open for business 37 | await dexie.open(); 38 | }); 39 | 40 | after(async function () { 41 | // Cleanup time! 42 | dexie.close(); 43 | await dexie.delete(); 44 | }); 45 | 46 | /** 47 | * Function to create a set of pre-defined udpates/changes 48 | */ 49 | async function createChanges() { 50 | // Create some default changes... 51 | const dogs = dexie.table("dogs"); 52 | // Change 1 | Dogs 1 53 | const lucas = await dogs.put(new Dog()); // Lucas is the default 54 | 55 | await dexie.transaction("rw", ["dogs"], async (tx) => { 56 | // Mask out reference to "dogs" above 57 | const dogs = tx.table("dogs"); 58 | const friend = await dogs.get(lucas); 59 | ++friend.age; 60 | // Change 2 | Dogs 1 61 | await dogs.put(friend); 62 | // Nested transactions (testing using object rather than string[]) 63 | await dexie.transaction("rw", dogs, async () => { 64 | // Change 3 & 4 | Dogs 3 65 | // "id" is the id of the last add, so the id for Jefferson 66 | const id = await dogs.bulkAdd([ 67 | new Dog(undefined, "Jefferson", 9), 68 | new Dog(undefined, "Clark", 13), 69 | ]); 70 | const friend = await dogs.get(id); 71 | friend.name = "Lewis"; 72 | // Change 5 | Dogs 3 73 | await dogs.put(friend); 74 | // Change 6 | Dogs 2 75 | // Lucas and Jefferson remain 76 | await dogs.delete(id); 77 | }); 78 | }); 79 | } 80 | 81 | context("init and auth", function () { 82 | beforeEach(async function () { 83 | // Need to authorize before running any tests... 84 | // But we _could_ cache token if we had one from before... 85 | remote.set({ serviceHost }); 86 | await remote.authorize(privateKey); 87 | }); 88 | 89 | it("should have authorization token cached in internal metadata", function () { 90 | const [cached] = remote.config.metadata?.get("authorization") ?? []; 91 | expect(cached).to.not.be.undefined; 92 | }); 93 | 94 | it("should throw when authorizing without a remote set", async function () { 95 | const cached = { ...remote.config }; 96 | remote.config.serviceHost = undefined; 97 | try { 98 | await remote.authorize(privateKey); 99 | throw shouldHaveThrown; 100 | } catch (err) { 101 | expect(err).to.equal(Errors.NoRemoteError); 102 | } 103 | // Set it back for other tests 104 | remote.config = cached; 105 | }); 106 | 107 | it("should be able to re-authorize without error", async function () { 108 | // Re-authorize with same private key 109 | const token = await remote.authorize(privateKey); 110 | const other = PrivateKey.fromRandom(); 111 | // This time use the callback flow 112 | const otherToken = await remote.authorize( 113 | other.public.toString(), 114 | (challenge: Uint8Array) => other.sign(challenge) 115 | ); 116 | expect(otherToken).to.not.be.undefined; 117 | expect(otherToken.length).to.equal(token.length); 118 | // Now have to put previous token back in place for subsequent tests 119 | remote.config.metadata?.set("authorization", token); 120 | }); 121 | 122 | it("should be able to get and set config properties", function () { 123 | expect(remote.config).to.not.be.undefined; 124 | expect(remote.config.serviceHost).to.not.be.undefined; 125 | expect(remote.get().debug).to.equal(false); 126 | remote.set({ debug: true }); 127 | expect(remote.config.debug).to.equal(true); 128 | expect(remote.get().debug).to.equal(true); 129 | // Ok, let's set it back because otherwise it floods our test outputs 130 | remote.set({ debug: false }); 131 | const metadata = remote.config.metadata; 132 | if (metadata === undefined) 133 | throw Error("metadata should not be undefined"); 134 | const cached = metadata.get("authorization"); 135 | expect(cached).to.not.be.undefined; 136 | // Clear it out 137 | remote.config.metadata = new grpc.Metadata(); 138 | expect(remote.config.metadata?.get("authorization")).to.deep.equal([]); 139 | // Set it back 140 | remote.config.metadata = metadata; 141 | }); 142 | 143 | it("should throw if no remote db + thread has been created yet", async function () { 144 | try { 145 | await remote.pull(); 146 | throw shouldHaveThrown; 147 | } catch (err) { 148 | expect(err).to.equal(Errors.ThreadIDError); 149 | } 150 | }); 151 | 152 | it("should create remote db on initialize, and throw when trying to re-create", async function () { 153 | this.timeout(5000); 154 | const id0 = ThreadID.fromRandom().toString(); // Use a string 155 | const id1 = await remote.initialize(id0); 156 | expect(id1).to.not.be.undefined; 157 | expect(ThreadID.fromString(id1).toString()).to.equal(id1); 158 | // Should throw here because we're trying to use an existing db id 159 | // Clear cached id just to be sure we're pulling from the db 160 | remote.id = undefined; 161 | // Leave off id to default to existing id (stored in db) 162 | await remote.initialize(); // Leave off id 163 | 164 | // Try with new random one, this isn't a good idea in practice, but is 165 | // allowed because we want to be able to migrate etc in the future 166 | // Application logic should be used to prevent this bad behavior for now 167 | const id2 = await remote.initialize(ThreadID.fromRandom()); // Use thread id object 168 | expect(id2).to.not.equal(id1); 169 | }); 170 | }); 171 | 172 | context("changes and stashing", function () { 173 | beforeEach(async function () { 174 | // Have to set timeout because this takes longer than normal setup 175 | this.timeout(30000); 176 | await createChanges(); 177 | }); 178 | 179 | it("should stash all changes and clear changes table, and then clear stash", async function () { 180 | // Before this, we've create some changes 181 | const changes = dexie.table(ChangeTableName); 182 | const stash = dexie.table(StashTableName); 183 | expect(await changes.count()).to.equal(6); 184 | expect(await stash.count()).to.equal(0); 185 | // Create a stash and clear out changes 186 | await remote.createStash(); 187 | expect(await stash.count()).to.equal(6); 188 | expect(await changes.count()).to.equal(0); 189 | // We shouldn't have any changes to stash, so this should return quick! 190 | await remote.createStash(); 191 | // Should still equal 6 (from before) 192 | expect(await stash.count()).to.equal(6); 193 | // Now we clear them all! 194 | await remote.clearStash(); 195 | expect(await stash.count()).to.equal(0); 196 | }); 197 | 198 | it("should be able to create changes, stash them, make more changes, apply stash to overwrite", async function () { 199 | this.timeout(30000); 200 | // Before this, we've create some changes 201 | const changes = dexie.table(ChangeTableName); 202 | const stash = dexie.table(StashTableName); 203 | // Create a stash and clear out changes 204 | await remote.createStash(); 205 | // Simulate changes coming in via a pull from remote that overwrites local changes 206 | await dexie.transaction("rw", ["dogs"], async (tx) => { 207 | const dogs = tx.table("dogs"); 208 | // Should be defined 209 | const friend = await dogs.get({ name: "Lucas" }); 210 | if (friend === undefined) throw new Error("should be defined"); 211 | friend.name = "Not Lucas"; 212 | await dogs.put(friend); 213 | }); 214 | expect(await stash.count()).to.equal(6); 215 | expect(await changes.count()).to.equal(1); 216 | const dogs = dexie.table("dogs"); 217 | expect(await dogs.get({ name: "Not Lucas" })).to.not.be.undefined; 218 | // Now apply stash 219 | await remote.applyStash("dogs"); 220 | const friend = await dogs.get({ name: "Lucas" }); 221 | if (friend === undefined) throw new Error("should be defined"); 222 | // Make sure Clark is still removed 223 | const clark = await dogs.get({ name: "Clark" }); 224 | expect(clark).to.be.undefined; 225 | }); 226 | }); 227 | 228 | context("push and pull", function () { 229 | before(async function () { 230 | // Need to authorize before running any tests... 231 | // But we _could_ cache token if we had one from before... 232 | remote.set({ serviceHost }); 233 | await remote.authorize(privateKey); 234 | }); 235 | 236 | beforeEach(async function () { 237 | // Have to set timeout because this takes longer than normal setup 238 | this.timeout(30000); 239 | // If we're running these tests in batch, we need to initialize a new thread each time 240 | // to avoid hanging instances 241 | await remote.initialize(ThreadID.fromRandom()); 242 | // Clear tables 243 | await dexie.table("dogs").clear(); 244 | await dexie.table(ChangeTableName).clear(); 245 | // Create some fresh updates 246 | await createChanges(); 247 | }); 248 | 249 | it("should throw when pushing local table that does not exist", async function () { 250 | const changes = dexie.table(ChangeTableName); 251 | expect(await changes.count()).to.equal(6); 252 | try { 253 | await remote.push("fake"); // Fake is not a real collection name! 254 | throw shouldHaveThrown; 255 | } catch (err) { 256 | expect(err.toString()).to.contain("Table fake does not exist"); 257 | } 258 | }); 259 | 260 | it("should push tracked changes to a remote when calling push", async function () { 261 | const threadID = ThreadID.fromString(remote.id ?? ""); 262 | // Low level check to make sure we have our changes 263 | const changes = dexie.table(ChangeTableName); 264 | const count = await changes.count(); 265 | expect(count).to.equal(6); 266 | await remote.push("dogs"); 267 | expect(await changes.count()).to.equal(0); 268 | // Trying again should not lead to any issues 269 | await remote.push("dogs"); // Push everything this time... except we have none! 270 | // Low level checks 271 | const client = createDbClient(remote.config); 272 | const dogs = dexie.table("dogs"); 273 | const total = await dogs.count(); 274 | expect(total).to.equal(2); 275 | const q = new Where("age").gt(0); 276 | const instances = await client.find(threadID, "dogs", q); 277 | expect(instances).to.have.lengthOf(total); 278 | }); 279 | 280 | it("should pull changes from remote and automatically update local db", async function () { 281 | this.timeout(5000); 282 | //Should fail to pull if we already have local changes to push 283 | try { 284 | await remote.pull("dogs"); 285 | throw shouldHaveThrown; 286 | } catch (err) { 287 | expect(err).to.equal(Errors.LocalChangesError); 288 | } 289 | // Ok, now we'll push them 290 | await remote.push("dogs"); 291 | // There should be no new updates on the remote that we don't know about yet 292 | // Pulling in this case should do nothing, though in practice (for now) we still update 293 | const changed1 = await remote.pull("dogs"); 294 | expect(changed1).to.have.lengthOf(0); 295 | // Ok, now we'll make a low-level update on the remote and see what happens 296 | const threadID = ThreadID.fromString(remote.id ?? ""); 297 | // Low level checks 298 | const client = createDbClient(remote.config); 299 | const dogs = dexie.table("dogs"); 300 | const array = await dogs.toArray(); // Should be two in there 301 | // eslint-disable-next-line @typescript-eslint/no-non-null-assertion 302 | await client.delete(threadID, "dogs", [array[0]._id!]); 303 | array[1].name = "Mod"; 304 | await client.save(threadID, "dogs", [array[1]]); 305 | const changed2 = await remote.pull("dogs"); 306 | expect(changed2).to.have.lengthOf(2); 307 | expect(await dogs.count()).to.equal(1); 308 | }); 309 | }); 310 | }); 311 | -------------------------------------------------------------------------------- /src/types.d.ts: -------------------------------------------------------------------------------- 1 | declare module "indexeddbshim"; 2 | declare module "dexie-mongoify"; 3 | -------------------------------------------------------------------------------- /src/utils/index.ts: -------------------------------------------------------------------------------- 1 | import Dexie, { DexieOptions } from "dexie"; 2 | import setGlobalVars from "indexeddbshim"; 3 | // eslint-disable-next-line @typescript-eslint/ban-types 4 | let indexedDB: { open: Function } | undefined, 5 | IDBKeyRange: // eslint-disable-next-line @typescript-eslint/ban-types 6 | { bound: Function; lowerBound: Function; upperBound: Function } | undefined; 7 | if (setGlobalVars instanceof Function) { 8 | const globals: DexieOptions = setGlobalVars({}, { checkOrigin: false }); 9 | indexedDB = globals.indexedDB; 10 | IDBKeyRange = globals.IDBKeyRange; 11 | } 12 | import { ulidMiddleware } from "../middleware/ulid"; 13 | import { changesAddon } from "../middleware/changes"; 14 | import { schemaAddon } from "../middleware/schemas"; 15 | // This is a pure side-effects addon :( 16 | import "../middleware/mongo"; 17 | 18 | /** 19 | * Create a new Dexie database using indexedDB shim by default. 20 | * @param databaseName The name of the new db. 21 | */ 22 | export function NewDexie(databaseName: string): Dexie { 23 | // TODO: This options stuff is kinda messy, but required for nodejs support and tests with polendina 24 | const options: DexieOptions = { 25 | addons: [...Dexie.addons, changesAddon, schemaAddon], 26 | }; 27 | if (indexedDB) options.indexedDB = indexedDB; 28 | if (IDBKeyRange) options.IDBKeyRange = IDBKeyRange; 29 | const dexie = new Dexie(databaseName, options); 30 | dexie.use(ulidMiddleware); 31 | // If another window/tab is accessing this db, we want to "gracefully" 32 | // handle any version upgrades due to new collections being created 33 | // @see {@link https://stackoverflow.com/a/39015986/1256988} 34 | dexie.on("versionchange", () => { 35 | dexie.close(); // Allow other page to upgrade schema 36 | dexie 37 | .open() // Reopen the db 38 | .then(() => { 39 | // New table can be accessed from now on. 40 | console.log("Database reloaded with updated version"); 41 | }) 42 | .catch((err) => { 43 | // Failed to open. Log or show! 44 | console.log("Failed to reload database with updated version"); 45 | throw err; 46 | }); 47 | return false; // Tell Dexie's default implementation not to run 48 | }); 49 | return dexie; 50 | } 51 | 52 | export function nameFromKeyPath(keyPath?: string | string[]): string { 53 | return typeof keyPath === "string" 54 | ? keyPath 55 | : keyPath 56 | ? "[" + [].join.call(keyPath, "+") + "]" 57 | : ""; 58 | } 59 | 60 | export function createIndexString( 61 | { 62 | path, 63 | unique, 64 | multi, 65 | auto, 66 | }: { 67 | path: string; 68 | unique?: boolean; 69 | multi?: boolean; 70 | auto?: boolean; 71 | }, 72 | isPrimKey = false 73 | ): string { 74 | return ( 75 | (unique && !isPrimKey ? "&" : "") + 76 | (multi ? "*" : "") + 77 | (auto ? "++" : "") + 78 | nameFromKeyPath(path) 79 | ); 80 | } 81 | 82 | /** 83 | * A decent hash function. 84 | * @param s Input string 85 | * @link https://stackoverflow.com/a/52171480/1256988 86 | */ 87 | export const hashString = (s: string): number => { 88 | let i, h; 89 | for (i = 0, h = 9; i < s.length; ) { 90 | h = Math.imul(h ^ s.charCodeAt(i++), 9 ** 9); 91 | } 92 | // Add max 32bit integer + 1 93 | return (h ^ (h >>> 9)) + 0x7fffffff + 1; 94 | }; 95 | -------------------------------------------------------------------------------- /src/utils/spec.utils.ts: -------------------------------------------------------------------------------- 1 | import { JSONSchema } from "../middleware/schemas"; 2 | 3 | export const shouldHaveThrown = new Error("should have thrown"); 4 | 5 | export const personSchema: JSONSchema = { 6 | $schema: "http://json-schema.org/draft-07/schema#", 7 | title: "Person", 8 | description: "A simple person schema", 9 | type: "object", 10 | 11 | properties: { 12 | _id: { 13 | description: "Field to contain ulid-based instance id", 14 | type: "string", 15 | }, 16 | name: { 17 | description: "Name of the person", 18 | type: "string", 19 | }, 20 | age: { 21 | type: "integer", 22 | minimum: 1, // Why not? 23 | }, 24 | }, 25 | required: ["_id", "name", "age"], 26 | }; 27 | -------------------------------------------------------------------------------- /src/utils/utils.spec.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai"; 2 | import { hashString } from "."; 3 | 4 | describe("utils", function () { 5 | it("should produce a decent hash that achieves avalanche (non-strict) ", async function () { 6 | const hash1 = hashString("++_id,name,age"); 7 | const hash2 = hashString("++_id,name,age,sex"); 8 | const hash3 = hashString(JSON.stringify("++_id,name,age").slice(1, -1)); 9 | expect(hash1).to.not.equal(hash2); 10 | expect(hash1).to.equal(hash3); 11 | }); 12 | }); 13 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES6", 4 | "module": "CommonJS", 5 | "lib": [ 6 | "DOM", 7 | "ES2020" 8 | ], 9 | "declaration": true, 10 | "rootDir": "./src", 11 | "outDir": "./dist", 12 | "strict": true, 13 | "moduleResolution": "node", 14 | "esModuleInterop": true, 15 | "downlevelIteration": true 16 | }, 17 | "include": [ 18 | "src/**/*" 19 | ], 20 | "exclude": [ 21 | "src/**/*.spec.ts", 22 | "dist", 23 | "node_modules", 24 | "build", 25 | "coverage", 26 | "docs", 27 | "examples" 28 | ] 29 | } -------------------------------------------------------------------------------- /typedoc.json: -------------------------------------------------------------------------------- 1 | { 2 | "readme": "./README.md", 3 | "name": "@textile/threaddb", 4 | "mode": "file", 5 | "out": "docs", 6 | "exclude": [ 7 | "**/*.spec.ts", 8 | "./src/utils/**", 9 | "./src/remote/grpc.ts", 10 | "**/node_modules/**", 11 | "./src/middleware/**" 12 | ], 13 | "gitRevision": "main", 14 | "excludePrivate": true, 15 | "excludeNotExported": true, 16 | "excludeProtected": true, 17 | "hideGenerator": true, 18 | "target": "ES6", 19 | "moduleResolution": "node", 20 | "preserveConstEnums": true, 21 | "stripInternal": true, 22 | "module": "commonjs" 23 | } 24 | -------------------------------------------------------------------------------- /webpack.test.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | target: "web", 3 | entry: ["@babel/polyfill"], 4 | module: { 5 | rules: [ 6 | { 7 | test: /\.tsx?$/, 8 | use: "ts-loader", 9 | exclude: /node_modules/, 10 | }, 11 | ], 12 | }, 13 | performance: { 14 | hints: false, 15 | }, 16 | resolve: { 17 | modules: ["./node_modules"], 18 | extensions: [".tsx", ".ts", ".js"], 19 | }, 20 | }; 21 | --------------------------------------------------------------------------------