├── .gitignore
├── LICENSE
├── index.ts
├── package-lock.json
├── package.json
├── readme.md
├── sh
│   └── push-new-version.js
├── src
│   ├── config-internal.ts
│   ├── config.ts
│   ├── core.ts
│   ├── durafetch-do.ts
│   ├── log.ts
│   └── util.ts
└── test
    ├── worker-1
    │   ├── .dev.vars
    │   ├── package-lock.json
    │   ├── package.json
    │   ├── readme.md
    │   ├── sh
    │   │   ├── durafetch-client.sh
    │   │   ├── durafetch-config.json
    │   │   ├── logs.sh
    │   │   ├── publish.sh
    │   │   └── start-dev-server.sh
    │   ├── src
    │   │   ├── durable_object_a.ts
    │   │   ├── durafetch-with-config.ts
    │   │   └── index.ts
    │   ├── tsconfig.json
    │   └── wrangler.toml
    └── worker-2
        ├── .dev.vars
        ├── package-lock.json
        ├── package.json
        ├── readme.md
        ├── sh
        │   ├── logs.sh
        │   ├── publish.sh
        │   └── start-dev-server.sh
        ├── src
        │   ├── durable_object_a.ts
        │   ├── durafetch-with-config.ts
        │   └── index.ts
        ├── tsconfig.json
        └── wrangler.toml

/.gitignore: -------------------------------------------------------------------------------- 1 | test/worker-1/sh/del/* -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 - present, durafetch.com owners. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
-------------------------------------------------------------------------------- /index.ts: -------------------------------------------------------------------------------- 1 | // @see https://stackoverflow.com/questions/49320632/how-to-import-an-npm-module-that-has-a-typescript-main-file-in-typescript 2 | 3 | import * as config from "./src/config" 4 | import * as core from "./src/core" 5 | import * as durafetch_do from "./src/durafetch-do" 6 | import * as util from "./src/util" 7 | 8 | 9 | export { 10 | config, 11 | core, 12 | durafetch_do, 13 | util 14 | } -------------------------------------------------------------------------------- /package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "durafetch-server", 3 | "version": "1.0.0", 4 | "lockfileVersion": 2, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "durafetch-server", 9 | "version": "1.0.0", 10 | "license": "SEE LICENSE IN LICENSE", 11 | "dependencies": { 12 | "lodash": "^4.17.21" 13 | }, 14 | "devDependencies": { 15 | "@cloudflare/workers-types": "^4.20230518.0", 16 | "typescript": "^5.0.4" 17 | } 18 | }, 19 | "node_modules/@cloudflare/workers-types": { 20 | "version": "4.20230518.0", 21 | "resolved": "https://registry.npmjs.org/@cloudflare/workers-types/-/workers-types-4.20230518.0.tgz", 22 | "integrity": "sha512-A0w1V+5SUawGaaPRlhFhSC/SCDT9oQG8TMoWOKFLA4qbqagELqEAFD4KySBIkeVOvCBLT1DZSYBMCxbXddl0kw==", 23 | "dev": true 24 | }, 25 | "node_modules/lodash": { 26 | "version": "4.17.21", 27 | "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", 28 | "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" 29 | }, 30 | "node_modules/typescript": { 31 | "version": "5.0.4", 32 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.0.4.tgz", 33 | "integrity": "sha512-cW9T5W9xY37cc+jfEnaUvX91foxtHkza3Nw3wkoF4sSlKn0MONdkdEndig/qPBWXNkmplh3NzayQzCiHM4/hqw==", 34 | "dev": true, 35 | "bin": { 36 | "tsc": "bin/tsc", 37 | "tsserver": "bin/tsserver" 38 | }, 39 | "engines": { 40 | "node": ">=12.20" 41 | } 42 | } 43 | }, 44 | "dependencies": { 45 | "@cloudflare/workers-types": { 46 | "version": "4.20230518.0", 47 | "resolved": "https://registry.npmjs.org/@cloudflare/workers-types/-/workers-types-4.20230518.0.tgz", 48 | "integrity": "sha512-A0w1V+5SUawGaaPRlhFhSC/SCDT9oQG8TMoWOKFLA4qbqagELqEAFD4KySBIkeVOvCBLT1DZSYBMCxbXddl0kw==", 49 | "dev": true 50 | }, 51 | "lodash": { 52 | "version": "4.17.21", 53 | "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", 54 | "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" 55 | }, 56 | "typescript": { 57 | "version": "5.0.4", 58 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.0.4.tgz", 59 | "integrity": "sha512-cW9T5W9xY37cc+jfEnaUvX91foxtHkza3Nw3wkoF4sSlKn0MONdkdEndig/qPBWXNkmplh3NzayQzCiHM4/hqw==", 60 | "dev": true 61 | } 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "durafetch-server", 3 | "version": "0.3.0", 4 | "main": "index.ts", 5 | "scripts": {}, 6 | "description": "Download *all* data stored in Cloudflare Durable Objects to a SQLite database.", 7 | "keywords": [ 8 | "Cloudflare", 9 | "Durable Object", 10 | "Cloudflare Durable Object", 11 | "Cloudflare Workers", 12 | "durafetch" 13 | 
], 14 | "homepage": "https://durafetch.com", 15 | "author": "Enzo", 16 | "license": "SEE LICENSE IN LICENSE", 17 | "dependencies": { 18 | "lodash": "^4.17.21" 19 | }, 20 | "devDependencies": { 21 | "@cloudflare/workers-types": "^4.20230518.0", 22 | "typescript": "^5.0.4" 23 | }, 24 | "repository": { 25 | "type": "git", 26 | "url": "https://github.com/emadda/durafetch-server.git" 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /readme.md: -------------------------------------------------------------------------------- 1 | # Durafetch 2 | 3 | Durafetch allows you to download your Cloudflare Durable Object state into a local SQLite database file. 4 | 5 | It consists of two npm packages: 6 | 7 | - 1 . `durafetch-server` 8 | - This repo - JS code that you `import` into your Cloudflare Worker. 9 | - Wraps functions such as `fetch` to keep a list of Durable Object IDs. 10 | - Works on localhost for usage during development. 11 | 12 | - 2 . `durafetch` 13 | - Repo: [durafetch](https://github.com/emadda/durafetch) 14 | 15 | - A CLI client that: 16 | - Downloads the list of Durable Object IDs. 17 | - Determines which objects have new data since the last run. 18 | - Connects to each Durable Object directly via WebSocket and downloads only the changes since the last download. 19 | - Writes them to a local SQLite database. 20 | 21 | - Usage: 22 | - `npm install --global durafetch` 23 | - `durafetch --config-file config.json` 24 | 25 | # Why use Durafetch? 26 | 27 | 28 | **As an admin UI** 29 | 30 | Durable Objects do not have an admin UI or any other method of observing their state 31 | other than the provided JS APIs. 32 | 33 | This makes development difficult as you cannot see what is stored in your Durable Objects. 34 | 35 | Durafetch gives you a SQL interface to the state of your system so you can observe it during development and in production. 36 | 37 | **For queries.** 38 | 39 | Durable Objects are distributed by nature, but it is often useful to create a central database of the state so you can query it as a single datastore. SQLite gives you a SQL query engine with JSON functions. 40 | 41 | 42 | 43 | **For backup and restore.** 44 | 45 | There is no built-in method to extract data from Durable Objects - Durafetch lets you do this. 46 | 47 | Presently there is no method for restoring - this may be added later. 48 | 49 | # Steps to add Durafetch to your Cloudflare worker. 50 | 51 | - [test/worker-1](test/worker-1) - A minimal example worker you can test locally. 52 | 53 | ## Steps 54 | 55 | - 1 . `npm install durafetch-server` 56 | - 2 . Create a [durafetch-with-config.ts](test/worker-1/src/durafetch-with-config.ts) file. 57 | - This will pass the worker_name to durafetch. 58 | - Import functions from here (see the sketch after this list). 59 | - 3 . Add `DURAFETCH_DO` to [wrangler.toml](test/worker-1/wrangler.toml), along with the subdomain routes and the `DURAFETCH_AUTH` env var. 60 | - 4 . Add [`wrap_worker_env()`](test/worker-1/src/index.ts) to the worker `fetch` handler, along with the external Durafetch API handler. 61 | - 5 . Add [`wrap_durable_object(this)`](test/worker-1/src/durable_object_a.ts) to any Durable Objects you want to download the data from. 62 | - 6 . Add `127.0.0.1 durafetch_{worker_name}.localhost` to `/etc/hosts` so that `ws://durafetch_{worker_name}.localhost:1234` connects to `workerd` locally during development. 63 | 
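A minimal sketch of steps 2 and 4, based on the comments in [src/config.ts](src/config.ts) and the files in [test/worker-1](test/worker-1) - treat the file layout and routing details as illustrative rather than authoritative:

```
// durafetch-with-config.ts - set the worker_name once, re-export everything else.
import {config, core, durafetch_do, util} from "durafetch-server";

// Must match the worker name in wrangler.toml.
config.set_config({worker_name: "worker-1"});

export {core, durafetch_do, util};
```

```
// index.ts (worker entry) - wire up Durafetch before your own routing.
import {core, durafetch_do, util} from "./durafetch-with-config";

// Re-export so the DURAFETCH_DO binding in wrangler.toml resolves to the class.
export const DURAFETCH_DO = durafetch_do.DURAFETCH_DO;

export default {
    async fetch(req, env, ctx) {
        // Wrap the Durable Object bindings so Durafetch can record ids/names.
        core.wrap_worker_env(env);

        // Route requests for the `durafetch_{worker_name}` subdomain to the external API.
        if (util.starts_with_ext_subdomain(env, req)) {
            const durafetch_stub = await util.get_durafetch_do(env);
            return durafetch_stub.fetch(req);
        }

        // ... your own request handling here ...
        return new Response("ok");
    }
};
```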
64 | ### Setting up subdomain routing. 65 | 66 | Each worker has its own Durafetch external API (CF service bindings are not used). The Durafetch CLI fetches data from each of them and writes them all to the same SQLite DB. 67 | 68 | The Durafetch external API is reachable from a subdomain: `durafetch_{your_worker_name}.your-domain.com`. CF has automatic HTTPS cert provisioning for first-level subdomains - the wildcard route below allows you to route any subdomain to your worker. 69 | 70 | Add this to your wrangler.toml: 71 | 72 | ``` 73 | # Note: "wrangler dev" in version 3+ rewrites localhost URLs to match the zone_name when running locally. 74 | # - This breaks subdomain routing as it replaces "http://durafetch_{your_worker_name}.localhost:1234" with "http://your-domain.com". 75 | # - A temp fix is to add these routes using the CF web UI and remove them from your wrangler.toml file. 76 | routes = [ 77 | { pattern = "*.your-domain.com/*", zone_name = "your-domain.com" } 78 | ] 79 | ``` 80 | 81 | Add `your-domain.com` to Cloudflare DNS. 82 | 83 | Add a CNAME record: 84 | 85 | | Type | Name | Content | Proxy Status | 86 | |-------|------|-----------------------------|--------------| 87 | | CNAME | * | can.be.anything.example.com | Proxied | 88 | 89 | Because this is "Proxied", the `Content` target is ignored and CF DNS returns the IP of your worker. 90 | 91 | ## Using the CLI client to download data to a SQLite db 92 | 93 | - 1 . `npm install durafetch` 94 | - 2 . Save a JSON config that looks like this (`worker-1` is the name of your worker): 95 | 96 | ``` 97 | { 98 | "db_file": "./del/db.sqlite", 99 | "servers": [ 100 | { 101 | "ws_url": "ws://durafetch_worker-1.localhost:8720", 102 | "auth_token": "secret_http_auth_bearer_token_replace_this_with_more_than_40_chars" 103 | } 104 | ] 105 | } 106 | ``` 107 | 108 | - 3 . Start the client: `durafetch --config-file ./config.json` 109 | 110 | # Pricing 111 | 112 | - [License](LICENSE) 113 | - [https://durafetch.com#pricing](https://durafetch.com#pricing) 114 | 115 | # Scalability 116 | 117 | Durafetch has been designed with scalability in mind: 118 | 119 | - You should be able to extract more than 128MB of data (a single worker has 128MB of RAM) as WebSockets are used to stream the key/values instead of storing them in RAM. 120 | - WebSockets connect directly to each Durable Object (they do not go via a proxy Durable Object, which would become a bottleneck). 121 | - Only changes are downloaded - Durafetch will not re-read key/value data it has already downloaded. 122 | - Minimizes requests, CPU time and costs. 123 | - Minimal data copies. 124 | - The values of `storage.put("key", "value")` are not copied on every change - when the CLI client downloads data it reads the current state directly from the Durable Object. 125 | - Values do not get sent to intermediate storage (like R2 or another Durable Object) on change - this reduces request/storage costs. 126 | - Changes to keys are recorded for each write - this allows the "download only changes" logic to work. 127 | - The assumption is that keys are generally much smaller than values. 128 | - When using Durafetch the number of write requests is doubled - each write triggers a second write that records the key(s) that were written to along with an integer write_id (see the sketch below). 129 | - The cost of write requests is currently [$1 per million](https://developers.cloudflare.com/workers/platform/pricing/#durable-objects). 130 | 131 | Please create an issue if you encounter any problems. 132 | 
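To make the write-log bullets above concrete, here is roughly what a single wrapped write stores (a sketch - the `_df.*` key shapes are taken from [src/core.ts](src/core.ts); they are internal and may change):

```
// One application write inside a wrapped Durable Object...
storage.put("user.42", {name: "Ada"});

// ...also records, in the same implicit write transaction:
//   "_df.write_id.7"    -> {keys: ["user.42"]}   // the keys touched by write_id 7
//   "_df.next_write_id" -> {log_id: "2023-06-01T00:00:00.000Z", write_id: 8}
//
// `log_id` identifies the current write branch - `storage.deleteAll()` starts a new
// one, which tells the client to discard its previous download and re-read from scratch.
```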
133 | # Security 134 | 135 | - In production only HTTPS/WSS is allowed. 136 | - Requests for the external Durafetch API must use an `Authorization: Bearer` HTTP header with the secret token set as an env var. 137 | 138 | # To do 139 | 140 | - Client 141 | - [ ] Detect writes to the SQLite database, write them back to the remote object. 142 | - [ ] Optionally keep a SQLite write audit history to make deletes visible and allow syncing to other systems. 143 | - [ ] Export/restore CLI. 144 | - [ ] Convert polling to a bidirectional WebSocket when receiving the current list of Durable Objects. 145 | 146 | - Server 147 | - [ ] Compress/garbage-collect the "changed keys" history stored in each Durable Object. 148 | - [ ] Regex filter to include/exclude by class/name/key. 149 | 150 | -------------------------------------------------------------------------------- /sh/push-new-version.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | // Script to push a new version. 4 | // - Ensures local git tags, the remote GitHub repo and npm are all in sync. 5 | // - Copies the tag from package.json to a git tag, pushes to the remote. 6 | // - Pushes the version to npm. 7 | 8 | const fs = require("fs"); 9 | const util = require('util'); 10 | const exec = util.promisify(require('child_process').exec); 11 | 12 | const github_repo = `https://github.com/emadda/durafetch-server`; 13 | 14 | 15 | const run = async () => { 16 | let x; 17 | 18 | x = await exec('git status --short package.json'); 19 | if (/^M/.test(x.stdout.trim())) { 20 | console.error("Error: Must commit package.json as `npm publish` will read the version from the remote GitHub repo."); 21 | return false; 22 | } 23 | 24 | const package_buffer = fs.readFileSync("package.json"); 25 | const p = JSON.parse(package_buffer.toString()); 26 | const cur_version = p.version; 27 | const new_tag = `v${cur_version}`; 28 | 29 | 30 | // Check the tag is new. 31 | x = await exec('git tag'); 32 | const tags = x.stdout.split("\n").filter(x => /^v\d/.test(x)); 33 | 34 | if (tags.includes(new_tag)) { 35 | console.error(`Error: Version ${cur_version} already exists as a git tag.`); 36 | return false; 37 | } 38 | 39 | 40 | // Create git tag, push to remote. 41 | console.log(`Creating local git tag ${new_tag} and pushing to remote origin.`); 42 | x = await exec(`git tag ${new_tag}`); 43 | x = await exec(`git push -u origin master`); 44 | x = await exec(`git push origin ${new_tag}`); 45 | console.log(x.stdout); 46 | 47 | 48 | // Publish to NPM. 49 | // x = await exec(`npm publish ${github_repo}`); 50 | // console.log(x.stdout); 51 | // console.log("Published to NPM"); 52 | 53 | // Use the remote GitHub repo to ensure the code is committed and pushed. 
54 | // Run this manually to enter the OTP: 55 | console.log(`Now run "npm publish ${github_repo}"`); 56 | // npm publish https://github.com/emadda/durafetch-server 57 | 58 | return true; 59 | } 60 | 61 | 62 | run(); -------------------------------------------------------------------------------- /src/config-internal.ts: -------------------------------------------------------------------------------- 1 | const config_int = { 2 | private_key_prefix: `_df`, 3 | private_key: `_durafetch`, 4 | primary_name: `durafetch`, 5 | primary_name_caps: `DURAFETCH`, 6 | env_key_auth: `DURAFETCH_AUTH`, 7 | env_key_env: `DURAFETCH_ENV`, 8 | }; 9 | 10 | export { 11 | config_int 12 | } -------------------------------------------------------------------------------- /src/config.ts: -------------------------------------------------------------------------------- 1 | import _ from "lodash"; 2 | 3 | // @see https://developers.cloudflare.com/workers/platform/limits/#durable-objects-limits 4 | const max_val_size_kb = 128; 5 | const ram_per_do_kb = 128 * 1000; 6 | 7 | const default_config = { 8 | // Use up to 64MB of RAM: (128,000KB / 2) / 128KB max value size = 500 key/values per message. 9 | number_of_key_values_in_each_do_download_ws_message: (ram_per_do_kb / 2) / max_val_size_kb, 10 | worker_name: null, 11 | log_level: "error", 12 | 13 | // When writes occur in a DO they send an HTTP request to DURAFETCH_DO to notify it of a new write_id. 14 | // - Rate limit those notifications to one notification per x ms. 15 | max_write_notifications_per_do_in_ms: 100 16 | } 17 | 18 | const config = { 19 | ...default_config 20 | }; 21 | 22 | // Allow setting `worker_name` from the importing parent package. 23 | // - `worker_name` may come from many places (package.json name, CF env vars etc.). 24 | // - There does not seem to be a documented CF interface to read this from the worker. 25 | // - The importer should create a `durafetch-with-config.ts` file with `set_config(x); export * from "durafetch-server"`. 26 | // - Imported files are only run once even if included multiple times. 
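// Example importer (a sketch - `worker-1` is an illustrative worker name):
//   import {config} from "durafetch-server";
//   config.set_config({worker_name: "worker-1"});
//   export * from "durafetch-server";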
27 | const set_config = (x) => { 28 | _.merge(config, x); 29 | } 30 | 31 | 32 | export { 33 | set_config, 34 | config 35 | } -------------------------------------------------------------------------------- /src/core.ts: -------------------------------------------------------------------------------- 1 | import * as _ from "lodash"; 2 | import { 3 | add_ws_helper_fns, 4 | get_durafetch_do, 5 | get_internal_req_hostname, 6 | get_worker_name, 7 | is_auth_valid, 8 | starts_with_ext_subdomain, json_res, res_not_authorised 9 | } from "./util"; 10 | import {config} from "./config"; 11 | import {config_int} from "./config-internal"; 12 | import {log} from "./log"; 13 | 14 | 15 | const is_do_binding = (obj) => { 16 | log.log({obj}); 17 | 18 | // Check for interface: DurableObjectNamespace 19 | return ( 20 | _.isObject(obj) && 21 | "idFromName" in obj && 22 | typeof obj.idFromName === 'function' && 23 | "idFromString" in obj && 24 | typeof obj.idFromString === 'function' 25 | ); 26 | } 27 | 28 | 29 | const get_do_bindings = (env) => { 30 | const o = []; 31 | for (const [k, v] of Object.entries(env)) { 32 | if (is_do_binding(v)) { 33 | o.push([k, v]); 34 | } 35 | } 36 | 37 | return o; 38 | }; 39 | 40 | const wrap_one_durable_object_stub = (stub, opts) => { 41 | const {meta} = opts; 42 | 43 | const orig_fn_fetch = stub.fetch; 44 | 45 | 46 | let has_set_name_p = null; 47 | stub.fetch = async (...args) => { 48 | 49 | if (has_set_name_p === null) { 50 | has_set_name_p = new Promise(async (resolve, reject) => { 51 | 52 | const req = new Request( 53 | // Assumption: The domain will stop any external requests from reaching the control machinery of the durable object (as it is not possible to route `https://durafetch.example.com/set-meta` to a worker). 54 | // - A common pattern for Workers is to forward external requests via `fetch(external_request)`. This could potentially create an attack vector by allowing external actors to set the name. 55 | `https://${get_internal_req_hostname()}/set-meta`, 56 | { 57 | method: 'POST', 58 | headers: {'content-type': 'application/json'}, 59 | body: JSON.stringify({ 60 | id: meta.id, 61 | name: meta.name 62 | }) 63 | } 64 | ); 65 | 66 | log.log("Making request to set-meta from stub.fetch"); 67 | const res = await orig_fn_fetch.apply(stub, [req]); 68 | try { 69 | const x = await res.json(); 70 | if (x.ok) { 71 | resolve(true); 72 | return; 73 | } 74 | } catch (e) { 75 | log.error(e); 76 | } 77 | 78 | // Dev should fix this during development. 79 | const msg = `Could not set the name of the Durable Object with name ${meta.name} and id ${meta.id}. You need to wrap the durable object to allow it to accept setting the name via fetch.`; 80 | log.error(msg); 81 | throw Error(msg); 82 | // reject(false); 83 | 84 | // Note: workerd will not log the Error thrown as it terminates itself first with `Error: The script will never generate a response.`. 85 | // - Fix: `log.error` works. 86 | }); 87 | } 88 | 89 | // The first `fetch` call sets the name; subsequent calls will queue until the name has been set. 90 | await has_set_name_p; 91 | 92 | 93 | const res = orig_fn_fetch.apply(stub, args); 94 | return res; 95 | }; 96 | 97 | return stub; 98 | } 99 | 100 | 101 | const wrap_one_do_binding = (binding) => { 102 | 103 | // Assumption: Durable objects are always referenced by name. 104 | // Note: This index has the same lifetime as the JS worker `env` object. Could get very large in the case of accessing millions of durable objects. 
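// e.g. after `binding.idFromName("user-123")` (the id string is illustrative):
//   id_to_meta["65f3..."] = {id: "65f3...", name: "user-123"}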
105 | const id_to_meta = {}; 106 | const orig_fn_idFromName = binding.idFromName; 107 | binding.idFromName = (...args) => { 108 | const [name] = args; 109 | 110 | const id_obj = orig_fn_idFromName.apply(binding, args); 111 | const id = id_obj.toString(); 112 | 113 | id_to_meta[id] = { 114 | id, 115 | name 116 | }; 117 | 118 | return id_obj; 119 | }; 120 | 121 | 122 | const orig_fn_get = binding.get; 123 | binding.get = (...args) => { 124 | const [id] = args; 125 | 126 | let meta = id_to_meta[id]; 127 | if (!_.isObject(meta)) { 128 | // When: DO referenced without calling `idFromName` first. 129 | // E.g. When calling it from DURAFETCH_DO as the client references by `id`. 130 | meta = { 131 | id: id.toString(), 132 | name: null 133 | } 134 | } 135 | 136 | 137 | const stub = orig_fn_get.apply(binding, args); 138 | return wrap_one_durable_object_stub(stub, {meta}); 139 | }; 140 | 141 | return binding; 142 | } 143 | 144 | 145 | const wrap_worker_env = (env, opts = {}) => { 146 | if (config_int.private_key in env) { 147 | // Already wrapped. 148 | log.log("Ignoring wrap_env as env already wrapped."); 149 | return; 150 | } 151 | 152 | const { 153 | exclude = [] 154 | } = opts; 155 | exclude.push("DURAFETCH_DO"); 156 | 157 | log.log("wrap_env"); 158 | 159 | const do_bindings = get_do_bindings(env); 160 | 161 | log.log("do_bindings.keys", do_bindings.map(x => x[0])); 162 | 163 | for (const [k, v] of do_bindings) { 164 | if (exclude.includes(k)) { 165 | continue; 166 | } 167 | 168 | const proxied_binding = wrap_one_do_binding(v); 169 | log.log({proxied_binding}); 170 | env[k] = proxied_binding; 171 | } 172 | 173 | 174 | env[config_int.private_key] = { 175 | env_wrapped: true 176 | }; 177 | 178 | log.log({env}); 179 | 180 | } 181 | 182 | 183 | const key_prefix = config_int.private_key_prefix; 184 | const key_next_write_id = `${key_prefix}.next_write_id`; 185 | const get_key_for_write_id = (write_id) => `${key_prefix}.write_id.${write_id}` 186 | const keys_contain_meta = (keys) => keys.some((x) => key_is_meta(x)); 187 | const key_is_meta = (k) => k.startsWith(key_prefix); 188 | const get_meta_key = () => `${key_prefix}.meta` 189 | 190 | // Usage: 191 | // - Set `this.storage` from the constructor arg[0]. 192 | // - Call in durable object class constructor, pass `this`. 193 | const wrap_durable_object = (ins, opts = {}) => { 194 | const ok = ( 195 | ("class_name" in opts) && 196 | _.isString(opts.class_name) && 197 | opts.class_name.length > 0 198 | ); 199 | 200 | if (!ok) { 201 | // Note: `ins.constructor.name` is empty when using wrangler v3+. 202 | throw Error("Must set `class_name` via wrap_durable_object arg"); 203 | } 204 | 205 | 206 | const orig_fn_fetch = ins.fetch; 207 | const orig_fetch = (...args) => { 208 | return orig_fn_fetch.apply(ins, args); 209 | } 210 | 211 | // On every operation that writes: Keep a write_id and record the key that was modified 212 | // 213 | // - Each write log has a `log_started` - this is like a `log_id` - a specific branch of writes. 214 | // - Needed in case of a `deleteAll` then writing to the same keys as before. 215 | // - `deleteAll` should delete the write log - it will be recreated on the next DO start. 216 | // 217 | // - @todo/low `transaction` not supported. The newer CF runtime implicitly locks the event loop on reads/writes. 218 | // 219 | // - Each write to a key gets its own `write_id` - many writes can be part of the same implicit write transaction. 220 | // - Transaction IDs are not recorded as they are not observable. 
221 | // - It is not possible to detect which write is the last one in a transaction (the runtime determines the start/end boundaries when going from sync code to an await point). 222 | // - The current write id must be stored in the DO RAM (instead of `await storage.get()`ing it). 223 | // - An async read would break the atomic unit of writes (writes before and after the await point would no longer apply atomically in case of DO process failure). 224 | const storage = ins.state.storage; 225 | 226 | 227 | // Throttle notifications in case of rapid writes. 228 | // Is called with last args, invoked on leading and trailing edge by default. 229 | // @see https://developers.cloudflare.com/workers/runtime-apis/durable-objects/#durable-object-lifespan 230 | // - "Durable Objects will wait until all async IO is complete including promises". 231 | // Note: `_.throttle` must be created per durable object instance so the rate limit state is per instance (not shared globally for all durable object instances). 232 | // @todo/low retry on fail 233 | const send_event_internal_do_new_cur_write_id_throttled = _.throttle(send_event_internal_do_new_cur_write_id, config.max_write_notifications_per_do_in_ms); 234 | 235 | const wrap_write_fn = (fn_str) => { 236 | const orig_fn = storage[fn_str]; 237 | 238 | // @todo/high Determine the effect of mixing writes without await with await. 239 | // - Assumption: The order is preserved, but the await just pauses the execution until a write ACK is returned from the disk to the runtime. 240 | storage[fn_str] = (...args) => { 241 | let keys = []; 242 | if (_.isString(args[0]) && ["put", "delete"].includes(fn_str)) { 243 | keys = [args[0]]; 244 | } 245 | 246 | if (_.isObject(args[0]) && fn_str === "put") { 247 | keys = _.keys(args[0]); 248 | } 249 | 250 | // Multi delete. 251 | if (_.isArray(args[0]) && fn_str === "delete") { 252 | keys = [...args[0]]; 253 | } 254 | 255 | if (keys.length === 0 || !init_ran) { 256 | throw Error("Could not read keys from write fn args, or next_write_id not initialised."); 257 | } 258 | 259 | 260 | if (keys_contain_meta(keys)) { 261 | // This is a _df write. Do not log. 262 | return orig_fn.apply(storage, args); 263 | } 264 | 265 | 266 | const write_id = ins[key_prefix].next_write_id.write_id; 267 | 268 | // @todo/high Assumption: Order of writes is preserved; these writes atomically apply together in-order regardless of whether the fn caller awaits the returned `write` 269 | // @see https://blog.cloudflare.com/durable-objects-easy-fast-correct-choose-three/ 270 | // @todo/high Test failure states. 271 | // - Can a write tx fail, but the next one complete ok? 272 | // - Assumption: A write tx fail is rare and kills the process/invalidates all subsequent writes. 273 | // - If not, there could be a gap in write_ids as the local write_id is incremented regardless of transaction success. 274 | // @todo/maybe Collect write_ids of many writes into a single insert, apply in next write tx with setTimeout(x, 1) to reduce number of writes. 275 | const write = orig_fn.apply(storage, args); 276 | storage.put(get_key_for_write_id(write_id), {keys}); 277 | increment_write_id(); 278 | send_event_internal_do_new_cur_write_id_throttled(ins, get_cur_write_id()); 279 | return write; 280 | } 281 | }; 282 | 283 | const wrap_delete_all = () => { 284 | const orig_fn = storage.deleteAll; 285 | 286 | // After a deleteAll event, storage is cleared but the class instance variables still reference the write_id-version from the storage state that was cleared. 
287 | // - Reset log_id/write_id. A change in log_id indicates to any reading clients that a new branch has started and to delete all the previous state. 288 | storage.deleteAll = async (...args) => { 289 | const ret = await orig_fn.apply(storage, args); 290 | set_new_write_branch_after_delete_all(); 291 | return ret; 292 | } 293 | }; 294 | 295 | 296 | wrap_write_fn("put"); 297 | wrap_write_fn("delete"); 298 | wrap_delete_all(); 299 | 300 | 301 | // Ignore deleteAll - it will restart the write log. 302 | 303 | 304 | // Read from storage into local DO class instance var. 305 | let init_ran = false; 306 | 307 | const init_local_once = async (full_meta) => { 308 | if (init_ran) { 309 | return; 310 | } 311 | init_ran = true; 312 | 313 | // Restore from previous run, or create if it is the very-first instance. 314 | let next_write_id = await storage.get(key_next_write_id); 315 | if (next_write_id === undefined) { 316 | next_write_id = get_new_write_branch(); 317 | } 318 | 319 | if (!(key_prefix in ins)) { 320 | ins[key_prefix] = { 321 | next_write_id, 322 | meta: full_meta 323 | } 324 | } 325 | } 326 | 327 | const set_new_write_branch_after_delete_all = () => { 328 | ins[key_prefix].next_write_id = get_new_write_branch(); 329 | } 330 | 331 | const get_new_write_branch = () => { 332 | return { 333 | // Only unique per DO (to distinguish storage.deleteAll restarts). 334 | log_id: new Date().toISOString(), 335 | // Max JS number = 9007199254740991, durable objects are $5/M writes, meaning it would cost $45B to exhaust the ID space. That's 18 Jay-Z's. 336 | write_id: 1 337 | } 338 | } 339 | 340 | const increment_write_id = () => { 341 | ins[key_prefix].next_write_id.write_id += 1; 342 | return storage.put(key_next_write_id, ins[key_prefix].next_write_id); 343 | }; 344 | const get_cur_write_id = () => { 345 | const x = _.cloneDeep(ins[key_prefix].next_write_id); 346 | x.write_id -= 1; 347 | return x; 348 | } 349 | const get_cur_write_id_from_storage = async () => { 350 | const v = await storage.get(key_next_write_id); 351 | if (v === undefined) { 352 | return null; 353 | } 354 | v.write_id -= 1; 355 | return v; 356 | } 357 | 358 | 359 | ins.fetch = async (req) => { 360 | const url = (new URL(req.url)); 361 | const hostname = url.hostname; 362 | 363 | // @todo/med Explicitly define subdomains for internal/external requests. 364 | if (hostname === get_internal_req_hostname()) { 365 | if (url.pathname === "/set-meta") { 366 | const meta = await req.json(); 367 | const full_meta = get_full_meta(meta); 368 | 369 | await init_local_once(full_meta); 370 | 371 | const k = get_meta_key(); 372 | const val = await ins.state.storage.get(k); 373 | 374 | if (val === undefined) { 375 | // When: Very first instance of this DO name. 376 | log.log("Setting meta", meta); 377 | await ins.state.storage.put(k, meta); 378 | } 379 | 380 | // Notify DURAFETCH_DO that this DO name has started (so that it can update its index). 381 | await send_event_internal_do_started(ins.env, { 382 | obj: get_started_event(full_meta), 383 | cur_write_id: await get_cur_write_id_from_storage() 384 | }); 385 | 386 | 387 | return json_res({ok: true}); 388 | } 389 | throw Error(`Incorrect path for wrapped durable object ${url}.`); 390 | } 391 | 392 | 393 | // Forwarded from DURAFETCH_DO. 394 | // DURAFETCH_DO has already checked: Auth, Request format, Websocket upgrade header. 
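// e.g. a forwarded external request might look like this (the URL is illustrative):
//   wss://durafetch_worker-1.your-domain.com/external/do/read_all_from?from_log_id=...&from_write_id=...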
395 | if (starts_with_ext_subdomain(ins.env, req)) { 396 | 397 | // Re-check auth in the case of the end user forwarding internet requests to their DO with wildcard domains enabled. 398 | if (!is_auth_valid(ins.env, req)) { 399 | return res_not_authorised(); 400 | } 401 | 402 | 403 | if (url.pathname === "/external/do/read_all_from") { 404 | return return_all_data_via_ws_messages(ins, req); 405 | } 406 | 407 | // @todo/low Add prefix to all error messages from DURAFETCH to allow easier debugging. 408 | return new Response(`${config_int.primary_name_caps}: Unknown action.`, {status: 400}); 409 | } 410 | 411 | 412 | return orig_fetch(req); 413 | }; 414 | 415 | const get_full_meta = (meta) => { 416 | return { 417 | worker_name: get_worker_name(), 418 | class_name: opts.class_name, 419 | name: meta.name, 420 | 421 | id: meta.id, 422 | } 423 | } 424 | 425 | const get_started_event = (meta) => { 426 | return { 427 | ...get_full_meta(meta), 428 | last_started_at: new Date() 429 | } 430 | } 431 | 432 | }; 433 | 434 | 435 | const send_event_internal_do_new_cur_write_id = async (ins, cur_write_id) => { 436 | const o = await get_durafetch_do(ins.env); 437 | 438 | const req = new Request( 439 | `https://${get_internal_req_hostname()}/internal/do/new_cur_write_id`, 440 | { 441 | method: 'POST', 442 | headers: {'content-type': 'application/json'}, 443 | body: JSON.stringify({ 444 | meta: ins[key_prefix].meta, 445 | cur_write_id 446 | }) 447 | } 448 | ); 449 | return o.fetch(req); 450 | }; 451 | 452 | const send_event_internal_do_started = async (env, x) => { 453 | const o = await get_durafetch_do(env); 454 | 455 | const req = new Request( 456 | // Assumption: The domain will stop any external requests from reaching the control machinery of the durable object (as it is not possible to route `https://durafetch.example.com/set-meta` to a worker). 457 | // - A common pattern for Workers is to forward external requests via `fetch(external_request)`. This could potentially create an attack vector by allowing external actors to set the name. 458 | `https://${get_internal_req_hostname()}/internal/do/started`, 459 | { 460 | method: 'POST', 461 | headers: {'content-type': 'application/json'}, 462 | body: JSON.stringify(x) 463 | } 464 | ); 465 | return o.fetch(req); 466 | } 467 | 468 | 469 | const return_all_data_via_ws_messages = (durable_object_ins, req) => { 470 | const url = new URL(req.url); 471 | const { 472 | // Read from and including this write_id. 473 | from_log_id = null, 474 | from_write_id = null, 475 | 476 | // @todo/med Array of regex filters to match (worker_name, class_name, name, key). Send to DO to filter on read. Also add exclude list? 477 | filters = null 478 | } = _.fromPairs([...url.searchParams]); 479 | 480 | 481 | log.log("NEW WEBSOCKET PAIR: return_all_data_via_ws_messages"); 482 | const webSocketPair = new WebSocketPair(); 483 | const [client, server] = Object.values(webSocketPair); 484 | add_ws_helper_fns(server); 485 | 486 | // Assumption: this tells the runtime this ws socket will be terminated in JS (used as a server). 487 | // - This must keep a reference to `server` var and keep the web socket running, as it is not garbage collected and closed at the end of this function. 
488 | server.accept(); 489 | 490 | server.addEventListener('message', (e) => { 491 | // const not_string = (typeof e.data !== "string"); 492 | // server.send("ACK string received from wsdo"); 493 | log.log("return_all_data_via_ws_messages: ws message received: ", e.data); 494 | }); 495 | 496 | server.addEventListener('error', (event) => { 497 | log.log("ws.error", event); 498 | }); 499 | server.addEventListener('close', (event) => { 500 | log.log(`ws.close ${new Date()}`, event); 501 | }); 502 | 503 | (async () => { 504 | // No writes have occurred - no data to download. 505 | // @todo/low When adding to an existing DO there could be data that was written before the write fns were wrapped. 506 | const cur_write_id = await durable_object_ins.state.storage.get(key_next_write_id); 507 | if (cur_write_id === undefined) { 508 | await read_no_changes(server, "durable_object_has_not_written_any_data"); 509 | return; 510 | } 511 | 512 | if (_.isString(from_log_id) && _.isString(from_write_id)) { 513 | 514 | // The remote durable object has changed log_id since the client requested (e.g. due to a storage.deleteAll). 515 | // - Send no changes, and expect the next log_id will be the correct one. 516 | if (cur_write_id.log_id !== from_log_id) { 517 | await read_no_changes(server, "requested_log_id_differs_from_remote"); 518 | return; 519 | } 520 | 521 | const changed_keys = await get_all_keys_changed_from_write_id(durable_object_ins, from_log_id, from_write_id); 522 | if (_.isArray(changed_keys)) { 523 | if (changed_keys.length === 0) { 524 | await read_no_changes(server, "already_up_to_date"); 525 | return; 526 | } else { 527 | await read_changes_only(durable_object_ins, server, changed_keys); 528 | return; 529 | } 530 | } 531 | } 532 | 533 | // When: First read. Or reading after a `storage.deleteAll` 534 | await read_all_keys(durable_object_ins, server); 535 | })(); 536 | 537 | 538 | return new Response(null, { 539 | status: 101, 540 | webSocket: client, 541 | }); 542 | } 543 | 544 | 545 | const read_changes_only = async (durable_object_ins, server, changed_keys) => { 546 | const cur_write_id = await durable_object_ins.state.storage.get(key_next_write_id); 547 | cur_write_id.write_id -= 1; 548 | 549 | server.send_json({ 550 | kind: "start", 551 | read_type: "changes_only", 552 | // Data includes writes up to and including this write id. 553 | cur_write_id 554 | }); 555 | 556 | // @todo/low Allow filtering these keys with a regex. 557 | // @todo/high Filter values to only those that can be passed to `JSON.stringify`. `put` can store many native JS types. 558 | const x = _.chunk(changed_keys, 128); 559 | const keys_exist = []; 560 | 561 | for (const keys of x) { 562 | // Note: `get([...keys])` - any keys that do not exist are omitted. 563 | const all = await durable_object_ins.state.storage.get(keys); 564 | const keys_and_values = {}; 565 | 566 | for (const [k, v] of all) { 567 | keys_and_values[k] = v; 568 | keys_exist.push(k); 569 | } 570 | 571 | server.send_json({ 572 | kind: "keys_and_values", 573 | keys_and_values 574 | }); 575 | } 576 | 577 | // @todo/next When the deleted keys are the last write and are deleted locally, it creates a loop where the local client thinks it has to fetch the deletes and then deletes the write_id of those deletes. 
578 | const deletes = _.difference(changed_keys, keys_exist); 579 | if (deletes.length > 0) { 580 | server.send_json({ 581 | kind: "deleted_keys", 582 | deleted_keys: deletes 583 | }); 584 | } 585 | 586 | server.send_json({ 587 | kind: "end" 588 | }); 589 | 590 | // Wait for client to receive `end`. 591 | setTimeout(() => { 592 | server.close(); 593 | }, 60_000); 594 | } 595 | 596 | const read_no_changes = async (server, reason = undefined) => { 597 | server.send_json({ 598 | kind: "start", 599 | read_type: "no_changes", 600 | reason 601 | }); 602 | server.send_json({ 603 | kind: "end" 604 | }); 605 | setTimeout(() => { 606 | server.close(); 607 | }, 60_000); 608 | } 609 | 610 | const log_all_storage_values = async (ins) => { 611 | const all = await ins.state.storage.list(); 612 | 613 | for (const [k, v] of all.entries()) { 614 | log.log(JSON.stringify({k, v}, null, 4)); 615 | } 616 | } 617 | 618 | const read_all_keys = async (durable_object_ins, server) => { 619 | let opts = { 620 | limit: config.number_of_key_values_in_each_do_download_ws_message 621 | } 622 | 623 | 624 | const cur_write_id = await durable_object_ins.state.storage.get(key_next_write_id); 625 | if (cur_write_id === undefined) { 626 | throw Error("Only call read_all_keys when cur_write_id is set."); 627 | } 628 | 629 | // await log_all_storage_values(durable_object_ins); 630 | cur_write_id.write_id -= 1; 631 | 632 | 633 | server.send_json({ 634 | kind: "start", 635 | read_type: "from_start", 636 | // Data includes writes up to and including this write id. 637 | // Note: Client can get this event even when they have a previous download - `deleteAll` could start a new branch. 638 | cur_write_id 639 | }); 640 | 641 | while (true) { 642 | const all = await durable_object_ins.state.storage.list(opts); 643 | const keys_and_values = {}; 644 | let last_k = null; 645 | 646 | 647 | for (const [k, v] of all) { 648 | // @todo/low Allow filtering these keys with a regex. 649 | 650 | // Exclude `_df` keys. 651 | if (key_is_meta(k)) { 652 | continue; 653 | } 654 | 655 | keys_and_values[k] = v; 656 | last_k = k; 657 | } 658 | 659 | if (last_k === null) { 660 | break; 661 | } 662 | 663 | // Assumption: All of these reads happen inside one read tx, and block writes/other events. 664 | // - Await points implicitly close txs; reads block incoming events from interrupting this loop. 665 | // - @see https://blog.cloudflare.com/durable-objects-easy-fast-correct-choose-three/ 666 | server.send_json({ 667 | kind: "keys_and_values", 668 | keys_and_values 669 | }); 670 | 671 | opts.startAfter = last_k; 672 | } 673 | 674 | server.send_json({ 675 | kind: "end" 676 | }); 677 | 678 | // Wait for client to receive `end`. 679 | setTimeout(() => { 680 | server.close(); 681 | }, 60_000); 682 | } 683 | 684 | 685 | const get_all_keys_changed_from_write_id = async (durable_object_ins, from_log_id, from_write_id) => { 686 | const cur_write_id = await durable_object_ins.state.storage.get(key_next_write_id); 687 | cur_write_id.write_id -= 1; 688 | 689 | if (from_log_id !== cur_write_id.log_id) { 690 | // Incremental update not possible (likely because deleteAll has restarted the write log). 691 | return null; 692 | } 693 | 694 | const client_cur_write_id = from_write_id - 1; 695 | 696 | if (client_cur_write_id === cur_write_id.write_id) { 697 | // No changes. 698 | return []; 699 | } 700 | 701 | if (client_cur_write_id > cur_write_id.write_id) { 702 | // Client cannot be ahead of master. 
703 | throw Error(`Requesting to read from a write id (${from_write_id}) that is past the current write id (${cur_write_id.write_id})`); 704 | } 705 | 706 | const write_ids_to_read = _.range(from_write_id, cur_write_id.write_id + 1); 707 | const keys = [] 708 | for (const x of write_ids_to_read) { 709 | // Ignore the client asking for write IDs from 0. 710 | if (x === 0) { 711 | continue; 712 | } 713 | 714 | const v = await durable_object_ins.state.storage.get(get_key_for_write_id(x)); 715 | if (v === undefined) { 716 | throw Error(`Missing write_id in write log for Durable Object. log_id=${from_log_id} write_id=${x}. This could be caused by a failed tx followed by a successful tx.`); 717 | } 718 | 719 | if (keys_contain_meta(v.keys)) { 720 | throw Error(`Metadata included in changed key listing - it should not be. key_prefix=${key_prefix}.`); 721 | } 722 | 723 | keys.push(...v.keys); 724 | } 725 | return _.uniq(keys); 726 | } 727 | 728 | 729 | export { 730 | wrap_worker_env, 731 | wrap_durable_object 732 | } -------------------------------------------------------------------------------- /src/durafetch-do.ts: -------------------------------------------------------------------------------- 1 | import * as _ from "lodash"; 2 | import { 3 | get_worker_name, 4 | is_auth_valid, 5 | starts_with_ext_subdomain, 6 | res_not_authorised, 7 | get_internal_req_hostname 8 | } from "./util"; 9 | import {log} from "./log"; 10 | 11 | const is_websocket_upgrade = (request) => { 12 | return request.headers.get('Upgrade') === "websocket"; 13 | } 14 | 15 | 16 | const json_res = (obj, opts = {}) => { 17 | const o = _.merge( 18 | { 19 | headers: { 20 | 'content-type': 'application/json', 21 | // ...cors_headers 22 | } 23 | }, 24 | opts 25 | ); 26 | 27 | const r = new Response(JSON.stringify(obj), o); 28 | return r; 29 | }; 30 | 31 | 32 | const to_do_key = (durable_object_id) => { 33 | return `do-key.${durable_object_id}`; 34 | } 35 | 36 | const get_do_stub = (env, do_class_name, name) => { 37 | const o = env[do_class_name]; 38 | const id_do = o.idFromName(name); 39 | return o.get(id_do); 40 | } 41 | 42 | const get_do_stub_from_id = (env, do_class_name, id_do) => { 43 | const o = env[do_class_name]; 44 | const id = o.idFromString(id_do); 45 | return o.get(id); 46 | } 47 | 48 | class DURAFETCH_DO { 49 | constructor(state, env) { 50 | // ADMIN_DO 51 | // this.constructor.name 52 | this.state = state; 53 | this.env = env; 54 | 55 | 56 | } 57 | 58 | // Server side web socket connection. 59 | ws_get_and_watch_durable_object_index = []; 60 | state: DurableObjectState 61 | 62 | async fetch(req: Request) { 63 | const url = new URL(req.url); 64 | 65 | // External requests (from the internet). 66 | if (starts_with_ext_subdomain(this.env, req)) { 67 | if (!is_auth_valid(this.env, req)) { 68 | return res_not_authorised(); 69 | } 70 | 71 | 72 | // Worker forwards upgrade from public request, durable object keeps server side websocket in state. 73 | // @todo/low In prod, force wss:// (TLS). 74 | if (is_websocket_upgrade(req)) { 75 | 76 | // Get list of durable object names, watch for changes. 77 | if (url.pathname === "/external/do/get_and_watch_index") { 78 | return this.start_websocket_get_and_watch_durable_object_index(); 79 | } 80 | 81 | 82 | // Read all keys and values from a specific durable object, stream chunks over ws, close after completion. 
83 | if (url.pathname === "/external/do/read_all_from") { 84 | return this.process_external_do_read_all_from(req); 85 | } 86 | } 87 | 88 | 89 | if (req.method === "POST") { 90 | // Delete the list of durable objects (but not the objects themselves). 91 | // - Used for testing. 92 | if (url.pathname === "/external/do/delete_all") { 93 | await this.state.storage.deleteAll(); 94 | const all = [...(await this.state.storage.list())]; 95 | console.log("Deleted all keys in DURAFETCH_DO", JSON.stringify(all)); 96 | return json_res({ok: true}); 97 | } 98 | } 99 | 100 | 101 | return new Response(`Unknown action.`, {status: 400}); 102 | } 103 | 104 | 105 | // Internal requests (direct from another worker using DO stub). 106 | if (url.hostname === get_internal_req_hostname()) { 107 | if (req.method === "POST") { 108 | if (url.pathname === "/internal/do/started") { 109 | return this.process_internal_do_started(req); 110 | } 111 | 112 | if (url.pathname === "/internal/do/new_cur_write_id") { 113 | return this.process_internal_do_new_cur_write_id(req); 114 | } 115 | } 116 | } 117 | 118 | 119 | return new Response(`Unknown action.`, {status: 400}); 120 | } 121 | 122 | async process_external_do_read_all_from(req) { 123 | const url = new URL(req.url); 124 | 125 | const { 126 | worker_name = null, 127 | class_name = null, 128 | name = null, 129 | id = null, 130 | 131 | from_write_id = null, 132 | from_write_log_id = null, 133 | 134 | // @todo/med Array of regex filters to match (worker_name, class_name, name, key). Send to DO to filter on read. 135 | filters = null 136 | } = _.fromPairs([...url.searchParams]); 137 | 138 | const is_valid = ( 139 | _.isString(worker_name) && 140 | _.isString(class_name) && 141 | _.isString(id) && 142 | worker_name === get_worker_name() && 143 | (class_name in this.env) 144 | ); 145 | if (!is_valid) { 146 | // Each worker has its own DURAFETCH_DO instance - this prevents using service bindings to do cross worker requests. 147 | // - Each DURAFETCH_DO reads the durable objects within its parent worker namespace. Each has its own subdomain. The client will download from each subdomain into a single DB. 148 | return json_res({ 149 | ok: false, 150 | msg: "Provide query params with worker_name, class_name and id (of the durable object). Worker name must match the one that handles the HTTP request." 151 | }, {status: 400}); 152 | } 153 | 154 | 155 | // The client connects to the DO directly with a websocket connection. 156 | // - It will either: 157 | // - 1. Read all (read all state from the DO). 158 | // - 2. Read all since X (read all since a write_id - client already has a previous download up to that write_id). 159 | // 160 | // A websocket connection is used because: 161 | // - `ws.send()` is sync 162 | // - Allows moving data out of the JS process (in the case that the DO has too much data to store in the 128MB RAM limit). 163 | // - Does not break the "read tx" that is implicit to DO's (await points implicitly break read tx's). 164 | // 165 | // The websocket connection ends after the download: 166 | // - 1. Avoid keeping it alive to reduce costs. 167 | // - 2. When an end user connects, it will be re-started geographically close to them. 168 | // - For watching for changes, DOs will HTTP POST to this process which will then route down a ws, which prevents points 1 and 2. 
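// Message sequence sent down the socket by the wrapped DO (shapes taken from the
// send_json calls in src/core.ts; fields vary slightly by read_type):
//   {kind: "start", read_type: "from_start" | "changes_only" | "no_changes", cur_write_id}
//   {kind: "keys_and_values", keys_and_values: {...}}   // repeated, in chunks
//   {kind: "deleted_keys", deleted_keys: [...]}         // changes_only reads only
//   {kind: "end"}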
169 | const stub = await get_do_stub_from_id(this.env, class_name, id); 170 | return stub.fetch(req); 171 | } 172 | 173 | 174 | async process_internal_do_started(req) { 175 | const {obj, cur_write_id} = await req.json(); 176 | 177 | const is_valid = ( 178 | _.isString(obj.worker_name) && 179 | _.isString(obj.class_name) && 180 | _.isString(obj.id) 181 | ); 182 | 183 | if (!is_valid) { 184 | return new Response(`Invalid request. Include worker_name, class_name and id.`, {status: 400}); 185 | } 186 | 187 | const do_key = to_do_key(obj.id); 188 | 189 | // @todo/low Communicate deletes: Objects can be deleted via wrangler.toml config. Can this be detected and removed from this index? 190 | // @todo/high Do not overwrite a string name with null (in the case the same durable object is referenced by name, and then later by id). 191 | const val = {obj, cur_write_id}; 192 | this.state.storage.put(do_key, val); 193 | 194 | // @todo/low Remove this as full lists are being downloaded instead and diffed by the client? 195 | // Notify watchers. 196 | // - When a new durable object is created they: 197 | // - 1. Connect and retrieve the initial state. 198 | // - 2. Watch for any new writes via a WS to DURAFETCH_DO (which receives POST requests from each DO). 199 | for (const server of this.ws_get_and_watch_durable_object_index) { 200 | // Note: `ws.send` takes a string, so the message is serialised first. 201 | server.send(JSON.stringify({ 202 | kind: "partial_index", 203 | durable_object_list: [val] 204 | })) 205 | } 206 | 207 | return json_res({ok: true}); 208 | } 209 | 210 | async process_internal_do_new_cur_write_id(req) { 211 | const body = await req.json(); 212 | const id = body.meta.id; 213 | const key = to_do_key(id); 214 | const v = await this.state.storage.get(key); 215 | if (v === undefined) { 216 | const msg = `Key ${key} does not exist in DURAFETCH_DO, but it should at this point as it is being updated with the current write_id (after the durable object has been started which should create this key) ${JSON.stringify(body)}.` 217 | log.error(msg); 218 | throw Error(msg); 219 | } 220 | 221 | v.cur_write_id = body.cur_write_id; 222 | this.state.storage.put(key, v); 223 | // @todo/high Send partial_index of durable object listing to all ws listening on `this.ws_get_and_watch_durable_object_index`. 224 | 225 | return json_res({ok: true}); 226 | } 227 | 228 | 229 | // Return the full list of durable object IDs on first connect. 230 | // After that, send any new durable object IDs that get created. 231 | start_websocket_get_and_watch_durable_object_index() { 232 | log.log("NEW WEBSOCKET PAIR: start_websocket_get_and_watch_durable_object_index"); 233 | const webSocketPair = new WebSocketPair(); 234 | const [client, server] = Object.values(webSocketPair); 235 | 236 | const remove_ws = () => { 237 | _.remove(this.ws_get_and_watch_durable_object_index, (x) => x === server); 238 | } 239 | 240 | // Assumption: this tells the runtime this ws socket will be terminated in JS (used as a server). 241 | // - This must keep a reference to the `server` var and keep the web socket running, so it is not garbage collected and closed at the end of this function. 
242 | server.accept(); 243 | 244 | server.addEventListener('message', (e) => { 245 | // const not_string = (typeof e.data !== "string"); 246 | // server.send("ACK string received from wsdo"); 247 | }); 248 | 249 | server.addEventListener('error', (event) => { 250 | log.log("ws.error", event); 251 | remove_ws(); 252 | }); 253 | server.addEventListener('close', (event) => { 254 | log.log(`ws.close ${new Date()}`, event); 255 | remove_ws(); 256 | }); 257 | 258 | 259 | (async () => { 260 | const first_msg = await this.get_current_durable_object_index(); 261 | try { 262 | server.send(JSON.stringify(first_msg)); 263 | // Only send the partial index after the first full index msg. 264 | this.ws_get_and_watch_durable_object_index.push(server); 265 | } catch (e) { 266 | // When: ws closed by client. 267 | } 268 | })(); 269 | 270 | 271 | return new Response(null, { 272 | status: 101, 273 | webSocket: client, 274 | }); 275 | } 276 | 277 | async get_current_durable_object_index() { 278 | const all = await this.state.storage.list({prefix: `do-key`}); 279 | const o = []; 280 | for (const [k, v] of all) { 281 | o.push(v); 282 | } 283 | 284 | return { 285 | kind: "full_index", 286 | durable_object_list: o 287 | } 288 | 289 | } 290 | } 291 | 292 | 293 | interface Env { 294 | } 295 | 296 | export { 297 | DURAFETCH_DO 298 | } -------------------------------------------------------------------------------- /src/log.ts: -------------------------------------------------------------------------------- 1 | import {config} from "./config"; 2 | 3 | // Disable logging when added to an end user's project. 4 | // - Keep errors to enable debugging any issues. 5 | 6 | const levels = [ 7 | "error", "warn", "log", "info", "debug" 8 | ]; 9 | 10 | const is_enabled = (level) => { 11 | // Note: `config.log_level` may change at runtime. 12 | const all_enabled = levels.slice(0, levels.indexOf(config.log_level) + 1); 13 | return all_enabled.includes(level); 14 | } 15 | 16 | const log = { 17 | error: (...args) => { 18 | if (is_enabled("error")) { 19 | console.error(...args); 20 | } 21 | }, 22 | 23 | log: (...args) => { 24 | if (is_enabled("log")) { 25 | console.log(...args); 26 | } 27 | } 28 | } 29 | 30 | 31 | export { 32 | log 33 | } -------------------------------------------------------------------------------- /src/util.ts: -------------------------------------------------------------------------------- 1 | import _ from "lodash"; 2 | import {config} from "./config"; 3 | import {config_int} from "./config-internal"; 4 | import {log} from "./log"; 5 | 6 | 7 | // esbuild can use a text loader for toml, which can be parsed. 8 | // Assumption: package.json "name" matches the name in the wrangler.toml (it is the worker name). 9 | // @todo/low Service bindings may confuse this, as it skips the worker associated with the DO. 10 | const get_worker_name = () => { 11 | const valid = (_.isString(config.worker_name) && config.worker_name.length > 0); 12 | if (!valid) { 13 | const msg = `Must set worker_name in ${config_int.primary_name} config`; 14 | log.error(msg); 15 | throw Error(msg); 16 | } 17 | 18 | return config.worker_name; 19 | }; 20 | 21 | 22 | const get_external_subdomain = () => { 23 | return `${config_int.primary_name}_${get_worker_name()}` 24 | } 25 | 26 | 27 | // Determines if an external request should be routed to the Durafetch external HTTP API. 28 | // 29 | // Issue: `ws://x.example.com` allows plaintext WebSocket messages, even when the CF site is configured to only allow HTTPS. 
30 | // Fix: Detect when running on CF, reject plaintext HTTP websocket upgrades. 31 | // 32 | // There is no way to detect when running on CF vs a local workerd instance in dev, so an env var is used. 33 | // @see https://developers.cloudflare.com/workers/examples/block-on-tls/ 34 | // - `request.cf` with tls is still set in local workerd with http. 35 | const starts_with_ext_subdomain = (env, request) => { 36 | const url = new URL(request.url); 37 | const is_for_ext_subdomain = url.hostname.startsWith(get_external_subdomain()); 38 | 39 | if (!is_for_ext_subdomain) { 40 | return false; 41 | } 42 | 43 | // Assert env is configured to ensure `prod` (running on CF) can be identified. 44 | const msg = `${config_int.env_key_env} should be set to either "dev" or "prod" in wrangler.toml. In dev, override it in the ".dev.vars" file.` 45 | 46 | // DURAFETCH_ENV 47 | if (!(config_int.env_key_env in env)) { 48 | console.error(msg); 49 | return false; 50 | } 51 | 52 | const df_env = env[config_int.env_key_env]; 53 | if (!["dev", "prod"].includes(df_env)) { 54 | console.error(msg); 55 | return false; 56 | } 57 | 58 | if (df_env === "prod" && url.protocol !== "https:") { 59 | console.error(`Ignoring plaintext HTTP request for the external Durafetch API - must use wss:// or https://.`, {url}); 60 | return false; 61 | } 62 | 63 | return true; 64 | } 65 | 66 | const get_durafetch_do = async (env) => { 67 | const id_do = env.DURAFETCH_DO.idFromName("primary"); 68 | return env.DURAFETCH_DO.get(id_do); 69 | } 70 | 71 | // This is used in two places: 72 | // 1. Workers/DO to DURAFETCH_DO 73 | // 2. Worker to user DO that is wrapped. 74 | const get_internal_req_hostname = () => { 75 | return `${config_int.primary_name}.example.com` 76 | } 77 | 78 | 79 | const env_key_auth = config_int.env_key_auth; 80 | 81 | // @todo/low Allow configuring allowable IPs. 82 | // @todo/low Rate limit password attempts (CF WAF already does this by default. A long secret key makes brute forcing unlikely). 83 | const is_auth_valid = (env, req) => { 84 | const ok = ( 85 | env_key_auth in env && 86 | _.isString(env[env_key_auth]) && 87 | env[env_key_auth].length >= 40 88 | ); 89 | 90 | if (!ok) { 91 | const msg = `Set ${env_key_auth} env var to a single admin secret token with a length >= 40. It is unique to your CF worker.`; 92 | log.error(msg); 93 | throw Error(msg); 94 | } 95 | 96 | const allowed_tokens = [env[env_key_auth]]; 97 | 98 | const x = req.headers.get("Authorization"); 99 | 100 | if (x === null) { 101 | return false; 102 | } 103 | 104 | const token = x.replace(/^Bearer /i, ""); 105 | return allowed_tokens.includes(token); 106 | } 107 | 108 | const json_res = (obj, opts = {}) => { 109 | const o = _.merge( 110 | { 111 | headers: { 112 | 'content-type': 'application/json', 113 | // ...cors_headers 114 | } 115 | }, 116 | opts 117 | ); 118 | 119 | const r = new Response(JSON.stringify(obj), o); 120 | return r; 121 | }; 122 | 123 | const res_not_authorised = () => { 124 | return json_res({ 125 | ok: false, 126 | msg: "Auth invalid. Include `Authorization: Bearer x` token and set the auth token as an env var." 
127 | }, {status: 401}); 128 | } 129 | 130 | 131 | const add_ws_helper_fns = (server) => { 132 | server.send_json = (x) => { 133 | return server.send(JSON.stringify(x)); 134 | }; 135 | } 136 | 137 | export { 138 | get_worker_name, 139 | get_external_subdomain, 140 | starts_with_ext_subdomain, 141 | get_durafetch_do, 142 | get_internal_req_hostname, 143 | is_auth_valid, 144 | json_res, 145 | res_not_authorised, 146 | add_ws_helper_fns 147 | 148 | } -------------------------------------------------------------------------------- /test/worker-1/.dev.vars: -------------------------------------------------------------------------------- 1 | DURAFETCH_ENV = "dev" 2 | DURAFETCH_AUTH = "secret_http_auth_bearer_token_replace_this_with_more_than_40_chars" -------------------------------------------------------------------------------- /test/worker-1/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "worker-1", 3 | "version": "1.0.0", 4 | "lockfileVersion": 2, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "worker-1", 9 | "version": "1.0.0", 10 | "license": "NOLICENSE", 11 | "dependencies": { 12 | "durafetch-server": "file:../.." 13 | }, 14 | "devDependencies": {} 15 | }, 16 | "../..": { 17 | "version": "1.0.0", 18 | "license": "SEE LICENSE IN LICENSE", 19 | "dependencies": { 20 | "lodash": "^4.17.21" 21 | }, 22 | "devDependencies": { 23 | "@cloudflare/workers-types": "^4.20230518.0", 24 | "typescript": "^5.0.4" 25 | } 26 | }, 27 | "node_modules/durafetch-server": { 28 | "resolved": "../..", 29 | "link": true 30 | } 31 | }, 32 | "dependencies": { 33 | "durafetch-server": { 34 | "version": "file:../..", 35 | "requires": { 36 | "@cloudflare/workers-types": "^4.20230518.0", 37 | "lodash": "^4.17.21", 38 | "typescript": "^5.0.4" 39 | } 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /test/worker-1/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "worker-1", 3 | "version": "1.0.0", 4 | "description": "Minimal Durafetch example Cloudflare worker", 5 | "main": "index.js", 6 | "dependencies": { 7 | "durafetch-server": "file:../.." 8 | }, 9 | "scripts": {}, 10 | "author": "", 11 | "license": "NOLICENSE" 12 | } 13 | -------------------------------------------------------------------------------- /test/worker-1/readme.md: -------------------------------------------------------------------------------- 1 | Shows a minimal example of adding `durafetch-server` to a Cloudflare Worker. -------------------------------------------------------------------------------- /test/worker-1/sh/durafetch-client.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Note: Add `127.0.0.1 durafetch_worker-1.localhost` to /etc/hosts 3 | # - The subdomain is used for routing and simulates the subdomain of production requests. 4 | 5 | cd "$(dirname "$0")" 6 | 7 | # Allows `ws://` (no TLS for `ws://x.localhost`). 
8 | NODE_ENV=development durafetch --config-file ./durafetch-config.json 9 | 10 | #durafetch --config-file ./durafetch-config.json 11 | -------------------------------------------------------------------------------- /test/worker-1/sh/durafetch-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_file": "./del/db.sqlite", 3 | "servers": [ 4 | { 5 | "ws_url": "ws://durafetch_worker-1.localhost:8720", 6 | "auth_token": "secret_http_auth_bearer_token_replace_this_with_more_than_40_chars" 7 | } 8 | ], 9 | "concurrent_downloads": 50, 10 | "poll": { 11 | "every_ms": 1000 12 | }, 13 | "logging": { 14 | "ndjson": false 15 | } 16 | } -------------------------------------------------------------------------------- /test/worker-1/sh/logs.sh: -------------------------------------------------------------------------------- 1 | yes | wrangler tail --ip self --format pretty 2 | #wrangler tail --ip self --format json -------------------------------------------------------------------------------- /test/worker-1/sh/publish.sh: -------------------------------------------------------------------------------- 1 | wrangler publish 2 | # wrangler delete -------------------------------------------------------------------------------- /test/worker-1/sh/start-dev-server.sh: -------------------------------------------------------------------------------- 1 | wrangler dev src/index.ts --experimental-local --port 8720 -------------------------------------------------------------------------------- /test/worker-1/src/durable_object_a.ts: -------------------------------------------------------------------------------- 1 | import p from './../package.json'; 2 | import * as durafetch from "./durafetch-with-config"; 3 | 4 | 5 | const { 6 | core: { 7 | wrap_durable_object 8 | } 9 | } = durafetch; 10 | 11 | 12 | // An example of one of your own application Durable Objects. 13 | class DURABLE_OBJECT_A { 14 | state: DurableObjectState 15 | env: Env 16 | constructor(state: DurableObjectState, env: Env) { 17 | this.state = state; 18 | this.env = env; 19 | 20 | 21 | // - Intercepts calls to `fetch` 22 | // - 1. Internal (from worker): `/set-meta` will set the id/name of this Durable Object, and then send a "durable object started" event to DURAFETCH_DO. 23 | // - 2. External (from the internet): `/external/do/read_all_from` will return storage data from a given write_id via a WebSocket. 24 | 25 | // Intercepts calls to `storage.*` 26 | // - Records the keys written to, stores them in the same write transaction, and increments write_id. 27 | wrap_durable_object(this, {class_name: DURABLE_OBJECT_A.name}); 28 | } 29 | 30 | // Your application code here. 31 | async fetch(request: Request) { 32 | const storage = this.state.storage; 33 | const now = (new Date()).toISOString(); 34 | 35 | 36 | const v = (await storage.get("request_counter")) ?? {worker_name: p.name, counter: 0, now: null}; 37 | 38 | v.counter++; 39 | v.now = now; 40 | 41 | // The key `request_counter` is recorded as a change behind the scenes by Durafetch.
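// - (A sketch of the idea: the write transaction also persists something like {key, write_id} alongside the value; the exact internal record format is not shown in this file.)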
42 | await storage.put("request_counter", v); 43 | 44 | 45 | return new Response( 46 | JSON.stringify(v, null, 4), 47 | {headers: {'content-type': 'application/json'}} 48 | ); 49 | } 50 | } 51 | 52 | interface Env { 53 | } 54 | 55 | export { 56 | DURABLE_OBJECT_A 57 | }; -------------------------------------------------------------------------------- /test/worker-1/src/durafetch-with-config.ts: -------------------------------------------------------------------------------- 1 | import p from './../package.json'; 2 | import {config} from "durafetch-server"; 3 | 4 | console.log("CONFIG SET"); 5 | config.set_config({ 6 | worker_name: p.name 7 | }); 8 | 9 | export * from "durafetch-server"; -------------------------------------------------------------------------------- /test/worker-1/src/index.ts: -------------------------------------------------------------------------------- 1 | import {DURABLE_OBJECT_A} from "./durable_object_a"; 2 | import * as durafetch from "./durafetch-with-config"; 3 | 4 | const { 5 | core: { 6 | wrap_worker_env 7 | }, 8 | util: { 9 | get_durafetch_do, 10 | get_external_subdomain, 11 | starts_with_ext_subdomain 12 | }, 13 | durafetch_do: { 14 | DURAFETCH_DO 15 | } 16 | } = durafetch; 17 | 18 | 19 | export interface Env { 20 | DURABLE_OBJECT_A: DurableObjectNamespace; 21 | DURAFETCH_DO: DurableObjectNamespace; 22 | } 23 | 24 | 25 | const ignore_favicon = (request, env) => { 26 | if (new URL(request.url).pathname.startsWith("/favicon.ico")) { 27 | return new Response(null, {status: 204}); 28 | } 29 | return null; 30 | } 31 | 32 | export default { 33 | async fetch( 34 | request: Request, 35 | env: Env, 36 | ctx: ExecutionContext 37 | ): Promise<Response> { 38 | 39 | // - Intercepts calls to durable object bindings. 40 | // - When sending a request to a durable object's `fetch`, the `id` and `name` are intercepted and sent to the same durable object. 41 | // - Inside the durable object's `fetch` this will then set `id` and `name`, and also send a "durable object started" event to DURAFETCH_DO. 42 | // - DURAFETCH_DO keeps a list of all durable objects that have ever been started within this worker. 43 | wrap_worker_env(env); 44 | 45 | 46 | // - This is the external HTTP API for DURAFETCH_DO. 47 | // - 1. Lists all past and current Durable Object IDs. 48 | // - 2. Forwards read requests to specific Durable Object ID instances which return data via WebSocket. 49 | // 50 | // - Subdomain config: 51 | // - wrangler.toml: Add `{ pattern = "*.example.com/*", zone_name = "example.com" }` to `routes` 52 | // - HTTPS provisioning for first level subdomains is automatic/free for all CF accounts. 53 | // - May need to add `durafetch_worker-1.localhost` to `/etc/hosts` for dev requests to work locally. 54 | // - CF DNS: Add "CNAME * can.be.anything.as.worker.route.overrides.example.com" 55 | // - For "Proxied" entries the worker's routes take precedence, so the CNAME target never takes effect. 56 | // - You can observe this traffic by using the "reverse proxy" feature of many proxy tools. 57 | if (starts_with_ext_subdomain(env, request)) { 58 | const o = await get_durafetch_do(env); 59 | return o.fetch(request); 60 | } 61 | 62 | 63 | // Add your application's URL handlers here. 64 | 65 | 66 | // Ignore when loading from a browser. 67 | const favicon = ignore_favicon(request, env); 68 | if (favicon) { 69 | return favicon; 70 | } 71 | 72 | 73 | // An example of messaging a durable object.
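// - `idFromName` deterministically maps a stable name to a Durable Object id; because `env` was wrapped above, Durafetch observes the `get`/`fetch` calls below and learns this instance's name and id.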
74 | const id_do = env.DURABLE_OBJECT_A.idFromName("example-name"); 75 | const stub = env.DURABLE_OBJECT_A.get(id_do); 76 | 77 | return stub.fetch(request); 78 | // return new Response("Hello World!"); 79 | }, 80 | }; 81 | 82 | 83 | export { 84 | DURABLE_OBJECT_A, 85 | DURAFETCH_DO 86 | } 87 | -------------------------------------------------------------------------------- /test/worker-1/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | /* Visit https://aka.ms/tsconfig.json to read more about this file */ 4 | 5 | /* Projects */ 6 | // "incremental": true, /* Enable incremental compilation */ 7 | // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ 8 | // "tsBuildInfoFile": "./", /* Specify the folder for .tsbuildinfo incremental compilation files. */ 9 | // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects */ 10 | // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ 11 | // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ 12 | 13 | /* Language and Environment */ 14 | "target": "es2021" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */, 15 | "lib": [ 16 | "es2021" 17 | ] /* Specify a set of bundled library declaration files that describe the target runtime environment. */, 18 | "jsx": "react" /* Specify what JSX code is generated. */, 19 | // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */ 20 | // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ 21 | // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h' */ 22 | // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ 23 | // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using `jsx: react-jsx*`.` */ 24 | // "reactNamespace": "", /* Specify the object invoked for `createElement`. This only applies when targeting `react` JSX emit. */ 25 | // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ 26 | // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ 27 | 28 | /* Modules */ 29 | "module": "es2022" /* Specify what module code is generated. */, 30 | // "rootDir": "./", /* Specify the root folder within your source files. */ 31 | "moduleResolution": "node" /* Specify how TypeScript looks up a file from a given module specifier. */, 32 | // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ 33 | // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ 34 | // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ 35 | // "typeRoots": [], /* Specify multiple folders that act like `./node_modules/@types`. */ 36 | "types": [ 37 | "@cloudflare/workers-types" 38 | ] /* Specify type package names to be included without being referenced in a source file. 
*/, 39 | // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ 40 | "resolveJsonModule": true /* Enable importing .json files */, 41 | // "noResolve": true, /* Disallow `import`s, `require`s or ``s from expanding the number of files TypeScript should add to a project. */ 42 | 43 | /* JavaScript Support */ 44 | "allowJs": true /* Allow JavaScript files to be a part of your program. Use the `checkJS` option to get errors from these files. */, 45 | "checkJs": false /* Enable error reporting in type-checked JavaScript files. */, 46 | // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from `node_modules`. Only applicable with `allowJs`. */ 47 | 48 | /* Emit */ 49 | // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ 50 | // "declarationMap": true, /* Create sourcemaps for d.ts files. */ 51 | // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ 52 | // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ 53 | // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If `declaration` is true, also designates a file that bundles all .d.ts output. */ 54 | // "outDir": "./", /* Specify an output folder for all emitted files. */ 55 | // "removeComments": true, /* Disable emitting comments. */ 56 | "noEmit": true /* Disable emitting files from a compilation. */, 57 | // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ 58 | // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types */ 59 | // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ 60 | // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ 61 | // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ 62 | // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ 63 | // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ 64 | // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ 65 | // "newLine": "crlf", /* Set the newline character for emitting files. */ 66 | // "stripInternal": true, /* Disable emitting declarations that have `@internal` in their JSDoc comments. */ 67 | // "noEmitHelpers": true, /* Disable generating custom helper functions like `__extends` in compiled output. */ 68 | // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ 69 | // "preserveConstEnums": true, /* Disable erasing `const enum` declarations in generated code. */ 70 | // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ 71 | // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ 72 | 73 | /* Interop Constraints */ 74 | "isolatedModules": true /* Ensure that each file can be safely transpiled without relying on other imports. */, 75 | "allowSyntheticDefaultImports": true /* Allow 'import x from y' when a module doesn't have a default export. */, 76 | // "esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. 
This enables `allowSyntheticDefaultImports` for type compatibility. */, 77 | // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ 78 | "forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */, 79 | 80 | /* Type Checking */ 81 | "strict": true /* Enable all strict type-checking options. */, 82 | // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied `any` type.. */ 83 | // "strictNullChecks": true, /* When type checking, take into account `null` and `undefined`. */ 84 | // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ 85 | // "strictBindCallApply": true, /* Check that the arguments for `bind`, `call`, and `apply` methods match the original function. */ 86 | // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ 87 | // "noImplicitThis": true, /* Enable error reporting when `this` is given the type `any`. */ 88 | // "useUnknownInCatchVariables": true, /* Type catch clause variables as 'unknown' instead of 'any'. */ 89 | // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ 90 | // "noUnusedLocals": true, /* Enable error reporting when a local variables aren't read. */ 91 | // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read */ 92 | // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ 93 | // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ 94 | // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ 95 | // "noUncheckedIndexedAccess": true, /* Include 'undefined' in index signature results */ 96 | // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ 97 | // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type */ 98 | // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ 99 | // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ 100 | 101 | /* Completeness */ 102 | // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ 103 | "skipLibCheck": true /* Skip type checking all .d.ts files. */ 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /test/worker-1/wrangler.toml: -------------------------------------------------------------------------------- 1 | name = "worker-1" 2 | main = "src/index.ts" 3 | compatibility_date = "2023-05-29" 4 | 5 | 6 | # Overridden by .dev.vars file. 7 | vars = { DURAFETCH_ENV = "prod", DURAFETCH_AUTH = "secret_http_auth_bearer_token_replace_this_with_more_than_40_chars" } 8 | 9 | # Must also add CNAME * anything.example.com to Cloudflare DNS. 10 | routes = [ 11 | # TLS only works for first level subdomains (Total TLS = $10/month extra). 
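# - Universal SSL certificates cover the apex and one subdomain level (*.your-domain.com), which is why `durafetch_worker-1.your-domain.com` works without Total TLS.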
12 | # The most specific match takes priority. 13 | # @see https://developers.cloudflare.com/workers/platform/triggers/routes/#matching-behavior 14 | { pattern = "durafetch_worker-1.your-domain.com/*", zone_name = "your-domain.com" }, 15 | { pattern = "your-app.your-domain.com/*", zone_name = "your-domain.com" } 16 | ] 17 | 18 | 19 | [durable_objects] 20 | bindings = [ 21 | { name = "DURABLE_OBJECT_A", class_name = "DURABLE_OBJECT_A" }, 22 | { name = "DURAFETCH_DO", class_name = "DURAFETCH_DO" } 23 | ] 24 | 25 | 26 | # @see https://developers.cloudflare.com/workers/learning/using-durable-objects/#durable-object-migrations-in-wranglertoml 27 | [[migrations]] 28 | tag = "v1" # Should be unique for each entry 29 | new_classes = ["DURABLE_OBJECT_A", "DURAFETCH_DO"] # Array of new classes 30 | 31 | 32 | -------------------------------------------------------------------------------- /test/worker-2/.dev.vars: -------------------------------------------------------------------------------- 1 | DURAFETCH_ENV = "dev" 2 | DURAFETCH_AUTH = "secret_http_auth_bearer_token_replace_this_with_more_than_40_chars" -------------------------------------------------------------------------------- /test/worker-2/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "worker-2", 3 | "version": "1.0.0", 4 | "lockfileVersion": 2, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "worker-2", 9 | "version": "1.0.0", 10 | "license": "NOLICENSE", 11 | "dependencies": { 12 | "durafetch-server": "file:../.." 13 | }, 14 | "devDependencies": {} 15 | }, 16 | "../..": { 17 | "version": "1.0.0", 18 | "license": "SEE LICENSE IN LICENSE", 19 | "dependencies": { 20 | "lodash": "^4.17.21" 21 | }, 22 | "devDependencies": { 23 | "@cloudflare/workers-types": "^4.20230518.0", 24 | "typescript": "^5.0.4" 25 | } 26 | }, 27 | "node_modules/durafetch-server": { 28 | "resolved": "../..", 29 | "link": true 30 | } 31 | }, 32 | "dependencies": { 33 | "durafetch-server": { 34 | "version": "file:../..", 35 | "requires": { 36 | "@cloudflare/workers-types": "^4.20230518.0", 37 | "lodash": "^4.17.21", 38 | "typescript": "^5.0.4" 39 | } 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /test/worker-2/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "worker-2", 3 | "version": "1.0.0", 4 | "description": "Server handler for tests.", 5 | "main": "index.js", 6 | "dependencies": { 7 | "durafetch-server": "file:../.." 8 | }, 9 | "scripts": {}, 10 | "author": "", 11 | "license": "NOLICENSE" 12 | } 13 | -------------------------------------------------------------------------------- /test/worker-2/readme.md: -------------------------------------------------------------------------------- 1 | A Cloudflare Worker used as the server-side fixture for `durafetch-server` tests: each numbered step creates, updates, or deletes Durable Object storage keys so a client can verify what it downloads.
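2 | 3 | Once the dev server is running (port 8787, per `sh/start-dev-server.sh`), the storage steps can be driven with a request like the sketch below; the Durable Object names here are arbitrary: 4 | 5 | ```sh 6 | curl -X POST "http://localhost:8787/set-step-many" -H 'content-type: application/json' -d '{"names": ["do-1", "do-2"], "step": "step-2"}' 7 | ``` 8 | 9 | Valid steps are `step-1` (no writes) through `step-5` (`deleteAll`), as defined in `src/durable_object_a.ts`.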
-------------------------------------------------------------------------------- /test/worker-2/sh/logs.sh: -------------------------------------------------------------------------------- 1 | yes | wrangler tail --ip self --format pretty 2 | #wrangler tail --ip self --format json -------------------------------------------------------------------------------- /test/worker-2/sh/publish.sh: -------------------------------------------------------------------------------- 1 | wrangler publish 2 | # wrangler delete -------------------------------------------------------------------------------- /test/worker-2/sh/start-dev-server.sh: -------------------------------------------------------------------------------- 1 | wrangler dev src/index.ts --experimental-local --port 8787 -------------------------------------------------------------------------------- /test/worker-2/src/durable_object_a.ts: -------------------------------------------------------------------------------- 1 | import p from './../package.json'; 2 | import * as durafetch from "./durafetch-with-config"; 3 | 4 | 5 | const { 6 | core: { 7 | wrap_durable_object 8 | } 9 | } = durafetch; 10 | 11 | 12 | // An example of one of your own application Durable Objects. 13 | class DURABLE_OBJECT_A { 14 | state: DurableObjectState 15 | env: Env 16 | constructor(state: DurableObjectState, env: Env) { 17 | this.state = state; 18 | this.env = env; 19 | wrap_durable_object(this, {class_name: DURABLE_OBJECT_A.name}); 20 | } 21 | 22 | 23 | async fetch(request: Request) { 24 | const storage = this.state.storage; 25 | const now = (new Date()).toISOString(); 26 | 27 | const url = new URL(request.url); 28 | if (request.method === "POST" && url.pathname === "/set-step") { 29 | const {name, step} = await request.json(); 30 | 31 | switch (step) { 32 | case "step-1": 33 | // No writes, but the Durable Object will report to DURAFETCH_DO that it has been created and exists.
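// - (The report happens because the worker-side wrapper calls the internal `/set-meta` endpoint when the stub is first used; see the comments in worker-1's `durable_object_a.ts`.)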
34 | break; 35 | case "step-2": 36 | // CREATE 37 | 38 | // Single 39 | await storage.put("key-a", 1); 40 | 41 | // Multi 42 | await storage.put({ 43 | "key-b": 1, 44 | "key-c": 1, 45 | "key-d": 1 46 | }); 47 | 48 | break; 49 | 50 | case "step-3": 51 | // UPDATE 52 | 53 | // Single 54 | await storage.put("key-a", 2); 55 | 56 | // Multi 57 | await storage.put({ 58 | "key-b": 2, 59 | "key-c": 2, 60 | "key-d": 2, 61 | }); 62 | break; 63 | case "step-4": 64 | // DELETE 65 | 66 | // Single 67 | await storage.delete("key-a"); 68 | 69 | // Multi 70 | await storage.delete(["key-b", "key-c"]); 71 | 72 | break; 73 | case "step-5": 74 | // DELETE ALL 75 | await storage.deleteAll(); 76 | break; 77 | 78 | default: 79 | throw Error(`Unknown step ${step}`); 80 | // return new Response("Unknown action", {status: 400}); 81 | 82 | } 83 | 84 | 85 | return new Response( 86 | JSON.stringify({ok: true}, null, 4), 87 | {headers: {'content-type': 'application/json'}} 88 | ); 89 | } 90 | 91 | return new Response("Unknown action", {status: 400}); 92 | } 93 | } 94 | 95 | interface Env { 96 | } 97 | 98 | export { 99 | DURABLE_OBJECT_A 100 | }; -------------------------------------------------------------------------------- /test/worker-2/src/durafetch-with-config.ts: -------------------------------------------------------------------------------- 1 | import p from './../package.json'; 2 | import {config} from "durafetch-server"; 3 | 4 | console.log("CONFIG SET"); 5 | config.set_config({ 6 | worker_name: p.name 7 | }); 8 | 9 | export * from "durafetch-server"; -------------------------------------------------------------------------------- /test/worker-2/src/index.ts: -------------------------------------------------------------------------------- 1 | import {DURABLE_OBJECT_A} from "./durable_object_a"; 2 | import * as durafetch from "./durafetch-with-config"; 3 | 4 | const { 5 | core: { 6 | wrap_worker_env 7 | }, 8 | util: { 9 | get_durafetch_do, 10 | get_external_subdomain, 11 | starts_with_ext_subdomain 12 | }, 13 | durafetch_do: { 14 | DURAFETCH_DO 15 | } 16 | } = durafetch; 17 | 18 | 19 | export interface Env { 20 | DURABLE_OBJECT_A: DurableObjectNamespace; 21 | DURAFETCH_DO: DurableObjectNamespace; 22 | } 23 | 24 | 25 | const ignore_favicon = (request, env) => { 26 | if (new URL(request.url).pathname.startsWith("/favicon.ico")) { 27 | return new Response(null, {status: 204}); 28 | } 29 | return null; 30 | } 31 | 32 | export default { 33 | async fetch( 34 | request: Request, 35 | env: Env, 36 | ctx: ExecutionContext 37 | ): Promise<Response> { 38 | wrap_worker_env(env); 39 | 40 | if (starts_with_ext_subdomain(env, request)) { 41 | const o = await get_durafetch_do(env); 42 | return o.fetch(request); 43 | } 44 | 45 | // Ignore when loading from a browser.
46 | const favicon = ignore_favicon(request, env); 47 | if (favicon) { 48 | return favicon; 49 | } 50 | 51 | 52 | const url = new URL(request.url); 53 | 54 | if (request.method === "POST" && url.pathname === "/set-step-many") { 55 | const {names, step} = await request.json(); 56 | 57 | const all = []; 58 | for (const n of names) { 59 | const id_do = env.DURABLE_OBJECT_A.idFromName(n); 60 | const stub = env.DURABLE_OBJECT_A.get(id_do); 61 | all.push(stub.fetch(new Request( 62 | "https://example.com/set-step", 63 | { 64 | method: 'POST', 65 | headers: {'content-type': 'application/json'}, 66 | body: JSON.stringify({name: n, step}) 67 | } 68 | ))); 69 | } 70 | 71 | const x = await Promise.all(all); 72 | for (const res of x) { 73 | console.log(await res.json()); 74 | } 75 | 76 | return new Response( 77 | JSON.stringify({ok: true}), 78 | {headers: {'content-type': 'application/json'}} 79 | ); 80 | } 81 | 82 | 83 | return new Response("Unknown action"); 84 | }, 85 | }; 86 | 87 | 88 | export { 89 | DURABLE_OBJECT_A, 90 | DURAFETCH_DO 91 | } 92 | -------------------------------------------------------------------------------- /test/worker-2/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | /* Visit https://aka.ms/tsconfig.json to read more about this file */ 4 | 5 | /* Projects */ 6 | // "incremental": true, /* Enable incremental compilation */ 7 | // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ 8 | // "tsBuildInfoFile": "./", /* Specify the folder for .tsbuildinfo incremental compilation files. */ 9 | // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects */ 10 | // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ 11 | // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ 12 | 13 | /* Language and Environment */ 14 | "target": "es2021" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */, 15 | "lib": [ 16 | "es2021" 17 | ] /* Specify a set of bundled library declaration files that describe the target runtime environment. */, 18 | "jsx": "react" /* Specify what JSX code is generated. */, 19 | // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */ 20 | // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ 21 | // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h' */ 22 | // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ 23 | // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using `jsx: react-jsx*`.` */ 24 | // "reactNamespace": "", /* Specify the object invoked for `createElement`. This only applies when targeting `react` JSX emit. */ 25 | // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ 26 | // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ 27 | 28 | /* Modules */ 29 | "module": "es2022" /* Specify what module code is generated. 
*/, 30 | // "rootDir": "./", /* Specify the root folder within your source files. */ 31 | "moduleResolution": "node" /* Specify how TypeScript looks up a file from a given module specifier. */, 32 | // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ 33 | // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ 34 | // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ 35 | // "typeRoots": [], /* Specify multiple folders that act like `./node_modules/@types`. */ 36 | "types": [ 37 | "@cloudflare/workers-types" 38 | ] /* Specify type package names to be included without being referenced in a source file. */, 39 | // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ 40 | "resolveJsonModule": true /* Enable importing .json files */, 41 | // "noResolve": true, /* Disallow `import`s, `require`s or ``s from expanding the number of files TypeScript should add to a project. */ 42 | 43 | /* JavaScript Support */ 44 | "allowJs": true /* Allow JavaScript files to be a part of your program. Use the `checkJS` option to get errors from these files. */, 45 | "checkJs": false /* Enable error reporting in type-checked JavaScript files. */, 46 | // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from `node_modules`. Only applicable with `allowJs`. */ 47 | 48 | /* Emit */ 49 | // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ 50 | // "declarationMap": true, /* Create sourcemaps for d.ts files. */ 51 | // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ 52 | // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ 53 | // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If `declaration` is true, also designates a file that bundles all .d.ts output. */ 54 | // "outDir": "./", /* Specify an output folder for all emitted files. */ 55 | // "removeComments": true, /* Disable emitting comments. */ 56 | "noEmit": true /* Disable emitting files from a compilation. */, 57 | // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ 58 | // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types */ 59 | // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ 60 | // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ 61 | // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ 62 | // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ 63 | // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ 64 | // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ 65 | // "newLine": "crlf", /* Set the newline character for emitting files. */ 66 | // "stripInternal": true, /* Disable emitting declarations that have `@internal` in their JSDoc comments. */ 67 | // "noEmitHelpers": true, /* Disable generating custom helper functions like `__extends` in compiled output. */ 68 | // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. 
*/ 69 | // "preserveConstEnums": true, /* Disable erasing `const enum` declarations in generated code. */ 70 | // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ 71 | // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ 72 | 73 | /* Interop Constraints */ 74 | "isolatedModules": true /* Ensure that each file can be safely transpiled without relying on other imports. */, 75 | "allowSyntheticDefaultImports": true /* Allow 'import x from y' when a module doesn't have a default export. */, 76 | // "esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables `allowSyntheticDefaultImports` for type compatibility. */, 77 | // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ 78 | "forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */, 79 | 80 | /* Type Checking */ 81 | "strict": true /* Enable all strict type-checking options. */, 82 | // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied `any` type.. */ 83 | // "strictNullChecks": true, /* When type checking, take into account `null` and `undefined`. */ 84 | // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ 85 | // "strictBindCallApply": true, /* Check that the arguments for `bind`, `call`, and `apply` methods match the original function. */ 86 | // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ 87 | // "noImplicitThis": true, /* Enable error reporting when `this` is given the type `any`. */ 88 | // "useUnknownInCatchVariables": true, /* Type catch clause variables as 'unknown' instead of 'any'. */ 89 | // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ 90 | // "noUnusedLocals": true, /* Enable error reporting when a local variables aren't read. */ 91 | // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read */ 92 | // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ 93 | // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ 94 | // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ 95 | // "noUncheckedIndexedAccess": true, /* Include 'undefined' in index signature results */ 96 | // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ 97 | // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type */ 98 | // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ 99 | // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ 100 | 101 | /* Completeness */ 102 | // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ 103 | "skipLibCheck": true /* Skip type checking all .d.ts files. 
*/ 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /test/worker-2/wrangler.toml: -------------------------------------------------------------------------------- 1 | name = "worker-2" 2 | main = "src/index.ts" 3 | compatibility_date = "2023-05-29" 4 | 5 | 6 | # Overridden by .dev.vars file. 7 | vars = { DURAFETCH_ENV = "prod", DURAFETCH_AUTH = "secret_http_auth_bearer_token_replace_this_with_more_than_40_chars" } 8 | 9 | # Must also add CNAME * anything.example.com to Cloudflare DNS. 10 | routes = [ 11 | # TLS only works for first level subdomains (Total TLS = $10/month extra). 12 | # The most specific match takes priority. 13 | # @see https://developers.cloudflare.com/workers/platform/triggers/routes/#matching-behavior 14 | { pattern = "durafetch_worker-2.your-domain.com/*", zone_name = "your-domain.com" }, 15 | { pattern = "your-app.your-domain.com/*", zone_name = "your-domain.com" } 16 | ] 17 | 18 | 19 | [durable_objects] 20 | bindings = [ 21 | { name = "DURABLE_OBJECT_A", class_name = "DURABLE_OBJECT_A" }, 22 | { name = "DURAFETCH_DO", class_name = "DURAFETCH_DO" } 23 | ] 24 | 25 | 26 | # @see https://developers.cloudflare.com/workers/learning/using-durable-objects/#durable-object-migrations-in-wranglertoml 27 | [[migrations]] 28 | tag = "v1" # Should be unique for each entry 29 | new_classes = ["DURABLE_OBJECT_A", "DURAFETCH_DO"] # Array of new classes 30 | 31 | 32 | --------------------------------------------------------------------------------