├── .eslintrc.json
├── .github
│   └── prlint.json
├── .gitignore
├── .prettierrc
├── CHANGELOG.md
├── LICENSE
├── README.md
├── app
│   ├── __tests__
│   │   ├── kv-namespace_spec.js
│   │   ├── server_spec.js
│   │   ├── test-app_spec.js
│   │   └── worker_spec.js
│   ├── caches.js
│   ├── file-kv-store.js
│   ├── in-memory-kv-store.js
│   ├── kv-namespace.js
│   ├── minio-kv-store.js
│   ├── server.js
│   ├── test-app.js
│   └── worker.js
├── azure-pipelines-template.yml
├── azure-pipelines.yml
├── cli.js
├── examples
│   ├── unit-test-a-worker
│   │   ├── __tests__
│   │   │   └── worker_spec.js
│   │   └── worker.js
│   └── wrangler.toml
├── index.js
├── lib
│   └── wrangler.js
├── package-lock.json
├── package.json
├── sample-worker.js
└── start.js
/.eslintrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": [
3 | "standard",
4 | "plugin:jest/recommended",
5 | "plugin:prettier/recommended",
6 | "prettier/standard"
7 | ],
8 | "plugins": [
9 | "standard",
10 | "jest"
11 | ],
12 | "rules": {
13 | "max-len": ["warn", { "code": 120 }]
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/.github/prlint.json:
--------------------------------------------------------------------------------
1 | {
2 | "title": [
3 | {
4 | "pattern": "^(build|ci|docs|feat|fix|perf|refactor|style|test|chore)((.+))?:\\s.+",
5 | "message": "Your title needs to be prefixed with a topic"
6 | }
7 | ]
8 | }
9 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 |
8 | # Runtime data
9 | pids
10 | *.pid
11 | *.seed
12 | *.pid.lock
13 |
14 | # Directory for instrumented libs generated by jscoverage/JSCover
15 | lib-cov
16 |
17 | # Coverage directory used by tools like istanbul
18 | coverage
19 |
20 | # nyc test coverage
21 | .nyc_output
22 |
23 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
24 | .grunt
25 |
26 | # Bower dependency directory (https://bower.io/)
27 | bower_components
28 |
29 | # node-waf configuration
30 | .lock-wscript
31 |
32 | # Compiled binary addons (https://nodejs.org/api/addons.html)
33 | build/Release
34 |
35 | # Dependency directories
36 | node_modules/
37 | jspm_packages/
38 |
39 | # TypeScript v1 declaration files
40 | typings/
41 |
42 | # Optional npm cache directory
43 | .npm
44 |
45 | # Optional eslint cache
46 | .eslintcache
47 |
48 | # Optional REPL history
49 | .node_repl_history
50 |
51 | # Output of 'npm pack'
52 | *.tgz
53 |
54 | # Yarn Integrity file
55 | .yarn-integrity
56 |
57 | # dotenv environment variables file
58 | .env
59 |
60 | # next.js build output
61 | .next
62 |
63 | # junit test results
64 | junit.xml
--------------------------------------------------------------------------------
/.prettierrc:
--------------------------------------------------------------------------------
1 | printWidth: 120
2 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
4 |
5 | ## [1.15.0](https://github.com/gja/cloudflare-worker-local/compare/v1.13.0...v1.15.0) (2022-02-04)
6 |
7 |
8 | ### Features
9 |
10 | * Set CF-IPCountry header from env var, close [#80](https://github.com/gja/cloudflare-worker-local/issues/80) ([#81](https://github.com/gja/cloudflare-worker-local/issues/81)) ([4b8b0e1](https://github.com/gja/cloudflare-worker-local/commit/4b8b0e1e758b5b840d8fea9724b5ae3624d40e81))
11 |
12 |
13 | ### Bug Fixes
14 |
15 | * **kv:** handle binary files ([#76](https://github.com/gja/cloudflare-worker-local/issues/76)) ([8a03a79](https://github.com/gja/cloudflare-worker-local/commit/8a03a798098b057b49755d7265940d98fbb06423))
16 | * pipeline fix - Stream.pipeline is available only in nodejs 10 ([#82](https://github.com/gja/cloudflare-worker-local/issues/82)) ([3146f22](https://github.com/gja/cloudflare-worker-local/commit/3146f225d8f257ea034215bd348a6360037fe31b))
17 |
18 | ## [1.14.0](https://github.com/gja/cloudflare-worker-local/compare/v1.13.0...v1.14.0) (2022-02-04)
19 |
20 |
21 | ### Features
22 |
23 | * Uses node crypto instead of ossl crypto, close [#79](https://github.com/gja/cloudflare-worker-local/pulls/79)
24 | * Set CF-IPCountry header from env var, close [#80](https://github.com/gja/cloudflare-worker-local/issues/80) ([#81](https://github.com/gja/cloudflare-worker-local/issues/81)) ([4b8b0e1](https://github.com/gja/cloudflare-worker-local/commit/4b8b0e1e758b5b840d8fea9724b5ae3624d40e81))
25 | * **kv:** handle binary files ([#76](https://github.com/gja/cloudflare-worker-local/issues/76)) ([8a03a79](https://github.com/gja/cloudflare-worker-local/commit/8a03a798098b057b49755d7265940d98fbb06423))
26 |
27 |
28 | ### Bug Fixes
29 |
30 | * pipeline fix - Stream.pipeline is available only in nodejs 10 ([#82](https://github.com/gja/cloudflare-worker-local/issues/82)) ([3146f22](https://github.com/gja/cloudflare-worker-local/commit/3146f225d8f257ea034215bd348a6360037fe31b))
31 |
32 |
33 | # [1.13.0](https://github.com/gja/cloudflare-worker-local/compare/v1.12.1...v1.13.0) (2020-12-15)
34 |
35 |
36 | ### Features
37 |
38 | * Remaining KV Functions, File-System KV Store, Cache API Stubs, Workers Sites Support ([#57](https://github.com/gja/cloudflare-worker-local/issues/57)) ([78348e4](https://github.com/gja/cloudflare-worker-local/commit/78348e4))
39 |
40 |
41 |
42 |
43 | ## [1.12.1](https://github.com/gja/cloudflare-worker-local/compare/v1.12.0...v1.12.1) (2020-03-26)
44 |
45 |
46 | ### Bug Fixes
47 |
48 | * replace missing fs require ([#50](https://github.com/gja/cloudflare-worker-local/issues/50)) ([7ac3ca7](https://github.com/gja/cloudflare-worker-local/commit/7ac3ca7))
49 |
50 |
51 |
52 |
53 | # [1.12.0](https://github.com/gja/cloudflare-worker-local/compare/v1.11.0...v1.12.0) (2020-03-25)
54 |
55 |
56 | ### Features
57 |
58 | * Add Cloudflare Environment Variable and Secret support ([#41](https://github.com/gja/cloudflare-worker-local/issues/41)) ([2091bb5](https://github.com/gja/cloudflare-worker-local/commit/2091bb5))
59 |
60 |
61 |
62 |
63 | # [1.11.0](https://github.com/gja/cloudflare-worker-local/compare/v1.10.0...v1.11.0) (2019-08-01)
64 |
65 |
66 | ### Features
67 |
68 | * add FetchEvent.passThroughOnException() ([#37](https://github.com/gja/cloudflare-worker-local/issues/37)) ([3e21659](https://github.com/gja/cloudflare-worker-local/commit/3e21659))
69 | * Set cf values to request headers for local debugging ([#35](https://github.com/gja/cloudflare-worker-local/issues/35)) ([8d78e0a](https://github.com/gja/cloudflare-worker-local/commit/8d78e0a))
70 |
71 |
72 |
73 |
74 | # [1.10.0](https://github.com/gja/cloudflare-worker-local/compare/v1.9.0...v1.10.0) (2019-04-15)
75 |
76 |
77 | ### Bug Fixes
78 |
79 | * **Example:** Apparently response headers are immutable. Closes [[#27](https://github.com/gja/cloudflare-worker-local/issues/27)] ([#28](https://github.com/gja/cloudflare-worker-local/issues/28)) ([31cb10b](https://github.com/gja/cloudflare-worker-local/commit/31cb10b))
80 |
81 |
82 | ### Features
83 |
84 | * **Worker:** Supporting callback functions like setTimeout, setInterval ([#30](https://github.com/gja/cloudflare-worker-local/issues/30)) ([0abbaec](https://github.com/gja/cloudflare-worker-local/commit/0abbaec)), closes [#29](https://github.com/gja/cloudflare-worker-local/issues/29)
85 | * Reduce post limit, fixes [#31](https://github.com/gja/cloudflare-worker-local/issues/31) ([#32](https://github.com/gja/cloudflare-worker-local/issues/32)) ([000c435](https://github.com/gja/cloudflare-worker-local/commit/000c435))
86 |
87 |
88 |
89 |
90 | # [1.9.0](https://github.com/gja/cloudflare-worker-local/compare/v1.8.1...v1.9.0) (2019-01-04)
91 |
92 |
93 | ### Features
94 |
95 | * **Worker:** waitUntil and respondWith must be bound to e in order to work ([06bc243](https://github.com/gja/cloudflare-worker-local/commit/06bc243))
96 |
97 |
98 |
99 |
100 | ## [1.8.1](https://github.com/gja/cloudflare-worker-local/compare/v1.8.0...v1.8.1) (2019-01-04)
101 |
102 |
103 | ### Bug Fixes
104 |
105 | * Exposing the stores via test app ([fd468e1](https://github.com/gja/cloudflare-worker-local/commit/fd468e1))
106 |
107 |
108 |
109 |
110 | # [1.8.0](https://github.com/gja/cloudflare-worker-local/compare/v1.7.0...v1.8.0) (2019-01-02)
111 |
112 |
113 | ### Features
114 |
115 | * **KVStore:** Expose the KV Store from create app ([#24](https://github.com/gja/cloudflare-worker-local/issues/24)) ([5844ace](https://github.com/gja/cloudflare-worker-local/commit/5844ace))
116 |
117 |
118 |
119 |
120 | # [1.7.0](https://github.com/gja/cloudflare-worker-local/compare/v1.6.0...v1.7.0) (2018-12-28)
121 |
122 |
123 | ### Features
124 |
125 | * **TestWorkers:** Ability to Unit Test Workers ([#23](https://github.com/gja/cloudflare-worker-local/issues/23)) ([dc33dff](https://github.com/gja/cloudflare-worker-local/commit/dc33dff))
126 |
127 |
128 |
129 |
130 | # [1.6.0](https://github.com/gja/cloudflare-worker-local/compare/v1.5.0...v1.6.0) (2018-12-18)
131 |
132 |
133 | ### Features
134 |
135 | * **Worker:** Supporting most CF Headers ([#22](https://github.com/gja/cloudflare-worker-local/issues/22)) ([99e5db7](https://github.com/gja/cloudflare-worker-local/commit/99e5db7)), closes [#21](https://github.com/gja/cloudflare-worker-local/issues/21)
136 | * **InMemoryKVStore:** Delete Items from InMemoryKVStore ([#20](https://github.com/gja/cloudflare-worker-local/pull/20))
137 |
138 |
139 |
140 | # [1.5.0](https://github.com/gja/cloudflare-worker-local/compare/v1.4.0...v1.5.0) (2018-12-06)
141 |
142 |
143 | ### Features
144 |
145 | * Adding an index.js which only exports safe symbols ([#19](https://github.com/gja/cloudflare-worker-local/issues/19)) ([6e5323d](https://github.com/gja/cloudflare-worker-local/commit/6e5323d))
146 |
147 |
148 |
149 |
150 | # [1.4.0](https://github.com/gja/cloudflare-worker-local/compare/v1.3.0...v1.4.0) (2018-12-03)
151 |
152 |
153 | ### Features
154 |
155 | * update main in package ([#13](https://github.com/gja/cloudflare-worker-local/issues/13)) ([8c82a79](https://github.com/gja/cloudflare-worker-local/commit/8c82a79))
156 | * **Worker:** add self to scope ([996cec5](https://github.com/gja/cloudflare-worker-local/commit/996cec5))
157 | * **Worker:** add URLSearchParams ([#16](https://github.com/gja/cloudflare-worker-local/issues/16)) ([af6f1be](https://github.com/gja/cloudflare-worker-local/commit/af6f1be))
158 |
159 |
160 |
161 |
162 | # [1.3.0](https://github.com/gja/cloudflare-worker-local/compare/v1.2.0...v1.3.0) (2018-11-29)
163 |
164 |
165 | ### Features
166 |
167 | * **build:** Added standard-version ([#12](https://github.com/gja/cloudflare-worker-local/issues/12)) ([9507984](https://github.com/gja/cloudflare-worker-local/commit/9507984))
168 | * **build:** Adding prlint ([168e4cb](https://github.com/gja/cloudflare-worker-local/commit/168e4cb))
169 |
170 |
171 |
172 |
173 | ## [1.2.0](https://github.com/gja/cloudflare-worker-local/compare/v1.1.0...v1.2.0) (2018-11-29)
174 |
175 |
176 | ### Features
177 |
178 | * Simple In Memory KV Store by [@gja](https://github.com/gja)
179 | * Base64 support by [@jdanyow](https://github.com/jdanyow)
180 | * Subtle Crypto support by [@jdanyow](https://github.com/jdanyow)
181 | * Readme Updates for hot reloading
182 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # cloudflare-worker-local
2 | Run (or test) a Cloudflare Worker Locally. If you are looking for a project that will quickly help you bootstrap your worker, take a look at [create-cloudflare-worker](https://github.com/gja/create-cloudflare-worker)
3 |
4 | ## Running
5 |
6 | ```shell
7 | $ npm install -g cloudflare-worker-local
8 | $ cloudflare-worker-local /path/to/worker.js localhost:3000 4000
9 |
10 | Listening on Port 4000 and forwarding requests to http://localhost:3000/
11 | ```
12 |
13 | ## Automatically reloading
14 |
15 | It is possible to use nodemon to automatically reload the worker
16 |
17 | ```shell
18 | $ npm install -g nodemon
19 | $ nodemon --watch /path/to/worker.js --signal SIGHUP --exec 'cloudflare-worker-local /path/to/worker.js localhost:3000 4000'
20 | ```
21 |
22 | ## Unit Testing a Cloudflare Worker
23 |
24 | `cloudflare-worker-local` can be used to unit test a cloudflare worker. Please see [This Example](examples/unit-test-a-worker). You may also be interested in [create-cloudflare-worker](https://github.com/gja/create-cloudflare-worker)
25 |
26 | ## Things that are supported (and in scope)
27 |
28 | * Anything in Node.js scope by default (Object, Array)
29 | * Anything provided by fetch (fetch, Request, Response, Headers)
30 | * WHATWG URL
31 | * console
32 | * btoa / atob
33 | * crypto.subtle
34 | * Cloudflare key value store if you pass in the KV_NAMESPACE environment variable
35 | * Cloudflare [event.passThroughOnException()](https://workers.cloudflare.com/docs/reference/workers-concepts/fetch-event-lifecycle/#passthroughonexception) for runtime exception handling
36 | * Cloudflare Environment Variables and Secrets loaded from a wrangler.toml
37 | * Workers Sites
38 | * Set cloudflare `CF-IPCountry` header value from `COUNTRY` environment variable (default is `DEV`)
39 | * ... this list should probably have more things
40 |
41 | ## Contributors
42 |
43 | * Tejas Dinkar (@gja)
44 | * Jeremy Danyow (@jdanyow)
45 | * Rita Liu (@rita-liu)
46 | * Nolan Woods (@innovate-invent)
47 | * Brendan Coll (@mrbbot)
48 |
49 | ## Future enhancements
50 |
51 | * Support WASM
52 | * Support CPU timeouts
53 | * Better Examples
54 |
55 | ## Environment Variables
56 |
57 | * NUM_WORKERS - Specifies the number of node workers (default 1, to keep KV working in memory)
58 | * KV_NAMESPACES - A comma separated list of keyspaces. (ex: MY_STORE,ANOTHER_STORE)
59 |
60 | ## CloudFlare KV Store emulation using Minio or any S3 compatible service
61 |
62 | To enable Minio as the KV store simply provide these options as environment variables:
63 | MINIO_ENDPOINT, MINIO_ACCESS_KEY, and MINIO_SECRET_KEY
64 |
65 | ```shell script
66 | $ MINIO_ENDPOINT="localhost" MINIO_ACCESS_KEY="my_access_key" MINIO_SECRET_KEY="my_secret" cloudflare-worker-local /path/to/worker.js localhost:3000 4000
67 | ```
68 |
69 | Optionally, these variables are available as well:
70 | MINIO_PORT, MINIO_USE_SSL, MINIO_REGION, MINIO_TRANSPORT, MINIO_SESSIONTOKEN, and MINIO_PARTSIZE
71 |
72 | See [the Minio documentation](https://docs.min.io/docs/javascript-client-api-reference.html) for details on the various parameters.
73 |
74 | ## CloudFlare KV Store emulation using the File System
75 |
76 | To use the File System as the KV store simply provide the KV_FILE_ROOT option as an environment
77 | variable. A directory will be created in here for each KV namespace.
78 |
79 | ## CloudFlare Environment Variables and Secrets
80 |
81 | Support for CloudFlare Environment Variables and Secrets is provided via a wrangler.toml file.
82 | See the [wrangler documentation](https://developers.cloudflare.com/workers/tooling/wrangler/configuration)
83 | for more information on the file schema.
84 |
85 | To load the wrangler.toml, specify it on the command line:
86 | ```shell
87 | $ cloudflare-worker-local /path/to/worker.js localhost:3000 4000 /path/to/wrangler.toml
88 | ```
89 |
90 | Optionally, the desired environment specified within the wrangler.toml can be loaded:
91 | ```shell
92 | $ cloudflare-worker-local /path/to/worker.js localhost:3000 4000 /path/to/wrangler.toml production
93 | ```
94 |
95 | Secrets are specified under the 'secrets' root key in the document. See the [wrangler.toml](./examples/wrangler.toml)
96 | for an example of the supported structures.
97 |
98 | Two features are provided while loading the wrangler.toml:
99 | * All vars and secrets strings can contain ${} placeholders.
100 | A placeholder path is resolved using lodash.get and has the context of the root of the config document.
101 | A placeholder cannot refer to a value defined later in the document that also has placeholders.
102 | * Any var or secret that is not a string will be automatically JSON encoded.
103 | This allows you to inject complex data into a script by JSON decoding the variable value.
104 |
105 | Additionally, any 'kv-namespaces' in the wrangler.toml will be appended to the list of namespaces
106 | provided by KV_NAMESPACES.
107 |
108 | ## Workers Sites
109 |
110 | If a wrangler.toml file containing a `[site]` section with a `bucket` directory is loaded, the Workers Sites
111 | default KV namespace and manifest will be added to the worker's scope. Calls to `getAssetFromKV` will always
112 | return the latest version of an asset in the `bucket` directory. Note that you'll need to bundle your worker
113 | code (with Webpack for instance) before running it to use `@cloudflare/kv-asset-handler`, as `import`/
114 | `require` are not in workers' scopes.
115 |
--------------------------------------------------------------------------------
/app/__tests__/kv-namespace_spec.js:
--------------------------------------------------------------------------------
1 | const path = require("path");
2 | const { promisify } = require('util');
3 | const rimraf = promisify(require("rimraf"));
4 | const { KVNamespace } = require('../kv-namespace');
5 | const { InMemoryKVStore } = require('../in-memory-kv-store');
6 | const { FileKVStore } = require('../file-kv-store');
7 |
// Name of the KV namespace exercised by every test in this file, and the
// on-disk directory the file-backed store uses for it (created under this
// test directory and removed again in afterAll).
const TEST_NAMESPACE = 'TEST_NAMESPACE';
const TEST_NAMESPACE_PATH = path.join(__dirname, TEST_NAMESPACE);
10 |
// Builds a KV namespace client backed by the in-memory store, seeded with
// `initialData` — a map of key -> { value, expiration, metadata } (see the
// tests below for the entry shape). Returns the client together with a
// `storedFor` probe that reads a key's raw stored entry straight out of the
// backing store, bypassing the namespace API.
async function createMemoryNamespace(initialData) {
  const backing = new InMemoryKVStore();
  backing.values[TEST_NAMESPACE] = initialData || {};
  const ns = backing.getClient(TEST_NAMESPACE);
  const storedFor = async (key) => backing.values[TEST_NAMESPACE][key];
  return { ns, storedFor };
}
19 |
// Builds a KV namespace client backed by the file-system store rooted at this
// test directory. Any namespace directory left over from a previous run is
// wiped first, then the directory is seeded with `initialData` via
// FileKVStore.putter. The returned `storedFor` probe reads a key's raw entry
// straight from disk, normalising the store's "missing" marker (null) to
// undefined so both store flavours report absence the same way.
async function createFileNamespace(initialData) {
  await rimraf(TEST_NAMESPACE_PATH);
  const backing = new FileKVStore(__dirname);
  const seedEntries = Object.entries(initialData || {});
  for (const [key, data] of seedEntries) {
    await FileKVStore.putter(path.join(TEST_NAMESPACE_PATH, key), data);
  }
  const storedFor = async (key) => {
    const raw = await FileKVStore.getter(path.join(TEST_NAMESPACE_PATH, key));
    if (raw === null) return undefined;
    return raw;
  };
  return { ns: backing.getClient(TEST_NAMESPACE), storedFor };
}
34 |
// Expected value returned by `get('key')` for each store flavour: the
// in-memory store hands back the string exactly as stored, while the file
// store reads raw bytes from disk and so yields a Buffer.
// `const` rather than `let`: this lookup table is never reassigned.
const nsObjTestVals = {
  memory: 'value',
  file: Buffer.from('value'),
};
39 |
// One namespace factory per store flavour; the suite below runs the same
// specs against each entry, keyed by the flavour name.
const namespaceCreators = {
  memory: createMemoryNamespace,
  file: createFileNamespace,
};
44 |
45 | describe('kv-namespace', () => {
46 | beforeEach(() => {
47 | // Reset getTimestamp function before each test
48 | KVNamespace.getTimestamp = () => 5000;
49 | });
50 |
51 | afterAll(async () => {
52 | // Delete files created during tests at the end
53 | await rimraf(TEST_NAMESPACE_PATH);
54 | });
55 |
56 | for (const [namespaceType, createNamespace] of Object.entries(namespaceCreators)) {
57 | describe(namespaceType, () => {
      // KVNamespace#get: type conversions ('text' is the default, plus 'json'
      // and 'arrayBuffer'), rejection of the unsupported 'stream' type, and
      // lazy removal of expired keys on read. `expiration: -1` is the "never
      // expires" sentinel; with the beforeEach mock getTimestamp() === 5000,
      // an expiration of 1000 is already in the past.
      describe('get', () => {
        test('it gets text by default', async () => {
          const { ns } = await createNamespace({
            key: {
              value: 'value',
              expiration: -1,
              metadata: null,
            },
          });
          expect(await ns.get('key')).toStrictEqual(nsObjTestVals[namespaceType]);
        });

        test('it gets text', async () => {
          const { ns } = await createNamespace({
            key: {
              value: 'value',
              expiration: -1,
              metadata: null,
            },
          });
          expect(await ns.get('key', 'text')).toStrictEqual(nsObjTestVals[namespaceType]);
        });

        test('it gets json', async () => {
          const { ns } = await createNamespace({
            key: {
              value: '{"field": "value"}',
              expiration: -1,
              metadata: null,
            },
          });
          expect(await ns.get('key', 'json')).toStrictEqual({ field: 'value' });
        });

        test('it gets array buffers', async () => {
          const { ns } = await createNamespace({
            key: {
              value: '\x01\x02\x03',
              expiration: -1,
              metadata: null,
            },
          });
          expect(new Uint8Array(await ns.get('key', 'arrayBuffer'))).toStrictEqual(new Uint8Array([1, 2, 3]));
        });

        test('it fails to get streams', async () => {
          const { ns } = await createNamespace({
            key: {
              value: '\x01\x02\x03',
              expiration: -1,
              metadata: null,
            },
          });
          expect.assertions(1);
          await expect(ns.get('key', 'stream')).rejects.toStrictEqual(new Error('Type "stream" is not supported!'));
        });

        test('it returns null for non-existent keys', async () => {
          const { ns } = await createNamespace();
          await expect(await ns.get('key')).toBeNull();
        });

        // Reading an expired key must both return null and physically delete
        // the underlying entry (checked through the raw storedFor accessor).
        test('it returns null for and removes expired keys', async () => {
          const { ns, storedFor } = await createNamespace({
            key: {
              value: 'value',
              expiration: 1000,
              metadata: null,
            },
          });
          await expect(await storedFor('key')).toBeDefined();
          await expect(await ns.get('key')).toBeNull();
          await expect(await storedFor('key')).toBeUndefined();
        });
      });
133 |
      // KVNamespace#getWithMetadata: mirrors the `get` suite but asserts the
      // stored metadata object is returned alongside the value, and that
      // missing/expired keys yield { value: null, metadata: null }.
      describe('getWithMetadata', () => {
        test('it gets text by default with metadata', async () => {
          const { ns } = await createNamespace({
            key: {
              value: 'value',
              expiration: -1,
              metadata: { testing: true },
            },
          });
          expect(await ns.getWithMetadata('key')).toStrictEqual({
            value: nsObjTestVals[namespaceType],
            metadata: { testing: true },
          });
        });

        test('it gets text with metadata', async () => {
          const { ns } = await createNamespace({
            key: {
              value: 'value',
              expiration: -1,
              metadata: { testing: true },
            },
          });
          expect(await ns.getWithMetadata('key', 'text')).toStrictEqual({
            value: nsObjTestVals[namespaceType],
            metadata: { testing: true },
          });
        });

        test('it gets json with metadata', async () => {
          const { ns } = await createNamespace({
            key: {
              value: '{"field": "value"}',
              expiration: -1,
              metadata: { testing: true },
            },
          });
          expect(await ns.getWithMetadata('key', 'json')).toStrictEqual({
            value: { field: 'value' },
            metadata: { testing: true },
          });
        });

        test('it gets array buffers with metadata', async () => {
          const { ns } = await createNamespace({
            key: {
              value: '\x01\x02\x03',
              expiration: -1,
              metadata: { testing: true },
            },
          });
          const { value, metadata } = await ns.getWithMetadata('key', 'arrayBuffer');
          expect({
            value: new Uint8Array(value),
            metadata,
          }).toStrictEqual({
            value: new Uint8Array([1, 2, 3]),
            metadata: { testing: true },
          });
        });

        test('it fails to get streams with metadata', async () => {
          const { ns } = await createNamespace({
            key: {
              value: '\x01\x02\x03',
              expiration: -1,
              metadata: { testing: true },
            },
          });
          expect.assertions(1);
          await expect(ns.getWithMetadata('key', 'stream')).rejects.toStrictEqual(
            new Error('Type "stream" is not supported!')
          );
        });

        test('it returns null for non-existent keys with metadata', async () => {
          const { ns } = await createNamespace();
          await expect(await ns.getWithMetadata('key')).toStrictEqual({
            value: null,
            metadata: null,
          });
        });

        // Like plain get: an expired key reads as null/null AND is removed
        // from the backing store as a side effect of the read.
        test('it returns null for and removes expired keys with metadata', async () => {
          const { ns, storedFor } = await createNamespace({
            key: {
              value: 'value',
              expiration: 1000,
              metadata: { testing: true },
            },
          });
          await expect(await storedFor('key')).toBeDefined();
          await expect(await ns.getWithMetadata('key')).toStrictEqual({
            value: null,
            metadata: null,
          });
          await expect(await storedFor('key')).toBeUndefined();
        });
      });
233 |
      // KVNamespace#put: writes are verified through the raw storedFor
      // accessor, asserting the exact persisted { value, expiration, metadata }
      // triple. expiration/expirationTtl accept both numbers and numeric
      // strings; ttl is added to the mocked KVNamespace.getTimestamp().
      describe('put', () => {
        test('it puts text', async () => {
          const { ns, storedFor } = await createNamespace();
          await ns.put('key', 'value');
          await expect(await storedFor('key')).toStrictEqual({
            value: nsObjTestVals[namespaceType],
            expiration: -1,
            metadata: null,
          });
        });

        test('it puts array buffers', async () => {
          const { ns, storedFor } = await createNamespace();
          await ns.put('key', new Uint8Array([1, 2, 3]).buffer);
          await expect(await storedFor('key')).toStrictEqual({
            value: namespaceType === 'memory' ? '\x01\x02\x03' : Buffer.from([1, 2, 3]),
            expiration: -1,
            metadata: null,
          });
        });

        test('it puts text with expiration', async () => {
          const { ns, storedFor } = await createNamespace();
          await ns.put('key', 'value', { expiration: 1000 });
          await expect(await storedFor('key')).toStrictEqual({
            value: nsObjTestVals[namespaceType],
            expiration: 1000,
            metadata: null,
          });
        });

        test('it puts text with string expiration', async () => {
          const { ns, storedFor } = await createNamespace();
          await ns.put('key', 'value', { expiration: '1000' });
          await expect(await storedFor('key')).toStrictEqual({
            value: nsObjTestVals[namespaceType],
            expiration: 1000,
            metadata: null,
          });
        });

        // ttl tests pin getTimestamp to 1000 so expiration === 1000 + ttl.
        test('it puts text with expiration ttl', async () => {
          KVNamespace.getTimestamp = () => 1000;
          const { ns, storedFor } = await createNamespace();
          await ns.put('key', 'value', { expirationTtl: 1000 });
          await expect(await storedFor('key')).toStrictEqual({
            value: nsObjTestVals[namespaceType],
            expiration: 2000,
            metadata: null,
          });
        });

        test('it puts text with string expiration ttl', async () => {
          KVNamespace.getTimestamp = () => 1000;
          const { ns, storedFor } = await createNamespace();
          await ns.put('key', 'value', { expirationTtl: '1000' });
          await expect(await storedFor('key')).toStrictEqual({
            value: nsObjTestVals[namespaceType],
            expiration: 2000,
            metadata: null,
          });
        });

        test('it puts text with metadata', async () => {
          const { ns, storedFor } = await createNamespace();
          await ns.put('key', 'value', { metadata: { testing: true } });
          await expect(await storedFor('key')).toStrictEqual({
            value: nsObjTestVals[namespaceType],
            expiration: -1,
            metadata: { testing: true },
          });
        });

        test('it puts text with expiration and metadata', async () => {
          const { ns, storedFor } = await createNamespace();
          await ns.put('key', 'value', {
            expiration: 1000,
            metadata: { testing: true },
          });
          await expect(await storedFor('key')).toStrictEqual({
            value: nsObjTestVals[namespaceType],
            expiration: 1000,
            metadata: { testing: true },
          });
        });

        test('it puts text with expiration ttl and metadata', async () => {
          KVNamespace.getTimestamp = () => 1000;
          const { ns, storedFor } = await createNamespace();
          await ns.put('key', 'value', {
            expirationTtl: 1000,
            metadata: { testing: true },
          });
          await expect(await storedFor('key')).toStrictEqual({
            value: nsObjTestVals[namespaceType],
            expiration: 2000,
            metadata: { testing: true },
          });
        });

        // A second put for an existing key replaces value, expiration AND
        // metadata wholesale — nothing is merged from the previous entry.
        test('it overrides existing keys', async () => {
          const { ns, storedFor } = await createNamespace({
            key: {
              value: 'value',
              expiration: -1,
              metadata: null,
            },
          });
          await ns.put('key', 'value2', {
            expiration: 1000,
            metadata: { testing: true },
          });
          await expect(await storedFor('key')).toStrictEqual({
            value: namespaceType === 'memory' ? 'value2' : Buffer.from('value2', 'utf-8'),
            expiration: 1000,
            metadata: { testing: true },
          });
        });
      });
353 |
      // KVNamespace#delete: removes the backing entry; deleting a missing key
      // must be a silent no-op (the second test only asserts "does not throw").
      describe('delete', () => {
        test('it deletes existing keys', async () => {
          const { ns, storedFor } = await createNamespace({
            key: {
              value: 'value',
              expiration: -1,
              metadata: null,
            },
          });
          await ns.delete('key');
          await expect(await storedFor('key')).toBeUndefined();
        });

        test('it does nothing for non-existent keys', async () => {
          const { ns } = await createNamespace({});
          await ns.delete('key');
        });
      });
372 |
      // KVNamespace#list: sorted key listing with optional prefix filtering,
      // per-key metadata/expiration in the result, cursor pagination, and lazy
      // removal of expired entries during listing.
      describe('list', () => {
        test('it lists keys in sorted order', async () => {
          // Keys are inserted out of order on purpose; list() must sort them.
          const { ns } = await createNamespace({
            key3: {
              value: 'value3',
              expiration: -1,
              metadata: null,
            },
            key1: {
              value: 'value1',
              expiration: -1,
              metadata: null,
            },
            key2: {
              value: 'value2',
              expiration: -1,
              metadata: null,
            },
          });
          expect(await ns.list()).toEqual({
            keys: [{ name: 'key1' }, { name: 'key2' }, { name: 'key3' }],
            list_complete: true,
            cursor: '',
          });
        });

        test('it lists keys matching prefix', async () => {
          const { ns } = await createNamespace({
            section1key1: {
              value: 'value11',
              expiration: -1,
              metadata: null,
            },
            section1key2: {
              value: 'value12',
              expiration: -1,
              metadata: null,
            },
            section2key1: {
              value: 'value21',
              expiration: -1,
              metadata: null,
            },
          });
          expect(await ns.list({ prefix: 'section1' })).toEqual({
            keys: [{ name: 'section1key1' }, { name: 'section1key2' }],
            list_complete: true,
            cursor: '',
          });
        });

        test('it lists keys with metadata', async () => {
          const { ns } = await createNamespace({
            key1: {
              value: 'value1',
              expiration: -1,
              metadata: { testing: 1 },
            },
            key2: {
              value: 'value2',
              expiration: -1,
              metadata: { testing: 2 },
            },
            key3: {
              value: 'value3',
              expiration: -1,
              metadata: { testing: 3 },
            },
          });
          expect(await ns.list()).toEqual({
            keys: [
              { name: 'key1', metadata: { testing: 1 } },
              { name: 'key2', metadata: { testing: 2 } },
              { name: 'key3', metadata: { testing: 3 } },
            ],
            list_complete: true,
            cursor: '',
          });
        });

        test('it lists keys with expiration', async () => {
          // getTimestamp() === 500, so all expirations below are in the future
          // and the keys must still be listed (with their expiration exposed).
          KVNamespace.getTimestamp = () => 500;
          const { ns } = await createNamespace({
            key1: {
              value: 'value1',
              expiration: 1000,
              metadata: null,
            },
            key2: {
              value: 'value2',
              expiration: 2000,
              metadata: null,
            },
            key3: {
              value: 'value3',
              expiration: 3000,
              metadata: null,
            },
          });
          expect(await ns.list()).toEqual({
            keys: [
              { name: 'key1', expiration: 1000 },
              { name: 'key2', expiration: 2000 },
              { name: 'key3', expiration: 3000 },
            ],
            list_complete: true,
            cursor: '',
          });
        });

        test('it lists keys with metadata and expiration', async () => {
          KVNamespace.getTimestamp = () => 500;
          const { ns } = await createNamespace({
            key1: {
              value: 'value1',
              expiration: 1000,
              metadata: { testing: 1 },
            },
            key2: {
              value: 'value2',
              expiration: 2000,
              metadata: { testing: 2 },
            },
            key3: {
              value: 'value3',
              expiration: 3000,
              metadata: { testing: 3 },
            },
          });
          expect(await ns.list()).toEqual({
            keys: [
              { name: 'key1', expiration: 1000, metadata: { testing: 1 } },
              { name: 'key2', expiration: 2000, metadata: { testing: 2 } },
              { name: 'key3', expiration: 3000, metadata: { testing: 3 } },
            ],
            list_complete: true,
            cursor: '',
          });
        });

        test('it ignores and removes expired keys', async () => {
          // Default beforeEach timestamp is 5000, so every key here is already
          // expired: list() must return nothing AND purge the backing entries.
          const { ns, storedFor } = await createNamespace({
            key1: {
              value: 'value1',
              expiration: 1000,
              metadata: null,
            },
            key2: {
              value: 'value2',
              expiration: 2000,
              metadata: null,
            },
            key3: {
              value: 'value3',
              expiration: 3000,
              metadata: null,
            },
          });
          expect(await storedFor('key1')).toBeDefined();
          expect(await storedFor('key2')).toBeDefined();
          expect(await storedFor('key3')).toBeDefined();
          expect(await ns.list()).toEqual({
            keys: [],
            list_complete: true,
            cursor: '',
          });
          expect(await storedFor('key1')).toBeUndefined();
          expect(await storedFor('key2')).toBeUndefined();
          expect(await storedFor('key3')).toBeUndefined();
        });

        test('it paginates keys', async () => {
          const { ns } = await createNamespace({
            key1: {
              value: 'value1',
              expiration: -1,
              metadata: null,
            },
            key2: {
              value: 'value2',
              expiration: -1,
              metadata: null,
            },
            key3: {
              value: 'value3',
              expiration: -1,
              metadata: null,
            },
          });
          // First page: limit 2 of 3 keys, so the listing is incomplete and a
          // non-empty cursor is returned for resumption.
          const { keys, list_complete, cursor } = await ns.list({ limit: 2 });
          expect({ keys, list_complete }).toEqual({
            keys: [{ name: 'key1' }, { name: 'key2' }],
            list_complete: false,
          });
          expect(cursor).not.toBe('');
          expect(await ns.list({ limit: 2, cursor })).toEqual({
            keys: [{ name: 'key3' }],
            list_complete: true,
            cursor: '',
          });
        });

        test('it paginates keys matching prefix', async () => {
          const { ns } = await createNamespace({
            section1key1: {
              value: 'value11',
              expiration: -1,
              metadata: null,
            },
            section1key2: {
              value: 'value12',
              expiration: -1,
              metadata: null,
            },
            section1key3: {
              value: 'value13',
              expiration: -1,
              metadata: null,
            },
            section2key1: {
              value: 'value21',
              expiration: -1,
              metadata: null,
            },
          });
          const { keys, list_complete, cursor } = await ns.list({ prefix: 'section1', limit: 2 });
          expect({ keys, list_complete }).toEqual({
            keys: [{ name: 'section1key1' }, { name: 'section1key2' }],
            list_complete: false,
          });
          expect(cursor).not.toBe('');
          expect(
            await ns.list({
              prefix: 'section1',
              limit: 2,
              cursor,
            })
          ).toEqual({
            keys: [{ name: 'section1key3' }],
            list_complete: true,
            cursor: '',
          });
        });

        test('it returns an empty list with no keys', async () => {
          const { ns } = await createNamespace();
          expect(await ns.list()).toEqual({
            keys: [],
            list_complete: true,
            cursor: '',
          });
        });

        test('it returns an empty list with no matching keys', async () => {
          const { ns } = await createNamespace({
            key1: {
              value: 'value1',
              expiration: -1,
              metadata: null,
            },
            key2: {
              value: 'value2',
              expiration: -1,
              metadata: null,
            },
            key3: {
              value: 'value3',
              expiration: -1,
              metadata: null,
            },
          });
          expect(await ns.list({ prefix: 'none' })).toEqual({
            keys: [],
            list_complete: true,
            cursor: '',
          });
        });
      });
651 | });
652 | }
653 | });
654 |
--------------------------------------------------------------------------------
/app/__tests__/server_spec.js:
--------------------------------------------------------------------------------
1 | const { createApp } = require("../server");
2 | const supertest = require("supertest");
3 | const { MinioKVStore, Minio } = require("../minio-kv-store");
4 | const wrangler = require("../../lib/wrangler");
5 |
// End-to-end tests for createApp: each test installs a worker script (given as
// source text) and drives the express app through supertest.
describe("server", () => {
  it("returns the response from the worker", async () => {
    const app = createApp(
      'addEventListener("fetch", (e) => e.respondWith(new Response("hello", {status: 201, headers: {"Some-Header": "value"}})))'
    );

    await supertest(app)
      .get("/some-route")
      .expect(201, "hello")
      .expect("Some-Header", "value");
  });

  it("passes url related field to the header", async () => {
    const app = createApp(
      'addEventListener("fetch", (e) => e.respondWith(new Response(`${new URL(e.request.url).pathname}|${e.request.headers.get("Foo")}`)))'
    );

    await supertest(app)
      .get("/some-route")
      .set("Foo", "bar")
      .expect(200, `/some-route|bar`);
  });

  it("passes post information on", async () => {
    const app = createApp(
      'addEventListener("fetch", (e) => e.respondWith(e.request.text().then(text => new Response(`${e.request.method}|${text}`))))'
    );

    await supertest(app)
      .post("/some-route")
      .send("foo=bar")
      .expect(200, `POST|foo=bar`);
  });

  // NOTE(review): despite the name, this never performs an update — it only
  // creates one app and checks its response. Verify against createApp's
  // update API and exercise it here if one exists.
  it("allows you to update the worker", async () => {
    const app = createApp('addEventListener("fetch", (e) => e.respondWith(new Response("goodbye")))');

    await supertest(app)
      .get("/some-route")
      .expect(200, "goodbye");
  });

  it("passes the current ip onwards", async () => {
    const app = createApp(
      'addEventListener("fetch", (e) => e.respondWith(new Response(e.request.headers.get("X-Forwarded-For"))))'
    );
    await supertest(app)
      .get("/some-route")
      .expect(200, "127.0.0.1");
  });

  it("creates stores and passes it to the worker", async () => {
    const app = createApp(
      'addEventListener("fetch", (e) => e.respondWith(MYSTORE.get("key").then(v => new Response(v))))',
      {
        kvStores: ["MYSTORE"]
      }
    );

    await app.stores.MYSTORE.put("key", "value");

    await supertest(app)
      .get("/some-route")
      .expect(200, "value");
  });

  it("can load CloudFlare 'environment variables' and 'secrets' from wrangler.toml", async () => {
    let kvStores = ["MYSTORE"]; // Check if stores somehow clobbered
    // Import config from provided wrangler.toml
    const config = wrangler.loadConfig(__dirname + "/../../examples/wrangler.toml");
    // NOTE(review): the return value is discarded — presumably called for
    // validation side effects; confirm against lib/wrangler's toJSON.
    wrangler.toJSON(config);
    const env = {...config.vars, ...config.secrets};
    if (Array.isArray(config['kv-namespaces'])) kvStores = kvStores.concat(config['kv-namespaces'].map(n=>n.binding));
    const app = createApp(
      'addEventListener("fetch", (e) => e.respondWith(Promise.all([MYSTORE.get("key"), wranglerKV.get("key")]).then(([v, x]) => new Response(JSON.stringify({MYSTORE: v, wranglerKV: x, variable1, foo})))))',
      {
        kvStores,
        env
      }
    );

    await app.stores.MYSTORE.put("key", "value");
    await app.stores.wranglerKV.put("key", "value");

    await supertest(app)
      .get("/some-route")
      .expect(200, '{"MYSTORE":"value","wranglerKV":"value","variable1":"somevalue","foo":"{\\"bar\\":\\"shhh\\"}"}');
  });

  it("allows big post request", async () => {
    // Doubling 20 times yields a ~1MiB body, beyond express's default limit.
    let body = "x"
    for (let i = 0; i < 20; i++) {
      body = body + body
    }

    const app = createApp(
      'addEventListener("fetch", (e) => e.respondWith(e.request.text().then(text => new Response(`${e.request.method}`))))'
    );

    await supertest(app)
      .post("/some-route")
      .send(body)
      .expect(200, 'POST');
  });

  it("can init a minio client", async () => {
    const app = createApp(
      'addEventListener("fetch", (e) => e.respondWith(new Response("success")))',
      {
        kvStore: ()=>new MinioKVStore(new Minio.Client({endPoint: 'localhost'})),
        kvStores: [] // leave this empty so the client doesn't attempt to make requests
      }
    );
    // Previously this test contained no assertion at all and could only fail
    // by throwing; at minimum pin that construction produced an app.
    expect(app).toBeDefined();
  });

  it("config country overrides cf-ipcountry header ", async () => {
    const app = createApp(
      'addEventListener("fetch", (e) => e.respondWith(new Response("hello", {status: 200, headers: {"return-country": e.request.headers.get("cf-ipcountry")}})))',
      {country: 'some-country'}
    );

    await supertest(app)
      .get("/some-route")
      .expect(200, "hello")
      .expect("return-country", "some-country");
  });

  it("set DEV as cf-ipcountry header by default", async () => {
    const app = createApp(
      'addEventListener("fetch", (e) => e.respondWith(new Response("hello", {status: 200, headers: {"return-country": e.request.headers.get("cf-ipcountry")}})))'
    );

    await supertest(app)
      .get("/some-route")
      .expect(200, "hello")
      .expect("return-country", "DEV");
  });
});
144 |
--------------------------------------------------------------------------------
/app/__tests__/test-app_spec.js:
--------------------------------------------------------------------------------
1 | const { createTestApp } = require("../test-app");
2 | const express = require("express");
3 | const supertest = require("supertest");
4 |
// createTestApp: a worker whose fetch() must be proxied to the supplied
// upstream express app, so the worker's passthrough returns the upstream body.
describe("server", () => {
  it("returns the response from the worker", async () => {
    const upstreamApp = express();
    upstreamApp.get("/some-route", (req, res) => res.end("hello"));
    const app = createTestApp('addEventListener("fetch", (e) => e.respondWith(fetch(e.request)))', upstreamApp);

    await supertest(app)
      .get("/some-route")
      .expect(200, "hello");
  });
});
16 |
--------------------------------------------------------------------------------
/app/__tests__/worker_spec.js:
--------------------------------------------------------------------------------
1 | const express = require("express");
2 | const { Worker } = require("../worker");
3 | const { InMemoryKVStore } = require("../in-memory-kv-store");
4 | const { Headers } = require("node-fetch");
5 |
6 | describe("Workers", () => {
  // Sanity check: addEventListener registers a handler and triggerEvent
  // forwards its extra arguments to that handler, returning its result.
  test("It Can Create and Execute a Listener", () => {
    const worker = new Worker("foo.com", 'addEventListener("add", (a, b) => a + b)');
    expect(worker.triggerEvent("add", 1, 2)).toBe(3);
  });
11 |
  // Verifies the sandbox the worker script runs in exposes the globals a
  // Cloudflare Worker expects: self, Node builtins, fetch types, URL,
  // URLSearchParams, btoa/atob, timers, WebCrypto + Text{En,De}coder, console.
  describe("Ensuring Things are in scope", () => {
    test("It has self global", () => {
      const worker = new Worker("foo.com", `addEventListener('test', () => self)`);
      const self = worker.triggerEvent("test");
      expect(self).toBeDefined();
    });

    test("It has Node buildins like Object in scope", () => {
      const worker = new Worker("foo.com", 'addEventListener("test", () => Object.assign({}, {foo: "bar"}))');
      expect(worker.triggerEvent("test").foo).toBe("bar");
    });

    test("It has Fetch buildins like Request in scope", () => {
      const worker = new Worker("foo.com", 'addEventListener("test", () => new Request())');
      expect(worker.triggerEvent("test").method).toBe("GET");
    });

    test("It has support for WHATWG URLs", () => {
      const worker = new Worker(
        "foo.com",
        'addEventListener("test", () => new URL("https://www.cloudflare.com/api?foo=bar"))'
      );
      const url = worker.triggerEvent("test");
      expect(url.hostname).toBe("www.cloudflare.com");
      expect(url.pathname).toBe("/api");
      expect(url.searchParams.get("foo")).toBe("bar");
    });

    test("It has support for URLSearchParams", () => {
      const worker = new Worker("foo.com", `addEventListener('test', () => new URLSearchParams({ foo: 'bar' }))`);
      const params = worker.triggerEvent("test");
      expect(params.has("foo")).toBe(true);
      expect(params.get("foo")).toBe("bar");
      expect(params.has("baz")).toBe(false);
      expect(params.get("baz")).toBe(null);
    });

    test("It has support for base64 encoding APIs", () => {
      const worker = new Worker(
        "foo.com",
        `addEventListener('test', () => ({ encoded: btoa('test'), decoded: atob('dGVzdA==') }))`
      );
      const { encoded, decoded } = worker.triggerEvent("test");
      expect(encoded).toBe("dGVzdA==");
      expect(decoded).toBe("test");
    });

    test("It has support for delayed promises with setTimeout", async () => {
      const worker = new Worker(
        "foo.com",
        `addEventListener('test', () => new Promise(resolve => setTimeout(() => resolve(42), 100)))`
      );
      const result = await worker.triggerEvent("test");
      expect(result).toBe(42);
    })

    test("It has support for crypto and Text encoding APIs", async () => {
      // AES-GCM round trip inside the sandbox: encrypt then decrypt 'foo'
      // with a SHA-256-derived key and confirm the plaintext survives.
      const worker = new Worker(
        "foo.com",
        `addEventListener('test', async () => {
              const password = 'test';
              const plainText = 'foo';
              const ptUtf8 = new TextEncoder().encode(plainText);
              const pwUtf8 = new TextEncoder().encode(password);
              const pwHash = await crypto.subtle.digest('SHA-256', pwUtf8);
              const iv = crypto.getRandomValues(new Uint8Array(12));
              const alg = { name: 'AES-GCM', iv: iv };
              const encKey = await crypto.subtle.importKey('raw', pwHash, alg, false, ['encrypt']);
              const encBuffer = await crypto.subtle.encrypt(alg, encKey, ptUtf8);
              const decKey = await crypto.subtle.importKey('raw', pwHash, alg, false, ['decrypt']);
              const ptBuffer = await crypto.subtle.decrypt(alg, decKey, encBuffer);
              const plainText2 = new TextDecoder().decode(ptBuffer);
              return plainText === plainText2;
          })`
      );
      const decrypted = await worker.triggerEvent("test");
      expect(decrypted).toBe(true);
    });

    test("It has support for the console API", () => {
      const worker = new Worker("foo.com", `addEventListener('test', () => console.log('test'))`);
      const spy = jest.spyOn(console, "log");
      worker.triggerEvent("test");
      expect(spy).toHaveBeenCalledWith("test");
    });
  });
98 |
  // A fetch listener that answers with a synthetic Response short-circuits any
  // upstream request entirely.
  test("It can stub out responses", async () => {
    const worker = new Worker("foo.com", 'addEventListener("fetch", (e) => e.respondWith(new Response("hello")))');
    const response = await worker.executeFetchEvent("http://foo.com");
    expect(response.status).toBe(200);
    expect(await response.text()).toBe("hello");
  });
105 |
  // The worker echoes its request headers back, letting us assert the
  // Cloudflare-style headers injected into every incoming request and the
  // append (not overwrite) semantics of the X-Forwarded-* pair.
  describe("Cloudflare Headers", () => {
    it("Adds cloudflare headers", async () => {
      const worker = new Worker(
        "foo.com",
        'addEventListener("fetch", (e) => e.respondWith(new Response("hello", {headers: e.request.headers})))'
      );
      const response = await worker.executeFetchEvent("http://foo.com");
      expect(response.headers.get("CF-Ray")).toHaveLength(16);
      expect(response.headers.get("CF-Visitor")).toBe('{"scheme":"http"}');
      expect(response.headers.get("CF-IPCountry")).toBe("DEV");
      expect(response.headers.get("CF-Connecting-IP")).toBe("127.0.0.1");
      expect(response.headers.get("X-Real-IP")).toBe("127.0.0.1");

      expect(response.headers.get("X-Forwarded-For")).toBe("127.0.0.1");
      expect(response.headers.get("X-Forwarded-Proto")).toBe("http");
    });

    it("correctly appends to X-Forwarded-*", async () => {
      const worker = new Worker(
        "foo.com",
        'addEventListener("fetch", (e) => e.respondWith(new Response("hello", {headers: e.request.headers})))'
      );
      const response = await worker.executeFetchEvent("http://foo.com", {
        headers: new Headers({
          "X-Forwarded-For": "8.8.8.8",
          "X-Forwarded-Proto": "https"
        })
      });
      expect(response.headers.get("X-Forwarded-For")).toBe("8.8.8.8, 127.0.0.1");
      expect(response.headers.get("X-Forwarded-Proto")).toBe("https, http");
    });
  });
138 |
139 | describe("Fetch Behavior", () => {
140 | let upstreamServer;
141 | let upstreamHost;
142 |
143 | beforeAll(async function() {
144 | const upstreamApp = express();
145 | upstreamApp.get("/success", (req, res) => res.send("OK"));
146 | upstreamApp.get("/redirect", (req, res) => res.redirect(301, "https://www.google.com"));
147 | upstreamApp.get("/host", (req, res) => res.send(req.headers.host));
148 | upstreamApp.get("/cacheable", (req, res) => res.set(req.headers).send());
149 |
150 | await new Promise(resolve => {
151 | upstreamServer = upstreamApp.listen(resolve);
152 | });
153 |
154 | upstreamHost = `127.0.0.1:${upstreamServer.address().port}`;
155 | });
156 |
157 | test("It Fetches Correctly", async () => {
158 | const worker = new Worker(upstreamHost, "", { upstreamHost: upstreamHost });
159 | const response = await worker.executeFetchEvent(`http://${upstreamHost}/success`);
160 | expect(response.status).toBe(200);
161 | expect(await response.text()).toBe("OK");
162 | });
163 |
164 | test("It does not follow redirects", async () => {
165 | const worker = new Worker(upstreamHost, "", { upstreamHost: upstreamHost });
166 | const response = await worker.executeFetchEvent(`http://${upstreamHost}/redirect`);
167 | expect(response.status).toBe(301);
168 | expect(response.headers.get("Location")).toBe("https://www.google.com/");
169 | });
170 |
171 | test("The worker forwards the request upstream", async () => {
172 | const worker = new Worker("foo.com", "", { upstreamHost: upstreamHost });
173 | const response = await worker.executeFetchEvent(`http://foo.com/success`);
174 | expect(response.status).toBe(200);
175 | expect(await response.text()).toBe("OK");
176 | });
177 |
178 | test("The worker does not keeps the host the same", async () => {
179 | const worker = new Worker("foo.com", "", { upstreamHost: upstreamHost });
180 | const response = await worker.executeFetchEvent(`http://foo.com/host`);
181 | expect(response.status).toBe(200);
182 | expect(await response.text()).toBe("foo.com");
183 | });
184 |
185 | test("It does not forward to the upstream host if the hostname is not the same", async () => {
186 | const worker = new Worker(
187 | "foo.com",
188 | `addEventListener("fetch", (e) => e.respondWith(fetch("http://${upstreamHost}/host")))`,
189 | { upstreamHost: null }
190 | );
191 | const response = await worker.executeFetchEvent(`http://foo.com/host`);
192 | expect(response.status).toBe(200);
193 | expect(await response.text()).toBe(upstreamHost);
194 | });
195 |
196 | it("It forwards cache control headers", async () => {
197 | const worker = new Worker(
198 | "foo.com",
199 | `addEventListener("fetch", (e) => e.respondWith(fetch("http://${upstreamHost}/cacheable", { cf: {
200 | cacheKey: "foo",
201 | cacheEverything: true,
202 | cacheTtl: 300
203 | } })))`
204 | );
205 | const response = await worker.executeFetchEvent("http://foo.com/cacheable");
206 | expect(await response.headers.get("cf-cache-key")).toBe("foo");
207 | expect(await response.headers.get("cf-cache-everything")).toBe("true");
208 | expect(await response.headers.get("cf-cache-ttl")).toBe("300");
209 | });
210 |
211 | test("It can save things into the KV store", async () => {
212 | const kvStoreFactory = new InMemoryKVStore();
213 | const worker = new Worker(
214 | "foo.com",
215 | `addEventListener("fetch", (e) => {e.respondWith(new Response("foo")); e.waitUntil(MYSTORE.put("foo", "bar"))})`,
216 | { kvStores: { MYSTORE: kvStoreFactory.getClient("MYSTORE") } }
217 | );
218 |
219 | const response = await worker.executeFetchEvent(`http://foo.com/blah`);
220 | expect(response.status).toBe(200);
221 | expect(await response.text()).toBe("foo");
222 | expect(await kvStoreFactory.getClient("MYSTORE").get("foo")).toBe("bar");
223 | await kvStoreFactory.getClient("MYSTORE").delete("foo");
224 | expect(await kvStoreFactory.getClient("MYSTORE").get("foo")).toBe(null);
225 | });
226 |
227 | test("It can access CloudFlare 'environment variables' and 'secrets' ", async () => {
228 | const worker = new Worker(
229 | "foo.com",
230 | `addEventListener('test', () => ({ variable1, foo }))`,
231 | {env: {variable1: "somevalue", foo: '{"bar": "shhh"}'}}
232 | );
233 | const { variable1, foo } = worker.triggerEvent("test");
234 | expect(variable1).toBe("somevalue");
235 | expect(foo).toBe('{"bar": "shhh"}');
236 | });
237 |
test("It fetches directly from origin if passThroughOnException() is called", async () => {
  // passThroughOnException() tells the runtime to fall back to fetching the
  // origin directly whenever the worker script throws.
  // (Title typo fixed: "origin is" -> "origin if".)
  const worker = new Worker(
    upstreamHost,
    `
    async function handleRequest(event) {
      throw "An exception from worker!";
    }
    addEventListener("fetch", (e) => {
      e.passThroughOnException();
      e.respondWith(handleRequest(e));
    });
    `,
    { upstreamHost: upstreamHost }
  );
  // The worker throws, so a 200 proves the response came from the origin.
  const response = await worker.executeFetchEvent(`http://${upstreamHost}/success`);
  expect(response.status).toBe(200);
});
255 |
test("It does not quietly eat runtime exceptions", async () => {
  const worker = new Worker(
    upstreamHost,
    `addEventListener("fetch", (e) => {throw "An exception from worker!"})`,
    { upstreamHost: upstreamHost }
  );
  // `rejects` makes the expectation mandatory: the old try/catch version
  // passed vacuously when no exception was thrown at all, and also left an
  // unused `response` binding behind. (Title typo "runtion" fixed too.)
  await expect(worker.executeFetchEvent(`http://${upstreamHost}/success`)).rejects.toBe(
    "An exception from worker!"
  );
});
268 |
// Shut down the stub upstream server once the suite finishes.
afterAll(() => {
  upstreamServer.close();
});
272 | });
273 | });
274 |
--------------------------------------------------------------------------------
/app/caches.js:
--------------------------------------------------------------------------------
// Caches API stubs: see https://developers.cloudflare.com/workers/runtime-apis/cache
// (required for Workers Sites to work)
//
// Every operation is a deliberate no-op: nothing is ever cached locally.
const caches = {
  default: {
    // Resolves to undefined, signalling the response was "successfully" cached.
    async put(request, response) {},
    // Resolves to undefined, signalling no cached response could be found.
    async match(request, options) {},
    // Resolves to false, signalling there was no cached response to delete.
    async delete(request, options) {
      return false;
    }
  }
};
19 |
20 | module.exports = { caches };
--------------------------------------------------------------------------------
/app/file-kv-store.js:
--------------------------------------------------------------------------------
1 | const fs = require('fs');
2 | const path = require('path');
3 | const { promisify } = require('util');
4 | const mkdirp = require('mkdirp');
5 | const { KVNamespace, allLister } = require('./kv-namespace');
6 |
/**
 * Reads a file's contents.
 * @param {string} filePath path of the file to read
 * @returns {Promise<(Buffer | null)>} the contents, or null if the file couldn't be found
 */
async function readFile(filePath) {
  try {
    // fs.promises replaces the manual callback-to-Promise plumbing.
    return await fs.promises.readFile(filePath);
  } catch (err) {
    if (err.code === 'ENOENT') return null; // File not found
    throw err;
  }
}
23 |
/**
 * Writes data to a file, creating parent directories as required.
 * @param {string} filePath path of the file to write
 * @param {(string | Buffer)} data contents to write (UTF-8 encoded)
 * @returns {Promise<void>}
 */
async function writeFile(filePath, data) {
  // fs.mkdir's recursive mode makes the third-party mkdirp dependency unnecessary.
  await fs.promises.mkdir(path.dirname(filePath), { recursive: true });
  await fs.promises.writeFile(filePath, data, 'utf8');
}
37 |
/**
 * Deletes a file.
 * @param {string} filePath path of the file to delete
 * @returns {Promise<boolean>} true if deleted, false if the file couldn't be found
 */
async function deleteFile(filePath) {
  try {
    await fs.promises.unlink(filePath);
    return true;
  } catch (err) {
    if (err.code === 'ENOENT') return false; // File not found
    throw err;
  }
}
54 |
/**
 * Gets a list of all entries in a directory.
 * @param {string} filePath path of the directory to list
 * @returns {Promise<string[]>} entry names, or an empty array if the directory couldn't be found
 */
async function readDir(filePath) {
  try {
    return await fs.promises.readdir(filePath);
  } catch (err) {
    if (err.code === 'ENOENT') return []; // Directory not found
    throw err;
  }
}
71 |
72 | const stat = promisify(fs.stat);
73 |
/**
 * Recursively traverses a directory and all its sub-directories.
 * @param {string} rootPath directory to traverse
 * @returns {Promise<string[]>} paths of all contained files (each prefixed with rootPath)
 */
async function walkDir(rootPath) {
  // Read the directory directly via fs.promises so a missing root still
  // yields an empty listing (matching readDir's ENOENT behaviour).
  let entries;
  try {
    entries = await fs.promises.readdir(rootPath);
  } catch (err) {
    if (err.code === 'ENOENT') return []; // Directory not found
    throw err;
  }
  const files = [];
  for (const entryName of entries) {
    const entryPath = path.join(rootPath, entryName);
    // stat() follows symlinks, preserving the previous behaviour.
    const entryStat = await fs.promises.stat(entryPath);
    if (entryStat.isDirectory()) {
      // Recurse into this subdirectory, adding all its paths
      files.push(...(await walkDir(entryPath)));
    } else {
      files.push(entryPath);
    }
  }
  return files;
}
90 |
91 | /** Suffix to add to key file names for the metadata file containing metadata and expiration information */
92 | const metaSuffix = '.meta.json';
93 |
class FileKVStore {
  /**
   * A KV store backed by the local file system. Each namespace maps to a
   * directory under `root`; each key maps to a file, with optional
   * expiration/metadata kept in a sibling `<key>.meta.json` file.
   */
  constructor(root = '.') {
    this.root = root;
  }

  static async getter(filePath) {
    const value = await readFile(filePath);
    if (value === null) return null; // Missing value file -> key absent

    const rawMeta = await readFile(filePath + metaSuffix);
    // No meta file means the key never expires and carries no metadata.
    if (rawMeta === null) return { value, expiration: -1, metadata: null };
    const { expiration, metadata } = JSON.parse(rawMeta);
    return { value, expiration, metadata };
  }

  static async putter(filePath, { value, expiration, metadata }) {
    await writeFile(filePath, value);

    const metaPath = filePath + metaSuffix;
    if (expiration === -1 && metadata === null) {
      // Nothing worth persisting: clear out meta left by earlier writes.
      await deleteFile(metaPath);
    } else {
      await writeFile(metaPath, JSON.stringify({ expiration, metadata }));
    }
  }

  static async remover(filePath) {
    // Remove the value file and its (possibly absent) meta file.
    await deleteFile(filePath);
    await deleteFile(filePath + metaSuffix);
  }

  static async lister(root, prefix, limit, startAfter) {
    const entries = [];
    for (const filePath of await walkDir(root)) {
      // Meta files are bookkeeping, not keys.
      if (filePath.endsWith(metaSuffix)) continue;
      // The key name is the path relative to root (strip root + separator).
      const name = filePath.substring(root.length + 1);
      const rawMeta = await readFile(path.join(root, name + metaSuffix));
      const { expiration, metadata } =
        rawMeta === null ? { expiration: -1, metadata: null } : JSON.parse(rawMeta);
      entries.push([name, { expiration, metadata }]);
    }
    return allLister(entries, prefix, limit, startAfter);
  }

  getClient(namespace) {
    const keyPath = (key) => path.join(this.root, namespace, key);
    return new KVNamespace({
      getter: async (key) => FileKVStore.getter(keyPath(key)),
      putter: async (key, data) => FileKVStore.putter(keyPath(key), data),
      remover: async (key) => FileKVStore.remover(keyPath(key)),
      lister: async (prefix, limit, startAfter) =>
        FileKVStore.lister(path.join(this.root, namespace), prefix, limit, startAfter),
    });
  }
}
168 |
169 | module.exports = { FileKVStore };
170 |
--------------------------------------------------------------------------------
/app/in-memory-kv-store.js:
--------------------------------------------------------------------------------
1 | const { KVNamespace, allLister } = require('./kv-namespace');
2 |
/**
 * A KV store that keeps every namespace's values in an in-process object.
 * Nothing is persisted; intended for tests and local development.
 */
class InMemoryKVStore {
  constructor() {
    // namespace -> { key -> { value, expiration, metadata } }
    this.values = {};
  }

  getClient(namespace) {
    if (!this.values[namespace]) this.values[namespace] = {};
    const store = this.values[namespace];
    return new KVNamespace({
      getter: async (key) => store[key] || null,
      putter: async (key, value) => (store[key] = value),
      remover: async (key) => delete store[key],
      lister: async (prefix, limit, startAfter) =>
        allLister(Object.entries(store), prefix, limit, startAfter),
    });
  }
}
19 |
20 | module.exports = { InMemoryKVStore };
21 |
--------------------------------------------------------------------------------
/app/kv-namespace.js:
--------------------------------------------------------------------------------
1 | const { TextDecoder, TextEncoder } = require('util');
2 |
3 | /**
4 | * @typedef {Object} KVValue
5 | * @property {string} value
6 | * @property {number} expiration
7 | * @property {(* | null)} metadata
8 | */
9 |
10 | /**
11 | * @typedef {Object} KVNamespaceOptions
12 | * @property {function(key: string): Promise} getter
13 | * @property {function(key: string, value: KVValue): Promise} putter
14 | * @property {function(key: string): Promise} remover
15 | * @property {function(prefix: string, limit: number, startAfter: string): Promise<{keys: (string | KVValue)[][], next: string}>} lister
16 | */
17 |
class KVNamespace {
  /**
   * Emulates a CloudFlare Workers KV namespace on top of pluggable storage
   * callbacks (see KVNamespaceOptions), adding typed reads, expiration and
   * metadata handling.
   */

  /**
   * @returns {number} seconds since the UNIX epoch
   */
  static getTimestamp() {
    return Math.round(Date.now() / 1000);
  }

  /**
   * @param {(string | number | undefined)} value
   * @returns {number} value as an integer, or -1 if it isn't one
   * @private
   */
  static _normaliseInteger(value) {
    if (typeof value === 'number') {
      return Math.round(value);
    } else if (typeof value === 'string') {
      // Explicit radix so strings are always treated as decimal
      const parsed = parseInt(value, 10);
      return isNaN(parsed) ? -1 : parsed;
    } else {
      return -1;
    }
  }

  /**
   * @param {KVNamespaceOptions} options storage callbacks backing this namespace
   */
  constructor(options) {
    const { getter, putter, remover, lister } = options;
    this.getter = getter;
    this.putter = putter;
    this.remover = remover;
    this.lister = lister;
  }

  /**
   * @param {string} key
   * @param {("text" | "json" | "arrayBuffer" | "stream")} [type]
   * @returns {Promise<* | null>} the value only (metadata discarded)
   */
  async get(key, type) {
    return (await this.getWithMetadata(key, type)).value;
  }

  // TODO: support "stream" type
  /**
   * @param {string} key
   * @param {("text" | "json" | "arrayBuffer" | "stream")} [type]
   * @returns {Promise<{value: (* | null), metadata: (* | null)}>}
   * @throws {Error} if type is "stream" (not supported)
   */
  async getWithMetadata(key, type) {
    // Get value (with metadata/expiration); if we couldn't find anything, return null
    const fullValue = await this.getter(key);
    if (fullValue === null) {
      return { value: null, metadata: null };
    }
    // Extract out value, expiration and metadata
    const { value, expiration, metadata } = fullValue;

    // Expired keys are lazily deleted on read and reported as missing
    if (expiration !== -1 && expiration < KVNamespace.getTimestamp()) {
      await this.delete(key);
      return { value: null, metadata: null };
    }

    // Get correctly typed value, defaulting to text
    let typedValue = value;
    if (type === 'json') {
      typedValue = JSON.parse(value);
    } else if (type === 'arrayBuffer') {
      const buffer = typeof value === 'string' ? new TextEncoder().encode(value) : value;
      // The API expects an ArrayBuffer to be returned, but Workers Sites expects there to be a length property (equal
      // to the number of bytes in the buffer) which doesn't exist by default on ArrayBuffers. So we add a read-only
      // length property equal to the byteLength.
      Object.defineProperty(buffer, 'length', {
        value: buffer.byteLength,
        writable: false,
      });
      typedValue = buffer;
    } else if (type === 'stream') {
      throw new Error('Type "stream" is not supported!');
    }

    return { value: typedValue, metadata };
  }

  // TODO: support FormData and ReadableStream's as values
  /**
   * @param {string} key
   * @param {(string | ArrayBuffer)} value
   * @param {{expiration: (string | number | undefined), expirationTtl: (string | number | undefined), metadata: (* | undefined)}} [options]
   * @returns {Promise}
   */
  async put(key, value, options) {
    options = options || {};

    // Convert value to string if it isn't already
    if (value instanceof ArrayBuffer) {
      value = new TextDecoder().decode(value);
    }

    // Normalise expiration: an explicit TTL takes precedence over an absolute time
    let expiration = KVNamespace._normaliseInteger(options.expiration);
    const expirationTtl = KVNamespace._normaliseInteger(options.expirationTtl);
    if (expirationTtl !== -1) {
      expiration = KVNamespace.getTimestamp() + expirationTtl;
    }

    // Normalise metadata (null means "no metadata")
    const metadata = options.metadata === undefined ? null : options.metadata;

    // Store value, expiration and metadata
    await this.putter(key, { value, expiration, metadata });
  }

  /**
   * @param {string} key
   * @returns {Promise}
   */
  async delete(key) {
    return this.remover(key);
  }

  /**
   * @param {{prefix: (string | undefined), limit: (number | undefined), cursor: (string | undefined)}} [options]
   * @returns {Promise<{keys: { name: string, expiration: (number | undefined), metadata: (* | undefined) }[], list_complete: boolean, cursor: string}>}
   * @throws {Error} if limit is <= 0
   */
  async list(options) {
    // Get options
    options = options || {};
    const prefix = options.prefix || '';
    const limit = options.limit === undefined ? 1000 : options.limit;
    if (limit <= 0) {
      throw new Error('Invalid limit: must be > 0');
    }
    // Cursors are just base64-encoded "start after this key" markers
    const startAfter = options.cursor ? Buffer.from(options.cursor, 'base64').toString('utf8') : '';

    // Get all keys
    const { keys, next } = await this.lister(prefix, limit, startAfter);

    // Reshape results, dropping (and lazily deleting) expired keys
    const timestamp = KVNamespace.getTimestamp();
    const expiredKeys = [];
    const filteredKeys = keys
      .map(([name, fullValue]) => {
        // Extract out value, expiration and metadata
        const { expiration, metadata } = fullValue;
        return {
          name,
          expiration: expiration === -1 ? undefined : expiration,
          metadata: metadata == null ? undefined : metadata,
        };
      })
      .filter(({ name, expiration }) => {
        // Check timestamp
        if (expiration !== undefined && expiration < timestamp) {
          expiredKeys.push(name);
          return false;
        }
        return true;
      });

    // Delete expired keys
    for (const expiredKey of expiredKeys) {
      await this.delete(expiredKey);
    }

    // Convert next to something that looks more cursor-like
    const cursor = next === '' ? '' : Buffer.from(next, 'utf8').toString('base64');

    return { keys: filteredKeys, list_complete: next === '', cursor };
  }
}
191 |
/**
 * Helper for building a lister where we have all the data in the form of an array of [key, { expiration, metadata }]'s.
 * Used by InMemoryKVStore and FileKVStore.
 */
function allLister(all, prefix, limit, startAfter) {
  // Keep only keys with the requested prefix, in lexicographic order.
  const matched = all
    .filter(([key]) => key.startsWith(prefix))
    .sort(([keyA], [keyB]) => keyA.localeCompare(keyB));

  // Work out where the requested page starts within the sorted matches.
  let start = 0;
  if (startAfter !== '') {
    const anchor = matched.findIndex(([key]) => key === startAfter);
    // Unknown cursor: nothing to return.
    if (anchor === -1) return { keys: [], next: '' };
    start = anchor + 1; // the page begins AFTER the cursor key
  }
  const end = start + limit;

  const page = matched.slice(start, end);
  // If more keys remain beyond this page, hand back the last returned key as the cursor.
  const next = end < matched.length ? page[page.length - 1][0] : '';
  return { keys: page, next };
}
218 |
219 | module.exports = { KVNamespace, allLister };
220 |
--------------------------------------------------------------------------------
/app/minio-kv-store.js:
--------------------------------------------------------------------------------
1 | const Minio = require('minio');
2 | const { KVNamespace } = require('./kv-namespace');
3 |
class MinioKVStore {
  /**
   * A KV store backed by a Minio (S3-compatible) server. Each namespace maps
   * to a bucket; expiration and metadata are stored as object metadata.
   * @param {Minio.Client} client a configured Minio client
   */
  constructor(client) {
    // BUGFIX: the previous check `!client instanceof Minio.Client` negated
    // `client` BEFORE applying instanceof, so it could never throw.
    if (!(client instanceof Minio.Client)) throw new Error('client must be configured instance of Minio.Client');
    this.client = client;
  }

  getClient(namespace) {
    // Make sure the namespace is a valid bucket name
    namespace = namespace.trim().toLowerCase().replace(/_/g, '-').toString();
    // Lazily ensure the bucket exists; every operation awaits this first.
    const bucketPromise = (async () => {
      if (!(await this.client.bucketExists(namespace))) await this.client.makeBucket(namespace);
    })();

    return new KVNamespace({
      getter: async (key) => {
        await bucketPromise;
        try {
          // Get expiration and metadata
          const stat = await this.client.statObject(namespace, key);
          let expiration = parseInt(stat.metaData.expiration, 10);
          if (isNaN(expiration)) expiration = -1;
          const metadata = JSON.parse(stat.metaData.metadata);

          // Get value: accumulate the object stream as UTF-8 text.
          // (Avoids the async-promise-executor anti-pattern of the old code.)
          const stream = await this.client.getObject(namespace, key);
          const value = await new Promise((resolve, reject) => {
            let data = '';
            stream.on('data', (chunk) => (data += chunk.toString('utf8')));
            stream.on('error', reject);
            stream.on('end', () => resolve(data));
          });
          return { value, expiration, metadata };
        } catch (e) {
          // A missing object simply means the key doesn't exist
          if (e.code === 'NotFound' || e.code === 'NoSuchKey') return null;
          throw e;
        }
      },
      putter: async (key, { value, expiration, metadata }) => {
        await bucketPromise;
        return this.client.putObject(namespace, key, value, undefined, {
          // Minio metadata values need to be strings
          expiration: expiration.toString(),
          metadata: JSON.stringify(metadata),
        });
      },
      remover: async (key) => {
        await bucketPromise;
        return this.client.removeObject(namespace, key);
      },
      lister: async (prefix, limit, start) => {
        await bucketPromise;
        // The `true` here enables recursive mode, ensuring keys containing '/' are returned
        const stream = await this.client.extensions.listObjectsV2WithMetadata(namespace, prefix, true, start);
        return new Promise((resolve, reject) => {
          const objects = [];
          let next = '';
          stream.on('data', (object) => {
            if (objects.length >= limit) {
              // If this pushes us over the limit, set next and stop reading more objects
              if (next === '') {
                next = objects[objects.length - 1][0];
                stream.destroy();
              }
              return;
            }

            // Default metadata
            const value = { expiration: -1, metadata: null };
            if (object.metadata) {
              // listObjectsV2WithMetadata returns metadata in HTTP header format.
              // Custom headers are prefixed with "X-Amz-Meta-".
              // Each header key maps to an array, which will always be length 1 for us.
              if (object.metadata['X-Amz-Meta-Expiration']) {
                const expiration = parseInt(object.metadata['X-Amz-Meta-Expiration'][0], 10);
                if (!isNaN(expiration)) value.expiration = expiration;
              }
              if (object.metadata['X-Amz-Meta-Metadata']) {
                value.metadata = JSON.parse(object.metadata['X-Amz-Meta-Metadata'][0]);
              }
            }
            // Add object in the form [key, { expiration, metadata }]
            objects.push([object.name, value]);
          });
          stream.on('error', reject);
          // Once all objects have been processed, resolve with the array
          stream.on('end', () => resolve({ keys: objects, next }));
        });
      },
    });
  }
}
97 |
/**
 * Builds Minio client options from environment variables. Endpoint and
 * credentials are mandatory; the rest fall back to defaults.
 * @param {Object} env typically process.env
 * @returns {Object} options suitable for `new Minio.Client(...)`
 * @throws {Error} if a required variable is missing
 */
function getEnvOpts(env) {
  const opts = {
    endPoint: env.MINIO_ENDPOINT,
    port: env.MINIO_PORT || 9000,
    // SSL is on unless explicitly disabled with the string "false"
    useSSL: (env.MINIO_USE_SSL !== 'false'),
    accessKey: env.MINIO_ACCESS_KEY,
    secretKey: env.MINIO_SECRET_KEY
  };

  // Optional settings are only included when set in the environment.
  const optional = {
    region: env.MINIO_REGION,
    transport: env.MINIO_TRANSPORT,
    sessionToken: env.MINIO_SESSIONTOKEN,
    partSize: env.MINIO_PARTSIZE
  };
  for (const [name, value] of Object.entries(optional)) {
    if (value) opts[name] = value;
  }

  // Every collected option must actually have a value.
  for (const [k, v] of Object.entries(opts)) {
    if (v === undefined) throw new Error(`Minio argument: ${k} missing`);
  }
  return opts;
}
116 |
117 | module.exports = { MinioKVStore, Minio, getEnvOpts };
118 |
--------------------------------------------------------------------------------
/app/server.js:
--------------------------------------------------------------------------------
1 | const express = require("express");
2 | const bodyParser = require("body-parser");
3 | const { InMemoryKVStore } = require("./in-memory-kv-store");
4 | const { Worker } = require("./worker");
5 | const { pipeline } = require('stream');
6 | const { promisify } = require('util');
7 | const streamPipeline = promisify(pipeline);
8 |
/**
 * Executes `worker` for an incoming Express request and copies the resulting
 * status, headers and body onto the Express response.
 */
async function callWorker(worker, req, res, opts) {
  const url = `${req.protocol}://${req.get("host")}${req.originalUrl}`;

  const hasBody = !["GET", "HEAD"].includes(req.method);
  const response = await worker.executeFetchEvent(url, {
    headers: req.headers,
    method: req.method,
    body: hasBody ? req.body : undefined,
    // Last segment of the remote address, e.g. "::ffff:1.2.3.4" -> "1.2.3.4"
    ip: req.connection.remoteAddress.split(":").pop(),
    country: opts.country
  });

  res.status(response.status);
  for (const [name, value] of response.headers) {
    res.set(name, value);
  }

  // If the body is a stream then stream it; otherwise buffer it and finish.
  if (typeof response.body.on === "function") {
    return streamPipeline(response.body, res);
  }
  const data = await response.arrayBuffer();
  res.end(Buffer.from(data), "binary");
}
33 |
/**
 * Instantiates one KV client per namespace name, keyed by that name.
 */
function buildKVStores(kvStoreFactory, kvStores) {
  return Object.fromEntries(kvStores.map((name) => [name, kvStoreFactory.getClient(name)]));
}
40 |
/**
 * Creates an Express app that runs `workerContent` for every request,
 * emulating the CloudFlare Workers runtime locally.
 */
function createApp(workerContent, opts = {}) {
  let workersByOrigin = {};
  // Build the KV bindings up front, defaulting to the in-memory store.
  const factory = opts.kvStore ? opts.kvStore() : new InMemoryKVStore();
  const kvStores = buildKVStores(factory, opts.kvStores || []);

  const app = express();
  app.use(bodyParser.raw({ type: "*/*", limit: "100GB" }));
  app.use(async (req, res) => {
    try {
      // One worker instance per origin, created lazily on first request.
      const origin = req.headers.host;
      if (!workersByOrigin[origin]) {
        workersByOrigin[origin] = new Worker(origin, workerContent, { ...opts, kvStores });
      }
      await callWorker(workersByOrigin[origin], req, res, { country: opts.country });
    } catch (e) {
      console.warn(e);
      res.status(520);
      res.end("Something Went Wrong!");
    }
  });

  // Hot-swap the worker script; existing per-origin workers are discarded.
  app.updateWorker = (contents) => {
    workerContent = contents;
    workersByOrigin = {};
  };
  // Merge in new options and force workers to be rebuilt.
  app.updateOpts = (newOpts) => {
    opts = Object.assign({}, opts, newOpts);
    workersByOrigin = {};
  };
  // Expose the KV stores so callers/tests can seed and inspect data.
  app.stores = kvStores;

  return app;
}
72 |
73 | module.exports = { createApp };
74 |
--------------------------------------------------------------------------------
/app/test-app.js:
--------------------------------------------------------------------------------
1 | const { createApp } = require("./server");
2 | const http = require("http");
3 |
/**
 * Wraps createApp in an HTTP server plus a companion upstream server, wired
 * together so tests can exercise worker + origin end-to-end.
 */
function createTestApp(workerContent, upstreamApp, opts = {}) {
  const app = createApp(workerContent, opts);
  const server = http.createServer(app);
  const upstreamServer = http.createServer(upstreamApp);

  // Start the upstream on a random port first, then point the worker at it.
  server.listen = function(...args) {
    upstreamServer.listen(0);
    app.updateOpts({ upstreamHost: `127.0.0.1:${upstreamServer.address().port}` });
    return http.Server.prototype.listen.apply(this, args);
  };

  // Closing the test server also tears down the upstream.
  server.close = function(...args) {
    upstreamServer.close();
    return http.Server.prototype.close.apply(this, args);
  };

  // Expose the KV stores so tests can seed/inspect data.
  server.stores = app.stores;

  return server;
}
24 |
25 | module.exports = { createTestApp };
26 |
--------------------------------------------------------------------------------
/app/worker.js:
--------------------------------------------------------------------------------
1 | const { createContext, Script } = require("vm");
2 | const { Request, Response, Headers } = require("node-fetch");
3 | const { URL } = require("url");
4 | const fetch = require("node-fetch");
5 | const atob = require("atob");
6 | const btoa = require("btoa");
7 | const crypto = require("crypto").webcrypto;
8 | const { TextDecoder, TextEncoder } = require("util");
9 | const { caches } = require("./caches");
10 |
/** Removes the final character of a string (e.g. the ":" in "https:"). */
function chomp(str) {
  // String.prototype.substr is deprecated; slice(0, -1) is the modern equivalent.
  return str.slice(0, -1);
}
14 |
/**
 * Builds the Request a CloudFlare worker would receive: the caller's options
 * plus the CF-specific and standard proxy headers the edge normally injects.
 */
function buildRequest(url, opts) {
  const {
    country = "DEV",
    ip = "127.0.0.1",
    ray = `${Math.floor(1000000000000000 + Math.random() * 9000000000000000)}`,
    ...requestOpts
  } = opts;
  const request = new Request(url, { redirect: "manual", ...requestOpts });
  const { headers } = request;
  // Protocol without its trailing ":" (e.g. "https:" -> "https")
  const scheme = chomp(new URL(request.url).protocol);

  // CF Specific Headers
  headers.set("CF-Ray", ray);
  headers.set("CF-Visitor", JSON.stringify({ scheme }));
  headers.set("CF-IPCountry", country);
  headers.set("CF-Connecting-IP", ip);
  headers.set("X-Real-IP", ip);

  // General Proxy Headers
  headers.append("X-Forwarded-For", ip);
  headers.append("X-Forwarded-Proto", scheme);

  return new Request(request, { headers });
}
34 |
/**
 * Minimal stand-in for the Workers runtime FetchEvent: records the response
 * promise from respondWith(), background work from waitUntil(), and whether
 * passThroughOnException() was requested.
 */
class FetchEvent {
  constructor(request) {
    this.responsePromise = null;
    this.waitEvents = [];
    this.type = "fetch";
    this.request = request;
    // false until passThroughOnException() installs the origin fallback
    this.exceptionHandler = false;
  }

  /** Registers background work the event should stay alive for. */
  waitUntil(e) {
    this.waitEvents.push(e);
  }

  /** Registers the promise that will produce the event's response. */
  respondWith(e) {
    this.responsePromise = e;
  }

  /** On worker exceptions, fall back to fetching the origin directly. */
  passThroughOnException() {
    this.exceptionHandler = this.__originResponse;
  }

  /** Resolves to the response once it and all waitUntil() work settle. */
  async __response() {
    const settled = await Promise.all([this.responsePromise, ...this.waitEvents]);
    return settled[0];
  }

  /** Fetches the original request from the origin, bypassing the worker. */
  async __originResponse() {
    return fetch(this.request);
  }
}
66 |
class Worker {
  /**
   * Emulates a single CloudFlare worker: evaluates the script in a vm
   * sandbox and dispatches fetch (and custom) events into it.
   *
   * @param {string} origin host the worker serves (e.g. "foo.com")
   * @param {string} workerContents the worker script source
   * @param {Object} [opts] supports upstreamHost, kvStores (bindings) and env (extra globals)
   */
  constructor(origin, workerContents, opts = {}) {
    const { upstreamHost, kvStores = {}, env = {} } = opts;
    // Default fetch listener: proxy the request straight to the upstream.
    this.listeners = {
      fetch: e => e.respondWith(this.fetchUpstream(e.request))
    };
    this.upstreamHost = upstreamHost;
    this.origin = origin;

    this.evaluateWorkerContents(workerContents, kvStores, env);
  }

  /**
   * Runs the worker script inside a vm context populated with the subset of
   * Workers runtime globals this emulator supports.
   */
  evaluateWorkerContents(workerContents, kvStores, env) {
    const context = {
      // From fetch
      Request,
      Response,
      Headers,

      // URL Standards
      URL,
      URLSearchParams,

      // bas64
      atob,
      btoa,

      // Crypto
      crypto,
      TextDecoder,
      TextEncoder,

      // Debugging
      console,

      // Async
      setTimeout,
      setInterval,
      clearTimeout,
      clearInterval,

      // Cache stubs
      caches
    };
    const script = new Script(workerContents);
    // KV bindings and env vars become globals inside the sandbox; fetch and
    // the event functions are bound to this Worker so the script talks back
    // to the emulator. Later Object.assign sources win on name collisions.
    script.runInContext(
      createContext(
        Object.assign(context, kvStores, env, {
          fetch: this.fetchUpstream.bind(this),
          addEventListener: this.addEventListener.bind(this),
          triggerEvent: this.triggerEvent.bind(this),
          self: context
        })
      )
    );
  }

  /**
   * fetch() implementation handed to the worker. Requests addressed to the
   * worker's own origin are rewritten to the configured upstream host; any
   * `cf` init options are forwarded as CF-* headers.
   */
  fetchUpstream(urlOrRequest, init) {
    let request = urlOrRequest instanceof Request ? urlOrRequest : new Request(urlOrRequest, init);

    const url = new URL(request.url);
    const originalHost = url.host;

    if (originalHost === this.origin) {
      url.host = this.upstreamHost;
      request = new Request(url, request);
      // Preserve the original Host header so the upstream can route correctly.
      request.headers.set("Host", originalHost);
    }

    if (init && init.cf) {
      for (var key in init.cf) {
        var val = init.cf[key];
        // camelCase -> SCREAMING-KEBAB, e.g. cacheTtl -> CF-CACHE-TTL
        key = key.split(/(?=[A-Z])/).join('-').toUpperCase();
        request.headers.set(`CF-${key}`, val);
      }
    }

    return fetch(request);
  }

  /**
   * Dispatches a fetch event for `url` and returns the worker's response.
   * If the script threw after calling passThroughOnException(), falls back
   * to fetching the origin directly; otherwise the exception propagates.
   */
  async executeFetchEvent(url, opts = {}) {
    const fetchEvent = new FetchEvent(buildRequest(url, opts));
    try {
      this.triggerEvent("fetch", fetchEvent);
      return await fetchEvent.__response();
    } catch (ex) {
      if (fetchEvent && fetchEvent.exceptionHandler && fetchEvent.exceptionHandler instanceof Function) {
        return fetchEvent.exceptionHandler();
      } else {
        throw ex;
      }
    }
  }

  /** Registers (or replaces) the single listener for an event type. */
  addEventListener(event, listener) {
    this.listeners[event] = listener;
  }

  /** Invokes the listener for `event`, forwarding any extra arguments. */
  triggerEvent(event) {
    return this.listeners[event].apply(this, Array.from(arguments).slice(1));
  }
}
169 |
170 | module.exports = { Worker };
171 |
--------------------------------------------------------------------------------
/azure-pipelines-template.yml:
--------------------------------------------------------------------------------
1 | jobs:
2 | - job: ${{ parameters.name }}
3 | pool:
4 | vmImage: ${{ parameters.vmImage }}
5 | strategy:
6 | matrix:
7 | node_16_x:
8 | node_version: 16.x
9 | maxParallel: 3
10 | steps:
11 | - task: NodeTool@0
12 | inputs:
13 | versionSpec: $(node_version)
14 | displayName: 'Install Node.js'
15 | - task: UsePythonVersion@0
16 | inputs:
17 | versionSpec: '2.7.x'
18 | addToPath: true
19 |
20 | - script: npm install
21 | displayName: 'Install dependencies'
22 | - script: npm run test-ci
23 | displayName: 'Run tests'
24 | - task: PublishTestResults@2
25 | displayName: 'Publish Test Results'
26 | inputs:
27 | testResultsFiles: 'junit.xml'
28 | testRunTitle: TestRun ${{ parameters.name }} $(node_version)
29 | - task: PublishCodeCoverageResults@1
30 | displayName: 'Publish code coverage results'
31 | inputs:
32 | codeCoverageTool: 'cobertura'
33 | summaryFileLocation: $(Build.Repository.LocalPath)/coverage/cobertura-coverage.xml
34 | reportDirectory: $(Build.Repository.LocalPath)/coverage
35 |
--------------------------------------------------------------------------------
/azure-pipelines.yml:
--------------------------------------------------------------------------------
1 | jobs:
2 | - template: azure-pipelines-template.yml
3 | parameters:
4 | name: macOS
5 | vmImage: macOS-10.13
6 |
7 | - template: azure-pipelines-template.yml
8 | parameters:
9 | name: Linux
10 | vmImage: ubuntu-16.04
11 |
12 | - template: azure-pipelines-template.yml
13 | parameters:
14 | name: Windows
15 | vmImage: vs2017-win2016
16 |
--------------------------------------------------------------------------------
/cli.js:
--------------------------------------------------------------------------------
#!/usr/bin/env node

// CLI entry point: all startup logic lives in start.js
require("./start.js")
4 |
--------------------------------------------------------------------------------
/examples/unit-test-a-worker/__tests__/worker_spec.js:
--------------------------------------------------------------------------------
1 | const { createTestApp } = require("cloudflare-worker-local");
2 | const supertest = require("supertest");
3 | const express = require("express");
4 | const fs = require("fs");
5 |
6 | const workerContent = fs.readFileSync(`${__dirname}/../worker.js`);
7 |
describe("My New Worker", () => {
  it("Adds a header on 200 responses", async () => {
    // Stub origin that answers /route with a 200
    const upstreamApp = express();
    upstreamApp.get("/route", (req, res) => res.end("Success"));
    await supertest(createTestApp(workerContent, upstreamApp))
      .get("/route")
      .expect("Foo", "Bar")
      .expect(200, "Success");
  });

  it("Adds a different header on 404", async () => {
    // all routes are 404
    const upstreamApp = express();

    await supertest(createTestApp(workerContent, upstreamApp))
      .get("/route")
      .expect("Foo", "Not Bar")
      .expect(404);
  });
});
28 |
--------------------------------------------------------------------------------
/examples/unit-test-a-worker/worker.js:
--------------------------------------------------------------------------------
1 | // Add any more global vars (like Request, Response) to the below list as they are used
2 | /* global addEventListener fetch Response Headers */
3 |
// Entry point: every fetch event is answered by fetchAndAddHeader.
addEventListener("fetch", event => {
  event.respondWith(fetchAndAddHeader(event.request));
});
7 |
/**
 * Proxies the request upstream and stamps a "Foo" header on the reply:
 * "Bar" when the upstream answered 200, "Not Bar" for any other status.
 *
 * @param {Request} request - the incoming fetch-event request
 * @returns {Promise<Response>} upstream response with the extra header
 */
async function fetchAndAddHeader(request) {
  const upstream = await fetch(request);

  // Copy the headers — upstream Response headers are immutable.
  const headers = new Headers(upstream.headers);
  headers.set("Foo", upstream.status === 200 ? "Bar" : "Not Bar");

  return new Response(upstream.body, {
    headers,
    status: upstream.status,
    statusText: upstream.statusText,
  });
}
25 |
--------------------------------------------------------------------------------
/examples/wrangler.toml:
--------------------------------------------------------------------------------
1 | name = "example"
2 | type = "javascript"
3 | workers_dev = false
4 | route = "example.com/*"
5 | account_id = "account id"
6 | zone_id = "zone id"
7 |
8 | kv-namespaces = [
9 | { binding = "wranglerKV", id = "06779da6940b431db6e566b4846d64db" }
10 | ]
11 |
12 | [vars]
13 | variable1 = "somevalue"
14 |
15 | [secrets]
16 | mysecret = "not4u"
17 |
18 | [secrets.foo]
19 | bar = 'shhh'
20 |
--------------------------------------------------------------------------------
/index.js:
--------------------------------------------------------------------------------
// Public API surface of the package: the express app factory, the Worker
// sandbox, and the supertest-friendly test-app factory.
const { createApp } = require("./app/server");
const { Worker } = require("./app/worker");
const { createTestApp } = require("./app/test-app");

module.exports = {
  Worker,
  createApp,
  createTestApp
};
10 |
--------------------------------------------------------------------------------
/lib/wrangler.js:
--------------------------------------------------------------------------------
1 | "use strict";
2 | exports.__esModule = true;
3 | var fs = require("fs");
4 | var TOML = require("@iarna/toml");
5 | var lodash_get_1 = require("lodash.get");
6 | var lodash_merge_1 = require("lodash.merge");
7 | var placeholder = /(?",
16 | "license": "Apache-2.0",
17 | "bugs": {
18 | "url": "https://github.com/gja/cloudflare-worker-local/issues"
19 | },
20 | "homepage": "https://github.com/gja/cloudflare-worker-local#readme",
21 | "dependencies": {
22 | "@iarna/toml": "^2.2.3",
23 | "atob": "^2.1.2",
24 | "body-parser": "^1.18.3",
25 | "btoa": "^1.2.1",
26 | "express": "^4.16.4",
27 | "lodash.get": "^4.4.2",
28 | "lodash.merge": "^4.6.2",
29 | "mkdirp": "^1.0.4",
30 | "node-fetch": "^2.6.7"
31 | },
32 | "optionalDependencies": {
33 | "minio": "^7.0.15"
34 | },
35 | "devDependencies": {
36 | "jest": "^24.7.1",
37 | "jest-junit": "^6.3.0",
38 | "rimraf": "^3.0.2",
39 | "standard-version": "^8.0.1",
40 | "supertest": "^3.3.0"
41 | },
42 | "engines": {
43 | "node": ">=15.0.0"
44 | },
45 | "bin": {
46 | "cloudflare-worker-local": "./cli.js"
47 | },
48 | "jest": {
49 | "testPathIgnorePatterns": [
50 | "/node_modules/",
51 | "/examples"
52 | ]
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
/sample-worker.js:
--------------------------------------------------------------------------------
// Entry point: answer every fetch event via fetchAndApply.
addEventListener('fetch', event => {
  event.respondWith(fetchAndApply(event.request))
})
4 |
/**
 * Blocks requests whose user agent mentions "annoying_robot" with a 403;
 * all other requests are forwarded upstream unchanged.
 *
 * @param {Request} request - the incoming fetch-event request
 * @returns {Promise<Response>} 403 page for robots, upstream response otherwise
 */
async function fetchAndApply(request) {
  // headers.get() yields null when the header is absent; fall back to ''
  // so .includes() cannot throw on requests without a user-agent header.
  const userAgent = request.headers.get('user-agent') || ''

  if (userAgent.includes('annoying_robot')) {
    return new Response('Sorry, this page is not available.',
      { status: 403, statusText: 'Forbidden' })
  }

  return fetch(request)
}
13 |
--------------------------------------------------------------------------------
/start.js:
--------------------------------------------------------------------------------
// Launcher script: forks NUM_WORKERS cluster workers, each of which runs the
// worker script from argv[2] in front of the upstream host in argv[3].
const fs = require("fs");
const cluster = require("cluster");
const process = require("process");
const wrangler = require("./lib/wrangler");

const { InMemoryKVStore } = require("./app/in-memory-kv-store");
const { FileKVStore } = require("./app/file-kv-store");

// argv: [node, script, workerPath, upstreamHost, port, wranglerToml?, env?]
if (process.argv.length < 5) {
  console.log("Usage: cloudflare-worker-local /path/to/worker.js host.to.forward.request.to:3000 [/path/to/wrangler.toml [env]]");
  process.exit(-1);
}

// KV backend factory: in-memory by default, Minio or file-backed when the
// corresponding env var is set (KV_FILE_ROOT wins over MINIO_ENDPOINT).
let kvStore = ()=>new InMemoryKVStore();
if (process.env.MINIO_ENDPOINT) {
  // Required lazily so the optional "minio" dependency is only loaded on demand.
  const { MinioKVStore, Minio, getEnvOpts } = require('./app/minio-kv-store');
  kvStore = ()=>new MinioKVStore(new Minio.Client(getEnvOpts(process.env)));
}
if (process.env.KV_FILE_ROOT) {
  kvStore = ()=>new FileKVStore(process.env.KV_FILE_ROOT);
}

if (cluster.isMaster) {
  // Master: fork the serving processes and supervise them.
  for (var i = 0; i < (process.env.NUM_WORKERS || 1); i++) {
    cluster.fork();
  }

  // SIGHUP on the master fans out to every worker (each reloads its script).
  process.on("SIGHUP", () => {
    for (let i in cluster.workers) {
      cluster.workers[i].process.kill("SIGHUP");
    }
  });

  // Replace any worker that dies so serving capacity stays constant.
  cluster.on("exit", function(worker, code, signal) {
    console.log("worker " + worker.process.pid + " died");
    cluster.fork();
  });
} else {
  const { createApp } = require(".");
  const port = process.argv[4];
  // .split(",") will return [""] when KV_NAMESPACES isn't set, so filter out empty strings
  let kvStores = (process.env.KV_NAMESPACES || "").split(",").filter(name => name !== "");
  let env = {};
  if (process.argv[5]) {
    // Import config from provided wrangler.toml
    const config = wrangler.loadConfig(process.argv[5], process.argv[6]);
    wrangler.toJSON(config);
    // vars and secrets both become plain bindings on the worker's global env.
    env = {...config.vars, ...config.secrets};
    if (Array.isArray(config['kv-namespaces'])) kvStores = kvStores.concat(config['kv-namespaces'].map(n=>n.binding));
    // Add Workers Sites KV namespace and manifest to env if it's enabled
    if (config.site && config.site.bucket) {
      console.log(`Serving Workers Site from ${config.site.bucket}`);
      // Workers Sites expects a KV namespace named __STATIC_CONTENT mapping file name keys to contents
      env["__STATIC_CONTENT"] = new FileKVStore().getClient(config.site.bucket);
      // Workers Sites also expects an object named __STATIC_CONTENT_MANIFEST mapping file names to file names
      // containing an asset hash for edge caching. Since we stub caching out, we can just use the original file name
      // as the file name with hash, so we set this to a proxy with returns a value equal to each requested key.
      env["__STATIC_CONTENT_MANIFEST"] = new Proxy({}, {get: (target, prop) => prop});
    }
  }
  const opts = { upstreamHost: process.argv[3], kvStores, kvStore, env, country: process.env.COUNTRY };
  const app = createApp(fs.readFileSync(process.argv[2]), opts);

  // Hot reload: on SIGHUP, re-read the worker script and swap it in place.
  process.on("SIGHUP", () => {
    fs.readFile(process.argv[2], (_, newWorkerContent) => {
      console.log("Updating Worker");
      app.updateWorker(newWorkerContent);
    });
  });

  try {
    app.listen(port, function() {
      console.log(`Example app listening on port ${port}!`);
    });
  } catch (e) {
    // Fatal startup error: log it and let the master fork a replacement.
    console.error("Worker died - Aborting");
    console.error(e.stack);
    cluster.worker.disconnect();
    process.exit();
  }
}
82 |
--------------------------------------------------------------------------------