├── .gitignore
├── .jsdb
│   ├── db
│   │   ├── bundles
│   │   │   ├── rules.js
│   │   │   └── triggers.js
│   │   ├── default
│   │   │   └── rules.js
│   │   └── users
│   │       ├── indexes.json
│   │       └── rules.js
│   └── hosting
│       └── index.html
├── .npmignore
├── .nvmrc
├── Dockerfile
├── LICENSE.txt
├── README.md
├── Replicated.Dockerfile
├── authApp.js
├── dbApp.js
├── functionsApp.js
├── lifecycleMiddleware.js
├── litestream.yml
├── opHandlersBetterSqlite.js
├── opHandlersSqlite.js
├── operationFallback.js
├── package-lock.json
├── package.json
├── parser.js
├── run.sh
├── server.js
├── tests
│   ├── .jsdb
│   │   └── functions
│   │       ├── helloWorld.js
│   │       └── remoteInserts.js
│   └── api.test.js
└── vm.js
/.gitignore:
--------------------------------------------------------------------------------
1 | /node_modules/
2 | .idea
3 | .service-accounts
4 | .env
5 | .env.*
6 | !.env.example
7 | /jsdbbundle/
8 | /jsdbbundle.zip
9 | /uploads/
10 | /.jsdb-temp/
11 | /.service-accounts/
12 | /api.test.js
13 | /service-accounts/
14 | /database.sqlite
15 |
--------------------------------------------------------------------------------
/.jsdb/db/bundles/rules.js:
--------------------------------------------------------------------------------
/**
 * Access rule for the `bundles` collection.
 *
 * Requires every request to carry an `X-API-Key` header equal to the
 * `API_KEY` environment variable.
 *
 * @param {{req: {get: (name: string) => string|undefined}}} ctx - `req` is
 *   assumed to be an Express-style request (has `.get(header)`) — confirm.
 * @returns {true} when the key matches.
 * @throws {Error} when `API_KEY` is unconfigured or the header does not match.
 */
export default function bundleRules({req}) {
    const apiKey = req.get('X-API-Key');
    // Fail closed: the original `apiKey !== process.env.API_KEY` check let
    // every request through when API_KEY was unset AND the header was
    // missing, because `undefined !== undefined` is false.
    if (!process.env.API_KEY || apiKey !== process.env.API_KEY) {
        throw new Error('Invalid API Key');
    }
    return true;
}
--------------------------------------------------------------------------------
/.jsdb/db/bundles/triggers.js:
--------------------------------------------------------------------------------
/**
 * Trigger that runs when a document is pushed into the `bundles` collection:
 * hands the uploaded payload (base64 text at `value.file.string`) to the
 * lifecycle middleware for installation.
 *
 * @param {{req: object, value: {file: {string: string}}, result: object}} ctx
 */
export async function push({req, value, result}) {
    const middleware = await import("../../../lifecycleMiddleware.js");
    const encodedBundle = value.file.string;
    await middleware.importFromBase64(encodedBundle);
}
--------------------------------------------------------------------------------
/.jsdb/db/default/rules.js:
--------------------------------------------------------------------------------
// Access rule for the default collection: every operation is permitted.
// Change to `false` (or add real checks) before going to production.
export default function () {
    return true;
}
--------------------------------------------------------------------------------
/.jsdb/db/users/indexes.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "unique": true,
4 | "fields" : ["credentials.email"]
5 | }
6 | ]
--------------------------------------------------------------------------------
/.jsdb/db/users/rules.js:
--------------------------------------------------------------------------------
// Access rule for the `users` collection: every client operation is denied.
export default function () {
    return false;
}
--------------------------------------------------------------------------------
/.jsdb/hosting/index.html:
--------------------------------------------------------------------------------
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <title>JSDB Project</title>
</head>
<body>
    Add your static files to the /.jsdb/hosting folder
</body>
</html>
--------------------------------------------------------------------------------
/.npmignore:
--------------------------------------------------------------------------------
1 | /service-accounts/
--------------------------------------------------------------------------------
/.nvmrc:
--------------------------------------------------------------------------------
18
2 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
FROM node:18-slim

WORKDIR /usr/src/app

# Copy manifests first so the dependency layer is cached across source edits.
COPY package*.json ./

# `npm ci` installs exactly what package-lock.json pins (reproducible builds);
# `npm install` may resolve newer versions and rewrite the lockfile.
RUN npm ci

COPY . .

EXPOSE 8080
CMD [ "node", "server.js" ]
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | Server Side Public License
2 | VERSION 1, OCTOBER 16, 2018
3 |
4 | Copyright © 2018 MongoDB, Inc.
5 |
6 | Everyone is permitted to copy and distribute verbatim copies of this
7 | license document, but changing it is not allowed.
8 |
9 | TERMS AND CONDITIONS
10 |
11 | 0. Definitions.
12 |
13 | “This License” refers to Server Side Public License.
14 |
15 | “Copyright” also means copyright-like laws that apply to other kinds of
16 | works, such as semiconductor masks.
17 |
18 | “The Program” refers to any copyrightable work licensed under this
19 | License. Each licensee is addressed as “you”. “Licensees” and
20 | “recipients” may be individuals or organizations.
21 |
22 | To “modify” a work means to copy from or adapt all or part of the work in
23 | a fashion requiring copyright permission, other than the making of an
24 | exact copy. The resulting work is called a “modified version” of the
25 | earlier work or a work “based on” the earlier work.
26 |
27 | A “covered work” means either the unmodified Program or a work based on
28 | the Program.
29 |
30 | To “propagate” a work means to do anything with it that, without
31 | permission, would make you directly or secondarily liable for
32 | infringement under applicable copyright law, except executing it on a
33 | computer or modifying a private copy. Propagation includes copying,
34 | distribution (with or without modification), making available to the
35 | public, and in some countries other activities as well.
36 |
37 | To “convey” a work means any kind of propagation that enables other
38 | parties to make or receive copies. Mere interaction with a user through a
39 | computer network, with no transfer of a copy, is not conveying.
40 |
41 | An interactive user interface displays “Appropriate Legal Notices” to the
42 | extent that it includes a convenient and prominently visible feature that
43 | (1) displays an appropriate copyright notice, and (2) tells the user that
44 | there is no warranty for the work (except to the extent that warranties
45 | are provided), that licensees may convey the work under this License, and
46 | how to view a copy of this License. If the interface presents a list of
47 | user commands or options, such as a menu, a prominent item in the list
48 | meets this criterion.
49 |
50 | 1. Source Code.
51 |
52 | The “source code” for a work means the preferred form of the work for
53 | making modifications to it. “Object code” means any non-source form of a
54 | work.
55 |
56 | A “Standard Interface” means an interface that either is an official
57 | standard defined by a recognized standards body, or, in the case of
58 | interfaces specified for a particular programming language, one that is
59 | widely used among developers working in that language. The “System
60 | Libraries” of an executable work include anything, other than the work as
61 | a whole, that (a) is included in the normal form of packaging a Major
62 | Component, but which is not part of that Major Component, and (b) serves
63 | only to enable use of the work with that Major Component, or to implement
64 | a Standard Interface for which an implementation is available to the
65 | public in source code form. A “Major Component”, in this context, means a
66 | major essential component (kernel, window system, and so on) of the
67 | specific operating system (if any) on which the executable work runs, or
68 | a compiler used to produce the work, or an object code interpreter used
69 | to run it.
70 |
71 | The “Corresponding Source” for a work in object code form means all the
72 | source code needed to generate, install, and (for an executable work) run
73 | the object code and to modify the work, including scripts to control
74 | those activities. However, it does not include the work's System
75 | Libraries, or general-purpose tools or generally available free programs
76 | which are used unmodified in performing those activities but which are
77 | not part of the work. For example, Corresponding Source includes
78 | interface definition files associated with source files for the work, and
79 | the source code for shared libraries and dynamically linked subprograms
80 | that the work is specifically designed to require, such as by intimate
81 | data communication or control flow between those subprograms and other
82 | parts of the work.
83 |
84 | The Corresponding Source need not include anything that users can
85 | regenerate automatically from other parts of the Corresponding Source.
86 |
87 | The Corresponding Source for a work in source code form is that same work.
88 |
89 | 2. Basic Permissions.
90 |
91 | All rights granted under this License are granted for the term of
92 | copyright on the Program, and are irrevocable provided the stated
93 | conditions are met. This License explicitly affirms your unlimited
94 | permission to run the unmodified Program, subject to section 13. The
95 | output from running a covered work is covered by this License only if the
96 | output, given its content, constitutes a covered work. This License
97 | acknowledges your rights of fair use or other equivalent, as provided by
98 | copyright law. Subject to section 13, you may make, run and propagate
99 | covered works that you do not convey, without conditions so long as your
100 | license otherwise remains in force. You may convey covered works to
101 | others for the sole purpose of having them make modifications exclusively
102 | for you, or provide you with facilities for running those works, provided
103 | that you comply with the terms of this License in conveying all
104 | material for which you do not control copyright. Those thus making or
105 | running the covered works for you must do so exclusively on your
106 | behalf, under your direction and control, on terms that prohibit them
107 | from making any copies of your copyrighted material outside their
108 | relationship with you.
109 |
110 | Conveying under any other circumstances is permitted solely under the
111 | conditions stated below. Sublicensing is not allowed; section 10 makes it
112 | unnecessary.
113 |
114 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
115 |
116 | No covered work shall be deemed part of an effective technological
117 | measure under any applicable law fulfilling obligations under article 11
118 | of the WIPO copyright treaty adopted on 20 December 1996, or similar laws
119 | prohibiting or restricting circumvention of such measures.
120 |
121 | When you convey a covered work, you waive any legal power to forbid
122 | circumvention of technological measures to the extent such circumvention is
123 | effected by exercising rights under this License with respect to the
124 | covered work, and you disclaim any intention to limit operation or
125 | modification of the work as a means of enforcing, against the work's users,
126 | your or third parties' legal rights to forbid circumvention of
127 | technological measures.
128 |
129 | 4. Conveying Verbatim Copies.
130 |
131 | You may convey verbatim copies of the Program's source code as you
132 | receive it, in any medium, provided that you conspicuously and
133 | appropriately publish on each copy an appropriate copyright notice; keep
134 | intact all notices stating that this License and any non-permissive terms
135 | added in accord with section 7 apply to the code; keep intact all notices
136 | of the absence of any warranty; and give all recipients a copy of this
137 | License along with the Program. You may charge any price or no price for
138 | each copy that you convey, and you may offer support or warranty
139 | protection for a fee.
140 |
141 | 5. Conveying Modified Source Versions.
142 |
143 | You may convey a work based on the Program, or the modifications to
144 | produce it from the Program, in the form of source code under the terms
145 | of section 4, provided that you also meet all of these conditions:
146 |
147 | a) The work must carry prominent notices stating that you modified it,
148 | and giving a relevant date.
149 |
150 | b) The work must carry prominent notices stating that it is released
151 | under this License and any conditions added under section 7. This
152 | requirement modifies the requirement in section 4 to “keep intact all
153 | notices”.
154 |
155 | c) You must license the entire work, as a whole, under this License to
156 | anyone who comes into possession of a copy. This License will therefore
157 | apply, along with any applicable section 7 additional terms, to the
158 | whole of the work, and all its parts, regardless of how they are
159 | packaged. This License gives no permission to license the work in any
160 | other way, but it does not invalidate such permission if you have
161 | separately received it.
162 |
163 | d) If the work has interactive user interfaces, each must display
164 | Appropriate Legal Notices; however, if the Program has interactive
165 | interfaces that do not display Appropriate Legal Notices, your work
166 | need not make them do so.
167 |
168 | A compilation of a covered work with other separate and independent
169 | works, which are not by their nature extensions of the covered work, and
170 | which are not combined with it such as to form a larger program, in or on
171 | a volume of a storage or distribution medium, is called an “aggregate” if
172 | the compilation and its resulting copyright are not used to limit the
173 | access or legal rights of the compilation's users beyond what the
174 | individual works permit. Inclusion of a covered work in an aggregate does
175 | not cause this License to apply to the other parts of the aggregate.
176 |
177 | 6. Conveying Non-Source Forms.
178 |
179 | You may convey a covered work in object code form under the terms of
180 | sections 4 and 5, provided that you also convey the machine-readable
181 | Corresponding Source under the terms of this License, in one of these
182 | ways:
183 |
184 | a) Convey the object code in, or embodied in, a physical product
185 | (including a physical distribution medium), accompanied by the
186 | Corresponding Source fixed on a durable physical medium customarily
187 | used for software interchange.
188 |
189 | b) Convey the object code in, or embodied in, a physical product
190 | (including a physical distribution medium), accompanied by a written
191 | offer, valid for at least three years and valid for as long as you
192 | offer spare parts or customer support for that product model, to give
193 | anyone who possesses the object code either (1) a copy of the
194 | Corresponding Source for all the software in the product that is
195 | covered by this License, on a durable physical medium customarily used
196 | for software interchange, for a price no more than your reasonable cost
197 | of physically performing this conveying of source, or (2) access to
198 | copy the Corresponding Source from a network server at no charge.
199 |
200 | c) Convey individual copies of the object code with a copy of the
201 | written offer to provide the Corresponding Source. This alternative is
202 | allowed only occasionally and noncommercially, and only if you received
203 | the object code with such an offer, in accord with subsection 6b.
204 |
205 | d) Convey the object code by offering access from a designated place
206 | (gratis or for a charge), and offer equivalent access to the
207 | Corresponding Source in the same way through the same place at no
208 | further charge. You need not require recipients to copy the
209 | Corresponding Source along with the object code. If the place to copy
210 | the object code is a network server, the Corresponding Source may be on
211 | a different server (operated by you or a third party) that supports
212 | equivalent copying facilities, provided you maintain clear directions
213 | next to the object code saying where to find the Corresponding Source.
214 | Regardless of what server hosts the Corresponding Source, you remain
215 | obligated to ensure that it is available for as long as needed to
216 | satisfy these requirements.
217 |
218 | e) Convey the object code using peer-to-peer transmission, provided you
219 | inform other peers where the object code and Corresponding Source of
220 | the work are being offered to the general public at no charge under
221 | subsection 6d.
222 |
223 | A separable portion of the object code, whose source code is excluded
224 | from the Corresponding Source as a System Library, need not be included
225 | in conveying the object code work.
226 |
227 | A “User Product” is either (1) a “consumer product”, which means any
228 | tangible personal property which is normally used for personal, family,
229 | or household purposes, or (2) anything designed or sold for incorporation
230 | into a dwelling. In determining whether a product is a consumer product,
231 | doubtful cases shall be resolved in favor of coverage. For a particular
232 | product received by a particular user, “normally used” refers to a
233 | typical or common use of that class of product, regardless of the status
234 | of the particular user or of the way in which the particular user
235 | actually uses, or expects or is expected to use, the product. A product
236 | is a consumer product regardless of whether the product has substantial
237 | commercial, industrial or non-consumer uses, unless such uses represent
238 | the only significant mode of use of the product.
239 |
240 | “Installation Information” for a User Product means any methods,
241 | procedures, authorization keys, or other information required to install
242 | and execute modified versions of a covered work in that User Product from
243 | a modified version of its Corresponding Source. The information must
244 | suffice to ensure that the continued functioning of the modified object
245 | code is in no case prevented or interfered with solely because
246 | modification has been made.
247 |
248 | If you convey an object code work under this section in, or with, or
249 | specifically for use in, a User Product, and the conveying occurs as part
250 | of a transaction in which the right of possession and use of the User
251 | Product is transferred to the recipient in perpetuity or for a fixed term
252 | (regardless of how the transaction is characterized), the Corresponding
253 | Source conveyed under this section must be accompanied by the
254 | Installation Information. But this requirement does not apply if neither
255 | you nor any third party retains the ability to install modified object
256 | code on the User Product (for example, the work has been installed in
257 | ROM).
258 |
259 | The requirement to provide Installation Information does not include a
260 | requirement to continue to provide support service, warranty, or updates
261 | for a work that has been modified or installed by the recipient, or for
262 | the User Product in which it has been modified or installed. Access
263 | to a network may be denied when the modification itself materially
264 | and adversely affects the operation of the network or violates the
265 | rules and protocols for communication across the network.
266 |
267 | Corresponding Source conveyed, and Installation Information provided, in
268 | accord with this section must be in a format that is publicly documented
269 | (and with an implementation available to the public in source code form),
270 | and must require no special password or key for unpacking, reading or
271 | copying.
272 |
273 | 7. Additional Terms.
274 |
275 | “Additional permissions” are terms that supplement the terms of this
276 | License by making exceptions from one or more of its conditions.
277 | Additional permissions that are applicable to the entire Program shall be
278 | treated as though they were included in this License, to the extent that
279 | they are valid under applicable law. If additional permissions apply only
280 | to part of the Program, that part may be used separately under those
281 | permissions, but the entire Program remains governed by this License
282 | without regard to the additional permissions. When you convey a copy of
283 | a covered work, you may at your option remove any additional permissions
284 | from that copy, or from any part of it. (Additional permissions may be
285 | written to require their own removal in certain cases when you modify the
286 | work.) You may place additional permissions on material, added by you to
287 | a covered work, for which you have or can give appropriate copyright
288 | permission.
289 |
290 | Notwithstanding any other provision of this License, for material you add
291 | to a covered work, you may (if authorized by the copyright holders of
292 | that material) supplement the terms of this License with terms:
293 |
294 | a) Disclaiming warranty or limiting liability differently from the
295 | terms of sections 15 and 16 of this License; or
296 |
297 | b) Requiring preservation of specified reasonable legal notices or
298 | author attributions in that material or in the Appropriate Legal
299 | Notices displayed by works containing it; or
300 |
301 | c) Prohibiting misrepresentation of the origin of that material, or
302 | requiring that modified versions of such material be marked in
303 | reasonable ways as different from the original version; or
304 |
305 | d) Limiting the use for publicity purposes of names of licensors or
306 | authors of the material; or
307 |
308 | e) Declining to grant rights under trademark law for use of some trade
309 | names, trademarks, or service marks; or
310 |
311 | f) Requiring indemnification of licensors and authors of that material
312 | by anyone who conveys the material (or modified versions of it) with
313 | contractual assumptions of liability to the recipient, for any
314 | liability that these contractual assumptions directly impose on those
315 | licensors and authors.
316 |
317 | All other non-permissive additional terms are considered “further
318 | restrictions” within the meaning of section 10. If the Program as you
319 | received it, or any part of it, contains a notice stating that it is
320 | governed by this License along with a term that is a further restriction,
321 | you may remove that term. If a license document contains a further
322 | restriction but permits relicensing or conveying under this License, you
323 | may add to a covered work material governed by the terms of that license
324 | document, provided that the further restriction does not survive such
325 | relicensing or conveying.
326 |
327 | If you add terms to a covered work in accord with this section, you must
328 | place, in the relevant source files, a statement of the additional terms
329 | that apply to those files, or a notice indicating where to find the
330 | applicable terms. Additional terms, permissive or non-permissive, may be
331 | stated in the form of a separately written license, or stated as
332 | exceptions; the above requirements apply either way.
333 |
334 | 8. Termination.
335 |
336 | You may not propagate or modify a covered work except as expressly
337 | provided under this License. Any attempt otherwise to propagate or modify
338 | it is void, and will automatically terminate your rights under this
339 | License (including any patent licenses granted under the third paragraph
340 | of section 11).
341 |
342 | However, if you cease all violation of this License, then your license
343 | from a particular copyright holder is reinstated (a) provisionally,
344 | unless and until the copyright holder explicitly and finally terminates
345 | your license, and (b) permanently, if the copyright holder fails to
346 | notify you of the violation by some reasonable means prior to 60 days
347 | after the cessation.
348 |
349 | Moreover, your license from a particular copyright holder is reinstated
350 | permanently if the copyright holder notifies you of the violation by some
351 | reasonable means, this is the first time you have received notice of
352 | violation of this License (for any work) from that copyright holder, and
353 | you cure the violation prior to 30 days after your receipt of the notice.
354 |
355 | Termination of your rights under this section does not terminate the
356 | licenses of parties who have received copies or rights from you under
357 | this License. If your rights have been terminated and not permanently
358 | reinstated, you do not qualify to receive new licenses for the same
359 | material under section 10.
360 |
361 | 9. Acceptance Not Required for Having Copies.
362 |
363 | You are not required to accept this License in order to receive or run a
364 | copy of the Program. Ancillary propagation of a covered work occurring
365 | solely as a consequence of using peer-to-peer transmission to receive a
366 | copy likewise does not require acceptance. However, nothing other than
367 | this License grants you permission to propagate or modify any covered
368 | work. These actions infringe copyright if you do not accept this License.
369 | Therefore, by modifying or propagating a covered work, you indicate your
370 | acceptance of this License to do so.
371 |
372 | 10. Automatic Licensing of Downstream Recipients.
373 |
374 | Each time you convey a covered work, the recipient automatically receives
375 | a license from the original licensors, to run, modify and propagate that
376 | work, subject to this License. You are not responsible for enforcing
377 | compliance by third parties with this License.
378 |
379 | An “entity transaction” is a transaction transferring control of an
380 | organization, or substantially all assets of one, or subdividing an
381 | organization, or merging organizations. If propagation of a covered work
382 | results from an entity transaction, each party to that transaction who
383 | receives a copy of the work also receives whatever licenses to the work
384 | the party's predecessor in interest had or could give under the previous
385 | paragraph, plus a right to possession of the Corresponding Source of the
386 | work from the predecessor in interest, if the predecessor has it or can
387 | get it with reasonable efforts.
388 |
389 | You may not impose any further restrictions on the exercise of the rights
390 | granted or affirmed under this License. For example, you may not impose a
391 | license fee, royalty, or other charge for exercise of rights granted
392 | under this License, and you may not initiate litigation (including a
393 | cross-claim or counterclaim in a lawsuit) alleging that any patent claim
394 | is infringed by making, using, selling, offering for sale, or importing
395 | the Program or any portion of it.
396 |
397 | 11. Patents.
398 |
399 | A “contributor” is a copyright holder who authorizes use under this
400 | License of the Program or a work on which the Program is based. The work
401 | thus licensed is called the contributor's “contributor version”.
402 |
403 | A contributor's “essential patent claims” are all patent claims owned or
404 | controlled by the contributor, whether already acquired or hereafter
405 | acquired, that would be infringed by some manner, permitted by this
406 | License, of making, using, or selling its contributor version, but do not
407 | include claims that would be infringed only as a consequence of further
408 | modification of the contributor version. For purposes of this definition,
409 | “control” includes the right to grant patent sublicenses in a manner
410 | consistent with the requirements of this License.
411 |
412 | Each contributor grants you a non-exclusive, worldwide, royalty-free
413 | patent license under the contributor's essential patent claims, to make,
414 | use, sell, offer for sale, import and otherwise run, modify and propagate
415 | the contents of its contributor version.
416 |
417 | In the following three paragraphs, a “patent license” is any express
418 | agreement or commitment, however denominated, not to enforce a patent
419 | (such as an express permission to practice a patent or covenant not to
420 | sue for patent infringement). To “grant” such a patent license to a party
421 | means to make such an agreement or commitment not to enforce a patent
422 | against the party.
423 |
424 | If you convey a covered work, knowingly relying on a patent license, and
425 | the Corresponding Source of the work is not available for anyone to copy,
426 | free of charge and under the terms of this License, through a publicly
427 | available network server or other readily accessible means, then you must
428 | either (1) cause the Corresponding Source to be so available, or (2)
429 | arrange to deprive yourself of the benefit of the patent license for this
430 | particular work, or (3) arrange, in a manner consistent with the
431 | requirements of this License, to extend the patent license to downstream
432 | recipients. “Knowingly relying” means you have actual knowledge that, but
433 | for the patent license, your conveying the covered work in a country, or
434 | your recipient's use of the covered work in a country, would infringe
435 | one or more identifiable patents in that country that you have reason
436 | to believe are valid.
437 |
438 | If, pursuant to or in connection with a single transaction or
439 | arrangement, you convey, or propagate by procuring conveyance of, a
440 | covered work, and grant a patent license to some of the parties receiving
441 | the covered work authorizing them to use, propagate, modify or convey a
442 | specific copy of the covered work, then the patent license you grant is
443 | automatically extended to all recipients of the covered work and works
444 | based on it.
445 |
446 | A patent license is “discriminatory” if it does not include within the
447 | scope of its coverage, prohibits the exercise of, or is conditioned on
448 | the non-exercise of one or more of the rights that are specifically
449 | granted under this License. You may not convey a covered work if you are
450 | a party to an arrangement with a third party that is in the business of
451 | distributing software, under which you make payment to the third party
452 | based on the extent of your activity of conveying the work, and under
453 | which the third party grants, to any of the parties who would receive the
454 | covered work from you, a discriminatory patent license (a) in connection
455 | with copies of the covered work conveyed by you (or copies made from
456 | those copies), or (b) primarily for and in connection with specific
457 | products or compilations that contain the covered work, unless you
458 | entered into that arrangement, or that patent license was granted, prior
459 | to 28 March 2007.
460 |
461 | Nothing in this License shall be construed as excluding or limiting any
462 | implied license or other defenses to infringement that may otherwise be
463 | available to you under applicable patent law.
464 |
465 | 12. No Surrender of Others' Freedom.
466 |
467 | If conditions are imposed on you (whether by court order, agreement or
468 | otherwise) that contradict the conditions of this License, they do not
469 | excuse you from the conditions of this License. If you cannot use,
470 | propagate or convey a covered work so as to satisfy simultaneously your
471 | obligations under this License and any other pertinent obligations, then
472 | as a consequence you may not use, propagate or convey it at all. For
473 | example, if you agree to terms that obligate you to collect a royalty for
474 | further conveying from those to whom you convey the Program, the only way
475 | you could satisfy both those terms and this License would be to refrain
476 | entirely from conveying the Program.
477 |
478 | 13. Offering the Program as a Service.
479 |
480 | If you make the functionality of the Program or a modified version
481 | available to third parties as a service, you must make the Service Source
482 | Code available via network download to everyone at no charge, under the
483 | terms of this License. Making the functionality of the Program or
484 | modified version available to third parties as a service includes,
485 | without limitation, enabling third parties to interact with the
486 | functionality of the Program or modified version remotely through a
487 | computer network, offering a service the value of which entirely or
488 | primarily derives from the value of the Program or modified version, or
489 | offering a service that accomplishes for users the primary purpose of the
490 | Program or modified version.
491 |
492 | “Service Source Code” means the Corresponding Source for the Program or
493 | the modified version, and the Corresponding Source for all programs that
494 | you use to make the Program or modified version available as a service,
495 | including, without limitation, management software, user interfaces,
496 | application program interfaces, automation software, monitoring software,
497 | backup software, storage software and hosting software, all such that a
498 | user could run an instance of the service using the Service Source Code
499 | you make available.
500 |
501 | 14. Revised Versions of this License.
502 |
503 | MongoDB, Inc. may publish revised and/or new versions of the Server Side
504 | Public License from time to time. Such new versions will be similar in
505 | spirit to the present version, but may differ in detail to address new
506 | problems or concerns.
507 |
508 | Each version is given a distinguishing version number. If the Program
509 | specifies that a certain numbered version of the Server Side Public
510 | License “or any later version” applies to it, you have the option of
511 | following the terms and conditions either of that numbered version or of
512 | any later version published by MongoDB, Inc. If the Program does not
513 | specify a version number of the Server Side Public License, you may
514 | choose any version ever published by MongoDB, Inc.
515 |
516 | If the Program specifies that a proxy can decide which future versions of
517 | the Server Side Public License can be used, that proxy's public statement
518 | of acceptance of a version permanently authorizes you to choose that
519 | version for the Program.
520 |
521 | Later license versions may give you additional or different permissions.
522 | However, no additional obligations are imposed on any author or copyright
523 | holder as a result of your choosing to follow a later version.
524 |
525 | 15. Disclaimer of Warranty.
526 |
527 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
528 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
529 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY
530 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
531 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
532 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
533 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
534 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
535 |
536 | 16. Limitation of Liability.
537 |
538 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
539 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
540 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING
541 | ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF
542 | THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO
543 | LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU
544 | OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
545 | PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
546 | POSSIBILITY OF SUCH DAMAGES.
547 |
548 | 17. Interpretation of Sections 15 and 16.
549 |
550 | If the disclaimer of warranty and limitation of liability provided above
551 | cannot be given local legal effect according to their terms, reviewing
552 | courts shall apply local law that most closely approximates an absolute
553 | waiver of all civil liability in connection with the Program, unless a
554 | warranty or assumption of liability accompanies a copy of the Program in
555 | return for a fee.
556 |
557 | END OF TERMS AND CONDITIONS
558 |
559 |
560 |
561 |
562 |
563 | Elastic License 2.0
564 |
565 | URL: https://www.elastic.co/licensing/elastic-license
566 |
567 | ## Acceptance
568 |
569 | By using the software, you agree to all of the terms and conditions below.
570 |
571 | ## Copyright License
572 |
573 | The licensor grants you a non-exclusive, royalty-free, worldwide,
574 | non-sublicensable, non-transferable license to use, copy, distribute, make
575 | available, and prepare derivative works of the software, in each case subject to
576 | the limitations and conditions below.
577 |
578 | ## Limitations
579 |
580 | You may not provide the software to third parties as a hosted or managed
581 | service, where the service provides users with access to any substantial set of
582 | the features or functionality of the software.
583 |
584 | You may not move, change, disable, or circumvent the license key functionality
585 | in the software, and you may not remove or obscure any functionality in the
586 | software that is protected by the license key.
587 |
588 | You may not alter, remove, or obscure any licensing, copyright, or other notices
589 | of the licensor in the software. Any use of the licensor’s trademarks is subject
590 | to applicable law.
591 |
592 | ## Patents
593 |
594 | The licensor grants you a license, under any patent claims the licensor can
595 | license, or becomes able to license, to make, have made, use, sell, offer for
596 | sale, import and have imported the software, in each case subject to the
597 | limitations and conditions in this license. This license does not cover any
598 | patent claims that you cause to be infringed by modifications or additions to
599 | the software. If you or your company make any written claim that the software
600 | infringes or contributes to infringement of any patent, your patent license for
601 | the software granted under these terms ends immediately. If your company makes
602 | such a claim, your patent license ends immediately for work on behalf of your
603 | company.
604 |
605 | ## Notices
606 |
607 | You must ensure that anyone who gets a copy of any part of the software from you
608 | also gets a copy of these terms.
609 |
610 | If you modify the software, you must include in any modified copies of the
611 | software prominent notices stating that you have modified the software.
612 |
613 | ## No Other Rights
614 |
615 | These terms do not imply any licenses other than those expressly granted in
616 | these terms.
617 |
618 | ## Termination
619 |
620 | If you use the software in violation of these terms, such use is not licensed,
621 | and your licenses will automatically terminate. If the licensor provides you
622 | with a notice of your violation, and you cease all violation of this license no
623 | later than 30 days after you receive that notice, your licenses will be
624 | reinstated retroactively. However, if you violate these terms after such
625 | reinstatement, any additional violation of these terms will cause your licenses
626 | to terminate automatically and permanently.
627 |
628 | ## No Liability
629 |
630 | *As far as the law allows, the software comes as is, without any warranty or
631 | condition, and the licensor will not be liable to you for any damages arising
632 | out of these terms or the use or nature of the software, under any kind of
633 | legal claim.*
634 |
635 | ## Definitions
636 |
637 | The **licensor** is the entity offering these terms, and the **software** is the
638 | software the licensor makes available under these terms, including any portion
639 | of it.
640 |
641 | **you** refers to the individual or entity agreeing to these terms.
642 |
643 | **your company** is any legal entity, sole proprietorship, or other kind of
644 | organization that you work for, plus all organizations that have control over,
645 | are under the control of, or are under common control with that
646 | organization. **control** means ownership of substantially all the assets of an
647 | entity, or the power to direct its management and policies by vote, contract, or
648 | otherwise. Control can be direct or indirect.
649 |
650 | **your licenses** are all the licenses granted to you for the software under
651 | these terms.
652 |
653 | **use** means anything you do with the software requiring one of your licenses.
654 |
655 | **trademark** means trademarks, service marks, and similar rights.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Install jsdb server
2 | ```shell
3 | npm i @jsdb/server
4 | ```
5 |
6 | Create a file called server.js
7 | ```js
8 | import {start} from "@jsdb/server";
9 | start();
10 | ```
11 |
12 | Create a .env file
13 | ```dotenv
14 | # Used to sign jwt tokens
15 | JWT_SECRET="SUPER_SECRET_KEY"
16 |
17 | # Nodejs server port
18 | PORT=3001
19 |
20 | # Max requests per minute from the same IP
21 | RATE_LIMIT=10000
22 | ```
23 | Run your server
24 | ```shell
25 | node .
26 | ```
27 |
28 | Check the docs https://javascriptdb.com/docs
29 |
30 |
--------------------------------------------------------------------------------
/Replicated.Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:18
2 |
3 | WORKDIR /usr/src/app
4 |
5 | COPY package*.json ./
6 |
7 | RUN npm install
8 |
9 | ADD https://github.com/benbjohnson/litestream/releases/download/v0.3.8/litestream-v0.3.8-linux-amd64-static.tar.gz /tmp/litestream.tar.gz
10 | RUN tar -C /usr/local/bin -xzf /tmp/litestream.tar.gz
11 |
12 | COPY . .
13 | COPY litestream.yml /etc/litestream.yml
14 |
15 | EXPOSE 8080
16 |
17 | ENTRYPOINT ["bash", "run.sh" ]
--------------------------------------------------------------------------------
/authApp.js:
--------------------------------------------------------------------------------
1 | import express from 'express';
2 | import jwt from "jsonwebtoken";
3 | import JwtStrategy from "passport-jwt";
4 | import passport from "passport";
5 | import {Strategy as localStrategy} from 'passport-local';
6 | import bcrypt from "bcryptjs";
7 | import {opHandlers} from "./opHandlersBetterSqlite.js";
8 |
9 | const app = express();
10 |
// Verify callback for the JWT strategy: the token payload was already
// validated/decoded by passport-jwt, so we just hand its `user` claim on.
const verifyJwt = async (token, done) => {
  try {
    return done(null, token.user);
  } catch (error) {
    done(error);
  }
};

passport.use(
  new JwtStrategy.Strategy(
    {
      // Secret used to verify the signature; must match the signing secret.
      secretOrKey: process.env.JWT_SECRET,
      // Expect "Authorization: Bearer <token>".
      jwtFromRequest: JwtStrategy.ExtractJwt.fromAuthHeaderAsBearerToken()
    },
    verifyJwt
  )
);
26 |
27 |
// Local "signup" strategy: hash the password, persist the new user document,
// and report the freshly generated id back to the route handler.
passport.use(
  'signup',
  new localStrategy(
    {usernameField: 'email', passwordField: 'password'},
    async (email, password, done) => {
      try {
        // Store only the bcrypt hash, never the plaintext password.
        const hashed = bcrypt.hashSync(password, 8);
        const newUser = {credentials: {email, password: hashed}};
        const {insertedId} = opHandlers.set({collection: 'users', value: newUser});
        return done(null, {email, id: insertedId});
      } catch (error) {
        done(error);
      }
    }
  )
);
47 |
// Local "login" strategy: look the user up by email, then verify the password.
passport.use(
  'login',
  new localStrategy(
    {
      usernameField: 'email',
      passwordField: 'password'
    },
    async (email, password, done) => {
      try {
        // Serialize the predicate so the op handler can evaluate it against stored rows.
        const callbackFn = ((user) => user.credentials.email === email).toString()
        const user = opHandlers.find({collection: 'users', callbackFn, thisArg: {email}})

        // BUGFIX: check existence BEFORE comparing passwords. The original
        // called bcrypt.compareSync(password, user.credentials.password) first,
        // so an unknown email crashed with a TypeError instead of returning
        // "User not found".
        if (!user) {
          return done(null, false, {message: 'User not found'});
        }

        if (!bcrypt.compareSync(password, user.credentials.password)) {
          return done(null, false, {message: 'Invalid password'})
        }

        return done(null, user, {message: 'Logged in Successfully'});
      } catch (error) {
        return done(error);
      }
    }
  )
);
75 |
// POST /signup — passport's 'signup' strategy creates the user; on success we
// issue a JWT carrying the user's id and email.
app.post(
  '/signup',
  passport.authenticate('signup', {session: false}),
  async (req, res) => {
    try {
      const {id, email} = req.user;
      const token = jwt.sign({ user: { id, email } }, process.env.JWT_SECRET);
      res.send({ token, userId: id });
    } catch (e) {
      console.error(e);
      res.status(500).send(e);
    }
  }
);
89 |
// POST /signin — runs the 'login' strategy manually so we can issue a JWT
// instead of establishing a session.
app.post(
  '/signin',
  async (req, res, next) => {
    const onAuthenticated = async (err, user, info) => {
      try {
        if (err || !user) {
          console.error(err)
          return next(new Error('An error occurred.'));
        }

        // session:false — we only need req.login for passport bookkeeping.
        req.login(user, { session: false }, (error) => {
          if (error) return next(error);

          const token = jwt.sign({ user: { id: user.id, email: user.email } }, process.env.JWT_SECRET);
          return res.json({ token, userId: user.id });
        });
      } catch (error) {
        return next(error);
      }
    };

    passport.authenticate('login', onAuthenticated)(req, res, next);
  }
);
121 |
122 | export default app;
--------------------------------------------------------------------------------
/dbApp.js:
--------------------------------------------------------------------------------
1 | import express from 'express';
2 | import {resolveMiddlewareFunction, rules, triggers} from "./lifecycleMiddleware.js";
3 | import {opHandlers} from "./opHandlersBetterSqlite.js";
4 |
5 | const app = express();
6 |
// Authorization gate: every db operation must pass the collection's rule
// function before reaching a route handler. A falsy rule result → 401; a
// truthy object result may narrow the query via excludeFields/where.
app.use(async (req, res, next) => {
  const {collection} = req.body;
  // Route paths are single-segment ('/set', '/get', ...), so stripping slashes
  // yields the operation name.
  const method = req.path.replaceAll('/', '');

  const ruleFunction = resolveMiddlewareFunction('rules', collection, method);

  if (!ruleFunction) {
    console.warn(`No rule defined for ${collection} method ${method}`);
    return next();
  }

  try {
    const ruleResult = await ruleFunction({collection, user: req.user, req, ...req.body});
    if (!ruleResult) {
      return res.status(401).send({message: 'Unauthorized!'});
    }
    req.excludeFields = ruleResult?.excludeFields;
    req.where = ruleResult?.where;
  } catch (e) {
    // A throwing rule counts as a denial; surface its message to the caller.
    console.error(e);
    return res.status(401).send({message: e.message});
  }
  next();
})
32 |
// Read-style endpoints all share one shape: run an op handler on the request
// body, send the result, call next() so the trigger middleware fires, and
// report failures as 500s. The table maps each route to its exact original
// response payload (some wrapped in {value}, some sent raw).
const readEndpoints = [
  [['/filter'], (body) => ({value: opHandlers.filter(body)})],
  [['/find'], (body) => ({value: opHandlers.find(body) || null})],
  [['/map'], (body) => opHandlers.map(body)],
  [['/getAll', '/forEach', '/entries', '/values'], (body) => opHandlers.getAll(body)],
  [['/slice'], (body) => opHandlers.slice(body)],
  [['/has'], (body) => ({value: opHandlers.has(body)})],
  [['/keys'], (body) => opHandlers.keys(body)],
];

for (const [paths, compute] of readEndpoints) {
  app.post(paths, async (req, res, next) => {
    try {
      res.send(compute(req.body));
      next();
    } catch (e) {
      console.error(e);
      res.status(500).send(e);
    }
  });
}
109 |
// POST /push — insert with a generated id, answer with that id, then notify
// realtime subscribers of the new document.
app.post('/push', async (req, res, next) => {
  try {
    const {insertedId} = opHandlers.set(req.body);
    // Expose the id so the trigger middleware can pass it to trigger functions.
    req.insertedId = insertedId;
    res.send({value: insertedId});
    // Re-read the stored document so listeners receive the full payload.
    const document = opHandlers.get({collection: req.body.collection, id: insertedId});
    req.realtimeListeners.emit(req.body.collection, {event: 'add', document})
    next();
  } catch (e) {
    console.error(e);
    res.status(500).send(e);
  }
});
123 |
// POST /size, /length — document count for a collection.
app.post(['/size', '/length'], async (req, res, next) => {
  try {
    res.send({value: opHandlers.size(req.body)});
    next();
  } catch (e) {
    console.error(e);
    res.status(500).send(e);
  }
})
134 |
// POST /clear — drop the whole collection.
app.post('/clear', async (req, res, next) => {
  try {
    opHandlers.clear(req.body);
    res.sendStatus(200);
    next();
  } catch (e) {
    // Log before responding, matching every other handler in this router;
    // the original swallowed the error detail.
    console.error(e);
    res.status(500).send(e);
  }
})
144 |
// POST /delete — remove a document, or a nested property when `path` is given.
app.post('/delete', async (req, res, next) => {
  try {
    const {collection, id, path} = req.body;
    res.send({value: opHandlers.delete({collection, id, path})});
    next();
  } catch (e) {
    console.error(e);
    res.status(500).send(e);
  }
})
156 |
// POST /set — upsert a document (or a nested property), then broadcast the
// change on both the collection channel and the per-document channel.
app.post('/set', async (req, res, next) => {
  try {
    const {collection, id, value, path} = req.body;
    const result = opHandlers.set({collection, id, value, path});
    res.result = result;
    // Re-read so subscribers get the post-write state of the full document.
    const document = opHandlers.get({collection, id});
    // 'add' when the id was newly inserted, 'edit' for an existing document.
    const event = result.inserted ? 'add' : 'edit';
    req.realtimeListeners.emit(collection, {event, document})
    req.realtimeListeners.emit(`${collection}.${id}`, document);
    res.sendStatus(200)
    next();
  } catch (e) {
    console.error(e);
    res.status(500).send(e);
  }
});
176 |
// POST /get — read one document, or the nested property at `path`.
app.post('/get', async (req, res, next) => {
  try {
    const {collection, id, path} = req.body;
    const value = opHandlers.get({collection, id, path});
    res.send({value});
    next();
  } catch (e) {
    console.error(e);
    res.status(500).send(e);
  }
});
188 |
// Post-operation trigger middleware: after a route handler responded (next()
// is called first so this runs last in the chain), invoke the collection's
// trigger function, if any.
app.use(async (req, res, next) => {
  next();
  const {collection, id, value} = req.body;
  const method = req.path.replaceAll('/', '');
  const triggerFunction = resolveMiddlewareFunction('triggers', collection, method)
  try {
    // BUGFIX: await the trigger. Triggers may be async (e.g. the bundled
    // bundles/triggers.js push trigger is), and the original fire-and-forget
    // call let their rejections escape as unhandled promise rejections
    // instead of being caught and logged here.
    await triggerFunction?.({collection, id, value, user: req.user, insertedId: req.insertedId, req, res});
  } catch (e) {
    console.error(e);
  }
});
200 |
201 | export default app;
--------------------------------------------------------------------------------
/functionsApp.js:
--------------------------------------------------------------------------------
1 | import {functions} from "./lifecycleMiddleware.js";
2 | import express from "express";
3 | const app = express();
4 |
// POST /:functionPath — invoke a deployed cloud function's default export
// with the request, authenticated user, and JSON body. 404 when no function
// with that name has been loaded.
app.post('/:functionPath', async function (req, res, next) {
  const {functionPath} = req.params;
  try {
    const fn = functions[functionPath]?.default;
    if (!fn) {
      res.sendStatus(404);
      return;
    }
    const result = await fn({req, user: req.user, data: req.body});
    res.send(result);
  } catch (e) {
    console.error(e);
    res.status(500).send({message: `Error executing function ${functionPath}`});
  }
});
18 |
19 | export default app;
--------------------------------------------------------------------------------
/lifecycleMiddleware.js:
--------------------------------------------------------------------------------
1 | import path from "path";
2 | import url from "url";
3 | import fsPromises from "fs/promises";
4 | import AdmZip from "adm-zip";
5 | import operationFallback from "./operationFallback.js";
6 | import {forceIndex} from "./opHandlersBetterSqlite.js";
7 |
8 | export const triggers = {};
9 | export const rules = {};
10 | export const functions = {};
11 | export const indexes = {};
12 |
/**
 * Resolve the rule or trigger function for a collection + operation,
 * falling back from the most specific handler to collection-level and
 * then global defaults.
 *
 * @param {'rules'|'triggers'} middlewareType - which registry to search.
 * @param {string} collection - collection name from the request body.
 * @param {string} method - operation name ('set', 'get', 'push', ...).
 * @returns {Function|undefined} the resolved handler, or undefined if none.
 */
export function resolveMiddlewareFunction(middlewareType, collection, method) {
  const source = middlewareType === 'rules' ? rules : triggers;
  // BUGFIX: the original fallback chain referenced `rules` unconditionally,
  // so trigger resolution fell back to RULE functions (e.g. rules.default)
  // and invoked them as triggers. Fall back within the selected registry.
  return source[collection]?.[method]
    || source[collection]?.[operationFallback[method]]
    || source[collection]?.default
    || source.default?.[method]
    || source.default?.[operationFallback[method]]
    || source.default?.default;
}
23 |
/**
 * Load a bundle directory into the in-memory registries: per-collection
 * rules.js / triggers.js / indexes.json under db/, function modules under
 * functions/, and static assets under hosting/.
 *
 * @param {string} extractedBundlePath - root of the extracted bundle.
 */
export async function importFromPath(extractedBundlePath) {
  const dbCollectionDirs = await fsPromises.readdir(path.resolve(extractedBundlePath, 'db'));
  // The implicit 'default' collection is always considered, even without a dir.
  dbCollectionDirs.push('default');
  await Promise.all(dbCollectionDirs.map(async collectionDir => {
    try {
      rules[collectionDir] = await import(path.resolve(extractedBundlePath, 'db', collectionDir, 'rules.js'))
    } catch (e) {
      // Missing rules.js is fine; anything else is a real load error.
      if (e.code !== 'ERR_MODULE_NOT_FOUND') console.error(e)
    }
    try {
      triggers[collectionDir] = await import(path.resolve(extractedBundlePath, 'db', collectionDir, 'triggers.js'))
    } catch (e) {
      if (e.code !== 'ERR_MODULE_NOT_FOUND') console.error(e)
    }
    try {
      indexes[collectionDir] = (await import(path.resolve(extractedBundlePath, 'db', collectionDir, 'indexes.json'),{assert: {type: 'json'}})).default
      // BUGFIX: await index creation. The original used .map(async ...) and
      // dropped the promises, so index-creation failures escaped this catch
      // as unhandled rejections.
      await Promise.all(indexes[collectionDir].map(index => forceIndex(collectionDir, index)))
    } catch (e) {
      if (e.code !== 'ERR_MODULE_NOT_FOUND') console.error(e)
    }
  }));

  try {
    const functionFileNames = (await fsPromises.readdir(path.resolve(extractedBundlePath, 'functions')))
      .filter(name => name.includes('.js'));
    await Promise.all(functionFileNames.map(async (functionFileName) => {
      try {
        const functionName = functionFileName.replace('.js', '');
        functions[functionName] = await import(path.resolve(extractedBundlePath, 'functions', functionFileName));
      } catch (e) {
        console.error(e);
      }
    }));
  } catch (e) {
    // No functions/ directory in the bundle is a normal case.
    if (e.code !== 'ENOENT') console.error(e);
  }

  const bundleHostingPath = path.resolve(extractedBundlePath, 'hosting')
  const serverHostingPath = path.resolve(process.cwd(), '.jsdb', 'hosting')
  console.log({rules, triggers, functions, indexes});
  try {
    if (bundleHostingPath !== serverHostingPath) {
      console.log('Copy hosting from', bundleHostingPath, serverHostingPath);
      await fsPromises.cp(bundleHostingPath, serverHostingPath, {recursive: true, force: true});
    }
  } catch (e) {
    // No hosting/ directory in the bundle is a normal case.
    if (e.code !== 'ENOENT') console.error(e);
  }
}
73 |
/**
 * Import a bundle delivered as a base64-encoded zip: write it to a temp file,
 * extract, load via importFromPath, then clean up.
 *
 * @param {string} base64 - base64-encoded zip archive of a bundle.
 */
export async function importFromBase64(base64) {
  // TODO : do this without writing a temporal zip file to FS
  const tmpBundlePath = path.resolve(process.cwd(), '.tmpJsdbBundle.zip');
  const tempJsdbPath = path.resolve(process.cwd(), '.jsdb-temp');
  await fsPromises.writeFile(tmpBundlePath, Buffer.from(base64, 'base64'));
  try {
    const zip = new AdmZip(tmpBundlePath);
    zip.extractAllTo(tempJsdbPath, true);
    await importFromPath(tempJsdbPath);
  } finally {
    // BUGFIX: the original fired these removals without awaiting them, so
    // failures surfaced as unhandled rejections and temp files could leak
    // when extraction/import threw. force:true tolerates missing paths.
    await fsPromises.rm(tmpBundlePath, {force: true});
    await fsPromises.rm(tempJsdbPath, {recursive: true, force: true});
  }
}
85 |
86 | const defaultsPath = path.resolve(url.fileURLToPath(import.meta.url), '../.jsdb');
87 |
88 | await importFromPath(defaultsPath);
89 |
--------------------------------------------------------------------------------
/litestream.yml:
--------------------------------------------------------------------------------
1 | dbs:
2 | - path: database.sqlite
3 | replicas:
4 | - url: ${REPLICATION_PATH}
--------------------------------------------------------------------------------
/opHandlersBetterSqlite.js:
--------------------------------------------------------------------------------
1 | import {memoizedRun} from "./vm.js";
2 | import _ from "lodash-es";
3 | import {functionToWhere} from "./parser.js";
4 | import Database from 'better-sqlite3';
5 | export const db = new Database(process.env.SQLITE_DATABASE_PATH || './database.sqlite');
6 | db.pragma( 'journal_mode = WAL;' );
7 | let preparedStatementMap = new Map();
8 |
// Generate a 24-character random id from lowercase letters and digits.
// NOTE: Math.random() is not cryptographically secure; ids are opaque keys,
// not secrets.
export const uuid = () => {
  const CHARS = 'abcdefghijklmnopqrstuvwxyz0123456789'
  return Array.from({length: 24}, () =>
    CHARS[Math.floor(Math.random() * CHARS.length)]
  ).join('')
}
21 |
22 | const tablesCreated = new Map();
23 |
// Execute a statement method ('run' | 'get' | 'all') against the db, caching
// prepared statements per SQL string. Returns {statement, data}, or undefined
// if preparation/execution threw (the error is logged, not propagated).
function dbCommand(cmd, sql, parameters = {}) {
  try {
    let statement = preparedStatementMap.get(sql);
    if (statement === undefined) {
      statement = db.prepare(sql);
      preparedStatementMap.set(sql, statement);
    }
    return {statement, data: statement[cmd](parameters)};
  } catch (e) {
    console.error(`Error running: ${sql}`,e)
  }
}
39 |
// Lazily create the backing table for a collection (id + JSON value columns).
// safe() throws on anything but word-character segments, guarding the name
// interpolation below; it returns its input unchanged on success.
export function forceTable(collection) {
  const name = safe(collection);
  if (!tablesCreated.has(name)) {
    dbCommand('run', `CREATE TABLE IF NOT EXISTS ${name} (id TEXT PRIMARY KEY, value JSONB)`)
    tablesCreated.set(name, true);
  }
}
45 |
// Create a UNIQUE index over JSON-extracted fields of a collection.
// Each entry in index.fields is "path.to.property [ASC|DESC]".
// Errors are logged rather than thrown so one bad index does not abort a
// bundle import.
export function forceIndex(collection, index) {
  forceTable(collection)
  try {
    const normalize = (s) => s.replace(/\s+/g, ' ').trim();
    const indexName = normalize(index.fields.join('_'));
    const columns = index.fields.map((field) => {
      const parts = normalize(field).split(' ');
      if (parts.length > 2) {
        throw new Error('Invalid field, must have form: path.to.property DESC');
      }
      if (parts[1] !== undefined && !['ASC', 'DESC'].includes(parts[1])) {
        throw new Error('Invalid field, order should be ASC or DESC');
      }
      // safe() validates both the JSON path and the sort keyword before they
      // reach the SQL string.
      return `JSON_EXTRACT(value, '$.${safe(parts[0])}') ${safe(parts[1] || 'ASC')}`;
    }).join(',');
    dbCommand('run', `CREATE UNIQUE INDEX IF NOT EXISTS '${indexName}' ON ${collection} (${columns})`)
  } catch (e) {
    console.error(e)
  }
}
64 |
// Convert one sqlite row ({id, value: JSON string}) into a plain object with
// the id merged alongside the parsed value's own properties.
function rowDataToObject(row) {
  const parsed = JSON.parse(row.value);
  return {id: row.id, ...parsed};
}

// Convert a result-set of rows into plain objects.
function rowsToObjects(rows) {
  return rows.map((row) => rowDataToObject(row));
}
72 |
// Collection operation handlers backed by better-sqlite3. Each method takes a
// destructured request body; collection names and JSON paths are validated by
// safe()/forceTable before interpolation into SQL.
export const opHandlers = {
  // Return every document as {id, ...value}.
  getAll({collection}) {
    forceTable(collection);
    const result = dbCommand('all', `SELECT * FROM ${collection}`)
    return rowsToObjects(result.data || []);
  },
  // Paginated read: [start, end), like Array.prototype.slice.
  slice({collection, start, end}) {
    forceTable(collection);
    const result = dbCommand('all', `SELECT * FROM ${collection} LIMIT $limit OFFSET $offset`, {
      offset: start,
      limit: end - start
    })
    return rowsToObjects(result.data || []);
  },
  // Read one document, or only the nested property at `path`.
  get({collection, id, path = []}) {
    forceTable(collection);
    if (path.length > 0) {
      const result = dbCommand('get', `SELECT id, json_extract(value, '$.${safe(path.join('.'))}') as value FROM ${collection} WHERE id = $id`, {
        id,
      })
      return result.data.value;
    } else {
      const result = dbCommand('get', `SELECT id,value FROM ${collection} WHERE id = $id`, {
        id,
      })
      return result.data && rowDataToObject(result.data);
    }
  },
  // Upsert a document (or a nested property at `path`) under `id`.
  // Returns {inserted, insertedId}; `inserted` is true when the id was new.
  set({collection, id = uuid(), value, path = []}) {
    forceTable(collection);
    // BUGFIX: detect insert-vs-update BEFORE the upsert. better-sqlite3
    // reports `changes` on the info object returned by run() — not on the
    // statement, which the original read (always undefined, so `inserted`
    // was always false) — and the count is 1 for both arms of an
    // ON CONFLICT upsert, so it cannot distinguish the two cases anyway.
    const inserted = !this.has({collection, id});
    const insertSegment = `INSERT INTO ${collection} (id,value) VALUES ($id,json($value))`;
    if (path.length > 0) {
      // Build an object containing only the nested path for the INSERT arm.
      const object = _.set({}, path, value);
      dbCommand('run', `${insertSegment} ON CONFLICT (id) DO UPDATE SET value = json_set(value,'$.${safe(path.join('.'))}',json($nestedValue)) RETURNING *`, {
        id,
        value: JSON.stringify(object),
        nestedValue: JSON.stringify(value)
      })
    } else {
      dbCommand('run', `${insertSegment} ON CONFLICT (id) DO UPDATE SET value = $value RETURNING *`, {
        id,
        value: JSON.stringify(value)
      })
    }
    return {inserted, insertedId: id}
  },
  // Append with a generated id; returns the new collection size.
  push({collection, value}) {
    forceTable(collection);
    this.set({collection, value});
    return this.size({collection});
  },
  // Delete a document, or remove only the nested property when `path` is set.
  // Returns true when a row was affected.
  delete({collection, id, path = []}) {
    forceTable(collection);
    if (path.length > 0) {
      const result = dbCommand('run', `UPDATE ${collection} SET value = json_remove(value,'$.${safe(path.join('.'))}') WHERE id = $id`, {
        id,
      })
      // BUGFIX: run() exposes the affected-row count on the returned info
      // object; the original read result.statement.changes, which is
      // undefined on a better-sqlite3 Statement.
      return result.data.changes > 0;
    } else {
      const result = dbCommand('run', `DELETE FROM ${collection} WHERE id = $id`, {
        id
      })
      return result.data.changes > 0;
    }
  },
  // True when a document with this id exists.
  has({collection, id}) {
    forceTable(collection);
    const result = dbCommand('get', `SELECT EXISTS(SELECT id FROM ${collection} WHERE id = $id) as found`, {
      id
    })
    return result?.data?.found > 0;
  },
  // All document ids in the collection.
  keys({collection}) {
    forceTable(collection);
    const result = dbCommand('all', `SELECT id FROM ${collection}`)
    return result?.data?.map(r => r.id);
  },
  // Document count.
  size({collection}) {
    forceTable(collection);
    const result = dbCommand('get', `SELECT COUNT(id) as count FROM ${collection}`)
    return result?.data?.count || 0;
  },
  // Drop the backing table entirely.
  clear({collection}) {
    forceTable(collection);
    dbCommand('run', `DROP TABLE ${collection}`);
    tablesCreated.delete(collection);
    return true;
  },
  // Compile a chain of client operations (filter/orderBy/slice/length/map)
  // into a single SQL query; only `map` runs in the JS sandbox afterwards.
  filter({collection, operations}) {
    console.time('Filter exec')
    forceTable(collection);
    const lengthOp = operations.find(op => op.type === 'length');
    let query = `SELECT ${lengthOp?'COUNT(*) as count':'*'} FROM ${collection}`
    let queryParams = {};

    // Serialized filter predicates are translated to SQL WHERE clauses.
    const where = operations.filter(op => op.type === 'filter').map(op => functionToWhere(op.data.callbackFn, op.data.thisArg)).join(' AND ');
    if(where) query += ` WHERE ${where} `

    const orderBy = operations.filter(op => op.type === 'orderBy').map(op => `json_extract(value,'$.${op.data.property}') ${op.data.order}`).join(' ');
    if(orderBy) query += ` ORDER BY ${orderBy} `

    const sliceOp = operations.find(op => op.type === 'slice');
    if(sliceOp) {
      query += ` LIMIT $limit OFFSET $offset `;
      queryParams.offset = sliceOp?.data.start;
      queryParams.limit = sliceOp?.data.end - sliceOp?.data.start;
    }

    if(lengthOp) {
      // Return without running map operation, doesn't make sense to waste time mapping and then counting.
      const result = dbCommand('get', query, queryParams)
      console.timeEnd('Filter exec')
      return result?.data?.count;

    } else {
      const result = dbCommand('all', query, queryParams)
      const mapOp = operations.find(op => op.type === 'map');
      const objects = rowsToObjects(result.data || []);
      if(mapOp) {
        console.timeEnd('Filter exec')
        return memoizedRun({array: objects, ...mapOp.data.thisArg}, `array.map(${mapOp.data.callbackFn})`)
      } else {
        console.timeEnd('Filter exec')
        return objects;
      }
    }
  },
  // Evaluate a serialized predicate in the sandbox over the full collection.
  find({collection, callbackFn, thisArg}) {
    forceTable(collection);
    const result = this.getAll({collection});
    return memoizedRun({array: result, ...thisArg}, `array.find(${callbackFn})`)
  },
  // Evaluate a serialized mapper in the sandbox over the full collection.
  map({collection, callbackFn, thisArg}) {
    forceTable(collection);
    const result = this.getAll({collection});
    return memoizedRun({array: result, ...thisArg}, `array.map(${callbackFn})`)
  }
}
214 |
// SQL-injection guard for identifiers and JSON paths: every dot-separated
// segment must be purely word characters ([A-Za-z0-9_]). Throws on anything
// else (quotes, spaces, SQL metacharacters); returns the input unchanged.
function safe(string) {
  for (const segment of string.split('.')) {
    if (!/^\w+$/.test(segment)) {
      throw new Error('Unsafe string. Only alphanumerical chars allowed.')
    }
  }
  return string;
}
221 |
222 | global.opHandlers = opHandlers;
--------------------------------------------------------------------------------
/opHandlersSqlite.js:
--------------------------------------------------------------------------------
1 | import sqlite3 from "sqlite3";
2 | import {memoizedRun} from "./vm.js";
3 | import _ from "lodash-es"
4 | import {functionToWhere} from "./parser.js";
5 |
// Database file location is configurable via SQLITE_DATABASE_PATH.
export const db = new sqlite3.Database(process.env.SQLITE_DATABASE_PATH || './database.sqlite');
// WAL journal mode improves concurrent read/write behavior.
db.run( 'PRAGMA journal_mode = WAL;' );
// Prepared statements cached per SQL string; see runPromise.
let preparedStatementMap = new Map();
9 |
// Generates a 24-character random id over [a-z0-9].
// NOTE(review): Math.random is not cryptographically secure — confirm ids
// never need to be unguessable.
export const uuid = () => {
  const CHARS = 'abcdefghijklmnopqrstuvwxyz0123456789'
  let autoId = ''
  while (autoId.length < 24) {
    const index = Math.floor(Math.random() * CHARS.length)
    autoId += CHARS[index]
  }
  return autoId
}
22 |
23 | const tablesCreated = new Map();
24 |
// Promisified wrapper around node-sqlite3's callback API.
// `cmd` is the statement method to invoke ('run' | 'get' | 'all'), `sql` the
// statement text and `params` its named ($-prefixed) parameters. Prepared
// statements are cached per SQL string and reused across calls.
// NOTE(review): cached statements are never finalized, so the cache grows by
// one entry per distinct SQL string — confirm that is acceptable here.
async function runPromise(cmd, sql, params) {
  return new Promise((resolve, reject) => {
    let statement;
    if(preparedStatementMap.has(sql)) {
      statement = preparedStatementMap.get(sql);
    } else {
      statement = db.prepare(sql);
      preparedStatementMap.set(sql, statement)
    }
    // `function` (not an arrow) so sqlite3 can bind the statement context to
    // `this` (which carries metadata such as `changes` for write commands).
    statement[cmd](params, function (error, data) {
      if (error) {
        reject(error)
      } else {
        // Expose the statement context alongside the rows so callers can read
        // result.statement.changes etc.
        resolve({statement: this, data})
      }
    })
  })
}
43 |
// Ensures the backing table for `collection` exists (id + JSONB value).
// safe() rejects collection names that are not plain identifiers, which also
// guards the interpolation below against SQL injection.
export async function forceTable(collection) {
  const name = safe(collection);
  if (tablesCreated.has(name)) return;
  await runPromise('run', `CREATE TABLE IF NOT EXISTS ${name} (id TEXT PRIMARY KEY, value JSONB)`)
  tablesCreated.set(name, true);
}
49 |
// Ensures a UNIQUE index exists on `collection` over the given JSON paths.
// Each entry of index.fields is 'path.to.property' with an optional trailing
// 'ASC'/'DESC'. Errors are logged and swallowed so a bad index definition
// cannot take the server down.
export async function forceIndex(collection, index) {
  await forceTable(collection)
  try {
    // Index name derives from the field list; it may contain dots/spaces but
    // is quoted in the CREATE INDEX statement below.
    const indexName = index.fields.join('_').replace(/\s+/g, ' ').trim()
    const columns = index.fields.map(field => {
      const parts = field.replace(/\s+/g, ' ').trim().split(' ')
      if (parts.length > 2) {
        throw new Error('Invalid field, must have form: path.to.property DESC');
      } else if (parts[1] !== undefined && !['ASC', 'DESC'].includes(parts[1])) {
        throw new Error('Invalid field, order should be ASC or DESC');
      }
      // safe() guards both the JSON path and the order keyword against
      // injection into the SQL string.
      return `JSON_EXTRACT(value, '$.${safe(parts[0])}') ${safe(parts[1] || 'ASC')}`
    }).join(',')
    await runPromise('run', `CREATE UNIQUE INDEX IF NOT EXISTS '${indexName}' ON ${collection} (${columns})`)
  } catch (e) {
    console.error(e)
  }
}
68 |
// Converts a raw row ({id, value: jsonText}) into a document object whose
// parsed fields are merged over the id (a stored `id` field wins).
function rowDataToObject(data) {
  const parsed = JSON.parse(data.value);
  return Object.assign({id: data.id}, parsed);
}
72 |
// Converts an array of raw rows into document objects.
function rowsToObjects(rows) {
  return rows.map((row) => rowDataToObject(row));
}
76 |
77 | export const opHandlers = {
78 | async getAll({collection}) {
79 | await forceTable(collection);
80 | const result = await runPromise('all', `SELECT * FROM ${collection}`)
81 | return rowsToObjects(result.data || []);
82 | },
83 | async slice({collection, start, end}) {
84 | await forceTable(collection);
85 | const result = await runPromise('all', `SELECT * FROM ${collection} LIMIT $limit OFFSET $offset`, {
86 | $offset: start,
87 | $limit: end - start
88 | })
89 | return rowsToObjects(result.data || []);
90 | },
91 | async get({collection, id, path = []}) {
92 | await forceTable(collection);
93 | if (path.length > 0) {
94 | const result = await runPromise('get', `SELECT id, json_extract(value, '$.${safe(path.join('.'))}') as value FROM ${collection} WHERE id = $id`, {
95 | $id: id,
96 | })
97 | return result.data.value;
98 | } else {
99 | const result = await runPromise('get', `SELECT id,value FROM ${collection} WHERE id = $id`, {
100 | $id: id,
101 | })
102 | return result.data && rowDataToObject(result.data);
103 | }
104 | },
  // Upserts a document. With a non-empty `path`, only that nested property is
  // written (json_set on conflict); otherwise the whole value is replaced.
  // Returns {inserted, insertedId}.
  async set({collection, id = uuid(), value, path = []}) {
    await forceTable(collection);
    const insertSegment = `INSERT INTO ${collection} (id,value) VALUES ($id,json($value))`;
    let result;
    if (path.length > 0) {
      // Make new object from path
      const object = _.set({}, path, value);
      result = await runPromise('run', `${insertSegment} ON CONFLICT (id) DO UPDATE SET value = json_set(value,'$.${safe(path.join('.'))}',json($nestedValue)) RETURNING *`, {
        $id: id,
        $value: JSON.stringify(object),
        $nestedValue: JSON.stringify(value)
      })
    } else {
      // NOTE(review): unlike the INSERT segment this UPDATE stores $value
      // without wrapping it in json() — confirm both paths persist the same
      // representation.
      result = await runPromise('run', `${insertSegment} ON CONFLICT (id) DO UPDATE SET value = $value RETURNING *`, {
        $id: id,
        $value: JSON.stringify(value)
      })
    }
    // NOTE(review): presumably `changes === 0` is meant to distinguish insert
    // from update, but sqlite3 reports changes for both cases — verify this
    // flag against the callers' expectations.
    const inserted = result.statement.changes === 0;
    return {inserted, insertedId: id}
  },
126 | async push({collection, value}) {
127 | await forceTable(collection);
128 | await this.set({collection, value});
129 | return await this.size({collection});
130 | },
131 | async delete({collection, id, path = []}) {
132 | await forceTable(collection);
133 | if (path.length > 0) {
134 | const result = await runPromise('run', `UPDATE ${collection} SET value = json_remove(value,'$.${safe(path.join('.'))}') WHERE id = $id`, {
135 | $id: id,
136 | })
137 | return {deletedCount: result.statement.changes};
138 | } else {
139 | const result = await runPromise('run', `DELETE FROM ${collection} WHERE id = $id`, {
140 | $id: id
141 | })
142 | return {deletedCount: result.statement.changes};
143 | }
144 | },
145 | async has({collection, id}) {
146 | await forceTable(collection);
147 | const result = await runPromise('get', `SELECT EXISTS(SELECT id FROM ${collection} WHERE id = $id) as found`, {
148 | $id: id
149 | })
150 | return result?.data.found > 0;
151 | },
152 | async keys({collection}) {
153 | await forceTable(collection);
154 | const result = await runPromise('all', `SELECT id FROM ${collection}`)
155 | return result?.data?.map(r => r.id);
156 | },
157 | async size({collection}) {
158 | await forceTable(collection);
159 | const result = await runPromise('get', `SELECT COUNT(id) as count FROM ${collection}`)
160 | return result?.data?.count || 0;
161 | },
162 | async clear({collection}) {
163 | await forceTable(collection);
164 | await runPromise('run', `DROP TABLE ${collection}`);
165 | tablesCreated.delete(collection);
166 | return true;
167 | },
168 | async filter({collection, operations}) {
169 | await forceTable(collection);
170 | const lengthOp = operations.find(op => op.type === 'length');
171 | let query = `SELECT ${lengthOp?'COUNT(*) as count':'*'} FROM ${collection}`
172 | let queryParams = {};
173 |
174 | const where = operations.filter(op => op.type === 'filter').map(op => functionToWhere(op.data.callbackFn, op.data.thisArg)).join(' AND ');
175 | if(where) query += ` WHERE ${where} `
176 |
177 | const orderBy = operations.filter(op => op.type === 'orderBy').map(op => `json_extract(value,'$.${op.data.property}') ${op.data.order}`).join(' ');
178 | if(orderBy) query += ` ORDER BY ${orderBy} `
179 |
180 | const sliceOp = operations.find(op => op.type === 'slice');
181 | if(sliceOp) {
182 | query += ` LIMIT $limit OFFSET $offset `;
183 | queryParams.$offset = sliceOp?.data.start;
184 | queryParams.$limit = sliceOp?.data.end - sliceOp?.data.start;
185 | }
186 |
187 | if(lengthOp) {
188 | // Return without running map operation, doesn't make sense to waste time mapping and then counting.
189 | const result = await runPromise('get', query, queryParams)
190 | return result?.data?.count;
191 | } else {
192 | const result = await runPromise('all', query, queryParams)
193 | const mapOp = operations.find(op => op.type === 'map');
194 | const objects = rowsToObjects(result.data || []);
195 | if(mapOp) {
196 | return memoizedRun({array: objects, ...mapOp.data.thisArg}, `array.map(${mapOp.data.callbackFn})`)
197 | } else {
198 | return objects;
199 | }
200 | }
201 | },
202 | async find({collection, callbackFn, thisArg}) {
203 | await forceTable(collection);
204 | const result = await this.getAll({collection});
205 | return memoizedRun({array: result, ...thisArg}, `array.find(${callbackFn})`)
206 | },
207 | async map({collection, callbackFn, thisArg}) {
208 | await forceTable(collection);
209 | const result = await this.getAll({collection});
210 | return memoizedRun({array: result, ...thisArg}, `array.map(${callbackFn})`)
211 | }
212 | }
213 |
// Validates a dot-separated identifier (collection name or JSON path) before
// it is interpolated into SQL; throws when any segment contains a non-word
// character. Returns the input unchanged when valid.
function safe(string) {
  const unsafe = string.split('.').some((segment) => !/^\w+$/.test(segment));
  if (unsafe) throw new Error('Unsafe string. Only alphanumerical chars allowed.')
  return string;
}
--------------------------------------------------------------------------------
/operationFallback.js:
--------------------------------------------------------------------------------
// Maps each database operation name to a broad permission category. When a
// collection's rules file does not define a rule for the specific operation,
// the rule registered under this fallback category ('read' or 'write') is
// used instead.
export default {
  'filter' : 'read',
  'find' : 'read',
  'map' : 'read',
  'getAll' : 'read',
  'forEach' : 'read',
  'entries' : 'read',
  'values' : 'read',
  'has' : 'read',
  'keys' : 'read',
  'push' : 'write',
  'size' : 'read',
  'length' : 'read',
  'clear' : 'write',
  'delete' : 'write',
  'set' : 'write',
  'get' : 'read',
}
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@jsdb/server",
3 | "version": "0.0.17",
  "description": "JSDB server. Install with: npm i @jsdb/server",
5 | "main": "server.js",
6 | "type": "module",
7 | "scripts": {
8 | "test": "node api.test.js",
9 | "start": "node server.js"
10 | },
11 | "author": "jpcapdevila",
12 | "license": "SSPL",
13 | "dependencies": {
14 | "@jsdb/sdk": "^0.0.36",
15 | "acorn": "^8.7.1",
16 | "adm-zip": "^0.5.9",
17 | "bcryptjs": "^2.4.3",
18 | "better-sqlite3": "^7.5.3",
19 | "cors": "^2.8.5",
20 | "dotenv": "^16.0.0",
21 | "estree-walker": "^3.0.1",
22 | "express": "^4.17.3",
23 | "express-rate-limit": "^6.3.0",
24 | "jsonwebtoken": "^8.5.1",
25 | "lodash-es": "^4.17.21",
26 | "passport": "^0.5.2",
27 | "passport-jwt": "^4.0.0",
28 | "passport-local": "^1.0.0",
29 | "sqlite3": "^5.0.6",
30 | "vm2": "^3.9.11",
31 | "ws": "^8.5.0"
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/parser.js:
--------------------------------------------------------------------------------
1 | import * as acorn from "acorn";
2 | import {walk} from "estree-walker";
3 | import _ from "lodash-es";
4 | import * as assert from "assert";
5 |
/**
 * Compiles a one-line arrow-function predicate (given as source text) into a
 * SQLite WHERE fragment over json_extract(value, ...) expressions.
 *
 * @param {string} fn - Source of an arrow function, e.g. "msg => msg.n === 1".
 * @param {object} thisArg - Values the callback references besides its own
 *   parameter; member expressions not rooted at the parameter are resolved
 *   against this object and inlined as literals.
 * @returns {string} SQL fragment (without the leading WHERE keyword).
 * @throws {assert.AssertionError} when fn is not a single-argument arrow
 *   function whose body is a one-line Binary/Logical/Call expression.
 */
export function functionToWhere(fn, thisArg) {
  // acorn 8 expects ecmaVersion to be set explicitly; arrow functions need
  // ES2015+, so parse with the newest supported grammar.
  const ast = acorn.parse(fn, {ecmaVersion: 'latest'});
  const arrowFunction = ast.body[0].expression;
  assert.equal(arrowFunction.type, 'ArrowFunctionExpression', 'Filter or find callbacks should be arrow functions.')
  assert.equal(arrowFunction.params.length, 1, 'Filter or find callbacks only receive one argument.')
  assert.match(arrowFunction.body.type, /BinaryExpression|LogicalExpression|CallExpression/, 'Callback body should be a one-liner Binary or Logical expression. Blocks are not allowed.')
  const param = arrowFunction.params[0].name;
  let string = ''
  // JS operators whose SQLite spelling differs. Fix: '!==' was previously
  // passed through verbatim, producing invalid SQL ('!==' is not a SQLite
  // operator); map it to '!='.
  let operatorMaps = {
    '===': '==',
    '!==': '!=',
    '&&': 'AND',
    '||': 'OR'
  }

  function getOperator(operator) {
    return operatorMaps[operator] || operator;
  }

  walk(arrowFunction.body, {
    enter(node, parent, prop, index) {
      if (node.type === 'LogicalExpression') {
        string += '('
      }
      if (node.type === 'BinaryExpression') {
        string += '('
      }
      // this.like(x.prop, pattern) / this.notLike(...) map to SQL LIKE.
      if (node.type === 'CallExpression' && node.callee.object.type === 'ThisExpression' && ['like','notLike'].includes(node.callee.property.name)) {
        const columnPath = fn.substring(node.arguments[0].object.end, node.arguments[0].end);
        const operator = node.callee.property.name === 'like' ? 'LIKE' : 'NOT LIKE';
        let likeValue;
        if(node.arguments[1].type === 'Literal') {
          likeValue = node.arguments[1].value;
        } else if(node.arguments[1].type === 'MemberExpression') {
          const likeValuePath = nodeString(fn, node.arguments[1]);
          likeValue = _.get(thisArg, likeValuePath)
        }
        string += `json_extract(value,'$${columnPath}') ${operator} "${likeValue}"`
        this.skip()
      }
      if (node.type === 'MemberExpression') {
        if (node.object.name === param) {
          // Property access on the callback parameter becomes a JSON path.
          const path = fn.substring(node.object.end, node.end)
          string += `json_extract(value,'$${path}')`
        } else {
          // Anything else is resolved against thisArg and inlined.
          const path = nodeString(fn, node);
          string += JSON.stringify(_.get(thisArg, path))
        }
        if (prop === 'left') {
          string += getOperator(parent.operator)
        }
        this.skip()
      }
      if (node.type === 'Literal') {
        string += nodeString(fn, node)
        if (prop === 'left') {
          string += getOperator(parent.operator)
        }
        this.skip()
      }
      // `new Date()` is evaluated server-side at compile time.
      if (node.type === 'NewExpression' && node.callee.name === 'Date') {
        string += JSON.stringify(new Date())
        if (prop === 'left') {
          string += getOperator(parent.operator)
        }
        this.skip()
      }
    },
    leave(node, parent, prop, index) {
      if (node.type === 'LogicalExpression') {
        string += ')'
        if (prop === 'left') {
          string += getOperator(parent.operator)
        }
      }
      if (node.type === 'BinaryExpression') {
        string += ')'
        if (prop === 'left') {
          string += getOperator(parent.operator)
        }
      }

    }
  });
  // NOTE(review): rewriting every double quote to a single quote also touches
  // quotes inside inlined literal values — confirm values never contain them.
  return string.replaceAll(`"`, `'`)
}
91 |
// Returns the source text of `fn` covered by an AST node's [start, end) span.
function nodeString(fn, node) {
  const {start, end} = node;
  return fn.slice(start, end);
}
--------------------------------------------------------------------------------
/run.sh:
--------------------------------------------------------------------------------
#!/bin/bash
set -e

# Database file location. Fix: keep in sync with the Node server, which reads
# SQLITE_DATABASE_PATH and falls back to ./database.sqlite.
DB_PATH="${SQLITE_DATABASE_PATH:-./database.sqlite}"

# Restore the database if it does not already exist.
if [ -f "$DB_PATH" ]; then
	echo "Database already exists, skipping restore"
else
	echo "No database found, restoring from replica if exists"
	litestream restore -v -if-replica-exists -o "$DB_PATH" "${REPLICATION_PATH}"
fi

# Run litestream with your app as the subprocess.
exec litestream replicate -exec "node server.js"
--------------------------------------------------------------------------------
/server.js:
--------------------------------------------------------------------------------
1 | import express from "express";
2 | import cors from "cors";
3 | import url from 'url';
4 | import fs from 'fs';
5 | import path from "path";
6 | import rateLimit from 'express-rate-limit'
7 | import passport from "passport";
8 | import { WebSocketServer } from 'ws';
9 | import 'dotenv/config'
10 | import authApp from './authApp.js';
11 | import dbApp from "./dbApp.js";
12 | import functionsApp from "./functionsApp.js";
13 | import EventEmitter from 'events';
14 | import _ from 'lodash-es';
15 | import {
16 | functions,
17 | importFromBase64,
18 | importFromPath, indexes,
19 | resolveMiddlewareFunction,
20 | rules,
21 | triggers
22 | } from "./lifecycleMiddleware.js";
23 | import {opHandlers} from "./opHandlersBetterSqlite.js";
24 | import jwt from "jsonwebtoken";
25 |
const wsServer = new WebSocketServer({ noServer: true });
// In-process event bus: database change events are published here and fanned
// out to the websocket subscribers registered below.
const realtimeListeners = new EventEmitter();

// TODO : move this somewhere proper

// Realtime protocol: each websocket message is a JSON-encoded operation
// ('get' | 'filter' | 'push'). Access rules are re-checked for every message
// using the same rules middleware as the HTTP endpoints.
// NOTE(review): handlers registered on realtimeListeners are never removed
// when the socket closes — this looks like a listener/memory leak; confirm
// and clean up on the socket 'close' event.
wsServer.on('connection', socket => {
  socket.on('message', async message => {
    try {
      const parsedMessage = JSON.parse(message);
      try {
        // Optionally authenticate, then authorize via the collection's rules;
        // a falsy rule result rejects the message with an error frame.
        let token;
        if(parsedMessage.authorization) {
          token = jwt.verify(parsedMessage.authorization.replaceAll('Bearer ',''), process.env.JWT_SECRET);
        }
        const ruleFunction = await resolveMiddlewareFunction('rules', parsedMessage.collection, parsedMessage.operation);
        const ruleResult = await ruleFunction({...parsedMessage, user: token?.user})
        if (ruleResult) {
          // TODO : How do we pass this along for the full duration of the subscription
          // req.excludeFields = ruleResult?.excludeFields;
          // req.where = ruleResult?.where;
        } else {
          return socket.send(JSON.stringify({
            operation: 'error',
            context: message,
            message: 'Unauthorized!'
          }));
        }
      } catch (e) {
        console.error(e);
        return socket.send(JSON.stringify({
          operation: 'error',
          context: message,
          message: e.message
        }));
      }

      if(parsedMessage.operation === 'get') {
        // Subscribe to a single document (optionally a nested path): send the
        // current value immediately, then again on each change event.
        const {collection, id, path = [], operation} = parsedMessage;
        const eventName = `${collection}.${id}`;
        function documentChangeHandler(documentData) {
          let value;
          if(path.length > 0) {
            value = _.get(documentData, path);
          } else {
            value = documentData;
          }
          socket.send(JSON.stringify({
            fullPath: `${collection}.${id}` + (path.length > 0 ? `.${path.join('.')}` : ''),
            value,
            operation,
            content: 'value'
          }));
        }
        // No await: the better-sqlite3 opHandlers imported above are sync.
        const document = opHandlers.get({collection, id})
        documentChangeHandler(document)
        realtimeListeners.on(eventName, documentChangeHandler)
      } else if (parsedMessage.operation === 'filter') {
        // Subscribe to a filtered query: re-run the whole query on every
        // change to the collection and push a full 'reset' result set.
        const {collection, operations, operation, eventName} = parsedMessage;
        const serverEventName = collection;
        async function collectionChangeHandler(changeData) {
          if(changeData.event === 'drop') {
            socket.send(JSON.stringify({
              content: changeData.event,
              operation,
              eventName
            }));
          } else {
            try {
              const filteredResult = opHandlers.filter({collection, operations});
              socket.send(JSON.stringify({
                content: 'reset',
                value: filteredResult,
                eventName,
                operation,
                collection
              }))
            } catch (e) {
              console.error('Error running filter')
            }
          }
        }
        try {
          // Initial result, sent before any change events arrive.
          const filteredResult = opHandlers.filter({collection, operations});
          socket.send(JSON.stringify({
            content: 'reset',
            value: filteredResult,
            eventName,
            operation,
            collection
          }))
          realtimeListeners.on(serverEventName, collectionChangeHandler)
        } catch (e) {
          console.error('Error running filter')
        }

      } else if(parsedMessage.operation === 'push') {
        // One-shot insert over the socket; echoes the result back.
        const {collection, operation, eventName} = parsedMessage;
        const id = opHandlers.push(parsedMessage);
        socket.send(JSON.stringify({
          value: id,
          eventName,
          operation,
          collection
        }));
      }
    } catch (e) {
      console.error(e);
    }
  });
});
136 |
export const app = express();

// Detect whether this module is the process entry point (node server.js) or
// was imported as a library by a host application.
const entryPointUrl = url.pathToFileURL(process.argv[1]).href;
const runningAsLibrary = import.meta.url !== entryPointUrl;

if(runningAsLibrary) {
  // Load the host project's .jsdb directory (rules/triggers/functions).
  const customPath = path.resolve(url.fileURLToPath(entryPointUrl), '../.jsdb');
  await importFromPath(customPath);
}

try {
  // Restore the most recently uploaded code bundle from the database, if any;
  // failures are logged and the server starts without a bundle.
  const bundles = opHandlers.getAll({collection: 'bundles'});
  const currentDbBundle = bundles[0];
  if(currentDbBundle) {
    await importFromBase64(currentDbBundle.file.string);
  }
} catch (e) {
  console.error(e);
}
156 |
// Optional global rate limiting, enabled by setting RATE_LIMIT (requests per
// minute per client).
if(process.env.RATE_LIMIT) {
  // Apply the rate limiting middleware to all requests
  app.use(rateLimit({
    windowMs: 60 * 1000, // 1 minute
    max: process.env.RATE_LIMIT,
    standardHeaders: true, // Return rate limit info in the `RateLimit-*` headers
    legacyHeaders: false, // Disable the `X-RateLimit-*` headers
  }))
}

app.use(cors());
// Matches ISO-8601 timestamps so JSON request bodies can revive them as Date
// objects. NOTE(review): the fractional-seconds group `(\.\d*)` is mandatory,
// so timestamps without milliseconds (e.g. 2022-01-01T00:00:00Z) are NOT
// revived — confirm whether that is intended.
const regexpIsoDate = /^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*))(?:Z|(\+|-)([\d|:]*))?$/;

app.use(express.json({
  // Revive ISO date strings into Date instances while parsing bodies.
  reviver(key, value) {
    if(regexpIsoDate.test(value)) {
      return new Date(value);
    } else {
      return value;
    }
  },
  limit: '1mb'
}));

// Attach the realtime event bus to every request, and authenticate the JWT
// when an Authorization header is present; anonymous requests pass through.
app.use((req, res, next) => {
  req.realtimeListeners = realtimeListeners;
  const authorization = req.get('Authorization');
  if (authorization) {
    passport.authenticate('jwt', { session: false })(req, res, next);
  } else {
    next()
  }
})
const port = process.env.PORT || 3001;

// Mount the three sub-applications: authentication, database operations and
// user-defined functions.
app.use('/auth', authApp);
app.use('/db', dbApp);
app.use('/functions', functionsApp);
// Introspection endpoint: reports which rules/triggers/functions/indexes are
// registered, serializing function values as the placeholder string 'fn'.
app.use('/__discovery', (req,res) => {
  const response = JSON.parse(JSON.stringify({rules, triggers, functions, indexes},(key, value) => {
    if(typeof value === 'function') {
      return 'fn'
    }
    return value;
  }))
  res.send(response)
})
204 |
// Serve static hosting assets when the entry-point project ships a
// .jsdb/hosting directory, falling back to index.html for client-side routing.
const hostingPath = path.resolve(url.fileURLToPath(entryPointUrl), '../.jsdb/hosting');

if (fs.existsSync(hostingPath)) {
  app.use(express.static(hostingPath, {
    fallthrough: true,
    maxAge: '5m'
  }));
  app.use('*', function (req, res) {
    // Fix: res.finished is deprecated; res.writableEnded is the supported way
    // to check the static middleware has not already sent a response.
    if(!res.writableEnded) res.sendFile(path.resolve(hostingPath, 'index.html'));
  })
}
216 |
/**
 * Boots the HTTP server on `port` and routes websocket upgrade requests into
 * wsServer so the realtime protocol shares the same port.
 */
export function start() {
  const httpServer = app.listen(port, () => console.log(`Listening on port ${port}`));
  httpServer.on('upgrade', (request, socket, head) => {
    wsServer.handleUpgrade(request, socket, head, (ws) => {
      wsServer.emit('connection', ws, request);
    });
  });
}
227 |
// When executed directly (node server.js) start listening immediately; when
// imported as a library the host application calls start() itself.
if (!runningAsLibrary) {
  start();
}
--------------------------------------------------------------------------------
/tests/.jsdb/functions/helloWorld.js:
--------------------------------------------------------------------------------
// Sample function used by the API tests; returns a fixed payload.
// Fix: name the previously anonymous default export so it appears usefully in
// stack traces and profiles (callers are unaffected).
export default function helloWorld() {
  return {message:'IT WORKS!'}
}
--------------------------------------------------------------------------------
/tests/.jsdb/functions/remoteInserts.js:
--------------------------------------------------------------------------------
// Benchmark function: inserts 1000 log documents directly through the
// server-side opHandlers (exposed via global.opHandlers; the better-sqlite3
// implementation is synchronous, hence no awaits), logs the resulting size,
// clears the collection, and returns the elapsed insert time in ms.
const opHandlers = global.opHandlers;
export default async function () {
  const timeStart = Date.now();
  for(let i = 0; i<1000;i++) {
    opHandlers.push({collection:'serverLogs', value:{type: 'info', text: 'LIVE LOG!', date: new Date()}})
  }
  const timeEnd = Date.now();
  console.log('Size',opHandlers.size({collection: 'serverLogs'}));
  opHandlers.clear({collection: 'serverLogs'});
  return {time: (timeEnd-timeStart)};
  // return {time: 0}
}
--------------------------------------------------------------------------------
/tests/api.test.js:
--------------------------------------------------------------------------------
1 | import {start} from "../server.js";
2 | import {setServerUrl, db, auth, functions, initApp} from "@jsdb/sdk";
3 | import * as assert from "assert";
4 | import {opHandlers} from "../opHandlersBetterSqlite.js";
5 | start();
6 | setServerUrl('http://localhost:3001');
7 |
// Outcome registries for the ad-hoc test runner below.
const passedMap = new Map();
const failedMap = new Map();

/**
 * Runs one named test case: a passing callback is recorded in passedMap, a
 * throwing/rejecting one is traced and recorded in failedMap.
 */
async function test(name, fn) {
  try {
    await fn();
    passedMap.set(name, true);
  } catch (error) {
    console.trace(name, error.message);
    failedMap.set(name, error);
  }
}
20 |
21 | // try {
22 | // await auth.createAccount({email: `test32edadas@healthtree.org`, password: 'dhs87a6dasdg7as8db68as67da'})
23 | // await auth.signIn({email: `test32edadas@healthtree.org`, password: 'dhs87a6dasdg7as8db68as67da'})
24 | // } catch (e) {
25 | //
26 | // }
27 | //
28 | // await test('Initial clear map using .clear()', async() => {
29 | // await db.msgs.clear();
30 | // })
31 | //
32 | // await test('set message', async() => {
33 | // await db.msgs.set('x',{text: 'xyz'});
34 | // })
35 | //
36 | // await test('get keys using .keys()', async() => {
37 | // const keys = await db.msgs.keys();
38 | // assert.deepStrictEqual(keys, ['x'])
39 | // })
40 | //
41 | // await test('get values using .values()', async() => {
42 | // const values = await db.msgs.values();
43 | // assert.deepStrictEqual(Array.from(values), [{id:'x',text: 'xyz'}])
44 | // })
45 | //
46 | // await test('get message using .get()', async() => {
47 | // const startMs = Date.now();
48 | // const msg = await db.msgs.get('x');
49 | // const endMs = Date.now();
50 | // console.log('Get by id time', endMs - startMs)
51 | // assert.equal(msg.text, 'xyz')
52 | // })
53 | //
54 | // await test('set message', async() => {
55 | // await db.msgs.set('x',{text: 'xyz'});
56 | // })
57 | //
58 | // await test('get keys using .keys()', async() => {
59 | // const keys = await db.msgs.keys();
60 | // assert.deepStrictEqual(keys, ['x'])
61 | // })
62 | //
63 | // await test('get values using .values()', async() => {
64 | // const values = await db.msgs.values();
65 | // assert.deepStrictEqual(Array.from(values), [{id:'x',text: 'xyz'}])
66 | // })
67 | //
68 | // await test('get message using .get()', async() => {
69 | // const msg = await db.msgs.get('x');
70 | // assert.equal(msg.text, 'xyz')
71 | // })
72 | //
73 | // await test('check if message exists using .has()', async() => {
74 | // const xExists = await db.msgs.has('x');
75 | // const yExists = await db.msgs.has('y');
76 | // assert.equal(xExists, true)
77 | // assert.equal(yExists, false)
78 | // })
79 | //
80 | // await test('get size using .size', async() => {
81 | // const size = await db.msgs.size;
82 | // assert.equal(size, 1)
83 | // })
84 | //
85 | // await test('get size using .length', async() => {
86 | // const size = await db.msgs.length;
87 | // assert.equal(size, 1)
88 | // })
89 | //
90 | // await test('get message using dot notation', async() => {
91 | // const msg = await db.msgs.x;
92 | // assert.equal(msg.text, 'xyz')
93 | // })
94 | //
95 | // await test('get message property using dot notation', async() => {
96 | // const text = await db.msgs.x.text;
97 | // assert.equal(text, 'xyz')
98 | // })
99 | //
100 | // await test('delete message property', async() => {
101 | // const wasDeleted = await delete db.msgs.x.text;
102 | // await new Promise(resolve => setTimeout(resolve, 1000))
103 | // const text = await db.msgs.x.text;
104 | // assert.equal(text, undefined)
105 | // assert.equal(wasDeleted, true)
106 | // })
107 | //
108 | // await test('delete message using .delete()', async() => {
109 | // const wasDeleted = await db.msgs.delete('x');
110 | // const msg = await db.msgs.x;
111 | // assert.equal(msg, undefined);
112 | // assert.equal(wasDeleted, true);
113 | // })
114 | //
115 | // await test('add message using .push()', async() => {
116 | // const result = await db.msgs.push({text:'FUN!', date: new Date()});
117 | // assert.equal(typeof result, 'string')
118 | // })
119 | //
120 | // await test('find message using .find()', async() => {
121 | // const msg = await db.msgs.find(msg => msg.text === 'FUN!');
122 | // assert.equal(msg.text, 'FUN!')
123 | // })
124 | //
125 | // await test('find message using .find() and thisArg', async() => {
126 | // const msg = await db.msgs.find(msg => msg.text === self.text, {self:{text:'FUN!'}});
127 | // assert.equal(msg.text, 'FUN!')
128 | // })
129 | //
130 | // await test('filter message using .filter() and thisArg', async() => {
131 | // const msgs = await db.msgs.filter(msg => msg.text === self.text, {self:{text:'FUN!'}});
132 | // assert.equal(msgs.length, 1)
133 | // assert.equal(msgs[0].text, 'FUN!')
134 | // })
135 | //
136 | // await test('filter message using .filter() with date', async() => {
137 | // const msgs = await db.msgs.filter(msg => msg.date < new Date());
138 | // assert.equal(msgs.length, 1)
139 | // assert.equal(msgs[0].text, 'FUN!')
140 | // })
141 | //
142 | // await test('filter message using .filter() & notLike', async() => {
143 | // const msgs = await db.msgs.filter(msg => this.notLike(msg.text, '%U%'));
144 | // assert.equal(msgs.length, 0)
145 | // })
146 | //
147 | // await test('filter message using .filter() & like', async() => {
148 | // const msgs = await db.msgs.filter(msg => this.like(msg.text, '%U%'));
149 | // assert.equal(msgs.length, 1)
150 | // assert.equal(msgs[0].text, 'FUN!')
151 | // })
152 | //
153 | // await test('filter message using .filter() & like from thisArg', async() => {
154 | // const msgs = await db.msgs.filter(msg => this.like(msg.text, ctx.like), {ctx: {like:'%U%'}});
155 | // assert.equal(msgs.length, 1)
156 | // assert.equal(msgs[0].text, 'FUN!')
157 | // })
158 | //
159 | //
160 | // await test('filter message using .filter() & notLike', async() => {
161 | // const msgs = await db.msgs.filter(msg => this.like(msg.text, '%UX%'));
162 | // assert.equal(msgs.length, 0)
163 | // })
164 | //
165 | // await test('slice messages to get 1 message', async() => {
166 | // const msgs = await db.msgs.slice(0,1);
167 | // assert.equal(msgs.length, 1)
168 | // assert.equal(msgs[0].text, 'FUN!')
169 | // })
170 | //
171 | // await test('filter message using chainable .filter .sortBy .slice', async() => {
172 | // const msgs = await db.msgs.filter(msg => msg.text === self.text, {self:{text:'FUN!'}})
173 | // .orderBy('date','ASC')
174 | // .slice(0,1);
175 | // assert.equal(msgs.length, 1);
176 | // assert.equal(msgs[0].text, 'FUN!');
177 | // })
178 | //
179 | // await test('filter message using chainable .filter .length', async() => {
180 | // const msgsLength = await db.msgs.filter(msg => msg.text === 'FUN!').length;
181 | // assert.equal(msgsLength, 1);
182 | // })
183 | //
184 | // await test('filter message using chainable .filter .map', async() => {
185 | // const msgs = await db.msgs.filter(msg => msg.text === 'FUN!').map(msg => msg.text);
186 | // assert.equal(msgs[0], 'FUN!');
187 | // })
188 | //
189 | // await test('map msgs using .map()', async() => {
190 | // const texts = await db.msgs.map(msg => msg.text);
191 | // assert.equal(texts.length, 1)
192 | // assert.equal(texts[0], 'FUN!')
193 | // })
194 | //
195 | // await test('iterate using forEach', async() => {
196 | // const msgs = [];
197 | // await db.msgs.forEach(msg => msgs.push(msg))
198 | // assert.deepStrictEqual(msgs.length, 1)
199 | // assert.deepStrictEqual(msgs[0].text, 'FUN!')
200 | // })
201 | //
202 | // await test('iterate using for await', async() => {
203 | // const msgs = []
204 | // for await (const msg of db.msgs){
205 | // msgs.push(msg);
206 | // }
207 | // assert.deepStrictEqual(msgs.length, 1)
208 | // assert.deepStrictEqual(msgs[0].text, 'FUN!')
209 | // })
210 | //
211 | // await test('subscribe to individual msg', async() => {
212 | // let lastValue;
213 | // const unsubscribe = db.msgs.x.subscribe(value => {
214 | // lastValue = value
215 | // });
216 | // db.msgs.x.text = "IS LIVE!"
217 | // await new Promise(resolve => setTimeout(resolve, 2000))
218 | // unsubscribe();
219 | // assert.equal(lastValue?.text,'IS LIVE!');
220 | // })
221 | //
222 | // await test('clear msgs', async() => {
223 | // await db.msgs.clear();
224 | // const size = await db.msgs.size;
225 | // assert.deepStrictEqual(size, 0)
226 | // })
227 | //
// Benchmark: 10k sequential inserts through the HTTP API; only logs timing,
// no assertion.
await test('Insert 10000 logs', async() => {
  const startMs = Date.now();
  // const promises = [];
  for(let i = 0; i<10000;i++) {
    await db.logs.push({type:'info',text:'Dummy log',date: new Date(),i});
  }
  // await Promise.all(promises);
  const endMs = Date.now();
  console.log('10k Write Time', endMs-startMs)
  // assert.deepStrictEqual(endMs-startMs<2000, true);
})
239 | //
240 | // await test('Get 1000', async() => {
241 | // const startMs = Date.now();
242 | // await db.logs.values();
243 | // const endMs = Date.now();
244 | // console.log('Get 1000 Time', endMs-startMs)
245 | // // assert.deepStrictEqual(endMs-startMs<2000, true);
246 | // })
247 | //
248 | // await test('Query 1000 logs', async() => {
249 | // const allLogs = await db.logs.keys()
250 | // const startMs = Date.now();
251 | // for(const id of allLogs) {
252 | // await db.logs.get(id);
253 | // }
254 | // const endMs = Date.now();
255 | // console.log('Query Read Time', endMs-startMs)
256 | // // assert.deepStrictEqual(endMs-startMs<2000, true);
257 | // })
258 | //
259 | // await test('Find first log', async() => {
260 | // const startMs = Date.now();
261 | // await db.logs.filter(log => log.i === 999)
262 | // const endMs = Date.now();
263 | // console.log('Find first time', endMs-startMs)
264 | // // assert.deepStrictEqual(endMs-startMs<2000, true);
265 | // })
266 | //
267 | // await test('clear logs', async() => {
268 | // await db.logs.clear();
269 | // const size = await db.logs.size;
270 | // assert.deepStrictEqual(size, 0)
271 | // })
272 | //
273 | // await test('Subscribe filter', async () => {
274 | // let lastValue;
275 | // const unsubscribe = db.logs.filter(log => log.text === 'LIVE LOG!').subscribe(value => {
276 | // lastValue = value
277 | // });
278 | // await db.logs.push({type:'info',text:'LIVE LOG!',date: new Date()});
279 | // await new Promise(resolve => setTimeout(resolve, 2000))
280 | // unsubscribe();
281 | // assert.equal(lastValue[0]?.text,'LIVE LOG!');
282 | // })
283 | //
// Remove everything written by the insert benchmark and confirm the
// collection is empty afterwards.
await test('clear logs', async () => {
  await db.logs.clear();
  const remaining = await db.logs.size;
  assert.deepStrictEqual(remaining, 0);
})
289 | //
290 | // await test('call remote function', async() => {
291 | // const result = await functions.helloWorld();
292 | // assert.deepStrictEqual(result.message, 'IT WORKS!')
293 | // });
294 | //
295 | // await test('call function & remotely insert 1000 records', async() => {
296 | // const result = await functions.remoteInserts();
297 | // console.log('Remote insert 1000 time', result.time)
298 | // assert.deepStrictEqual(result.time < 100, true)
299 | // });
300 |
301 | // LOCAL TESTS
302 |
const localJsdb = initApp({connector: 'LOCAL', opHandlers: opHandlers});

// The local collection should start out empty.
await test('Local get map size', async () => {
  const count = await localJsdb.db.logs.length;
  assert.deepStrictEqual(count, 0);
});

// Benchmark: 10k un-awaited pushes against the in-process connector,
// then verify all of them landed.
await test('Local insert 10000', async () => {
  const t0 = Date.now();
  let n = 0;
  while (n < 10000) {
    localJsdb.db.logs.push({ type: 'info', text: 'Dummy log', date: new Date(), i: n });
    n++;
  }
  const t1 = Date.now();
  console.log('10k Local Write Time', t1 - t0);
  const count = await localJsdb.db.logs.length;
  assert.deepStrictEqual(count, 10000);
});

// Clean up the local collection again.
await test('clear logs', async () => {
  await localJsdb.db.logs.clear();
  const count = await localJsdb.db.logs.size;
  assert.deepStrictEqual(count, 0);
})
326 |
const wsJsdb = initApp({connector: 'WS', opHandlers: opHandlers, serverUrl: 'http://localhost:3001'})

// Benchmark: 10k sequential awaited writes over the WebSocket connector,
// then verify all of them landed.
// Fix: dropped the per-iteration performance.now() pair (iStart/iEnd) — its
// only consumer was commented out, so it just added overhead inside the
// benchmark loop and skewed the measured time.
await test('WS insert 10000', async () => {
  const startMs = Date.now();
  for (let i = 0; i < 10000; i++) {
    await wsJsdb.db.logs.push({type: 'info', text: 'Dummy log', date: new Date(), i});
  }
  const endMs = Date.now();
  console.log('10k WS Write Time', endMs - startMs);
  const size = await wsJsdb.db.logs.length;
  assert.deepStrictEqual(size, 10000);
});
342 |
// Final report: counts accumulated by the test() helper earlier in this file.
console.log('PASSED', passedMap.size);
console.log('FAILED', failedMap.size);

// Any failure aborts via a thrown error (non-zero exit code); otherwise exit
// explicitly so open connections don't keep the process alive.
if (failedMap.size !== 0) {
  throw new Error('Errors found while running tests');
}

process.exit();
--------------------------------------------------------------------------------
/vm.js:
--------------------------------------------------------------------------------
// NOTE(review): the vm2 project is discontinued and has published
// sandbox-escape advisories; plan a migration (e.g. isolated-vm) for any
// deployment that evaluates untrusted expressions.
import {VM} from "vm2";

// Cache of previously evaluated (sandbox, expression) pairs.
// NOTE(review): unbounded — a long-running process evaluating many distinct
// sandbox/expression combinations will grow this map forever.
const cachedRuns = new Map();

/**
 * Evaluates `expression` inside a sandboxed vm2 VM and memoizes the result.
 *
 * Fix: removed the debug console.log calls ('From cache' / 'Dry run') that
 * fired on every invocation of this hot path.
 *
 * @param {object} sandbox - Globals exposed to the evaluated expression.
 *   NOTE(review): the cache key uses JSON.stringify(sandbox), which drops
 *   functions/undefined and is key-order sensitive, so distinct sandboxes can
 *   collide (stale hit) and equivalent ones can miss — confirm callers only
 *   pass plain, stably-ordered data.
 * @param {string} expression - JavaScript source to evaluate in the VM.
 * @returns {*} The expression's result (cached on subsequent identical calls).
 * @throws Propagates vm2 errors, including timeouts after 1000ms.
 */
export function memoizedRun(sandbox, expression) {
  const key = JSON.stringify(sandbox) + expression;
  if (cachedRuns.has(key)) {
    return cachedRuns.get(key);
  }
  const vm = new VM({
    timeout: 1000,      // hard cap (ms) on synchronous execution
    allowAsync: false,  // reject async constructs in evaluated code
    sandbox,
  });
  const result = vm.run(expression);
  cachedRuns.set(key, result);
  return result;
}
--------------------------------------------------------------------------------