├── .gitattributes
├── .github
├── CODEOWNERS
├── FUNDING.yml
└── workflows
│ ├── ci.yml
│ ├── deps.yaml
│ └── jsr.yml
├── .gitignore
├── LICENSE
├── README.md
├── deno.json
├── deps.ts
├── mod.ts
├── src
├── auth
│ ├── base.ts
│ ├── mod.ts
│ ├── pbkdf2.ts
│ ├── scram.ts
│ └── x509.ts
├── client.ts
├── cluster.ts
├── collection
│ ├── collection.ts
│ ├── commands
│ │ ├── aggregate.ts
│ │ ├── find.ts
│ │ ├── list_indexes.ts
│ │ └── update.ts
│ └── mod.ts
├── database.ts
├── error.ts
├── gridfs
│ ├── bucket.ts
│ ├── indexes.ts
│ └── upload.ts
├── protocol
│ ├── cursor.ts
│ ├── handshake.ts
│ ├── header.ts
│ ├── message.ts
│ ├── mod.ts
│ └── protocol.ts
├── types.ts
├── types
│ ├── geojson.ts
│ ├── geospatial.ts
│ ├── gridfs.ts
│ └── read_write_concern.ts
└── utils
│ ├── ns.ts
│ ├── saslprep
│ ├── code_points.mem
│ ├── load_code_points.ts
│ ├── memory_pager.ts
│ ├── mod.ts
│ └── sparse_bitfield.ts
│ ├── srv.ts
│ └── uri.ts
└── tests
├── assets
├── 1.jpg
├── 2.jpg
├── sample_neighborhoods.json
└── sample_places.json
├── cases
├── 00_uri.ts
├── 01_auth.ts
├── 02_connect.ts
├── 03_crud.ts
├── 04_indexes.ts
├── 05_srv.ts
├── 06_gridfs.ts
├── 07_worker.ts
├── 08_find_cursor.ts
├── 09_geospatial_types.ts
├── 10_command_helpers.ts
└── import_worker.ts
├── common.ts
└── deps.ts
/.gitattributes:
--------------------------------------------------------------------------------
1 | * text eol=lf
2 |
--------------------------------------------------------------------------------
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @erfanium
2 | src/gridfs @lucsoft
3 |
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | github: [lucsoft]
2 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: ci
2 |
3 | on: [push, pull_request]
4 |
5 | jobs:
6 | build:
7 | name: ubuntu-latest
8 | runs-on: ubuntu-latest
9 | timeout-minutes: 60
10 |
11 | strategy:
12 | matrix:
13 | mongo: ["4.0", "4.2", "4.4", "5.0", "6.0"]
14 | steps:
15 | - name: Setup Environment
16 | run: |
17 | git config --global core.autocrlf false
18 | git config --global core.eol lf
19 |
20 | - name: Clone repository
21 | uses: actions/checkout@v2
22 |
23 | - name: Install Deno
24 | uses: denolib/setup-deno@master
25 | with:
26 | deno-version: 2.x.x
27 |
28 | - name: Log versions
29 | run: |
30 | deno --version
31 |
32 | # Lint & Format
33 | - name: Lint & Format TS
34 | run: |
35 | deno lint
36 | deno fmt --check
37 |
38 | # Start MongoDB
39 | - name: Start MongoDB (Linux)
40 | uses: wbari/start-mongoDB@v0.2
41 | with:
42 | mongoDBVersion: ${{ matrix.mongo }}
43 |
44 | # Tests
45 | - name: Test TS
46 | run: deno test -A
47 |
--------------------------------------------------------------------------------
/.github/workflows/deps.yaml:
--------------------------------------------------------------------------------
1 | name: deps
2 |
3 | on:
4 | schedule:
5 | - cron: "0 23 * * *"
6 |
7 | jobs:
8 | update:
9 | runs-on: ubuntu-latest
10 |
11 | steps:
12 | - uses: williamhorning/deno-outdated-action@v1
13 | with:
14 | branch_name: "bump-version"
15 | commit_message: "chores: update Deno dependencies"
16 | deno_version: "2.x"
17 | pull_request_title: "chore: update Deno dependencies"
18 |
--------------------------------------------------------------------------------
/.github/workflows/jsr.yml:
--------------------------------------------------------------------------------
1 | name: jsr
2 |
3 | on:
4 | release:
5 | types: [published]
6 | workflow_dispatch:
7 |
8 | jobs:
9 | publish:
10 | name: ubuntu-latest
11 | runs-on: ubuntu-latest
12 | timeout-minutes: 60
13 | permissions:
14 | contents: read
15 | id-token: write
16 |
17 | steps:
18 | - name: Setup Environment
19 | run: |
20 | git config --global core.autocrlf false
21 | git config --global core.eol lf
22 | - name: Clone repository
23 | uses: actions/checkout@v2
24 |
25 | - name: Install Deno
26 | uses: denolib/setup-deno@master
27 | with:
28 | deno-version: 2.x.x
29 |
30 | - name: Log versions
31 | run: |
32 | deno --version
33 | - name: Publish to JSR
34 | run: |
35 | deno publish
36 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /target
2 | .deno_plugins
3 | **/*.rs.bk
4 | .idea/
5 | .DS_Store
6 | .vscode/
7 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020-2023 the deno_mongo authors
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # deno_mongo
2 |
3 | > **deno_mongo** is a **MongoDB** database driver developed for Deno. It
4 | > supports Deno Deploy as well.
5 |
6 |
7 |

8 |
9 |
10 | ## ATTENTION
11 |
12 | **Deno has added support for npm modules, so you can also use the `npm:mongodb`
13 | driver from now on.**\
14 | Each of these two drivers has its own advantages and disadvantages. you can
15 | follow [this](https://github.com/denodrivers/mongo/issues/380) issue for more
16 | details. In short:
17 |
18 | - if you want to use basic MongoDB operations and you don't care about
19 | stability, this driver just works.
20 | - if you want most of MongoDB feature working with Deno and you don't care about
21 | the possible overheads by using node compat layer, you may try the
22 | `npm:mongodb` driver
23 | - if you are using MongoDB Atlas, using
24 | [atlas_sdk](https://github.com/erfanium/atlas_sdk) can be your best option.
25 |
26 | [](https://github.com/manyuanrong/deno_mongo/releases)
27 | [](https://github.com/manyuanrong/deno_mongo/actions)
28 | [](https://github.com/manyuanrong/deno_mongo)
29 | [](https://discord.gg/HEdTCvZUSf)
30 |
31 | ## Links
32 |
33 | - [Docs](https://doc.deno.land/https/deno.land/x/mongo/mod.ts)
34 |
35 | ### Import
36 |
37 | Replace `LATEST_VERSION` with
38 | [current latest version](https://deno.land/x/mongo)
39 |
40 | ```ts
41 | import {
42 | Bson,
43 | MongoClient,
44 | } from "https://deno.land/x/mongo@LATEST_VERSION/mod.ts";
45 | ```
46 |
47 | ### Connect
48 |
49 | ```ts
50 | const client = new MongoClient();
51 |
52 | // Connecting to a Local Database
53 | await client.connect("mongodb://127.0.0.1:27017");
54 |
55 | // Connecting to a Mongo Atlas Database
56 | await client.connect({
57 | db: "",
58 | tls: true,
59 | servers: [
60 | {
61 | host: "",
62 | port: 27017,
63 | },
64 | ],
65 | credential: {
66 | username: "",
67 | password: "",
68 | db: "",
69 | mechanism: "SCRAM-SHA-1",
70 | },
71 | });
72 |
73 | // Connect using srv url
74 | await client.connect(
75 | "mongodb+srv://:@/?authMechanism=SCRAM-SHA-1",
76 | );
77 | ```
78 |
79 | ### Access Collection
80 |
81 | ```ts
82 | // Defining schema interface
83 | interface UserSchema {
84 | _id: ObjectId;
85 | username: string;
86 | password: string;
87 | }
88 |
89 | const db = client.database("test");
90 | const users = db.collection("users");
91 | ```
92 |
93 | ### Insert
94 |
95 | ```ts
96 | const insertId = await users.insertOne({
97 | username: "user1",
98 | password: "pass1",
99 | });
100 |
101 | const insertIds = await users.insertMany([
102 | {
103 | username: "user1",
104 | password: "pass1",
105 | },
106 | {
107 | username: "user2",
108 | password: "pass2",
109 | },
110 | ]);
111 | ```
112 |
113 | ### Find
114 |
115 | ```ts
116 | const user1 = await users.findOne({ _id: insertId });
117 |
118 | const all_users = await users.find({ username: { $ne: null } }).toArray();
119 |
120 | // find by ObjectId
121 | const user1_id = await users.findOne({
122 | _id: new ObjectId("SOME OBJECTID STRING"),
123 | });
124 | ```
125 |
126 | ### Count
127 |
128 | ```ts
129 | const count = await users.countDocuments({ username: { $ne: null } });
130 |
131 | // estimatedDocumentCount uses collection metadata and does not take a filter
132 | const estimatedCount = await users.estimatedDocumentCount();
134 | ```
135 |
136 | ### Aggregation
137 |
138 | ```ts
139 | const docs = await users.aggregate([
140 | { $match: { username: "many" } },
141 | { $group: { _id: "$username", total: { $sum: 1 } } },
142 | ]).toArray();
143 | ```
144 |
145 | ### Update
146 |
147 | ```ts
148 | const { matchedCount, modifiedCount, upsertedId } = await users.updateOne(
149 | { username: { $ne: null } },
150 | { $set: { username: "USERNAME" } },
151 | );
152 |
153 | const { matchedCount, modifiedCount, upsertedId } = await users.updateMany(
154 | { username: { $ne: null } },
155 | { $set: { username: "USERNAME" } },
156 | );
157 | ```
158 |
159 | ### Replace
160 |
161 | ```ts
162 | const { matchedCount, modifiedCount, upsertedId } = await users.replaceOne(
163 | { username: "a" },
164 | {
165 | username: "user1",
166 | password: "pass1",
167 | }, // new document
168 | );
169 | ```
170 |
171 | ### Delete
172 |
173 | ```ts
174 | const deleteCount = await users.deleteOne({ _id: insertId });
175 |
176 | const deleteCount2 = await users.deleteMany({ username: "test" });
177 | ```
178 |
179 | ### Cursor methods
180 |
181 | ```ts
182 | const cursor = users.find();
183 |
184 | // Skip & Limit
185 | cursor.skip(10).limit(10);
186 |
187 | // iterate results
188 | for await (const user of cursor) {
189 | console.log(user);
190 | }
191 |
192 | // or save results to array (uses more memory)
193 | const allUsers = await cursor.toArray();
194 | ```
195 |
196 | ### GridFS
197 |
198 | ```ts
199 | // Upload
200 | const bucket = new GridFSBucket(db);
201 | const upstream = bucket.openUploadStream("test.txt");
202 |
203 | const writer = upstream.getWriter();
204 | writer.write(fileContents);
205 |
206 | await writer.close();
207 |
208 | // Download
209 | const file = await new Response(bucket.openDownloadStream(id)).text();
210 | ```
211 |
212 | ## Community Resources
213 |
214 | ### Tools
215 |
216 | - [atlas_sdk](https://github.com/erfanium/atlas_sdk) TypeSafe MongoDB Atlas Data
217 | API SDK for Deno & Deno Deploy
218 | - [dangoDB](https://github.com/oslabs-beta/dangoDB) A MongoDB ODM for Deno
219 | - [denomongo-unittest-utils](https://github.com/Gang-of-Fork/denomongo-unittest-utils)
220 | mock mongo-collections for use in unit-tests
221 |
222 | ### Examples
223 |
224 | - [deno-deploy-mongo](https://github.com/erfanium/deno-deploy-mongo) A simple
225 | app with Deno, MongoDB and oak deployed on Deno Deploy and MongoDB Atlas
226 | - [deno_rest](https://github.com/vicky-gonsalves/deno_rest) A simple oak based
227 | boilerplate for RESTful apis using deno_mongo
228 |
229 | ## Contributing
230 |
231 | ### Local testing with Docker
232 |
233 | 1. `docker run -d -p 27017:27017 mongo`
234 | 2. `deno test -A`
235 |
--------------------------------------------------------------------------------
/deno.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@db/mongo",
3 | "version": "0.34.0",
4 | "exports": {
5 | ".": "./mod.ts",
6 | "./client": "./src/client.ts",
7 | "./collection": "./src/collection/collection.ts",
8 | "./gridfs": "./src/gridfs/bucket.ts",
9 | "./types": "./src/types.ts"
10 | },
11 | "publish": {
12 | "exclude": [".github", "tests", ".gitattributes", ".gitignore", "deno.lock"]
13 | },
14 | "test": {
15 | "include": [
16 | "tests/cases/*.ts"
17 | ]
18 | },
19 | "lock": false
20 | }
21 |
--------------------------------------------------------------------------------
/deps.ts:
--------------------------------------------------------------------------------
1 | export {
2 | Binary,
3 | BSONRegExp,
4 | BSONSymbol,
5 | Code,
6 | DBRef,
7 | Decimal128,
8 | deserialize,
9 | Double,
10 | Int32,
11 | Long,
12 | MaxKey,
13 | MinKey,
14 | ObjectId,
15 | serialize,
16 | Timestamp,
17 | UUID,
18 | } from "jsr:@lucsoft/web-bson@^0.4.0";
19 | export { crypto as stdCrypto } from "jsr:@std/crypto@^1.0.3/crypto";
20 | export { decodeBase64, encodeBase64 } from "jsr:@std/encoding@^1.0.5/base64";
21 | export { encodeHex } from "jsr:@std/encoding@^1.0.5/hex";
22 |
--------------------------------------------------------------------------------
/mod.ts:
--------------------------------------------------------------------------------
1 | /**
2 | * @module
3 | *
4 | * # deno_mongo
5 | *
6 | * **deno_mongo** is a **MongoDB** driver for Deno which also supports Deno Deploy.
7 | *
8 | * ## ATTENTION
9 | *
10 | * Deno has support for npm modules now, so you can also use `npm:mongodb`.
11 | * See [this](https://github.com/denodrivers/mongo/issues/380) for more details.
12 | *
13 | * ## Usage
14 | *
15 | * Replace `version` with the latest version of the driver.
16 | *
17 | * ```ts
18 | * import { MongoClient } from 'jsr:@db/mongo@version';
19 | * ```
20 | *
21 | * See [the README](https://github.com/denodrivers/mongo) for more examples.
22 | *
23 | * ## Other community resources and examples
24 | *
25 | * - [atlas_sdk](https://deno.land/x/atlas_sdk) - TypeSafe MongoDB Atlas SDK
26 | * - [dangoDB](https://github.com/oslabs-beta/dangoDB) - MongoDB ORM for Deno
27 | * - [deno-deploy-mongo](https://github.com/erfanium/deno-deploy-mongo) - A simple app with Deno, MongoDB, and Oak using MongoDB Atlas
28 | * - [deno_rest](https://github.com/vicky-gonsalves/deno_rest) - An Oak-based template for RESTful APIs using this driver
29 | * - [denomongo-unittest-utils](https://github.com/Gang-of-Fork/denomongo-unittest-utils) - Mock collection for unit tests
30 | */
31 |
32 | export {
33 | Binary,
34 | BSONRegExp,
35 | BSONSymbol,
36 | Code,
37 | DBRef,
38 | Decimal128,
39 | Double,
40 | Int32,
41 | Long,
42 | MaxKey,
43 | MinKey,
44 | ObjectId,
45 | Timestamp,
46 | UUID,
47 | } from "./deps.ts";
48 | export { MongoClient } from "./src/client.ts";
49 | export { Collection } from "./src/collection/mod.ts";
50 | export { Database } from "./src/database.ts";
51 | export { GridFSBucket } from "./src/gridfs/bucket.ts";
52 | export * from "./src/types.ts";
53 |
--------------------------------------------------------------------------------
/src/auth/base.ts:
--------------------------------------------------------------------------------
1 | import type { WireProtocol } from "../protocol/mod.ts";
2 | import type { ConnectOptions, Credential, Document } from "../types.ts";
3 |
/** Base contract for MongoDB authentication mechanisms (SCRAM, X.509, ...). */
export abstract class AuthPlugin {
  // Runs (or finishes) the authentication conversation for a connection.
  // NOTE(review): the concrete plugins in this package (ScramAuthPlugin,
  // X509AuthPlugin) return Promise<Document> from auth(); the `Document`
  // return type declared here looks narrower than the implementations —
  // confirm the intended signature.
  abstract auth(authContext: AuthContext): Document;
  // Builds the handshake document (including any speculative-authenticate
  // payload) sent when the connection is first established.
  abstract prepare(authContext: AuthContext): Document;
}
8 |
9 | /** Context used during authentication */
10 | export class AuthContext {
11 | /** The connection to authenticate */
12 | protocol: WireProtocol;
13 | /** The credentials to use for authentication */
14 | credentials?: Credential;
15 | /** The options passed to the `connect` method */
16 | options: ConnectOptions;
17 |
18 | /** A response from an initial auth attempt, only some mechanisms use this (e.g, SCRAM) */
19 | response?: Document;
20 | /** A random nonce generated for use in an authentication conversation */
21 | nonce?: Uint8Array;
22 |
23 | constructor(
24 | protocol: WireProtocol,
25 | credentials: Credential | undefined,
26 | options: ConnectOptions,
27 | ) {
28 | this.protocol = protocol;
29 | this.credentials = credentials;
30 | this.options = options;
31 | this.nonce = globalThis.crypto.getRandomValues(new Uint8Array(24));
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/src/auth/mod.ts:
--------------------------------------------------------------------------------
1 | export * from "./base.ts";
2 | export * from "./scram.ts";
3 | export * from "./x509.ts";
4 |
--------------------------------------------------------------------------------
/src/auth/pbkdf2.ts:
--------------------------------------------------------------------------------
1 | const encoder = new TextEncoder();
2 |
3 | const algoMap = {
4 | sha: "SHA-1",
5 | "sha-1": "SHA-1",
6 | sha1: "SHA-1",
7 | sha256: "SHA-256",
8 | "sha-256": "SHA-256",
9 | sha384: "SHA-384",
10 | "sha-384": "SHA-384",
11 | "sha-512": "SHA-512",
12 | sha512: "SHA-512",
13 | };
14 |
15 | export async function pbkdf2(
16 | password: string,
17 | salt: Uint8Array,
18 | iterations: number,
19 | length: number,
20 | _algo: "sha1" | "sha256",
21 | ) {
22 | const algo = algoMap[_algo];
23 | const key = await crypto.subtle.importKey(
24 | "raw",
25 | encoder.encode(password),
26 | { name: "PBKDF2" },
27 | false,
28 | ["deriveBits"],
29 | );
30 |
31 | return crypto.subtle.deriveBits(
32 | {
33 | name: "PBKDF2",
34 | salt: salt,
35 | iterations: iterations,
36 | hash: {
37 | name: algo,
38 | },
39 | },
40 | key,
41 | length << 3,
42 | );
43 | }
44 |
--------------------------------------------------------------------------------
/src/auth/scram.ts:
--------------------------------------------------------------------------------
1 | import {
2 | Binary,
3 | decodeBase64,
4 | encodeBase64,
5 | encodeHex,
6 | stdCrypto,
7 | } from "../../deps.ts";
8 | import { MongoDriverError } from "../error.ts";
9 | import type { HandshakeDocument } from "../protocol/handshake.ts";
10 | import { driverMetadata } from "../protocol/mod.ts";
11 | import type { Credential, Document } from "../types.ts";
12 | import { saslprep } from "../utils/saslprep/mod.ts";
13 | import { type AuthContext, AuthPlugin } from "./base.ts";
14 | import { pbkdf2 } from "./pbkdf2.ts";
15 |
/** Hash variants supported by the SCRAM mechanism. */
type CryptoMethod = "sha1" | "sha256";

// Shared UTF-8 encoder/decoder for building and reading SCRAM payloads.
const enc = new TextEncoder();
const dec = new TextDecoder();
20 |
21 | export class ScramAuthPlugin extends AuthPlugin {
22 | cryptoMethod: CryptoMethod;
23 | constructor(cryptoMethod: CryptoMethod) {
24 | super();
25 | this.cryptoMethod = cryptoMethod || "sha256";
26 | }
27 |
28 | prepare(authContext: AuthContext): Document {
29 | const handshakeDoc = {
30 | ismaster: true,
31 | client: driverMetadata,
32 | compression: authContext.options.compression,
33 | };
34 | const request = {
35 | ...handshakeDoc,
36 | ...{
37 | speculativeAuthenticate: {
38 | ...makeFirstMessage(
39 | this.cryptoMethod,
40 | authContext.options.credential!,
41 | authContext.nonce!,
42 | ),
43 | ...{ db: authContext.options.credential!.db },
44 | },
45 | },
46 | };
47 | return request;
48 | }
49 |
50 | auth(authContext: AuthContext): Promise {
51 | const response = authContext.response;
52 | if (response && response.speculativeAuthenticate) {
53 | return continueScramConversation(
54 | this.cryptoMethod,
55 | response.speculativeAuthenticate,
56 | authContext,
57 | );
58 | }
59 | return executeScram(this.cryptoMethod, authContext);
60 | }
61 | }
62 | export function cleanUsername(username: string) {
63 | return username.replace("=", "=3D").replace(",", "=2C");
64 | }
65 |
66 | export function clientFirstMessageBare(username: string, nonce: Uint8Array) {
67 | // NOTE: This is done b/c Javascript uses UTF-16, but the server is hashing in UTF-8.
68 | // Since the username is not sasl-prep-d, we need to do this here.
69 | return Uint8Array.from(
70 | [
71 | ...enc.encode("n="),
72 | ...enc.encode(username),
73 | ...enc.encode(",r="),
74 | ...enc.encode(encodeBase64(nonce)),
75 | ],
76 | );
77 | }
78 |
79 | export function makeFirstMessage(
80 | cryptoMethod: CryptoMethod,
81 | credentials: Credential,
82 | nonce: Uint8Array,
83 | ) {
84 | const username = cleanUsername(credentials.username!);
85 | const mechanism = cryptoMethod === "sha1" ? "SCRAM-SHA-1" : "SCRAM-SHA-256";
86 |
87 | // NOTE: This is done b/c Javascript uses UTF-16, but the server is hashing in UTF-8.
88 | // Since the username is not sasl-prep-d, we need to do this here.
89 | return {
90 | saslStart: 1,
91 | mechanism,
92 | payload: new Binary(
93 | Uint8Array.from(
94 | [...enc.encode("n,,"), ...clientFirstMessageBare(username, nonce)],
95 | ),
96 | ),
97 | autoAuthorize: 1,
98 | options: { skipEmptyExchange: true },
99 | };
100 | }
101 |
102 | export async function executeScram(
103 | cryptoMethod: CryptoMethod,
104 | authContext: AuthContext,
105 | ) {
106 | const { protocol, credentials } = authContext;
107 | if (!credentials) {
108 | throw new MongoDriverError("AuthContext must provide credentials.");
109 | }
110 | if (!authContext.nonce) {
111 | throw new MongoDriverError(
112 | "AuthContext must contain a valid nonce property",
113 | );
114 | }
115 | const nonce = authContext.nonce;
116 | const db = credentials.db!;
117 |
118 | const saslStartCmd = makeFirstMessage(cryptoMethod, credentials, nonce);
119 | const result = await protocol.commandSingle(db, saslStartCmd);
120 | return continueScramConversation(cryptoMethod, result, authContext);
121 | }
122 |
/**
 * Continues a SCRAM conversation from the server-first message: derives the
 * salted password, sends the client-final message (with proof), and verifies
 * the server's reply. Returns the final command result.
 *
 * The statement order here mirrors RFC 5802's message flow and is
 * order-sensitive; the auth message must be built from the exact payloads
 * exchanged on the wire.
 */
export async function continueScramConversation(
  cryptoMethod: CryptoMethod,
  response: Document,
  authContext: AuthContext,
) {
  const protocol = authContext.protocol;
  const credentials = authContext.credentials;
  if (!credentials) {
    throw new MongoDriverError("AuthContext must provide credentials.");
  }
  if (!authContext.nonce) {
    throw new MongoDriverError("Unable to continue SCRAM without valid nonce");
  }
  const nonce = authContext.nonce;

  const db = credentials.db!;
  const username = cleanUsername(credentials.username!);
  const password = credentials.password!;

  // SHA-256 uses the saslprep'd password directly; SHA-1 uses the legacy
  // MD5 "user:mongo:password" digest.
  let processedPassword;
  if (cryptoMethod === "sha256") {
    processedPassword = saslprep(password);
  } else {
    processedPassword = await passwordDigest(username, password);
  }

  // Repair and parse the server-first payload (r=..., s=..., i=...).
  const payload = fixPayload(dec.decode(response.payload.buffer));
  const dict = parsePayload(payload);

  // RFC 5802 requires at least 4096 iterations.
  const iterations = parseInt(dict.i, 10);
  if (iterations && iterations < 4096) {
    throw new MongoDriverError(
      `Server returned an invalid iteration count ${iterations}`,
    );
  }

  const salt = dict.s;
  const rnonce = dict.r;
  if (rnonce.startsWith("nonce")) {
    throw new MongoDriverError(`Server returned an invalid nonce: ${rnonce}`);
  }

  // Set up start of proof
  const withoutProof = `c=biws,r=${rnonce}`;
  const saltedPassword = await HI(
    processedPassword,
    decodeBase64(salt),
    iterations,
    cryptoMethod,
  );

  // Derive the client/server keys and build the auth message
  // (client-first-bare, server-first payload, channel-binding + nonce).
  const clientKey = await HMAC(cryptoMethod, saltedPassword, "Client Key");
  const serverKey = await HMAC(cryptoMethod, saltedPassword, "Server Key");
  const storedKey = await H(cryptoMethod, clientKey);
  const authMessage = [
    dec.decode(clientFirstMessageBare(username, nonce)),
    payload,
    withoutProof,
  ].join(",");

  // ClientProof = ClientKey XOR HMAC(StoredKey, AuthMessage).
  const clientSignature = await HMAC(cryptoMethod, storedKey, authMessage);
  const clientProof = `p=${xor(clientKey, clientSignature)}`;
  const clientFinal = [withoutProof, clientProof].join(",");

  const serverSignature = await HMAC(cryptoMethod, serverKey, authMessage);

  const saslContinueCmd = {
    saslContinue: 1,
    conversationId: response.conversationId,
    payload: new Binary(enc.encode(clientFinal)),
  };

  const result = await protocol.commandSingle(db, saslContinueCmd);

  const parsedResponse = parsePayload(
    fixPayload2(dec.decode(result.payload.buffer)),
  );
  if (
    !compareDigest(
      decodeBase64(parsedResponse.v),
      new Uint8Array(serverSignature),
    )
  ) {
    // NOTE(review): server-signature verification failure is deliberately NOT
    // thrown here (the throw is commented out), so an invalid server
    // signature is silently accepted — confirm whether this is intentional.
    // throw new MongoDriverError("Server returned an invalid signature");
  }
  if (result.done) {
    return result;
  }
  // Server wants one more (empty) exchange before marking the
  // conversation done.
  const retrySaslContinueCmd = {
    saslContinue: 1,
    conversationId: result.conversationId,
    payload: new Uint8Array(0),
  };

  return protocol.commandSingle(db, retrySaslContinueCmd);
}
219 |
220 | //this is a hack to fix codification in payload (in being and end of payload exists a codification problem, needs investigation ...)
221 | export function fixPayload(payload: string) {
222 | const temp = payload.split("=");
223 | temp.shift();
224 | const it = parseInt(temp.pop()!, 10);
225 | payload = "r=" + temp.join("=") + "=" + it;
226 | return payload;
227 | }
228 | //this is a second hack to fix codification in payload (in being and end of payload exists a codification problem, needs investigation ...)
229 | export function fixPayload2(payload: string) {
230 | let temp = payload.split("v=");
231 | temp.shift();
232 | payload = temp.join("v=");
233 | temp = payload.split("ok");
234 | temp.pop();
235 | return "v=" + temp.join("ok");
236 | }
237 |
238 | export function parsePayload(payload: string) {
239 | const dict: Document = {};
240 | const parts = payload.split(",");
241 | for (let i = 0; i < parts.length; i++) {
242 | const valueParts = parts[i].split("=");
243 | dict[valueParts[0]] = valueParts[1];
244 | }
245 |
246 | return dict;
247 | }
248 |
249 | export async function passwordDigest(
250 | username: string,
251 | password: string,
252 | ): Promise {
253 | if (typeof username !== "string") {
254 | throw new MongoDriverError("username must be a string");
255 | }
256 |
257 | if (typeof password !== "string") {
258 | throw new MongoDriverError("password must be a string");
259 | }
260 |
261 | if (password.length === 0) {
262 | throw new MongoDriverError("password cannot be empty");
263 | }
264 |
265 | const result = await stdCrypto.subtle.digest(
266 | "MD5",
267 | enc.encode(`${username}:mongo:${password}`),
268 | );
269 | return encodeHex(new Uint8Array(result));
270 | }
271 |
272 | // XOR two buffers
273 | export function xor(_a: ArrayBuffer, _b: ArrayBuffer) {
274 | const a = new Uint8Array(_a);
275 | const b = new Uint8Array(_b);
276 |
277 | const length = Math.max(a.length, b.length);
278 | const res = new Uint8Array(length);
279 |
280 | for (let i = 0; i < length; i += 1) {
281 | res[i] = a[i] ^ b[i];
282 | }
283 |
284 | return encodeBase64(res);
285 | }
286 |
287 | export function H(method: CryptoMethod, text: BufferSource) {
288 | return crypto.subtle.digest(
289 | method === "sha256" ? "SHA-256" : "SHA-1",
290 | text,
291 | );
292 | }
293 |
294 | export async function HMAC(
295 | method: CryptoMethod,
296 | secret: ArrayBuffer,
297 | text: string,
298 | ) {
299 | const key = await crypto.subtle.importKey(
300 | "raw",
301 | secret,
302 | {
303 | name: "HMAC",
304 | hash: method === "sha256" ? "SHA-256" : "SHA-1",
305 | },
306 | false,
307 | ["sign", "verify"],
308 | );
309 |
310 | const signature = await crypto.subtle.sign(
311 | "HMAC",
312 | key,
313 | enc.encode(text),
314 | );
315 |
316 | return signature;
317 | }
318 |
/** Cache of derived (salted) passwords, keyed by password/salt/iterations. */
interface HICache {
  [key: string]: ArrayBuffer;
}

// Memoization for HI(): PBKDF2 is expensive and reconnects repeat the same
// (password, salt, iterations) triple, so results are cached module-wide.
let _hiCache: HICache = {};
let _hiCacheCount = 0;
// Drops the entire cache once it grows past the bound checked in HI().
function _hiCachePurge() {
  _hiCache = {};
  _hiCacheCount = 0;
}

// Output length in bytes of each supported hash, used as the PBKDF2 dkLen.
const hiLengthMap = {
  sha256: 32,
  sha1: 20,
};
334 |
335 | export async function HI(
336 | data: string,
337 | salt: Uint8Array,
338 | iterations: number,
339 | cryptoMethod: CryptoMethod,
340 | ): Promise {
341 | // omit the work if already generated
342 | const key = [data, encodeBase64(salt), iterations].join(
343 | "_",
344 | );
345 | if (_hiCache[key] !== undefined) {
346 | return _hiCache[key];
347 | }
348 |
349 | // generate the salt
350 | const saltedData = await pbkdf2(
351 | data,
352 | salt,
353 | iterations,
354 | hiLengthMap[cryptoMethod],
355 | cryptoMethod,
356 | );
357 |
358 | // cache a copy to speed up the next lookup, but prevent unbounded cache growth
359 | if (_hiCacheCount >= 200) {
360 | _hiCachePurge();
361 | }
362 |
363 | _hiCache[key] = saltedData;
364 | _hiCacheCount += 1;
365 | return saltedData;
366 | }
367 |
368 | export function compareDigest(lhs: Uint8Array, rhs: Uint8Array) {
369 | if (lhs.length !== rhs.length) {
370 | return false;
371 | }
372 |
373 | let result = 0;
374 | for (let i = 0; i < lhs.length; i++) {
375 | result |= lhs[i] ^ rhs[i];
376 | }
377 |
378 | return result === 0;
379 | }
380 |
--------------------------------------------------------------------------------
/src/auth/x509.ts:
--------------------------------------------------------------------------------
1 | import type { HandshakeDocument } from "../protocol/handshake.ts";
2 | import { driverMetadata } from "../protocol/mod.ts";
3 | import type { Credential, Document } from "../types.ts";
4 | import { type AuthContext, AuthPlugin } from "./base.ts";
5 |
/** Shape of the `authenticate` command used for MONGODB-X509 authentication. */
export interface X509Command extends Document {
  authenticate: number;
  mechanism: string;
  // Only set when credentials with a username are supplied
  // (see x509AuthenticateCommand).
  user?: string;
}
11 |
12 | export class X509AuthPlugin extends AuthPlugin {
13 | constructor() {
14 | super();
15 | }
16 | prepare(authContext: AuthContext): Document {
17 | const handshakeDoc = {
18 | ismaster: true,
19 | client: driverMetadata,
20 | compression: authContext.options.compression,
21 | speculativeAuthenticate: x509AuthenticateCommand(authContext.credentials),
22 | };
23 | return handshakeDoc;
24 | }
25 |
26 | auth(authContext: AuthContext): Promise {
27 | if (authContext.response!.speculativeAuthenticate) {
28 | return Promise.resolve(authContext.response!);
29 | }
30 | return authContext.protocol.commandSingle(
31 | "$external",
32 | x509AuthenticateCommand(authContext.credentials),
33 | );
34 | }
35 | }
36 |
37 | function x509AuthenticateCommand(credentials?: Credential): Document {
38 | const command: X509Command = { authenticate: 1, mechanism: "MONGODB-X509" };
39 | if (credentials) {
40 | command.user = credentials!.username;
41 | }
42 | return command;
43 | }
44 |
--------------------------------------------------------------------------------
/src/client.ts:
--------------------------------------------------------------------------------
1 | import { Cluster } from "./cluster.ts";
2 | import { Database } from "./database.ts";
3 | import { MongoDriverError } from "./error.ts";
4 | import type {
5 | BuildInfo,
6 | ConnectOptions,
7 | Document,
8 | ListDatabaseInfo,
9 | } from "./types.ts";
10 | import { parse } from "./utils/uri.ts";
11 |
12 | /**
13 | * A client that allows you to interact with a MongoDB Server
14 | * @module
15 | */
16 |
17 | /** A client that allows you to interact with a MongoDB Server */
18 | export class MongoClient {
19 | #cluster?: Cluster;
20 | #defaultDbName = "admin";
21 | #buildInfo?: BuildInfo;
22 |
23 | /** Get information about your server's build */
24 | get buildInfo(): BuildInfo | undefined {
25 | return this.#buildInfo;
26 | }
27 |
28 | /** Get the cluster associated with the client */
29 | getCluster(): Cluster {
30 | if (!this.#cluster) {
31 | throw new MongoDriverError(
32 | "MongoClient is not connected to the Database",
33 | );
34 | }
35 |
36 | return this.#cluster;
37 | }
38 |
39 | /**
40 | * Connect to the given MongoDB server
41 | *
42 | * @param options Connection options or a MongoDB URI
43 | */
44 | async connect(options: ConnectOptions | string): Promise {
45 | try {
46 | const parsedOptions = typeof options === "string"
47 | ? await parse(options)
48 | : options;
49 |
50 | this.#defaultDbName = parsedOptions.db;
51 | const cluster = new Cluster(parsedOptions);
52 | await cluster.connect();
53 | await cluster.authenticate();
54 | await cluster.updateMaster();
55 |
56 | this.#cluster = cluster;
57 | this.#buildInfo = await this.runCommand(this.#defaultDbName, {
58 | buildInfo: 1,
59 | });
60 | } catch (e: unknown) {
61 | throw new MongoDriverError(
62 | `Connection failed: ${e instanceof Error ? e.message : "unknown"}`,
63 | );
64 | }
65 | return this.database((options as ConnectOptions).db);
66 | }
67 |
68 | /**
69 | * List all databases on the connected server
70 | *
71 | * @param options Options to pass to the `listDatabases` command
72 | * @returns A list of databases including their name, size on disk, and whether they are empty
73 | */
74 | async listDatabases(
75 | options: {
76 | filter?: Document;
77 | nameOnly?: boolean;
78 | authorizedCollections?: boolean;
79 | comment?: Document;
80 | } = {},
81 | ): Promise {
82 | const { databases } = await this.getCluster().protocol.commandSingle(
83 | "admin",
84 | {
85 | listDatabases: 1,
86 | ...options,
87 | },
88 | );
89 | return databases;
90 | }
91 |
92 | /** Run a command on the connected server */
93 | // deno-lint-ignore no-explicit-any
94 | runCommand(db: string, body: Document): Promise {
95 | return this.getCluster().protocol.commandSingle(db, body);
96 | }
97 |
98 | /** Get a database instance on the connected server */
99 | database(name: string = this.#defaultDbName): Database {
100 | return new Database(this.getCluster(), name);
101 | }
102 |
103 | /** Close the connection to the server */
104 | close() {
105 | if (this.#cluster) this.#cluster.close();
106 | }
107 | }
108 |
--------------------------------------------------------------------------------
/src/cluster.ts:
--------------------------------------------------------------------------------
1 | import { AuthContext, ScramAuthPlugin, X509AuthPlugin } from "./auth/mod.ts";
2 | import { MongoDriverError } from "./error.ts";
3 | import { WireProtocol } from "./protocol/mod.ts";
4 | import type { ConnectOptions, Server } from "./types.ts";
5 |
6 | export class Cluster {
7 | #options: ConnectOptions;
8 | #connections: Deno.Conn[];
9 | #protocols: WireProtocol[];
10 | #masterIndex: number;
11 |
12 | constructor(options: ConnectOptions) {
13 | this.#options = options;
14 | this.#connections = [];
15 | this.#protocols = [];
16 | this.#masterIndex = -1;
17 | }
18 |
19 | async connect() {
20 | const options = this.#options;
21 | this.#connections = await Promise.all(
22 | options.servers.map((server) => this.connectToServer(server, options)),
23 | );
24 | }
25 |
26 | connectToServer(
27 | server: Server,
28 | options: ConnectOptions,
29 | ): Promise {
30 | const denoConnectOps: Deno.ConnectTlsOptions = {
31 | hostname: server.host,
32 | port: server.port,
33 | };
34 |
35 | if (!options.tls) return Deno.connect(denoConnectOps);
36 |
37 | if (options.certFile) {
38 | denoConnectOps.caCerts = [Deno.readTextFileSync(options.certFile)];
39 | }
40 |
41 | if (options.keyFile) {
42 | //TODO: need something like const key = decrypt(options.keyFile) ...
43 | if (options.keyFilePassword) {
44 | throw new MongoDriverError(
45 | "Tls keyFilePassword not implemented in Deno driver",
46 | );
47 | }
48 | throw new MongoDriverError("Tls keyFile not implemented in Deno driver");
49 | //TODO: need Deno.connectTls with something like key or keyFile option.
50 | }
51 |
52 | return Deno.connectTls(denoConnectOps);
53 | }
54 |
55 | async authenticate() {
56 | const options = this.#options;
57 | this.#protocols = await Promise.all(
58 | this.#connections.map((conn) => this.authenticateToServer(conn, options)),
59 | );
60 | }
61 |
62 | async authenticateToServer(
63 | conn: Deno.Conn,
64 | options: ConnectOptions,
65 | ): Promise {
66 | const protocol = new WireProtocol(conn);
67 | if (options.credential) {
68 | const authContext = new AuthContext(
69 | protocol,
70 | options.credential,
71 | options,
72 | );
73 | const mechanism = options.credential!.mechanism;
74 | let authPlugin;
75 | if (mechanism === "SCRAM-SHA-256") {
76 | authPlugin = new ScramAuthPlugin("sha256"); //TODO AJUST sha256
77 | } else if (mechanism === "SCRAM-SHA-1") {
78 | authPlugin = new ScramAuthPlugin("sha1");
79 | } else if (mechanism === "MONGODB-X509") {
80 | authPlugin = new X509AuthPlugin();
81 | } else {
82 | throw new MongoDriverError(
83 | `Auth mechanism not implemented in Deno driver: ${mechanism}`,
84 | );
85 | }
86 | const request = authPlugin.prepare(authContext);
87 | authContext.response = await protocol.commandSingle(
88 | "admin", // TODO: Should get the auth db from connectionOptions?
89 | request,
90 | );
91 | await authPlugin.auth(authContext);
92 | } else {
93 | await protocol.connect();
94 | }
95 | return protocol;
96 | }
97 |
98 | async updateMaster() {
99 | const results = await Promise.all(this.#protocols.map((protocol) => {
100 | return protocol.commandSingle(
101 | "admin",
102 | { isMaster: 1 },
103 | );
104 | }));
105 | const masterIndex = results.findIndex((result) =>
106 | result.isWritablePrimary || result.ismaster
107 | );
108 | if (masterIndex === -1) throw new Error(`Could not find a master node`);
109 | this.#masterIndex = masterIndex;
110 | }
111 |
112 | private getMaster() {
113 | return {
114 | protocol: this.#protocols[this.#masterIndex],
115 | conn: this.#connections[this.#masterIndex],
116 | };
117 | }
118 |
119 | get protocol(): WireProtocol {
120 | return this.getMaster().protocol;
121 | }
122 |
123 | close() {
124 | for (const conn of this.#connections) {
125 | try {
126 | conn.close();
127 | } catch {
128 | // this is safe to ignore
129 | }
130 | }
131 | }
132 | }
133 |
--------------------------------------------------------------------------------
/src/collection/collection.ts:
--------------------------------------------------------------------------------
1 | import { ObjectId } from "../../deps.ts";
2 | import {
3 | MongoDriverError,
4 | MongoInvalidArgumentError,
5 | MongoServerError,
6 | } from "../error.ts";
7 | import type { WireProtocol } from "../protocol/mod.ts";
8 | import type {
9 | AggregateOptions,
10 | AggregatePipeline,
11 | CountOptions,
12 | CreateIndexOptions,
13 | DeleteOptions,
14 | DistinctOptions,
15 | Document,
16 | DropIndexOptions,
17 | DropOptions,
18 | Filter,
19 | FindAndModifyOptions,
20 | FindOptions,
21 | InsertDocument,
22 | InsertOptions,
23 | UpdateFilter,
24 | UpdateOptions,
25 | } from "../types.ts";
26 | import { AggregateCursor } from "./commands/aggregate.ts";
27 | import { FindCursor } from "./commands/find.ts";
28 | import { ListIndexesCursor } from "./commands/list_indexes.ts";
29 | import { update } from "./commands/update.ts";
30 |
31 | /**
32 | * A collection within a MongoDB Database
33 | * @module
34 | */
35 |
36 | /** A collection within a MongoDB Database */
37 | export class Collection {
38 | #protocol: WireProtocol;
39 | #dbName: string;
40 |
41 | constructor(protocol: WireProtocol, dbName: string, readonly name: string) {
42 | this.#protocol = protocol;
43 | this.#dbName = dbName;
44 | }
45 |
46 | /**
47 | * Get a FindCursor for the given filter
48 | *
49 | * @param filter The query used to match documents
50 | * @param options Additional options for the operation
51 | * @returns A cursor for the query
52 | */
53 | find(
54 | filter?: Filter,
55 | options?: FindOptions,
56 | ): FindCursor {
57 | return new FindCursor({
58 | filter,
59 | protocol: this.#protocol,
60 | collectionName: this.name,
61 | dbName: this.#dbName,
62 | options: options ?? {},
63 | });
64 | }
65 |
66 | /**
67 | * Find one Document using the given filter
68 | *
69 | * @param filter The query used to match for a document
70 | * @param options Additional options for the operation
71 | * @returns The document matched, or undefined if no document was found
72 | */
73 | findOne(
74 | filter?: Filter,
75 | options?: FindOptions,
76 | ): Promise {
77 | const cursor = this.find(filter, options);
78 | return cursor.next();
79 | }
80 |
81 | /**
82 | * Find and modify a document in one, returning the matching document.
83 | *
84 | * @param query The query used to match documents
85 | * @param options Additional options for the operation (e.g. containing update
86 | * or remove parameters)
87 | * @returns The document matched and modified
88 | */
89 | async findAndModify(
90 | filter?: Filter,
91 | options?: FindAndModifyOptions,
92 | ): Promise {
93 | const result = await this.#protocol.commandSingle<{
94 | value: T;
95 | ok: number;
96 | // deno-lint-ignore no-explicit-any
97 | lastErrorObject: any;
98 | }>(this.#dbName, {
99 | findAndModify: this.name,
100 | query: filter,
101 | ...options,
102 | });
103 | if (result.ok === 0) {
104 | throw new MongoDriverError("Could not execute findAndModify operation");
105 | }
106 | return result.value;
107 | }
108 |
109 | /**
110 | * Count the number of documents matching the given filter
111 | *
112 | * @param filter The query used to match documents
113 | * @param options Additional options for the operation
114 | * @returns The number of documents matching the filter
115 | */
116 | async countDocuments(
117 | filter?: Filter,
118 | options?: CountOptions,
119 | ): Promise {
120 | const pipeline: AggregatePipeline[] = [];
121 | if (filter) {
122 | pipeline.push({ $match: filter });
123 | }
124 |
125 | if (typeof options?.skip === "number") {
126 | pipeline.push({ $skip: options.limit });
127 | delete options.skip;
128 | }
129 |
130 | if (typeof options?.limit === "number") {
131 | pipeline.push({ $limit: options.limit });
132 | delete options.limit;
133 | }
134 |
135 | pipeline.push({ $group: { _id: 1, n: { $sum: 1 } } });
136 |
137 | const result = await this.aggregate<{ n: number }>(
138 | pipeline,
139 | options as AggregateOptions,
140 | ).next();
141 | if (result) return result.n;
142 | return 0;
143 | }
144 |
145 | /** A function that returns the estimated number of documents in the collection */
146 | async estimatedDocumentCount(): Promise {
147 | const pipeline = [
148 | { $collStats: { count: {} } },
149 | { $group: { _id: 1, n: { $sum: "$count" } } },
150 | ];
151 |
152 | const result = await this.aggregate<{ n: number }>(pipeline).next();
153 | if (result) return result.n;
154 | return 0;
155 | }
156 |
157 | /**
158 | * Insert a single document into the collection
159 | *
160 | * @param doc The document to insert
161 | * @param options Additional options for the operation
162 | * @returns The inserted document's ID
163 | */
164 | async insertOne(
165 | doc: InsertDocument,
166 | options?: InsertOptions,
167 | ): Promise>["_id"]> {
168 | const { insertedIds } = await this.insertMany([doc], options);
169 | return insertedIds[0];
170 | }
171 |
172 | /**
173 | * Insert multiple documents into the collection
174 | *
175 | * @param docs An array of documents to insert
176 | * @param options Additional options for the operation
177 | * @returns The inserted documents' IDs and the number of documents inserted
178 | */
179 | async insertMany(
180 | docs: InsertDocument[],
181 | options?: InsertOptions,
182 | ): Promise<
183 | {
184 | insertedIds: Required>["_id"][];
185 | insertedCount: number;
186 | }
187 | > {
188 | const insertedIds = docs.map((doc) => {
189 | if (!doc._id) {
190 | doc._id = new ObjectId();
191 | }
192 |
193 | return doc._id;
194 | });
195 |
196 | const res = await this.#protocol.commandSingle(this.#dbName, {
197 | insert: this.name,
198 | documents: docs,
199 | ordered: options?.ordered ?? true,
200 | writeConcern: options?.writeConcern,
201 | bypassDocumentValidation: options?.bypassDocumentValidation,
202 | comment: options?.comment,
203 | });
204 | const { writeErrors } = res;
205 | if (writeErrors) {
206 | const [{ errmsg }] = writeErrors;
207 | throw new MongoServerError(errmsg);
208 | }
209 | return {
210 | insertedIds,
211 | insertedCount: res.n,
212 | };
213 | }
214 |
215 | /**
216 | * Update a single document matching the given filter
217 | *
218 | * @param filter The query used to match the document
219 | * @param update The update to apply to the document
220 | * @param options Additional options for the operation
221 | * @returns The number of documents matched, modified, and upserted
222 | */
223 | async updateOne(
224 | filter: Filter,
225 | update: UpdateFilter,
226 | options?: UpdateOptions,
227 | ): Promise<
228 | {
229 | upsertedId: ObjectId | undefined;
230 | upsertedCount: number;
231 | matchedCount: number;
232 | modifiedCount: number;
233 | }
234 | > {
235 | const {
236 | upsertedIds,
237 | upsertedCount,
238 | matchedCount,
239 | modifiedCount,
240 | } = await this.updateMany(filter, update, {
241 | ...options,
242 | multi: false,
243 | });
244 | return {
245 | upsertedId: upsertedIds?.[0],
246 | upsertedCount,
247 | matchedCount,
248 | modifiedCount,
249 | };
250 | }
251 |
252 | /**
253 | * Update multiple documents matching the given filter
254 | *
255 | * @param filter The query used to match the documents
256 | * @param doc The update to apply to the documents
257 | * @param options Additional options for the operation
258 | * @returns The number of documents matched, modified, and upserted
259 | */
260 | updateMany(
261 | filter: Filter,
262 | doc: UpdateFilter,
263 | options?: UpdateOptions,
264 | ): Promise<
265 | {
266 | upsertedIds: ObjectId[] | undefined;
267 | upsertedCount: number;
268 | modifiedCount: number;
269 | matchedCount: number;
270 | }
271 | > {
272 | if (!hasAtomicOperators(doc)) {
273 | throw new MongoInvalidArgumentError(
274 | "Update document requires atomic operators",
275 | );
276 | }
277 |
278 | return update(this.#protocol, this.#dbName, this.name, filter, doc, {
279 | ...options,
280 | multi: options?.multi ?? true,
281 | });
282 | }
283 |
284 | /**
285 | * Replace a single document matching the given filter
286 | *
287 | * @param filter The query used to match the document
288 | * @param replacement The replacement document
289 | * @param options Additional options for the operation
290 | * @returns The number of documents matched, modified, and upserted
291 | */
292 | async replaceOne(
293 | filter: Filter,
294 | replacement: InsertDocument,
295 | options?: UpdateOptions,
296 | ): Promise<
297 | {
298 | upsertedId: ObjectId | undefined;
299 | upsertedCount: number;
300 | matchedCount: number;
301 | modifiedCount: number;
302 | }
303 | > {
304 | if (hasAtomicOperators(replacement)) {
305 | throw new MongoInvalidArgumentError(
306 | "Replacement document must not contain atomic operators",
307 | );
308 | }
309 |
310 | const { upsertedIds, upsertedCount, matchedCount, modifiedCount } =
311 | await update(
312 | this.#protocol,
313 | this.#dbName,
314 | this.name,
315 | filter,
316 | replacement,
317 | {
318 | ...options,
319 | multi: false,
320 | },
321 | );
322 |
323 | return {
324 | upsertedId: upsertedIds?.[0],
325 | upsertedCount,
326 | matchedCount,
327 | modifiedCount,
328 | };
329 | }
330 |
331 | /**
332 | * Delete multiple documents matching the given filter
333 | *
334 | * @param filter The query used to match the documents
335 | * @param options Additional options for the operation
336 | * @returns The number of documents deleted
337 | */
338 | async deleteMany(
339 | filter: Filter,
340 | options?: DeleteOptions,
341 | ): Promise {
342 | const res = await this.#protocol.commandSingle(this.#dbName, {
343 | delete: this.name,
344 | deletes: [
345 | {
346 | q: filter,
347 | limit: options?.limit ?? 0,
348 | collation: options?.collation,
349 | hint: options?.hint,
350 | comment: options?.comment,
351 | },
352 | ],
353 | ordered: options?.ordered ?? true,
354 | writeConcern: options?.writeConcern,
355 | });
356 | return res.n;
357 | }
358 |
359 | /**
360 | * Delete a single document matching the given filter
361 | *
362 | * @param filter The query used to match the document
363 | * @param options Additional options for the operation
364 | * @returns The number of documents deleted
365 | */
366 | deleteOne(
367 | filter: Filter,
368 | options?: DeleteOptions,
369 | ): Promise {
370 | return this.deleteMany(filter, { ...options, limit: 1 });
371 | }
372 |
373 | /**
374 | * Drop the collection from the database
375 | *
376 | * @param options Additional options for the operation
377 | */
378 | async drop(options?: DropOptions): Promise {
379 | const _res = await this.#protocol.commandSingle(this.#dbName, {
380 | drop: this.name,
381 | ...options,
382 | });
383 | }
384 |
385 | async distinct(
386 | key: string,
387 | query?: Filter,
388 | options?: DistinctOptions,
389 | // deno-lint-ignore no-explicit-any
390 | ): Promise {
391 | const { values } = await this.#protocol.commandSingle(this.#dbName, {
392 | distinct: this.name,
393 | key,
394 | query,
395 | ...options,
396 | });
397 | return values;
398 | }
399 |
400 | /**
401 | * Perform aggregation on the collection
402 | *
403 | * @param pipeline The aggregation pipeline
404 | * @param options Additional options for the operation
405 | * @returns A cursor for the aggregation
406 | */
407 | aggregate(
408 | pipeline: AggregatePipeline[],
409 | options?: AggregateOptions,
410 | ): AggregateCursor {
411 | return new AggregateCursor({
412 | pipeline,
413 | protocol: this.#protocol,
414 | dbName: this.#dbName,
415 | collectionName: this.name,
416 | options,
417 | });
418 | }
419 |
420 | /**
421 | * Create an index on the collection
422 | *
423 | * @param options The options for the operation
424 | * @returns The result of the operation
425 | */
426 | async createIndexes(
427 | options: CreateIndexOptions,
428 | ): Promise<
429 | {
430 | ok: number;
431 | createdCollectionAutomatically: boolean;
432 | numIndexesBefore: number;
433 | numIndexesAfter: number;
434 | }
435 | > {
436 | const res = await this.#protocol.commandSingle<{
437 | ok: number;
438 | createdCollectionAutomatically: boolean;
439 | numIndexesBefore: number;
440 | numIndexesAfter: number;
441 | }>(this.#dbName, {
442 | createIndexes: this.name,
443 | ...options,
444 | });
445 | return res;
446 | }
447 |
448 | /**
449 | * Drop an index from the collection
450 | *
451 | * @param options The options for the operation
452 | * @returns The result of the operation
453 | */
454 | async dropIndexes(options: DropIndexOptions): Promise<{
455 | ok: number;
456 | nIndexesWas: number;
457 | }> {
458 | const res = await this.#protocol.commandSingle<{
459 | ok: number;
460 | nIndexesWas: number;
461 | }>(
462 | this.#dbName,
463 | {
464 | dropIndexes: this.name,
465 | ...options,
466 | },
467 | );
468 |
469 | return res;
470 | }
471 |
472 | /**
473 | * List the indexes on the collection
474 | *
475 | * @returns A cursor for the indexes
476 | */
477 | listIndexes(): ListIndexesCursor<
478 | { v: number; key: Document; name: string; ns?: string }
479 | > {
480 | return new ListIndexesCursor<
481 | { v: number; key: Document; name: string; ns?: string }
482 | >({
483 | protocol: this.#protocol,
484 | dbName: this.#dbName,
485 | collectionName: this.name,
486 | });
487 | }
488 | }
489 |
490 | /**
491 | * Check if a document contains atomic operators
492 | *
493 | * @param doc The document to check
494 | * @returns Whether the document contains atomic operators
495 | */
496 | export function hasAtomicOperators(doc: Document | Document[]): boolean {
497 | if (Array.isArray(doc)) {
498 | for (const document of doc) {
499 | if (hasAtomicOperators(document)) {
500 | return true;
501 | }
502 | }
503 | return false;
504 | }
505 | const keys = Object.keys(doc);
506 | return keys.length > 0 && keys[0][0] === "$";
507 | }
508 |
--------------------------------------------------------------------------------
/src/collection/commands/aggregate.ts:
--------------------------------------------------------------------------------
1 | import { CommandCursor } from "../../protocol/cursor.ts";
2 | import type { WireProtocol } from "../../protocol/protocol.ts";
3 | import type { AggregateOptions, Document } from "../../types.ts";
4 |
/** Dependencies and inputs needed to issue one `aggregate` command. */
interface AggregateCursorContext {
  dbName: string;
  collectionName: string;
  protocol: WireProtocol;
  pipeline: Document;
  options?: AggregateOptions;
}
12 |
13 | export class AggregateCursor extends CommandCursor {
14 | #context: AggregateCursorContext;
15 |
16 | private async executor() {
17 | const { dbName, pipeline, collectionName, protocol, options } =
18 | this.#context;
19 |
20 | const { cursor } = await protocol.commandSingle(dbName, {
21 | aggregate: collectionName,
22 | pipeline,
23 | cursor: {
24 | batchSize: options?.batchSize || 1000,
25 | },
26 | ...options,
27 | });
28 | return {
29 | ...cursor,
30 | id: cursor.id.toString(),
31 | };
32 | }
33 |
34 | constructor(context: AggregateCursorContext) {
35 | super(context.protocol, () => this.executor());
36 | this.#context = context;
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/src/collection/commands/find.ts:
--------------------------------------------------------------------------------
1 | import { CommandCursor, type WireProtocol } from "../../protocol/mod.ts";
2 | import type { Document, FindOptions } from "../../types.ts";
3 |
/** Dependencies and inputs needed to issue one `find` command. */
interface FindCursorContext {
  dbName: string;
  collectionName: string;
  protocol: WireProtocol;
  options: FindOptions;
  filter?: Document;
}
11 |
12 | export class FindCursor extends CommandCursor {
13 | #context: FindCursorContext;
14 |
15 | private async executor() {
16 | const { protocol, filter, dbName, collectionName, options } = this.#context;
17 | const { cursor } = await protocol.commandSingle(dbName, {
18 | find: collectionName,
19 | filter,
20 | batchSize: 1,
21 | noCursorTimeout: false,
22 | ...options,
23 | });
24 | return {
25 | ...cursor,
26 | id: cursor.id.toString(),
27 | };
28 | }
29 |
30 | constructor(context: FindCursorContext) {
31 | super(context.protocol, () => this.executor());
32 | this.#context = {
33 | ...context,
34 | options: {
35 | ...context.options,
36 | },
37 | };
38 | }
39 |
40 | limit(limit: number): this {
41 | this.#context.options.limit = limit;
42 | return this;
43 | }
44 |
45 | skip(skip: number): this {
46 | this.#context.options.skip = skip;
47 | return this;
48 | }
49 |
50 | sort(sort: Document): this {
51 | this.#context.options.sort = sort;
52 | return this;
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
/src/collection/commands/list_indexes.ts:
--------------------------------------------------------------------------------
1 | import { CommandCursor, type WireProtocol } from "../../protocol/mod.ts";
2 |
/** Dependencies needed to issue one `listIndexes` command. */
interface ListIndexesCursorContext {
  dbName: string;
  collectionName: string;
  protocol: WireProtocol;
}
8 |
9 | export class ListIndexesCursor extends CommandCursor {
10 | #context: ListIndexesCursorContext;
11 |
12 | private async executor() {
13 | const { protocol, dbName, collectionName } = this.#context;
14 | const { cursor } = await protocol.commandSingle(dbName, {
15 | listIndexes: collectionName,
16 | });
17 | return {
18 | ...cursor,
19 | id: cursor.id.toString(),
20 | };
21 | }
22 |
23 | constructor(context: ListIndexesCursorContext) {
24 | super(context.protocol, () => this.executor());
25 | this.#context = context;
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/src/collection/commands/update.ts:
--------------------------------------------------------------------------------
1 | import type { ObjectId } from "../../../deps.ts";
2 | import type { WireProtocol } from "../../protocol/mod.ts";
3 | import type { Document, UpdateOptions } from "../../types.ts";
4 |
5 | interface UpdateResponse {
6 | ok: number;
7 | nModified: number;
8 | n: number;
9 | upserted?: {
10 | index: number;
11 | _id: ObjectId;
12 | }[];
13 | }
14 |
15 | export async function update(
16 | protocol: WireProtocol,
17 | dbName: string,
18 | collectionName: string,
19 | query: Document,
20 | doc: Document,
21 | options?: UpdateOptions,
22 | ) {
23 | const { n, nModified, upserted } = await protocol.commandSingle<
24 | UpdateResponse
25 | >(dbName, {
26 | update: collectionName,
27 | updates: [
28 | {
29 | q: query,
30 | u: doc,
31 | upsert: options?.upsert ?? false,
32 | multi: options?.multi ?? true,
33 | collation: options?.collation,
34 | arrayFilters: options?.arrayFilters,
35 | hint: options?.hint,
36 | },
37 | ],
38 | writeConcern: options?.writeConcern,
39 | ordered: options?.ordered ?? true,
40 | bypassDocumentValidation: options?.bypassDocumentValidation,
41 | comment: options?.comment,
42 | });
43 |
44 | return {
45 | upsertedIds: upserted?.map((id) => id._id),
46 | upsertedCount: upserted?.length ?? 0,
47 | modifiedCount: nModified,
48 | matchedCount: n,
49 | };
50 | }
51 |
--------------------------------------------------------------------------------
/src/collection/mod.ts:
--------------------------------------------------------------------------------
1 | export { Collection } from "./collection.ts";
2 |
--------------------------------------------------------------------------------
/src/database.ts:
--------------------------------------------------------------------------------
1 | import type { Cluster } from "./cluster.ts";
2 | import { Collection } from "./collection/mod.ts";
3 | import { CommandCursor } from "./protocol/mod.ts";
4 | import type {
5 | CreateCollectionOptions,
6 | CreateUserOptions,
7 | Document,
8 | } from "./types.ts";
9 | import type { WriteConcern } from "./types/read_write_concern.ts";
10 |
// Wire-level shape of the `listCollections` command reply.
// NOTE(review): "Reponse" is a typo for "Response"; kept as-is because the
// name is referenced elsewhere in this file.
interface ListCollectionsReponse {
  cursor: {
    // Server-assigned cursor id.
    id: bigint;
    // Namespace the cursor belongs to.
    ns: string;
    firstBatch: [
      {
        name: string;
        type: "collection";
      },
    ];
  };
  ok: 1;
}

/** A single entry yielded by `Database.listCollections`. */
export interface ListCollectionsResult {
  name: string;
  type: "collection";
}
29 |
30 | /** A Database on a MongoDB Server */
31 | export class Database {
32 | #cluster: Cluster;
33 |
34 | constructor(cluster: Cluster, readonly name: string) {
35 | this.#cluster = cluster;
36 | }
37 |
38 | /** Drop a database, optionally providing a writeConcern */
39 | async dropDatabase(writeConcern?: WriteConcern): Promise {
40 | return await this.#cluster.protocol.commandSingle(this.name, {
41 | dropDatabase: 1,
42 | writeConcern,
43 | });
44 | }
45 |
46 | /** Get a collection by name */
47 | collection(name: string): Collection {
48 | return new Collection(this.#cluster.protocol, this.name, name);
49 | }
50 |
51 | /** List all collections in the database */
52 | listCollections(options: {
53 | filter?: Document;
54 | nameOnly?: boolean;
55 | authorizedCollections?: boolean;
56 | comment?: Document;
57 | } = {}): CommandCursor {
58 | return new CommandCursor(
59 | this.#cluster.protocol,
60 | async () => {
61 | const { cursor } = await this.#cluster.protocol.commandSingle<
62 | ListCollectionsReponse
63 | >(this.name, {
64 | listCollections: 1,
65 | ...options,
66 | });
67 | return {
68 | id: cursor.id,
69 | ns: cursor.ns,
70 | firstBatch: cursor.firstBatch,
71 | };
72 | },
73 | );
74 | }
75 |
76 | /** List all collection names in the database */
77 | async listCollectionNames(options: {
78 | filter?: Document;
79 | authorizedCollections?: boolean;
80 | comment?: Document;
81 | } = {}): Promise {
82 | const cursor = this.listCollections({
83 | ...options,
84 | nameOnly: true,
85 | authorizedCollections: true,
86 | });
87 | const names: string[] = [];
88 | for await (const item of cursor) {
89 | names.push(item.name);
90 | }
91 | return names;
92 | }
93 |
94 | /**
95 | * `createCollection` executes a create command to create a new collection with the specified name and options.
96 | *
97 | * https://www.mongodb.com/docs/manual/reference/command/create/#mongodb-dbcommand-dbcmd.create
98 | */
99 | async createCollection(
100 | name: string,
101 | options?: CreateCollectionOptions,
102 | ): Promise> {
103 | await this.#cluster.protocol.commandSingle(
104 | this.name,
105 | { create: name, ...options },
106 | );
107 |
108 | return this.collection(name);
109 | }
110 |
111 | /** Create a user on the Database */
112 | createUser(
113 | username: string,
114 | password: string,
115 | options?: CreateUserOptions,
116 | ): Promise {
117 | return this.#cluster.protocol.commandSingle(this.name, {
118 | createUser: options?.username ?? username,
119 | pwd: options?.password ?? password,
120 | customData: options?.customData,
121 | roles: options?.roles ?? [],
122 | writeConcern: options?.writeConcern,
123 | authenticationRestrictions: options?.authenticationRestrictions,
124 | mechanisms: options?.mechanisms,
125 | digestPassword: options?.digestPassword,
126 | comment: options?.comment,
127 | });
128 | }
129 |
130 | dropUser(username: string, options: {
131 | writeConcern?: Document;
132 | comment?: Document;
133 | } = {}): Promise {
134 | return this.#cluster.protocol.commandSingle(this.name, {
135 | dropUser: username,
136 | writeConcern: options?.writeConcern,
137 | comment: options?.comment,
138 | });
139 | }
140 |
141 | /** Run a command on the Database */
142 | // deno-lint-ignore no-explicit-any
143 | runCommand(body: Document): Promise {
144 | return this.#cluster.protocol.commandSingle(this.name, body);
145 | }
146 | }
147 |
--------------------------------------------------------------------------------
/src/error.ts:
--------------------------------------------------------------------------------
/**
 * Representation of a MongoDB server error response.
 * @public
 */
export interface MongoErrorInfo {
  ok: 0; // always 0 for an error reply
  errmsg: string; // human-readable error message from the server
  code: number; // numeric server error code
  codeName: string; // symbolic name corresponding to `code`
}
11 |
12 | /**
13 | * A base class from which Mongo errors are derived.
14 | * @public
15 | */
16 | export abstract class MongoError extends Error {
17 | constructor(info: MongoErrorInfo | string) {
18 | super(`MongoError: ${JSON.stringify(info)}`);
19 | }
20 | }
21 |
/**
 * A class representation of an error occurring during the driver's execution.
 * @public
 */
export class MongoDriverError extends MongoError {
  /**
   * @param info A string containing the error's message.
   */
  constructor(info: string) {
    super(info);
  }
}
34 |
35 | /**
36 | * A class representation of an error returned by MongoDB server.
37 | * @public
38 | */
39 | export class MongoServerError extends MongoError implements MongoErrorInfo {
40 | ok: 0;
41 | errmsg: string;
42 | code: number;
43 | codeName: string;
44 |
45 | /**
46 | * @param info An object representing the server's error response.
47 | */
48 | constructor(info: MongoErrorInfo) {
49 | super(info);
50 |
51 | this.ok = info.ok;
52 | this.errmsg = info.errmsg;
53 | this.code = info.code;
54 | this.codeName = info.codeName;
55 | }
56 | }
57 |
/**
 * A class representation of a command with invalid arguments
 * (thrown e.g. when an update document lacks atomic operators).
 * @public
 */
export class MongoInvalidArgumentError extends MongoError {
  /**
   * @param info A string containing the error's message.
   */
  constructor(info: string) {
    super(info);
  }
}
70 |
// An error for unexpected runtime conditions inside the driver
// (imported by the GridFS module, among others).
export class MongoRuntimeError extends MongoDriverError {
  constructor(message: string) {
    super(message);
  }

  // Expose a distinct `name` so logs can tell this apart from its base class.
  override get name(): string {
    return "MongoRuntimeError";
  }
}
80 |
--------------------------------------------------------------------------------
/src/gridfs/bucket.ts:
--------------------------------------------------------------------------------
1 | import { ObjectId } from "../../deps.ts";
2 | import type { Collection } from "../collection/collection.ts";
3 | import type { FindCursor } from "../collection/commands/find.ts";
4 | import type { Database } from "../database.ts";
5 | import { MongoRuntimeError } from "../error.ts";
6 | import type { Filter } from "../types.ts";
7 | import type {
8 | Chunk,
9 | File,
10 | FileId,
11 | GridFSBucketOptions,
12 | GridFSFindOptions,
13 | GridFSUploadOptions,
14 | } from "../types/gridfs.ts";
15 | import { checkIndexes } from "./indexes.ts";
16 | import { createUploadStream } from "./upload.ts";
17 |
18 | /**
19 | * GridFSBucket is a representation of a GridFSBucket on a Database.
20 | * @module
21 | */
22 |
23 | /** Representation of a GridFSBucket on a Database */
24 | export class GridFSBucket {
25 | #chunksCollection: Collection;
26 | #filesCollection: Collection;
27 | #chunkSizeBytes: number;
28 | #checkedIndexes = false;
29 |
30 | private readonly getBucketData = () => ({
31 | filesCollection: this.#filesCollection,
32 | chunksCollection: this.#chunksCollection,
33 | chunkSizeBytes: this.#chunkSizeBytes,
34 | });
35 |
36 | /**
37 | * Create a new GridFSBucket object on @db with the given @options.
38 | */
39 | constructor(db: Database, options: GridFSBucketOptions = {}) {
40 | const newLocal = options.bucketName ?? "fs";
41 | this.#chunksCollection = db.collection(`${newLocal}.chunks`);
42 | this.#filesCollection = db.collection(`${newLocal}.files`);
43 | this.#chunkSizeBytes = options.chunkSizeBytes ?? 255 * 1024;
44 | }
45 |
46 | /**
47 | * Opens a Stream that the application can write the contents of the file to.
48 | * The driver generates the file id.
49 | *
50 | * Returns a Stream to which the application will write the contents.
51 | *
52 | * Note: this method is provided for backward compatibility. In languages
53 | * that use generic type parameters, this method may be omitted since
54 | * the TFileId type might not be an ObjectId.
55 | */
56 | openUploadStream(
57 | filename: string,
58 | options?: GridFSUploadOptions,
59 | ): Promise> {
60 | return this.openUploadStreamWithId(
61 | new ObjectId(),
62 | filename,
63 | options,
64 | );
65 | }
66 |
67 | /**
68 | * Opens a Stream that the application can write the contents of the file to.
69 | * The application provides a custom file id.
70 | *
71 | * Returns a Stream to which the application will write the contents.
72 | */
73 | async openUploadStreamWithId(
74 | id: FileId,
75 | filename: string,
76 | options?: GridFSUploadOptions,
77 | ): Promise> {
78 | if (!this.#checkedIndexes) await this.#checkIndexes();
79 | return createUploadStream(this.getBucketData(), filename, id, options);
80 | }
81 |
82 | /**
83 | * Uploads a user file to a GridFS bucket. The driver generates the file id.
84 | *
85 | * Reads the contents of the user file from the @source Stream and uploads it
86 | * as chunks in the chunks collection. After all the chunks have been uploaded,
87 | * it creates a files collection document for @filename in the files collection.
88 | *
89 | * Returns the id of the uploaded file.
90 | *
91 | * Note: this method is provided for backward compatibility. In languages
92 | * that use generic type parameters, this method may be omitted since
93 | * the TFileId type might not be an ObjectId.
94 | */
95 | async uploadFromStream(
96 | filename: string,
97 | source: ReadableStream,
98 | options?: GridFSUploadOptions,
99 | ): Promise {
100 | const objectid = new ObjectId();
101 | await source.pipeTo(
102 | await this.openUploadStreamWithId(objectid, filename, options),
103 | );
104 | return objectid;
105 | }
106 |
107 | /**
108 | * Uploads a user file to a GridFS bucket. The application supplies a custom file id.
109 | *
110 | * Reads the contents of the user file from the @source Stream and uploads it
111 | * as chunks in the chunks collection. After all the chunks have been uploaded,
112 | * it creates a files collection document for @filename in the files collection.
113 | *
114 | * Note: there is no need to return the id of the uploaded file because the application
115 | * already supplied it as a parameter.
116 | */
117 | async uploadFromStreamWithId(
118 | id: FileId,
119 | filename: string,
120 | source: ReadableStream,
121 | options: GridFSUploadOptions,
122 | ): Promise {
123 | await source.pipeTo(
124 | await this.openUploadStreamWithId(id, filename, options),
125 | );
126 | }
127 |
128 | /** Opens a Stream from which the application can read the contents of the stored file
129 | * specified by @id.
130 | *
131 | * Returns a Stream.
132 | */
133 | async openDownloadStream(id: FileId): Promise> {
134 | if (!this.#checkedIndexes) await this.#checkIndexes();
135 |
136 | return new ReadableStream({
137 | start: async (controller) => {
138 | const collection = this.#chunksCollection.find({ files_id: id });
139 | await collection.forEach((value) =>
140 | controller.enqueue(value?.data.buffer)
141 | );
142 | controller.close();
143 | },
144 | });
145 | }
146 |
147 | /**
148 | * Downloads the contents of the stored file specified by @id and writes
149 | * the contents to the @destination Stream.
150 | */
151 | async downloadToStream(id: FileId, destination: WritableStream) {
152 | await (await this.openDownloadStream(id)).pipeTo(destination);
153 | }
154 |
155 | /**
156 | * Given a @id, delete this stored file’s files collection document and
157 | * associated chunks from a GridFS bucket.
158 | */
159 | async delete(id: FileId) {
160 | await this.#filesCollection.deleteOne({ _id: id });
161 | const response = await this.#chunksCollection.deleteMany({ files_id: id });
162 | if (!response) {
163 | throw new MongoRuntimeError(`File not found for id ${id}`);
164 | }
165 | }
166 |
167 | /**
168 | * Find and return the files collection documents that match @filter.
169 | */
170 | find(
171 | filter: Filter,
172 | options: GridFSFindOptions = {},
173 | ): FindCursor {
174 | return this.#filesCollection.find(filter ?? {}, options);
175 | }
176 |
177 | /**
178 | * Drops the files and chunks collections associated with
179 | * this bucket.
180 | */
181 | async drop() {
182 | await this.#filesCollection.drop();
183 | await this.#chunksCollection.drop();
184 | }
185 |
186 | #checkIndexes = () =>
187 | checkIndexes(
188 | this.#filesCollection,
189 | this.#chunksCollection,
190 | (value) => (this.#checkedIndexes = value),
191 | );
192 | }
193 |
--------------------------------------------------------------------------------
/src/gridfs/indexes.ts:
--------------------------------------------------------------------------------
1 | import type { Collection } from "../collection/collection.ts";
2 | import type { Document, IndexOptions } from "../types.ts";
3 | import type { Chunk, File } from "../types/gridfs.ts";
4 |
// Ensure `index` exists on `collection`, creating it only when no existing
// index already covers the same set of key names.
async function ensureIndex(
  index: IndexOptions,
  collection: Collection,
): Promise["createIndexes"]>> {
  // We need to check collection emptiness (ns not found error for listIndexes on empty collection)
  const doc = await collection.findOne({}, { projection: { _id: 1 } });
  if (doc === undefined) {
    // Empty collection: listIndexes would fail, so create unconditionally.
    return collection.createIndexes({ indexes: [index] });
  }
  const keys = Object.keys(index.key);
  const indexes = await collection.listIndexes().toArray();
  // Match on key names only; index direction values (1/-1) are not compared,
  // so an index over the same fields in any order counts as existing.
  const existing = indexes.find(({ key }) => {
    const currentKeys = Object.keys(key);
    return currentKeys.length === keys.length &&
      currentKeys.every((k) => keys.includes(k));
  });
  if (existing === undefined) {
    return collection.createIndexes({ indexes: [index] });
  } else {
    // Report a successful no-op in the same shape createIndexes returns.
    return {
      ok: 1,
      createdCollectionAutomatically: false,
      numIndexesBefore: indexes.length,
      numIndexesAfter: indexes.length,
    };
  }
}
32 |
// Files-collection index ({ filename, uploadDate }) recommended by the
// GridFS spec for filename-based lookups.
const fileIndexSpec = {
  name: "gridFSFiles",
  key: { filename: 1, uploadDate: 1 },
  background: false,
};
/** Ensure the standard GridFS files-collection index exists on @collection. */
export function createFileIndex(collection: Collection) {
  return ensureIndex(fileIndexSpec, collection);
}
41 |
42 | const chunkIndexSpec = {
43 | name: "gridFSFiles",
44 | key: { files_id: 1, n: 1 },
45 | unique: true,
46 | background: false,
47 | };
48 | export function createChunksIndex(collection: Collection) {
49 | return ensureIndex(chunkIndexSpec, collection);
50 | }
51 |
52 | export async function checkIndexes(
53 | filesCollection: Collection,
54 | chunksCollection: Collection,
55 | hasCheckedIndexes: (value: boolean) => void,
56 | ) {
57 | await createFileIndex(filesCollection);
58 | await createChunksIndex(chunksCollection);
59 | hasCheckedIndexes(true);
60 | }
61 |
--------------------------------------------------------------------------------
/src/gridfs/upload.ts:
--------------------------------------------------------------------------------
1 | import { Binary, type ObjectId } from "../../deps.ts";
2 | import type { Collection } from "../collection/mod.ts";
3 | import type { Chunk, File, GridFSUploadOptions } from "../types/gridfs.ts";
4 |
/** Internal bundle of the collections and chunk size a bucket operates on. */
export interface BucketInfo {
  filesCollection: Collection;
  chunksCollection: Collection;
  chunkSizeBytes: number;
}
10 |
/**
 * Build a WritableStream that splits incoming bytes into fixed-size GridFS
 * chunk documents as they arrive, then writes the files-collection metadata
 * document when the stream is closed.
 */
export function createUploadStream(
  { chunkSizeBytes, chunksCollection, filesCollection }: BucketInfo,
  filename: string,
  id: ObjectId,
  options?: GridFSUploadOptions,
) {
  // Per-upload chunk size; an explicit option overrides the bucket default.
  const chunkSizeBytesCombined = options?.chunkSizeBytes ?? chunkSizeBytes;
  // Single staging buffer reused for every chunk; it is only overwritten
  // after the awaited insertOne below has completed.
  const uploadBuffer = new Uint8Array(new ArrayBuffer(chunkSizeBytesCombined));
  let bufferPosition = 0; // bytes currently staged in uploadBuffer
  let chunksInserted = 0; // next chunk sequence number `n`
  let fileSizeBytes = 0; // running total for the files document
  return new WritableStream({
    write: async (chunk: Uint8Array) => {
      let remaining = chunk;
      while (remaining.byteLength) {
        const availableBuffer = chunkSizeBytesCombined - bufferPosition;
        if (remaining.byteLength < availableBuffer) {
          // Not enough to fill a chunk yet: stage and wait for more input.
          uploadBuffer.set(remaining, bufferPosition);
          bufferPosition += remaining.byteLength;
          fileSizeBytes += remaining.byteLength;
          break;
        }
        // Fill the staging buffer to exactly one chunk and flush it.
        const sliced = remaining.slice(0, availableBuffer);
        remaining = remaining.slice(availableBuffer);
        uploadBuffer.set(sliced, bufferPosition);

        // NOTE(review): the live staging buffer is handed to Binary; this
        // assumes insertOne serializes it before the next loop iteration
        // overwrites it — confirm against the driver's insert path.
        await chunksCollection.insertOne({
          files_id: id,
          n: chunksInserted,
          data: new Binary(uploadBuffer),
        });

        bufferPosition = 0;
        fileSizeBytes += sliced.byteLength;
        ++chunksInserted;
      }
    },
    close: async () => {
      // Write the last bytes that are left in the buffer
      if (bufferPosition) {
        await chunksCollection.insertOne({
          files_id: id,
          n: chunksInserted,
          data: new Binary(uploadBuffer.slice(0, bufferPosition)),
        });
      }

      // Metadata document that makes the uploaded chunks discoverable.
      await filesCollection.insertOne({
        _id: id,
        length: fileSizeBytes,
        chunkSize: chunkSizeBytesCombined,
        uploadDate: new Date(),
        filename: filename,
        metadata: options?.metadata,
      });
    },
  });
}
69 |
--------------------------------------------------------------------------------
/src/protocol/cursor.ts:
--------------------------------------------------------------------------------
1 | import { Long } from "../../deps.ts";
2 | import type { Document } from "../types.ts";
3 | import { parseNamespace } from "../utils/ns.ts";
4 | import type { WireProtocol } from "./protocol.ts";
5 |
/** Cursor metadata returned by a cursor-opening command (find/aggregate/...). */
export interface CommandCursorOptions {
  // Server-side cursor id; 0 marks an exhausted cursor (see CommandCursor.next).
  id: bigint | number | string;
  // Namespace in "db.collection" form.
  ns: string;
  // Documents returned inline with the opening command.
  firstBatch: T[];
  maxTimeMS?: number;
  comment?: Document;
}
13 |
export class CommandCursor {
  // Server-side cursor id; 0n marks exhaustion. Undefined until the opening
  // command has been executed.
  #id?: bigint;
  #protocol: WireProtocol;
  // Documents fetched but not yet handed to the consumer.
  #batches: T[] = [];
  #db?: string;
  #collection?: string;

  // Lazily runs the opening command on first consumption.
  #executor: () => Promise>;
  #executed = false;

  constructor(
    protocol: WireProtocol,
    executor: () => Promise>,
  ) {
    this.#protocol = protocol;
    this.#executor = executor;
  }

  // Run the opening command once and cache the first batch plus cursor
  // metadata (id and namespace).
  // NOTE(review): #executed is set before the executor resolves, so a
  // throwing executor leaves the cursor marked executed with no id —
  // confirm subsequent next() behavior is acceptable in that case.
  private async execute() {
    this.#executed = true;
    const options = await this.#executor();
    this.#batches = options.firstBatch;
    this.#id = BigInt(options.id);
    const { db, collection } = parseNamespace(options.ns);
    this.#db = db;
    this.#collection = collection;
  }

  // Return the next document, or undefined when the cursor is exhausted.
  async next(): Promise {
    // Fast path: serve from the buffered batch.
    if (this.#batches.length > 0) {
      return this.#batches.shift();
    }

    if (!this.#executed) {
      await this.execute();
      return this.#batches.shift();
    }

    if (this.#id === 0n) {
      return undefined;
    }

    // Buffer drained but the server cursor is still open: getMore.
    const { cursor } = await this.#protocol.commandSingle(this.#db!, {
      getMore: Long.fromBigInt(this.#id!),
      collection: this.#collection,
    });
    this.#batches = cursor.nextBatch || [];
    this.#id = BigInt(cursor.id.toString());
    return this.#batches.shift();
  }

  async *[Symbol.asyncIterator](): AsyncGenerator {
    // Keep pulling until the buffer is empty AND the server cursor is closed
    // (an unset #id means we have not executed yet, so the loop still runs).
    while (this.#batches.length > 0 || this.#id !== 0n) {
      const value = await this.next();
      if (value !== undefined) {
        yield value;
      }
    }
  }

  // Invoke `callback` for every document, in order.
  async forEach(callback: (item: T, index: number) => void) {
    let index = 0;
    for await (const item of this) {
      // Falsy items are skipped; documents are objects, so in practice this
      // only guards against undefined entries.
      if (item) {
        callback(item, index++);
      }
    }
  }

  // Map every document through `callback` and collect the results.
  async map(callback: (item: T, index: number) => M): Promise {
    let index = 0;
    const result = [];
    for await (const item of this) {
      if (item) {
        const newItem = callback(item, index++);
        result.push(newItem);
      }
    }
    return result;
  }

  // Drain the cursor into an array.
  toArray(): Promise {
    return this.map((item) => item);
  }
}
99 |
--------------------------------------------------------------------------------
/src/protocol/handshake.ts:
--------------------------------------------------------------------------------
1 | import type { Document } from "../types.ts";
2 | import type { WireProtocol } from "./protocol.ts";
3 |
// Client metadata sent with the handshake so the server can identify the
// driver and host platform in its logs.
export const driverMetadata = {
  driver: {
    name: "Deno Mongo",
    version: "v0.0.1",
  },
  os: {
    type: Deno.build.os,
    name: Deno.build.os,
    architecture: Deno.build.arch,
  },
};

/** Shape of the isMaster/hello command document sent during the handshake. */
export interface HandshakeDocument extends Document {
  ismaster: boolean;
  // deno-lint-ignore no-explicit-any
  client: any;
  compression: string[];
  saslSupportedMechs?: string;
  speculativeAuthenticate?: Document;
}
24 |
25 | interface HandshakeResponse {
26 | ismaster: string;
27 | maxBsonObjectSize: number;
28 | maxMessageSizeBytes: number;
29 | maxWriteBatchSize: number;
30 | localTime: Date;
31 | logicalSessionTimeoutMinutes: number;
32 | connectionId: number;
33 | minWireVersion: number;
34 | maxWireVersion: number;
35 | readOnly: boolean;
36 | ok: number;
37 | }
38 |
39 | export async function handshake(
40 | protocol: WireProtocol,
41 | ): Promise {
42 | const reply = await protocol.commandSingle("admin", {
43 | isMaster: true,
44 | client: driverMetadata,
45 | });
46 | return reply;
47 | }
48 |
--------------------------------------------------------------------------------
/src/protocol/header.ts:
--------------------------------------------------------------------------------
// MongoDB wire-protocol operation codes.
// NOTE(review): "REPLAY" is presumably a typo for OP_REPLY (opcode 1), but
// renaming the exported member would break callers, so it is left as-is.
export enum OpCode {
  REPLAY = 1,
  UPDATE = 2001,
  INSERT = 2002,
  RESERVED = 2003,
  QUERY = 2004,
  GET_MORE = 2005,
  DELETE = 2006,
  KILL_CURSORS = 2007,
  MSG = 2013,
}

/** The 16-byte wire-message header; every field is a little-endian int32. */
export interface MessageHeader {
  // Total message length in bytes, including this header.
  messageLength: number;
  requestId: number;
  // requestId of the message this one replies to (0 for requests).
  responseTo: number;
  opCode: OpCode;
}
19 |
20 | export function setHeader(
21 | view: DataView,
22 | header: MessageHeader,
23 | ) {
24 | view.setInt32(0, header.messageLength, true);
25 | view.setInt32(4, header.requestId, true);
26 | view.setInt32(8, header.responseTo, true);
27 | view.setInt32(12, header.opCode, true);
28 | }
29 |
30 | export function parseHeader(buffer: Uint8Array): MessageHeader {
31 | const view = new DataView(buffer.buffer);
32 | return {
33 | messageLength: view.getUint32(0, true),
34 | requestId: view.getUint32(4, true),
35 | responseTo: view.getUint32(8, true),
36 | opCode: view.getUint32(12, true),
37 | };
38 | }
39 |
--------------------------------------------------------------------------------
/src/protocol/message.ts:
--------------------------------------------------------------------------------
1 | import { deserialize, serialize } from "../../deps.ts";
2 | import type { Document } from "../types.ts";
3 | import { type MessageHeader, OpCode, setHeader } from "./header.ts";
4 |
// OP_MSG flag bits packed into a 32-bit word.
type MessageFlags = number;

const encoder = new TextEncoder();
const decoder = new TextDecoder();

// Kind-0 section: exactly one BSON document.
interface Section0 {
  document: Document;
}

// Kind-1 section: a document sequence preceded by a cstring identifier.
interface Section1 {
  identifier: string;
  documents: Document[];
}

export type Section = Section0 | Section1;

/** A parsed (or to-be-serialized) OP_MSG message. */
export interface Message {
  responseTo: number;
  flags?: MessageFlags;
  sections: Section[];
  checksum?: number;
  requestId: number;
}
28 |
29 | function serializeSections(
30 | sections: Section[],
31 | ): { length: number; sections: Uint8Array[] } {
32 | let totalLen = 0;
33 | const buffers = sections.map((section) => {
34 | if ("document" in section) {
35 | const document = serialize(section.document);
36 | const section0 = new Uint8Array(1 + document.byteLength);
37 | new DataView(section0.buffer).setUint8(0, 0);
38 | section0.set(document, 1);
39 | totalLen += section0.byteLength;
40 | return section0;
41 | } else {
42 | const identifier = encoder.encode(section.identifier + "\0");
43 | let documentsLength = 0;
44 | const docs = section.documents.map((doc) => {
45 | const document = serialize(doc);
46 | documentsLength += document.byteLength;
47 | return document;
48 | });
49 | const section1 = new Uint8Array(
50 | 1 + 4 + identifier.byteLength + documentsLength,
51 | );
52 | const view = new DataView(section1.buffer);
53 |
54 | view.setUint8(0, 1);
55 | view.setUint32(1, section1.byteLength - 1, true);
56 | let pos = 4;
57 |
58 | for (const doc of docs) {
59 | section1.set(doc, pos);
60 | pos += doc.byteLength;
61 | }
62 |
63 | totalLen += section1.byteLength;
64 | return section1;
65 | }
66 | });
67 |
68 | return { length: totalLen, sections: buffers };
69 | }
70 |
71 | export function serializeMessage(
72 | message: Message,
73 | ): Uint8Array {
74 | const { length: sectionsLength, sections } = serializeSections(
75 | message.sections,
76 | );
77 |
78 | const buffer = new Uint8Array(20 + sectionsLength); // 16 bytes header + 4 bytes flags + sections
79 | const view = new DataView(buffer.buffer);
80 |
81 | // set header
82 | setHeader(view, {
83 | messageLength: buffer.byteLength,
84 | responseTo: message.responseTo,
85 | requestId: message.requestId,
86 | opCode: OpCode.MSG,
87 | });
88 |
89 | // set flags
90 | view.setInt32(16, message.flags ?? 0, true);
91 |
92 | // set sections
93 | let pos = 20;
94 | for (const section of sections) {
95 | buffer.set(section, pos);
96 | pos += section.byteLength;
97 | }
98 |
99 | return buffer;
100 | }
101 |
102 | export function deserializeMessage(
103 | header: MessageHeader,
104 | buffer: Uint8Array,
105 | ): Message {
106 | const view = new DataView(buffer.buffer);
107 |
108 | const flags = view.getInt32(0);
109 | const sections: Section[] = [];
110 |
111 | let pos = 4;
112 | while (pos < view.byteLength) {
113 | const kind = view.getInt8(pos);
114 | pos++;
115 | if (kind === 0) {
116 | const docLen = view.getInt32(pos, true);
117 | const document = deserialize(
118 | new Uint8Array(view.buffer.slice(pos, pos + docLen)),
119 | );
120 | pos += docLen;
121 | sections.push({ document });
122 | } else if (kind === 1) {
123 | const len = view.getInt32(pos, true);
124 | const sectionBody = new Uint8Array(
125 | view.buffer.slice(pos + 4, pos + len - 4),
126 | );
127 | const identifierEndPos = sectionBody.findIndex((byte) => byte === 0);
128 | const identifier = decoder.decode(buffer.slice(0, identifierEndPos));
129 | const docsBuffer = sectionBody.slice(identifierEndPos + 1);
130 | const documents = parseDocuments(docsBuffer);
131 | pos += len;
132 | sections.push({ identifier, documents });
133 | } else {
134 | throw new Error("Invalid section kind: " + kind);
135 | }
136 | }
137 |
138 | return {
139 | responseTo: header.responseTo,
140 | requestId: header.requestId,
141 | flags,
142 | sections,
143 | };
144 | }
145 |
146 | function parseDocuments(buffer: Uint8Array): Document[] {
147 | let pos = 0;
148 | const docs = [];
149 | const view = new DataView(buffer.buffer);
150 | while (pos < buffer.byteLength) {
151 | const docLen = view.getInt32(pos, true);
152 | const doc = deserialize(buffer.slice(pos, pos + docLen));
153 | docs.push(doc);
154 | pos += docLen;
155 | }
156 | return docs;
157 | }
158 |
--------------------------------------------------------------------------------
/src/protocol/mod.ts:
--------------------------------------------------------------------------------
1 | export * from "./cursor.ts";
2 | export * from "./handshake.ts";
3 | export * from "./header.ts";
4 | export * from "./message.ts";
5 | export * from "./protocol.ts";
6 |
--------------------------------------------------------------------------------
/src/protocol/protocol.ts:
--------------------------------------------------------------------------------
1 | import {
2 | MongoDriverError,
3 | type MongoErrorInfo,
4 | MongoServerError,
5 | } from "../error.ts";
6 | import type { Document } from "../types.ts";
7 | import { handshake } from "./handshake.ts";
8 | import { parseHeader } from "./header.ts";
9 | import {
10 | deserializeMessage,
11 | type Message,
12 | serializeMessage,
13 | } from "./message.ts";
14 |
// A queued outgoing command: run `body` against database `db`, matched to
// the server's reply via `requestId`.
interface CommandTask {
  requestId: number;
  db: string;
  body: Document;
}

// Monotonic request id shared by all connections in this process.
let nextRequestId = 0;
22 |
23 | export class WireProtocol {
24 | #conn: Deno.Conn;
25 | #isPendingResponse = false;
26 | #isPendingRequest = false;
27 | #pendingResponses: Map;
29 | resolve: (value: Message | PromiseLike) => void;
30 | // deno-lint-ignore no-explicit-any
31 | reject: (reason?: any) => void;
32 | }> = new Map();
33 | #commandQueue: CommandTask[] = [];
34 |
35 | constructor(socket: Deno.Conn) {
36 | this.#conn = socket;
37 | }
38 |
39 | async connect() {
40 | const { connectionId: _connectionId } = await handshake(this);
41 | }
42 |
43 | async commandSingle(
44 | db: string,
45 | body: Document,
46 | ): Promise {
47 | const [doc] = await this.command(db, body);
48 | if (doc.ok === 0) {
49 | throw new MongoServerError(doc as MongoErrorInfo);
50 | }
51 | return doc as T;
52 | }
53 |
54 | async command(db: string, body: Document): Promise {
55 | const requestId = nextRequestId++;
56 | const commandTask = {
57 | requestId,
58 | db,
59 | body,
60 | };
61 |
62 | this.#commandQueue.push(commandTask);
63 | this.send();
64 |
65 | const pendingMessage = Promise.withResolvers();
66 | this.#pendingResponses.set(requestId, pendingMessage);
67 | this.receive();
68 | const message = await pendingMessage.promise;
69 |
70 | let documents: T[] = [];
71 |
72 | for (const section of message?.sections!) {
73 | if ("document" in section) {
74 | documents.push(section.document as T);
75 | } else {
76 | documents = documents.concat(section.documents as T[]);
77 | }
78 | }
79 |
80 | return documents;
81 | }
82 |
83 | private async send() {
84 | if (this.#isPendingRequest) return;
85 | this.#isPendingRequest = true;
86 | while (this.#commandQueue.length > 0) {
87 | const task = this.#commandQueue.shift()!;
88 | const buffer = serializeMessage({
89 | requestId: task.requestId,
90 | responseTo: 0,
91 | sections: [
92 | {
93 | document: {
94 | ...task.body,
95 | $db: task.db,
96 | },
97 | },
98 | ],
99 | });
100 |
101 | const w = this.#conn.writable.getWriter();
102 | await w.write(buffer);
103 | w.releaseLock();
104 | }
105 | this.#isPendingRequest = false;
106 | }
107 |
108 | private async receive() {
109 | if (this.#isPendingResponse) return;
110 | this.#isPendingResponse = true;
111 | while (this.#pendingResponses.size > 0) {
112 | try {
113 | const headerBuffer = await this.read_socket(16);
114 | if (!headerBuffer) {
115 | throw new MongoDriverError("Invalid response header");
116 | }
117 | const header = parseHeader(headerBuffer);
118 | let bodyBytes = header.messageLength - 16;
119 | if (bodyBytes < 0) bodyBytes = 0;
120 | const bodyBuffer = await this.read_socket(header.messageLength - 16);
121 | if (!bodyBuffer) {
122 | throw new MongoDriverError("Invalid response body");
123 | }
124 | const pendingMessage = this.#pendingResponses.get(header.responseTo);
125 | this.#pendingResponses.delete(header.responseTo);
126 | try {
127 | const reply = deserializeMessage(header, bodyBuffer);
128 | pendingMessage?.resolve(reply);
129 | } catch (e) {
130 | pendingMessage?.reject(e);
131 | }
132 | } catch (error) {
133 | // If an error occurred in the above block, we won't be able to know for
134 | // sure which specific message triggered the error.
135 | // Though since the state appears to be so broken that we can't even
136 | // read the header anymore, it's likely that the connection has
137 | // simply closed.
138 | // We'll just reject all pending messages so that the user can
139 | // handle these themselves.
140 | for (const pendingMessage of this.#pendingResponses.values()) {
141 | pendingMessage.reject(error);
142 | }
143 | this.#pendingResponses.clear();
144 | }
145 | }
146 | this.#isPendingResponse = false;
147 | }
148 |
149 | private async read_socket(
150 | b: number,
151 | ): Promise {
152 | const reader = this.#conn.readable.getReader({ mode: "byob" });
153 | const { value } = await reader.read(new Uint8Array(b));
154 | reader.releaseLock();
155 | return value;
156 | }
157 | }
158 |
--------------------------------------------------------------------------------
/src/types/geojson.ts:
--------------------------------------------------------------------------------
1 | // Note:
2 | // Copied from the link below
3 | // - https://raw.githubusercontent.com/DefinitelyTyped/DefinitelyTyped/master/types/geojson/index.d.ts
4 | //
5 | // See also
6 | // - https://www.npmjs.com/package/@types/geojson
7 |
8 | // Type definitions for non-npm package geojson 7946.0
9 | // Project: https://geojson.org/
10 | // Definitions by: Jacob Bruun
11 | // Arne Schubert
12 | // Jeff Jacobson
13 | // Ilia Choly
14 | // Dan Vanderkam
15 | // Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
16 | // TypeScript Version: 2.3
17 |
18 | // Note: as of the RFC 7946 version of GeoJSON, Coordinate Reference Systems
// are no longer supported. (See https://tools.ietf.org/html/rfc7946#appendix-B)
20 |
21 | // export as namespace GeoJSON;
22 |
/**
 * The valid values for the "type" property of GeoJSON geometry objects.
 * https://tools.ietf.org/html/rfc7946#section-1.4
 */
export type GeoJsonGeometryTypes = Geometry["type"];

/**
 * The valid values for the "type" property of GeoJSON Objects.
 * https://tools.ietf.org/html/rfc7946#section-1.4
 */
export type GeoJsonTypes = GeoJSON["type"];

/**
 * Bounding box — four elements for 2D, six when altitude is included.
 * https://tools.ietf.org/html/rfc7946#section-5
 */
export type BBox = [number, number, number, number] | [
  number,
  number,
  number,
  number,
  number,
  number,
];

/**
 * A Position is an array of coordinates.
 * https://tools.ietf.org/html/rfc7946#section-3.1.1
 * Array should contain between two and three elements.
 * The previous GeoJSON specification allowed more elements (e.g., which could be used to represent M values),
 * but the current specification only allows X, Y, and (optionally) Z to be defined.
 */
export type Position = number[]; // [number, number] | [number, number, number];
56 |
/**
 * The base GeoJSON object.
 * https://tools.ietf.org/html/rfc7946#section-3
 * The GeoJSON specification also allows foreign members
 * (https://tools.ietf.org/html/rfc7946#section-6.1)
 * Developers should use "&" type in TypeScript or extend the interface
 * to add these foreign members.
 */
export interface GeoJsonObject {
  // Don't include foreign members directly into this type def.
  // in order to preserve type safety.
  // [key: string]: any;
  /**
   * Specifies the type of GeoJSON object.
   */
  type: GeoJsonTypes;
  /**
   * Bounding box of the coordinate range of the object's Geometries, Features, or Feature Collections.
   * The value of the bbox member is an array of length 2*n where n is the number of dimensions
   * represented in the contained geometries, with all axes of the most southwesterly point
   * followed by all axes of the more northeasterly point.
   * The axes order of a bbox follows the axes order of geometries.
   * https://tools.ietf.org/html/rfc7946#section-5
   */
  bbox?: BBox | undefined;
}
83 |
/**
 * Union of GeoJSON objects.
 */
export type GeoJSON = Geometry | Feature | FeatureCollection;

/**
 * Geometry object.
 * https://tools.ietf.org/html/rfc7946#section-3
 */
export type Geometry =
  | Point
  | MultiPoint
  | LineString
  | MultiLineString
  | Polygon
  | MultiPolygon
  | GeometryCollection;
// Alias kept for compatibility with the upstream @types/geojson package.
export type GeometryObject = Geometry;
102 |
/**
 * Point geometry object.
 * https://tools.ietf.org/html/rfc7946#section-3.1.2
 */
export interface Point extends GeoJsonObject {
  type: "Point";
  coordinates: Position;
}

/**
 * MultiPoint geometry object.
 * https://tools.ietf.org/html/rfc7946#section-3.1.3
 */
export interface MultiPoint extends GeoJsonObject {
  type: "MultiPoint";
  coordinates: Position[];
}

/**
 * LineString geometry object.
 * https://tools.ietf.org/html/rfc7946#section-3.1.4
 */
export interface LineString extends GeoJsonObject {
  type: "LineString";
  coordinates: Position[];
}

/**
 * MultiLineString geometry object.
 * https://tools.ietf.org/html/rfc7946#section-3.1.5
 */
export interface MultiLineString extends GeoJsonObject {
  type: "MultiLineString";
  coordinates: Position[][];
}

/**
 * Polygon geometry object.
 * https://tools.ietf.org/html/rfc7946#section-3.1.6
 */
export interface Polygon extends GeoJsonObject {
  type: "Polygon";
  coordinates: Position[][];
}

/**
 * MultiPolygon geometry object.
 * https://tools.ietf.org/html/rfc7946#section-3.1.7
 */
export interface MultiPolygon extends GeoJsonObject {
  type: "MultiPolygon";
  coordinates: Position[][][];
}

/**
 * Geometry Collection
 * https://tools.ietf.org/html/rfc7946#section-3.1.8
 */
export interface GeometryCollection extends GeoJsonObject {
  type: "GeometryCollection";
  geometries: Geometry[];
}
165 |
// deno-lint-ignore no-explicit-any
export type GeoJsonProperties = { [name: string]: any } | null;

/**
 * A feature object which contains a geometry and associated properties.
 * https://tools.ietf.org/html/rfc7946#section-3.2
 */
export interface Feature<
  G extends Geometry | null = Geometry,
  P = GeoJsonProperties,
> extends GeoJsonObject {
  type: "Feature";
  /**
   * The feature's geometry
   */
  geometry: G;
  /**
   * A value that uniquely identifies this feature in a
   * https://tools.ietf.org/html/rfc7946#section-3.2.
   */
  id?: string | number | undefined;
  /**
   * Properties associated with this feature.
   */
  properties: P;
}

/**
 * A collection of feature objects.
 * https://tools.ietf.org/html/rfc7946#section-3.3
 */
export interface FeatureCollection<
  G extends Geometry | null = Geometry,
  P = GeoJsonProperties,
> extends GeoJsonObject {
  type: "FeatureCollection";
  // NOTE(review): the element type appears truncated here (upstream
  // @types/geojson declares Array<Feature<G, P>>) — confirm against upstream.
  features: Array>;
}
204 |
--------------------------------------------------------------------------------
/src/types/geospatial.ts:
--------------------------------------------------------------------------------
1 | import type { Document } from "../types.ts";
2 | import type {
3 | GeoJsonObject,
4 | GeometryCollection,
5 | GeometryObject,
6 | LineString,
7 | MultiLineString,
8 | MultiPoint,
9 | MultiPolygon,
10 | Point,
11 | Polygon,
12 | Position,
13 | } from "./geojson.ts";
14 |
15 | /**
16 | * https://www.mongodb.com/docs/manual/reference/operator/query/geometry/#mongodb-query-op.-geometry
17 | */
18 | interface GeoJsonOperators {
19 | $geometry: G & CoordinateReferenceSystem;
20 | }
21 |
/**
 * Optional coordinate reference system member, named per
 * https://datatracker.ietf.org/doc/html/rfc7946#section-4
 */
interface CoordinateReferenceSystem {
  crs?: {
    type: string;
    properties: { name: string };
  };
}

/**
 * Optional minimum/maximum distance bounds for geospatial queries.
 *
 * https://www.mongodb.com/docs/manual/reference/operator/query/minDistance/
 * https://www.mongodb.com/docs/manual/reference/operator/query/maxDistance/
 */
export interface DistanceConstraint {
  $minDistance?: number;
  $maxDistance?: number;
}

/** A legacy (non-GeoJSON) coordinate pair. */
export type LegacyPoint = Position;
42 |
43 | /**
44 | * Example:
45 | *
46 | * ```ts
47 | * {
48 | * $geometry: GeometryObject, // any GeoJSON object
49 | * }
50 | * ```
51 | */
52 | export type $geoAny = GeoJsonOperators;
53 |
54 | /**
55 | * Example:
56 | *
57 | * ```ts
58 | * {
59 | * $geometry: { type: "Point", coordinates: [ 40, 5 ] },
60 | * }
61 | * ```
62 | *
63 | * https://www.mongodb.com/docs/manual/reference/geojson/#point
64 | */
65 | export type $geoPoint = GeoJsonOperators;
66 |
67 | /**
68 | * Example:
69 | *
70 | * ```ts
71 | * {
72 | * $geometry: { type: "LineString", coordinates: [ [ 40, 5 ], [ 41, 6 ] ] }
73 | * }
74 | * ```
75 | *
76 | * https://www.mongodb.com/docs/manual/reference/geojson/#linestring
77 | */
78 | export type $geoLineString = GeoJsonOperators;
79 |
80 | /**
81 | * Example:
82 | *
83 | * ```ts
84 | * {
85 | * $geometry: {
86 | * type: "Polygon",
87 | * coordinates: [ [ [ 0 , 0 ] , [ 3 , 6 ] , [ 6 , 1 ] , [ 0 , 0 ] ] ]
88 | * },
89 | * }
90 | *
91 | * ```
92 | * https://www.mongodb.com/docs/manual/reference/geojson/#polygon
93 | */
94 | export type $geoPolygon = GeoJsonOperators;
95 |
96 | /**
97 | * Example:
98 | *
99 | * ```ts
100 | * {
101 | * $geometry: {
102 | * type: "MultiPoint",
103 | * coordinates: [
104 | * [ -73.9580, 40.8003 ],
105 | * [ -73.9498, 40.7968 ],
106 | * [ -73.9737, 40.7648 ],
107 | * [ -73.9814, 40.7681 ]
108 | * ]
109 | * },
110 | * }
111 | * ```
112 | *
113 | * https://www.mongodb.com/docs/manual/reference/geojson/#multipoint
114 | */
115 | export type $geoMultiPoint = GeoJsonOperators;
116 |
117 | /**
118 | * Example:
119 | *
120 | * ```ts
121 | * {
122 | * $geometry: {
123 | * type: "MultiLineString",
124 | * coordinates: [
125 | * [ [ -73.96943, 40.78519 ], [ -73.96082, 40.78095 ] ],
126 | * [ [ -73.96415, 40.79229 ], [ -73.95544, 40.78854 ] ],
127 | * [ [ -73.97162, 40.78205 ], [ -73.96374, 40.77715 ] ],
128 | * [ [ -73.97880, 40.77247 ], [ -73.97036, 40.76811 ] ]
129 | * ]
130 | * }
131 | * }
132 | * ```
133 | *
134 | * https://www.mongodb.com/docs/manual/reference/geojson/#multilinestring
135 | */
136 | export type $geoMultiLineString = GeoJsonOperators;
137 |
138 | /**
139 | * Example:
140 | *
141 | * ```ts
142 | * {
143 | * $geometry: {
144 | * type: "MultiPolygon",
145 | * coordinates: [
146 | * [ [ [ -73.958, 40.8003 ], [ -73.9498, 40.7968 ], [ -73.9737, 40.7648 ], [ -73.9814, 40.7681 ], [ -73.958, 40.8003 ] ] ],
147 | * [ [ [ -73.958, 40.8003 ], [ -73.9498, 40.7968 ], [ -73.9737, 40.7648 ], [ -73.958, 40.8003 ] ] ]
148 | * ]
149 | * },
150 | * }
151 | * ```
152 | *
153 | * https://www.mongodb.com/docs/manual/reference/geojson/#multipolygon
154 | */
155 | export type $geoMultiPolygon = GeoJsonOperators;
156 |
157 | /**
158 | * Example:
159 | *
160 | * ```ts
161 | * {
162 | * $geometry: {
163 | * type: "GeometryCollection",
164 | * geometries: [
165 | * {
166 | * type: "MultiPoint",
167 | * coordinates: [
168 | * [ -73.9580, 40.8003 ],
169 | * [ -73.9498, 40.7968 ],
170 | * [ -73.9737, 40.7648 ],
171 | * [ -73.9814, 40.7681 ]
172 | * ]
173 | * },
174 | * {
175 | * type: "MultiLineString",
176 | * coordinates: [
177 | * [ [ -73.96943, 40.78519 ], [ -73.96082, 40.78095 ] ],
178 | * [ [ -73.96415, 40.79229 ], [ -73.95544, 40.78854 ] ],
179 | * [ [ -73.97162, 40.78205 ], [ -73.96374, 40.77715 ] ],
180 | * [ [ -73.97880, 40.77247 ], [ -73.97036, 40.76811 ] ]
181 | * ]
182 | * }
183 | * ]
184 | * }
185 | * }
186 | * ```
187 | *
188 | * https://www.mongodb.com/docs/manual/reference/geojson/#geometrycollection
189 | */
190 | export type $geoCollection = GeoJsonOperators;
191 |
/**
 * Example:
 *
 * ```ts
 * { $box: [ [ 0, 0 ], [ 100, 100 ] ] }
 * ```
 *
 * https://www.mongodb.com/docs/manual/reference/operator/query/box/#-box
 */
export type $box = { $box: [LegacyPoint, LegacyPoint] };

/**
 * Example:
 *
 * ```ts
 * { $polygon: [ [ 0 , 0 ], [ 3 , 6 ], [ 6 , 0 ] ] }
 * ```
 *
 * https://www.mongodb.com/docs/manual/reference/operator/query/polygon/#-polygon
 */
export type $polygon = { $polygon: LegacyPoint[] };

/**
 * Example:
 *
 * ```ts
 * { $center: [ [-74, 40.74], 10 ] }
 * ```
 *
 * https://www.mongodb.com/docs/manual/reference/operator/query/center/#definition
 */
export type $center = { $center: [LegacyPoint, number] };

/**
 * Example:
 *
 * ```ts
 * { $centerSphere: [ [ -88, 30 ], 10/3963.2 ] }
 * ```
 *
 * https://www.mongodb.com/docs/manual/reference/operator/query/centerSphere/#-centersphere
 */
export type $centerSphere = { $centerSphere: [LegacyPoint, number] };

/** Union of the legacy-coordinate shape operators above. */
export type ShapeOperator =
  | $box
  | $polygon
  | $center
  | $centerSphere;

/**
 * A query "center": a GeoJSON point (optionally with distance bounds),
 * a legacy coordinate pair, or a raw document for other forms.
 */
export type CenterSpecifier =
  | ($geoPoint & DistanceConstraint)
  | LegacyPoint
  | Document;
246 |
--------------------------------------------------------------------------------
/src/types/gridfs.ts:
--------------------------------------------------------------------------------
1 | import type { Binary, ObjectId } from "../../deps.ts";
2 | import type { Document, ReadPreference } from "../types.ts";
3 | import type { ReadConcern, WriteConcern } from "../types/read_write_concern.ts";
4 |
/** Identifier type for GridFS files. */
export type FileId = ObjectId;

/** One chunk of a stored file (the chunks collection document shape). */
export interface Chunk {
  _id: ObjectId;
  /** `_id` of the files-collection document this chunk belongs to. */
  // deno-lint-ignore camelcase
  files_id: ObjectId;
  /** Index of this chunk within the file. */
  n: number;
  /** Binary payload of this chunk. */
  data: Binary;
}

/** Metadata document describing a stored file (the files collection). */
export interface File {
  _id: ObjectId;
  /** Total file length. */
  length: number;
  /** Size of each chunk in bytes. */
  chunkSize: number;
  uploadDate: Date;
  filename: string;
  /** Optional caller-supplied metadata (see GridFSUploadOptions.metadata). */
  metadata?: Document;
}
23 |
/** Options for constructing a GridFS bucket. */
export interface GridFSBucketOptions {
  /**
   * The bucket name. Defaults to 'fs'.
   */
  bucketName?: string;

  /**
   * The chunk size in bytes. Defaults to 255 KiB.
   */
  chunkSizeBytes?: number;

  /**
   * The write concern. Defaults to the write concern of the database.
   */
  writeConcern?: WriteConcern;

  /**
   * The read concern. Defaults to the read concern of the database.
   */
  readConcern?: ReadConcern;

  /**
   * The read preference. Defaults to the read preference of the database.
   */
  readPreference?: ReadPreference;
}

/** Per-upload options for writing a single file into a GridFS bucket. */
export interface GridFSUploadOptions {
  /**
   * The number of bytes per chunk of this file. Defaults to the
   * chunkSizeBytes in the GridFSBucketOptions.
   */
  chunkSizeBytes?: number;

  /**
   * User data for the 'metadata' field of the files collection document.
   * If not provided the driver MUST omit the metadata field from the
   * files collection document.
   */
  metadata?: Document;
}
65 |
/**
 * Options for `find` queries against a bucket's files collection.
 *
 * NOTE(review): declared as a class (not an interface) upstream, so a
 * runtime value is exported under this name — kept as-is for compatibility.
 */
export class GridFSFindOptions {
  /**
   * Enables writing to temporary files on the server. When set to true, the server
   * can write temporary data to disk while executing the find operation on the files collection.
   *
   * This option is sent only if the caller explicitly provides a value. The default
   * is to not send a value. For servers < 3.2, this option is ignored and not sent
   * as allowDiskUse does not exist in the OP_QUERY wire protocol.
   *
   * @see https://docs.mongodb.com/manual/reference/command/find/
   */
  allowDiskUse?: boolean;

  /**
   * The number of documents to return per batch.
   */
  batchSize?: number;

  /**
   * The maximum number of documents to return.
   */
  limit?: number;

  /**
   * The maximum amount of time to allow the query to run.
   */
  maxTimeMS?: number;

  /**
   * The server normally times out idle cursors after an inactivity period (10 minutes)
   * to prevent excess memory use. Set this option to prevent that.
   */
  noCursorTimeout?: boolean;

  /**
   * The number of documents to skip before returning.
   */
  skip?: number;

  /**
   * The order by which to sort results. Defaults to not sorting.
   */
  sort?: Document;
}
110 |
--------------------------------------------------------------------------------
/src/types/read_write_concern.ts:
--------------------------------------------------------------------------------
1 | /**
2 | * @module @see https://github.com/mongodb/specifications/blob/master/source/read-write-concern/read-write-concern.rst#read-concern
3 | */
4 |
5 | const enum ReadConcernLevel {
6 | local = "local",
7 | majority = "majority",
8 | linearizable = "linearizable",
9 | available = "available",
10 | snapshot = "snapshot",
11 | }
12 |
/**
 * interface for ReadConcern documents used by MongoDB
 * @see https://docs.mongodb.com/manual/reference/read-concern/
 */
export interface ReadConcern {
  /**
   * The level of the read concern. Accepts the known levels plus any
   * server-recognized string.
   */
  level?: ReadConcernLevel | string;
}
23 |
/**
 * interface for WriteConcern documents used by MongoDB
 *
 * @see https://docs.mongodb.com/manual/reference/write-concern/
 */
export interface WriteConcern {
  /**
   * The number of instances the write operation needs to be propagated to
   * before proceeding.
   *
   * The string based values are:
   *
   * - majority: The calculated majority of nodes in a cluster has accepted
   *   the write
   * - custom write name: Writes have been acknowledged by nodes tagged with the
   *   custom write concern.
   */
  w: number | "majority" | string;
  /**
   * If true, the server only returns after the operation has been committed to
   * disk
   */
  j: boolean;
  /**
   * An optional timeout value after which to stop the write operation
   */
  wtimeout?: number;
}
52 |
--------------------------------------------------------------------------------
/src/utils/ns.ts:
--------------------------------------------------------------------------------
1 | export function parseNamespace(ns: string) {
2 | const [db, ...rest] = ns.split(".");
3 | return { db, collection: rest.join(".") };
4 | }
5 |
--------------------------------------------------------------------------------
/src/utils/saslprep/code_points.mem:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/denodrivers/mongo/e02f8813081bbdd01324fadca09c1bd8c0bb2816/src/utils/saslprep/code_points.mem
--------------------------------------------------------------------------------
/src/utils/saslprep/memory_pager.ts:
--------------------------------------------------------------------------------
/** Grows a pager. */
function grow(pager: Pager, index: number): void {
  // Each added level multiplies the addressable page count by 32768 (2^15):
  // the old root becomes child 0 of a new, wider root, so existing page
  // indices keep resolving through Pager._array.
  while (pager.maxPages < index) {
    // deno-lint-ignore no-explicit-any
    const old: any = pager.pages;
    pager.pages = new Array(32768);
    pager.pages[0] = old;
    pager.level++;
    pager.maxPages *= 32768;
  }
}
12 |
13 | /** Truncates the input buffer. */
14 | function truncate(buf: Uint8Array, len: number): Uint8Array {
15 | if (buf.length === len) {
16 | return buf;
17 | }
18 |
19 | if (buf.length > len) {
20 | return buf.slice(0, len);
21 | }
22 |
23 | const cpy: Uint8Array = new Uint8Array(len);
24 | cpy.set(buf, 0);
25 |
26 | return cpy;
27 | }
28 |
29 | /** Concatenates given buffers. */
30 | function concat(bufs: Uint8Array[]): Uint8Array {
31 | const total: number = bufs.reduce(
32 | (acc, cur): number => acc + cur.byteLength,
33 | 0,
34 | );
35 |
36 | const buf: Uint8Array = new Uint8Array(total);
37 | let offset = 0;
38 |
39 | for (const b of bufs) {
40 | buf.set(b, offset);
41 | offset += b.byteLength;
42 | }
43 |
44 | return buf;
45 | }
46 |
47 | /** Compares two buffers. */
48 | function equal(a: Uint8Array, b: Uint8Array): boolean {
49 | if (a.length !== b.length) {
50 | return false;
51 | }
52 |
53 | return a.every((x: number, i: number): boolean => x === b[i]);
54 | }
55 |
/**
 * Decomposes page index `n` into four base-32768 digits (least significant
 * first), written into `out`. The digits are used as the per-level indices
 * when walking the pager's page tree (see Pager._array).
 */
function factor(n: number, out: Uint16Array): void {
  n = (n - (out[0] = n & 32767)) / 32768;
  n = (n - (out[1] = n & 32767)) / 32768;
  out[3] = ((n - (out[2] = n & 32767)) / 32768) & 32767;
}
62 |
63 | /** Copies a buffer. */
64 | function copy(buf: Uint8Array): Uint8Array {
65 | const cpy: Uint8Array = new Uint8Array(buf.length);
66 | cpy.set(buf, 0);
67 | return cpy;
68 | }
69 |
/** A class representation of a page. */
export class Page {
  /** Absolute byte offset of this page within the whole pager. */
  offset: number;
  /** Backing bytes for this page. */
  buffer: Uint8Array;
  /** True while this page sits in the pager's pending-updates list. */
  updated: boolean;
  /** Scan cursor used by Pager.updated() when comparing to the dedup buffer. */
  deduplicate: number;

  constructor(i: number, buf: Uint8Array) {
    this.offset = i * buf.length;
    this.buffer = buf;
    this.updated = false;
    this.deduplicate = 0;
  }
}
84 |
/** Pager constructor options. */
export interface PagerOptions {
  /** Reference buffer; pages whose bytes equal it share it to save memory. */
  deduplicate?: Uint8Array;
}
89 |
/** A class representation of a pager. */
export class Pager {
  /** Bytes per page. */
  readonly pageSize: number;

  /** Number of pages addressable at the current tree depth. */
  maxPages = 32768;
  /** Root of the radix tree of pages (fan-out 32768 per level). */
  pages = new Array(32768);
  /** One past the highest page index seen by get()/set(). */
  length = 0;
  /** Depth of the page tree above the leaf arrays. */
  level = 0;

  /** Pages flagged dirty via updated(), drained by lastUpdate(). */
  private updates: Page[] = [];
  /** Scratch digits written by factor(); path[0] indexes the leaf array. */
  private path: Uint16Array = new Uint16Array(4);
  /** Optional shared buffer identical pages can point at (see PagerOptions). */
  private deduplicate: null | Uint8Array;
  /** All-zero buffer used by set() to detect (and drop) empty pages. */
  private zeros: null | Uint8Array;

  constructor(pageSize: number, opts: PagerOptions = {}) {
    this.pageSize = pageSize;
    this.deduplicate = opts.deduplicate || null;
    this.zeros = this.deduplicate
      ? new Uint8Array(this.deduplicate.length)
      : null;
  }

  /**
   * Marks a page dirty. Also advances the page's dedup scan cursor and, when
   * the whole page matches the shared dedup buffer, re-points the page at it.
   */
  updated(page: Page): void {
    while (
      this.deduplicate &&
      page.buffer[page.deduplicate] === this.deduplicate[page.deduplicate]
    ) {
      if (++page.deduplicate === this.deduplicate.length) {
        page.deduplicate = 0;

        if (equal(page.buffer, this.deduplicate)) {
          page.buffer = this.deduplicate;
        }

        break;
      }
    }

    if (page.updated || !this.updates) {
      return;
    }

    page.updated = true;
    this.updates.push(page);
  }

  /** Pops the most recently flagged dirty page, or null when none pending. */
  lastUpdate(): null | Page {
    if (!this.updates || !this.updates.length) {
      return null;
    }

    const page: Page = this.updates.pop()!;
    page.updated = false;
    return page;
  }

  /**
   * Returns the page at index `i`, allocating it unless `noAllocate` is set.
   *
   * NOTE(review): with `noAllocate` a missing page is returned as undefined
   * despite the declared `Page` return type — callers in this repo guard
   * with a truthiness check. Widening the type would break those callers'
   * annotations, so it is documented rather than changed here.
   */
  get(i: number, noAllocate?: boolean): Page {
    const arr: Page[] = this._array(i, !!noAllocate);
    const first: number = this.path[0];
    let page: undefined | Page = arr && arr[first];
    if (!page && !noAllocate) {
      page = arr[first] = new Page(i, new Uint8Array(this.pageSize));
      if (i >= this.length) {
        this.length = i + 1;
      }
    }

    // A page returned for writing must not alias the shared dedup buffer.
    if (
      page &&
      page.buffer === this.deduplicate &&
      this.deduplicate &&
      !noAllocate
    ) {
      page.buffer = copy(page.buffer);
      page.deduplicate = 0;
    }

    return page;
  }

  /** Stores `buf` as page `i`; an empty or all-zero buffer drops the page. */
  set(i: number, buf: Uint8Array): void {
    const arr: (undefined | Page)[] = this._array(i, false);
    const first: number = this.path[0];

    if (i >= this.length) {
      this.length = i + 1;
    }

    if (!buf || (this.zeros && equal(buf, this.zeros))) {
      arr[first] = undefined;
      return;
    }

    if (this.deduplicate && equal(buf, this.deduplicate)) {
      buf = this.deduplicate;
    }

    const page: undefined | Page = arr[first];
    const b: Uint8Array = truncate(buf, this.pageSize);

    if (page) {
      page.buffer = b;
    } else {
      arr[first] = new Page(i, b);
    }
  }

  /** Concat all allocated pages into a single buffer. */
  toBuffer(): Uint8Array {
    const list: Uint8Array[] = new Array(this.length);
    const empty: Uint8Array = new Uint8Array(this.pageSize);
    let ptr = 0;

    while (ptr < list.length) {
      const arr: Page[] = this._array(ptr, true);

      // Unallocated pages are rendered as zero-filled placeholders.
      for (let i = 0; i < 32768 && ptr < list.length; i++) {
        list[ptr++] = arr && arr[i] ? arr[i].buffer : empty;
      }
    }

    return concat(list);
  }

  /** Walks (and, unless `noAllocate`, builds) the tree down to the leaf array holding page `i`. */
  private _array(i: number, noAllocate: boolean): Page[] {
    if (i >= this.maxPages) {
      if (noAllocate) {
        return [];
      }

      grow(this, i);
    }

    // Side effect: fills this.path with the per-level indices for `i`.
    factor(i, this.path);
    // deno-lint-ignore no-explicit-any
    let arr: any[] = this.pages;

    for (let j: number = this.level; j > 0; j--) {
      const p: number = this.path[j];
      // deno-lint-ignore no-explicit-any
      let next: any = arr[p];

      if (!next) {
        if (noAllocate) {
          return [];
        }

        next = arr[p] = new Array(32768);
      }

      arr = next;
    }

    return arr;
  }
}
246 |
--------------------------------------------------------------------------------
/src/utils/saslprep/mod.ts:
--------------------------------------------------------------------------------
1 | // deno-lint-ignore-file camelcase
2 | //ORIGINAL PROJECT AND LICENSE IN: https://github.com/chiefbiiko/saslprep
3 | //ORIGINAL PROJECT AND LICENSE IN: https://github.com/chiefbiiko/sparse-bitfield
4 | //ORIGINAL PROJECT AND LICENSE IN: https://github.com/chiefbiiko/memory-pager
5 | import { loadCodePoints } from "./load_code_points.ts";
6 | import type { Bitfield } from "./sparse_bitfield.ts";
7 |
// StringPrep code point tables (RFC 3454 appendices), decoded from the
// bundled code_points.mem file by load_code_points.ts.
const {
  unassigned_code_points,
  commonly_mapped_to_nothing,
  non_ASCII_space_characters,
  prohibited_characters,
  bidirectional_r_al,
  bidirectional_l,
} = loadCodePoints();

// 2.1. Mapping

/**
 * non-ASCII space characters [StringPrep, C.1.2] that can be
 * mapped to SPACE (U+0020).
 */
const mapping2space: Bitfield = non_ASCII_space_characters;

/**
 * The "commonly mapped to nothing" characters [StringPrep, B.1]
 * that can be mapped to nothing.
 */
const mapping2nothing: Bitfield = commonly_mapped_to_nothing;
30 |
31 | // utils
32 | function getCodePoint(chr: string): number {
33 | const codePoint: undefined | number = chr.codePointAt(0);
34 |
35 | if (!codePoint) {
36 | throw new Error(`unable to encode character ${chr}`);
37 | }
38 |
39 | return codePoint;
40 | }
41 |
42 | // deno-lint-ignore no-explicit-any
43 | function first(x: any): any {
44 | return x[0];
45 | }
46 |
47 | // deno-lint-ignore no-explicit-any
48 | function last(x: any): any {
49 | return x[x.length - 1];
50 | } /**
51 | * Convert provided string into an array of Unicode Code Points.
52 | * Based on https://stackoverflow.com/a/21409165/1556249
53 | * and https://www.npmjs.com/package/code-point-at.
54 | */
55 |
56 | function toCodePoints(input: string): number[] {
57 | const codepoints = [];
58 | const size = input.length;
59 |
60 | for (let i = 0; i < size; i += 1) {
61 | const before: number = input.charCodeAt(i);
62 |
63 | if (before >= 0xd800 && before <= 0xdbff && size > i + 1) {
64 | const next: number = input.charCodeAt(i + 1);
65 |
66 | if (next >= 0xdc00 && next <= 0xdfff) {
67 | codepoints.push((before - 0xd800) * 0x400 + next - 0xdc00 + 0x10000);
68 | i += 1;
69 | continue;
70 | }
71 | }
72 |
73 | codepoints.push(before);
74 | }
75 |
76 | return codepoints;
77 | }
78 |
/** Options for the saslprep() routine. */
export interface SASLprepOptions {
  /** Skip the RFC 4013 §2.5 unassigned-code-point check when true. */
  allowUnassigned?: boolean;
}
82 |
/**
 * SASLprep routine (RFC 4013): prepares a string for use in SASL
 * authentication exchanges.
 *
 * Steps: map characters (space / nothing), NFKC-normalize, reject
 * prohibited and (optionally) unassigned code points, then enforce the
 * RFC 3454 §6 bidirectional rules.
 *
 * @param input the string to prepare
 * @param opts set `allowUnassigned` to skip the unassigned-code-point check
 * @returns the normalized string
 * @throws {Error} on prohibited/unassigned code points or bidi violations
 */
export function saslprep(input: string, opts: SASLprepOptions = {}): string {
  if (input === null) {
    throw new TypeError("Input must not be null.");
  }

  if (input.length === 0) {
    return "";
  }

  // 1. Map
  const mapped_input: number[] = toCodePoints(input)
    // 1.1 mapping to space
    .map((character) => (mapping2space.get(character) ? 0x20 : character))
    // 1.2 mapping to nothing
    .filter((character) => !mapping2nothing.get(character));

  // 2. Normalize
  const normalized_input: string = String.fromCodePoint
    .apply(null, mapped_input)
    .normalize("NFKC");

  const normalized_map: number[] = toCodePoints(normalized_input);

  // 3. Prohibit
  const hasProhibited: boolean = normalized_map.some((character) =>
    prohibited_characters.get(character)
  );

  if (hasProhibited) {
    throw new Error(
      "Prohibited character, see https://tools.ietf.org/html/rfc4013#section-2.3",
    );
  }

  // Unassigned Code Points
  if (!opts.allowUnassigned) {
    const hasUnassigned: boolean = normalized_map.some((character) =>
      unassigned_code_points.get(character)
    );

    if (hasUnassigned) {
      throw new Error(
        "Unassigned code point, see https://tools.ietf.org/html/rfc4013#section-2.5",
      );
    }
  }

  // 4. check bidi

  const hasBidiRAL: boolean = normalized_map.some((character) =>
    bidirectional_r_al.get(character)
  );

  const hasBidiL: boolean = normalized_map.some((character) =>
    bidirectional_l.get(character)
  );

  // 4.1 If a string contains any RandALCat character, the string MUST NOT
  // contain any LCat character.
  if (hasBidiRAL && hasBidiL) {
    throw new Error(
      "String must not contain RandALCat and LCat at the same time," +
        " see https://tools.ietf.org/html/rfc3454#section-6",
    );
  }

  /**
   * 4.2 If a string contains any RandALCat character, a RandALCat
   * character MUST be the first character of the string, and a
   * RandALCat character MUST be the last character of the string.
   */

  // These are computed unconditionally but only enforced when hasBidiRAL.
  const isFirstBidiRAL: boolean = bidirectional_r_al.get(
    getCodePoint(first(normalized_input)),
  );
  const isLastBidiRAL: boolean = bidirectional_r_al.get(
    getCodePoint(last(normalized_input)),
  );

  if (hasBidiRAL && !(isFirstBidiRAL && isLastBidiRAL)) {
    throw new Error(
      "Bidirectional RandALCat character must be the first and the last" +
        " character of the string, see https://tools.ietf.org/html/rfc3454#section-6",
    );
  }

  return normalized_input;
}
172 |
--------------------------------------------------------------------------------
/src/utils/saslprep/sparse_bitfield.ts:
--------------------------------------------------------------------------------
1 | import { type Page, Pager } from "./memory_pager.ts";
2 |
3 | /** Is the given number a power of two? */
4 | function powerOfTwo(x: number): boolean {
5 | return !(x & (x - 1));
6 | }
7 |
/** Bitfield constructor options. */
export interface BitfieldOptions {
  /** Byte offset applied inside every page buffer. */
  pageOffset?: number;
  /** Bytes per page; must be a power of two. Defaults to 1024. */
  pageSize?: number;
  /** Existing pager to back this bitfield; a fresh one is created otherwise. */
  pages?: Pager;
  /** When true, written pages are reported to the pager's update list. */
  trackUpdates?: boolean;
  /** Initial contents; also accepted as the bare constructor argument. */
  buffer?: Uint8Array;
}
16 |
/** A class representation of a bitfield. */
export class Bitfield {
  readonly pageOffset: number;
  readonly pageSize: number;
  readonly pages: Pager;

  /** Number of addressable bytes. */
  byteLength: number;
  /** Number of addressable bits (byteLength * 8). */
  length: number;

  private _trackUpdates: boolean;
  /** pageSize - 1; splits a byte index into (page, in-page offset). */
  private _pageMask: number;

  /** Creates a bitfield instance. */
  constructor(opts: Uint8Array | BitfieldOptions = {}) {
    // A bare buffer is shorthand for { buffer }.
    if (opts instanceof Uint8Array) {
      opts = { buffer: opts };
    }

    this.pageOffset = opts.pageOffset || 0;
    this.pageSize = opts.pageSize || 1024;
    this.pages = opts.pages || new Pager(this.pageSize);

    this.byteLength = this.pages.length * this.pageSize;
    this.length = 8 * this.byteLength;

    if (!powerOfTwo(this.pageSize)) {
      throw new Error("The page size should be a power of two");
    }

    this._trackUpdates = !!opts.trackUpdates;
    this._pageMask = this.pageSize - 1;

    if (opts.buffer) {
      // Seed the pager with the initial contents, one page at a time.
      for (let i = 0; i < opts.buffer.length; i += this.pageSize) {
        this.pages.set(
          i / this.pageSize,
          opts.buffer.slice(i, i + this.pageSize),
        );
      }

      this.byteLength = opts.buffer.length;
      this.length = 8 * this.byteLength;
    }
  }

  /** Gets a byte. Unallocated pages read as 0. */
  getByte(i: number): number {
    const o: number = i & this._pageMask;
    const j: number = (i - o) / this.pageSize;
    const page: Page = this.pages.get(j, true);

    return page ? page.buffer[o + this.pageOffset] : 0;
  }

  /** Sets a byte. Returns true when the stored value actually changed. */
  setByte(i: number, b: number): boolean {
    // NOTE(review): unlike getByte, `o` here includes pageOffset BEFORE the
    // page index `j` is derived — preserved verbatim from the upstream
    // implementation (chiefbiiko/sparse-bitfield); verify before changing.
    const o: number = (i & this._pageMask) + this.pageOffset;
    const j: number = (i - o) / this.pageSize;
    const page: Page = this.pages.get(j, false);

    if (page.buffer[o] === b) {
      return false;
    }

    page.buffer[o] = b;

    if (i >= this.byteLength) {
      this.byteLength = i + 1;
      this.length = this.byteLength * 8;
    }

    if (this._trackUpdates) {
      this.pages.updated(page);
    }

    return true;
  }

  /** Gets a bit. */
  get(i: number): boolean {
    const o: number = i & 7;
    const j: number = (i - o) / 8;

    // Bit 0 is the most significant bit of its byte.
    return !!(this.getByte(j) & (128 >> o));
  }

  /** Sets a bit. */
  set(i: number, v: boolean): boolean {
    const o: number = i & 7;
    const j: number = (i - o) / 8;
    const b: number = this.getByte(j);

    return this.setByte(j, v ? b | (128 >> o) : b & (255 ^ (128 >> o)));
  }

  /** Gets a single buffer representing the entire bitfield. */
  toBuffer(): Uint8Array {
    const all: Uint8Array = new Uint8Array(this.pages.length * this.pageSize);

    for (let i = 0; i < this.pages.length; i++) {
      const next: Page = this.pages.get(i, true);

      if (next) {
        all
          .subarray(i * this.pageSize)
          .set(
            next.buffer.subarray(
              this.pageOffset,
              this.pageOffset + this.pageSize,
            ),
          );
      }
    }

    return all;
  }
}
134 |
--------------------------------------------------------------------------------
/src/utils/srv.ts:
--------------------------------------------------------------------------------
1 | import type { ConnectOptions } from "../types.ts";
2 | import { parseSrvUrl } from "./uri.ts";
3 |
4 | enum AllowedOption {
5 | authSource = "authSource",
6 | replicaSet = "replicaSet",
7 | loadBalanced = "loadBalanced",
8 | }
9 |
10 | function isAllowedOption(key: unknown): key is AllowedOption {
11 | return Object.values(AllowedOption).includes(key as AllowedOption);
12 | }
13 |
/** Minimal DNS resolver surface; injectable so tests can avoid real DNS. */
interface Resolver {
  resolveDns: typeof Deno.resolveDns;
}

/** Connection options extracted from the SRV host's single TXT record. */
interface SRVResolveResultOptions {
  authSource?: string;
  replicaSet?: string;
  loadBalanced?: string;
}

/** Outcome of an SRV lookup: server list plus TXT-derived options. */
interface SRVResolveResult {
  servers: { host: string; port: number }[];
  options: SRVResolveResultOptions;
}
28 |
/** Error raised when SRV/TXT resolution of a connection string fails. */
class SRVError extends Error {
  constructor(message?: string) {
    super(message);
    this.name = "SRVError";
  }
}
35 |
36 | export class Srv {
37 | resolver: Resolver;
38 |
39 | constructor(resolver = { resolveDns: Deno.resolveDns }) {
40 | this.resolver = resolver;
41 | }
42 |
43 | async resolveSrvUrl(urlString: string): Promise {
44 | const options = parseSrvUrl(urlString);
45 | const { srvServer, ...connectOptions } = options;
46 | if (!srvServer) {
47 | throw new SRVError(
48 | `Could not parse srv server address from ${urlString}`,
49 | );
50 | }
51 | const resolveResult = await this.resolve(srvServer);
52 | return {
53 | servers: resolveResult.servers,
54 | // TODO: Check and throw on invalid options
55 | ...resolveResult.options,
56 | ...connectOptions,
57 | };
58 | }
59 |
60 | async resolve(url: string): Promise {
61 | const tokens = url.split(".");
62 | if (tokens.length < 3) {
63 | throw new SRVError(
64 | `Expected url in format 'host.domain.tld', received ${url}`,
65 | );
66 | }
67 |
68 | const srvRecord = await this.resolver.resolveDns(
69 | `_mongodb._tcp.${url}`,
70 | "SRV",
71 | );
72 | if (!(srvRecord?.length > 0)) {
73 | throw new SRVError(
74 | `Expected at least one SRV record, received ${srvRecord?.length} for url ${url}`,
75 | );
76 | }
77 | const txtRecords = await this.resolver.resolveDns(url, "TXT");
78 | if (txtRecords?.length !== 1) {
79 | throw new SRVError(
80 | `Expected exactly one TXT record, received ${txtRecords?.length} for url ${url}`,
81 | );
82 | }
83 |
84 | const servers = srvRecord.map((record) => {
85 | return {
86 | host: record.target,
87 | port: record.port,
88 | };
89 | });
90 |
91 | const optionsUri = txtRecords[0].join("");
92 | const options: { valid: SRVResolveResultOptions; illegal: string[] } = {
93 | valid: {},
94 | illegal: [],
95 | };
96 | for (const option of optionsUri.split("&")) {
97 | const [key, value] = option.split("=");
98 | if (isAllowedOption(key) && !!value) options.valid[key] = value;
99 | else options.illegal.push(option);
100 | }
101 |
102 | if (options.illegal.length !== 0) {
103 | throw new SRVError(
104 | `Illegal uri options: ${options.illegal}. Allowed options: ${
105 | Object.values(AllowedOption)
106 | }`,
107 | );
108 | }
109 |
110 | return {
111 | servers,
112 | options: options.valid,
113 | };
114 | }
115 | }
116 |
--------------------------------------------------------------------------------
/src/utils/uri.ts:
--------------------------------------------------------------------------------
1 | // mongodb://username:password@example.com:27017,example2.com:27017,...,example.comN:27017/database?key=value&keyN=valueN
2 | import type { ConnectOptions, Credential, Server } from "../types.ts";
3 | import { Srv } from "./srv.ts";
4 |
/** The pieces of a parsed mongodb connection string. */
interface Parts {
  /** Credentials parsed from the `user:password@` segment, if present. */
  auth?: { user: string; password?: string };
  /** Key/value pairs parsed from the `#fragment` portion. */
  // deno-lint-ignore no-explicit-any
  hash?: any;
  /** One entry per comma-separated `host[:port]` in the url. */
  servers?: Server[];
  /** The original, unmodified input url. */
  href?: string;
  /** pathname plus search, when either is present. */
  path?: string;
  pathname?: string;
  protocol?: string;
  /** Key/value pairs parsed from the `?query` portion. */
  // deno-lint-ignore no-explicit-any
  search?: any;
}
17 |
18 | //adapted from https://github.com/QubitProducts/urlite
19 | // deno-lint-ignore camelcase
20 | export function parse_url(url: string): Parts {
21 | const fragments = [
22 | "protocol",
23 | "auth",
24 | "hostname",
25 | "port",
26 | "pathname",
27 | "search",
28 | "hash",
29 | ];
30 | const pattern =
31 | /([^:/?#]+:)?(?:(?:\/\/)(?:([^/?#]*:?[^@/]+)@)?([^/:?#]+)(?:(?::)(\d+))?)?(\/?[^?#]*)?(\?[^#]*)?(#[^\s]*)?/;
32 |
33 | const multipleServerPattern =
34 | /([^:/?#]+:)?(?:(?:\/\/)(?:([^/?#]*:?[^@/]+)@)?((?:(?:[^/:?#]+)(?:(?::)(?:\d+))?)+))?/;
35 |
36 | // deno-lint-ignore no-explicit-any, camelcase
37 | function parse_simple(url: string): any {
38 | // deno-lint-ignore no-explicit-any
39 | const parts: any = { servers: [], href: url };
40 | const multiServerMatch = url.match(multipleServerPattern);
41 |
42 | if (multiServerMatch![3].includes(",")) {
43 | const [first, ...rest] = multiServerMatch![3].split(",");
44 | const parts = parse_simple(
45 | url.replace(multiServerMatch![3], first),
46 | );
47 |
48 | for (const serverName of rest) {
49 | const subServer = parse_simple(`temp://${serverName}`);
50 | parts.servers.push(subServer.servers[0]);
51 | }
52 |
53 | return parts;
54 | }
55 |
56 | const matches = url.match(pattern);
57 | let l = fragments.length;
58 | while (l--) {
59 | parts[fragments[l]] = matches![l + 1]
60 | ? decodeURIComponent(matches![l + 1])
61 | : matches![l + 1];
62 | }
63 | parts["servers"] = [
64 | { host: parts["hostname"], port: parseInt(parts["port"]) },
65 | ];
66 | delete parts["hostname"];
67 | delete parts["port"];
68 | parts.path = parts.search
69 | ? (parts.pathname ? parts.pathname + parts.search : parts.search)
70 | : parts.pathname;
71 | return parts;
72 | }
73 |
74 | function parse(url: string): Parts {
75 | // deno-lint-ignore no-explicit-any
76 | const parsed: any = parse_simple(url);
77 | if (parsed.auth) parsed.auth = decodeAuth(parsed.auth);
78 | parsed.search = parsed.search ? queryString("?", parsed.search) : {};
79 | parsed.hash = parsed.hash ? queryString("#", parsed.hash) : {};
80 | return parsed;
81 | }
82 |
83 | // deno-lint-ignore no-explicit-any
84 | function decodeAuth(auth: string): any {
85 | const split = auth.split(":");
86 | return {
87 | user: split[0],
88 | password: split[1],
89 | };
90 | }
91 |
92 | // deno-lint-ignore no-explicit-any
93 | function queryString(identifier: string, qs: string): any {
94 | // deno-lint-ignore no-explicit-any
95 | const obj: any = {};
96 | const params = decodeURI(qs || "").replace(
97 | new RegExp("\\" + identifier),
98 | "",
99 | ).split(/&|&/);
100 |
101 | for (const param of params) {
102 | if (params) {
103 | let index = param.indexOf("=");
104 | if (index === -1) index = param.length;
105 |
106 | const key = param.substring(0, index);
107 | const val = param.substring(index + 1);
108 |
109 | if (Object.prototype.hasOwnProperty.call(obj, key)) {
110 | if (!Array.isArray(obj[key])) obj[key] = [obj[key]];
111 | obj[key].push(val);
112 | } else {
113 | obj[key] = val || true;
114 | }
115 | }
116 | }
117 |
118 | return obj;
119 | }
120 |
121 | return parse(url);
122 | }
123 |
124 | export function isSrvUrl(url: string) {
125 | return /^mongodb\+srv/.test(url);
126 | }
127 |
128 | export type SrvConnectOptions = Omit & {
129 | srvServer?: string;
130 | };
131 |
132 | export function parseSrvUrl(url: string): SrvConnectOptions {
133 | const data = parse_url(url);
134 |
135 | const defaultAuthDb = (data.pathname && (data.pathname.length > 1))
136 | ? data.pathname!.substring(1)
137 | : null;
138 |
139 | const authSource = new URLSearchParams(data.search).get("authSource");
140 |
141 | const connectOptions: SrvConnectOptions = {
142 | db: defaultAuthDb ?? "test",
143 | };
144 |
145 | if (data.auth) {
146 | connectOptions.credential = {
147 | username: data.auth.user,
148 | password: data.auth.password,
149 | db: authSource ?? defaultAuthDb ?? "admin",
150 | mechanism: data.search.authMechanism || "SCRAM-SHA-256",
151 | };
152 | }
153 | connectOptions.compression = data.search.compressors
154 | ? data.search.compressors.split(",")
155 | : [];
156 | connectOptions.srvServer = data.servers?.[0].host;
157 |
158 | if (data.search.appname) {
159 | connectOptions.appname = data.search.appname;
160 | }
161 | if (data.search.ssl) {
162 | connectOptions.tls = data.search.ssl === "true";
163 | }
164 | if (data.search.tls) {
165 | connectOptions.tls = data.search.tls === "true";
166 | } else {
167 | connectOptions.tls = true;
168 | }
169 | if (data.search.tlsCAFile) {
170 | connectOptions.certFile = data.search.tlsCAFile;
171 | }
172 | if (data.search.tlsCertificateKeyFile) {
173 | connectOptions.keyFile = data.search.tlsCertificateKeyFile;
174 | }
175 | if (data.search.tlsCertificateKeyFilePassword) {
176 | connectOptions.keyFilePassword = data.search.tlsCertificateKeyFilePassword;
177 | }
178 | if (data.search.safe) {
179 | connectOptions.safe = data.search.safe === "true";
180 | }
181 | if (data.search.retryWrites) {
182 | connectOptions.retryWrites = data.search.retryWrites === "true";
183 | }
184 | return connectOptions;
185 | }
186 |
187 | export function parse(url: string): Promise {
188 | return isSrvUrl(url)
189 | ? new Srv().resolveSrvUrl(url)
190 | : Promise.resolve(parseNormalUrl(url));
191 | }
192 |
193 | function parseNormalUrl(url: string): ConnectOptions {
194 | const data = parse_url(url);
195 |
196 | const defaultAuthDb = (data.pathname && (data.pathname.length > 1))
197 | ? data.pathname!.substring(1)
198 | : null;
199 |
200 | const authSource = new URLSearchParams(data.search).get("authSource");
201 |
202 | const connectOptions: ConnectOptions = {
203 | servers: data.servers!,
204 | db: defaultAuthDb ?? "test",
205 | };
206 |
207 | for (const server of connectOptions.servers) {
208 | if (server.host.includes(".sock")) {
209 | server.domainSocket = server.host;
210 | }
211 | server.port = server.port || 27017;
212 | }
213 |
214 | if (data.auth) {
215 | connectOptions.credential = {
216 | username: data.auth.user,
217 | password: data.auth.password,
218 | db: authSource ?? defaultAuthDb ?? "admin",
219 | mechanism: data.search.authMechanism || "SCRAM-SHA-256",
220 | };
221 | }
222 | connectOptions.compression = data.search.compressors
223 | ? data.search.compressors.split(",")
224 | : [];
225 | if (data.search.appname) {
226 | connectOptions.appname = data.search.appname;
227 | }
228 | if (data.search.ssl) {
229 | connectOptions.tls = data.search.ssl === "true";
230 | }
231 | if (data.search.tls) {
232 | connectOptions.tls = data.search.tls === "true";
233 | }
234 | if (data.search.tlsCAFile) {
235 | connectOptions.certFile = data.search.tlsCAFile;
236 | }
237 | if (data.search.tlsCertificateKeyFile) {
238 | connectOptions.keyFile = data.search.tlsCertificateKeyFile;
239 | }
240 | if (data.search.tlsCertificateKeyFilePassword) {
241 | connectOptions.keyFilePassword = data.search.tlsCertificateKeyFilePassword;
242 | }
243 | if (data.search.safe) {
244 | connectOptions.safe = data.search.safe === "true";
245 | }
246 | if (data.search.retryWrites) {
247 | connectOptions.retryWrites = data.search.retryWrites === "true";
248 | }
249 | return connectOptions;
250 | }
251 |
--------------------------------------------------------------------------------
/tests/assets/1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/denodrivers/mongo/e02f8813081bbdd01324fadca09c1bd8c0bb2816/tests/assets/1.jpg
--------------------------------------------------------------------------------
/tests/assets/2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/denodrivers/mongo/e02f8813081bbdd01324fadca09c1bd8c0bb2816/tests/assets/2.jpg
--------------------------------------------------------------------------------
/tests/assets/sample_neighborhoods.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "_id": {
4 | "$oid": "55cb9c666c522cafdb053a1a"
5 | },
6 | "name": "Bedford",
7 | "geometry": {
8 | "coordinates": [
9 | [
10 | [
11 | -73.94193078816193,
12 | 40.70072523469547
13 | ],
14 | [
15 | -73.94220058705264,
16 | 40.700890667467746
17 | ],
18 | [
19 | -73.94306406845838,
20 | 40.7014244350918
21 | ],
22 | [
23 | -73.94322686012315,
24 | 40.701520709145726
25 | ],
26 | [
27 | -73.9438247374114,
28 | 40.701862007878276
29 | ],
30 | [
31 | -73.94400504048726,
32 | 40.70196179219718
33 | ],
34 | [
35 | -73.94431460096804,
36 | 40.70213334535181
37 | ],
38 | [
39 | -73.94463910154856,
40 | 40.70231369467456
41 | ],
42 | [
43 | -73.94544988192177,
44 | 40.702760635974364
45 | ],
46 | [
47 | -73.9458549904679,
48 | 40.70298720677488
49 | ],
50 | [
51 | -73.94625107780892,
52 | 40.70320874745355
53 | ],
54 | [
55 | -73.94705205297524,
56 | 40.70366394934019
57 | ],
58 | [
59 | -73.94753858146478,
60 | 40.703350650664795
61 | ],
62 | [
63 | -73.9493787354337,
64 | 40.70215888982628
65 | ],
66 | [
67 | -73.95027424109588,
68 | 40.70157924195056
69 | ],
70 | [
71 | -73.95128819434734,
72 | 40.70092236548591
73 | ],
74 | [
75 | -73.951920189279,
76 | 40.70148754916077
77 | ],
78 | [
79 | -73.95255052777945,
80 | 40.7020516665144
81 | ],
82 | [
83 | -73.95318085172319,
84 | 40.70261690547745
85 | ],
86 | [
87 | -73.9538119690652,
88 | 40.70318097979544
89 | ],
90 | [
91 | -73.95572361014881,
92 | 40.70194576955721
93 | ],
94 | [
95 | -73.95745736372834,
96 | 40.70082260318457
97 | ],
98 | [
99 | -73.95722517405626,
100 | 40.69999935002626
101 | ],
102 | [
103 | -73.957167301054,
104 | 40.69970786791901
105 | ],
106 | [
107 | -73.95701993123406,
108 | 40.698973914349565
109 | ],
110 | [
111 | -73.95795938220984,
112 | 40.69882000321581
113 | ],
114 | [
115 | -73.95885874627406,
116 | 40.6986773264162
117 | ],
118 | [
119 | -73.96019688857467,
120 | 40.698462727438596
121 | ],
122 | [
123 | -73.96105100700007,
124 | 40.698326078819065
125 | ],
126 | [
127 | -73.96092543804184,
128 | 40.69773650701631
129 | ],
130 | [
131 | -73.96062056531659,
132 | 40.6963201401926
133 | ],
134 | [
135 | -73.96015854658333,
136 | 40.69411730915604
137 | ],
138 | [
139 | -73.95931047927598,
140 | 40.69421508783189
141 | ],
142 | [
143 | -73.95799732979468,
144 | 40.694365838684114
145 | ],
146 | [
147 | -73.9570870194244,
148 | 40.694470440162995
149 | ],
150 | [
151 | -73.95614239268207,
152 | 40.69457901857237
153 | ],
154 | [
155 | -73.95582662769675,
156 | 40.69299238288233
157 | ],
158 | [
159 | -73.95541057949602,
160 | 40.690908291885876
161 | ],
162 | [
163 | -73.95635602958276,
164 | 40.69079978191732
165 | ],
166 | [
167 | -73.95727249112613,
168 | 40.69069347835403
169 | ],
170 | [
171 | -73.95808563435828,
172 | 40.69060146372528
173 | ],
174 | [
175 | -73.9581804874994,
176 | 40.69059073040573
177 | ],
178 | [
179 | -73.95857844775936,
180 | 40.690545694857576
181 | ],
182 | [
183 | -73.95886713625437,
184 | 40.69051224801476
185 | ],
186 | [
187 | -73.95899330491858,
188 | 40.69049762936284
189 | ],
190 | [
191 | -73.95928133730644,
192 | 40.69046425696955
193 | ],
194 | [
195 | -73.95934862389824,
196 | 40.6904564609108
197 | ],
198 | [
199 | -73.95942791320554,
200 | 40.69044727471387
201 | ],
202 | [
203 | -73.95957591847137,
204 | 40.69042998753855
205 | ],
206 | [
207 | -73.96008336800442,
208 | 40.6903707157072
209 | ],
210 | [
211 | -73.96013760800457,
212 | 40.69036438035883
213 | ],
214 | [
215 | -73.96029281668112,
216 | 40.690346249915414
217 | ],
218 | [
219 | -73.96023740336433,
220 | 40.69006222280781
221 | ],
222 | [
223 | -73.96022304539724,
224 | 40.689988627383755
225 | ],
226 | [
227 | -73.96018691858275,
228 | 40.689803455988546
229 | ],
230 | [
231 | -73.96017256138677,
232 | 40.68972986156118
233 | ],
234 | [
235 | -73.96012172912181,
236 | 40.68946930706387
237 | ],
238 | [
239 | -73.96009714565346,
240 | 40.689345210097464
241 | ],
242 | [
243 | -73.96000519802635,
244 | 40.688881033718204
245 | ],
246 | [
247 | -73.95985939425704,
248 | 40.688147451217226
249 | ],
250 | [
251 | -73.95971374756459,
252 | 40.6874156340909
253 | ],
254 | [
255 | -73.95956770121337,
256 | 40.68668255592727
257 | ],
258 | [
259 | -73.95684165193596,
260 | 40.68699607883792
261 | ],
262 | [
263 | -73.95468418850508,
264 | 40.68724485443714
265 | ],
266 | [
267 | -73.95453798607406,
268 | 40.68651117540455
269 | ],
270 | [
271 | -73.95439296867414,
272 | 40.685779720013606
273 | ],
274 | [
275 | -73.95424647696164,
276 | 40.68504624826183
277 | ],
278 | [
279 | -73.95410042574005,
280 | 40.684313107633436
281 | ],
282 | [
283 | -73.95395453033524,
284 | 40.68358077882069
285 | ],
286 | [
287 | -73.95380893530668,
288 | 40.68284800827331
289 | ],
290 | [
291 | -73.95366256227194,
292 | 40.68211490361348
293 | ],
294 | [
295 | -73.95351616791015,
296 | 40.68138260047889
297 | ],
298 | [
299 | -73.95337017508861,
300 | 40.68064050844431
301 | ],
302 | [
303 | -73.95155682676496,
304 | 40.680498847575564
305 | ],
306 | [
307 | -73.95115828512961,
308 | 40.68047861480679
309 | ],
310 | [
311 | -73.9495568871113,
312 | 40.68039040292329
313 | ],
314 | [
315 | -73.9477302355664,
316 | 40.680291846282316
317 | ],
318 | [
319 | -73.94674915979888,
320 | 40.680239661363046
321 | ],
322 | [
323 | -73.94627470970092,
324 | 40.68021332692951
325 | ],
326 | [
327 | -73.94397347805013,
328 | 40.680088128426995
329 | ],
330 | [
331 | -73.94326176928655,
332 | 40.68005060712657
333 | ],
334 | [
335 | -73.94120864299748,
336 | 40.67993835375214
337 | ],
338 | [
339 | -73.94032793962053,
340 | 40.67988997506463
341 | ],
342 | [
343 | -73.94047635005941,
344 | 40.680635695422964
345 | ],
346 | [
347 | -73.94062005382186,
348 | 40.68137013010259
349 | ],
350 | [
351 | -73.94076893329961,
352 | 40.68210083903887
353 | ],
354 | [
355 | -73.9409133427312,
356 | 40.682833617234294
357 | ],
358 | [
359 | -73.94105783787931,
360 | 40.68356687284592
361 | ],
362 | [
363 | -73.94120399291621,
364 | 40.68429983923358
365 | ],
366 | [
367 | -73.94134827184915,
368 | 40.685031202512505
369 | ],
370 | [
371 | -73.94149491757346,
372 | 40.68576452882908
373 | ],
374 | [
375 | -73.94163933150469,
376 | 40.68649727009258
377 | ],
378 | [
379 | -73.94178527584324,
380 | 40.687228372121126
381 | ],
382 | [
383 | -73.9419324508184,
384 | 40.687962958755094
385 | ],
386 | [
387 | -73.94207684924385,
388 | 40.68869720298344
389 | ],
390 | [
391 | -73.94222203471806,
392 | 40.68942797886745
393 | ],
394 | [
395 | -73.94236932748694,
396 | 40.690159944665304
397 | ],
398 | [
399 | -73.94251587928605,
400 | 40.69089200097073
401 | ],
402 | [
403 | -73.94266181801652,
404 | 40.69162434435983
405 | ],
406 | [
407 | -73.94280765181726,
408 | 40.692357794128945
409 | ],
410 | [
411 | -73.94295136131598,
412 | 40.69309078423585
413 | ],
414 | [
415 | -73.94310040895432,
416 | 40.69382302905847
417 | ],
418 | [
419 | -73.94311427813774,
420 | 40.693894720557466
421 | ],
422 | [
423 | -73.94312826743185,
424 | 40.693967038330925
425 | ],
426 | [
427 | -73.943242490861,
428 | 40.694557485733355
429 | ],
430 | [
431 | -73.94338802084431,
432 | 40.69528899051899
433 | ],
434 | [
435 | -73.94352527471477,
436 | 40.69603085523812
437 | ],
438 | [
439 | -73.94354024149403,
440 | 40.6961081421151
441 | ],
442 | [
443 | -73.9435563415296,
444 | 40.69619128295102
445 | ],
446 | [
447 | -73.94362121369004,
448 | 40.696526279661654
449 | ],
450 | [
451 | -73.94363806934868,
452 | 40.69661331854307
453 | ],
454 | [
455 | -73.9436842703752,
456 | 40.69685189440415
457 | ],
458 | [
459 | -73.94368427322361,
460 | 40.696851909818065
461 | ],
462 | [
463 | -73.9437245356891,
464 | 40.697059812179496
465 | ],
466 | [
467 | -73.94374306706803,
468 | 40.69715549995503
469 | ],
470 | [
471 | -73.94378455587042,
472 | 40.6973697290538
473 | ],
474 | [
475 | -73.94380383211836,
476 | 40.697469265449826
477 | ],
478 | [
479 | -73.94391750192877,
480 | 40.69805620211356
481 | ],
482 | [
483 | -73.94394947271304,
484 | 40.69822127983908
485 | ],
486 | [
487 | -73.94409591260093,
488 | 40.69897295461309
489 | ],
490 | [
491 | -73.94424286147482,
492 | 40.69969927964773
493 | ],
494 | [
495 | -73.9443878859649,
496 | 40.70042452378256
497 | ],
498 | [
499 | -73.94193078816193,
500 | 40.70072523469547
501 | ]
502 | ]
503 | ],
504 | "type": "Polygon"
505 | }
506 | },
507 | {
508 | "_id": {
509 | "$oid": "55cb9c666c522cafdb053a32"
510 | },
511 | "name": "Yorkville",
512 | "geometry": {
513 | "coordinates": [
514 | [
515 | [
516 | [
517 | -73.93804640603437,
518 | 40.78082954427551
519 | ],
520 | [
521 | -73.93806723089932,
522 | 40.78094470269807
523 | ],
524 | [
525 | -73.93804948921758,
526 | 40.78105580604244
527 | ],
528 | [
529 | -73.93796757721647,
530 | 40.78128933406025
531 | ],
532 | [
533 | -73.93808213268761,
534 | 40.781418437274596
535 | ],
536 | [
537 | -73.93830097951646,
538 | 40.78136208436499
539 | ],
540 | [
541 | -73.93874423089275,
542 | 40.78104387604222
543 | ],
544 | [
545 | -73.93882863410155,
546 | 40.7807634507276
547 | ],
548 | [
549 | -73.93883951845037,
550 | 40.78059924979984
551 | ],
552 | [
553 | -73.93872802926595,
554 | 40.78051074370458
555 | ],
556 | [
557 | -73.9392178962528,
558 | 40.780013520221424
559 | ],
560 | [
561 | -73.93930528496779,
562 | 40.77995122455209
563 | ],
564 | [
565 | -73.93937515379675,
566 | 40.7799787718492
567 | ],
568 | [
569 | -73.93949329752056,
570 | 40.77987369131541
571 | ],
572 | [
573 | -73.93943238100773,
574 | 40.77981220027457
575 | ],
576 | [
577 | -73.93958378972476,
578 | 40.77957647400713
579 | ],
580 | [
581 | -73.93953015793254,
582 | 40.77952762026046
583 | ],
584 | [
585 | -73.939318219876,
586 | 40.77971800182686
587 | ],
588 | [
589 | -73.9392374617828,
590 | 40.77973339355579
591 | ],
592 | [
593 | -73.93861200821945,
594 | 40.780338955868935
595 | ],
596 | [
597 | -73.93839622591737,
598 | 40.78026243339117
599 | ],
600 | [
601 | -73.938237735094,
602 | 40.780259079465324
603 | ],
604 | [
605 | -73.93779269036344,
606 | 40.78035790421231
607 | ],
608 | [
609 | -73.93759894622622,
610 | 40.78046784086144
611 | ],
612 | [
613 | -73.9376536596381,
614 | 40.78079000755278
615 | ],
616 | [
617 | -73.93764631821483,
618 | 40.78100121589283
619 | ],
620 | [
621 | -73.93779944179562,
622 | 40.781031339796634
623 | ],
624 | [
625 | -73.93779448726416,
626 | 40.780781258755425
627 | ],
628 | [
629 | -73.93790133623939,
630 | 40.78074999617253
631 | ],
632 | [
633 | -73.93804640603437,
634 | 40.78082954427551
635 | ]
636 | ]
637 | ],
638 | [
639 | [
640 | [
641 | -73.94383256676022,
642 | 40.782859089475245
643 | ],
644 | [
645 | -73.9438932453981,
646 | 40.78288772733219
647 | ],
648 | [
649 | -73.943954601631,
650 | 40.78291552015379
651 | ],
652 | [
653 | -73.94405439446334,
654 | 40.78296435804619
655 | ],
656 | [
657 | -73.94464665991993,
658 | 40.78321787330016
659 | ],
660 | [
661 | -73.94472264526928,
662 | 40.78324757264143
663 | ],
664 | [
665 | -73.94706938226709,
666 | 40.78423622711466
667 | ],
668 | [
669 | -73.94933170560954,
670 | 40.78519312655759
671 | ],
672 | [
673 | -73.94982390409011,
674 | 40.78451557104566
675 | ],
676 | [
677 | -73.95028254920818,
678 | 40.78389046999264
679 | ],
680 | [
681 | -73.95073913807909,
682 | 40.78326171027431
683 | ],
684 | [
685 | -73.95119927339022,
686 | 40.78263339296121
687 | ],
688 | [
689 | -73.95165920749085,
690 | 40.78200767416535
691 | ],
692 | [
693 | -73.95212203271687,
694 | 40.781386710220794
695 | ],
696 | [
697 | -73.95257421938265,
698 | 40.780753034022965
699 | ],
700 | [
701 | -73.95303367951709,
702 | 40.78012545503866
703 | ],
704 | [
705 | -73.95349212383219,
706 | 40.779496622676206
707 | ],
708 | [
709 | -73.9539837022136,
710 | 40.77882211062083
711 | ],
712 | [
713 | -73.95448224114962,
714 | 40.778138665021665
715 | ],
716 | [
717 | -73.95494520074166,
718 | 40.77750394666421
719 | ],
720 | [
721 | -73.95540729761609,
722 | 40.77686847989678
723 | ],
724 | [
725 | -73.9558693368217,
726 | 40.77623622584272
727 | ],
728 | [
729 | -73.9563349232549,
730 | 40.77560078338051
731 | ],
732 | [
733 | -73.95679749082419,
734 | 40.77496631066816
735 | ],
736 | [
737 | -73.9572941004381,
738 | 40.77428355507528
739 | ],
740 | [
741 | -73.95505481622959,
742 | 40.773336851176026
743 | ],
744 | [
745 | -73.95268752458824,
746 | 40.77234302098273
747 | ],
748 | [
749 | -73.95043175557528,
750 | 40.77138973153775
751 | ],
752 | [
753 | -73.94807387944711,
754 | 40.77039439466542
755 | ],
756 | [
757 | -73.94779385491792,
758 | 40.77024915206164
759 | ],
760 | [
761 | -73.94771625089346,
762 | 40.77021606808562
763 | ],
764 | [
765 | -73.94766538436596,
766 | 40.77019325312954
767 | ],
768 | [
769 | -73.94761452955883,
770 | 40.770170429444896
771 | ],
772 | [
773 | -73.94752257888095,
774 | 40.77012969528945
775 | ],
776 | [
777 | -73.94748975456329,
778 | 40.7701151545216
779 | ],
780 | [
781 | -73.94690764133767,
782 | 40.77073391172354
783 | ],
784 | [
785 | -73.94640418919069,
786 | 40.77126904561922
787 | ],
788 | [
789 | -73.94561514012553,
790 | 40.771943026085076
791 | ],
792 | [
793 | -73.94487889596748,
794 | 40.77256956150469
795 | ],
796 | [
797 | -73.9444243359585,
798 | 40.77304532251584
799 | ],
800 | [
801 | -73.94396084370877,
802 | 40.773530420091916
803 | ],
804 | [
805 | -73.94366431814802,
806 | 40.773840764176796
807 | ],
808 | [
809 | -73.9433389475049,
810 | 40.77421119259063
811 | ],
812 | [
813 | -73.94293043781397,
814 | 40.774676268035805
815 | ],
816 | [
817 | -73.9425876964068,
818 | 40.77521895652805
819 | ],
820 | [
821 | -73.94240100949048,
822 | 40.775726927526115
823 | ],
824 | [
825 | -73.94208160457376,
826 | 40.77595701631703
827 | ],
828 | [
829 | -73.94200266722274,
830 | 40.77618531738269
831 | ],
832 | [
833 | -73.94207418803877,
834 | 40.77691784697707
835 | ],
836 | [
837 | -73.94209312757741,
838 | 40.77696471695027
839 | ],
840 | [
841 | -73.94212060092353,
842 | 40.77700907806879
843 | ],
844 | [
845 | -73.94215605522935,
846 | 40.77705003762475
847 | ],
848 | [
849 | -73.94219877703716,
850 | 40.777086771360594
851 | ],
852 | [
853 | -73.94224790663611,
854 | 40.77711854005664
855 | ],
856 | [
857 | -73.9423024553623,
858 | 40.777144704407284
859 | ],
860 | [
861 | -73.94236132549445,
862 | 40.77716473788681
863 | ],
864 | [
865 | -73.94232881653734,
866 | 40.777242655641246
867 | ],
868 | [
869 | -73.94243848174503,
870 | 40.777315235765954
871 | ],
872 | [
873 | -73.94245740990844,
874 | 40.77740747334762
875 | ],
876 | [
877 | -73.94265530676857,
878 | 40.77812496557382
879 | ],
880 | [
881 | -73.94289340818793,
882 | 40.7786140932461
883 | ],
884 | [
885 | -73.94253556320852,
886 | 40.77909095606242
887 | ],
888 | [
889 | -73.94271237476218,
890 | 40.7792148569398
891 | ],
892 | [
893 | -73.94262799584376,
894 | 40.77932311896862
895 | ],
896 | [
897 | -73.94271600545089,
898 | 40.77954416947608
899 | ],
900 | [
901 | -73.94278066641971,
902 | 40.77950615406814
903 | ],
904 | [
905 | -73.94300423950467,
906 | 40.77963949547416
907 | ],
908 | [
909 | -73.94321386265224,
910 | 40.77931758865997
911 | ],
912 | [
913 | -73.94360047475412,
914 | 40.78015909946616
915 | ],
916 | [
917 | -73.94371468850564,
918 | 40.78062981552884
919 | ],
920 | [
921 | -73.94387075988706,
922 | 40.78127302657157
923 | ],
924 | [
925 | -73.94388068102455,
926 | 40.78153204336433
927 | ],
928 | [
929 | -73.94364823539011,
930 | 40.78265616133325
931 | ],
932 | [
933 | -73.9435442071252,
934 | 40.78288052382262
935 | ],
936 | [
937 | -73.9435697971985,
938 | 40.782923078573404
939 | ],
940 | [
941 | -73.94363201727829,
942 | 40.78296232368537
943 | ],
944 | [
945 | -73.9437600527451,
946 | 40.78282893171711
947 | ],
948 | [
949 | -73.94383256676022,
950 | 40.782859089475245
951 | ]
952 | ]
953 | ]
954 | ],
955 | "type": "MultiPolygon"
956 | }
957 | }
958 | ]
959 |
--------------------------------------------------------------------------------
/tests/assets/sample_places.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "_id": {
4 | "$oid": "55cba2476c522cafdb053add"
5 | },
6 | "location": {
7 | "coordinates": [
8 | -73.856077,
9 | 40.848447
10 | ],
11 | "type": "Point"
12 | },
13 | "name": "Morris Park Bake Shop"
14 | },
15 | {
16 | "_id": {
17 | "$oid": "55cba2476c522cafdb053ade"
18 | },
19 | "location": {
20 | "coordinates": [
21 | -73.961704,
22 | 40.662942
23 | ],
24 | "type": "Point"
25 | },
26 | "name": "Wendy'S"
27 | },
28 | {
29 | "_id": {
30 | "$oid": "55cba2476c522cafdb053adf"
31 | },
32 | "location": {
33 | "coordinates": [
34 | -73.98241999999999,
35 | 40.579505
36 | ],
37 | "type": "Point"
38 | },
39 | "name": "Riviera Caterer"
40 | },
41 | {
42 | "_id": {
43 | "$oid": "55cba2476c522cafdb053ae0"
44 | },
45 | "location": {
46 | "coordinates": [
47 | -73.8601152,
48 | 40.7311739
49 | ],
50 | "type": "Point"
51 | },
52 | "name": "Tov Kosher Kitchen"
53 | },
54 | {
55 | "_id": {
56 | "$oid": "55cba2476c522cafdb053ae1"
57 | },
58 | "location": {
59 | "coordinates": [
60 | -73.8803827,
61 | 40.7643124
62 | ],
63 | "type": "Point"
64 | },
65 | "name": "Brunos On The Boulevard"
66 | },
67 | {
68 | "_id": {
69 | "$oid": "55cba2476c522cafdb053ae2"
70 | },
71 | "location": {
72 | "coordinates": [
73 | -73.98513559999999,
74 | 40.7676919
75 | ],
76 | "type": "Point"
77 | },
78 | "name": "Dj Reynolds Pub And Restaurant"
79 | },
80 | {
81 | "_id": {
82 | "$oid": "55cba2476c522cafdb053ae3"
83 | },
84 | "location": {
85 | "coordinates": [
86 | -73.9068506,
87 | 40.6199034
88 | ],
89 | "type": "Point"
90 | },
91 | "name": "Wilken'S Fine Food"
92 | },
93 | {
94 | "_id": {
95 | "$oid": "55cba2476c522cafdb053ae4"
96 | },
97 | "location": {
98 | "coordinates": [
99 | -74.00528899999999,
100 | 40.628886
101 | ],
102 | "type": "Point"
103 | },
104 | "name": "Regina Caterers"
105 | },
106 | {
107 | "_id": {
108 | "$oid": "55cba2476c522cafdb053ae5"
109 | },
110 | "location": {
111 | "coordinates": [
112 | -73.9482609,
113 | 40.6408271
114 | ],
115 | "type": "Point"
116 | },
117 | "name": "Taste The Tropics Ice Cream"
118 | },
119 | {
120 | "_id": {
121 | "$oid": "55cba2476c522cafdb053ae6"
122 | },
123 | "location": {
124 | "coordinates": [
125 | -74.1377286,
126 | 40.6119572
127 | ],
128 | "type": "Point"
129 | },
130 | "name": "Kosher Island"
131 | },
132 | {
133 | "_id": {
134 | "$oid": "55cba2476c522cafdb053ae7"
135 | },
136 | "location": {
137 | "coordinates": [
138 | -73.8786113,
139 | 40.8502883
140 | ],
141 | "type": "Point"
142 | },
143 | "name": "Wild Asia"
144 | },
145 | {
146 | "_id": {
147 | "$oid": "55cba2476c522cafdb058a57"
148 | },
149 | "location": {
150 | "coordinates": [
151 | -73.9653551,
152 | 40.7828647
153 | ],
154 | "type": "Point"
155 | },
156 | "name": "Cafe1 & Cafe 4 (American Museum Of Natural History)"
157 | }
158 | ]
159 |
--------------------------------------------------------------------------------
/tests/cases/00_uri.ts:
--------------------------------------------------------------------------------
1 | import { parse, parseSrvUrl } from "../../src/utils/uri.ts";
2 | import { assertEquals, describe, it } from "../deps.ts";
3 |
4 | describe("uri", () => {
5 | it({
6 | name: "should correctly parse mongodb://localhost",
7 | async fn() {
8 | const options = await parse("mongodb://localhost/");
9 | assertEquals(options.db, "test");
10 | assertEquals(options.servers.length, 1);
11 | assertEquals(options.servers[0].host, "localhost");
12 | assertEquals(options.servers[0].port, 27017);
13 | },
14 | });
15 |
16 | it({
17 | name: "should correctly parse mongodb://localhost",
18 | async fn() {
19 | const options = await parse("mongodb://localhost/");
20 | assertEquals(options.db, "test");
21 | assertEquals(options.servers.length, 1);
22 | assertEquals(options.servers[0].host, "localhost");
23 | assertEquals(options.servers[0].port, 27017);
24 | },
25 | });
26 |
27 | it({
28 | name: "should correctly parse mongodb://localhost:27017",
29 | async fn() {
30 | const options = await parse("mongodb://localhost:27017/");
31 | assertEquals(options.db, "test");
32 | assertEquals(options.servers.length, 1);
33 | assertEquals(options.servers[0].host, "localhost");
34 | assertEquals(options.servers[0].port, 27017);
35 | },
36 | });
37 |
38 | it({
39 | name:
40 | "should correctly parse mongodb://localhost:27017/test?appname=hello%20world",
41 | async fn() {
42 | const options = await parse(
43 | "mongodb://localhost:27017/test?appname=hello%20world",
44 | );
45 | assertEquals(options.appname, "hello world");
46 | },
47 | });
48 |
49 | it({
50 | name: "should parse ?ssl=true",
51 | async fn() {
52 | const options = await parse(
53 | "mongodb://localhost:27017/test?ssl=true",
54 | );
55 | assertEquals(options.tls, true);
56 | },
57 | });
58 |
59 | it({
60 | name: "should parse srv url with ?ssl=true",
61 | async fn() {
62 | const options = await parseSrvUrl(
63 | "mongodb+srv://a:b@somesubdomain.somedomain.com:27017/test?ssl=true",
64 | );
65 | assertEquals(options.tls, true);
66 | },
67 | });
68 |
69 | it({
70 | name:
71 | "should correctly parse mongodb://localhost/?safe=true&readPreference=secondary",
72 | async fn() {
73 | const options = await parse(
74 | "mongodb://localhost/?safe=true&readPreference=secondary",
75 | );
76 | assertEquals(options.db, "test");
77 | assertEquals(options.servers.length, 1);
78 | assertEquals(options.servers[0].host, "localhost");
79 | assertEquals(options.servers[0].port, 27017);
80 | },
81 | });
82 |
83 | it({
84 | name: "should correctly parse mongodb://localhost:28101/",
85 | async fn() {
86 | const options = await parse("mongodb://localhost:28101/");
87 | assertEquals(options.db, "test");
88 | assertEquals(options.servers.length, 1);
89 | assertEquals(options.servers[0].host, "localhost");
90 | assertEquals(options.servers[0].port, 28101);
91 | },
92 | });
93 | it({
94 | name: "should correctly parse mongodb://fred:foobar@localhost/baz",
95 | async fn() {
96 | const options = await parse("mongodb://fred:foobar@localhost/baz");
97 | assertEquals(options.db, "baz");
98 | assertEquals(options.servers.length, 1);
99 | assertEquals(options.servers[0].host, "localhost");
100 | assertEquals(options.credential!.username, "fred");
101 | assertEquals(options.credential!.password, "foobar");
102 | assertEquals(options.credential!.db, "baz");
103 | },
104 | });
105 |
106 | it({
107 | name: "should correctly parse mongodb://fred:foo%20bar@localhost/baz",
108 | async fn() {
109 | const options = await parse("mongodb://fred:foo%20bar@localhost/baz");
110 | assertEquals(options.db, "baz");
111 | assertEquals(options.servers.length, 1);
112 | assertEquals(options.servers[0].host, "localhost");
113 | assertEquals(options.credential!.username, "fred");
114 | assertEquals(options.credential!.password, "foo bar");
115 | assertEquals(options.credential!.db, "baz");
116 | },
117 | });
118 |
119 | it({
120 | name: "should correctly parse mongodb://%2Ftmp%2Fmongodb-27017.sock",
121 | async fn() {
122 | const options = await parse("mongodb://%2Ftmp%2Fmongodb-27017.sock");
123 | assertEquals(options.servers.length, 1);
124 | assertEquals(options.servers[0].domainSocket, "/tmp/mongodb-27017.sock");
125 | assertEquals(options.db, "test");
126 | },
127 | });
128 |
  // Unix-domain-socket URIs: the percent-encoded socket path appears in the
  // host position and must be decoded into `servers[0].domainSocket`.
  it({
    name:
      "should correctly parse mongodb://fred:foo@%2Ftmp%2Fmongodb-27017.sock",
    async fn() {
      const options = await parse(
        "mongodb://fred:foo@%2Ftmp%2Fmongodb-27017.sock",
      );
      assertEquals(options.servers.length, 1);
      assertEquals(options.servers[0].domainSocket, "/tmp/mongodb-27017.sock");
      assertEquals(options.credential!.username, "fred");
      assertEquals(options.credential!.password, "foo");
      // No path segment in the URI, so the default database "test" applies.
      assertEquals(options.db, "test");
    },
  });

  it({
    name:
      "should correctly parse mongodb://fred:foo@%2Ftmp%2Fmongodb-27017.sock/somedb",
    async fn() {
      const options = await parse(
        "mongodb://fred:foo@%2Ftmp%2Fmongodb-27017.sock/somedb",
      );
      assertEquals(options.servers.length, 1);
      assertEquals(options.servers[0].domainSocket, "/tmp/mongodb-27017.sock");
      assertEquals(options.credential!.username, "fred");
      assertEquals(options.credential!.password, "foo");
      // With an explicit path, both the auth db and the default db are it.
      assertEquals(options.credential!.db, "somedb");
      assertEquals(options.db, "somedb");
    },
  });

  it({
    name:
      "should correctly parse mongodb://fred:foo@%2Ftmp%2Fmongodb-27017.sock/somedb?safe=true",
    async fn() {
      const options = await parse(
        "mongodb://fred:foo@%2Ftmp%2Fmongodb-27017.sock/somedb?safe=true",
      );
      assertEquals(options.servers.length, 1);
      assertEquals(options.servers[0].domainSocket, "/tmp/mongodb-27017.sock");
      assertEquals(options.credential!.username, "fred");
      assertEquals(options.credential!.password, "foo");
      assertEquals(options.credential!.db, "somedb");
      assertEquals(options.db, "somedb");
      // Query-string options are parsed and coerced ("true" -> boolean).
      assertEquals(options.safe, true);
    },
  });

  it({
    name:
      "should correctly parse mongodb://fred:foobar@localhost,server2.test:28101/baz",
    async fn() {
      const options = await parse(
        "mongodb://fred:foobar@localhost,server2.test:28101/baz",
      );
      assertEquals(options.db, "baz");
      // Comma-separated seed list; the first host has no explicit port and
      // falls back to the MongoDB default 27017.
      assertEquals(options.servers.length, 2);
      assertEquals(options.servers[0].host, "localhost");
      assertEquals(options.servers[0].port, 27017);
      assertEquals(options.servers[1].host, "server2.test");
      assertEquals(options.servers[1].port, 28101);
      assertEquals(options.credential!.username, "fred");
      assertEquals(options.credential!.password, "foobar");
      assertEquals(options.credential!.db, "baz");
    },
  });
  // TODO: add more tests (https://github.com/mongodb/node-mongodb-native/blob/3.6/test/functional/url_parser.test.js)

  it({
    name: "should correctly parse uris with authSource and dbName",
    async fn() {
      const options = await parse(
        "mongodb://a:b@localhost:27017/dbName?authSource=admin2",
      );

      assertEquals(options.db, "dbName");
      assertEquals(options.servers[0].host, "localhost");
      assertEquals(options.servers[0].port, 27017);
      assertEquals(options.credential!.username, "a");
      assertEquals(options.credential!.password, "b");
      // authSource overrides the path database for authentication purposes.
      assertEquals(options.credential!.db, "admin2");
    },
  });
212 |
213 | it({
214 | name: "should correctly parse uris with authSource and dbName",
215 | fn() {
216 | const options = parseSrvUrl(
217 | "mongodb+srv://a:b@somesubdomain.somedomain.com/dbName?authSource=admin2",
218 | );
219 |
220 | assertEquals(options.db, "dbName");
221 | assertEquals(options.credential!.username, "a");
222 | assertEquals(options.credential!.password, "b");
223 | assertEquals(options.credential!.db, "admin2");
224 | },
225 | });
226 |
  it({
    name:
      "should correctly parse mongodb+srv://someUser:somePassword@somesubdomain.somedomain.com/someDatabaseName?retryWrites=true&w=majority",
    fn() {
      const options = parseSrvUrl(
        "mongodb+srv://someUser:somePassword@somesubdomain.somedomain.com/someDatabaseName?retryWrites=true&w=majority",
      );
      assertEquals(options.db, "someDatabaseName");
      assertEquals(options.credential?.username, "someUser");
      assertEquals(options.credential?.password, "somePassword");
      assertEquals(options.retryWrites, true);
      // parseSrvUrl must not emit a server list: with mongodb+srv the
      // servers are discovered later via a DNS SRV lookup.
      // deno-lint-ignore no-explicit-any
      assertEquals((options as any)["servers"], undefined);
    },
  });
242 | });
243 |
--------------------------------------------------------------------------------
/tests/cases/01_auth.ts:
--------------------------------------------------------------------------------
1 | import type { Database } from "../../mod.ts";
2 | import {
3 | cleanUsername,
4 | clientFirstMessageBare,
5 | HI,
6 | passwordDigest,
7 | } from "../../src/auth/mod.ts";
8 | import { MongoClient } from "../../src/client.ts";
9 | import { cleanTestDb, getTestDb } from "../common.ts";
10 | import {
11 | afterAll,
12 | assert,
13 | assertEquals,
14 | beforeAll,
15 | describe,
16 | it,
17 | } from "../deps.ts";
18 |
19 | describe("auth", () => {
20 | describe("prerequisites", () => {
21 | it({
22 | name: "passwordDigest:username:password",
23 | async fn() {
24 | const passwordValids: {
25 | username: string;
26 | password: string;
27 | digest: string;
28 | }[] = [
29 | {
30 | username: "user",
31 | password: "pencil",
32 | digest: "1c33006ec1ffd90f9cadcbcc0e118200",
33 | },
34 | {
35 | username: "test",
36 | password: "test",
37 | digest: "a6de521abefc2fed4f5876855a3484f5",
38 | },
39 | ];
40 | for (const { username, password, digest } of passwordValids) {
41 | const digestRes: string = await passwordDigest(username, password);
42 | assertEquals(digestRes, digest);
43 | }
44 | },
45 | });
46 |
47 | it({
48 | name: "clientFirstMessageBare",
49 | fn() {
50 | const username = "1234";
51 | const nonce = new TextEncoder().encode("qwer");
52 | const result: Uint8Array = clientFirstMessageBare(username, nonce);
53 | const expected: Uint8Array = Uint8Array.from(
54 | [
55 | 110,
56 | 61,
57 | 49,
58 | 50,
59 | 51,
60 | 52,
61 | 44,
62 | 114,
63 | 61,
64 | 99,
65 | 88,
66 | 100,
67 | 108,
68 | 99,
69 | 103,
70 | 61,
71 | 61,
72 | ],
73 | );
74 | assertEquals(expected, result);
75 | },
76 | });
77 |
78 | it({
79 | name: "cleanUsername",
80 | fn() {
81 | const username = "first=12,last=34";
82 | const expected = "first=3D12=2Clast=34";
83 | const result = cleanUsername(username);
84 | assertEquals(expected, result);
85 | },
86 | });
87 |
88 | it({
89 | name: "HI",
90 | async fn() {
91 | const salt = "rQ9ZY3MntBeuP3E1TDVC4w";
92 | const iter = 10000;
93 | const data = "1c33006ec1ffd90f9cadcbcc0e118200";
94 | const saltedPassword = await HI(
95 | data,
96 | (new TextEncoder()).encode(salt),
97 | iter,
98 | "sha1",
99 | );
100 | assertEquals(
101 | new Uint8Array(saltedPassword),
102 | Uint8Array.from([
103 | 72,
104 | 84,
105 | 156,
106 | 182,
107 | 17,
108 | 64,
109 | 30,
110 | 116,
111 | 86,
112 | 233,
113 | 7,
114 | 39,
115 | 65,
116 | 137,
117 | 142,
118 | 164,
119 | 0,
120 | 110,
121 | 78,
122 | 230,
123 | ]),
124 | );
125 | },
126 | });
127 | });
128 |
129 | describe("connection", () => {
130 | let database: Database;
131 | let client: MongoClient;
132 | const hostname = "127.0.0.1";
133 |
134 | beforeAll(async () => {
135 | ({ client, database } = await getTestDb());
136 | await database.createUser("user1", "y3mq3mpZ3J6PGfgg");
137 | await database.createUser("user2", "Qa6WkQSuXF425sWZ");
138 | });
139 |
140 | afterAll(async () => {
141 | await database.dropUser("user1");
142 | await database.dropUser("user2");
143 | await cleanTestDb(client, database);
144 | });
145 |
146 | it("should connect with correct credentials, case 1", async () => {
147 | const username = "user1";
148 | const password = "y3mq3mpZ3J6PGfgg";
149 | const client = new MongoClient();
150 | await client.connect(
151 | `mongodb://${username}:${password}@${hostname}:27017/test`,
152 | );
153 | const names = await client.listDatabases();
154 | assert(names instanceof Array);
155 | assert(names.length > 0);
156 | client.close();
157 | });
158 |
159 | it("should connect with correct credentials, case 2", async () => {
160 | const username = "user2";
161 | const password = "Qa6WkQSuXF425sWZ";
162 | const client = new MongoClient();
163 | await client.connect(
164 | `mongodb://${username}:${password}@${hostname}:27017/test`,
165 | );
166 | const names = await client.listDatabases();
167 | assert(names instanceof Array);
168 | assert(names.length > 0);
169 | client.close();
170 | });
171 | });
172 | });
173 |
--------------------------------------------------------------------------------
/tests/cases/02_connect.ts:
--------------------------------------------------------------------------------
1 | import { MongoClient } from "../../src/client.ts";
2 | import {
3 | afterEach,
4 | assert,
5 | assertEquals,
6 | beforeEach,
7 | describe,
8 | it,
9 | } from "../deps.ts";
10 |
11 | const hostname = "127.0.0.1";
12 |
13 | describe("connect", () => {
14 | let client: MongoClient;
15 |
16 | beforeEach(() => {
17 | client = new MongoClient();
18 | });
19 |
20 | afterEach(() => {
21 | client.close();
22 | });
23 |
24 | it("test connect", async () => {
25 | await client.connect(`mongodb://${hostname}:27017`);
26 | const names = await client.listDatabases();
27 | assert(names instanceof Array);
28 | assert(names.length > 0);
29 | });
30 |
31 | it("test connect With Options", async () => {
32 | await client.connect({
33 | servers: [{ host: hostname, port: 27017 }],
34 | db: "admin",
35 | });
36 | const names = await client.listDatabases();
37 | assert(names instanceof Array);
38 | assert(names.length > 0);
39 | });
40 |
41 | it("test default database name from connection options", async () => {
42 | await client.connect(`mongodb://${hostname}:27017/my-db`);
43 | const db = client.database();
44 | assertEquals(db.name, "my-db");
45 | });
46 |
47 | it("runCommand", async () => {
48 | await client.connect(`mongodb://${hostname}:27017`);
49 | const { databases, ok } = await client.runCommand("admin", {
50 | listDatabases: 1,
51 | });
52 | assert(databases.length > 0);
53 | assertEquals(ok, 1);
54 | });
55 | });
56 |
--------------------------------------------------------------------------------
/tests/cases/04_indexes.ts:
--------------------------------------------------------------------------------
1 | import type { Collection, Database, Document, MongoClient } from "../../mod.ts";
2 | import { cleanTestDb, getTestDb } from "../common.ts";
3 | import {
4 | afterEach,
5 | assertEquals,
6 | beforeEach,
7 | describe,
8 | greaterOrEqual,
9 | it,
10 | parse,
11 | } from "../deps.ts";
12 |
13 | describe(
14 | "indexes",
15 | () => {
16 | let client: MongoClient;
17 | let database: Database;
18 | let collection: Collection;
19 | const testCollectionName = "mongo_test_users";
20 |
21 | beforeEach(async () => {
22 | ({ client, database } = await getTestDb());
23 | collection = database.collection(testCollectionName);
24 | });
25 |
26 | afterEach(async () => {
27 | await cleanTestDb(client, database, testCollectionName);
28 | });
29 |
30 | it("createIndexes", async () => {
31 | const res = await collection.createIndexes({
32 | indexes: [{
33 | name: "_name",
34 | key: { name: 1 },
35 | }],
36 | });
37 | assertEquals(
38 | res,
39 | {
40 | createdCollectionAutomatically: true,
41 | numIndexesBefore: 1,
42 | numIndexesAfter: 2,
43 | ok: 1,
44 | },
45 | );
46 | });
47 |
48 | it("listIndexes", async () => {
49 | await collection.createIndexes({
50 | indexes: [{
51 | name: "_name",
52 | key: { name: 1 },
53 | }],
54 | });
55 | const cursor = collection.listIndexes();
56 | const indexes = await cursor.toArray();
57 |
58 | const expected = greaterOrEqual(parse(client.buildInfo!.version), {
59 | major: 4,
60 | minor: 4,
61 | patch: 0,
62 | })
63 | ? [
64 | { v: 2, key: { _id: 1 }, name: "_id_" },
65 | { v: 2, key: { name: 1 }, name: "_name" },
66 | ]
67 | : [
68 | {
69 | v: 2,
70 | key: { _id: 1 },
71 | name: "_id_",
72 | ns: `test.${testCollectionName}`,
73 | },
74 | {
75 | v: 2,
76 | key: { name: 1 },
77 | name: "_name",
78 | ns: `test.${testCollectionName}`,
79 | },
80 | ];
81 | assertEquals(
82 | indexes,
83 | expected,
84 | );
85 | });
86 |
87 | it("dropIndexes", async () => {
88 | await collection.createIndexes({
89 | indexes: [{
90 | name: "_name2",
91 | key: { name: -1 },
92 | }],
93 | });
94 |
95 | await collection.dropIndexes({
96 | index: "*",
97 | });
98 |
99 | const indexes = await collection.listIndexes().toArray();
100 | const expected = greaterOrEqual(parse(client.buildInfo!.version), {
101 | major: 4,
102 | minor: 4,
103 | patch: 0,
104 | })
105 | ? [
106 | { v: 2, key: { _id: 1 }, name: "_id_" },
107 | ]
108 | : [
109 | {
110 | v: 2,
111 | key: { _id: 1 },
112 | name: "_id_",
113 | ns: `test.${testCollectionName}`,
114 | },
115 | ];
116 | assertEquals(
117 | indexes,
118 | expected,
119 | );
120 | });
121 | },
122 | );
123 |
--------------------------------------------------------------------------------
/tests/cases/05_srv.ts:
--------------------------------------------------------------------------------
1 | import { Srv } from "../../src/utils/srv.ts";
2 | import { assertEquals, assertRejects, describe, it } from "../deps.ts";
3 |
4 | function mockResolver(
5 | srvRecords: Partial[] = [],
6 | txtRecords: string[][] = [],
7 | ) {
8 | return {
9 | resolveDns: (_url: string, type: Deno.RecordType) => {
10 | if (type === "SRV") return srvRecords;
11 | if (type === "TXT") return txtRecords;
12 | },
13 | // deno-lint-ignore no-explicit-any
14 | } as any;
15 | }
16 |
17 | describe("SRV", () => {
18 | it({
19 | name: "SRV: it throws an error if url doesn't have subdomain",
20 | fn() {
21 | assertRejects(
22 | () => new Srv().resolve("foo.bar"),
23 | Error,
24 | "Expected url in format 'host.domain.tld', received foo.bar",
25 | );
26 | },
27 | });
28 |
29 | it({
30 | name:
31 | "SRV: it throws an error if SRV resolution doesn't return any SRV records",
32 | fn() {
33 | assertRejects(
34 | () => new Srv(mockResolver()).resolve("mongohost.mongodomain.com"),
35 | Error,
36 | "Expected at least one SRV record, received 0 for url mongohost.mongodomain.com",
37 | );
38 | },
39 | });
40 |
41 | it({
42 | name: "SRV: it throws an error if TXT resolution returns no records",
43 | fn() {
44 | assertRejects(
45 | () =>
46 | new Srv(
47 | mockResolver([{ target: "mongohost1.mongodomain.com" }]),
48 | ).resolve("mongohost.mongodomain.com"),
49 | Error,
50 | "Expected exactly one TXT record, received 0 for url mongohost.mongodomain.com",
51 | );
52 | },
53 | });
54 |
55 | it({
56 | name:
57 | "SRV: it throws an error if TXT resolution returns more than one record",
58 | fn() {
59 | assertRejects(
60 | () =>
61 | new Srv(
62 | mockResolver(
63 | [{ target: "mongohost1.mongodomain.com" }],
64 | [["replicaSet=rs-0"], ["authSource=admin"]],
65 | ),
66 | ).resolve("mongohost.mongodomain.com"),
67 | Error,
68 | "Expected exactly one TXT record, received 2 for url mongohost.mongodomain.com",
69 | );
70 | },
71 | });
72 |
73 | it({
74 | name: "SRV: it throws an error if TXT record contains illegal options",
75 | fn() {
76 | assertRejects(
77 | () =>
78 | new Srv(
79 | mockResolver(
80 | [{ target: "mongohost1.mongodomain.com" }],
81 | [["replicaSet=rs-0&authSource=admin&ssl=true"]],
82 | ),
83 | ).resolve("mongohost.mongodomain.com"),
84 | Error,
85 | "Illegal uri options: ssl=true",
86 | );
87 | },
88 | });
89 |
90 | it({
91 | name: "SRV: it correctly parses seedlist and options for valid records",
92 | async fn() {
93 | const result = await new Srv(
94 | mockResolver(
95 | [
96 | {
97 | target: "mongohost1.mongodomain.com",
98 | port: 27015,
99 | },
100 | {
101 | target: "mongohost2.mongodomain.com",
102 | port: 27017,
103 | },
104 | ],
105 | [["replicaSet=rs-0&authSource=admin"]],
106 | ),
107 | ).resolve("mongohost.mongodomain.com");
108 | assertEquals(result.servers.length, 2);
109 | const server1 = result.servers.find(
110 | (server) => server.host === "mongohost1.mongodomain.com",
111 | );
112 | const server2 = result.servers.find(
113 | (server) => server.host === "mongohost2.mongodomain.com",
114 | );
115 | assertEquals(server1!.port, 27015);
116 | assertEquals(server2!.port, 27017);
117 | assertEquals(result.options.replicaSet, "rs-0");
118 | assertEquals(result.options.authSource, "admin");
119 | assertEquals(result.options.loadBalanced, undefined);
120 | },
121 | });
122 |
123 | it({
124 | name:
125 | "SRV: it correctly parses seedlist and options for options split in two strings",
126 | async fn() {
127 | const result = await new Srv(
128 | mockResolver(
129 | [
130 | {
131 | target: "mongohost1.mongodomain.com",
132 | port: 27015,
133 | },
134 | {
135 | target: "mongohost2.mongodomain.com",
136 | port: 27017,
137 | },
138 | ],
139 | [["replicaS", "et=rs-0&authSource=admin"]],
140 | ),
141 | ).resolve("mongohost.mongodomain.com");
142 | assertEquals(result.servers.length, 2);
143 | const server1 = result.servers.find(
144 | (server) => server.host === "mongohost1.mongodomain.com",
145 | );
146 | const server2 = result.servers.find(
147 | (server) => server.host === "mongohost2.mongodomain.com",
148 | );
149 | assertEquals(server1!.port, 27015);
150 | assertEquals(server2!.port, 27017);
151 | assertEquals(result.options.replicaSet, "rs-0");
152 | assertEquals(result.options.authSource, "admin");
153 | assertEquals(result.options.loadBalanced, undefined);
154 | },
155 | });
156 | });
157 |
--------------------------------------------------------------------------------
/tests/cases/06_gridfs.ts:
--------------------------------------------------------------------------------
1 | import { GridFSBucket, type MongoClient } from "../../mod.ts";
2 | import { getClient } from "../common.ts";
3 | import {
4 | afterAll,
5 | afterEach,
6 | assert,
7 | assertEquals,
8 | assertNotEquals,
9 | beforeEach,
10 | describe,
11 | it,
12 | } from "../deps.ts";
13 |
14 | async function streamReadAll(readable: ReadableStream): Promise {
15 | return new Uint8Array(await new Response(readable).arrayBuffer());
16 | }
17 |
18 | describe("GridFS", () => {
19 | let client: MongoClient;
20 | const testDatabaseName = "test";
21 |
22 | beforeEach(async () => {
23 | client = await getClient();
24 | });
25 |
26 | afterEach(() => {
27 | client.close();
28 | });
29 |
30 | afterAll(async () => {
31 | const client = await getClient();
32 | const database = client.database(testDatabaseName);
33 |
34 | await new GridFSBucket(database, { bucketName: "deno_logo" })
35 | .drop().catch((e) => e);
36 | await new GridFSBucket(database, { bucketName: "echo" })
37 | .drop().catch((e) => e);
38 | await new GridFSBucket(database, { bucketName: "metadata" })
39 | .drop().catch((e) => e);
40 | await new GridFSBucket(database, { bucketName: "delete" })
41 | .drop().catch((e) => e);
42 |
43 | await database.dropDatabase().catch((e) => e);
44 | client.close();
45 | });
46 |
47 | it("GridFS: Echo small Hello World", async () => {
48 | const bucket = new GridFSBucket(client.database(testDatabaseName), {
49 | bucketName: "echo",
50 | });
51 | const upstream = await bucket.openUploadStream("test.txt");
52 | const writer = upstream.getWriter();
53 | await writer.write(new TextEncoder().encode("Hello World! 👋"));
54 | await writer.close();
55 |
56 | const getId =
57 | (await bucket.find({ filename: "test.txt" }).toArray())[0]._id;
58 |
59 | assert(getId);
60 |
61 | const text = await new Response(await bucket.openDownloadStream(getId))
62 | .text();
63 |
64 | assertEquals(text, "Hello World! 👋");
65 | });
66 |
67 | it("GridFS: Echo large Image", async () => {
68 | const bucket = new GridFSBucket(client.database(testDatabaseName), {
69 | bucketName: "A",
70 | });
71 |
72 | // Set an impractically low chunkSize to test chunking algorithm
73 | const upstream = await bucket.openUploadStream("1.jpg", {
74 | chunkSizeBytes: 255 * 8,
75 | });
76 |
77 | const image = await Deno.open("tests/assets/1.jpg", { read: true });
78 | await image.readable.pipeTo(upstream);
79 |
80 | const [{ _id }] = await bucket.find({ filename: "1.jpg" }).toArray();
81 |
82 | const expected = await Deno.readFile("tests/assets/1.jpg");
83 | const actual = await streamReadAll(await bucket.openDownloadStream(_id));
84 |
85 | assertEquals(actual, expected);
86 | });
87 |
88 | it(
89 | "GridFS: Echo large Image (compare with different Image)",
90 | async () => {
91 | const bucket = new GridFSBucket(client.database(testDatabaseName), {
92 | bucketName: "A",
93 | });
94 |
95 | // Set an impractically low chunkSize to test chunking algorithm
96 | const upstream = await bucket.openUploadStream("1.jpg", {
97 | chunkSizeBytes: 255 * 8,
98 | });
99 |
100 | const image = await Deno.open("tests/assets/1.jpg", { read: true });
101 | await image.readable.pipeTo(upstream);
102 |
103 | const [{ _id }] = await bucket.find({ filename: "1.jpg" }).toArray();
104 |
105 | const notExpected = await Deno.readFile("tests/assets/2.jpg");
106 | const actual = await streamReadAll(await bucket.openDownloadStream(_id));
107 |
108 | assertNotEquals(actual, notExpected);
109 | },
110 | );
111 | it(
112 | "GridFS: Metadata does get stored correctly",
113 | async () => {
114 | const bucket = new GridFSBucket(client.database(testDatabaseName), {
115 | bucketName: "metadata",
116 | });
117 | const upstream = await bucket.openUploadStream("metadata.txt", {
118 | metadata: {
119 | helloWorld: "this is a test",
120 | },
121 | });
122 | const writer = upstream.getWriter();
123 | await writer.write(new TextEncoder().encode("Hello World! 👋"));
124 | await writer.close();
125 |
126 | const file =
127 | (await bucket.find({ filename: "metadata.txt" }).toArray())[0];
128 |
129 | assertEquals("this is a test", file.metadata?.helloWorld);
130 | },
131 | );
132 |
133 | it(
134 | "GridFS: Delete does work as expected",
135 | async () => {
136 | const bucket = new GridFSBucket(client.database(testDatabaseName), {
137 | bucketName: "delete",
138 | });
139 | const upstream = await bucket.openUploadStream("stuff.txt");
140 | const writer = upstream.getWriter();
141 | await writer.write(new TextEncoder().encode("[redacted]"));
142 | await writer.close();
143 |
144 | let file = await bucket.find({ filename: "stuff.txt" }).toArray();
145 | assert(file[0]);
146 | await bucket.delete(file[0]._id);
147 | file = await bucket.find({ filename: "stuff.txt" }).toArray();
148 | assert(!file[0]);
149 | },
150 | );
151 |
152 | // https://www.mongodb.com/docs/manual/reference/command/createIndexes/#considerations
153 | it(
154 | "GridFS: Creating indexes - skip index creation on same index keys",
155 | async () => {
156 | const addAsset = async (index: number) => {
157 | const database = client.database(testDatabaseName);
158 | const bucket = new GridFSBucket(database, {
159 | bucketName: "sameKeys",
160 | });
161 | const upstream = await bucket.openUploadStream(`test-asset-${index}`);
162 | const writer = upstream.getWriter();
163 | await writer.write(new TextEncoder().encode(`[asset${index}]`));
164 | await writer.close();
165 | return {
166 | files: await database.collection("sameKeys.files").listIndexes()
167 | .toArray(),
168 | chunks: await database.collection("sameKeys.chunks").listIndexes()
169 | .toArray(),
170 | };
171 | };
172 | assertEquals(await addAsset(0), await addAsset(1));
173 | },
174 | );
175 | });
176 |
--------------------------------------------------------------------------------
/tests/cases/07_worker.ts:
--------------------------------------------------------------------------------
1 | import { assertEquals, describe, it } from "../deps.ts";
2 |
3 | describe("worker", () => {
4 | it({
5 | name: "WORKER: Deno does not throw when deno_mongo is imported in worker",
6 | fn: async () => {
7 | const importWorker = new Worker(
8 | import.meta.resolve("./import_worker.ts"),
9 | { type: "module" },
10 | );
11 | const p = Promise.withResolvers();
12 | importWorker.onmessage = (e) => p.resolve(e.data);
13 | importWorker.postMessage("startWorker");
14 |
15 | const result = await p.promise;
16 | importWorker.terminate();
17 | assertEquals(result, "done");
18 | },
19 | });
20 | });
21 |
--------------------------------------------------------------------------------
/tests/cases/08_find_cursor.ts:
--------------------------------------------------------------------------------
1 | import { FindCursor } from "../../src/collection/commands/find.ts";
2 | import type { WireProtocol } from "../../src/protocol/protocol.ts";
3 | import { assertEquals, describe, it } from "../deps.ts";
4 |
5 | describe("find cursor", () => {
6 | it({
7 | name:
8 | "FindCursor: Options object is immutable and not shared between cursors",
9 | fn: () => {
10 | const FIND_OPTIONS: { limit?: number } = {};
11 |
12 | const cursor_a = new FindCursor<{ id: number }>({
13 | filter: {},
14 | protocol: {} as WireProtocol,
15 | collectionName: "test-collection-name",
16 | dbName: "test-db-name",
17 | options: FIND_OPTIONS,
18 | });
19 |
20 | cursor_a.limit(10);
21 |
22 | assertEquals(FIND_OPTIONS.limit, undefined);
23 | },
24 | });
25 | });
26 |
--------------------------------------------------------------------------------
/tests/cases/10_command_helpers.ts:
--------------------------------------------------------------------------------
1 | import { MongoClient } from "../../mod.ts";
2 | import { assert, assertEquals, describe, it } from "../deps.ts";
3 |
4 | describe("command helpers", () => {
5 | it({
6 | name: "db.dropDatabase",
7 | fn: async () => {
8 | const client = new MongoClient();
9 | const databaseName = `TEST_DATABASE_MUST_NOT_MATCH_${+new Date()}`;
10 | const db = await client.connect(
11 | `mongodb://127.0.0.1:27017/${databaseName}`,
12 | );
13 | const collectioName = `${databaseName}_collection`;
14 |
15 | // To create database physically
16 | await db.createCollection<{ foo: string }>(`${collectioName}`);
17 |
18 | // A sanity check to test existence of the collection inside the test db
19 | assertEquals((await db.listCollectionNames()).length, 1);
20 | const result = await db.dropDatabase();
21 |
22 | assert(result);
23 | assertEquals(result.ok, 1);
24 |
25 | // The collection inside the test db must not exist
26 | assertEquals((await db.listCollectionNames()).length, 0);
27 |
28 | client.close();
29 | },
30 | });
31 | });
32 |
--------------------------------------------------------------------------------
/tests/cases/import_worker.ts:
--------------------------------------------------------------------------------
1 | ///
2 | ///
3 |
4 | import {} from "../../mod.ts";
5 |
6 | globalThis.onmessage = (_e) => {
7 | self.postMessage("done");
8 | self.close();
9 | };
10 |
--------------------------------------------------------------------------------
/tests/common.ts:
--------------------------------------------------------------------------------
1 | import { type Database, MongoClient } from "../mod.ts";
2 |
3 | const hostname = "127.0.0.1";
4 |
5 | export async function getClient(): Promise {
6 | const client = new MongoClient();
7 | await client.connect(`mongodb://${hostname}:27017`);
8 | return client;
9 | }
10 |
11 | export async function getTestDb(): Promise<
12 | { client: MongoClient; database: Database }
13 | > {
14 | const client = await getClient();
15 | return {
16 | client,
17 | database: client.database("test"),
18 | };
19 | }
20 |
21 | export async function cleanTestDb(
22 | client: MongoClient,
23 | database: Database,
24 | collectionNames?: string[] | string,
25 | ) {
26 | if (typeof collectionNames === "string") {
27 | collectionNames = [collectionNames];
28 | }
29 | if (collectionNames !== undefined) {
30 | for (const collectionName of collectionNames) {
31 | await database.collection(collectionName).drop().catch((e) => e);
32 | }
33 | }
34 | await database.dropDatabase().catch((e) => e);
35 | client.close();
36 | }
37 |
--------------------------------------------------------------------------------
/tests/deps.ts:
--------------------------------------------------------------------------------
// Centralized test-only dependencies, re-exported so every test case can
// import assertions, BDD helpers and semver utilities from one module
// ("../deps.ts") with pinned versions.
export {
  assert,
  assertEquals,
  assertNotEquals,
  assertRejects,
  assertThrows,
} from "jsr:@std/assert@^0.220.1";
export {
  afterAll,
  afterEach,
  beforeAll,
  beforeEach,
  describe,
  it,
} from "jsr:@std/testing@^0.220.1/bdd";
export { greaterOrEqual, parse } from "jsr:@std/semver@^0.220.1";
17 |
--------------------------------------------------------------------------------