├── index.bson.br ├── lerna.json ├── snippets ├── change-streams-monitor │ ├── index.js │ ├── package.json │ ├── LICENSE │ ├── package-lock.json │ ├── README.md │ └── changestreammonitor.js ├── mongocompat │ ├── index.js │ ├── README.md │ ├── package.json │ ├── error-matchers.js │ ├── mongonative.js │ ├── test.js │ └── mongotypes.js ├── uuidhelpers │ ├── README.md │ ├── package.json │ ├── index.js │ └── LICENSE ├── resumetoken │ ├── error-matchers.js │ ├── README.md │ ├── package.json │ ├── index.js │ └── package-lock.json ├── spawn-mongod │ ├── README.md │ ├── package.json │ ├── index.js │ └── LICENSE ├── analyze-schema │ ├── package.json │ ├── README.md │ ├── index.js │ └── LICENSE ├── mock-collection │ ├── package.json │ ├── README.md │ ├── package-lock.json │ ├── index.js │ └── LICENSE └── llm-command │ ├── package.json │ ├── index.js │ ├── README.md │ ├── LICENSE │ └── package-lock.json ├── .github └── workflows │ ├── CODEOWNERS │ ├── check-test.yaml │ └── cron-tasks.yml ├── .gitignore ├── scripts ├── show-index.js └── make-index.js ├── package.json └── README.md /index.bson.br: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mongodb-labs/mongosh-snippets/HEAD/index.bson.br -------------------------------------------------------------------------------- /lerna.json: -------------------------------------------------------------------------------- 1 | { 2 | "packages": [ 3 | "snippets/*" 4 | ], 5 | "version": "independent" 6 | } 7 | -------------------------------------------------------------------------------- /snippets/change-streams-monitor/index.js: -------------------------------------------------------------------------------- 1 | (() => { 2 | load(__dirname + '/changestreammonitor.js'); 3 | })(); 4 | -------------------------------------------------------------------------------- /.github/workflows/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @mongodb-labs/mongosh-snippets-developers 2 | /.github/CODEOWNERS @mongodb-labs/dbx-devtools-admins -------------------------------------------------------------------------------- /snippets/mongocompat/index.js: -------------------------------------------------------------------------------- 1 | load(__dirname + '/mongonative.js'); 2 | load(__dirname + '/mongoassert.js'); 3 | load(__dirname + '/mongotypes.js'); 4 | -------------------------------------------------------------------------------- /snippets/uuidhelpers/README.md: -------------------------------------------------------------------------------- 1 | # uuidhelpers 2 | 3 | A new home for the C#/.NET driver's [uuidhelpers.js](https://github.com/mongodb/mongo-csharp-driver/blob/ac2b2a61c6b7a193cf0266dfb8c65f86c2bf7572/uuidhelpers.js). 4 | -------------------------------------------------------------------------------- /snippets/resumetoken/error-matchers.js: -------------------------------------------------------------------------------- 1 | module.exports = [ 2 | { 3 | matches: [ 4 | /\bdecodeResumeToken is not (defined|a function)\b/ 5 | ], 6 | message: 'Try running `snippet install resumetoken` first' 7 | } 8 | ]; 9 | -------------------------------------------------------------------------------- /snippets/mongocompat/README.md: -------------------------------------------------------------------------------- 1 | # mongocompat 2 | 3 | Provide `mongo` legacy shell compatibility APIs. 
4 | 5 | ```js 6 | > Array.sum([1, 2, 3]) 7 | 6 8 | > tojsononeline({a:1,b:2,c:3}) 9 | { "a" : 1, "b" : 2, "c" : 3 } 10 | ``` 11 | -------------------------------------------------------------------------------- /snippets/resumetoken/README.md: -------------------------------------------------------------------------------- 1 | # resumetoken 2 | 3 | Decode change stream resume tokens. 4 | 5 | ```js 6 | > decodeResumeToken('82612F617F000000012B022C0100296E5A100492EF51FC540B4ED5AC1D50BA2C9C519C46645F69640064612F617F37A5DD163BA238230004') 7 | 8 | ``` 9 | -------------------------------------------------------------------------------- /snippets/uuidhelpers/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@mongosh/snippet-uuidhelpers", 3 | "snippetName": "uuidhelpers", 4 | "version": "0.1.2", 5 | "description": "Mongosh snippet containing various small utilities for working with UUIDs.", 6 | "main": "index.js", 7 | "license": "Apache-2.0", 8 | "publishConfig": { 9 | "access": "public" 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /snippets/spawn-mongod/README.md: -------------------------------------------------------------------------------- 1 | # spawn-mongod 2 | 3 | Spin up a local mongod process. 4 | 5 | ```js 6 | // Copy data from a collection on the current server to a new server: 7 | > const mongod = spawnMongod({ version: 'latest', port: 27097 }) 8 | > mongod.waitReady 9 | listening! 10 | > db.coll.find().forEach(doc => mongod.getDB('test').coll.insertOne(doc)) 11 | ``` 12 | -------------------------------------------------------------------------------- /snippets/analyze-schema/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@mongosh/snippet-analyze-schema", 3 | "snippetName": "analyze-schema", 4 | "version": "1.0.6", 5 | "description": "schema(db.coll)", 6 | "main": "index.js", 7 | "license": "Apache-2.0", 8 | "dependencies": { 9 | "mongodb-schema": "^9.0.0" 10 | }, 11 | "publishConfig": { 12 | "access": "public" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | .lock-wscript 3 | .idea/ 4 | .vscode/ 5 | *.iml 6 | .npmrc 7 | .nvmrc 8 | .nyc_output 9 | *.swp 10 | lerna-debug.log 11 | lib-cov 12 | npm-debug.log 13 | .idea/ 14 | coverage/ 15 | dist/ 16 | node_modules/ 17 | .lock-wscript 18 | .cache/ 19 | expansions.yaml 20 | tmp/expansions.yaml 21 | .evergreen/mongodb 22 | tmp/ 23 | dist.tgz 24 | mongocryptd.pid 25 | !package-lock.json 26 | -------------------------------------------------------------------------------- /snippets/mock-collection/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@mongosh/snippet-mock-collection", 3 | "snippetName": "mock-collection", 4 | "version": "1.0.3", 5 | "description": "mockCollection([{ a: 1 }, { a: 2 }]).find({ a: { $gt: 2 } })", 6 | "main": "index.js", 7 | "license": "Apache-2.0", 8 | "publishConfig": { 9 | "access": "public" 10 | }, 11 | "dependencies": { 12 | "bson": "^4.3.0" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /snippets/spawn-mongod/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@mongosh/snippet-spawn-mongod", 3 
| "snippetName": "spawn-mongod", 4 | "version": "1.0.2", 5 | "description": "Spin up a local mongod process", 6 | "main": "index.js", 7 | "license": "Apache-2.0", 8 | "publishConfig": { 9 | "access": "public" 10 | }, 11 | "dependencies": { 12 | "download": "^8.0.0", 13 | "mongodb-download-url": "^1.0.0" 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /snippets/resumetoken/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@mongosh/snippet-resumetoken", 3 | "snippetName": "resumetoken", 4 | "version": "1.1.1", 5 | "description": "Resume token decoder script", 6 | "main": "index.js", 7 | "license": "SSPL", 8 | "errorMatchers": "error-matchers.js", 9 | "publishConfig": { 10 | "access": "public" 11 | }, 12 | "dependencies": { 13 | "mongodb-resumetoken-decoder": "^1.1.0" 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /snippets/change-streams-monitor/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@mongosh/snippet-change-stream-monitor", 3 | "snippetName": "change-stream-monitor", 4 | "version": "0.2.2", 5 | "description": "Mongosh snippet that allows users to monitor Change Streams on the current server.", 6 | "main": "index.js", 7 | "license": "MIT", 8 | "publishConfig": { 9 | "access": "public" 10 | }, 11 | "dependencies": { 12 | "boks": "^1.0.3", 13 | "to-tabel": "^1.0.3" 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /scripts/show-index.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | const { promises: fs } = require('fs'); 3 | const path = require('path'); 4 | const bson = require('bson'); 5 | const zlib = require('zlib'); 6 | const util = require('util'); 7 | 8 | (async() => { 9 | const source = await fs.readFile(path.join(__dirname, '..', 'index.bson.br')); 10 | console.dir(bson.deserialize(await util.promisify(zlib.brotliDecompress)(source)), { depth: Infinity }); 11 | })().catch(err => { process.nextTick(() => { throw err; }); }); 12 | -------------------------------------------------------------------------------- /snippets/mongocompat/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@mongosh/snippet-mongocompat", 3 | "snippetName": "mongocompat", 4 | "version": "1.0.20", 5 | "description": "mongo compatibility script for mongosh", 6 | "main": "index.js", 7 | "errorMatchers": "error-matchers.js", 8 | "license": "SSPL", 9 | "publishConfig": { 10 | "access": "public" 11 | }, 12 | "scripts": { 13 | "test": "mongosh --nodb test.js" 14 | }, 15 | "devDependencies": { 16 | "mongosh": "^2.5.8" 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /snippets/llm-command/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@mongosh/snippet-llm-command", 3 | "snippetName": "llm-command", 4 | "version": "0.0.9", 5 | "description": "Provides a llm command to ask for MongoDB query expressions in natural language, using Groq or any other local model (ollama)", 6 | "author": "Juanan Pereira (https://ikasten.io)", 7 | "main": "index.js", 8 | "license": "Apache-2.0", 9 | "publishConfig": { 10 | "access": "public" 11 | }, 12 | "dependencies": { 13 | "groq-sdk": "^0.5.0", 14 | 
"node-fetch-commonjs": "^3.3.2" 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /snippets/mock-collection/README.md: -------------------------------------------------------------------------------- 1 | # mock-collection 2 | 3 | Provide a fake read-only collection based on input documents. 4 | 5 | ```js 6 | > mockCollection([{ field: 1 },{ field: 2},{ field: 3 }]) 7 | > coll.find({ field: {$gt:2} }, { _id: 1 }) 8 | [ { _id: ObjectId("6079840f2454d2cd1073ba6c") } ] 9 | > coll.mapReduce(function() { emit('fieldValue', this.field); }, 10 | ... function(key, values) { return key + values.join(','); }, 11 | ... { out: { inline: 1 } }) 12 | { results: [ { _id: 'fieldValue', value: 'fieldValue1,2,3' } ], ok: 1 } 13 | ``` 14 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "root", 3 | "private": true, 4 | "devDependencies": { 5 | "bson": "^4.3.0", 6 | "lerna": "^4.0.0" 7 | }, 8 | "scripts": { 9 | "make-index": "node scripts/make-index", 10 | "show-index": "node scripts/show-index", 11 | "test": "lerna run test" 12 | }, 13 | "homepage": "https://github.com/mongodb-labs/mongosh-snippets", 14 | "repository": { 15 | "type": "git", 16 | "url": "https://github.com/mongodb-labs/mongosh-snippets.git" 17 | }, 18 | "bugs": { 19 | "url": "https://github.com/mongodb-labs/mongosh-snippets/issues" 20 | }, 21 | "overrides": { 22 | "node-gyp": "^11.4.2" 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /snippets/resumetoken/index.js: -------------------------------------------------------------------------------- 1 | (() => { 2 | const localRequire = require('module').createRequire(__filename); 3 | const decodeImpl = localRequire('mongodb-resumetoken-decoder').decodeResumeToken; 4 | 5 | globalThis.decodeResumeToken = function(token) { 6 | if (typeof token === 'string') { 7 | return decodeImpl(token); 8 | } 9 | if (token && typeof token._data === 'string') { 10 | return decodeImpl(token._data); 11 | } 12 | if (token && token._id && typeof token._id._data === 'string') { 13 | return decodeImpl(token._id._data); 14 | } 15 | throw new Error(`Unknown token format, expected string: ${token}`); 16 | } 17 | })(); 18 | 19 | -------------------------------------------------------------------------------- /.github/workflows/check-test.yaml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: 4 | pull_request: 5 | 6 | workflow_dispatch: 7 | push: 8 | branches: 9 | - main 10 | 11 | permissions: 12 | contents: read # we just need to checkout the repo 13 | 14 | jobs: 15 | test: 16 | name: Test 17 | 18 | timeout-minutes: 45 19 | 20 | strategy: 21 | matrix: 22 | os: [ubuntu-latest] 23 | node: [20.x, 24.x] 24 | fail-fast: false 25 | 26 | runs-on: ${{ matrix.os }} 27 | 28 | steps: 29 | - name: Setup Node.js 30 | uses: actions/setup-node@v4 31 | with: 32 | node-version: ${{ matrix.node }} 33 | 34 | - name: Checkout 35 | uses: actions/checkout@v5 36 | 37 | - name: Install Dependencies 38 | run: | 39 | npm ci 40 | npx lerna bootstrap 41 | shell: bash 42 | 43 | - name: Run Tests 44 | run: npm run test -- --stream 45 | shell: bash 46 | -------------------------------------------------------------------------------- /snippets/mongocompat/error-matchers.js: -------------------------------------------------------------------------------- 1 
| module.exports = [ 2 | { 3 | matches: [ 4 | /\b(tojson|tojsononeline|tojsonObject|printjsononeline|isString|isNumber|isObject) is not defined/, 5 | /\b(cd|pwd|getHostName|hostname|_rand|_isWindows|cat|getMemInfo|isInteractive|listFiles|ls|md5sumFile|mkdir|removefile) is not defined/, 6 | /\.tojson is not a function/, 7 | /(\.getTime|\.getInc|\.toStringIncomparable) is not a function/, 8 | /(\.ltrim|\.rtrim|\.pad) is not a function/, 9 | /(\.toPercentStr|\.zeroPad) is not a function/, 10 | /\bDate\.timeFunc is not a function/, 11 | /\bRegExp\.escape is not a function/, 12 | /\bArray\.(contains|unique|shuffle|fetchRefs|sum|avg|stdDev) is not a function/, 13 | /\bObject\.(extend|bsonsize|merge|keySet) is not a function/, 14 | /\bObjectId\.fromDate is not a function/ 15 | ], 16 | message: 'Are you trying to run a script written for the legacy shell? Try running `snippet install mongocompat`' 17 | } 18 | ]; 19 | -------------------------------------------------------------------------------- /.github/workflows/cron-tasks.yml: -------------------------------------------------------------------------------- 1 | on: 2 | # Once a week or on pushes to master 3 | schedule: 4 | - cron: "0 3 * * 0" 5 | push: 6 | branches: 7 | - main 8 | 9 | jobs: 10 | update_index: 11 | name: Update index file 12 | runs-on: ubuntu-latest 13 | steps: 14 | - uses: actions/checkout@v1 15 | - uses: actions/setup-node@v2 16 | - name: Install Dependencies 17 | run: npm ci 18 | - name: Set up Git 19 | run: | 20 | git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com" 21 | git config --local user.name "github-actions[bot]" 22 | - name: Update index.bson.br 23 | run: npm run make-index 24 | - name: Verify readability index.bson.br 25 | run: npm run show-index 26 | - name: Commit index.bson.br changes 27 | run: | 28 | git commit --no-allow-empty -m "chore: update index.bson.br" index.bson.br || true 29 | - name: Push updates 30 | uses: ad-m/github-push-action@v0.6.0 31 | with: 32 | github_token: ${{ secrets.GITHUB_TOKEN }} 33 | branch: ${{ github.ref }} 34 | -------------------------------------------------------------------------------- /snippets/change-streams-monitor/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Edward Mallia 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
-------------------------------------------------------------------------------- /snippets/change-streams-monitor/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@mongosh/snippet-change-stream-monitor", 3 | "version": "0.2.2", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "@mongosh/snippet-change-stream-monitor", 9 | "version": "0.2.1", 10 | "license": "MIT", 11 | "dependencies": { 12 | "boks": "^1.0.3", 13 | "to-tabel": "^1.0.3" 14 | } 15 | }, 16 | "node_modules/ansi-regex": { 17 | "version": "5.0.1", 18 | "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", 19 | "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", 20 | "license": "MIT", 21 | "engines": { 22 | "node": ">=8" 23 | } 24 | }, 25 | "node_modules/boks": { 26 | "version": "1.0.3", 27 | "resolved": "https://registry.npmjs.org/boks/-/boks-1.0.3.tgz", 28 | "integrity": "sha512-rM9ftjL4Bv2ynZx/NIIV6n8uz/8ZQR1NBESrvSF972fvCchUyAbwqZSw6rytc3Nm3Rlgoxqai5KrEbWBBVSvHQ==", 29 | "license": "Apache-2.0", 30 | "engines": { 31 | "node": ">=10.0.0" 32 | } 33 | }, 34 | "node_modules/to-tabel": { 35 | "version": "1.0.3", 36 | "resolved": "https://registry.npmjs.org/to-tabel/-/to-tabel-1.0.3.tgz", 37 | "integrity": "sha512-/FNQWQXQLoEOLUbOLhk+PTKeHlxS4dcOAlK4yXJ/tDfv+xjPssOi7IWaFBaLA3AiKeIOXqcpcoSjNXznNnzkzw==", 38 | "license": "Apache-2.0", 39 | "dependencies": { 40 | "ansi-regex": "^5.0.0", 41 | "boks": "^1.0.3" 42 | }, 43 | "engines": { 44 | "node": ">=8.5.0" 45 | } 46 | } 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /scripts/make-index.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | const { promises: fs } = require('fs'); 3 | const path = require('path'); 4 | const bson = require('bson'); 5 | const zlib = require('zlib'); 6 | const util = require('util'); 7 | 8 | (async() => { 9 | const snippetsDir = path.join(__dirname, '..', 'snippets'); 10 | const index = []; 11 | for await (const dir of await fs.opendir(snippetsDir)) { 12 | if (!dir.isDirectory()) continue; 13 | const pjsonPath = path.join(snippetsDir, dir.name, 'package.json'); 14 | const pjson = JSON.parse(await fs.readFile(pjsonPath, 'utf8')); 15 | if (pjson.errorMatchers) { 16 | pjson.errorMatchers = require(path.join(snippetsDir, dir.name, pjson.errorMatchers)); 17 | } 18 | try { 19 | pjson.readme = await fs.readFile(path.join(snippetsDir, dir.name, 'README.md'), 'utf8'); 20 | } catch (err) { 21 | if (err.code !== 'ENOENT') throw err; 22 | } 23 | index.push(pjson); 24 | } 25 | 26 | const ownPjsonPath = path.join(__dirname, '..', 'package.json'); 27 | const ownPjson = JSON.parse(await fs.readFile(ownPjsonPath, 'utf8')); 28 | const metadata = (({ homepage, repository, bugs }) => ({ homepage, repository, bugs }))(ownPjson); 29 | const indexFileContents = { 30 | indexFileVersion: 1, 31 | index, 32 | metadata 33 | }; 34 | 35 | const data = await util.promisify(zlib.brotliCompress)(bson.serialize(indexFileContents), { 36 | params: { 37 | [zlib.constants.BROTLI_PARAM_QUALITY]: zlib.constants.BROTLI_MAX_QUALITY 38 | } 39 | }); 40 | await fs.writeFile(path.join(__dirname, '..', 'index.bson.br'), data); 41 | })().catch(err => { process.nextTick(() => { throw err; }); }); 42 | -------------------------------------------------------------------------------- /snippets/analyze-schema/README.md: 
-------------------------------------------------------------------------------- 1 | # analyze-schema 2 | 3 | Analyze the schema of a collection or a cursor. 4 | 5 | ```js 6 | > schema(db.coll); 7 | ┌─────────┬───────┬───────────┬────────────┐ 8 | │ (index) │ 0 │ 1 │ 2 │ 9 | ├─────────┼───────┼───────────┼────────────┤ 10 | │ 0 │ '_id' │ '100.0 %' │ 'ObjectID' │ 11 | │ 1 │ 'a ' │ '50.0 %' │ 'Number' │ 12 | │ 2 │ 'a ' │ '50.0 %' │ 'String' │ 13 | └─────────┴───────┴───────────┴────────────┘ 14 | > schema(db.coll.find({ })); 15 | ┌─────────┬───────┬───────────┬────────────┐ 16 | │ (index) │ 0 │ 1 │ 2 │ 17 | ├─────────┼───────┼───────────┼────────────┤ 18 | │ 0 │ '_id' │ '100.0 %' │ 'ObjectID' │ 19 | │ 1 │ 'a ' │ '100.0 %' │ 'Number' │ 20 | └─────────┴───────┴───────────┴────────────┘ 21 | > schema(db.test.aggregate([{ $group: { _id: null, count: { $sum: 1 } } }])); 22 | ┌─────────┬─────────┬───────────┬──────────┐ 23 | │ (index) │ 0 │ 1 │ 2 │ 24 | ├─────────┼─────────┼───────────┼──────────┤ 25 | │ 0 │ '_id ' │ '100.0 %' │ 'Null' │ 26 | │ 1 │ 'count' │ '100.0 %' │ 'Number' │ 27 | └─────────┴─────────┴───────────┴──────────┘ 28 | > schema(db.coll, { verbose: true }); 29 | { 30 | fields: [ 31 | { 32 | name: '_id', 33 | // [ ... ] 34 | }, 35 | { 36 | path: 'a', 37 | count: 2, 38 | types: [ 39 | { 40 | name: 'Number', 41 | path: 'a', 42 | probability: 0.5, 43 | unique: 1, 44 | // [ ... ] 45 | }, 46 | { 47 | name: 'String', 48 | bsonType: 'String', 49 | // [ ... ] 50 | } 51 | ], 52 | total_count: 2, 53 | type: [ 'Number', 'String' ], 54 | probability: 1 55 | } 56 | ], 57 | count: 2 58 | } 59 | 60 | ``` 61 | -------------------------------------------------------------------------------- /snippets/mongocompat/mongonative.js: -------------------------------------------------------------------------------- 1 | var cd = process.chdir; 2 | var getHostName = os.hostname; 3 | var hostname = os.hostname; 4 | var pwd = process.cwd; 5 | var _rand = Math.random; 6 | var _isWindows = () => process.platform === 'win32'; 7 | 8 | function cat(filename, useBinaryMode) { 9 | let contents = fs.readFileSync(filename, 'utf8'); 10 | if (!useBinaryMode && _isWindows()) { 11 | contents = contents.replace(/(? 
!arg.startsWith('-')); 24 | if (argv.includes('--nodb')) { 25 | return loadedFiles.length === 0; 26 | } else { 27 | return loadedFiles.length <= 1; 28 | } 29 | } 30 | 31 | function listFiles(dir = '.') { 32 | const files = fs.readdirSync(dir, { withFileTypes: true }); 33 | return files.map(dirent => { 34 | const obj = { baseName: dirent.name, name: path.join(dir, dirent.name) }; 35 | obj.isDirectory = dirent.isDirectory(); 36 | if (dirent.isFile()) { 37 | obj.size = fs.statSync(obj.name).size; 38 | } 39 | return obj; 40 | }); 41 | } 42 | 43 | function ls(dir) { 44 | return listFiles(dir).map(file => file.name); 45 | } 46 | 47 | function md5sumFile(filename) { 48 | return crypto.createHash('md5').update(fs.readFileSync(filename)).digest('hex'); 49 | } 50 | 51 | function mkdir(path) { 52 | const ret = fs.mkdirSync(path, { recursive: true }); 53 | if (ret === undefined) { 54 | return { exists: true, created: false }; 55 | } else { 56 | return { exists: true, created: true }; 57 | } 58 | } 59 | 60 | function removeFile(path) { 61 | let existed = false; 62 | try { 63 | fs.statSync(path); 64 | existed = true; 65 | } catch {} 66 | fs.rmSync(path, { recursive: true, force: true }); 67 | return existed; 68 | } 69 | -------------------------------------------------------------------------------- /snippets/analyze-schema/index.js: -------------------------------------------------------------------------------- 1 | (() => { 2 | const localRequire = require('module').createRequire(__filename); 3 | const schema = localRequire('mongodb-schema'); 4 | const { Readable, PassThrough } = localRequire('stream'); 5 | const { Console } = localRequire('console'); 6 | 7 | globalThis.schema = function(collOrCursor, options = {}) { 8 | let cursor; 9 | if (typeof collOrCursor.tryNext === 'function') { 10 | cursor = collOrCursor; 11 | } else { 12 | const size = Math.min(Math.max(20, collOrCursor.estimatedDocumentCount() * 0.04), 10000); 13 | cursor = collOrCursor.aggregate([{$sample: { size: Math.ceil(size) }}]); 14 | } 15 | 16 | const schemaStream = schema.stream({ semanticTypes: true, ...options }); 17 | let result; 18 | schemaStream.on('data', (data) => result = data); 19 | 20 | let doc; 21 | while ((doc = cursor.tryNext()) !== null) { 22 | schemaStream.write(doc); 23 | } 24 | schemaStream.end(); 25 | sleep(0); 26 | 27 | if (options.verbose) { 28 | return result; 29 | } 30 | 31 | const simplified = []; 32 | let maxFieldPathLength = 0; 33 | for (const field of allFields(result.fields)) { 34 | maxFieldPathLength = Math.max(maxFieldPathLength, field.path.length); 35 | const types = field.types || [{ name: field.type, probability: 1 }]; 36 | for (const { probability, name } of types) { 37 | simplified.push([field.path, `${(probability * 100).toFixed(1)} %`, name]); 38 | } 39 | } 40 | 41 | for (const entry of simplified) { 42 | entry[0] = entry[0].padEnd(maxFieldPathLength); 43 | } 44 | 45 | return tablify(simplified); 46 | }; 47 | 48 | function tablify(input) { 49 | const io = new PassThrough({ encoding: 'utf8' }); 50 | new Console(io).table(input); 51 | return io.read(); 52 | } 53 | 54 | function* allFields(fieldArray) { 55 | for (const field of fieldArray) { 56 | yield field; 57 | for (const type of field.types || []) { 58 | if (type.fields) { 59 | yield* allFields(type.fields); 60 | } 61 | } 62 | } 63 | } 64 | })(); 65 | -------------------------------------------------------------------------------- /snippets/llm-command/index.js: -------------------------------------------------------------------------------- 1 | 
(() => { 2 | const localRequire = require("module").createRequire(__filename); 3 | const fetch = localRequire("node-fetch-commonjs"); 4 | const Groq = require('groq-sdk'); 5 | 6 | const groqApiKey = process.env['GROQ_API_KEY']; 7 | 8 | globalThis.llm = async function (message, options = {}) { 9 | const model = options.model || 'groq'; // Default to Groq if no model specified 10 | 11 | try { 12 | if (model.toLowerCase() !== 'groq') { 13 | // Ollama version (for any non-Groq model) 14 | const response = await fetch("http://127.0.0.1:11434/api/chat", { 15 | method: "POST", 16 | headers: { 17 | "Content-Type": "application/json", 18 | }, 19 | body: JSON.stringify({ 20 | model: model.toLowerCase(), // Use the specified model 21 | messages: [ 22 | { 23 | role: "user", 24 | content: message, 25 | }, 26 | ], 27 | stream: false, 28 | }), 29 | }); 30 | 31 | const data = await response.json(); 32 | 33 | if (data.message) { 34 | return data.message.content; 35 | } else { 36 | return "No message found in the response."; 37 | } 38 | } else { 39 | // Groq (llama) version 40 | if (!groqApiKey) { 41 | console.warn('Groq API key is not set. Please set the GROQ_API_KEY environment variable.'); 42 | return 'Unable to fetch message due to missing Groq API key.'; 43 | } 44 | 45 | const groq = new Groq({ 46 | apiKey: groqApiKey, 47 | }); 48 | 49 | const chatCompletion = await groq.chat.completions.create({ 50 | messages: [{ role: 'user', content: message }], 51 | model: 'llama-3.1-70b-versatile', 52 | }); 53 | 54 | if (chatCompletion.choices && chatCompletion.choices.length > 0) { 55 | return chatCompletion.choices[0].message.content; 56 | } else { 57 | return "No message found in the response."; 58 | } 59 | } 60 | } catch (error) { 61 | console.error("Error fetching message:", error); 62 | return "An error occurred while fetching the message."; 63 | } 64 | }; 65 | })(); 66 | -------------------------------------------------------------------------------- /snippets/llm-command/README.md: -------------------------------------------------------------------------------- 1 | # LLM Command Snippet 2 | 3 | This snippet adds an `llm` command to mongosh that provides helpful suggestions or recommendations for MongoDB-related tasks. The query results are generated using Groq API by default, with an option to use other models via Ollama. 4 | 5 | ## Prerequisites 6 | 7 | ``` 8 | export GROQ_API_KEY=gsk_XXXXXXX 9 | mongosh 10 | ``` 11 | 12 | ## Usage 13 | 14 | After installing the snippet, you can use the `llm` command in your MongoDB shell like this: 15 | 16 | ```javascript 17 | llm("very briefly, just the command, do not use markdown: in mongosh how to get the collections names of current db?"); 18 | ``` 19 | 20 | This will output a possible solution to your query, such as `db.getCollectionNames()`. 21 | 22 | ```javascript 23 | llm("very briefly, just the command, do not use markdown: in mongosh replace all documents of a collection with property {'set':'llm102'} with the new value {'set':'llm101'} in current db?") 24 | ``` 25 | 26 | This will output a possible solution to your query, such as `db.collection.updateMany({ set: 'llm102' }, { $set: { set: 'llm101' } })`. 27 | 28 | You can also specify a different model to use with an optional parameter: 29 | ```javascript 30 | llm("Your query here", { model: "phi3.5" }); 31 | ``` 32 | This will use the specified model (in this case, 'phi3.5') via Ollama instead of the default Groq API. 
33 | 34 | ## Models 35 | 36 | By default, the `llm` command uses the Groq API with the 'llama-3.1-70b-versatile' model. You can use other models by specifying them in the optional parameter: 37 | 38 | - Groq API (default): No need to specify, just use `llm("Your query")`. 39 | - Ollama models: Specify the model name, e.g., `llm("Your query", { model: "gemini2" })` or `llm("Your query", { model: "phi3.5" })`. 40 | 41 | Note: When using Ollama models, make sure you have Ollama running locally on the default port (11434). 42 | 43 | 44 | ## Installation 45 | 46 | You can install this snippet using the `snippet` command in mongosh: 47 | 48 | ```javascript 49 | snippet install llm-command 50 | ``` 51 | 52 | ## Troubleshooting 53 | 54 | If you get the error: 55 | ``` 56 | "Error: Cannot find module 'groq-sdk'" 57 | ``` 58 | then, go to your $USER/.mongodb/mongosh/snippets/node_modules/@juananpe/snippets-llm-command folder 59 | and run `npm install`. Then close and open mongosh again. 60 | 61 | 62 | ## License 63 | 64 | This snippet is licensed under the Apache-2.0 license. 65 | -------------------------------------------------------------------------------- /snippets/mock-collection/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@mongosh/snippet-mock-collection", 3 | "version": "1.0.3", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "@mongosh/snippet-mock-collection", 9 | "version": "1.0.2", 10 | "license": "Apache-2.0", 11 | "dependencies": { 12 | "bson": "^4.3.0" 13 | } 14 | }, 15 | "node_modules/base64-js": { 16 | "version": "1.5.1", 17 | "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", 18 | "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", 19 | "funding": [ 20 | { 21 | "type": "github", 22 | "url": "https://github.com/sponsors/feross" 23 | }, 24 | { 25 | "type": "patreon", 26 | "url": "https://www.patreon.com/feross" 27 | }, 28 | { 29 | "type": "consulting", 30 | "url": "https://feross.org/support" 31 | } 32 | ], 33 | "license": "MIT" 34 | }, 35 | "node_modules/bson": { 36 | "version": "4.7.2", 37 | "resolved": "https://registry.npmjs.org/bson/-/bson-4.7.2.tgz", 38 | "integrity": "sha512-Ry9wCtIZ5kGqkJoi6aD8KjxFZEx78guTQDnpXWiNthsxzrxAK/i8E6pCHAIZTbaEFWcOCvbecMukfK7XUvyLpQ==", 39 | "license": "Apache-2.0", 40 | "dependencies": { 41 | "buffer": "^5.6.0" 42 | }, 43 | "engines": { 44 | "node": ">=6.9.0" 45 | } 46 | }, 47 | "node_modules/buffer": { 48 | "version": "5.7.1", 49 | "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", 50 | "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", 51 | "funding": [ 52 | { 53 | "type": "github", 54 | "url": "https://github.com/sponsors/feross" 55 | }, 56 | { 57 | "type": "patreon", 58 | "url": "https://www.patreon.com/feross" 59 | }, 60 | { 61 | "type": "consulting", 62 | "url": "https://feross.org/support" 63 | } 64 | ], 65 | "license": "MIT", 66 | "dependencies": { 67 | "base64-js": "^1.3.1", 68 | "ieee754": "^1.1.13" 69 | } 70 | }, 71 | "node_modules/ieee754": { 72 | "version": "1.2.1", 73 | "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", 74 | "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", 75 | "funding": [ 76 | { 77 | "type": "github", 78 | "url": "https://github.com/sponsors/feross" 79 | }, 80 | { 
81 | "type": "patreon", 82 | "url": "https://www.patreon.com/feross" 83 | }, 84 | { 85 | "type": "consulting", 86 | "url": "https://feross.org/support" 87 | } 88 | ], 89 | "license": "BSD-3-Clause" 90 | } 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /snippets/spawn-mongod/index.js: -------------------------------------------------------------------------------- 1 | (() => { 2 | const localRequire = require('module').createRequire(__filename); 3 | const downloadURL = localRequire('mongodb-download-url').default; 4 | const download = localRequire('download'); 5 | const crypto = localRequire('crypto'); 6 | const child_process = localRequire('child_process'); 7 | const os = localRequire('os'); 8 | const { promises: fs } = localRequire('fs'); 9 | 10 | class MongodWrapper { 11 | constructor(options) { 12 | this.options = options; 13 | this.proc = null; 14 | this.log = ''; 15 | if (!options.port) options.port = '27017'; 16 | 17 | this.waitReady = new Promise(resolve => this.markReady = resolve); 18 | } 19 | 20 | async spawn() { 21 | const { url, artifact } = await downloadURL(this.options); 22 | const id = crypto.createHash('sha256').update(artifact).digest('hex').slice(0, 20); 23 | const downloadPath = `${os.homedir()}/.mongodb/spawn-mongod-downloads/${id}`; 24 | let hasDownload = false; 25 | try { 26 | await fs.stat(downloadPath); 27 | hasDownload = true; 28 | } catch {} 29 | if (!hasDownload) { 30 | print(`[Downloading mongod from ${url} to ${downloadPath} ...]`); 31 | await fs.mkdir(downloadPath, { recursive: true }); 32 | await download(url, downloadPath, { extract: true, strip: 1 }); 33 | } 34 | 35 | let tmpDbPath; 36 | let { port, dbpath } = this.options; 37 | if (!dbpath) { 38 | dbpath = `${os.tmpdir()}/spawn-mongod-db-${crypto.randomBytes(10).toString('hex')}`; 39 | await fs.mkdir(dbpath, { recursive: true }); 40 | tmpDbPath = dbpath; 41 | } 42 | 43 | this.proc = child_process.spawn(`${downloadPath}/bin/mongod`, [ 44 | '--port', port, 45 | '--dbpath', dbpath 46 | ], { stdio: 'pipe' }); 47 | this.proc.stdout.setEncoding('utf8').on('data', chunk => { 48 | this.log += chunk; 49 | if (this.log.includes('Waiting for connections')) this.markReady('listening!'); 50 | }); 51 | this.proc.stderr.setEncoding('utf8').on('data', chunk => { 52 | this.log += chunk; 53 | }); 54 | this.proc.on('exit', () => { 55 | this.exited = true; 56 | if (tmpDbPath) { 57 | fs.rmdir(this.tmpDbPath, { recursive: true }).catch(() => {}); 58 | } 59 | print(`mongod process for port ${port} stopped`); 60 | }); 61 | process.on('exit', () => { 62 | if (!this.exited) this.stop(); 63 | }); 64 | } 65 | 66 | get url() { 67 | return this.exited ? 
null : `mongodb://localhost:${this.options.port}/?directConnection=true&serverSelectionTimeoutMS=2000`; 68 | } 69 | 70 | getMongo() { 71 | return this.mongo = this.mongo || Mongo(this.url); 72 | } 73 | 74 | getDB(name) { 75 | return this.getMongo().getDB(name); 76 | } 77 | 78 | stop() { 79 | this.proc.kill(); 80 | } 81 | } 82 | 83 | globalThis.spawnMongod = function(options) { 84 | const wrapper = new MongodWrapper(options); 85 | wrapper.spawn(); 86 | return wrapper; 87 | }; 88 | })(); 89 | -------------------------------------------------------------------------------- /snippets/resumetoken/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@mongosh/snippet-resumetoken", 3 | "version": "1.1.1", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "@mongosh/snippet-resumetoken", 9 | "version": "1.1.0", 10 | "license": "SSPL", 11 | "dependencies": { 12 | "mongodb-resumetoken-decoder": "^1.1.0" 13 | } 14 | }, 15 | "node_modules/base64-js": { 16 | "version": "1.5.1", 17 | "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", 18 | "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", 19 | "funding": [ 20 | { 21 | "type": "github", 22 | "url": "https://github.com/sponsors/feross" 23 | }, 24 | { 25 | "type": "patreon", 26 | "url": "https://www.patreon.com/feross" 27 | }, 28 | { 29 | "type": "consulting", 30 | "url": "https://feross.org/support" 31 | } 32 | ], 33 | "license": "MIT" 34 | }, 35 | "node_modules/bson": { 36 | "version": "4.7.2", 37 | "resolved": "https://registry.npmjs.org/bson/-/bson-4.7.2.tgz", 38 | "integrity": "sha512-Ry9wCtIZ5kGqkJoi6aD8KjxFZEx78guTQDnpXWiNthsxzrxAK/i8E6pCHAIZTbaEFWcOCvbecMukfK7XUvyLpQ==", 39 | "license": "Apache-2.0", 40 | "dependencies": { 41 | "buffer": "^5.6.0" 42 | }, 43 | "engines": { 44 | "node": ">=6.9.0" 45 | } 46 | }, 47 | "node_modules/buffer": { 48 | "version": "5.7.1", 49 | "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", 50 | "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", 51 | "funding": [ 52 | { 53 | "type": "github", 54 | "url": "https://github.com/sponsors/feross" 55 | }, 56 | { 57 | "type": "patreon", 58 | "url": "https://www.patreon.com/feross" 59 | }, 60 | { 61 | "type": "consulting", 62 | "url": "https://feross.org/support" 63 | } 64 | ], 65 | "license": "MIT", 66 | "dependencies": { 67 | "base64-js": "^1.3.1", 68 | "ieee754": "^1.1.13" 69 | } 70 | }, 71 | "node_modules/ieee754": { 72 | "version": "1.2.1", 73 | "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", 74 | "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", 75 | "funding": [ 76 | { 77 | "type": "github", 78 | "url": "https://github.com/sponsors/feross" 79 | }, 80 | { 81 | "type": "patreon", 82 | "url": "https://www.patreon.com/feross" 83 | }, 84 | { 85 | "type": "consulting", 86 | "url": "https://feross.org/support" 87 | } 88 | ], 89 | "license": "BSD-3-Clause" 90 | }, 91 | "node_modules/mongodb-resumetoken-decoder": { 92 | "version": "1.1.2", 93 | "resolved": "https://registry.npmjs.org/mongodb-resumetoken-decoder/-/mongodb-resumetoken-decoder-1.1.2.tgz", 94 | "integrity": "sha512-3lHI/Rs4uCD2HgnRvv6yhZxMK9N0Dk0UEdH6oUBad0xbiv0DesgD2tTIJQ3Wcc9ic9GdWNoAySEdIQkvX5XXXw==", 95 | "license": "SSPL", 96 | "dependencies": { 97 | "bson": "^4.6.3" 98 | 
}, 99 | "bin": { 100 | "mongodb-resumetoken-decoder": "bin/mongodb-resumetoken-decoder.js" 101 | } 102 | } 103 | } 104 | } 105 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # mongosh-snippets 2 | 3 | An experimental plugin feature for [mongosh][]. 4 | 5 | ## What is a snippet? 6 | 7 | A snippet is a script that you can install using the `snippet` command in [mongosh][], 8 | to provide additional functionality that mongosh does otherwise not provide. 9 | 10 | ## What does it mean that snippets are experimental? 11 | 12 | It means that, for the time being, MongoDB does not offer any commercial 13 | support for it, and that it may be changed or removed as we see fit. 14 | 15 | It does not mean that bugs in this feature are an expected occurrence, and you 16 | can and should file bugs in the mongosh [JIRA][] project if you experience any. 17 | 18 | ## How do I install a snippet? 19 | 20 | You can manage snippets through the `snippet` command in mongosh. Running 21 | `snippet help` gives an overview over all commands that are available. 22 | 23 | For installing a snippet, you can use `snippet install `: 24 | 25 | ``` 26 | > snippet uninstall analyze-schema 27 | Running uninstall... 28 | Done! 29 | > snippet install analyze-schema 30 | Running install... 31 | Installed new snippets analyze-schema. Do you want to load them now? [Y/n]: y 32 | Finished installing snippets: analyze-schema 33 | > db.test.insertOne({ field: 'value' }) 34 | { 35 | acknowledged: true, 36 | insertedId: ObjectId("60b60758d381fd904f5dc517") 37 | } 38 | > schema(db.test) 39 | ┌─────────┬─────────┬───────────┬────────────┐ 40 | │ (index) │ 0 │ 1 │ 2 │ 41 | ├─────────┼─────────┼───────────┼────────────┤ 42 | │ 0 │ '_id ' │ '100.0 %' │ 'ObjectID' │ 43 | │ 1 │ 'field' │ '100.0 %' │ 'String' │ 44 | └─────────┴─────────┴───────────┴────────────┘ 45 | ``` 46 | 47 | You can list all installed snippets with `snippet ls`, and you can list all 48 | available snippets with `snippet search`. 49 | 50 | ## Can I disable this feature? 51 | 52 | Yes. 53 | 54 | ``` 55 | > config.set('snippetIndexSourceURLs', '') 56 | ``` 57 | 58 | ## How do snippets work? 59 | 60 | The snippets feature uses the [npm][] package manager under the hood to install 61 | snippets from a pre-specified registry. The default registry currently points to 62 | this Github repository here. When you install a snippet, mongosh will look up 63 | its npm package name based on the information in the registry and install it, 64 | and load it using `load()` by default on each mongosh startup. 65 | 66 | This also means that snippets can depend on npm packages, and use them in their 67 | functionality. For example, the `analyze-schema` example above uses the 68 | [`mongodb-schema`][] package from npm to perform the analysis itself. 69 | 70 | ## Can I add my own snippets? 71 | 72 | Absolutely! You should feel encouraged to do so, if you believe that you have 73 | a script for the shell that you think others might find useful as well. 74 | 75 | In order to add a new snippet: 76 | - Fork and clone this repository 77 | - Add a new directory under `snippets/`, using the name you wish to use 78 | - Add at least the `package.json`, `index.js` and `LICENSE` files, and ideally 79 | also a short `README.md`. 
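The `index.js` entry point is a plain JavaScript file that mongosh executes with `load()` on startup. As a rough sketch only (the `myHelper` name and its body are placeholders, not part of any existing snippet), an entry point modeled on the snippets already in this repository could look like this:

```js
// index.js — minimal sketch following the pattern of the existing snippets
(() => {
  // If the snippet declares npm dependencies in its package.json, they can be
  // loaded relative to this file with createRequire (see e.g. the resumetoken
  // or analyze-schema snippets); it is unused in this placeholder.
  const localRequire = require('module').createRequire(__filename);

  // Expose the snippet's functionality as a global so that it is available in
  // the mongosh REPL once the snippet has been loaded.
  globalThis.myHelper = function (coll) {
    return coll.estimatedDocumentCount();
  };
})();
```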
80 | 81 | A minimal package.json could contain: 82 | 83 | ```js 84 | { 85 | "name": "@mongosh/snippet-", 86 | "snippetName": "", 87 | "version": "0.0.1", 88 | "description": "...", 89 | "main": "index.js", 90 | "license": "Apache-2.0", 91 | "publishConfig": { 92 | "access": "public" 93 | } 94 | } 95 | ``` 96 | 97 | Once you have completed that, you can commit your changes and open a pull 98 | request against this repository. 99 | 100 | If it is merged, we will take care of publishing and adding it to the index. 101 | 102 | ## Can I run my own registry? 103 | 104 | Yes. From the mongosh CLI perspective, a snippet registry is just a https URL 105 | pointing to a [brotli][]-compressed [BSON][] file; no package contents are 106 | actually provided in that file. This file has the following format ( 107 | in TypeScript syntax): 108 | 109 | ```typescript 110 | interface ErrorMatcher { 111 | // Add additional information to shell errors matching one of the regular. 112 | // expressions. The message can point to a snippet helping solve that error. 113 | matches: RegExp[]; 114 | message: string; 115 | } 116 | 117 | interface SnippetDescription { 118 | // The *npm* package name. Users do not interact with this. 119 | name: string; 120 | // The snippet name. This is what users interact with. 121 | snippetName: string; 122 | // An optional install specifier that can be used to point npm towards 123 | // packages that are not uploaded to the registry. For example, 124 | // this could be an URL to a git repository or a tarball. 125 | installSpec?: string; 126 | // A version field. Users do not interact with this, as currently, `snippet` 127 | // always installs the latest versions of snippets. 128 | version: string; 129 | description: string; 130 | readme: string; 131 | // License should be a SPDX license identifier. 132 | license: string; 133 | errorMatchers?: ErrorMatcher[]; 134 | } 135 | 136 | interface SnippetIndexFile { 137 | // This must be 1 currently. 138 | indexFileVersion: 1; 139 | index: SnippetDescription[]; 140 | metadata: { homepage: string }; 141 | } 142 | ``` 143 | 144 | [mongosh]: https://github.com/mongodb-js/mongosh 145 | [JIRA]: https://jira.mongodb.org/projects/MONGOSH/issues 146 | [npm]: https://www.npmjs.com/ 147 | [`mongodb-schema`]: https://www.npmjs.com/package/mongodb-schema 148 | [brotli]: https://github.com/google/brotli/ 149 | [BSON]: https://bsonspec.org/ 150 | -------------------------------------------------------------------------------- /snippets/uuidhelpers/index.js: -------------------------------------------------------------------------------- 1 | // originally taken from https://github.com/mongodb/mongo-csharp-driver/blob/ac2b2a61c6b7a193cf0266dfb8c65f86c2bf7572/uuidhelpers.js 2 | 3 | // Javascript helper functions for parsing and displaying UUIDs in the MongoDB shell. 
4 | // To create BinData values corresponding to the various driver encodings use: 5 | // var s = "{00112233-4455-6677-8899-aabbccddeeff}"; 6 | // var uuid = UUID(s); // new Standard encoding 7 | // var juuid = JUUID(s); // JavaLegacy encoding 8 | // var csuuid = CSUUID(s); // CSharpLegacy encoding 9 | // var pyuuid = PYUUID(s); // PythonLegacy encoding 10 | // To convert the various BinData values back to human readable UUIDs use: 11 | // uuid.toUUID() => 'UUID("00112233-4455-6677-8899-aabbccddeeff")' 12 | // juuid.ToJUUID() => 'JUUID("00112233-4455-6677-8899-aabbccddeeff")' 13 | // csuuid.ToCSUUID() => 'CSUUID("00112233-4455-6677-8899-aabbccddeeff")' 14 | // pyuuid.ToPYUUID() => 'PYUUID("00112233-4455-6677-8899-aabbccddeeff")' 15 | // With any of the UUID variants you can use toHexUUID to echo the raw BinData with subtype and hex string: 16 | // uuid.toHexUUID() => 'HexData(4, "00112233445566778899aabbccddeeff")' 17 | // juuid.toHexUUID() => 'HexData(3, "7766554433221100ffeeddccbbaa9988")' 18 | // csuuid.toHexUUID() => 'HexData(3, "33221100554477668899aabbccddeeff")' 19 | // pyuuid.toHexUUID() => 'HexData(3, "00112233445566778899aabbccddeeff")' 20 | 21 | (() => { 22 | function HexToBase64(hex) { 23 | var base64Digits = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"; 24 | var base64 = ""; 25 | var group; 26 | for (var i = 0; i < 30; i += 6) { 27 | group = parseInt(hex.substr(i, 6), 16); 28 | base64 += base64Digits[(group >> 18) & 0x3f]; 29 | base64 += base64Digits[(group >> 12) & 0x3f]; 30 | base64 += base64Digits[(group >> 6) & 0x3f]; 31 | base64 += base64Digits[group & 0x3f]; 32 | } 33 | group = parseInt(hex.substr(30, 2), 16); 34 | base64 += base64Digits[(group >> 2) & 0x3f]; 35 | base64 += base64Digits[(group << 4) & 0x3f]; 36 | base64 += "=="; 37 | return base64; 38 | } 39 | 40 | function Base64ToHex(base64) { 41 | var base64Digits = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/="; 42 | var hexDigits = "0123456789abcdef"; 43 | var hex = ""; 44 | for (var i = 0; i < 24; ) { 45 | var e1 = base64Digits.indexOf(base64[i++]); 46 | var e2 = base64Digits.indexOf(base64[i++]); 47 | var e3 = base64Digits.indexOf(base64[i++]); 48 | var e4 = base64Digits.indexOf(base64[i++]); 49 | var c1 = (e1 << 2) | (e2 >> 4); 50 | var c2 = ((e2 & 15) << 4) | (e3 >> 2); 51 | var c3 = ((e3 & 3) << 6) | e4; 52 | hex += hexDigits[c1 >> 4]; 53 | hex += hexDigits[c1 & 15]; 54 | if (e3 != 64) { 55 | hex += hexDigits[c2 >> 4]; 56 | hex += hexDigits[c2 & 15]; 57 | } 58 | if (e4 != 64) { 59 | hex += hexDigits[c3 >> 4]; 60 | hex += hexDigits[c3 & 15]; 61 | } 62 | } 63 | return hex; 64 | } 65 | 66 | function UUID(uuid) { 67 | var hex = uuid.replace(/[{}-]/g, ""); // remove extra characters 68 | var base64 = HexToBase64(hex); 69 | return new BinData(4, base64); // new subtype 4 70 | } 71 | 72 | function JUUID(uuid) { 73 | var hex = uuid.replace(/[{}-]/g, ""); // remove extra characters 74 | var msb = hex.substr(0, 16); 75 | var lsb = hex.substr(16, 16); 76 | msb = msb.substr(14, 2) + msb.substr(12, 2) + msb.substr(10, 2) + msb.substr(8, 2) + msb.substr(6, 2) + msb.substr(4, 2) + msb.substr(2, 2) + msb.substr(0, 2); 77 | lsb = lsb.substr(14, 2) + lsb.substr(12, 2) + lsb.substr(10, 2) + lsb.substr(8, 2) + lsb.substr(6, 2) + lsb.substr(4, 2) + lsb.substr(2, 2) + lsb.substr(0, 2); 78 | hex = msb + lsb; 79 | var base64 = HexToBase64(hex); 80 | return new BinData(3, base64); 81 | } 82 | 83 | function CSUUID(uuid) { 84 | var hex = uuid.replace(/[{}-]/g, ""); // remove extra characters 85 
| var a = hex.substr(6, 2) + hex.substr(4, 2) + hex.substr(2, 2) + hex.substr(0, 2); 86 | var b = hex.substr(10, 2) + hex.substr(8, 2); 87 | var c = hex.substr(14, 2) + hex.substr(12, 2); 88 | var d = hex.substr(16, 16); 89 | hex = a + b + c + d; 90 | var base64 = HexToBase64(hex); 91 | return new BinData(3, base64); 92 | } 93 | 94 | function PYUUID(uuid) { 95 | var hex = uuid.replace(/[{}-]/g, ""); // remove extra characters 96 | var base64 = HexToBase64(hex); 97 | return new BinData(3, base64); 98 | } 99 | 100 | BinData.prototype.toUUID = function () { 101 | var hex = Base64ToHex(this.base64()); // don't use BinData's hex function because it has bugs in older versions of the shell 102 | var uuid = hex.substr(0, 8) + '-' + hex.substr(8, 4) + '-' + hex.substr(12, 4) + '-' + hex.substr(16, 4) + '-' + hex.substr(20, 12); 103 | return 'UUID("' + uuid + '")'; 104 | } 105 | 106 | BinData.prototype.toJUUID = function () { 107 | var hex = Base64ToHex(this.base64()); // don't use BinData's hex function because it has bugs in older versions of the shell 108 | var msb = hex.substr(0, 16); 109 | var lsb = hex.substr(16, 16); 110 | msb = msb.substr(14, 2) + msb.substr(12, 2) + msb.substr(10, 2) + msb.substr(8, 2) + msb.substr(6, 2) + msb.substr(4, 2) + msb.substr(2, 2) + msb.substr(0, 2); 111 | lsb = lsb.substr(14, 2) + lsb.substr(12, 2) + lsb.substr(10, 2) + lsb.substr(8, 2) + lsb.substr(6, 2) + lsb.substr(4, 2) + lsb.substr(2, 2) + lsb.substr(0, 2); 112 | hex = msb + lsb; 113 | var uuid = hex.substr(0, 8) + '-' + hex.substr(8, 4) + '-' + hex.substr(12, 4) + '-' + hex.substr(16, 4) + '-' + hex.substr(20, 12); 114 | return 'JUUID("' + uuid + '")'; 115 | } 116 | 117 | BinData.prototype.toCSUUID = function () { 118 | var hex = Base64ToHex(this.base64()); // don't use BinData's hex function because it has bugs in older versions of the shell 119 | var a = hex.substr(6, 2) + hex.substr(4, 2) + hex.substr(2, 2) + hex.substr(0, 2); 120 | var b = hex.substr(10, 2) + hex.substr(8, 2); 121 | var c = hex.substr(14, 2) + hex.substr(12, 2); 122 | var d = hex.substr(16, 16); 123 | hex = a + b + c + d; 124 | var uuid = hex.substr(0, 8) + '-' + hex.substr(8, 4) + '-' + hex.substr(12, 4) + '-' + hex.substr(16, 4) + '-' + hex.substr(20, 12); 125 | return 'CSUUID("' + uuid + '")'; 126 | } 127 | 128 | BinData.prototype.toPYUUID = function () { 129 | var hex = Base64ToHex(this.base64()); // don't use BinData's hex function because it has bugs 130 | var uuid = hex.substr(0, 8) + '-' + hex.substr(8, 4) + '-' + hex.substr(12, 4) + '-' + hex.substr(16, 4) + '-' + hex.substr(20, 12); 131 | return 'PYUUID("' + uuid + '")'; 132 | } 133 | 134 | 135 | BinData.prototype.toHexUUID = function () { 136 | var uuid = Base64ToHex(this.base64()); // don't use BinData's hex function because it has bugs 137 | return 'HexData(' + this.subtype() + ', "' + uuid + '")'; 138 | } 139 | 140 | // for compatibility with the new mongosh shell 141 | if (BinData.prototype.base64 === undefined) { 142 | BinData.prototype.base64 = function() { return this.buffer.base64Slice(); }; 143 | } 144 | if (BinData.prototype.subtype === undefined) { 145 | BinData.prototype.subtype = function() { return this.sub_type; }; 146 | } 147 | 148 | function TestUUIDHelperFunctions() { 149 | var s = "{00112233-4455-6677-8899-aabbccddeeff}"; 150 | var uuid = UUID(s); 151 | var juuid = JUUID(s); 152 | var csuuid = CSUUID(s); 153 | var pyuuid = PYUUID(s); 154 | print(uuid.toUUID()); 155 | print(juuid.toJUUID()); 156 | print(csuuid.toCSUUID()); 157 | print(pyuuid.toPYUUID()); 
158 | print(uuid.toHexUUID()); 159 | print(juuid.toHexUUID()); 160 | print(csuuid.toHexUUID()); 161 | print(pyuuid.toHexUUID()); 162 | } 163 | 164 | Object.assign(globalThis, { 165 | UUID, JUUID, CSUUID, PYUUID, TestUUIDHelperFunctions 166 | }); 167 | 168 | })(); 169 | -------------------------------------------------------------------------------- /snippets/mock-collection/index.js: -------------------------------------------------------------------------------- 1 | (() => { 2 | const localRequire = require('module').createRequire(__filename); 3 | const bson = localRequire('bson'); 4 | const util = localRequire('util'); 5 | const vm = localRequire('vm'); 6 | 7 | class MockCollection { 8 | constructor(docs) { 9 | this._docs = bson.deserialize(bson.serialize({ docs })).docs; 10 | for (const doc of this._docs) { 11 | if (!doc._id) doc._id = ObjectId(); 12 | } 13 | } 14 | 15 | bulkWrite() { throw new Error('mock collection not writable'); } 16 | deleteMany() { throw new Error('mock collection not writable'); } 17 | deleteOne() { throw new Error('mock collection not writable'); } 18 | findAndModify() { throw new Error('mock collection not writable'); } 19 | findOneAndDelete() { throw new Error('mock collection not writable'); } 20 | findOneAndReplace() { throw new Error('mock collection not writable'); } 21 | findOneAndUpdate() { throw new Error('mock collection not writable'); } 22 | renameCollection() { throw new Error('mock collection not writable'); } 23 | insertMany() { throw new Error('mock collection not writable'); } 24 | insertOne() { throw new Error('mock collection not writable'); } 25 | insert() { throw new Error('mock collection not writable'); } 26 | remove() { throw new Error('mock collection not writable'); } 27 | save() { throw new Error('mock collection not writable'); } 28 | replaceOne() { throw new Error('mock collection not writable'); } 29 | update() { throw new Error('mock collection not writable'); } 30 | updateOne() { throw new Error('mock collection not writable'); } 31 | updateMany() { throw new Error('mock collection not writable'); } 32 | convertToCapped() { throw new Error('mock collection not writable'); } 33 | createIndex() { throw new Error('mock collection not writable'); } 34 | createIndexes() { throw new Error('mock collection not writable'); } 35 | ensureIndex() { throw new Error('mock collection not writable'); } 36 | dropIndex() { throw new Error('mock collection not writable'); } 37 | dropIndexes() { throw new Error('mock collection not writable'); } 38 | hideIndex() { throw new Error('mock collection not writable'); } 39 | unhideIndex() { throw new Error('mock collection not writable'); } 40 | runCommand() { throw new Error('mock collection not writable'); } 41 | runCommandWithCheck() { throw new Error('mock collection not writable'); } 42 | initializeOrderedBulkOp() { throw new Error('mock collection not writable'); } 43 | initializeUnorderedBulkOp() { throw new Error('mock collection not writable'); } 44 | explain() { throw new Error('cannot create explainable object from mock collection'); } 45 | getPlanCache() { throw new Error('cannot create plan cache from mock collection'); } 46 | validate() { throw new Error('cannot validate mock collection'); } 47 | watch() { throw new Error('cannot watch mock collection'); } 48 | getShardDistribution() { throw new Error('cannot get sharding info for mock collection'); } 49 | reIndex() {} 50 | drop() {} 51 | 52 | aggregate(...args) { 53 | let options; 54 | let pipeline; 55 | if (args.length === 0 || 
Array.isArray(args[0])) { 56 | options = args[1] || {}; 57 | pipeline = args[0] || []; 58 | } else { 59 | options = {}; 60 | pipeline = args || []; 61 | } 62 | pipeline = [ 63 | { $limit: 1 }, 64 | { $count: 'dummy' }, 65 | { $set: { values: this._docs }}, 66 | { $unwind: '$values'}, 67 | { $replaceRoot: { newRoot: '$values' } }, 68 | ...pipeline 69 | ]; 70 | options = { 71 | ...options, 72 | readPreference: 'secondaryPreferred' 73 | }; 74 | return db.getSiblingDB('admin').getCollection('system.version').aggregate(pipeline, options); 75 | } 76 | 77 | count(query, options) { 78 | return this.countDocuments(query, options); 79 | } 80 | 81 | countDocuments(query, options) { 82 | return this.aggregate([{ $match: query || {} }, { $count: 'count' }], options).next().count; 83 | } 84 | 85 | estimatedDocumentCount() { 86 | return this._docs.length; 87 | } 88 | 89 | distinct(field, query, options) { 90 | return this.aggregate([ 91 | { $match: query || {} }, 92 | { $group: { _id: '$' + field } } 93 | ], options).toArray().map(({ _id }) => _id); 94 | } 95 | 96 | find(query, projection) { 97 | const pipeline = [ { $match: query || {} } ]; 98 | if (projection) { 99 | pipeline.push({ $project: projection }); 100 | } 101 | return this.aggregate(pipeline); 102 | } 103 | 104 | findOne(query, projection) { 105 | return this.find(query, projection).next(); 106 | } 107 | 108 | isCapped() { 109 | return false; 110 | } 111 | 112 | getIndexes() { 113 | return [ { v: 2, key: { _id: 1 }, name: '_id_' } ]; 114 | } 115 | 116 | getIndexSpecs() { 117 | return this.getIndexes(); 118 | } 119 | 120 | getIndices() { 121 | return this.getIndexes(); 122 | } 123 | 124 | getIndexKeys() { 125 | return this.getIndexes().map(ix => ix.key); 126 | } 127 | 128 | totalIndexSize() { 129 | return 0; 130 | } 131 | 132 | getDB() { 133 | return null; 134 | } 135 | 136 | getMongo() { 137 | return null; 138 | } 139 | 140 | dataSize() { 141 | return this.aggregate([{ $group: { _id: null, size: { $sum: { $bsonSize: '$$ROOT' } } } } ]); 142 | } 143 | 144 | storageSize() { 145 | return this.dataSize(); 146 | } 147 | 148 | totalSize() { 149 | return this.storageSize(); 150 | } 151 | 152 | exists() { 153 | return this._docs.length > 0; 154 | } 155 | 156 | getFullName() { 157 | return '.mock'; 158 | } 159 | 160 | getName() { 161 | return '.mock'; 162 | } 163 | 164 | stats() { 165 | const size = this.totalSize(); 166 | const count = this._docs.length; 167 | return { 168 | ns: this.getFullName(), 169 | size, 170 | count, 171 | avgObjSize: size / count, 172 | storageSize: size, 173 | freeStorageSize: 0, 174 | capped: this.isCapped(), 175 | wiredTiger: null, 176 | nindexes: 1, 177 | indexBuilds: [], 178 | totalIndexSize: 0, 179 | totalSize: size, 180 | indexSizes: { _id_: 0 }, 181 | scaleFactor: 1, 182 | ok: 1 183 | }; 184 | } 185 | 186 | latencyStats() { 187 | return [ 188 | { 189 | ns: this.getFullName(), 190 | host: '', 191 | localTime: new Date(), 192 | latencyStats: { 193 | reads: { latency: 0, ops: 0 }, 194 | writes: { latency: 0, ops: 0 }, 195 | commands: { latency: 0, ops: 0 }, 196 | transactions: { latency: 0, ops: 0 }, 197 | } 198 | } 199 | ]; 200 | } 201 | 202 | mapReduce(map, reduce, opts) { 203 | if (!opts) { 204 | opts = { out: { inline: 1 } }; 205 | } 206 | const mapResult = new Map(); 207 | const contextObj = Object.create(globalThis); 208 | contextObj.emit = function(key, val) { 209 | if (mapResult.has(key)) { 210 | mapResult.get(key).push(val); 211 | } else { 212 | mapResult.set(key, [val]); 213 | } 214 | }; 215 | const { mapf, 
reducef } = vm.runInContext(`({ 216 | mapf: ${map.toString()}, reducef: ${reduce.toString()} 217 | })`, vm.createContext(contextObj)); 218 | for (const doc of this._docs) { 219 | mapf.call(doc); 220 | } 221 | const results = []; 222 | for (const [key, values] of mapResult) { 223 | results.push({ _id: key, value: reducef(key, values) }); 224 | } 225 | if (typeof opts === 'string') { 226 | opts = { out: opts }; 227 | } 228 | if (opts.out.inline) { 229 | return { results, ok: 1 }; 230 | } 231 | db[opts.out].insertMany(results); 232 | return { result: opts.out, ok: 1 }; 233 | } 234 | 235 | getShardVersion() { 236 | return { 237 | configServer: '', 238 | inShardedMode: false, 239 | mine: Timestamp(0, 0), 240 | global: 'UNKNOWN', 241 | ok: 1 242 | }; 243 | } 244 | } 245 | 246 | globalThis.mockCollection = function(documents) { 247 | return new MockCollection([...documents]); 248 | }; 249 | })(); 250 | -------------------------------------------------------------------------------- /snippets/llm-command/LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. 10 | 11 | "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. 12 | 13 | "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. 14 | 15 | "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. 16 | 17 | "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. 18 | 19 | "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. 20 | 21 | "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). 22 | 23 | "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. 
24 | 25 | "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." 26 | 27 | "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 28 | 29 | 2. Grant of Copyright License. 30 | 31 | Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 32 | 33 | 3. Grant of Patent License. 34 | 35 | Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 36 | 37 | 4. Redistribution. 
38 | 39 | You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: 40 | 41 | You must give any other recipients of the Work or Derivative Works a copy of this License; and 42 | 43 | You must cause any modified files to carry prominent notices stating that You changed the files; and 44 | 45 | You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and 46 | 47 | If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. 48 | 49 | You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 50 | 51 | 5. Submission of Contributions. 52 | 53 | Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 54 | 55 | 6. Trademarks. 56 | 57 | This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 58 | 59 | 7. Disclaimer of Warranty. 60 | 61 | Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 62 | 63 | 8. Limitation of Liability. 
64 | 65 | In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 66 | 67 | 9. Accepting Warranty or Additional Liability. 68 | 69 | While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of Your accepting any such warranty or additional liability. 70 | 71 | END OF TERMS AND CONDITIONS 72 | 73 | APPENDIX: How to apply the Apache License to your work. 74 | 75 | To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. 76 | 77 | Copyright 2024 Juanan Pereira 78 | 79 | Licensed under the Apache License, Version 2.0 (the "License"); 80 | you may not use this file except in compliance with the License. 81 | You may obtain a copy of the License at 82 | 83 | http://www.apache.org/licenses/LICENSE-2.0 84 | 85 | Unless required by applicable law or agreed to in writing, software 86 | distributed under the License is distributed on an "AS IS" BASIS, 87 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 88 | See the License for the specific language governing permissions and 89 | limitations under the License. 
90 | -------------------------------------------------------------------------------- /snippets/mongocompat/test.js: -------------------------------------------------------------------------------- 1 | load(__dirname + '/index.js'); 2 | 3 | const bson = require('bson'); 4 | 5 | assert.strictEqual(ObjectId('0123456789abcdef01234567').tojson(), 'ObjectId("0123456789abcdef01234567")'); 6 | 7 | assert.strictEqual(BinData(4, 'abcdefgh').toString(), 'BinData(4,"abcdefgh")'); 8 | 9 | assert.strictEqual(NumberLong(2147483647).toString(), 'NumberLong(2147483647)'); 10 | assert.strictEqual(NumberLong("2147483648").toString(), 'NumberLong("2147483648")'); 11 | assert.strictEqual(NumberLong(-2147483648).toString(), 'NumberLong(-2147483648)'); 12 | assert.strictEqual(NumberLong(-2147483649).toString(), 'NumberLong("-2147483649")'); 13 | 14 | const int1 = NumberInt(42); 15 | assert.strictEqual(int1.toString(), 'NumberInt(42)'); 16 | assert.strictEqual(int1.tojson(), 'NumberInt(42)'); 17 | assert.strictEqual(int1.toNumber(), 42); 18 | assert.strictEqual(int1.valueOf(), 42); 19 | assert.strictEqual(int1.toJSON(), 42); 20 | const int2 = NumberInt(-100); 21 | assert.strictEqual(int2.toString(), 'NumberInt(-100)'); 22 | assert.strictEqual(int2.tojson(), 'NumberInt(-100)'); 23 | assert.strictEqual(int2.toNumber(), -100); 24 | const maxInt32 = NumberInt(2147483647); 25 | assert.strictEqual(maxInt32.toString(), 'NumberInt(2147483647)'); 26 | assert.strictEqual(maxInt32.toNumber(), 2147483647); 27 | const minInt32 = NumberInt(-2147483648); 28 | assert.strictEqual(minInt32.toString(), 'NumberInt(-2147483648)'); 29 | assert.strictEqual(minInt32.toNumber(), -2147483648); 30 | 31 | assert.strictEqual(NumberLong(9223372036854775807).toString(), 'NumberLong("9223372036854775807")'); 32 | assert.strictEqual(NumberLong(-9223372036854775808).toString(), 'NumberLong("-9223372036854775808")'); 33 | const maxLong = NumberLong(9223372036854775807, 2147483647, -1); 34 | assert.strictEqual(maxLong.floatApprox, 9223372036854775807); 35 | assert.strictEqual(maxLong.top, 2147483647); 36 | assert.strictEqual(maxLong.bottom, -1);//mongosh uses signed representation, while old shell uses unsigned 37 | assert.strictEqual(maxLong.exactValueString, "9223372036854775807"); 38 | const minLong = NumberLong(-9223372036854775808); 39 | assert.strictEqual(minLong.floatApprox, -9223372036854776000); 40 | assert.strictEqual(minLong.top, -2147483648); 41 | assert.strictEqual(minLong.bottom, 0); 42 | assert.strictEqual(minLong.exactValueString, "-9223372036854775808"); 43 | const nl2 = NumberLong("200"); 44 | assert.strictEqual(maxLong.compare(nl2), 1); 45 | 46 | const decimal = NumberDecimal("1.1"); 47 | assert.strictEqual(decimal.toString(), 'NumberDecimal("1.1")'); 48 | assert.strictEqual(decimal.tojson(), 'NumberDecimal("1.1")'); 49 | 50 | const ts1 = Timestamp(); 51 | assert.strictEqual(ts1.toString(), 'Timestamp(0, 0)'); 52 | const ts2 = Timestamp(100, 200); 53 | assert.strictEqual(ts2.toString(), 'Timestamp(100, 200)'); 54 | const ts3 = Timestamp(1.9, 2.1); 55 | assert.strictEqual(ts3.toString(), 'Timestamp(1, 2)'); 56 | try { 57 | Timestamp(-1, 0); 58 | assert.fail('Should throw for negative time'); 59 | } catch (e) { 60 | assert(e.message.includes('must be non-negative')); 61 | } 62 | try { 63 | Timestamp(0, 5000000000); 64 | assert.fail('Should throw for i > uint32 max'); 65 | } catch (e) { 66 | assert(e.message.includes('not greater than 4294967295')); 67 | } 68 | const ts4 = Timestamp(123, 456); 69 | assert(ts4 instanceof Timestamp); 
70 | assert.strictEqual(ts4.toString(), 'Timestamp(123, 456)'); 71 | assert.strictEqual(ts4.tojson(), 'Timestamp(123, 456)'); 72 | assert.strictEqual(ts4.getTime(), 123); 73 | assert.strictEqual(ts4.getInc(), 456); 74 | assert.strictEqual(ts4._bsontype, 'Timestamp'); 75 | const tsFromBits = Timestamp.fromBits(100, 200); 76 | assert(tsFromBits instanceof Timestamp); 77 | assert.strictEqual(tsFromBits.i, 100); 78 | assert.strictEqual(tsFromBits.t, 200); 79 | assert.strictEqual(tsFromBits.toString(), 'Timestamp(200, 100)'); 80 | const tsFromInt = Timestamp.fromInt(12345); 81 | assert.strictEqual(tsFromInt._bsontype, 'Timestamp'); 82 | assert.strictEqual(tsFromInt.i, 12345); 83 | assert.strictEqual(tsFromInt.t, 0); 84 | const tsFromNum = Timestamp.fromNumber(67890); 85 | assert.strictEqual(tsFromNum._bsontype, 'Timestamp'); 86 | assert.strictEqual(tsFromNum.i, 67890); 87 | assert.strictEqual(tsFromNum.t, 0); 88 | const tsFromStr = Timestamp.fromString('ff', 16); 89 | assert.strictEqual(tsFromStr.i, 255); 90 | assert.strictEqual(tsFromStr.t, 0); 91 | assert.strictEqual(Timestamp.MAX_VALUE._bsontype, 'Long'); 92 | assert.strictEqual(Timestamp.MAX_VALUE, Long.MAX_UNSIGNED_VALUE); 93 | 94 | const id = ObjectId('68ffa28b77bba38c9ddcf376'); 95 | const dbRef = DBRef('testColl', id, 'testDb'); 96 | assert.strictEqual(dbRef.toString(), 'DBRef("testColl", ObjectId("68ffa28b77bba38c9ddcf376"), "testDb")'); 97 | assert.strictEqual(dbRef.tojson(), 'DBRef("testColl", ObjectId("68ffa28b77bba38c9ddcf376"), "testDb")'); 98 | assert.strictEqual(dbRef.$ref, 'testColl'); 99 | assert.strictEqual(dbRef.$id, id); 100 | assert.strictEqual(dbRef.$db, 'testDb'); 101 | const dbRefNoDb = DBRef('testColl', id); 102 | assert.strictEqual(dbRefNoDb.toString(), 'DBRef("testColl", ObjectId("68ffa28b77bba38c9ddcf376"))'); 103 | assert.strictEqual(dbRefNoDb.$db, undefined); 104 | const dbRefStringId = DBRef('testColl', '68ffa28b77bba38c9ddcf376'); 105 | assert.strictEqual(dbRefStringId.toString(), 'DBRef("testColl", "68ffa28b77bba38c9ddcf376")'); 106 | const dbRefForSetters = DBRef('originalColl', id, 'originalDb'); 107 | dbRefForSetters.$ref = 'newColl'; 108 | assert.strictEqual(dbRefForSetters.$ref, 'newColl'); 109 | assert.strictEqual(dbRefForSetters.collection, 'newColl'); 110 | assert.strictEqual(dbRefForSetters.toString(), 'DBRef("newColl", ObjectId("68ffa28b77bba38c9ddcf376"), "originalDb")'); 111 | const newId = ObjectId('507f1f77bcf86cd799439011'); 112 | dbRefForSetters.$id = newId; 113 | assert.strictEqual(dbRefForSetters.$id, newId); 114 | assert.strictEqual(dbRefForSetters.oid, newId); 115 | assert.strictEqual(dbRefForSetters.toString(), 'DBRef("newColl", ObjectId("507f1f77bcf86cd799439011"), "originalDb")'); 116 | dbRefForSetters.$db = 'newDb'; 117 | assert.strictEqual(dbRefForSetters.$db, 'newDb'); 118 | assert.strictEqual(dbRefForSetters.db, 'newDb'); 119 | assert.strictEqual(dbRefForSetters.toString(), 'DBRef("newColl", ObjectId("507f1f77bcf86cd799439011"), "newDb")'); 120 | 121 | try { 122 | tojsonObject("not an object"); 123 | assert.fail('Should throw TypeError for string'); 124 | } catch (e) { 125 | assert(e instanceof TypeError); 126 | assert(e.message.includes('tojsonObject needs object, not [string]')); 127 | } 128 | try { 129 | tojsonObject(true); 130 | assert.fail('Should throw TypeError for boolean'); 131 | } catch (e) { 132 | assert(e.message.includes('tojsonObject needs object, not [boolean]')); 133 | } 134 | assert.strictEqual(typeof tojsonObject({ key: "value" }), 'string'); 135 | 
assert.strictEqual(typeof tojsonObject([1, 2, 3]), 'string'); 136 | 137 | // Test sortedkey parameter 138 | const unsortedObj = { z: 1, a: 2, m: 3 }; 139 | const sortedJson = tojson(unsortedObj, "", true, 0, true); 140 | const unsortedJson = tojson(unsortedObj, "", true, 0, false); 141 | const defaultJson = tojson(unsortedObj); 142 | assert(sortedJson.indexOf('"a"') < sortedJson.indexOf('"m"'), 'sortedJson should be sorted alphabetically'); 143 | assert(sortedJson.indexOf('"m"') < sortedJson.indexOf('"z"'), 'sortedJson should be sorted alphabetically'); 144 | assert(unsortedJson.indexOf('"z"') < unsortedJson.indexOf('"a"'), 'unsortedJson should not be sorted alphabetically'); 145 | assert(defaultJson.indexOf('"z"') < defaultJson.indexOf('"a"'), 'tojson without sortedkey should not sort keys'); 146 | const nestedObj = { b: { y: 1, x: 2 }, a: { z: 1, a: 2 } }; 147 | const sortedNestedJson = tojson(nestedObj, "", true, 0, true); 148 | assert(sortedNestedJson.indexOf('"a"') < sortedNestedJson.indexOf('"b"'), 'sortedkey=true should sort top-level keys'); 149 | assert(sortedNestedJson.indexOf('"a" :') < sortedNestedJson.indexOf('"z" :'), 'sortedkey=true should sort nested keys'); 150 | const objWithBson = { 151 | c: NumberLong(123), 152 | b: ObjectId('0123456789abcdef01234567'), 153 | a: NumberDecimal("1.1") 154 | }; 155 | const sortedBsonJson = tojson(objWithBson, "", true, 0, true); 156 | assert(sortedBsonJson.indexOf('"a"') < sortedBsonJson.indexOf('"b"'), 'sortedkey=true should sort keys with BSON types'); 157 | assert(sortedBsonJson.indexOf('"b"') < sortedBsonJson.indexOf('"c"'), 'sortedkey=true should sort keys with BSON types'); 158 | const arrayWithObjects = [{ z: 1, a: 2 }, { y: 3, b: 4 }]; 159 | const sortedArrayJson = Array.tojson(arrayWithObjects, "", true, 0, true); 160 | const unsortedArrayJson = Array.tojson(arrayWithObjects, "", true, 0, false); 161 | const defaultArrayJson = Array.tojson(arrayWithObjects, "", true, 0); 162 | assert(sortedArrayJson.indexOf('"a"') < sortedArrayJson.indexOf('"z"'), 'Array.tojson with sortedKeys=true should sort object keys in array elements'); 163 | assert(sortedArrayJson.indexOf('"b"') < sortedArrayJson.indexOf('"y"'), 'Array.tojson with sortedKeys=true should sort object keys in array elements'); 164 | assert(unsortedArrayJson.indexOf('"z"') < unsortedArrayJson.indexOf('"a"'), 'Array.tojson with sortedKeys=false should not sort keys'); 165 | assert(defaultArrayJson.indexOf('"z"') < defaultArrayJson.indexOf('"a"'), 'Array.tojson without sortedKeys should not sort keys'); 166 | 167 | // Test MinKey 168 | const minKey = new MinKey(); 169 | assert(minKey instanceof MinKey, "minKey should be an instance of MinKey"); 170 | assert.strictEqual(minKey.tojson(), '{ "$minKey" : 1 }'); 171 | assert.strictEqual(minKey.toString(), "[object Function]"); 172 | assert.strictEqual(minKey.toJSON(), '{ "$minKey" : 1 }'); 173 | 174 | // Test that multiple references return the same instance 175 | const anotherMinKeyRef = new MinKey(); 176 | assert.strictEqual(minKey, anotherMinKeyRef); 177 | assert.strictEqual(MinKey(), MinKey()); 178 | 179 | const serializedBsonMinKey = bson.serialize({ key1: MinKey, key2: MinKey() }); 180 | const deserializedBsonMinKey = bson.deserialize(serializedBsonMinKey); 181 | assert.deepStrictEqual(deserializedBsonMinKey.key1, deserializedBsonMinKey.key2); 182 | 183 | // Test MaxKey 184 | const maxKey = new MaxKey(); 185 | assert(maxKey instanceof MaxKey, "MaxKey should be an instance of MaxKey"); 186 | assert.strictEqual(maxKey.tojson(), '{ 
"$MaxKey" : 1 }'); 187 | assert.strictEqual(maxKey.toString(), "[object Function]"); 188 | assert.strictEqual(maxKey.toJSON(), '{ "$MaxKey" : 1 }'); 189 | 190 | // Test that multiple references return the same instance 191 | const anotherMaxKeyRef = new MaxKey(); 192 | assert.strictEqual(maxKey, anotherMaxKeyRef); 193 | assert.strictEqual(MaxKey(), MaxKey()); 194 | 195 | const serializedBsonMaxKey = bson.serialize({ key1: MaxKey, key2: MaxKey() }); 196 | const deserializedBsonMaxKey = bson.deserialize(serializedBsonMaxKey); 197 | assert.deepStrictEqual(deserializedBsonMaxKey.key1, deserializedBsonMaxKey.key2); 198 | -------------------------------------------------------------------------------- /snippets/llm-command/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@juananpe/snippet-llm-command", 3 | "version": "0.0.9", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "@juananpe/snippet-llm-command", 9 | "version": "0.0.6", 10 | "license": "Apache-2.0", 11 | "dependencies": { 12 | "groq-sdk": "^0.5.0", 13 | "node-fetch-commonjs": "*" 14 | } 15 | }, 16 | "node_modules/@types/node": { 17 | "version": "18.19.46", 18 | "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.46.tgz", 19 | "integrity": "sha512-vnRgMS7W6cKa1/0G3/DTtQYpVrZ8c0Xm6UkLaVFrb9jtcVC3okokW09Ki1Qdrj9ISokszD69nY4WDLRlvHlhAA==", 20 | "dependencies": { 21 | "undici-types": "~5.26.4" 22 | } 23 | }, 24 | "node_modules/@types/node-fetch": { 25 | "version": "2.6.11", 26 | "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.11.tgz", 27 | "integrity": "sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g==", 28 | "dependencies": { 29 | "@types/node": "*", 30 | "form-data": "^4.0.0" 31 | } 32 | }, 33 | "node_modules/abort-controller": { 34 | "version": "3.0.0", 35 | "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", 36 | "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", 37 | "dependencies": { 38 | "event-target-shim": "^5.0.0" 39 | }, 40 | "engines": { 41 | "node": ">=6.5" 42 | } 43 | }, 44 | "node_modules/agentkeepalive": { 45 | "version": "4.5.0", 46 | "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.5.0.tgz", 47 | "integrity": "sha512-5GG/5IbQQpC9FpkRGsSvZI5QYeSCzlJHdpBQntCsuTOxhKD8lqKhrleg2Yi7yvMIf82Ycmmqln9U8V9qwEiJew==", 48 | "dependencies": { 49 | "humanize-ms": "^1.2.1" 50 | }, 51 | "engines": { 52 | "node": ">= 8.0.0" 53 | } 54 | }, 55 | "node_modules/asynckit": { 56 | "version": "0.4.0", 57 | "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", 58 | "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" 59 | }, 60 | "node_modules/combined-stream": { 61 | "version": "1.0.8", 62 | "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", 63 | "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", 64 | "dependencies": { 65 | "delayed-stream": "~1.0.0" 66 | }, 67 | "engines": { 68 | "node": ">= 0.8" 69 | } 70 | }, 71 | "node_modules/delayed-stream": { 72 | "version": "1.0.0", 73 | "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", 74 | "integrity": 
"sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", 75 | "engines": { 76 | "node": ">=0.4.0" 77 | } 78 | }, 79 | "node_modules/event-target-shim": { 80 | "version": "5.0.1", 81 | "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", 82 | "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", 83 | "engines": { 84 | "node": ">=6" 85 | } 86 | }, 87 | "node_modules/form-data": { 88 | "version": "4.0.0", 89 | "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", 90 | "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", 91 | "dependencies": { 92 | "asynckit": "^0.4.0", 93 | "combined-stream": "^1.0.8", 94 | "mime-types": "^2.1.12" 95 | }, 96 | "engines": { 97 | "node": ">= 6" 98 | } 99 | }, 100 | "node_modules/form-data-encoder": { 101 | "version": "1.7.2", 102 | "resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-1.7.2.tgz", 103 | "integrity": "sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A==" 104 | }, 105 | "node_modules/formdata-node": { 106 | "version": "4.4.1", 107 | "resolved": "https://registry.npmjs.org/formdata-node/-/formdata-node-4.4.1.tgz", 108 | "integrity": "sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ==", 109 | "dependencies": { 110 | "node-domexception": "1.0.0", 111 | "web-streams-polyfill": "4.0.0-beta.3" 112 | }, 113 | "engines": { 114 | "node": ">= 12.20" 115 | } 116 | }, 117 | "node_modules/formdata-node/node_modules/web-streams-polyfill": { 118 | "version": "4.0.0-beta.3", 119 | "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz", 120 | "integrity": "sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==", 121 | "engines": { 122 | "node": ">= 14" 123 | } 124 | }, 125 | "node_modules/groq-sdk": { 126 | "version": "0.5.0", 127 | "resolved": "https://registry.npmjs.org/groq-sdk/-/groq-sdk-0.5.0.tgz", 128 | "integrity": "sha512-RVmhW7qZ+XZoy5fIuSdx/LGQJONpL8MHgZEW7dFwTdgkzStub2XQx6OKv28CHogijdwH41J+Npj/z2jBPu3vmw==", 129 | "dependencies": { 130 | "@types/node": "^18.11.18", 131 | "@types/node-fetch": "^2.6.4", 132 | "abort-controller": "^3.0.0", 133 | "agentkeepalive": "^4.2.1", 134 | "form-data-encoder": "1.7.2", 135 | "formdata-node": "^4.3.2", 136 | "node-fetch": "^2.6.7", 137 | "web-streams-polyfill": "^3.2.1" 138 | } 139 | }, 140 | "node_modules/humanize-ms": { 141 | "version": "1.2.1", 142 | "resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz", 143 | "integrity": "sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==", 144 | "dependencies": { 145 | "ms": "^2.0.0" 146 | } 147 | }, 148 | "node_modules/mime-db": { 149 | "version": "1.52.0", 150 | "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", 151 | "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", 152 | "engines": { 153 | "node": ">= 0.6" 154 | } 155 | }, 156 | "node_modules/mime-types": { 157 | "version": "2.1.35", 158 | "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", 159 | "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", 160 | "dependencies": { 161 | 
"mime-db": "1.52.0" 162 | }, 163 | "engines": { 164 | "node": ">= 0.6" 165 | } 166 | }, 167 | "node_modules/ms": { 168 | "version": "2.1.3", 169 | "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", 170 | "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" 171 | }, 172 | "node_modules/node-domexception": { 173 | "version": "1.0.0", 174 | "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", 175 | "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", 176 | "funding": [ 177 | { 178 | "type": "github", 179 | "url": "https://github.com/sponsors/jimmywarting" 180 | }, 181 | { 182 | "type": "github", 183 | "url": "https://paypal.me/jimmywarting" 184 | } 185 | ], 186 | "engines": { 187 | "node": ">=10.5.0" 188 | } 189 | }, 190 | "node_modules/node-fetch": { 191 | "version": "2.7.0", 192 | "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", 193 | "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", 194 | "dependencies": { 195 | "whatwg-url": "^5.0.0" 196 | }, 197 | "engines": { 198 | "node": "4.x || >=6.0.0" 199 | }, 200 | "peerDependencies": { 201 | "encoding": "^0.1.0" 202 | }, 203 | "peerDependenciesMeta": { 204 | "encoding": { 205 | "optional": true 206 | } 207 | } 208 | }, 209 | "node_modules/node-fetch-commonjs": { 210 | "version": "3.3.2", 211 | "resolved": "https://registry.npmjs.org/node-fetch-commonjs/-/node-fetch-commonjs-3.3.2.tgz", 212 | "integrity": "sha512-VBlAiynj3VMLrotgwOS3OyECFxas5y7ltLcK4t41lMUZeaK15Ym4QRkqN0EQKAFL42q9i21EPKjzLUPfltR72A==", 213 | "dependencies": { 214 | "node-domexception": "^1.0.0", 215 | "web-streams-polyfill": "^3.0.3" 216 | }, 217 | "engines": { 218 | "node": "^12.20.0 || ^14.13.1 || >=16.0.0" 219 | }, 220 | "funding": { 221 | "type": "opencollective", 222 | "url": "https://opencollective.com/node-fetch" 223 | } 224 | }, 225 | "node_modules/tr46": { 226 | "version": "0.0.3", 227 | "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", 228 | "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" 229 | }, 230 | "node_modules/undici-types": { 231 | "version": "5.26.5", 232 | "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", 233 | "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" 234 | }, 235 | "node_modules/web-streams-polyfill": { 236 | "version": "3.3.3", 237 | "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz", 238 | "integrity": "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==", 239 | "engines": { 240 | "node": ">= 8" 241 | } 242 | }, 243 | "node_modules/webidl-conversions": { 244 | "version": "3.0.1", 245 | "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", 246 | "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" 247 | }, 248 | "node_modules/whatwg-url": { 249 | "version": "5.0.0", 250 | "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", 251 | "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", 252 | "dependencies": { 253 | "tr46": "~0.0.3", 254 | "webidl-conversions": "^3.0.0" 255 | } 256 
| } 257 | } 258 | } 259 | -------------------------------------------------------------------------------- /snippets/spawn-mongod/LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 
62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | Copyright 2020 MongoDB Inc. 
180 | 181 | Licensed under the Apache License, Version 2.0 (the "License"); 182 | you may not use this file except in compliance with the License. 183 | You may obtain a copy of the License at 184 | 185 | http://www.apache.org/licenses/LICENSE-2.0 186 | 187 | Unless required by applicable law or agreed to in writing, software 188 | distributed under the License is distributed on an "AS IS" BASIS, 189 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 190 | See the License for the specific language governing permissions and 191 | limitations under the License. 192 | 193 | -------------------------------------------------------------------------------- /snippets/uuidhelpers/LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. 
For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | Copyright 2020 MongoDB Inc. 180 | 181 | Licensed under the Apache License, Version 2.0 (the "License"); 182 | you may not use this file except in compliance with the License. 183 | You may obtain a copy of the License at 184 | 185 | http://www.apache.org/licenses/LICENSE-2.0 186 | 187 | Unless required by applicable law or agreed to in writing, software 188 | distributed under the License is distributed on an "AS IS" BASIS, 189 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 190 | See the License for the specific language governing permissions and 191 | limitations under the License. 192 | 193 | -------------------------------------------------------------------------------- /snippets/analyze-schema/LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 
23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. 
If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | Copyright 2020 MongoDB Inc. 180 | 181 | Licensed under the Apache License, Version 2.0 (the "License"); 182 | you may not use this file except in compliance with the License. 183 | You may obtain a copy of the License at 184 | 185 | http://www.apache.org/licenses/LICENSE-2.0 186 | 187 | Unless required by applicable law or agreed to in writing, software 188 | distributed under the License is distributed on an "AS IS" BASIS, 189 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 190 | See the License for the specific language governing permissions and 191 | limitations under the License. 192 | 193 | -------------------------------------------------------------------------------- /snippets/mock-collection/LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 
15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. 
Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 
135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | Copyright 2020 MongoDB Inc. 180 | 181 | Licensed under the Apache License, Version 2.0 (the "License"); 182 | you may not use this file except in compliance with the License. 183 | You may obtain a copy of the License at 184 | 185 | http://www.apache.org/licenses/LICENSE-2.0 186 | 187 | Unless required by applicable law or agreed to in writing, software 188 | distributed under the License is distributed on an "AS IS" BASIS, 189 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 190 | See the License for the specific language governing permissions and 191 | limitations under the License. 
192 | 193 | -------------------------------------------------------------------------------- /snippets/change-streams-monitor/README.md: -------------------------------------------------------------------------------- 1 | # change-stream-monitor 2 | 3 | ## Index 4 | - [change-stream-monitor](#change-stream-monitor) 5 | - [listChangeStreams(extended?: boolean, allUsers?: boolean, nsFilter?: Array)](#listchangestreamsextended-boolean-allusers-boolean-nsfilter-array) 6 | - [Sample Output - Normal Mode](#sample-output---normal-mode) 7 | - [Sample Output - Extended](#sample-output---extended) 8 | - [listChangeStreams.help()](#listchangestreamshelp) 9 | - [listChangeStreamsAsTable(extended?: boolean, allUsers?: boolean, nsFilter?: Array)](#listchangestreamsastableextended-boolean-allusers-boolean-nsfilter-array) 10 | - [listChangeStreamsAsTable.help()](#listchangestreamsastablehelp) 11 | - [listChangeStreamsAsJSON(extended?: boolean, allUsers?: boolean, nsFilter?: Array)](#listchangestreamsasjsonextended-boolean-allusers-boolean-nsfilter-array) 12 | - [listChangeStreamsAsJSON.help()](#listchangestreamsasjsonhelp) 13 | - [listChangeStreamsAsCSV(extended?: boolean, delimiter: string, allUsers?: boolean, nsFilter?: Array)](#listchangestreamsascsvextended-boolean-delimiter-string-allusers-boolean-nsfilter-array) 14 | - [listChangeStreamsAsCSV.help()](#listchangestreamsascsvhelp) 15 | - [prettyPrintChangeStreamPipeline(connectionId: any)](#prettyprintchangestreampipelineconnectionid-any) 16 | - [Example](#example) 17 | - [prettyPrintChangeStreamPipeline.help()](#prettyprintchangestreampipelinehelp) 18 | - [ChangeStreamsData.help()](#changestreamsdatahelp) 19 | - [ExtendedChangeStreamsData.help()](#extendedchangestreamsdatahelp) 20 | 21 | This snippet allows mongosh users to monitor Change Streams on the current server. 22 | 23 | On installation of this snippet, the following are available to the user. 24 | 25 | ## listChangeStreams(extended?: boolean, allUsers?: boolean, nsFilter?: Array) 26 | 27 | Prints a table with the currently open Change Streams. Note that the table resizes itself based on the size of the terminal. 28 | 29 | The behaviour of the function can be controlled with the available parameters (see parameter defaults for default behaviour). See prettyPrintChangeStreamPipeline() to pretty print a change stream pipeline. See ChangeStreamsData and ExtendedChangeStreamsData for data outputted in extended and non-extended mode. 30 | 31 | * *extended* - Controls whether a simple or extended output is presented. Refer to ExtendedChangeStreamsData. Defaults to false. 32 | * *allUsers* - Boolean that corresponds to the allUsers flag of the $currentOp MongoDB Pipeline Stage i.e. If set to false, $currentOp only reports on operations/idle connections/idle cursors/idle sessions belonging to the user who ran the command. If set to true, $currentOp reports operations belonging to all users. Defaults to true. 33 | * *nsFilter* - An optional array of namespace filters. Defaults to [] i.e. no namespace filtering. 34 | 35 | | Column Name | Extended Output | Description | 36 | |----------------|-----------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------| 37 | | ConnID | No | An identifier for the connection where the specific operation originated. | 38 | | AppName | No | The identifier of the client application which ran the operation.
Use the appName connection string option to set a custom value for the appName field. | 39 | | Remote | No | The IP address (or hostname) and the ephemeral port of the client connection where the operation originates. | 40 | | Driver | No | The MongoDB Driver used to connect and run the Change Stream. | 41 | | NS | No | The namespace the operation targets. A namespace consists of the database name and the collection name concatenated with a dot (.); that is, ".". | 42 | | Type | No | The type of operation. Values are either: op / idleSession / idleCursor. | 43 | | Pipeline | No | The Change Stream pipeline. Use prettyPrintChangeStreamPipeline(connId) to pretty print the full pipeline. | 44 | | LastAccessDate | No | The date and time when the cursor was last used. | 45 | | Docs Returned | No | The cumulative number of documents returned by the cursor. | 46 | | Active | Yes | A boolean value specifying whether the operation has started. | 47 | | User | Yes | Users associated with the operation | 48 | | CursorId | Yes | The ID of the cursor. | 49 | | CreatedDate | Yes | The date and time when the cursor was created. | 50 | 51 | 52 | ### Sample Output - Normal Mode 53 | 54 | ``` 55 | replset [primary] test> listChangeStreams() 56 | ┏━━━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━┓ 57 | ┃ ConnID ┃ AppName ┃ Remote ┃ Driver ┃ NS ┃ Type ┃ Pipeline ┃ LastAccess ┃ DocsReturn ┃ 58 | ┃ ┃ ┃ ┃ ┃ ┃ ┃ ┃ Date ┃ ed ┃ 59 | ┡━━━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━┩ 60 | │ 74 │ cs2 │ 127.0.0.1: │ mongo-java │ test.event │ op │ [ │ "2024-04-2 │ 0 │ 61 | │ │ │ 54989 │ -driver|sy │ s │ │ { │ 2T13:23:10 │ │ 62 | │ │ │ │ nc: 4.9.1 │ │ │ "$changeSt │ .160Z" │ │ 63 | │ │ │ │ │ │ │ ream": {} │ │ │ 64 | │ │ │ │ │ │ │ }, │ │ │ 65 | │ │ │ │ │ │ │ { │ │ │ 66 | │ │ │ │ │ │ │ "$match": │ │ │ 67 | │ │ │ │ │ │ │ { │ │ │ 68 | │ │ │ │ │ │ │ "operation │ │ │ 69 | │ │ │ │ │ │ │ Type": { │ │ │ 70 | │ │ │ │ │ │ │ "$in": [ │ │ │ 71 | │ │ │ │ │ │ │ "insert", │ │ │ 72 | │ │ │ │ │ │ │ "update" │ │ │ 73 | │ │ │ │ │ │ │ ] │ │ │ 74 | │ │ │ │ │ │ │ } │ │ │ 75 | │ │ │ │ │ │ │ } │ │ │ 76 | │ │ │ │ │ │ │ } │ │ │ 77 | │ │ │ │ │ │ │ ] │ │ │ 78 | ├───────────┼────────────┼──────────────┼──────────────┼──────────────┼────────┼──────────────┼──────────────┼──────────────┤ 79 | │ 79 │ cs1 │ 127.0.0.1: │ mongo-java │ test.event │ op │ [ │ "2024-04-2 │ 0 │ 80 | │ │ │ 55011 │ -driver|sy │ s │ │ { │ 2T13:23:10 │ │ 81 | │ │ │ │ nc: 4.9.1 │ │ │ "$changeSt │ .181Z" │ │ 82 | │ │ │ │ │ │ │ ream": {} │ │ │ 83 | │ │ │ │ │ │ │ }, │ │ │ 84 | │ │ │ │ │ │ │ { │ │ │ 85 | │ │ │ │ │ │ │ "$match": │ │ │ 86 | │ │ │ │ │ │ │ { │ │ │ 87 | │ │ │ │ │ │ │ "operation │ │ │ 88 | │ │ │ │ │ │ │ Type": { │ │ │ 89 | │ │ │ │ │ │ │ "$in": [ │ │ │ 90 | │ │ │ │ │ │ │ "insert", │ │ │ 91 | │ │ │ │ │ │ │ "update" │ │ │ 92 | │ │ │ │ │ │ │ ] │ │ │ 93 | │ │ │ │ │ │ │ } │ │ │ 94 | │ │ │ │ │ │ │ } │ │ │ 95 | │ │ │ │ │ │ │ } │ │ │ 96 | │ │ │ │ │ │ │ ] │ │ │ 97 | └───────────┴────────────┴──────────────┴──────────────┴──────────────┴────────┴──────────────┴──────────────┴──────────────┘ 98 | Found 2 change streams 99 | ``` 100 | 101 | ### Sample Output - Extended 102 | 103 | ``` 104 | replset [primary] test> listChangeStreams(true) 105 | ┏━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━━┓ 106 | ┃ ConnID ┃ AppNam ┃ Remote ┃ Driver ┃ NS ┃ Type ┃ 
Pipeli ┃ LastAc ┃ DocsRe ┃ Active ┃ User ┃ Cursor ┃ Create ┃ 107 | ┃ ┃ e ┃ ┃ ┃ ┃ ┃ ne ┃ cessDa ┃ turned ┃ ┃ ┃ Id ┃ dDate ┃ 108 | ┃ ┃ ┃ ┃ ┃ ┃ ┃ ┃ te ┃ ┃ ┃ ┃ ┃ ┃ 109 | ┡━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━━┩ 110 | │ 74 │ cs2 │ 127.0. │ mongo- │ test.e │ op │ [ │ "2024- │ 0 │ true │ john@a │ 754369 │ "2024- │ 111 | │ │ │ 0.1:54 │ java-d │ vents │ │ { │ 04-22T │ │ │ dmin │ 716098 │ 04-22T │ 112 | │ │ │ 989 │ river| │ │ │ "$chan │ 13:24: │ │ │ │ 703700 │ 12:15: │ 113 | │ │ │ │ sync: │ │ │ geStre │ 25.528 │ │ │ │ 0 │ 31.896 │ 114 | │ │ │ │ 4.9.1 │ │ │ am": │ Z" │ │ │ │ │ Z" │ 115 | │ │ │ │ │ │ │ {} │ │ │ │ │ │ │ 116 | │ │ │ │ │ │ │ }, │ │ │ │ │ │ │ 117 | │ │ │ │ │ │ │ { │ │ │ │ │ │ │ 118 | │ │ │ │ │ │ │ "$matc │ │ │ │ │ │ │ 119 | │ │ │ │ │ │ │ h": { │ │ │ │ │ │ │ 120 | │ │ │ │ │ │ │ "opera │ │ │ │ │ │ │ 121 | │ │ │ │ │ │ │ tionTy │ │ │ │ │ │ │ 122 | │ │ │ │ │ │ │ pe": { │ │ │ │ │ │ │ 123 | │ │ │ │ │ │ │ "$in": │ │ │ │ │ │ │ 124 | │ │ │ │ │ │ │ [ │ │ │ │ │ │ │ 125 | │ │ │ │ │ │ │ "inser │ │ │ │ │ │ │ 126 | │ │ │ │ │ │ │ t", │ │ │ │ │ │ │ 127 | │ │ │ │ │ │ │ "updat │ │ │ │ │ │ │ 128 | │ │ │ │ │ │ │ e" │ │ │ │ │ │ │ 129 | │ │ │ │ │ │ │ ] │ │ │ │ │ │ │ 130 | │ │ │ │ │ │ │ } │ │ │ │ │ │ │ 131 | │ │ │ │ │ │ │ } │ │ │ │ │ │ │ 132 | │ │ │ │ │ │ │ } │ │ │ │ │ │ │ 133 | │ │ │ │ │ │ │ ] │ │ │ │ │ │ │ 134 | ├──────────┼──────────┼──────────┼──────────┼──────────┼────────┼──────────┼──────────┼──────────┼──────────┼──────────┼──────────┼──────────┤ 135 | │ 79 │ cs1 │ 127.0. │ mongo- │ test.e │ op │ [ │ "2024- │ 0 │ true │ mary@a │ 697267 │ "2024- │ 136 | │ │ │ 0.1:55 │ java-d │ vents │ │ { │ 04-22T │ │ │ dmin │ 149292 │ 04-22T │ 137 | │ │ │ 011 │ river| │ │ │ "$chan │ 13:24: │ │ │ │ 716100 │ 12:16: │ 138 | │ │ │ │ sync: │ │ │ geStre │ 25.542 │ │ │ │ 0 │ 01.889 │ 139 | │ │ │ │ 4.9.1 │ │ │ am": │ Z" │ │ │ │ │ Z" │ 140 | │ │ │ │ │ │ │ {} │ │ │ │ │ │ │ 141 | │ │ │ │ │ │ │ }, │ │ │ │ │ │ │ 142 | │ │ │ │ │ │ │ { │ │ │ │ │ │ │ 143 | │ │ │ │ │ │ │ "$matc │ │ │ │ │ │ │ 144 | │ │ │ │ │ │ │ h": { │ │ │ │ │ │ │ 145 | │ │ │ │ │ │ │ "opera │ │ │ │ │ │ │ 146 | │ │ │ │ │ │ │ tionTy │ │ │ │ │ │ │ 147 | │ │ │ │ │ │ │ pe": { │ │ │ │ │ │ │ 148 | │ │ │ │ │ │ │ "$in": │ │ │ │ │ │ │ 149 | │ │ │ │ │ │ │ [ │ │ │ │ │ │ │ 150 | │ │ │ │ │ │ │ "inser │ │ │ │ │ │ │ 151 | │ │ │ │ │ │ │ t", │ │ │ │ │ │ │ 152 | │ │ │ │ │ │ │ "updat │ │ │ │ │ │ │ 153 | │ │ │ │ │ │ │ e" │ │ │ │ │ │ │ 154 | │ │ │ │ │ │ │ ] │ │ │ │ │ │ │ 155 | │ │ │ │ │ │ │ } │ │ │ │ │ │ │ 156 | │ │ │ │ │ │ │ } │ │ │ │ │ │ │ 157 | │ │ │ │ │ │ │ } │ │ │ │ │ │ │ 158 | │ │ │ │ │ │ │ ] │ │ │ │ │ │ │ 159 | └──────────┴──────────┴──────────┴──────────┴──────────┴────────┴──────────┴──────────┴──────────┴──────────┴──────────┴──────────┴──────────┘ 160 | Found 2 change streams 161 | ``` 162 | 163 | ## listChangeStreams.help() 164 | Provides help on how to use the function. 165 | 166 | ## listChangeStreamsAsTable(extended?: boolean, allUsers?: boolean, nsFilter?: Array) 167 | Alias for `listChangeStreams(extended?: boolean, allUsers?: boolean, nsFilter?: Array)` 168 | 169 | ## listChangeStreamsAsTable.help() 170 | Provides help on how to use the function. Alias for `listChangeStreams.help()` 171 | 172 | ## listChangeStreamsAsJSON(extended?: boolean, allUsers?: boolean, nsFilter?: Array) 173 | Prints the currently open Change Streams as a JSON string. A JSON string is printed separately on a newline for each open Change Stream. 
The behaviour of the function can be controlled with the available parameters (see parameter defaults for default behaviour). See documentation for `listChangeStreams(extended?: boolean, allUsers?: boolean, nsFilter?: Array)` for more details about the available parameters. 174 | 175 | ## listChangeStreamsAsJSON.help() 176 | Provides help on how to use the function. 177 | 178 | ## listChangeStreamsAsCSV(extended?: boolean, delimiter: string, allUsers?: boolean, nsFilter?: Array) 179 | Prints the currently open Change Streams as a CSV string with "||||" as the default delimiter. A string is printed separately on a newline for each open Change Stream. The behaviour of the function can be controlled with the available parameters (see parameter defaults for default behaviour). The delimiter parameter allows overriding the default delimiter. See documentation for `listChangeStreams(extended?: boolean, allUsers?: boolean, nsFilter?: Array)` for more details about the other available parameters. 180 | 181 | ## listChangeStreamsAsCSV.help() 182 | Provides help on how to use the function. 183 | 184 | ## prettyPrintChangeStreamPipeline(connectionId: any) 185 | 186 | Pretty prints the Change Stream pipeline for a given Connection ID. 187 | * *connectionId* - The connection ID where the change stream is executing. 188 | 189 | ### Example 190 | 191 | ``` 192 | replset [primary] test> prettyPrintChangeStreamPipeline(74) 193 | [ 194 | { '$changeStream': {} }, 195 | { 196 | '$match': { operationType: { '$in': [ 'insert', 'update' ] } } 197 | } 198 | ] 199 | ``` 200 | 201 | ## prettyPrintChangeStreamPipeline.help() 202 | Provides help on how to use the function. 203 | 204 | ## ChangeStreamsData.help() 205 | Describes the table output in normal mode. 206 | 207 | ## ExtendedChangeStreamsData.help() 208 | Describes the table output in extended mode.
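The functions above can also be combined in a single mongosh session. The sketch below is illustrative only and assumes a replica set connection (hence the `replset [primary] test>` prompt); the `test.events` namespace, the `;` delimiter, and the connection ID `74` are example values, not defaults provided by the snippet.

```
// list only change streams opened against test.events, for all users
replset [primary] test> listChangeStreams(false, true, ['test.events'])

// same data as machine-readable output, one JSON string per change stream (extended fields included)
replset [primary] test> listChangeStreamsAsJSON(true)

// CSV output with a custom delimiter instead of the default "||||"
replset [primary] test> listChangeStreamsAsCSV(false, ';')

// pretty print the pipeline of the change stream running on connection 74
replset [primary] test> prettyPrintChangeStreamPipeline(74)
```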
-------------------------------------------------------------------------------- /snippets/change-streams-monitor/changestreammonitor.js: -------------------------------------------------------------------------------- 1 | const localRequire = require("module").createRequire(__filename); 2 | const { Table } = localRequire("to-tabel"); 3 | const { templates } = localRequire("boks"); 4 | 5 | const OutputTypeEnum = { 6 | TABLE: 'TABLE', 7 | JSON: 'JSON', 8 | CSV : 'CSV' 9 | }; 10 | 11 | const PipelineFormatEnum = { 12 | EJSON : 'EJSON', 13 | NONE: 'NONE', 14 | JSON: 'JSON' 15 | }; 16 | 17 | const DEFAULT_DELIMITER="||||" 18 | 19 | function _listChangeStreams (extended = false, allUsers = true, nsFilter = [], outputType = OutputTypeEnum.TABLE, pipelineFormat=PipelineFormatEnum.JSON, delimiter=DEFAULT_DELIMITER) { 20 | tableData = []; 21 | let changeStreamsDataRaw = getChangeStreams(allUsers, nsFilter); 22 | 23 | changeStreamsDataRaw.forEach(changeStreamOpData => { 24 | let clientDriver = "N/A"; 25 | try { 26 | clientDriver = 27 | changeStreamOpData.clientMetadata.driver.name + 28 | ": " + 29 | changeStreamOpData.clientMetadata.driver.version; 30 | } catch (error) {} 31 | 32 | //format the pipeline for better rendering 33 | let changeStreamPipeline = "" 34 | switch (pipelineFormat){ 35 | case PipelineFormatEnum.EJSON: 36 | changeStreamPipeline = EJSON.stringify(changeStreamOpData.cursor.originatingCommand.pipeline, null,1) 37 | break; 38 | case PipelineFormatEnum.JSON: 39 | changeStreamPipeline = JSON.stringify(changeStreamOpData.cursor.originatingCommand.pipeline) 40 | break; 41 | case PipelineFormatEnum.NONE: 42 | changeStreamPipeline = changeStreamOpData.cursor.originatingCommand.pipeline 43 | break; 44 | default: 45 | throw new Error("Internal Error: Unexepected PipelineFormatEnum value " + pipelineFormat) 46 | } 47 | 48 | let usersStr = ""; 49 | if (changeStreamOpData.effectiveUsers){ 50 | changeStreamOpData.effectiveUsers.forEach(user => { 51 | if (usersStr !== "") usersStr+= "; "; 52 | usersStr = usersStr + user.user + "@" + user.db; 53 | }); 54 | } 55 | 56 | if (extended) { 57 | tableData.push( 58 | new ExtendedChangeStreamsData( 59 | changeStreamOpData.connectionId, 60 | changeStreamOpData.appName, 61 | changeStreamOpData.client, 62 | clientDriver, 63 | changeStreamOpData.ns, 64 | changeStreamOpData.type, 65 | changeStreamPipeline, 66 | changeStreamOpData.cursor.lastAccessDate, 67 | changeStreamOpData.cursor.nDocsReturned, 68 | changeStreamOpData.active, 69 | usersStr, 70 | changeStreamOpData.cursor.cursorId, 71 | changeStreamOpData.cursor.createdDate 72 | ) 73 | ); 74 | } else { 75 | tableData.push( 76 | new ChangeStreamsData( 77 | changeStreamOpData.connectionId, 78 | changeStreamOpData.appName, 79 | changeStreamOpData.client, 80 | clientDriver, 81 | changeStreamOpData.ns, 82 | changeStreamOpData.type, 83 | changeStreamPipeline, 84 | changeStreamOpData.cursor.lastAccessDate, 85 | changeStreamOpData.cursor.nDocsReturned 86 | ) 87 | ); 88 | } 89 | 90 | }) 91 | 92 | switch (outputType){ 93 | case OutputTypeEnum.TABLE: 94 | generateTableOutput(tableData, extended); 95 | print("Found " + changeStreamsDataRaw.length + " change streams"); 96 | break; 97 | case OutputTypeEnum.JSON: 98 | generateJsonOutput(tableData, extended); 99 | break; 100 | case OutputTypeEnum.CSV: 101 | generateCsvOutput(tableData, extended, delimiter); 102 | break; 103 | default: 104 | throw new Error("Internal Error: Unexepected OutputTypeEnum value " + outputType) 105 | } 106 | }; 107 | 108 | function 
_listChangeStreamsHelp(){ 109 | print("listChangeStreams(extended?: boolean, allUsers?: boolean, nsFilter?: any): void") 110 | print("listChangeStreamsAsTable(extended?: boolean, allUsers?: boolean, nsFilter?: any): void") 111 | print("Prints a table with the currently open Change Streams. The behaviour of the function can be controlled with the available parameters (see parameter defaults for default behaviour).") 112 | print("\t See prettyPrintChangeStreamPipeline.help() to pretty print a change stream pipeline. ") 113 | print("\t See ChangeStreamsData.help() and ExtendedChangeStreamsData.help() for data outputted in extended and non-extended mode.") 114 | print("\t @param extended — Controls whether a simple or extended output is presented. Refer to ExtendedChangeStreamsData. Defaults to false.") 115 | print("\t @param allUsers — Boolean that correspond's to the allUsers flag of the $currentOp MongoDB Pipeline Stage i.e. If set to false, $currentOp only reports on operations/idle connections/idle cursors/idle sessions belonging to the user who ran the command. If set to true, $currentOp reports operations belonging to all users. Defaults to true.") 116 | print("\t @param nsFilter — An optional array of namespace filter. Defaults to [] i.e. to filter.") 117 | } 118 | 119 | /** 120 | * Prints a table with the currently open Change Streams. The behaviour of the function can be controlled with the available parameters (see parameter defaults for default behaviour). 121 | * See prettyPrintChangeStreamPipeline() to pretty print a change stream pipeline. 122 | * See ChangeStreamsData and ExtendedChangeStreamsData for data outputted in extended and non-extended mode. 123 | * @param {boolean} extended Controls whether a simple or extended output is presented. Refer to ExtendedChangeStreamsData. Defaults to false. 124 | * @param {boolean} allUsers Boolean that correspond's to the allUsers flag of the $currentOp MongoDB Pipeline Stage i.e. 125 | * If set to false, $currentOp only reports on operations/idle connections/idle cursors/idle sessions belonging to the user who ran the command. 126 | * If set to true, $currentOp reports operations belonging to all users. 127 | * Defaults to true. 128 | * @param {Array.} nsFilter An optional array of namespace filter. Defaults to [] i.e. to filter. 129 | */ 130 | globalThis.listChangeStreams = function (extended = false, allUsers = true, nsFilter = []) {_listChangeStreams(extended, allUsers, nsFilter, OutputTypeEnum.TABLE, PipelineFormatEnum.EJSON);} 131 | globalThis.listChangeStreams.help = function () {_listChangeStreamsHelp();} 132 | 133 | /** 134 | * Alias for {@link listChangeStreams} 135 | */ 136 | globalThis.listChangeStreamsAsTable = globalThis.listChangeStreams 137 | globalThis.listChangeStreamsAsTable.help = function () {_listChangeStreamsHelp();} 138 | 139 | 140 | function _listChangeStreamsAsJSONHelp(){ 141 | print("listChangeStreamsAsJSON(extended?: boolean, allUsers?: boolean, nsFilter?: any): void") 142 | print("Prints the currently open Change Streams as a JSON string. A JSON string is printed separately on a newline for each open Change Stream. The behaviour of the function can be controlled with the available parameters (see parameter defaults for default behaviour).") 143 | print("\t See prettyPrintChangeStreamPipeline() to pretty print a change stream pipeline. 
") 144 | print("\t See ChangeStreamsData.help() and ExtendedChangeStreamsData.help() for data outputted in extended and non-extended mode.") 145 | print("\t @param extended — Controls whether a simple or extended output is presented. Refer to ExtendedChangeStreamsData. Defaults to false.") 146 | print("\t @param allUsers — Boolean that correspond's to the allUsers flag of the $currentOp MongoDB Pipeline Stage i.e. If set to false, $currentOp only reports on operations/idle connections/idle cursors/idle sessions belonging to the user who ran the command. If set to true, $currentOp reports operations belonging to all users. Defaults to true.") 147 | print("\t @param nsFilter — An optional array of namespace filter. Defaults to [] i.e. to filter.") 148 | } 149 | 150 | /** 151 | * Prints the currently open Change Streams as a JSON string. A JSON string is printed separately on a newline for each open Change Stream. The behaviour of the function can be controlled with the available parameters (see parameter defaults for default behaviour). 152 | * See prettyPrintChangeStreamPipeline() to pretty print a change stream pipeline. 153 | * See ChangeStreamsData and ExtendedChangeStreamsData for data outputted in extended and non-extended mode. 154 | * @param {boolean} extended Controls whether a simple or extended output is presented. Refer to ExtendedChangeStreamsData. Defaults to false. 155 | * @param {boolean} allUsers Boolean that correspond's to the allUsers flag of the $currentOp MongoDB Pipeline Stage i.e. 156 | * If set to false, $currentOp only reports on operations/idle connections/idle cursors/idle sessions belonging to the user who ran the command. 157 | * If set to true, $currentOp reports operations belonging to all users. 158 | * Defaults to true. 159 | * @param {Array.} nsFilter An optional array of namespace filter. Defaults to [] i.e. to filter. 160 | */ 161 | globalThis.listChangeStreamsAsJSON = function (extended = false, allUsers = true, nsFilter = []) {_listChangeStreams(extended, allUsers, nsFilter, OutputTypeEnum.JSON, PipelineFormatEnum.NONE);} 162 | globalThis.listChangeStreamsAsJSON.help = function () {_listChangeStreamsAsJSONHelp();} 163 | 164 | 165 | function _listChangeStreamsAsCSVHelp(){ 166 | print("listChangeStreamsAsJSON(extended?: boolean, delimiter?: string, allUsers?: boolean, nsFilter?: any): void") 167 | print("Prints the currently open Change Streams as a CSV string with \"" + DEFAULT_DELIMITER + "\" as the default delimeter. A string is printed separately on a newline for each open Change Stream. The behaviour of the function can be controlled with the available parameters (see parameter defaults for default behaviour). ") 168 | print("\t See prettyPrintChangeStreamPipeline() to pretty print a change stream pipeline. ") 169 | print("\t See ChangeStreamsData.help() and ExtendedChangeStreamsData.help() for data outputted in extended and non-extended mode.") 170 | print("\t @param extended — Controls whether a simple or extended output is presented. Refer to ExtendedChangeStreamsData. Defaults to false.") 171 | print("\t @param delimiter — Provide a custom delimeter for the CSV string. Defaults to \"" + DEFAULT_DELIMITER + "\"") 172 | print("\t @param allUsers — Boolean that correspond's to the allUsers flag of the $currentOp MongoDB Pipeline Stage i.e. If set to false, $currentOp only reports on operations/idle connections/idle cursors/idle sessions belonging to the user who ran the command. If set to true, $currentOp reports operations belonging to all users. 
Defaults to true.") 173 | print("\t @param nsFilter — An optional array of namespace filter. Defaults to [] i.e. to filter.") 174 | } 175 | /** 176 | * Prints the currently open Change Streams as a CSV string with "||||" as the default delimeter. A string is printed separately on a newline for each open Change Stream. The behaviour of the function can be controlled with the available parameters (see parameter defaults for default behaviour). 177 | * See prettyPrintChangeStreamPipeline() to pretty print a change stream pipeline. 178 | * See ChangeStreamsData and ExtendedChangeStreamsData for data outputted in extended and non-extended mode. 179 | * @param {boolean} extended Controls whether a simple or extended output is presented. Refer to ExtendedChangeStreamsData. Defaults to false. 180 | * @param {string} delimiter Provide a custom delimeter for the CSV string 181 | * @param {boolean} allUsers Boolean that correspond's to the allUsers flag of the $currentOp MongoDB Pipeline Stage i.e. 182 | * If set to false, $currentOp only reports on operations/idle connections/idle cursors/idle sessions belonging to the user who ran the command. 183 | * If set to true, $currentOp reports operations belonging to all users. 184 | * Defaults to true. 185 | * @param {Array.} nsFilter An optional array of namespace filter. Defaults to [] i.e. to filter. 186 | */ 187 | globalThis.listChangeStreamsAsCSV = function (extended = false, delimiter=DEFAULT_DELIMITER, allUsers = true, nsFilter = []) {_listChangeStreams(extended, allUsers, nsFilter, OutputTypeEnum.CSV, PipelineFormatEnum.JSON, delimiter);} 188 | globalThis.listChangeStreamsAsCSV.help = function () {_listChangeStreamsAsCSVHelp();} 189 | 190 | 191 | /** 192 | * @class Contains the data that will be presented in tabular format. This is the basic data set - @see {ExtendedChangeStreamsData} for the extended version. 193 | * @param {*} connId An identifier for the connection where the specific operation originated. 194 | * @param {*} appName The identifier of the client application which ran the operation. Use the appName connection string option to set a custom value for the appName field. 195 | * @param {*} clientIp The IP address (or hostname) and the ephemeral port of the client connection where the operation originates. 196 | * @param {*} clientDriver The MongoDB Driver used to connect and run the Change Stream. 197 | * @param {*} ns The namespace the operation targets. A namespace consists of the database name and the collection name concatenated with a dot (.); that is, ".". 198 | * @param {*} type The type of operation. Values are either: op / idleSession / idleCursor. 199 | * @param {*} pipeline The Change Stream pipeline. Use prettyPrintChangeStreamPipeline(connId) to pretty print the full pipeline. 200 | * @param {*} lastAccessDate The date and time when the cursor was last used. 201 | * @param {*} nDocsReturned The cumulative number of documents returned by the cursor. 
202 | */ 203 | class ChangeStreamsData { 204 | constructor( 205 | connId, 206 | appName, 207 | clientIp, 208 | clientDriver, 209 | ns, 210 | type, 211 | pipeline, 212 | lastAccessDate, 213 | nDocsReturned 214 | ) { 215 | this.connId = connId; 216 | this.appName = appName; 217 | this.clientIp = clientIp; 218 | this.clientDriver = clientDriver; 219 | this.ns = ns; 220 | this.type = type; 221 | this.pipeline = pipeline; 222 | this.lastAccessDate = lastAccessDate; 223 | 224 | if (nDocsReturned && nDocsReturned instanceof Long) { 225 | this.nDocsReturned = nDocsReturned.toNumber(); 226 | } else { 227 | this.nDocsReturned = nDocsReturned; 228 | } 229 | } 230 | 231 | static headers() { 232 | return [ 233 | { name: "connId", printName: "ConnID", description: "An identifier for the connection where the specific operation originated." }, 234 | { name: "appName", printName: "AppName", description: "The identifier of the client application which ran the operation. Use the appName connection string option to set a custom value for the appName field." }, 235 | { name: "clientIp", printName: "Remote", description: "The IP address (or hostname) and the ephemeral port of the client connection where the operation originates." }, 236 | { name: "clientDriver", printName: "Driver", description: "The MongoDB Driver used to connect and run the Change Stream." }, 237 | { name: "ns", printName: "NS", description: "The namespace the operation targets. A namespace consists of the database name and the collection name concatenated with a dot (.); that is, \".\"." }, 238 | { name: "type", printName: "Type", description: "The type of operation. Values are either: op / idleSession / idleCursor." }, 239 | { name: "pipeline", printName: "Pipeline", description: "The Change Stream pipeline. Use prettyPrintChangeStreamPipeline(connId) to pretty print the full pipeline." }, 240 | { name: "lastAccessDate", printName: "LastAccessDate", description: "The date and time when the cursor was last used." }, 241 | { name: "nDocsReturned", printName: "DocsReturned", description: "The cumulative number of documents returned by the cursor." }, 242 | ]; 243 | } 244 | 245 | static help(){ 246 | const options = { 247 | maxSize: process.stdout.columns - 10, 248 | borders: [templates.bold, templates.single], 249 | columns: [{name: "printName", printName: "Column Name"}, {name: "description", printName: "Description"}], 250 | maxDepth: 1, 251 | fill: true, 252 | inclusive: true, 253 | }; 254 | 255 | let newTbl = new Table(ChangeStreamsData.headers(), options); 256 | newTbl.print(); 257 | } 258 | 259 | toCsvString(delimiter){ 260 | return this.constructor.headers().reduce( 261 | (accumulator, currentValue) => accumulator === "" ? 
this[currentValue.name] : accumulator + delimiter + this[currentValue.name], 262 | "" 263 | ) 264 | } 265 | }; 266 | 267 | globalThis.ChangeStreamsData = ChangeStreamsData; 268 | 269 | /** 270 | * @class 271 | * @extends {ChangeStreamsData} 272 | * @param {*} connId @see {ChangeStreamsData#connId} 273 | * @param {*} appName @see {ChangeStreamsData#appName} 274 | * @param {*} clientIp @see {ChangeStreamsData#clientIp} 275 | * @param {*} clientDriver @see {ChangeStreamsData#clientDriver} 276 | * @param {*} ns @see {ChangeStreamsData#ns} 277 | * @param {*} type @see {ChangeStreamsData#type} 278 | * @param {*} pipeline @see {ChangeStreamsData#pipeline} 279 | * @param {*} lastAccessDate @see {ChangeStreamsData#lastAccessDate} 280 | * @param {*} nDocsReturned @see {ChangeStreamsData#nDocsReturned} 281 | * @param {*} active A boolean value specifying whether the operation has started. 282 | * @param {*} users Users associated with the operation 283 | * @param {*} cursorId The ID of the cursor. 284 | * @param {*} createdDate The date and time when the cursor was created. 285 | */ 286 | class ExtendedChangeStreamsData extends ChangeStreamsData { 287 | constructor( 288 | connId, 289 | appName, 290 | clientIp, 291 | clientDriver, 292 | ns, 293 | type, 294 | pipeline, 295 | lastAccessDate, 296 | nDocsReturned, 297 | active, 298 | users, 299 | cursorId, 300 | createdDate 301 | ) { 302 | super( 303 | connId, 304 | appName, 305 | clientIp, 306 | clientDriver, 307 | ns, 308 | type, 309 | pipeline, 310 | lastAccessDate, 311 | nDocsReturned 312 | ); 313 | this.active = active; 314 | this.users = users; 315 | 316 | if (cursorId && cursorId instanceof Long) { 317 | this.cursorId = cursorId.toNumber(); 318 | } else { 319 | this.cursorId = cursorId; 320 | } 321 | 322 | this.createdDate = createdDate; 323 | } 324 | 325 | static headers() { 326 | return ChangeStreamsData.headers().concat([ 327 | { name: "active", printName: "Active", description: "A boolean value specifying whether the operation has started." }, 328 | { name: "users", printName: "User", description: "Users associated with the operation" }, 329 | { name: "cursorId", printName: "CursorId", description: "The ID of the cursor." }, 330 | { name: "createdDate", printName: "CreatedDate", description: "The date and time when the cursor was created." }, 331 | ]); 332 | } 333 | 334 | static help(){ 335 | const options = { 336 | maxSize: process.stdout.columns - 10, 337 | borders: [templates.bold, templates.single], 338 | columns: [{name: "printName", printName: "Column Name"}, {name: "description", printName: "Description"}], 339 | maxDepth: 1, 340 | fill: true, 341 | inclusive: true, 342 | }; 343 | 344 | let newTbl = new Table(ExtendedChangeStreamsData.headers(), options); 345 | newTbl.print(); 346 | } 347 | }; 348 | 349 | globalThis.ExtendedChangeStreamsData = ExtendedChangeStreamsData; 350 | 351 | /** 352 | * Retrieves the currently open change streams by running the $currentOp aggregation stage on the admin database. 353 | * @param {*} allUsers Boolean that correspond's to the allUsers flag of the $currentOp MongoDB Pipeline Stage i.e. 354 | * If set to false, $currentOp only reports on operations/idle connections/idle cursors/idle sessions belonging to the user who ran the command. 355 | * If set to true, $currentOp reports operations belonging to all users. 356 | * Defailts to true. 357 | * @param {Array.} nsFilter An optional array of namespace filter. Defaults to [] i.e. to filter. 
358 | * @returns currently open change streams by running the $currentOp aggregation stage on the admin database 359 | */ 360 | globalThis.getChangeStreams = function (allUsers, nsFilter) { 361 | //define admin pipeline to extract changestreams 362 | let idleConnections = true; 363 | let idleCursors = true; 364 | let idleSessions = true; 365 | let backtrace = true; 366 | let localOps = true; 367 | 368 | let pipeline = [ 369 | { 370 | $currentOp: { 371 | allUsers: allUsers, 372 | idleConnections: idleConnections, 373 | idleCursors: idleCursors, 374 | idleSessions: idleSessions, 375 | backtrace: backtrace, 376 | localOps: localOps, 377 | }, 378 | } 379 | ] 380 | 381 | let match = { 382 | $match: { 383 | "cursor.tailable": true, 384 | "cursor.originatingCommand.pipeline.0.$changeStream": { 385 | $exists: true, 386 | }, 387 | }, 388 | } 389 | if (nsFilter && Array.isArray(nsFilter) && nsFilter.length > 0){ 390 | match['$match'].ns = {'$in' : nsFilter} 391 | } 392 | pipeline.push(match) 393 | 394 | //excute pipeline 395 | let changeStreamsDataRaw = db 396 | .getSiblingDB("admin") 397 | .aggregate(pipeline) 398 | .toArray(); 399 | return changeStreamsDataRaw; 400 | }; 401 | 402 | /** 403 | * Generates a table for the extracted changestream data 404 | * @param {*} data The data to be displayed in a table 405 | * @param {boolean} extended Whether the extended output format is being used. This is used to generate the output table headers. 406 | */ 407 | globalThis.generateTableOutput = function (data, extended) { 408 | if (data && data.length > 0) { 409 | const options = { 410 | maxSize: process.stdout.columns - 10, 411 | borders: [templates.bold, templates.single], 412 | columns: extended 413 | ? ExtendedChangeStreamsData.headers() 414 | : ChangeStreamsData.headers(), 415 | maxDepth: 1, 416 | fill: true, 417 | inclusive: true, 418 | }; 419 | 420 | let newTbl = new Table(data, options); 421 | newTbl.print(); 422 | } else { 423 | print("No Change Streams found!"); 424 | } 425 | }; 426 | 427 | /** 428 | * Generates JSON output for the extracted changestream data 429 | * @param {*} data The data to be displayed in a table 430 | * @param {boolean} extended Whether the extended output format is being used. This is used to generate the output table headers. 431 | */ 432 | globalThis.generateJsonOutput = function (data, extended) { 433 | if (data && data.length > 0) { 434 | data.forEach(changeStreamOpData => { 435 | print(JSON.stringify(changeStreamOpData)) 436 | }) 437 | 438 | } else { 439 | print("No Change Streams found!"); 440 | } 441 | }; 442 | 443 | /** 444 | * Generates CSV output for the extracted changestream data 445 | * @param {*} data The data to be displayed in a table 446 | * @param {boolean} extended Whether the extended output format is being used. This is used to generate the output table headers. 447 | */ 448 | globalThis.generateCsvOutput = function (data, extended, delimiter) { 449 | if (data && data.length > 0) { 450 | let headersSource = extended ? ExtendedChangeStreamsData.headers() : ChangeStreamsData.headers() 451 | let headers = headersSource.map(h => h.name) 452 | let headersStr = headers.reduce( 453 | (accumulator, currentValue) => accumulator === "" ? 
currentValue : accumulator + delimiter + currentValue, 454 | "" 455 | ) 456 | print(headersStr) 457 | 458 | data.forEach(changeStreamOpData => { 459 | print(changeStreamOpData.toCsvString(delimiter)) 460 | }) 461 | 462 | } else { 463 | print("No Change Streams found!"); 464 | } 465 | }; 466 | 467 | 468 | 469 | function _prettyPrintChangeStreamPipeline(connectionId) { 470 | let pipeline = [ 471 | { 472 | $currentOp: { 473 | allUsers: true, 474 | idleConnections: true, 475 | idleCursors: true, 476 | idleSessions: true, 477 | backtrace: true, 478 | localOps: true, 479 | }, 480 | }, 481 | { 482 | $match: { 483 | connectionId: connectionId, 484 | }, 485 | }, 486 | ]; 487 | 488 | //excute pipeline 489 | let changeStreamsDataRaw = db 490 | .getSiblingDB("admin") 491 | .aggregate(pipeline) 492 | .toArray()[0]; 493 | if ( 494 | changeStreamsDataRaw && 495 | changeStreamsDataRaw.cursor && 496 | changeStreamsDataRaw.cursor.originatingCommand && 497 | changeStreamsDataRaw.cursor.originatingCommand.pipeline 498 | ) { 499 | print(changeStreamsDataRaw.cursor.originatingCommand.pipeline); 500 | } else { 501 | print("Not found"); 502 | } 503 | 504 | }; 505 | 506 | function _prettyPrintChangeStreamPipelineHelp(){ 507 | print("prettyPrintChangeStreamPipeline(connectionId: any): void") 508 | print("Pretty prints the Change Stream pipeline for a given Connection ID.") 509 | print("\t * @param {*} connectionId The connection ID where the change stream is executing.") 510 | } 511 | 512 | /** 513 | * Pretty prints the Change Stream pipeline for a given Connection ID. 514 | * @param {*} connectionId The connection ID where the change stream is executing. 515 | */ 516 | globalThis.prettyPrintChangeStreamPipeline = function (connectionId) { 517 | _prettyPrintChangeStreamPipeline(connectionId); 518 | } 519 | globalThis.prettyPrintChangeStreamPipeline.help = function () {_prettyPrintChangeStreamPipelineHelp();} -------------------------------------------------------------------------------- /snippets/mongocompat/mongotypes.js: -------------------------------------------------------------------------------- 1 | // Date and time types 2 | if (typeof (Timestamp) != "undefined") { 3 | const OriginalTimestamp = Timestamp; 4 | 5 | // Reference: https://github.com/mongodb/mongo/blob/c4d21d3346572e28df2f174df4d87e7618df4a77/src/mongo/scripting/mozjs/timestamp.cpp#L67-L78 6 | function validateTimestampComponent(component, name) { 7 | const MAX_UINT32 = 4294967295; 8 | 9 | if (typeof component !== 'number') { 10 | throw new TypeError(`${name} must be a number`); 11 | } 12 | 13 | const val = Math.floor(component); 14 | if (val < 0 || val > MAX_UINT32) { 15 | throw new TypeError( 16 | `${name} must be non-negative and not greater than ${MAX_UINT32}, got ${val}` 17 | ); 18 | } 19 | 20 | return val; 21 | } 22 | 23 | Timestamp = function(t, i) { 24 | if (arguments.length === 0) { 25 | return new OriginalTimestamp({ t: 0, i: 0 }); 26 | } 27 | 28 | if (arguments.length === 1) { 29 | const proto = Object.getPrototypeOf(t); 30 | if ((proto === null || proto === Object.prototype) && ('t' in t || 'i' in t)) { 31 | const validatedT = validateTimestampComponent(t.t || 0, "Timestamp time (seconds)"); 32 | const validatedI = validateTimestampComponent(t.i || 0, "Timestamp increment"); 33 | return new OriginalTimestamp({ t: validatedT, i: validatedI }); 34 | } 35 | return new OriginalTimestamp(t); 36 | } 37 | 38 | // Reference: 
https://github.com/mongodb/mongo/blob/c4d21d3346572e28df2f174df4d87e7618df4a77/src/mongo/scripting/mozjs/timestamp.cpp#L91-L98 39 | if (arguments.length === 2) { 40 | const validatedT = validateTimestampComponent(t, "Timestamp time (seconds)"); 41 | const validatedI = validateTimestampComponent(i, "Timestamp increment"); 42 | return new OriginalTimestamp({ t: validatedT, i: validatedI }); 43 | } 44 | 45 | throw new Error("Timestamp needs 0 or 2 arguments"); 46 | }; 47 | 48 | Timestamp.prototype = OriginalTimestamp.prototype; 49 | 50 | for (const key of Object.getOwnPropertyNames(OriginalTimestamp)) { 51 | // Skip prototype, length, name(function internals) 52 | if (key !== 'prototype' && key !== 'length' && key !== 'name') { 53 | Timestamp[key] = OriginalTimestamp[key]; 54 | } 55 | } 56 | 57 | Timestamp.prototype.tojson = function() { 58 | return this.toStringIncomparable(); 59 | }; 60 | 61 | Timestamp.prototype.getTime = function() { 62 | return this.hasOwnProperty("t") ? this.t : this.high; 63 | }; 64 | 65 | Timestamp.prototype.getInc = function() { 66 | return this.hasOwnProperty("i") ? this.i : this.low; 67 | }; 68 | 69 | Timestamp.prototype.toString = function() { 70 | // Resmoke overrides `toString` to throw an error to prevent accidental operator 71 | // comparisons, e.g: >, -, etc... 72 | return this.toStringIncomparable(); 73 | }; 74 | 75 | Timestamp.prototype.toStringIncomparable = function() { 76 | var t = this.hasOwnProperty("t") ? this.t : this.high; 77 | var i = this.hasOwnProperty("i") ? this.i : this.low; 78 | return "Timestamp(" + t + ", " + i + ")"; 79 | }; 80 | } else { 81 | print("warning: no Timestamp class"); 82 | } 83 | 84 | Date.timeFunc = function(theFunc, numTimes) { 85 | var start = new Date(); 86 | numTimes = numTimes || 1; 87 | for (var i = 0; i < numTimes; i++) { 88 | theFunc.apply(null, Array.from(arguments).slice(2)); 89 | } 90 | 91 | return (new Date()).getTime() - start.getTime(); 92 | }; 93 | 94 | Date.prototype.tojson = function() { 95 | try { 96 | // If this === Date.prototype or this is a Date instance created from 97 | // Object.create(Date.prototype), then the [[DateValue]] internal slot won't be set and will 98 | // lead to a TypeError. We instead treat it as though the [[DateValue]] internal slot is NaN 99 | // in order to be consistent with the ES5 behavior in MongoDB 3.2 and earlier. 100 | this.getTime(); 101 | } catch (e) { 102 | if (e instanceof TypeError && 103 | e.message.includes("getTime method called on incompatible Object")) { 104 | return new Date(NaN).tojson(); 105 | } 106 | throw e; 107 | } 108 | 109 | var UTC = 'UTC'; 110 | var year = this['get' + UTC + 'FullYear']().zeroPad(4); 111 | var month = (this['get' + UTC + 'Month']() + 1).zeroPad(2); 112 | var date = this['get' + UTC + 'Date']().zeroPad(2); 113 | var hour = this['get' + UTC + 'Hours']().zeroPad(2); 114 | var minute = this['get' + UTC + 'Minutes']().zeroPad(2); 115 | var sec = this['get' + UTC + 'Seconds']().zeroPad(2); 116 | 117 | if (this['get' + UTC + 'Milliseconds']()) 118 | sec += '.' + this['get' + UTC + 'Milliseconds']().zeroPad(3); 119 | 120 | var ofs = 'Z'; 121 | // // print a non-UTC time 122 | // var ofsmin = this.getTimezoneOffset(); 123 | // if (ofsmin != 0){ 124 | // ofs = ofsmin > 0 ? 
'-' : '+'; // This is correct 125 | // ofs += (ofsmin/60).zeroPad(2) 126 | // ofs += (ofsmin%60).zeroPad(2) 127 | // } 128 | return 'ISODate("' + year + '-' + month + '-' + date + 'T' + hour + ':' + minute + ':' + sec + 129 | ofs + '")'; 130 | }; 131 | 132 | ISODate = function(isoDateStr) { 133 | if (!isoDateStr) 134 | return new Date(); 135 | 136 | var isoDateRegex = 137 | /^(\d{4})-?(\d{2})-?(\d{2})([T ](\d{2})(:?(\d{2})(:?(\d{2}(\.\d+)?))?)?(Z|([+-])(\d{2}):?(\d{2})?)?)?$/; 138 | var res = isoDateRegex.exec(isoDateStr); 139 | 140 | if (!res) 141 | throw Error("invalid ISO date: " + isoDateStr); 142 | 143 | var year = parseInt(res[1], 10); 144 | var month = (parseInt(res[2], 10)) - 1; 145 | var date = parseInt(res[3], 10); 146 | var hour = parseInt(res[5], 10) || 0; 147 | var min = parseInt(res[7], 10) || 0; 148 | var sec = parseInt((res[9] && res[9].substr(0, 2)), 10) || 0; 149 | var ms = Math.round((parseFloat(res[10]) || 0) * 1000); 150 | 151 | var dateTime = new Date(); 152 | 153 | dateTime.setUTCFullYear(year, month, date); 154 | dateTime.setUTCHours(hour); 155 | dateTime.setUTCMinutes(min); 156 | dateTime.setUTCSeconds(sec); 157 | var time = dateTime.setUTCMilliseconds(ms); 158 | 159 | if (res[11] && res[11] != 'Z') { 160 | var ofs = 0; 161 | ofs += (parseInt(res[13], 10) || 0) * 60 * 60 * 1000; // hours 162 | ofs += (parseInt(res[14], 10) || 0) * 60 * 1000; // mins 163 | if (res[12] == '+') // if ahead subtract 164 | ofs *= -1; 165 | 166 | time += ofs; 167 | } 168 | 169 | // If we are outside the range 0000-01-01T00:00:00.000Z - 9999-12-31T23:59:59.999Z, abort with 170 | // error. 171 | const DATE_RANGE_MIN_MICROSECONDS = -62167219200000; 172 | const DATE_RANGE_MAX_MICROSECONDS = 253402300799999; 173 | 174 | if (time < DATE_RANGE_MIN_MICROSECONDS || time > DATE_RANGE_MAX_MICROSECONDS) 175 | throw Error("invalid ISO date: " + isoDateStr); 176 | 177 | return new Date(time); 178 | }; 179 | 180 | // Regular Expression 181 | RegExp.escape = function(text) { 182 | return text.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&"); 183 | }; 184 | 185 | RegExp.prototype.tojson = RegExp.prototype.toString; 186 | 187 | // Array 188 | Array.contains = function(a, x) { 189 | if (!Array.isArray(a)) { 190 | throw new Error("The first argument to Array.contains must be an array"); 191 | } 192 | 193 | for (var i = 0; i < a.length; i++) { 194 | if (a[i] == x) 195 | return true; 196 | } 197 | return false; 198 | }; 199 | 200 | Array.unique = function(a) { 201 | if (!Array.isArray(a)) { 202 | throw new Error("The first argument to Array.unique must be an array"); 203 | } 204 | 205 | var u = []; 206 | for (var i = 0; i < a.length; i++) { 207 | var o = a[i]; 208 | if (!Array.contains(u, o)) { 209 | u.push(o); 210 | } 211 | } 212 | return u; 213 | }; 214 | 215 | Array.shuffle = function(arr) { 216 | if (!Array.isArray(arr)) { 217 | throw new Error("The first argument to Array.shuffle must be an array"); 218 | } 219 | 220 | for (var i = 0; i < arr.length - 1; i++) { 221 | var pos = i + Random.randInt(arr.length - i); 222 | var save = arr[i]; 223 | arr[i] = arr[pos]; 224 | arr[pos] = save; 225 | } 226 | return arr; 227 | }; 228 | 229 | Array.tojson = function(a, indent, nolint, depth, sortedKeys) { 230 | if (!Array.isArray(a)) { 231 | throw new Error("The first argument to Array.tojson must be an array"); 232 | } 233 | 234 | if (typeof depth !== 'number') { 235 | depth = 0; 236 | } 237 | if (depth > tojson.MAX_DEPTH) { 238 | return "[Array]"; 239 | } 240 | 241 | var elementSeparator = nolint ? 
" " : "\n"; 242 | 243 | if (!indent) 244 | indent = ""; 245 | if (nolint) 246 | indent = ""; 247 | 248 | if (a.length == 0) { 249 | return "[ ]"; 250 | } 251 | 252 | var s = "[" + elementSeparator; 253 | 254 | // add to indent if we are pretty 255 | if (!nolint) 256 | indent += "\t"; 257 | 258 | for (var i = 0; i < a.length; i++) { 259 | s += indent + tojson(a[i], indent, nolint, depth + 1, sortedKeys); 260 | if (i < a.length - 1) { 261 | s += "," + elementSeparator; 262 | } 263 | } 264 | 265 | // remove from indent if we are pretty 266 | if (!nolint) 267 | indent = indent.substring(1); 268 | 269 | s += elementSeparator + indent + "]"; 270 | return s; 271 | }; 272 | 273 | Array.fetchRefs = function(arr, coll) { 274 | if (!Array.isArray(arr)) { 275 | throw new Error("The first argument to Array.fetchRefs must be an array"); 276 | } 277 | 278 | var n = []; 279 | for (var i = 0; i < arr.length; i++) { 280 | var z = arr[i]; 281 | if (coll && coll != z.getCollection()) 282 | continue; 283 | n.push(z.fetch()); 284 | } 285 | return n; 286 | }; 287 | 288 | Array.sum = function(arr) { 289 | if (!Array.isArray(arr)) { 290 | throw new Error("The first argument to Array.sum must be an array"); 291 | } 292 | 293 | if (arr.length == 0) 294 | return null; 295 | var s = arr[0]; 296 | for (var i = 1; i < arr.length; i++) 297 | s += arr[i]; 298 | return s; 299 | }; 300 | 301 | Array.avg = function(arr) { 302 | if (!Array.isArray(arr)) { 303 | throw new Error("The first argument to Array.avg must be an array"); 304 | } 305 | 306 | if (arr.length == 0) 307 | return null; 308 | return Array.sum(arr) / arr.length; 309 | }; 310 | 311 | Array.stdDev = function(arr) { 312 | if (!Array.isArray(arr)) { 313 | throw new Error("The first argument to Array.stdDev must be an array"); 314 | } 315 | 316 | var avg = Array.avg(arr); 317 | var sum = 0; 318 | 319 | for (var i = 0; i < arr.length; i++) { 320 | sum += Math.pow(arr[i] - avg, 2); 321 | } 322 | 323 | return Math.sqrt(sum / arr.length); 324 | }; 325 | 326 | // Object 327 | Object.extend = function(dst, src, deep) { 328 | for (var k in src) { 329 | var v = src[k]; 330 | if (deep && typeof (v) == "object" && v !== null) { 331 | if (v.constructor === ObjectId) { // convert ObjectId properly 332 | eval("v = " + tojson(v)); 333 | } else if ("floatApprox" in v) { // convert NumberLong properly 334 | eval("v = " + tojson(v)); 335 | } else { 336 | v = Object.extend(typeof (v.length) == "number" ? 
[] : {}, v, true); 337 | } 338 | } 339 | dst[k] = v; 340 | } 341 | return dst; 342 | }; 343 | 344 | Object.merge = function(dst, src, deep) { 345 | var clone = Object.extend({}, dst, deep); 346 | return Object.extend(clone, src, deep); 347 | }; 348 | 349 | Object.keySet = function(o) { 350 | var ret = new Array(); 351 | for (var i in o) { 352 | if (!(i in o.__proto__ && o[i] === o.__proto__[i])) { 353 | ret.push(i); 354 | } 355 | } 356 | return ret; 357 | }; 358 | 359 | // mongosh-specific addition 360 | Object.bsonsize = bsonsize; 361 | 362 | // String 363 | if (String.prototype.trim === undefined) { 364 | String.prototype.trim = function() { 365 | return this.replace(/^\s+|\s+$/g, ""); 366 | }; 367 | } 368 | if (String.prototype.trimLeft === undefined) { 369 | String.prototype.trimLeft = function() { 370 | return this.replace(/^\s+/, ""); 371 | }; 372 | } 373 | if (String.prototype.trimRight === undefined) { 374 | String.prototype.trimRight = function() { 375 | return this.replace(/\s+$/, ""); 376 | }; 377 | } 378 | 379 | // always provide ltrim and rtrim for backwards compatibility 380 | String.prototype.ltrim = String.prototype.trimLeft; 381 | String.prototype.rtrim = String.prototype.trimRight; 382 | 383 | String.prototype.startsWith = function(str) { 384 | return this.indexOf(str) == 0; 385 | }; 386 | 387 | String.prototype.endsWith = function(str) { 388 | return this.indexOf(str, this.length - str.length) !== -1; 389 | }; 390 | 391 | // Polyfill taken from 392 | // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes 393 | if (!String.prototype.includes) { 394 | String.prototype.includes = function() { 395 | 'use strict'; 396 | return String.prototype.indexOf.apply(this, arguments) !== -1; 397 | }; 398 | } 399 | 400 | // Returns a copy padded with the provided character _chr_ so it becomes (at least) _length_ 401 | // characters long. 402 | // No truncation is performed if the string is already longer than _length_. 403 | // @param length minimum length of the returned string 404 | // @param right if falsy add leading whitespace, otherwise add trailing whitespace 405 | // @param chr character to be used for padding, defaults to whitespace 406 | // @return the padded string 407 | String.prototype.pad = function(length, right, chr) { 408 | if (typeof chr == 'undefined') 409 | chr = ' '; 410 | var str = this; 411 | for (var i = length - str.length; i > 0; i--) { 412 | if (right) { 413 | str = str + chr; 414 | } else { 415 | str = chr + str; 416 | } 417 | } 418 | return str; 419 | }; 420 | 421 | // Number 422 | Number.prototype.toPercentStr = function() { 423 | return (this * 100).toFixed(2) + "%"; 424 | }; 425 | 426 | Number.prototype.zeroPad = function(width) { 427 | return ('' + this).pad(width, false, '0'); 428 | }; 429 | 430 | // NumberLong 431 | if (!NumberLong.prototype) { 432 | NumberLong.prototype = {}; 433 | } 434 | 435 | NumberLong.prototype.nativeToString = NumberLong.prototype.toString; 436 | NumberLong.prototype.toString = function () { 437 | const INT32_MIN = -2147483648; 438 | const INT32_MAX = 2147483647; 439 | 440 | const numValue = this.toNumber ? 
this.toNumber() : Number(this); 441 | if (numValue >= INT32_MIN && numValue <= INT32_MAX && Number.isInteger(numValue)) { 442 | return `NumberLong(${numValue})`; 443 | } 444 | return `NumberLong("${this.exactValueString}")`; 445 | }; 446 | 447 | NumberLong.prototype.tojson = function() { 448 | return this.toString(); 449 | }; 450 | 451 | Object.defineProperty(NumberLong.prototype, 'floatApprox', { 452 | enumerable: false, 453 | configurable: true, 454 | get: function() { 455 | return this.toNumber ? this.toNumber() : Number(this); 456 | } 457 | }); 458 | 459 | Object.defineProperty(NumberLong.prototype, 'top', { 460 | enumerable: false, 461 | configurable: true, 462 | get: function() { 463 | return this.high; 464 | } 465 | }); 466 | 467 | Object.defineProperty(NumberLong.prototype, 'bottom', { 468 | enumerable: false, 469 | configurable: true, 470 | get: function() { 471 | return this.low; 472 | } 473 | }); 474 | 475 | Object.defineProperty(NumberLong.prototype, 'exactValueString', { 476 | enumerable: false, 477 | configurable: true, 478 | get: function() { 479 | const high = BigInt(this.high); 480 | const low = BigInt(this.low >>> 0); 481 | const value = (high << 32n) | low; 482 | return value.toString(); 483 | } 484 | }); 485 | 486 | // NumberInt 487 | if (!NumberInt.prototype) { 488 | NumberInt.prototype = {}; 489 | } 490 | NumberInt.prototype.nativeToString = NumberInt.prototype.toString; 491 | NumberInt.prototype.toString = function() { 492 | return `NumberInt(${this.valueOf()})`; 493 | }; 494 | NumberInt.prototype.tojson = function() { 495 | return this.toString(); 496 | }; 497 | NumberInt.prototype.toNumber = function() { 498 | return this.valueOf(); 499 | }; 500 | 501 | // NumberDecimal 502 | if (typeof NumberDecimal !== 'undefined') { 503 | if (!NumberDecimal.prototype) { 504 | NumberDecimal.prototype = {}; 505 | } 506 | 507 | NumberDecimal.prototype.nativeToString = NumberDecimal.prototype.toString 508 | NumberDecimal.prototype.toString = function() { 509 | return `NumberDecimal("${this.nativeToString()}")`; 510 | }; 511 | 512 | NumberDecimal.prototype.tojson = function() { 513 | return this.toString(); 514 | }; 515 | } 516 | 517 | // ObjectId 518 | if (!ObjectId.prototype) 519 | ObjectId.prototype = {}; 520 | 521 | ObjectId.prototype.toString = function() { 522 | return this.inspect(); 523 | }; 524 | 525 | ObjectId.prototype.tojson = function() { 526 | return this.toString(); 527 | }; 528 | 529 | Object.defineProperty(ObjectId.prototype, 'str', { 530 | enumerable: true, 531 | get() { 532 | return this.toHexString(); 533 | } 534 | }); 535 | 536 | ObjectId.prototype.valueOf = function() { 537 | return this.str; 538 | }; 539 | 540 | ObjectId.prototype.isObjectId = true; 541 | 542 | ObjectId.prototype.getTimestamp = function() { 543 | return new Date(parseInt(this.valueOf().slice(0, 8), 16) * 1000); 544 | }; 545 | 546 | ObjectId.prototype.equals = function(other) { 547 | return this.str == other.str; 548 | }; 549 | 550 | // Creates an ObjectId from a Date. 551 | // Based on solution discussed here: 552 | // http://stackoverflow.com/questions/8749971/can-i-query-mongodb-objectid-by-date 553 | ObjectId.fromDate = function(source) { 554 | if (!source) { 555 | throw Error("date missing or undefined"); 556 | } 557 | 558 | var sourceDate; 559 | 560 | // Extract Date from input. 561 | // If input is a string, assume ISO date string and 562 | // create a Date from the string. 
563 | if (source instanceof Date) { 564 | sourceDate = source; 565 | } else { 566 | throw Error("Cannot create ObjectId from " + typeof (source) + ": " + tojson(source)); 567 | } 568 | 569 | // Convert date object to seconds since Unix epoch. 570 | var seconds = Math.floor(sourceDate.getTime() / 1000); 571 | 572 | // Generate hex timestamp with padding. 573 | var hexTimestamp = seconds.toString(16).pad(8, false, '0') + "0000000000000000"; 574 | 575 | // Create an ObjectId with hex timestamp. 576 | var objectId = ObjectId(hexTimestamp); 577 | 578 | return objectId; 579 | }; 580 | 581 | // DBPointer 582 | if (typeof (DBPointer) != "undefined") { 583 | DBPointer.prototype.fetch = function() { 584 | assert(this.ns, "need a ns"); 585 | assert(this.id, "need an id"); 586 | return db[this.ns].findOne({_id: this.id}); 587 | }; 588 | 589 | DBPointer.prototype.tojson = function(indent) { 590 | return this.toString(); 591 | }; 592 | 593 | DBPointer.prototype.getCollection = function() { 594 | return this.ns; 595 | }; 596 | 597 | DBPointer.prototype.getId = function() { 598 | return this.id; 599 | }; 600 | 601 | DBPointer.prototype.toString = function() { 602 | return "DBPointer(" + tojson(this.ns) + ", " + tojson(this.id) + ")"; 603 | }; 604 | } else { 605 | // print("warning: no DBPointer"); 606 | } 607 | 608 | // DBRef 609 | if (typeof (DBRef) != "undefined") { 610 | DBRef.prototype.fetch = function() { 611 | assert(this.collection, "need a ns"); 612 | assert(this.oid, "need an id"); 613 | var coll = this.db ? db.getSiblingDB(this.db).getCollection(this.collection) : db[this.collection]; 614 | return coll.findOne({_id: this.oid}); 615 | }; 616 | 617 | DBRef.prototype.tojson = function(indent) { 618 | return this.toString(); 619 | }; 620 | 621 | DBRef.prototype.getDb = function() { 622 | return this.db || undefined; 623 | }; 624 | 625 | DBRef.prototype.getCollection = function() { 626 | return this.collection; 627 | }; 628 | 629 | DBRef.prototype.getRef = function() { 630 | return this.collection; 631 | }; 632 | 633 | DBRef.prototype.getId = function() { 634 | return this.oid; 635 | }; 636 | 637 | DBRef.prototype.toString = function() { 638 | return `DBRef("${this.collection}", ${tojson(this.oid)}` + 639 | (this.db ? 
`, "${this.db}"` : "") + ")"; 640 | }; 641 | 642 | Object.defineProperty(DBRef.prototype, "$ref", { 643 | get: function () { 644 | return this.collection; 645 | }, 646 | set: function (value) { 647 | this.collection = value; 648 | }, 649 | }); 650 | Object.defineProperty(DBRef.prototype, "$id", { 651 | get: function () { 652 | return this.oid; 653 | }, 654 | set: function (value) { 655 | this.oid = value; 656 | }, 657 | }); 658 | Object.defineProperty(DBRef.prototype, "$db", { 659 | get: function () { 660 | return this.db; 661 | }, 662 | set: function (value) { 663 | this.db = value; 664 | }, 665 | }); 666 | } else { 667 | print("warning: no DBRef"); 668 | } 669 | 670 | // BinData 671 | if (typeof (BinData) != "undefined") { 672 | BinData.prototype.tojson = function() { 673 | return this.toString(); 674 | }; 675 | 676 | BinData.prototype.subtype = function() { 677 | return this.type; 678 | }; 679 | BinData.prototype.length = function() { 680 | return this.len; 681 | }; 682 | 683 | BinData.prototype.nativeToString = BinData.prototype.toString; 684 | BinData.prototype.toString = function (encoding) { 685 | if (encoding) { 686 | return this.nativeToString(encoding); 687 | } 688 | return `BinData(${this.type},"${this.base64()}")`; 689 | }; 690 | 691 | BinData.prototype.base64 = function () { 692 | return this.toString("base64"); 693 | }; 694 | BinData.prototype.hex = function () { 695 | return this.toString("hex"); 696 | }; 697 | Object.defineProperty(BinData.prototype, "len", { 698 | get: function () { 699 | return this.buffer ? this.buffer.byteLength : 0; 700 | }, 701 | }); 702 | Object.defineProperty(BinData.prototype, "type", { 703 | get: function () { 704 | return this.sub_type; 705 | }, 706 | }); 707 | } else { 708 | print("warning: no BinData class"); 709 | } 710 | 711 | if (typeof (gc) == "undefined") { 712 | gc = function() { 713 | print("warning: using noop gc()"); 714 | }; 715 | } 716 | 717 | // MinKey 718 | if (typeof (MinKey) != "undefined") { 719 | const OriginalMinKey = MinKey; 720 | MinKey = function () { 721 | if (MinKey.prototype.__instance__ === undefined) { 722 | MinKey.prototype.__instance__ = new OriginalMinKey(); 723 | } 724 | 725 | return MinKey.prototype.__instance__; 726 | }; 727 | 728 | MinKey.prototype = OriginalMinKey.prototype; 729 | 730 | for (const key of Object.getOwnPropertyNames(OriginalMinKey)) { 731 | // Skip prototype, length, name(function internals) 732 | if (key !== 'prototype' && key !== 'length' && key !== 'name') { 733 | MinKey[key] = OriginalMinKey[key]; 734 | } 735 | } 736 | 737 | MinKey.prototype.toJSON = function () { 738 | return this.tojson(); 739 | }; 740 | 741 | MinKey.prototype.tojson = function () { 742 | return "{ \"$minKey\" : 1 }"; 743 | }; 744 | 745 | MinKey.prototype.toString = function () { 746 | return "[object Function]"; 747 | }; 748 | } else { 749 | print("warning: no MinKey class"); 750 | } 751 | 752 | // MaxKey 753 | if (typeof (MaxKey) != "undefined") { 754 | const OriginalMaxKey = MaxKey; 755 | MaxKey = function () { 756 | if (MaxKey.prototype.__instance__ === undefined) { 757 | MaxKey.prototype.__instance__ = new OriginalMaxKey(); 758 | } 759 | 760 | return MaxKey.prototype.__instance__; 761 | }; 762 | 763 | MaxKey.prototype = OriginalMaxKey.prototype; 764 | 765 | for (const key of Object.getOwnPropertyNames(OriginalMaxKey)) { 766 | // Skip prototype, length, name(function internals) 767 | if (key !== 'prototype' && key !== 'length' && key !== 'name') { 768 | MaxKey[key] = OriginalMaxKey[key]; 769 | } 770 | } 771 | 772 | 
MaxKey.prototype.toJSON = function () { 773 | return this.tojson(); 774 | }; 775 | 776 | MaxKey.prototype.tojson = function () { 777 | return "{ \"$MaxKey\" : 1 }"; 778 | }; 779 | 780 | MaxKey.prototype.toString = function () { 781 | return "[object Function]"; 782 | }; 783 | } else { 784 | print("warning: no MaxKey class"); 785 | } 786 | 787 | // Free Functions 788 | tojsononeline = function(x) { 789 | return tojson(x, " ", true); 790 | }; 791 | 792 | tojson = function(x, indent, nolint, depth, sortKeys) { 793 | if (x === null) 794 | return "null"; 795 | 796 | if (x === undefined) 797 | return "undefined"; 798 | 799 | if (!indent) 800 | indent = ""; 801 | 802 | if (typeof depth !== 'number') { 803 | depth = 0; 804 | } 805 | 806 | switch (typeof x) { 807 | case "string": 808 | return JSON.stringify(x); 809 | case "number": 810 | case "boolean": 811 | return "" + x; 812 | case "object": { 813 | var s = tojsonObject(x, indent, nolint, depth, sortKeys); 814 | if ((nolint == null || nolint == true) && s.length < 80 && 815 | (indent == null || indent.length == 0)) { 816 | s = s.replace(/[\t\r\n]+/gm, " "); 817 | } 818 | return s; 819 | } 820 | case "function": 821 | if (x === MinKey || x === MaxKey) 822 | return x.tojson(); 823 | return x.toString(); 824 | default: 825 | throw Error("tojson can't handle type " + (typeof x)); 826 | } 827 | }; 828 | tojson.MAX_DEPTH = 100; 829 | 830 | tojsonObject = function(x, indent, nolint, depth, sortKeys) { 831 | if (typeof depth !== 'number') { 832 | depth = 0; 833 | } 834 | if (typeof sortKeys !== 'boolean') { 835 | sortKeys = false; 836 | } 837 | var lineEnding = nolint ? " " : "\n"; 838 | var tabSpace = nolint ? "" : "\t"; 839 | if (typeof x !== "object") { 840 | throw new TypeError(`tojsonObject needs object, not [${typeof x}]`); 841 | } 842 | 843 | if (!indent) 844 | indent = ""; 845 | 846 | if (typeof (x.tojson) == "function" && x.tojson != tojson) { 847 | return x.tojson(indent, nolint, depth, sortKeys); 848 | } 849 | 850 | if (x.constructor && typeof (x.constructor.tojson) == "function" && 851 | x.constructor.tojson != tojson) { 852 | return x.constructor.tojson(x, indent, nolint, depth, sortKeys); 853 | } 854 | 855 | if (x instanceof Error) { 856 | return x.toString(); 857 | } 858 | 859 | try { 860 | x.toString(); 861 | } catch (e) { 862 | // toString not callable 863 | return "[Object]"; 864 | } 865 | 866 | if (depth > tojson.MAX_DEPTH) { 867 | return "[Object]"; 868 | } 869 | 870 | var s = "{" + lineEnding; 871 | 872 | // push one level of indent 873 | indent += tabSpace; 874 | 875 | var keys = x; 876 | if (typeof (x._simpleKeys) == "function") 877 | keys = x._simpleKeys(); 878 | var keyNames = []; 879 | for (var k in keys) { 880 | keyNames.push(k); 881 | } 882 | if (sortKeys) keyNames.sort(); 883 | 884 | var fieldStrings = []; 885 | for (var k of keyNames) { 886 | var val = x[k]; 887 | 888 | // skip internal DB types to avoid issues with interceptors 889 | if (typeof DB != 'undefined' && val == DB.prototype) 890 | continue; 891 | if (typeof DBCollection != 'undefined' && val == DBCollection.prototype) 892 | continue; 893 | 894 | fieldStrings.push(indent + "\"" + k + "\" : " + tojson(val, indent, nolint, depth + 1, sortKeys)); 895 | } 896 | 897 | if (fieldStrings.length > 0) { 898 | s += fieldStrings.join("," + lineEnding); 899 | } else { 900 | s += indent; 901 | } 902 | s += lineEnding; 903 | 904 | // pop one level of indent 905 | indent = indent.substring(1); 906 | return s + indent + "}"; 907 | }; 908 | 909 | printjson = function(x) { 910 | 
print(tojson(x)); 911 | }; 912 | 913 | printjsononeline = function(x) { 914 | print(tojsononeline(x)); 915 | }; 916 | 917 | isString = function(x) { 918 | return typeof (x) == "string"; 919 | }; 920 | 921 | isNumber = function(x) { 922 | return typeof (x) == "number"; 923 | }; 924 | 925 | // This function returns true even if the argument is an array. See SERVER-14220. 926 | isObject = function(x) { 927 | return typeof (x) == "object"; 928 | }; 929 | --------------------------------------------------------------------------------
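For reference, a minimal usage sketch of the legacy helpers that `mongotypes.js` restores. This is a hypothetical mongosh session, not part of the snippet itself; it assumes the `mongocompat` snippet has been installed and loaded (e.g. via `snippet install mongocompat`), and the expected results in the comments follow directly from the implementations above.

```js
// Array statistics helpers (each throws if the argument is not an array)
Array.avg([2, 4, 6])                      // 4
Array.unique([1, 1, 2, 3, 3])             // [ 1, 2, 3 ]

// Legacy ISODate() parser and Date#tojson() formatting
tojson(ISODate('2021-09-01'))             // 'ISODate("2021-09-01T00:00:00Z")'

// ObjectId.fromDate() builds an id whose timestamp encodes the given Date,
// which is handy for range queries on _id (see the Stack Overflow link above)
db.coll.find({ _id: { $gte: ObjectId.fromDate(new Date('2021-09-01')) } })

// One-line JSON output via the restored free functions
printjsononeline({ nested: { a: [1, 2, 3] } })
```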