├── .babelrc ├── .prettierrc ├── index.js ├── jest.config.js ├── config ├── jest │ ├── helper.js │ └── reactNativeMock.js └── AsyncStorageMock.js ├── .gitignore ├── test ├── typings.spec.ts └── persistence.test.js ├── LICENSE ├── tsconfig.json ├── package.json ├── lib ├── storage.js ├── promisefy.js ├── customUtils.js ├── executor.js ├── cursor.js ├── indexes.js ├── persistence.js ├── datastore.js └── model.js ├── index.d.ts └── README.md /.babelrc: -------------------------------------------------------------------------------- 1 | { 2 | "presets": ["env"] 3 | } 4 | -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "singleQuote": true 3 | } 4 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | var Datastore = require('./lib/datastore'); 2 | 3 | module.exports = Datastore; 4 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | preset: "ts-jest", 3 | testEnvironment: "node", 4 | testRegex: '(spec|test)\\.tsx?$', 5 | "moduleNameMapper": { 6 | "react-native": "/config/jest/reactNativeMock.js" 7 | }, 8 | }; 9 | -------------------------------------------------------------------------------- /config/jest/helper.js: -------------------------------------------------------------------------------- 1 | import DataStore from '../../index' 2 | import { AsyncStorage } from './reactNativeMock' 3 | 4 | export const getDb = async () => { 5 | AsyncStorage.__reset() 6 | const db = new DataStore({ filename: 'foo' }) 7 | await db.loadDatabaseAsync() 8 | return db 9 | } 10 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | lib-cov 2 | *.seed 3 | *.log 4 | *.csv 5 | *.dat 6 | *.out 7 | *.pid 8 | *.gz 9 | .idea 10 | pids 11 | logs 12 | results 13 | 14 | npm-debug.log 15 | workspace 16 | node_modules 17 | yarn.lock 18 | 19 | browser-version/src 20 | browser-version/node_modules 21 | 22 | *.swp 23 | *~ 24 | *.swo 25 | -------------------------------------------------------------------------------- /test/typings.spec.ts: -------------------------------------------------------------------------------- 1 | import { Options } from "react-native-local-mongodb"; 2 | import AsyncStorage from "@react-native-async-storage/async-storage"; 3 | 4 | describe("typings", () => { 5 | test('Options["storage"]', async () => { 6 | const storage: Options["storage"] = AsyncStorage; 7 | expect(storage).toBeTruthy(); 8 | }); 9 | }); 10 | -------------------------------------------------------------------------------- /config/AsyncStorageMock.js: -------------------------------------------------------------------------------- 1 | let items = {}; 2 | 3 | module.exports = { 4 | __reset: () => (items = {}), 5 | 6 | setItem: (item, value, cb) => { 7 | items[item] = value; 8 | cb(null, value); 9 | }, 10 | multiSet: (item, value, cb) => { 11 | items[item] = value; 12 | cb(null, value); 13 | }, 14 | getItem: (item, cb) => { 15 | const res = items[item]; 16 | cb(null, res); 17 | }, 18 | multiGet: (item, cb) => { 19 | const res = items[item]; 20 | cb(null, res); 21 | }, 22 | removeItem: (item, cb) => { 23 | 
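// Note (descriptive comment, not in the original mock): `delete` evaluates to a boolean,
// so the callback below reports a success flag rather than the removed value; that is
// sufficient for these tests.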
const res = delete items[item]; 24 | cb(null, res); 25 | }, 26 | getAllKeys: (items, cb) => { 27 | const res = items.keys(); 28 | cb(null, res); 29 | } 30 | }; 31 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | (The MIT License) 2 | 3 | Copyright (c) 2016 Antonio G Silva <antoniopresto@gmail.com> 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining 6 | a copy of this software and associated documentation files (the 7 | 'Software'), to deal in the Software without restriction, including 8 | without limitation the rights to use, copy, modify, merge, publish, 9 | distribute, sublicense, and/or sell copies of the Software, and to 10 | permit persons to whom the Software is furnished to do so, subject to 11 | the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be 14 | included in all copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, 17 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 18 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 19 | IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 20 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, 21 | TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 22 | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 23 | -------------------------------------------------------------------------------- /config/jest/reactNativeMock.js: -------------------------------------------------------------------------------- 1 | let items = {}; 2 | 3 | module.exports = { 4 | AsyncStorage: { 5 | __reset: () => items = {}, 6 | 7 | setItem: jest.fn((item, value, cb) => { 8 | return new Promise(resolve => { 9 | items[item] = value; 10 | cb(null, value); 11 | resolve(value); 12 | }); 13 | }), 14 | multiSet: jest.fn((item, value, cb) => { 15 | return new Promise(resolve => { 16 | items[item] = value; 17 | cb(null, value); 18 | resolve(value); 19 | }); 20 | }), 21 | getItem: jest.fn((item, cb) => { 22 | return new Promise(resolve => { 23 | const res = items[item]; 24 | cb(null, res); 25 | resolve(res); 26 | }); 27 | }), 28 | multiGet: jest.fn((item, cb) => { 29 | return new Promise(resolve => { 30 | const res = items[item]; 31 | cb(null, res); 32 | resolve(res); 33 | }); 34 | }), 35 | removeItem: jest.fn((item, cb) => { 36 | return new Promise(resolve => { 37 | const res = delete items[item]; 38 | cb(null, res); 39 | resolve(res); 40 | }); 41 | }), 42 | getAllKeys: jest.fn((items, cb) => { 43 | return new Promise(resolve => { 44 | const res = items.keys(); 45 | cb(null, res); 46 | resolve(res); 47 | }); 48 | }) 49 | } 50 | }; 51 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "incremental": false, 4 | "target": "es2017", 5 | "moduleResolution": "node", 6 | "module": "commonjs", 7 | "allowJs": true, 8 | "declaration": true, 9 | "inlineSourceMap": true, 10 | "esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */, 11 | "resolveJsonModule": true /* Include modules imported with .json extension. 
*/, 12 | "strict": true /* Enable all strict type-checking options. */, 13 | 14 | /* Strict Type-Checking Options */ 15 | "noImplicitAny": false /* Raise error on expressions and declarations with an implied 'any' type. */, 16 | 17 | /* Additional Checks */ 18 | "noUnusedLocals": false /* Report errors on unused locals. */, 19 | "noUnusedParameters": false /* Report errors on unused parameters. */, 20 | "noImplicitReturns": true /* Report error when not all code paths in function return a value. */, 21 | "noFallthroughCasesInSwitch": true /* Report errors for fallthrough cases in switch statement. */, 22 | 23 | /* Debugging Options */ 24 | "traceResolution": false /* Report module resolution log messages. */, 25 | "listEmittedFiles": false /* Print names of generated files part of the compilation. */, 26 | "listFiles": false /* Print names of files part of the compilation. */, 27 | "pretty": true /* Stylize errors and messages using color and context. */, 28 | "lib": ["es2019"], 29 | "skipLibCheck": true 30 | }, 31 | "compileOnSave": false 32 | } 33 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "react-native-local-mongodb", 3 | "version": "4.0.0", 4 | "author": "Antonio Presto ", 5 | "contributors": [ 6 | "Antonio Presto" 7 | ], 8 | "scripts": { 9 | "test": "NODE_ENV=test ./node_modules/.bin/jest --verbose && npm run test-db", 10 | "test-db": "./node_modules/.bin/mocha test/dbTest.test.js --reporter spec --timeout 10000" 11 | }, 12 | "main": "index", 13 | "typings": "index.d.ts", 14 | "license": "SEE LICENSE IN LICENSE", 15 | "bugs": { 16 | "url": "https://github.com/antoniopresto/react-native-local-mongodb/issues" 17 | }, 18 | "description": "react-native local mongodb storage. Embedded persistent or in memory database for react-native. 
API is a subset of MongoDB's and it's plenty fast.", 19 | "keywords": [ 20 | "database", 21 | "datastore", 22 | "embedded", 23 | "react-native", 24 | "mongodb" 25 | ], 26 | "homepage": "https://github.com/antoniopresto/react-native-local-mongodb", 27 | "repository": { 28 | "type": "git", 29 | "url": "git+ssh://git@github.com/antoniopresto/react-native-local-mongodb.git" 30 | }, 31 | "dependencies": { 32 | "async": "2.6.2", 33 | "binary-search-tree": "0.2.5", 34 | "events": "1.1.0", 35 | "underscore": "^1.13.2", 36 | "util": "0.10.3" 37 | }, 38 | "peerDependencies": { 39 | "react-native": "*" 40 | }, 41 | "devDependencies": { 42 | "@react-native-async-storage/async-storage": "1.15.13", 43 | "@types/react-native": "^0.66.9", 44 | "babel-preset-env": "1.6.1", 45 | "@types/jest": "27.0.3", 46 | "@types/node": "16.11.12", 47 | "jest": "27.4.5", 48 | "ts-jest": "27.1.1", 49 | "typescript": "4.5.4", 50 | "chai": "3.2.0", 51 | "mocha": "^5.2.0", 52 | "react-native": "latest", 53 | "sinon": "1.3.x", 54 | "prettier": "2.5.1" 55 | }, 56 | "directories": { 57 | "test": "test" 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /lib/storage.js: -------------------------------------------------------------------------------- 1 | module.exports = class Storage { 2 | constructor(storage) { 3 | this.storage = storage; 4 | this.crashSafeWriteFile = this.writeFile; 5 | } 6 | 7 | exists(filename, callback) { 8 | this.storage.getItem(filename, (err, value) => { 9 | if (value !== null) { 10 | return callback(true); 11 | } else { 12 | return callback(false); 13 | } 14 | }); 15 | } 16 | 17 | rename(filename, newFilename, callback) { 18 | this.storage.getItem(filename, (err, value) => { 19 | if (value === null) { 20 | this.storage.removeItem(newFilename, callback); 21 | } else { 22 | this.storage.setItem(newFilename, value, () => { 23 | this.storage.removeItem(filename, callback); 24 | }); 25 | } 26 | }); 27 | } 28 | 29 | writeFile(filename, contents, options, callback) { 30 | if (typeof options === 'function') { 31 | callback = options; 32 | } 33 | this.storage.setItem(filename, contents, callback); 34 | } 35 | 36 | appendFile(filename, toAppend, options, callback) { 37 | if (typeof options === 'function') { 38 | callback = options; 39 | } 40 | 41 | this.storage.getItem(filename, (err, contents) => { 42 | contents = contents || ''; 43 | contents += toAppend; 44 | this.storage.setItem(filename, contents, callback); 45 | }); 46 | } 47 | 48 | readFile(filename, options, callback) { 49 | if (typeof options === 'function') { 50 | callback = options; 51 | } 52 | this.storage.getItem(filename, (err, contents) => { 53 | return callback(null, contents || ''); 54 | }); 55 | } 56 | 57 | unlink(filename, callback) { 58 | this.storage.removeItem(filename, callback); 59 | } 60 | 61 | mkdirp(dir, callback) { 62 | return callback(); 63 | } 64 | 65 | ensureDatafileIntegrity(filename, callback) { 66 | return callback(null); 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /lib/promisefy.js: -------------------------------------------------------------------------------- 1 | // export function promisefy(Datastore, methodName) { 2 | // const method = Datastore.prototype[methodName]; 3 | // 4 | // return function (...args) { 5 | // const self = this 6 | // 7 | // return new Promise(function (resolve, reject) { 8 | // let callback = args[args.length -1] 9 | // let newArgs = [...args] 10 | // 11 | // if (typeof callback === 'function') { 
12 | // newArgs = newArgs.slice(0, -1) 13 | // } else { 14 | // callback = function () {} 15 | // } 16 | // 17 | // method.bind(self)(...newArgs, function (err, res) { 18 | // callback(err, res) 19 | // if (err) return reject(err) 20 | // resolve(res) 21 | // }) 22 | // }) 23 | // } 24 | // } 25 | 26 | "use strict"; 27 | 28 | Object.defineProperty(exports, "__esModule", { 29 | value: true 30 | }); 31 | exports.promisefy = promisefy; 32 | 33 | function _toConsumableArray(arr) { 34 | if (Array.isArray(arr)) { 35 | for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { 36 | arr2[i] = arr[i]; 37 | } 38 | return arr2; 39 | } else { 40 | return Array.from(arr); 41 | } 42 | } 43 | 44 | function promisefy(Datastore, methodName) { 45 | var method = Datastore.prototype[methodName]; 46 | 47 | return function() { 48 | for ( 49 | var _len = arguments.length, args = Array(_len), _key = 0; 50 | _key < _len; 51 | _key++ 52 | ) { 53 | args[_key] = arguments[_key]; 54 | } 55 | 56 | var self = this; 57 | 58 | return new Promise(function(resolve, reject) { 59 | var callback = args[args.length - 1]; 60 | var newArgs = [].concat(args); 61 | 62 | if (typeof callback === "function") { 63 | newArgs = newArgs.slice(0, -1); 64 | } else { 65 | callback = function callback() {}; 66 | } 67 | 68 | method.bind(self).apply( 69 | undefined, 70 | _toConsumableArray(newArgs).concat([ 71 | function(err, res) { 72 | callback(err, res); 73 | if (err) return reject(err); 74 | resolve(res); 75 | } 76 | ]) 77 | ); 78 | }); 79 | }; 80 | } 81 | -------------------------------------------------------------------------------- /lib/customUtils.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Specific customUtils for the browser, where we don't have access to the Crypto and Buffer modules 3 | */ 4 | 5 | /** 6 | * Taken from the crypto-browserify module 7 | * https://github.com/dominictarr/crypto-browserify 8 | * NOTE: Math.random() does not guarantee "cryptographic quality" but we actually don't need it 9 | */ 10 | function randomBytes(size) { 11 | var bytes = new Array(size); 12 | var r; 13 | 14 | for (var i = 0, r; i < size; i++) { 15 | if ((i & 0x03) == 0) r = Math.random() * 0x100000000; 16 | bytes[i] = (r >>> ((i & 0x03) << 3)) & 0xff; 17 | } 18 | 19 | return bytes; 20 | } 21 | 22 | /** 23 | * Taken from the base64-js module 24 | * https://github.com/beatgammit/base64-js/ 25 | */ 26 | function byteArrayToBase64(uint8) { 27 | var lookup = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/', 28 | extraBytes = uint8.length % 3, // if we have 1 byte left, pad 2 bytes 29 | output = '', 30 | temp, 31 | length, 32 | i; 33 | 34 | function tripletToBase64(num) { 35 | return ( 36 | lookup[(num >> 18) & 0x3f] + 37 | lookup[(num >> 12) & 0x3f] + 38 | lookup[(num >> 6) & 0x3f] + 39 | lookup[num & 0x3f] 40 | ); 41 | } 42 | 43 | // go through the array every three bytes, we'll deal with trailing stuff later 44 | for (i = 0, length = uint8.length - extraBytes; i < length; i += 3) { 45 | temp = (uint8[i] << 16) + (uint8[i + 1] << 8) + uint8[i + 2]; 46 | output += tripletToBase64(temp); 47 | } 48 | 49 | // pad the end with zeros, but make sure to not forget the extra bytes 50 | switch (extraBytes) { 51 | case 1: 52 | temp = uint8[uint8.length - 1]; 53 | output += lookup[temp >> 2]; 54 | output += lookup[(temp << 4) & 0x3f]; 55 | output += '=='; 56 | break; 57 | case 2: 58 | temp = (uint8[uint8.length - 2] << 8) + uint8[uint8.length - 1]; 59 | output += lookup[temp >> 10]; 60 
| output += lookup[(temp >> 4) & 0x3f]; 61 | output += lookup[(temp << 2) & 0x3f]; 62 | output += '='; 63 | break; 64 | } 65 | 66 | return output; 67 | } 68 | 69 | /** 70 | * Return a random alphanumerical string of length len 71 | * There is a very small probability (less than 1/1,000,000) for the length to be less than len 72 | * (il the base64 conversion yields too many pluses and slashes) but 73 | * that's not an issue here 74 | * The probability of a collision is extremely small (need 3*10^12 documents to have one chance in a million of a collision) 75 | * See http://en.wikipedia.org/wiki/Birthday_problem 76 | */ 77 | function uid(len) { 78 | return byteArrayToBase64(randomBytes(Math.ceil(Math.max(8, len * 2)))) 79 | .replace(/[+\/]/g, '') 80 | .slice(0, len); 81 | } 82 | 83 | module.exports.uid = uid; 84 | -------------------------------------------------------------------------------- /lib/executor.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Responsible for sequentially executing actions on the database 3 | */ 4 | 5 | var async = require('async'); 6 | 7 | function Executor() { 8 | this.buffer = []; 9 | this.ready = false; 10 | 11 | // This queue will execute all commands, one-by-one in order 12 | this.queue = async.queue(function(task, cb) { 13 | var newArguments = []; 14 | 15 | // task.arguments is an array-like object on which adding a new field doesn't work, so we transform it into a real array 16 | for (var i = 0; i < task.arguments.length; i += 1) { 17 | newArguments.push(task.arguments[i]); 18 | } 19 | var lastArg = task.arguments[task.arguments.length - 1]; 20 | 21 | // Always tell the queue task is complete. Execute callback if any was given. 22 | if (typeof lastArg === 'function') { 23 | // Callback was supplied 24 | newArguments[newArguments.length - 1] = function() { 25 | if (typeof setImmediate === 'function') { 26 | setImmediate(cb); 27 | } else { 28 | process.nextTick(cb); 29 | } 30 | lastArg.apply(null, arguments); 31 | }; 32 | } else if (!lastArg && task.arguments.length !== 0) { 33 | // false/undefined/null supplied as callbback 34 | newArguments[newArguments.length - 1] = function() { 35 | cb(); 36 | }; 37 | } else { 38 | // Nothing supplied as callback 39 | newArguments.push(function() { 40 | cb(); 41 | }); 42 | } 43 | 44 | task.fn.apply(task.this, newArguments); 45 | }, 1); 46 | } 47 | 48 | /** 49 | * If executor is ready, queue task (and process it immediately if executor was idle) 50 | * If not, buffer task for later processing 51 | * @param {Object} task 52 | * task.this - Object to use as this 53 | * task.fn - Function to execute 54 | * task.arguments - Array of arguments, IMPORTANT: only the last argument may be a function (the callback) 55 | * and the last argument cannot be false/undefined/null 56 | * @param {Boolean} forceQueuing Optional (defaults to false) force executor to queue task even if it is not ready 57 | */ 58 | Executor.prototype.push = function(task, forceQueuing) { 59 | if (this.ready || forceQueuing) { 60 | this.queue.push(task); 61 | } else { 62 | this.buffer.push(task); 63 | } 64 | }; 65 | 66 | /** 67 | * Queue all tasks in buffer (in the same order they came in) 68 | * Automatically sets executor as ready 69 | */ 70 | Executor.prototype.processBuffer = function() { 71 | var i; 72 | this.ready = true; 73 | for (i = 0; i < this.buffer.length; i += 1) { 74 | this.queue.push(this.buffer[i]); 75 | } 76 | this.buffer = []; 77 | }; 78 | 79 | // Interface 80 | module.exports = Executor; 81 | 
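A rough usage sketch, not part of the repository: how tasks can be handed to this executor. The labels and no-op callbacks are illustrative only; Datastore pushes its own operations in the same shape (see lib/datastore.js), passing forceQueuing for loadDatabase so it runs before the executor is marked ready.

var Executor = require('./executor'); // assumes this sketch lives next to lib/executor.js

var executor = new Executor();

// Queued and run immediately because forceQueuing is true, even though the executor is not ready yet.
executor.push(
  { this: null, fn: function (label, cb) { console.log(label); cb(null); }, arguments: ['load', function () {}] },
  true
);

// Without forceQueuing this task is buffered until processBuffer() is called.
executor.push(
  { this: null, fn: function (label, cb) { console.log(label); cb(null); }, arguments: ['find', function () {}] }
);

// Marks the executor ready and replays buffered tasks in order, one at a time.
executor.processBuffer();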
-------------------------------------------------------------------------------- /test/persistence.test.js: -------------------------------------------------------------------------------- 1 | import { getDb } from '../config/jest/helper' 2 | 3 | it('update with promise', async () => { 4 | const db = await getDb() 5 | const items0 = await db.findAsync({}) 6 | 7 | await db.insertAsync({ name: 'Maggie' }) 8 | await db.insertAsync({ name: 'Bob' }) 9 | 10 | const items = await db.findAsync({}) 11 | 12 | const maggie1 = await db.findOneAsync({ name: 'Maggie' }) 13 | const bob1 = await db.findOneAsync({ name: 'Bob' }) 14 | 15 | await db.updateAsync({ name: { $in: ['Maggie', 'Bob'] } }, { $set: { age: 1 } }, { multi: true }) 16 | 17 | const maggie2 = await db.findOneAsync({ name: 'Maggie' }) 18 | const bob2 = await db.findOneAsync({ name: 'Bob' }) 19 | 20 | expect(items0).toHaveLength(0) 21 | expect(items).toHaveLength(2) 22 | expect(maggie1.age).toBeUndefined() 23 | expect(bob1.age).toBeUndefined() 24 | expect(bob2.age).toEqual(1) 25 | expect(maggie2.age).toEqual(1) 26 | }) 27 | 28 | it('update with callback', async done => { 29 | const db = await getDb() 30 | const items0 = await db.findAsync({}) 31 | 32 | await db.insertAsync({ name: 'Maggie' }) 33 | await db.insertAsync({ name: 'Bob' }) 34 | 35 | const items = await db.findAsync({}) 36 | 37 | const maggie1 = await db.findOneAsync({ name: 'Maggie' }) 38 | const bob1 = await db.findOneAsync({ name: 'Bob' }) 39 | 40 | db.update({ name: { $in: ['Maggie', 'Bob'] } }, { $set: { age: 1 } }, { multi: true }, async function(err, res) { 41 | const maggie2 = await db.findOneAsync({ name: 'Maggie' }) 42 | const bob2 = await db.findOneAsync({ name: 'Bob' }) 43 | 44 | expect(res).toEqual(2) 45 | expect(items0).toHaveLength(0) 46 | expect(items).toHaveLength(2) 47 | expect(maggie1.age).toBeUndefined() 48 | expect(bob1.age).toBeUndefined() 49 | expect(bob2.age).toEqual(1) 50 | expect(maggie2.age).toEqual(1) 51 | done() 52 | }) 53 | }) 54 | 55 | it('remove with callback', async done => { 56 | const db = await getDb() 57 | const items0 = await db.findAsync({}) 58 | 59 | await db.insertAsync({ name: 'Maggie' }) 60 | await db.insertAsync({ name: 'Bob' }) 61 | 62 | const items = await db.findAsync({}) 63 | 64 | db.remove({ name: { $in: ['Bob'] } }, { multi: true }, async function(err, res) { 65 | const bob2 = await db.findOneAsync({ name: 'Bob' }) 66 | 67 | expect(res).toEqual(1) 68 | expect(items0).toHaveLength(0) 69 | expect(items).toHaveLength(2) 70 | expect(bob2).toBeNull() 71 | done() 72 | }) 73 | }) 74 | 75 | it('resolve remove nonexistent', async done => { 76 | const db = await getDb() 77 | const items0 = await db.findAsync({}) 78 | 79 | await db.insertAsync({ name: 'Maggie' }) 80 | await db.insertAsync({ name: 'Bob' }) 81 | 82 | const items = await db.findAsync({}) 83 | 84 | db.remove({ name: 'nonexistent' }, { multi: true }, async function(err, res) { 85 | const nonexistent = await db.findOneAsync({ name: 'nonexistent' }) 86 | 87 | expect(res).toEqual(0) 88 | expect(items0).toHaveLength(0) 89 | expect(items).toHaveLength(2) 90 | expect(nonexistent).toBeNull() 91 | done() 92 | }) 93 | }) 94 | 95 | it('resolve findOne nonexistent', async () => { 96 | const db = await getDb() 97 | await db.insertAsync({ name: 'Maggie' }) 98 | await db.insertAsync({ name: 'Bob' }) 99 | 100 | const items = await db.findAsync({ name: 'nonexistent' }) 101 | 102 | const item = await db.findOneAsync({ name: 'nonexistent' }, function() {}) 103 | 104 | expect(item).toBeNull() 105 | 
expect(items.length).toEqual(0) 106 | }) 107 | 108 | 109 | it('should limit', async (done) => { 110 | const db = await getDb() 111 | await db.insertAsync({ name: 'A' }) 112 | await db.insertAsync({ name: 'B' }) 113 | await db.insertAsync({ name: 'C' }) 114 | await db.insertAsync({ name: 'D' }) 115 | 116 | db.find({}).sort({ name: 1 }).skip(1).limit(2).exec(function (err, docs) { 117 | expect(docs.length).toEqual(2) 118 | expect(docs[1].name).toEqual('C') 119 | done() 120 | }); 121 | }) 122 | 123 | it('should limit async', async (done) => { 124 | const db = await getDb() 125 | await db.insertAsync({ name: 'A' }) 126 | await db.insertAsync({ name: 'B' }) 127 | await db.insertAsync({ name: 'C' }) 128 | await db.insertAsync({ name: 'D' }) 129 | 130 | const docs = await db.find({}).sort({ name: 1 }).skip(1).limit(2).exec(); 131 | expect(docs.length).toEqual(2) 132 | expect(docs[1].name).toEqual('C') 133 | done() 134 | }) 135 | -------------------------------------------------------------------------------- /index.d.ts: -------------------------------------------------------------------------------- 1 | declare module "react-native-local-mongodb" { 2 | export interface StorageStatic { 3 | getItem( 4 | key: string, 5 | callback?: (error?: Error, result?: string) => void 6 | ): Promise; 7 | 8 | setItem( 9 | key: string, 10 | value: string, 11 | callback?: (error?: Error) => void 12 | ): Promise; 13 | 14 | removeItem(key: string, callback?: (error?: Error) => void): Promise; 15 | } 16 | 17 | export interface Options { 18 | filename?: string; 19 | inMemoryOnly?: boolean; 20 | timestampData?: boolean; 21 | autoload?: boolean; 22 | onload?: Function; 23 | afterSerialization?: Function; 24 | beforeDeserialization?: Function; 25 | corruptAlertThreshold?: number; 26 | compareStrings?: Function; 27 | storage: StorageStatic; 28 | } 29 | 30 | export interface IndexOptions { 31 | fieldName: string; 32 | unique?: boolean; 33 | sparse?: boolean; 34 | expireAfterSeconds?: number; 35 | } 36 | 37 | export interface UpdateOptions { 38 | multi?: boolean; 39 | upsert?: boolean; 40 | returnUpdatedDocs?: boolean; 41 | } 42 | 43 | export interface RemoveOptions { 44 | multi?: boolean; 45 | } 46 | 47 | export interface MongoDocument { 48 | [key: string]: any; 49 | } 50 | 51 | export interface Cursor { 52 | exec(): Promise; 53 | 54 | exec(cb: Callback): void; 55 | 56 | skip(value: number): Cursor; 57 | 58 | limit(value: number): Cursor; 59 | 60 | sort(doc: MongoDocument): Cursor; 61 | } 62 | 63 | export type Query = object; 64 | export type Projection = any; 65 | export type Callback = (err: Error | null, value: T) => void; 66 | export type InsertCallback = (err: Error | null, doc: MongoDocument) => void; 67 | export type CountCallback = (err: Error | null, count: number) => void; 68 | export type FindCallback = (err: Error | null, docs: MongoDocument[]) => void; 69 | export type FindOneCallback = (err: Error | null, doc: MongoDocument) => void; 70 | export type UpdateCallback = ( 71 | err: Error | null, 72 | numAffected: number, 73 | affectedDocuments: MongoDocument | MongoDocument[] | null, 74 | upsert: boolean 75 | ) => void; 76 | export type RemoveCallback = (err: Error | null, numAffected: number) => void; 77 | 78 | export default class Datastore { 79 | constructor(options?: Options); 80 | 81 | public loadDatabase(): void; 82 | 83 | public getAllData(): any[]; 84 | 85 | public resetIndexes(newData: any): void; 86 | 87 | public ensureIndex(options: IndexOptions, callback?: Callback): void; 88 | 89 | public 
removeIndex(fieldName: string, callback?: Callback): void; 90 | 91 | public addToIndexes(doc: MongoDocument): void; 92 | 93 | public removeFromIndexes(doc: MongoDocument): void; 94 | 95 | public updateIndexes(oldDoc: MongoDocument, newDoc: MongoDocument): void; 96 | 97 | public getCandidates( 98 | query: Query, 99 | dontExpireStaleDocs: boolean, 100 | callback?: Callback 101 | ): void; 102 | 103 | public insert(newDoc: MongoDocument, cb: InsertCallback): void; 104 | 105 | public createNewId(): number; 106 | 107 | public count(query: Query): Cursor; 108 | public count(query: Query, callback: Callback): void; 109 | 110 | public find(query: Query): Cursor; 111 | public find(query: Query, projection: Projection): Cursor; 112 | public find( 113 | query: Query, 114 | projection: Projection, 115 | callback: Callback 116 | ): void; 117 | 118 | public findOne(query: Query): Cursor; 119 | public findOne(query: Query, projection: Projection): Cursor; 120 | public findOne( 121 | query: Query, 122 | projection: Projection, 123 | callback: Callback 124 | ): void; 125 | 126 | public update( 127 | query: Query, 128 | doc: MongoDocument, 129 | options?: UpdateOptions, 130 | callback?: UpdateCallback 131 | ): void; 132 | 133 | public remove( 134 | query: Query, 135 | options?: RemoveOptions, 136 | callback?: RemoveCallback 137 | ): void; 138 | 139 | public loadDatabaseAsync(): Promise; 140 | 141 | public findAsync(query: Query): Promise; 142 | 143 | public findOneAsync(query: Query): Promise; 144 | 145 | public insertAsync(newDoc: MongoDocument): Promise; 146 | 147 | public updateAsync( 148 | query: Query, 149 | doc: MongoDocument, 150 | options?: UpdateOptions 151 | ): Promise; 152 | 153 | public removeAsync(query: Query, options?: RemoveOptions): Promise; 154 | } 155 | } 156 | -------------------------------------------------------------------------------- /lib/cursor.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Manage access to data, be it to find, update or remove it 3 | */ 4 | var model = require('./model'), 5 | _ = require('underscore'); 6 | 7 | /** 8 | * Create a new cursor for this collection 9 | * @param {Datastore} db - The datastore this cursor is bound to 10 | * @param {Query} query - The query this cursor will operate on 11 | * @param {Function} execFn - Handler to be executed after cursor has found the results and before the callback passed to find/findOne/update/remove 12 | */ 13 | function Cursor(db, query, execFn) { 14 | this.db = db; 15 | this.query = query || {}; 16 | if (execFn) { 17 | this.execFn = execFn; 18 | } 19 | } 20 | 21 | /** 22 | * Set a limit to the number of results 23 | */ 24 | Cursor.prototype.limit = function(limit) { 25 | this._limit = limit; 26 | return this; 27 | }; 28 | 29 | /** 30 | * Skip a the number of results 31 | */ 32 | Cursor.prototype.skip = function(skip) { 33 | this._skip = skip; 34 | return this; 35 | }; 36 | 37 | /** 38 | * Sort results of the query 39 | * @param {SortQuery} sortQuery - SortQuery is { field: order }, field can use the dot-notation, order is 1 for ascending and -1 for descending 40 | */ 41 | Cursor.prototype.sort = function(sortQuery) { 42 | this._sort = sortQuery; 43 | return this; 44 | }; 45 | 46 | /** 47 | * Add the use of a projection 48 | * @param {Object} projection - MongoDB-style projection. {} means take all fields. Then it's { key1: 1, key2: 1 } to take only key1 and key2 49 | * { key1: 0, key2: 0 } to omit only key1 and key2. 
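(e.g. { planet: 1, _id: 0 } keeps only the planet field, while { planet: 0 } drops only planet and keeps everything else).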
Except _id, you can't mix takes and omits 50 | */ 51 | Cursor.prototype.projection = function(projection) { 52 | this._projection = projection; 53 | return this; 54 | }; 55 | 56 | /** 57 | * Apply the projection 58 | */ 59 | Cursor.prototype.project = function(candidates) { 60 | var res = [], 61 | self = this, 62 | keepId, 63 | action, 64 | keys; 65 | 66 | if (this._projection === undefined || Object.keys(this._projection).length === 0) { 67 | return candidates; 68 | } 69 | 70 | keepId = this._projection._id === 0 ? false : true; 71 | this._projection = _.omit(this._projection, '_id'); 72 | 73 | // Check for consistency 74 | keys = Object.keys(this._projection); 75 | keys.forEach(function(k) { 76 | if (action !== undefined && self._projection[k] !== action) { 77 | throw new Error("Can't both keep and omit fields except for _id"); 78 | } 79 | action = self._projection[k]; 80 | }); 81 | 82 | // Do the actual projection 83 | candidates.forEach(function(candidate) { 84 | var toPush; 85 | if (action === 1) { 86 | // pick-type projection 87 | toPush = { $set: {} }; 88 | keys.forEach(function(k) { 89 | toPush.$set[k] = model.getDotValue(candidate, k); 90 | if (toPush.$set[k] === undefined) { 91 | delete toPush.$set[k]; 92 | } 93 | }); 94 | toPush = model.modify({}, toPush); 95 | } else { 96 | // omit-type projection 97 | toPush = { $unset: {} }; 98 | keys.forEach(function(k) { 99 | toPush.$unset[k] = true; 100 | }); 101 | toPush = model.modify(candidate, toPush); 102 | } 103 | if (keepId) { 104 | toPush._id = candidate._id; 105 | } else { 106 | delete toPush._id; 107 | } 108 | res.push(toPush); 109 | }); 110 | 111 | return res; 112 | }; 113 | 114 | /** 115 | * Get all matching elements 116 | * Will return pointers to matched elements (shallow copies), returning full copies is the role of find or findOne 117 | * This is an internal function, use exec which uses the executor 118 | * 119 | * @param {Function} callback - Signature: err, results 120 | */ 121 | Cursor.prototype._exec = function(_callback) { 122 | var res = [], 123 | added = 0, 124 | skipped = 0, 125 | self = this, 126 | error = null, 127 | i, 128 | keys, 129 | key; 130 | 131 | function callback(error, res) { 132 | if (self.execFn) { 133 | return self.execFn(error, res, _callback); 134 | } else { 135 | return _callback(error, res); 136 | } 137 | } 138 | 139 | this.db.getCandidates(this.query, function(err, candidates) { 140 | if (err) { 141 | return callback(err); 142 | } 143 | 144 | try { 145 | for (i = 0; i < candidates.length; i += 1) { 146 | if (model.match(candidates[i], self.query)) { 147 | // If a sort is defined, wait for the results to be sorted before applying limit and skip 148 | if (!self._sort) { 149 | if (self._skip && self._skip > skipped) { 150 | skipped += 1; 151 | } else { 152 | res.push(candidates[i]); 153 | added += 1; 154 | if (self._limit && self._limit <= added) { 155 | break; 156 | } 157 | } 158 | } else { 159 | res.push(candidates[i]); 160 | } 161 | } 162 | } 163 | } catch (err) { 164 | return callback(err); 165 | } 166 | 167 | // Apply all sorts 168 | if (self._sort) { 169 | keys = Object.keys(self._sort); 170 | 171 | // Sorting 172 | var criteria = []; 173 | for (i = 0; i < keys.length; i++) { 174 | key = keys[i]; 175 | criteria.push({ key: key, direction: self._sort[key] }); 176 | } 177 | res.sort(function(a, b) { 178 | var criterion, compare, i; 179 | for (i = 0; i < criteria.length; i++) { 180 | criterion = criteria[i]; 181 | compare = 182 | criterion.direction * 183 | model.compareThings( 184 | 
model.getDotValue(a, criterion.key), 185 | model.getDotValue(b, criterion.key), 186 | self.db.compareStrings 187 | ); 188 | if (compare !== 0) { 189 | return compare; 190 | } 191 | } 192 | return 0; 193 | }); 194 | 195 | // Applying limit and skip 196 | var limit = self._limit || res.length, 197 | skip = self._skip || 0; 198 | 199 | res = res.slice(skip, skip + limit); 200 | } 201 | 202 | // Apply projection 203 | try { 204 | res = self.project(res); 205 | } catch (e) { 206 | error = e; 207 | res = undefined; 208 | } 209 | 210 | return callback(error, res); 211 | }); 212 | }; 213 | 214 | Cursor.prototype.exec = function() { 215 | var self = this 216 | var args = arguments 217 | 218 | return new Promise(function(resolve, reject) { 219 | function fn(_cb) { 220 | function cb(err, res) { 221 | if (err) { 222 | reject(err) 223 | } else { 224 | resolve(res) 225 | } 226 | return _cb && _cb(err, res) 227 | } 228 | 229 | self._exec(cb.bind(self)) 230 | } 231 | 232 | self.db.executor.push({ this: self, fn: fn.bind(self), arguments: args }) 233 | }) 234 | } 235 | 236 | // Interface 237 | module.exports = Cursor; 238 | -------------------------------------------------------------------------------- /lib/indexes.js: -------------------------------------------------------------------------------- 1 | var BinarySearchTree = require('binary-search-tree').AVLTree, 2 | model = require('./model'), 3 | _ = require('underscore'), 4 | util = require('util'); 5 | 6 | /** 7 | * Two indexed pointers are equal iif they point to the same place 8 | */ 9 | function checkValueEquality(a, b) { 10 | return a === b; 11 | } 12 | 13 | /** 14 | * Type-aware projection 15 | */ 16 | function projectForUnique(elt) { 17 | if (elt === null || Array.isArray(elt)) { 18 | return '$null'; 19 | } 20 | if (typeof elt === 'string') { 21 | return '$string' + elt; 22 | } 23 | if (typeof elt === 'boolean') { 24 | return '$boolean' + elt; 25 | } 26 | if (typeof elt === 'number') { 27 | return '$number' + elt; 28 | } 29 | if (util.isArray(elt)) { 30 | return '$date' + elt.getTime(); 31 | } 32 | 33 | return elt; // Arrays and objects, will check for pointer equality 34 | } 35 | 36 | /** 37 | * Create a new index 38 | * All methods on an index guarantee that either the whole operation was successful and the index changed 39 | * or the operation was unsuccessful and an error is thrown while the index is unchanged 40 | * @param {String} options.fieldName On which field should the index apply (can use dot notation to index on sub fields) 41 | * @param {Boolean} options.unique Optional, enforce a unique constraint (default: false) 42 | * @param {Boolean} options.sparse Optional, allow a sparse index (we can have documents for which fieldName is undefined) (default: false) 43 | */ 44 | function Index(options) { 45 | this.fieldName = options.fieldName; 46 | this.unique = options.unique || false; 47 | this.sparse = options.sparse || false; 48 | 49 | this.treeOptions = { 50 | unique: this.unique, 51 | compareKeys: model.compareThings, 52 | checkValueEquality: checkValueEquality, 53 | }; 54 | 55 | this.reset(); // No data in the beginning 56 | } 57 | 58 | /** 59 | * Reset an index 60 | * @param {Document or Array of documents} newData Optional, data to initialize the index with 61 | * If an error is thrown during insertion, the index is not modified 62 | */ 63 | Index.prototype.reset = function(newData) { 64 | this.tree = new BinarySearchTree(this.treeOptions); 65 | 66 | if (newData) { 67 | this.insert(newData); 68 | } 69 | }; 70 | 71 | /** 72 | * 
Insert a new document in the index 73 | * If an array is passed, we insert all its elements (if one insertion fails the index is not modified) 74 | * O(log(n)) 75 | */ 76 | Index.prototype.insert = function(doc) { 77 | var key, 78 | self = this, 79 | keys, 80 | i, 81 | failingI, 82 | error; 83 | 84 | if (util.isArray(doc)) { 85 | this.insertMultipleDocs(doc); 86 | return; 87 | } 88 | 89 | key = model.getDotValue(doc, this.fieldName); 90 | 91 | // We don't index documents that don't contain the field if the index is sparse 92 | if (key === undefined && this.sparse) { 93 | return; 94 | } 95 | 96 | if (!util.isArray(key)) { 97 | this.tree.insert(key, doc); 98 | } else { 99 | // If an insert fails due to a unique constraint, roll back all inserts before it 100 | keys = _.uniq(key, projectForUnique); 101 | 102 | for (i = 0; i < keys.length; i += 1) { 103 | try { 104 | this.tree.insert(keys[i], doc); 105 | } catch (e) { 106 | error = e; 107 | failingI = i; 108 | break; 109 | } 110 | } 111 | 112 | if (error) { 113 | for (i = 0; i < failingI; i += 1) { 114 | this.tree.delete(keys[i], doc); 115 | } 116 | 117 | throw error; 118 | } 119 | } 120 | }; 121 | 122 | /** 123 | * Insert an array of documents in the index 124 | * If a constraint is violated, the changes should be rolled back and an error thrown 125 | * 126 | * @API private 127 | */ 128 | Index.prototype.insertMultipleDocs = function(docs) { 129 | var i, error, failingI; 130 | 131 | for (i = 0; i < docs.length; i += 1) { 132 | try { 133 | this.insert(docs[i]); 134 | } catch (e) { 135 | error = e; 136 | failingI = i; 137 | break; 138 | } 139 | } 140 | 141 | if (error) { 142 | for (i = 0; i < failingI; i += 1) { 143 | this.remove(docs[i]); 144 | } 145 | 146 | throw error; 147 | } 148 | }; 149 | 150 | /** 151 | * Remove a document from the index 152 | * If an array is passed, we remove all its elements 153 | * The remove operation is safe with regards to the 'unique' constraint 154 | * O(log(n)) 155 | */ 156 | Index.prototype.remove = function(doc) { 157 | var key, 158 | self = this; 159 | 160 | if (util.isArray(doc)) { 161 | doc.forEach(function(d) { 162 | self.remove(d); 163 | }); 164 | return; 165 | } 166 | 167 | key = model.getDotValue(doc, this.fieldName); 168 | 169 | if (key === undefined && this.sparse) { 170 | return; 171 | } 172 | 173 | if (!util.isArray(key)) { 174 | this.tree.delete(key, doc); 175 | } else { 176 | _.uniq(key, projectForUnique).forEach(function(_key) { 177 | self.tree.delete(_key, doc); 178 | }); 179 | } 180 | }; 181 | 182 | /** 183 | * Update a document in the index 184 | * If a constraint is violated, changes are rolled back and an error thrown 185 | * Naive implementation, still in O(log(n)) 186 | */ 187 | Index.prototype.update = function(oldDoc, newDoc) { 188 | if (util.isArray(oldDoc)) { 189 | this.updateMultipleDocs(oldDoc); 190 | return; 191 | } 192 | 193 | this.remove(oldDoc); 194 | 195 | try { 196 | this.insert(newDoc); 197 | } catch (e) { 198 | this.insert(oldDoc); 199 | throw e; 200 | } 201 | }; 202 | 203 | /** 204 | * Update multiple documents in the index 205 | * If a constraint is violated, the changes need to be rolled back 206 | * and an error thrown 207 | * @param {Array of oldDoc, newDoc pairs} pairs 208 | * 209 | * @API private 210 | */ 211 | Index.prototype.updateMultipleDocs = function(pairs) { 212 | var i, failingI, error; 213 | 214 | for (i = 0; i < pairs.length; i += 1) { 215 | this.remove(pairs[i].oldDoc); 216 | } 217 | 218 | for (i = 0; i < pairs.length; i += 1) { 219 | try { 220 | 
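// Attempt to insert the new version of each pair; a unique-constraint failure is caught and rolled back below. (descriptive comment added for clarity)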
this.insert(pairs[i].newDoc); 221 | } catch (e) { 222 | error = e; 223 | failingI = i; 224 | break; 225 | } 226 | } 227 | 228 | // If an error was raised, roll back changes in the inverse order 229 | if (error) { 230 | for (i = 0; i < failingI; i += 1) { 231 | this.remove(pairs[i].newDoc); 232 | } 233 | 234 | for (i = 0; i < pairs.length; i += 1) { 235 | this.insert(pairs[i].oldDoc); 236 | } 237 | 238 | throw error; 239 | } 240 | }; 241 | 242 | /** 243 | * Revert an update 244 | */ 245 | Index.prototype.revertUpdate = function(oldDoc, newDoc) { 246 | var revert = []; 247 | 248 | if (!util.isArray(oldDoc)) { 249 | this.update(newDoc, oldDoc); 250 | } else { 251 | oldDoc.forEach(function(pair) { 252 | revert.push({ oldDoc: pair.newDoc, newDoc: pair.oldDoc }); 253 | }); 254 | this.update(revert); 255 | } 256 | }; 257 | 258 | /** 259 | * Get all documents in index whose key match value (if it is a Thing) or one of the elements of value (if it is an array of Things) 260 | * @param {Thing} value Value to match the key against 261 | * @return {Array of documents} 262 | */ 263 | Index.prototype.getMatching = function(value) { 264 | var self = this; 265 | 266 | if (!util.isArray(value)) { 267 | return self.tree.search(value); 268 | } else { 269 | var _res = {}, 270 | res = []; 271 | 272 | value.forEach(function(v) { 273 | self.getMatching(v).forEach(function(doc) { 274 | _res[doc._id] = doc; 275 | }); 276 | }); 277 | 278 | Object.keys(_res).forEach(function(_id) { 279 | res.push(_res[_id]); 280 | }); 281 | 282 | return res; 283 | } 284 | }; 285 | 286 | /** 287 | * Get all documents in index whose key is between bounds are they are defined by query 288 | * Documents are sorted by key 289 | * @param {Query} query 290 | * @return {Array of documents} 291 | */ 292 | Index.prototype.getBetweenBounds = function(query) { 293 | return this.tree.betweenBounds(query); 294 | }; 295 | 296 | /** 297 | * Get all elements in the index 298 | * @return {Array of documents} 299 | */ 300 | Index.prototype.getAll = function() { 301 | var res = []; 302 | 303 | this.tree.executeOnEveryNode(function(node) { 304 | var i; 305 | 306 | for (i = 0; i < node.data.length; i += 1) { 307 | res.push(node.data[i]); 308 | } 309 | }); 310 | 311 | return res; 312 | }; 313 | 314 | // Interface 315 | module.exports = Index; 316 | -------------------------------------------------------------------------------- /lib/persistence.js: -------------------------------------------------------------------------------- 1 | const Storage = require('./storage'); 2 | let storage; 3 | 4 | /** 5 | * Handle every persistence-related task 6 | * The interface Datastore expects to be implemented is 7 | * * Persistence.loadDatabase(callback) and callback has signature err 8 | * * Persistence.persistNewState(newDocs, callback) where newDocs is an array of documents and callback has signature err 9 | */ 10 | 11 | var path = { 12 | join: function(...args) { 13 | return args.join('_'); 14 | }, 15 | }, 16 | model = require('./model'), 17 | async = require('async'), 18 | customUtils = require('./customUtils'), 19 | Index = require('./indexes'); 20 | 21 | /** 22 | * Create a new Persistence object for database options.db 23 | * @param {Datastore} options.db 24 | * @param {Boolean} options.nodeWebkitAppName Optional, specify the name of your NW app if you want options.filename to be relative to the directory where 25 | * Node Webkit stores application data such as cookies and local storage (the best place to store data in my opinion) 26 | */ 27 | function 
Persistence(options) { 28 | var i, j, randomString; 29 | 30 | this.db = options.db; 31 | this.inMemoryOnly = this.db.inMemoryOnly; 32 | this.filename = this.db.filename; 33 | this.corruptAlertThreshold = 34 | options.corruptAlertThreshold !== undefined ? options.corruptAlertThreshold : 0.1; 35 | storage = new Storage(this.db.storage); 36 | 37 | if ( 38 | !this.inMemoryOnly && 39 | this.filename && 40 | this.filename.charAt(this.filename.length - 1) === '~' 41 | ) { 42 | throw new Error( 43 | "The datafile name can't end with a ~, which is reserved for crash safe backup files", 44 | ); 45 | } 46 | 47 | // After serialization and before deserialization hooks with some basic sanity checks 48 | if (options.afterSerialization && !options.beforeDeserialization) { 49 | throw new Error( 50 | 'Serialization hook defined but deserialization hook undefined, cautiously refusing to start NeDB to prevent dataloss', 51 | ); 52 | } 53 | if (!options.afterSerialization && options.beforeDeserialization) { 54 | throw new Error( 55 | 'Serialization hook undefined but deserialization hook defined, cautiously refusing to start NeDB to prevent dataloss', 56 | ); 57 | } 58 | this.afterSerialization = 59 | options.afterSerialization || 60 | function(s) { 61 | return s; 62 | }; 63 | this.beforeDeserialization = 64 | options.beforeDeserialization || 65 | function(s) { 66 | return s; 67 | }; 68 | for (i = 1; i < 30; i += 1) { 69 | for (j = 0; j < 10; j += 1) { 70 | randomString = customUtils.uid(i); 71 | if (this.beforeDeserialization(this.afterSerialization(randomString)) !== randomString) { 72 | throw new Error( 73 | 'beforeDeserialization is not the reverse of afterSerialization, cautiously refusing to start NeDB to prevent dataloss', 74 | ); 75 | } 76 | } 77 | } 78 | 79 | // For NW apps, store data in the same directory where NW stores application data 80 | if (this.filename && options.nodeWebkitAppName) { 81 | console.log('=================================================================='); 82 | console.log('WARNING: The nodeWebkitAppName option is deprecated'); 83 | console.log('To get the path to the directory where Node Webkit stores the data'); 84 | console.log('for your app, use the internal nw.gui module like this'); 85 | //for some reason, react native was trying to run the "require" code that is inside the console.log... 86 | console.log("require('nw.gui').App.dataPath"); 87 | console.log('See https://github.com/rogerwang/node-webkit/issues/500'); 88 | console.log('=================================================================='); 89 | this.filename = Persistence.getNWAppFilename(options.nodeWebkitAppName, this.filename); 90 | } 91 | } 92 | 93 | /** 94 | * Check if a directory exists and create it on the fly if it is not the case 95 | * cb is optional, signature: err 96 | */ 97 | Persistence.ensureDirectoryExists = function(dir, cb) { 98 | var callback = cb || function() {}; 99 | storage.mkdirp(dir, function(err) { 100 | return callback(err); 101 | }); 102 | }; 103 | 104 | /** 105 | * Return the path the datafile if the given filename is relative to the directory where Node Webkit stores 106 | * data for this application. 
Probably the best place to store data 107 | */ 108 | Persistence.getNWAppFilename = function(appName, relativeFilename) { 109 | var home; 110 | 111 | switch (process.platform) { 112 | case 'win32': 113 | case 'win64': 114 | home = process.env.LOCALAPPDATA || process.env.APPDATA; 115 | if (!home) { 116 | throw new Error("Couldn't find the base application data folder"); 117 | } 118 | home = path.join(home, appName); 119 | break; 120 | case 'darwin': 121 | home = process.env.HOME; 122 | if (!home) { 123 | throw new Error("Couldn't find the base application data directory"); 124 | } 125 | home = path.join(home, 'Library', 'Application Support', appName); 126 | break; 127 | case 'linux': 128 | home = process.env.HOME; 129 | if (!home) { 130 | throw new Error("Couldn't find the base application data directory"); 131 | } 132 | home = path.join(home, '.config', appName); 133 | break; 134 | default: 135 | throw new Error("Can't use the Node Webkit relative path for platform " + process.platform); 136 | break; 137 | } 138 | 139 | return path.join(home, 'nedb-data', relativeFilename); 140 | }; 141 | 142 | /** 143 | * Persist cached database 144 | * This serves as a compaction function since the cache always contains only the number of documents in the collection 145 | * while the data file is append-only so it may grow larger 146 | * @param {Function} cb Optional callback, signature: err 147 | */ 148 | Persistence.prototype.persistCachedDatabase = function(cb) { 149 | var callback = cb || function() {}, 150 | toPersist = '', 151 | self = this; 152 | 153 | if (this.inMemoryOnly) { 154 | return callback(null); 155 | } 156 | 157 | this.db.getAllData().forEach(function(doc) { 158 | toPersist += self.afterSerialization(model.serialize(doc)) + '\n'; 159 | }); 160 | Object.keys(this.db.indexes).forEach(function(fieldName) { 161 | if (fieldName != '_id') { 162 | // The special _id index is managed by datastore.js, the others need to be persisted 163 | toPersist += 164 | self.afterSerialization( 165 | model.serialize({ 166 | $$indexCreated: { 167 | fieldName: fieldName, 168 | unique: self.db.indexes[fieldName].unique, 169 | sparse: self.db.indexes[fieldName].sparse, 170 | }, 171 | }), 172 | ) + '\n'; 173 | } 174 | }); 175 | 176 | storage.crashSafeWriteFile(this.filename, toPersist, function(err) { 177 | if (err) { 178 | return callback(err); 179 | } 180 | self.db.emit('compaction.done'); 181 | return callback(null); 182 | }); 183 | }; 184 | 185 | /** 186 | * Queue a rewrite of the datafile 187 | */ 188 | Persistence.prototype.compactDatafile = function() { 189 | this.db.executor.push({ this: this, fn: this.persistCachedDatabase, arguments: [] }); 190 | }; 191 | 192 | /** 193 | * Set automatic compaction every interval ms 194 | * @param {Number} interval in milliseconds, with an enforced minimum of 5 seconds 195 | */ 196 | Persistence.prototype.setAutocompactionInterval = function(interval) { 197 | var self = this, 198 | minInterval = 5000, 199 | realInterval = Math.max(interval || 0, minInterval); 200 | 201 | this.stopAutocompaction(); 202 | 203 | this.autocompactionIntervalId = setInterval(function() { 204 | self.compactDatafile(); 205 | }, realInterval); 206 | }; 207 | 208 | /** 209 | * Stop autocompaction (do nothing if autocompaction was not running) 210 | */ 211 | Persistence.prototype.stopAutocompaction = function() { 212 | if (this.autocompactionIntervalId) { 213 | clearInterval(this.autocompactionIntervalId); 214 | } 215 | }; 216 | 217 | /** 218 | * Persist new state for the given newDocs (can be 
insertion, update or removal) 219 | * Use an append-only format 220 | * @param {Array} newDocs Can be empty if no doc was updated/removed 221 | * @param {Function} cb Optional, signature: err 222 | */ 223 | Persistence.prototype.persistNewState = function(newDocs, cb) { 224 | var self = this, 225 | toPersist = '', 226 | callback = cb || function() {}; 227 | 228 | // In-memory only datastore 229 | if (self.inMemoryOnly) { 230 | return callback(null); 231 | } 232 | 233 | newDocs.forEach(function(doc) { 234 | toPersist += self.afterSerialization(model.serialize(doc)) + '\n'; 235 | }); 236 | 237 | if (toPersist.length === 0) { 238 | return callback(null); 239 | } 240 | 241 | storage.appendFile(self.filename, toPersist, 'utf8', function(err) { 242 | return callback(err); 243 | }); 244 | }; 245 | 246 | /** 247 | * From a database's raw data, return the corresponding 248 | * machine understandable collection 249 | */ 250 | Persistence.prototype.treatRawData = function(rawData) { 251 | var data = rawData.split('\n'), 252 | dataById = {}, 253 | tdata = [], 254 | i, 255 | indexes = {}, 256 | corruptItems = -1; // Last line of every data file is usually blank so not really corrupt 257 | 258 | for (i = 0; i < data.length; i += 1) { 259 | var doc; 260 | 261 | try { 262 | doc = model.deserialize(this.beforeDeserialization(data[i])); 263 | if (doc._id) { 264 | if (doc.$$deleted === true) { 265 | delete dataById[doc._id]; 266 | } else { 267 | dataById[doc._id] = doc; 268 | } 269 | } else if (doc.$$indexCreated && doc.$$indexCreated.fieldName != undefined) { 270 | indexes[doc.$$indexCreated.fieldName] = doc.$$indexCreated; 271 | } else if (typeof doc.$$indexRemoved === 'string') { 272 | delete indexes[doc.$$indexRemoved]; 273 | } 274 | } catch (e) { 275 | corruptItems += 1; 276 | } 277 | } 278 | 279 | // A bit lenient on corruption 280 | if (data.length > 0 && corruptItems / data.length > this.corruptAlertThreshold) { 281 | throw new Error( 282 | 'More than ' + 283 | Math.floor(100 * this.corruptAlertThreshold) + 284 | '% of the data file is corrupt, the wrong beforeDeserialization hook may be used. 
Cautiously refusing to start NeDB to prevent dataloss', 285 | ); 286 | } 287 | 288 | Object.keys(dataById).forEach(function(k) { 289 | tdata.push(dataById[k]); 290 | }); 291 | 292 | return { data: tdata, indexes: indexes }; 293 | }; 294 | 295 | /** 296 | * Load the database 297 | * 1) Create all indexes 298 | * 2) Insert all data 299 | * 3) Compact the database 300 | * This means pulling data out of the data file or creating it if it doesn't exist 301 | * Also, all data is persisted right away, which has the effect of compacting the database file 302 | * This operation is very quick at startup for a big collection (60ms for ~10k docs) 303 | * @param {Function} cb Optional callback, signature: err 304 | */ 305 | Persistence.prototype.loadDatabase = function(cb) { 306 | var callback = cb || function() {}, 307 | self = this; 308 | 309 | self.db.resetIndexes(); 310 | 311 | // In-memory only datastore 312 | if (self.inMemoryOnly) { 313 | return callback(null); 314 | } 315 | 316 | async.waterfall( 317 | [ 318 | function(cb) { 319 | Persistence.ensureDirectoryExists(self.filename, function(err) { 320 | storage.ensureDatafileIntegrity(self.filename, function(err) { 321 | storage.readFile(self.filename, 'utf8', function(err, rawData) { 322 | if (err) { 323 | return cb(err); 324 | } 325 | 326 | try { 327 | var treatedData = self.treatRawData(rawData); 328 | } catch (e) { 329 | return cb(e); 330 | } 331 | 332 | // Recreate all indexes in the datafile 333 | Object.keys(treatedData.indexes).forEach(function(key) { 334 | self.db.indexes[key] = new Index(treatedData.indexes[key]); 335 | }); 336 | 337 | // Fill cached database (i.e. all indexes) with data 338 | try { 339 | self.db.resetIndexes(treatedData.data); 340 | } catch (e) { 341 | self.db.resetIndexes(); // Rollback any index which didn't fail 342 | return cb(e); 343 | } 344 | 345 | self.db.persistence.persistCachedDatabase(cb); 346 | }); 347 | }); 348 | }); 349 | }, 350 | ], 351 | function(err) { 352 | if (err) { 353 | return callback(err); 354 | } 355 | 356 | self.db.executor.processBuffer(); 357 | return callback(null); 358 | }, 359 | ); 360 | }; 361 | 362 | // Interface 363 | module.exports = Persistence; 364 | -------------------------------------------------------------------------------- /lib/datastore.js: -------------------------------------------------------------------------------- 1 | var customUtils = require('./customUtils'), 2 | model = require('./model'), 3 | async = require('async'), 4 | Executor = require('./executor'), 5 | Index = require('./indexes'), 6 | util = require('util'), 7 | _ = require('underscore'), 8 | Persistence = require('./persistence'), 9 | Cursor = require('./cursor'), 10 | promisefy = require('./promisefy').promisefy; 11 | /** 12 | * Create a new collection 13 | * @param {String} options.filename Optional, datastore will be in-memory only if not provided 14 | * @param {Boolean} options.timestampData Optional, defaults to false. 
If set to true, createdAt and updatedAt will be created and populated automatically (if not specified by user) 15 | * @param {Boolean} options.inMemoryOnly Optional, defaults to false 16 | * @param {String} options.nodeWebkitAppName Optional, specify the name of your NW app if you want options.filename to be relative to the directory where 17 | * Node Webkit stores application data such as cookies and local storage (the best place to store data in my opinion) 18 | * @param {Boolean} options.autoload Optional, defaults to false 19 | * @param {Function} options.onload Optional, if autoload is used this will be called after the load database with the error object as parameter. If you don't pass it the error will be thrown 20 | * @param {Function} options.afterSerialization/options.beforeDeserialization Optional, serialization hooks 21 | * @param {Number} options.corruptAlertThreshold Optional, threshold after which an alert is thrown if too much data is corrupt 22 | * @param {Function} options.compareStrings Optional, string comparison function that overrides default for sorting 23 | * 24 | * Event Emitter - Events 25 | * * compaction.done - Fired whenever a compaction operation was finished 26 | */ 27 | function Datastore(options) { 28 | var filename; 29 | 30 | // Retrocompatibility with v0.6 and before 31 | if (typeof options === 'string') { 32 | filename = options; 33 | this.inMemoryOnly = false; // Default 34 | } else { 35 | options = options || {}; 36 | filename = options.filename; 37 | this.inMemoryOnly = options.inMemoryOnly || false; 38 | this.autoload = options.autoload || false; 39 | this.timestampData = options.timestampData || false; 40 | this.storage = options.storage; 41 | 42 | if ( 43 | !( 44 | this.storage && 45 | this.storage.getItem && 46 | this.storage.setItem && 47 | this.storage.removeItem 48 | ) 49 | ) { 50 | throw new Error( 51 | `expected options.storage to be defined. \n--> received ${ 52 | this.storage 53 | ? 
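// when a storage object was passed but is missing methods, list the keys it does have so a partial implementation is easier to spot (descriptive comment added for clarity)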
`object with keys: [${Object.getOwnPropertyNames( 54 | this.storage 55 | ).join(', ')}]` 56 | : this.storage 57 | }` 58 | ); 59 | } 60 | } 61 | 62 | // Determine whether in memory or persistent 63 | if (!filename || typeof filename !== 'string' || filename.length === 0) { 64 | this.filename = null; 65 | this.inMemoryOnly = true; 66 | } else { 67 | this.filename = filename; 68 | } 69 | 70 | // String comparison function 71 | this.compareStrings = options.compareStrings; 72 | 73 | // Persistence handling 74 | this.persistence = new Persistence({ 75 | db: this, 76 | nodeWebkitAppName: options.nodeWebkitAppName, 77 | afterSerialization: options.afterSerialization, 78 | beforeDeserialization: options.beforeDeserialization, 79 | corruptAlertThreshold: options.corruptAlertThreshold, 80 | }); 81 | 82 | // This new executor is ready if we don't use persistence 83 | // If we do, it will only be ready once loadDatabase is called 84 | this.executor = new Executor(); 85 | if (this.inMemoryOnly) { 86 | this.executor.ready = true; 87 | } 88 | 89 | // Indexed by field name, dot notation can be used 90 | // _id is always indexed and since _ids are generated randomly the underlying 91 | // binary is always well-balanced 92 | this.indexes = {}; 93 | this.indexes._id = new Index({ fieldName: '_id', unique: true }); 94 | this.ttlIndexes = {}; 95 | 96 | // Queue a load of the database right away and call the onload handler 97 | // By default (no onload handler), if there is an error there, no operation will be possible so warn the user by throwing an exception 98 | if (this.autoload) { 99 | this.loadDatabase( 100 | options.onload || 101 | function (err) { 102 | if (err) { 103 | throw err; 104 | } 105 | } 106 | ); 107 | } 108 | } 109 | 110 | util.inherits(Datastore, require('events').EventEmitter); 111 | 112 | /** 113 | * Load the database from the datafile, and trigger the execution of buffered commands if any 114 | */ 115 | Datastore.prototype.loadDatabase = function () { 116 | this.executor.push( 117 | { 118 | this: this.persistence, 119 | fn: this.persistence.loadDatabase, 120 | arguments: arguments, 121 | }, 122 | true 123 | ); 124 | }; 125 | 126 | /** 127 | * Get an array of all the data in the database 128 | */ 129 | Datastore.prototype.getAllData = function () { 130 | return this.indexes._id.getAll(); 131 | }; 132 | 133 | /** 134 | * Reset all currently defined indexes 135 | */ 136 | Datastore.prototype.resetIndexes = function (newData) { 137 | var self = this; 138 | 139 | Object.keys(this.indexes).forEach(function (i) { 140 | self.indexes[i].reset(newData); 141 | }); 142 | }; 143 | 144 | /** 145 | * Ensure an index is kept for this field. 
Same parameters as lib/indexes 146 | * For now this function is synchronous, we need to test how much time it takes 147 | * We use an async API for consistency with the rest of the code 148 | * @param {String} options.fieldName 149 | * @param {Boolean} options.unique 150 | * @param {Boolean} options.sparse 151 | * @param {Number} options.expireAfterSeconds - Optional, if set this index becomes a TTL index (only works on Date fields, not arrays of Date) 152 | * @param {Function} cb Optional callback, signature: err 153 | */ 154 | Datastore.prototype.ensureIndex = function (options, cb) { 155 | var err, 156 | callback = cb || function () {}; 157 | 158 | options = options || {}; 159 | 160 | if (!options.fieldName) { 161 | err = new Error('Cannot create an index without a fieldName'); 162 | err.missingFieldName = true; 163 | return callback(err); 164 | } 165 | if (this.indexes[options.fieldName]) { 166 | return callback(null); 167 | } 168 | 169 | this.indexes[options.fieldName] = new Index(options); 170 | if (options.expireAfterSeconds !== undefined) { 171 | this.ttlIndexes[options.fieldName] = options.expireAfterSeconds; 172 | } // With this implementation index creation is not necessary to ensure TTL but we stick with MongoDB's API here 173 | 174 | try { 175 | this.indexes[options.fieldName].insert(this.getAllData()); 176 | } catch (e) { 177 | delete this.indexes[options.fieldName]; 178 | return callback(e); 179 | } 180 | 181 | // We may want to force all options to be persisted including defaults, not just the ones passed the index creation function 182 | this.persistence.persistNewState( 183 | [{ $$indexCreated: options }], 184 | function (err) { 185 | if (err) { 186 | return callback(err); 187 | } 188 | return callback(null); 189 | } 190 | ); 191 | }; 192 | 193 | /** 194 | * Remove an index 195 | * @param {String} fieldName 196 | * @param {Function} cb Optional callback, signature: err 197 | */ 198 | Datastore.prototype.removeIndex = function (fieldName, cb) { 199 | var callback = cb || function () {}; 200 | 201 | delete this.indexes[fieldName]; 202 | 203 | this.persistence.persistNewState( 204 | [{ $$indexRemoved: fieldName }], 205 | function (err) { 206 | if (err) { 207 | return callback(err); 208 | } 209 | return callback(null); 210 | } 211 | ); 212 | }; 213 | 214 | /** 215 | * Add one or several document(s) to all indexes 216 | */ 217 | Datastore.prototype.addToIndexes = function (doc) { 218 | var i, 219 | failingIndex, 220 | error, 221 | keys = Object.keys(this.indexes); 222 | 223 | for (i = 0; i < keys.length; i += 1) { 224 | try { 225 | this.indexes[keys[i]].insert(doc); 226 | } catch (e) { 227 | failingIndex = i; 228 | error = e; 229 | break; 230 | } 231 | } 232 | 233 | // If an error happened, we need to rollback the insert on all other indexes 234 | if (error) { 235 | for (i = 0; i < failingIndex; i += 1) { 236 | this.indexes[keys[i]].remove(doc); 237 | } 238 | 239 | throw error; 240 | } 241 | }; 242 | 243 | /** 244 | * Remove one or several document(s) from all indexes 245 | */ 246 | Datastore.prototype.removeFromIndexes = function (doc) { 247 | var self = this; 248 | 249 | Object.keys(this.indexes).forEach(function (i) { 250 | self.indexes[i].remove(doc); 251 | }); 252 | }; 253 | 254 | /** 255 | * Update one or several documents in all indexes 256 | * To update multiple documents, oldDoc must be an array of { oldDoc, newDoc } pairs 257 | * If one update violates a constraint, all changes are rolled back 258 | */ 259 | Datastore.prototype.updateIndexes = function (oldDoc, 
newDoc) { 260 | var i, 261 | failingIndex, 262 | error, 263 | keys = Object.keys(this.indexes); 264 | 265 | for (i = 0; i < keys.length; i += 1) { 266 | try { 267 | this.indexes[keys[i]].update(oldDoc, newDoc); 268 | } catch (e) { 269 | failingIndex = i; 270 | error = e; 271 | break; 272 | } 273 | } 274 | 275 | // If an error happened, we need to rollback the update on all other indexes 276 | if (error) { 277 | for (i = 0; i < failingIndex; i += 1) { 278 | this.indexes[keys[i]].revertUpdate(oldDoc, newDoc); 279 | } 280 | 281 | throw error; 282 | } 283 | }; 284 | 285 | /** 286 | * Return the list of candidates for a given query 287 | * Crude implementation for now, we return the candidates given by the first usable index if any 288 | * We try the following query types, in this order: basic match, $in match, comparison match 289 | * One way to make it better would be to enable the use of multiple indexes if the first usable index 290 | * returns too much data. I may do it in the future. 291 | * 292 | * Returned candidates will be scanned to find and remove all expired documents 293 | * 294 | * @param {Query} query 295 | * @param {Boolean} dontExpireStaleDocs Optional, defaults to false, if true don't remove stale docs. Useful for the remove function which shouldn't be impacted by expirations 296 | * @param {Function} callback Signature err, candidates 297 | */ 298 | Datastore.prototype.getCandidates = function ( 299 | query, 300 | dontExpireStaleDocs, 301 | callback 302 | ) { 303 | var indexNames = Object.keys(this.indexes), 304 | self = this, 305 | usableQueryKeys; 306 | 307 | if (typeof dontExpireStaleDocs === 'function') { 308 | callback = dontExpireStaleDocs; 309 | dontExpireStaleDocs = false; 310 | } 311 | 312 | async.waterfall([ 313 | // STEP 1: get candidates list by checking indexes from most to least frequent usecase 314 | function (cb) { 315 | // For a basic match 316 | usableQueryKeys = []; 317 | Object.keys(query).forEach(function (k) { 318 | if ( 319 | typeof query[k] === 'string' || 320 | typeof query[k] === 'number' || 321 | typeof query[k] === 'boolean' || 322 | util.isDate(query[k]) || 323 | query[k] === null 324 | ) { 325 | usableQueryKeys.push(k); 326 | } 327 | }); 328 | usableQueryKeys = _.intersection(usableQueryKeys, indexNames); 329 | if (usableQueryKeys.length > 0) { 330 | return cb( 331 | null, 332 | self.indexes[usableQueryKeys[0]].getMatching( 333 | query[usableQueryKeys[0]] 334 | ) 335 | ); 336 | } 337 | 338 | // For a $in match 339 | usableQueryKeys = []; 340 | Object.keys(query).forEach(function (k) { 341 | if (query[k] && query[k].hasOwnProperty('$in')) { 342 | usableQueryKeys.push(k); 343 | } 344 | }); 345 | usableQueryKeys = _.intersection(usableQueryKeys, indexNames); 346 | if (usableQueryKeys.length > 0) { 347 | return cb( 348 | null, 349 | self.indexes[usableQueryKeys[0]].getMatching( 350 | query[usableQueryKeys[0]].$in 351 | ) 352 | ); 353 | } 354 | 355 | // For a comparison match 356 | usableQueryKeys = []; 357 | Object.keys(query).forEach(function (k) { 358 | if ( 359 | query[k] && 360 | (query[k].hasOwnProperty('$lt') || 361 | query[k].hasOwnProperty('$lte') || 362 | query[k].hasOwnProperty('$gt') || 363 | query[k].hasOwnProperty('$gte')) 364 | ) { 365 | usableQueryKeys.push(k); 366 | } 367 | }); 368 | usableQueryKeys = _.intersection(usableQueryKeys, indexNames); 369 | if (usableQueryKeys.length > 0) { 370 | return cb( 371 | null, 372 | self.indexes[usableQueryKeys[0]].getBetweenBounds( 373 | query[usableQueryKeys[0]] 374 | ) 375 | ); 376 | } 377 | 
378 | // By default, return all the DB data 379 | return cb(null, self.getAllData()); 380 | }, 381 | // STEP 2: remove all expired documents 382 | function (docs) { 383 | if (dontExpireStaleDocs) { 384 | return callback(null, docs); 385 | } 386 | 387 | var expiredDocsIds = [], 388 | validDocs = [], 389 | ttlIndexesFieldNames = Object.keys(self.ttlIndexes); 390 | 391 | docs.forEach(function (doc) { 392 | var valid = true; 393 | ttlIndexesFieldNames.forEach(function (i) { 394 | if ( 395 | doc[i] !== undefined && 396 | util.isDate(doc[i]) && 397 | Date.now() > doc[i].getTime() + self.ttlIndexes[i] * 1000 398 | ) { 399 | valid = false; 400 | } 401 | }); 402 | if (valid) { 403 | validDocs.push(doc); 404 | } else { 405 | expiredDocsIds.push(doc._id); 406 | } 407 | }); 408 | 409 | async.eachSeries( 410 | expiredDocsIds, 411 | function (_id, cb) { 412 | self._remove({ _id: _id }, {}, function (err) { 413 | if (err) { 414 | return callback(err); 415 | } 416 | return cb(); 417 | }); 418 | }, 419 | function (err) { 420 | return callback(null, validDocs); 421 | } 422 | ); 423 | }, 424 | ]); 425 | }; 426 | 427 | /** 428 | * Insert a new document 429 | * @param {Function} cb Optional callback, signature: err, insertedDoc 430 | * 431 | * @api private Use Datastore.insert which has the same signature 432 | */ 433 | Datastore.prototype._insert = function (newDoc, cb) { 434 | var callback = cb || function () {}, 435 | preparedDoc; 436 | 437 | try { 438 | preparedDoc = this.prepareDocumentForInsertion(newDoc); 439 | this._insertInCache(preparedDoc); 440 | } catch (e) { 441 | return callback(e); 442 | } 443 | 444 | this.persistence.persistNewState( 445 | util.isArray(preparedDoc) ? preparedDoc : [preparedDoc], 446 | function (err) { 447 | if (err) { 448 | return callback(err); 449 | } 450 | return callback(null, model.deepCopy(preparedDoc)); 451 | } 452 | ); 453 | }; 454 | 455 | /** 456 | * Create a new _id that's not already in use 457 | */ 458 | Datastore.prototype.createNewId = function () { 459 | var tentativeId = customUtils.uid(16); 460 | // Try as many times as needed to get an unused _id. 
As explained in customUtils, the probability of this ever happening is extremely small, so this is O(1) 461 | if (this.indexes._id.getMatching(tentativeId).length > 0) { 462 | tentativeId = this.createNewId(); 463 | } 464 | return tentativeId; 465 | }; 466 | 467 | /** 468 | * Prepare a document (or array of documents) to be inserted in a database 469 | * Meaning adds _id and timestamps if necessary on a copy of newDoc to avoid any side effect on user input 470 | * @api private 471 | */ 472 | Datastore.prototype.prepareDocumentForInsertion = function (newDoc) { 473 | var preparedDoc, 474 | self = this; 475 | 476 | if (util.isArray(newDoc)) { 477 | preparedDoc = []; 478 | newDoc.forEach(function (doc) { 479 | preparedDoc.push(self.prepareDocumentForInsertion(doc)); 480 | }); 481 | } else { 482 | preparedDoc = model.deepCopy(newDoc); 483 | if (preparedDoc._id === undefined) { 484 | preparedDoc._id = this.createNewId(); 485 | } 486 | var now = new Date(); 487 | if (this.timestampData && preparedDoc.createdAt === undefined) { 488 | preparedDoc.createdAt = now; 489 | } 490 | if (this.timestampData && preparedDoc.updatedAt === undefined) { 491 | preparedDoc.updatedAt = now; 492 | } 493 | model.checkObject(preparedDoc); 494 | } 495 | 496 | return preparedDoc; 497 | }; 498 | 499 | /** 500 | * If newDoc is an array of documents, this will insert all documents in the cache 501 | * @api private 502 | */ 503 | Datastore.prototype._insertInCache = function (preparedDoc) { 504 | if (util.isArray(preparedDoc)) { 505 | this._insertMultipleDocsInCache(preparedDoc); 506 | } else { 507 | this.addToIndexes(preparedDoc); 508 | } 509 | }; 510 | 511 | /** 512 | * If one insertion fails (e.g. because of a unique constraint), roll back all previous 513 | * inserts and throws the error 514 | * @api private 515 | */ 516 | Datastore.prototype._insertMultipleDocsInCache = function (preparedDocs) { 517 | var i, failingI, error; 518 | 519 | for (i = 0; i < preparedDocs.length; i += 1) { 520 | try { 521 | this.addToIndexes(preparedDocs[i]); 522 | } catch (e) { 523 | error = e; 524 | failingI = i; 525 | break; 526 | } 527 | } 528 | 529 | if (error) { 530 | for (i = 0; i < failingI; i += 1) { 531 | this.removeFromIndexes(preparedDocs[i]); 532 | } 533 | 534 | throw error; 535 | } 536 | }; 537 | 538 | Datastore.prototype.insert = function () { 539 | this.executor.push({ this: this, fn: this._insert, arguments: arguments }); 540 | }; 541 | 542 | /** 543 | * Count all documents matching the query 544 | * @param {Object} query MongoDB-style query 545 | */ 546 | Datastore.prototype.count = function (query, callback) { 547 | var cursor = new Cursor(this, query, function (err, docs, callback) { 548 | if (err) { 549 | return callback(err); 550 | } 551 | return callback(null, docs.length); 552 | }); 553 | 554 | if (typeof callback === 'function') { 555 | cursor.exec(callback); 556 | } else { 557 | return cursor; 558 | } 559 | }; 560 | 561 | /** 562 | * Find all documents matching the query 563 | * If no callback is passed, we return the cursor so that user can limit, skip and finally exec 564 | * @param {Object} query MongoDB-style query 565 | * @param {Object} projection MongoDB-style projection 566 | */ 567 | Datastore.prototype.find = function (query, projection, callback) { 568 | switch (arguments.length) { 569 | case 1: 570 | projection = {}; 571 | // callback is undefined, will return a cursor 572 | break; 573 | case 2: 574 | if (typeof projection === 'function') { 575 | callback = projection; 576 | projection = {}; 577 | } // 
If not assume projection is an object and callback undefined 578 | break; 579 | } 580 | 581 | var cursor = new Cursor(this, query, function (err, docs, callback) { 582 | var res = [], 583 | i; 584 | 585 | if (err) { 586 | return callback(err); 587 | } 588 | 589 | for (i = 0; i < docs.length; i += 1) { 590 | res.push(model.deepCopy(docs[i])); 591 | } 592 | return callback(null, res); 593 | }); 594 | 595 | cursor.projection(projection); 596 | if (typeof callback === 'function') { 597 | cursor.exec(callback); 598 | } else { 599 | return cursor; 600 | } 601 | }; 602 | 603 | /** 604 | * Find one document matching the query 605 | * @param {Object} query MongoDB-style query 606 | * @param {Object} projection MongoDB-style projection 607 | */ 608 | Datastore.prototype.findOne = function (query, projection, callback) { 609 | switch (arguments.length) { 610 | case 1: 611 | projection = {}; 612 | // callback is undefined, will return a cursor 613 | break; 614 | case 2: 615 | if (typeof projection === 'function') { 616 | callback = projection; 617 | projection = {}; 618 | } // If not assume projection is an object and callback undefined 619 | break; 620 | } 621 | 622 | var cursor = new Cursor(this, query, function (err, docs, callback) { 623 | if (err) { 624 | return callback(err); 625 | } 626 | if (docs.length === 1) { 627 | return callback(null, model.deepCopy(docs[0])); 628 | } else { 629 | return callback(null, null); 630 | } 631 | }); 632 | 633 | cursor.projection(projection).limit(1); 634 | if (typeof callback === 'function') { 635 | cursor.exec(callback); 636 | } else { 637 | return cursor; 638 | } 639 | }; 640 | 641 | /** 642 | * Update all docs matching query 643 | * @param {Object} query 644 | * @param {Object} updateQuery 645 | * @param {Object} options Optional options 646 | * options.multi If true, can update multiple documents (defaults to false) 647 | * options.upsert If true, document is inserted if the query doesn't match anything 648 | * options.returnUpdatedDocs Defaults to false, if true return as third argument the array of updated matched documents (even if no change actually took place) 649 | * @param {Function} cb Optional callback, signature: (err, numAffected, affectedDocuments, upsert) 650 | * If update was an upsert, upsert flag is set to true 651 | * affectedDocuments can be one of the following: 652 | * * For an upsert, the upserted document 653 | * * For an update with returnUpdatedDocs option false, null 654 | * * For an update with returnUpdatedDocs true and multi false, the updated document 655 | * * For an update with returnUpdatedDocs true and multi true, the array of updated documents 656 | * 657 | * WARNING: The API was changed between v1.7.4 and v1.8, for consistency and readability reasons. Prior and including to v1.7.4, 658 | * the callback signature was (err, numAffected, updated) where updated was the updated document in case of an upsert 659 | * or the array of updated documents for an update if the returnUpdatedDocs option was true. That meant that the type of 660 | * affectedDocuments in a non multi update depended on whether there was an upsert or not, leaving only two ways for the 661 | * user to check whether an upsert had occured: checking the type of affectedDocuments or running another find query on 662 | * the whole dataset to check its size. Both options being ugly, the breaking change was necessary. 
663 | * 664 | * @api private Use Datastore.update which has the same signature 665 | */ 666 | Datastore.prototype._update = function (query, updateQuery, options, cb) { 667 | var callback, 668 | self = this, 669 | numReplaced = 0, 670 | multi, 671 | upsert, 672 | i; 673 | 674 | if (typeof options === 'function') { 675 | cb = options; 676 | options = {}; 677 | } 678 | callback = cb || function () {}; 679 | multi = options.multi !== undefined ? options.multi : false; 680 | upsert = options.upsert !== undefined ? options.upsert : false; 681 | 682 | async.waterfall([ 683 | function (cb) { 684 | // If upsert option is set, check whether we need to insert the doc 685 | if (!upsert) { 686 | return cb(); 687 | } 688 | 689 | // Need to use an internal function not tied to the executor to avoid deadlock 690 | var cursor = new Cursor(self, query); 691 | cursor.limit(1)._exec(function (err, docs) { 692 | if (err) { 693 | return callback(err); 694 | } 695 | if (docs.length === 1) { 696 | return cb(); 697 | } else { 698 | var toBeInserted; 699 | 700 | try { 701 | model.checkObject(updateQuery); 702 | // updateQuery is a simple object with no modifier, use it as the document to insert 703 | toBeInserted = updateQuery; 704 | } catch (e) { 705 | // updateQuery contains modifiers, use the find query as the base, 706 | // strip it from all operators and update it according to updateQuery 707 | try { 708 | toBeInserted = model.modify( 709 | model.deepCopy(query, true), 710 | updateQuery 711 | ); 712 | } catch (err) { 713 | return callback(err); 714 | } 715 | } 716 | 717 | return self._insert(toBeInserted, function (err, newDoc) { 718 | if (err) { 719 | return callback(err); 720 | } 721 | return callback(null, 1, newDoc, true); 722 | }); 723 | } 724 | }); 725 | }, 726 | function () { 727 | // Perform the update 728 | var modifiedDoc, 729 | modifications = [], 730 | createdAt; 731 | 732 | self.getCandidates(query, function (err, candidates) { 733 | if (err) { 734 | return callback(err); 735 | } 736 | 737 | // Preparing update (if an error is thrown here neither the datafile nor 738 | // the in-memory indexes are affected) 739 | try { 740 | for (i = 0; i < candidates.length; i += 1) { 741 | if ( 742 | model.match(candidates[i], query) && 743 | (multi || numReplaced === 0) 744 | ) { 745 | numReplaced += 1; 746 | if (self.timestampData) { 747 | createdAt = candidates[i].createdAt; 748 | } 749 | modifiedDoc = model.modify(candidates[i], updateQuery); 750 | if (self.timestampData) { 751 | modifiedDoc.createdAt = createdAt; 752 | modifiedDoc.updatedAt = new Date(); 753 | } 754 | modifications.push({ 755 | oldDoc: candidates[i], 756 | newDoc: modifiedDoc, 757 | }); 758 | } 759 | } 760 | } catch (err) { 761 | return callback(err); 762 | } 763 | 764 | // Change the docs in memory 765 | try { 766 | self.updateIndexes(modifications); 767 | } catch (err) { 768 | return callback(err); 769 | } 770 | 771 | // Update the datafile 772 | var updatedDocs = _.pluck(modifications, 'newDoc'); 773 | self.persistence.persistNewState(updatedDocs, function (err) { 774 | if (err) { 775 | return callback(err); 776 | } 777 | if (!options.returnUpdatedDocs) { 778 | return callback(null, numReplaced); 779 | } else { 780 | var updatedDocsDC = []; 781 | updatedDocs.forEach(function (doc) { 782 | updatedDocsDC.push(model.deepCopy(doc)); 783 | }); 784 | if (!multi) { 785 | updatedDocsDC = updatedDocsDC[0]; 786 | } 787 | return callback(null, numReplaced, updatedDocsDC); 788 | } 789 | }); 790 | }); 791 | }, 792 | ]); 793 | }; 794 | 795 | 
Datastore.prototype.update = function () { 796 | this.executor.push({ this: this, fn: this._update, arguments: arguments }); 797 | }; 798 | 799 | /** 800 | * Remove all docs matching the query 801 | * For now very naive implementation (similar to update) 802 | * @param {Object} query 803 | * @param {Object} options Optional options 804 | * options.multi If true, can remove multiple documents (defaults to false) 805 | * @param {Function} cb Optional callback, signature: err, numRemoved 806 | * 807 | * @api private Use Datastore.remove which has the same signature 808 | */ 809 | Datastore.prototype._remove = function (query, options, cb) { 810 | var callback, 811 | self = this, 812 | numRemoved = 0, 813 | removedDocs = [], 814 | multi; 815 | 816 | if (typeof options === 'function') { 817 | cb = options; 818 | options = {}; 819 | } 820 | callback = cb || function () {}; 821 | multi = options.multi !== undefined ? options.multi : false; 822 | 823 | this.getCandidates(query, true, function (err, candidates) { 824 | if (err) { 825 | return callback(err); 826 | } 827 | 828 | try { 829 | candidates.forEach(function (d) { 830 | if (model.match(d, query) && (multi || numRemoved === 0)) { 831 | numRemoved += 1; 832 | removedDocs.push({ $$deleted: true, _id: d._id }); 833 | self.removeFromIndexes(d); 834 | } 835 | }); 836 | } catch (err) { 837 | return callback(err); 838 | } 839 | 840 | self.persistence.persistNewState(removedDocs, function (err) { 841 | if (err) { 842 | return callback(err); 843 | } 844 | return callback(null, numRemoved); 845 | }); 846 | }); 847 | }; 848 | 849 | Datastore.prototype.remove = function () { 850 | this.executor.push({ this: this, fn: this._remove, arguments: arguments }); 851 | }; 852 | 853 | Datastore.prototype.loadDatabaseAsync = promisefy(Datastore, 'loadDatabase'); 854 | Datastore.prototype.findAsync = promisefy(Datastore, 'find'); 855 | Datastore.prototype.findOneAsync = promisefy(Datastore, 'findOne'); 856 | Datastore.prototype.insertAsync = promisefy(Datastore, 'insert'); 857 | Datastore.prototype.updateAsync = promisefy(Datastore, 'update'); 858 | Datastore.prototype.upsertAsync = promisefy(Datastore, 'upsert'); 859 | Datastore.prototype.removeAsync = promisefy(Datastore, 'remove'); 860 | 861 | module.exports = Datastore; 862 | -------------------------------------------------------------------------------- /lib/model.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Handle models (i.e. docs) 3 | * Serialization/deserialization 4 | * Copying 5 | * Querying, update 6 | */ 7 | 8 | var util = require('util'), 9 | _ = require('underscore'), 10 | modifierFunctions = {}, 11 | lastStepModifierFunctions = {}, 12 | comparisonFunctions = {}, 13 | logicalOperators = {}, 14 | arrayComparisonFunctions = {}; 15 | 16 | /** 17 | * Check a key, throw an error if the key is not valid 18 | * @param {String} k key 19 | * @param {Model} v value, needed to treat the Date edge case 20 | * Non-treatable edge cases here: if part of the object is of the form { $$date: number } or { $$deleted: true } 21 | * Its serialized-then-deserialized version will be transformed into a Date object 22 | * But you really need to want it to trigger such behaviour, even when warned not to use '$' at the beginning of the field names...
23 | */ 24 | function checkKey(k, v) { 25 | if (typeof k === 'number') { 26 | k = k.toString(); 27 | } 28 | 29 | if ( 30 | k[0] === '$' && 31 | !(k === '$$date' && typeof v === 'number') && 32 | !(k === '$$deleted' && v === true) && 33 | !(k === '$$indexCreated') && 34 | !(k === '$$indexRemoved') 35 | ) { 36 | throw new Error('Field names cannot begin with the $ character'); 37 | } 38 | 39 | if (k.indexOf('.') !== -1) { 40 | throw new Error('Field names cannot contain a .'); 41 | } 42 | } 43 | 44 | /** 45 | * Check a DB object and throw an error if it's not valid 46 | * Works by applying the above checkKey function to all fields recursively 47 | */ 48 | function checkObject(obj) { 49 | if (util.isArray(obj)) { 50 | obj.forEach(function(o) { 51 | checkObject(o); 52 | }); 53 | } 54 | 55 | if (typeof obj === 'object' && obj !== null) { 56 | Object.keys(obj).forEach(function(k) { 57 | checkKey(k, obj[k]); 58 | checkObject(obj[k]); 59 | }); 60 | } 61 | } 62 | 63 | /** 64 | * Serialize an object to be persisted to a one-line string 65 | * For serialization/deserialization, we use the native JSON parser and not eval or Function 66 | * That gives us less freedom but data entered in the database may come from users 67 | * so eval and the like are not safe 68 | * Accepted primitive types: Number, String, Boolean, Date, null 69 | * Accepted secondary types: Objects, Arrays 70 | */ 71 | function serialize(obj) { 72 | var res; 73 | 74 | res = JSON.stringify(obj, function(k, v) { 75 | checkKey(k, v); 76 | 77 | if (v === undefined) { 78 | return undefined; 79 | } 80 | if (v === null) { 81 | return null; 82 | } 83 | 84 | // Hackish way of checking if object is Date (this way it works between execution contexts in node-webkit). 85 | // We can't use value directly because for dates it is already string in this function (date.toJSON was already called), so we use this 86 | if (typeof this[k].getTime === 'function') { 87 | return { $$date: this[k].getTime() }; 88 | } 89 | 90 | return v; 91 | }); 92 | 93 | return res; 94 | } 95 | 96 | /** 97 | * From a one-line representation of an object generate by the serialize function 98 | * Return the object itself 99 | */ 100 | function deserialize(rawData) { 101 | return JSON.parse(rawData, function(k, v) { 102 | if (k === '$$date') { 103 | return new Date(v); 104 | } 105 | if (typeof v === 'string' || typeof v === 'number' || typeof v === 'boolean' || v === null) { 106 | return v; 107 | } 108 | if (v && v.$$date) { 109 | return v.$$date; 110 | } 111 | 112 | return v; 113 | }); 114 | } 115 | 116 | /** 117 | * Deep copy a DB object 118 | * The optional strictKeys flag (defaulting to false) indicates whether to copy everything or only fields 119 | * where the keys are valid, i.e. don't begin with $ and don't contain a . 120 | */ 121 | function deepCopy(obj, strictKeys) { 122 | var res; 123 | 124 | if ( 125 | typeof obj === 'boolean' || 126 | typeof obj === 'number' || 127 | typeof obj === 'string' || 128 | obj === null || 129 | util.isDate(obj) 130 | ) { 131 | return obj; 132 | } 133 | 134 | if (util.isArray(obj)) { 135 | res = []; 136 | obj.forEach(function(o) { 137 | res.push(deepCopy(o, strictKeys)); 138 | }); 139 | return res; 140 | } 141 | 142 | if (typeof obj === 'object') { 143 | res = {}; 144 | Object.keys(obj).forEach(function(k) { 145 | if (!strictKeys || (k[0] !== '$' && k.indexOf('.') === -1)) { 146 | res[k] = deepCopy(obj[k], strictKeys); 147 | } 148 | }); 149 | return res; 150 | } 151 | 152 | return undefined; // For now everything else is undefined. 
We should probably throw an error instead 153 | } 154 | 155 | /** 156 | * Tells if an object is a primitive type or a "real" object 157 | * Arrays are considered primitive 158 | */ 159 | function isPrimitiveType(obj) { 160 | return ( 161 | typeof obj === 'boolean' || 162 | typeof obj === 'number' || 163 | typeof obj === 'string' || 164 | obj === null || 165 | util.isDate(obj) || 166 | util.isArray(obj) 167 | ); 168 | } 169 | 170 | /** 171 | * Utility functions for comparing things 172 | * Assumes type checking was already done (a and b already have the same type) 173 | * compareNSB works for numbers, strings and booleans 174 | */ 175 | function compareNSB(a, b) { 176 | if (a < b) { 177 | return -1; 178 | } 179 | if (a > b) { 180 | return 1; 181 | } 182 | return 0; 183 | } 184 | 185 | function compareArrays(a, b) { 186 | var i, comp; 187 | 188 | for (i = 0; i < Math.min(a.length, b.length); i += 1) { 189 | comp = compareThings(a[i], b[i]); 190 | 191 | if (comp !== 0) { 192 | return comp; 193 | } 194 | } 195 | 196 | // Common section was identical, longest one wins 197 | return compareNSB(a.length, b.length); 198 | } 199 | 200 | /** 201 | * Compare { things U undefined } 202 | * Things are defined as any native types (string, number, boolean, null, date) and objects 203 | * We need to compare with undefined as it will be used in indexes 204 | * In the case of objects and arrays, we deep-compare 205 | * If two objects dont have the same type, the (arbitrary) type hierarchy is: undefined, null, number, strings, boolean, dates, arrays, objects 206 | * Return -1 if a < b, 1 if a > b and 0 if a = b (note that equality here is NOT the same as defined in areThingsEqual!) 207 | * 208 | * @param {Function} _compareStrings String comparing function, returning -1, 0 or 1, overriding default string comparison (useful for languages with accented letters) 209 | */ 210 | function compareThings(a, b, _compareStrings) { 211 | var aKeys, 212 | bKeys, 213 | comp, 214 | i, 215 | compareStrings = _compareStrings || compareNSB; 216 | 217 | // undefined 218 | if (a === undefined) { 219 | return b === undefined ? 0 : -1; 220 | } 221 | if (b === undefined) { 222 | return a === undefined ? 0 : 1; 223 | } 224 | 225 | // null 226 | if (a === null) { 227 | return b === null ? 0 : -1; 228 | } 229 | if (b === null) { 230 | return a === null ? 0 : 1; 231 | } 232 | 233 | // Numbers 234 | if (typeof a === 'number') { 235 | return typeof b === 'number' ? compareNSB(a, b) : -1; 236 | } 237 | if (typeof b === 'number') { 238 | return typeof a === 'number' ? compareNSB(a, b) : 1; 239 | } 240 | 241 | // Strings 242 | if (typeof a === 'string') { 243 | return typeof b === 'string' ? compareStrings(a, b) : -1; 244 | } 245 | if (typeof b === 'string') { 246 | return typeof a === 'string' ? compareStrings(a, b) : 1; 247 | } 248 | 249 | // Booleans 250 | if (typeof a === 'boolean') { 251 | return typeof b === 'boolean' ? compareNSB(a, b) : -1; 252 | } 253 | if (typeof b === 'boolean') { 254 | return typeof a === 'boolean' ? compareNSB(a, b) : 1; 255 | } 256 | 257 | // Dates 258 | if (util.isDate(a)) { 259 | return util.isDate(b) ? compareNSB(a.getTime(), b.getTime()) : -1; 260 | } 261 | if (util.isDate(b)) { 262 | return util.isDate(a) ? compareNSB(a.getTime(), b.getTime()) : 1; 263 | } 264 | 265 | // Arrays (first element is most significant and so on) 266 | if (util.isArray(a)) { 267 | return util.isArray(b) ? compareArrays(a, b) : -1; 268 | } 269 | if (util.isArray(b)) { 270 | return util.isArray(a) ? 
compareArrays(a, b) : 1; 271 | } 272 | 273 | // Objects 274 | aKeys = Object.keys(a).sort(); 275 | bKeys = Object.keys(b).sort(); 276 | 277 | for (i = 0; i < Math.min(aKeys.length, bKeys.length); i += 1) { 278 | comp = compareThings(a[aKeys[i]], b[bKeys[i]]); 279 | 280 | if (comp !== 0) { 281 | return comp; 282 | } 283 | } 284 | 285 | return compareNSB(aKeys.length, bKeys.length); 286 | } 287 | 288 | // ============================================================== 289 | // Updating documents 290 | // ============================================================== 291 | 292 | /** 293 | * The signature of modifier functions is as follows 294 | * Their structure is always the same: recursively follow the dot notation while creating 295 | * the nested documents if needed, then apply the "last step modifier" 296 | * @param {Object} obj The model to modify 297 | * @param {String} field Can contain dots, in that case that means we will set a subfield recursively 298 | * @param {Model} value 299 | */ 300 | 301 | /** 302 | * Set a field to a new value 303 | */ 304 | lastStepModifierFunctions.$set = function(obj, field, value) { 305 | obj[field] = value; 306 | }; 307 | 308 | /** 309 | * Unset a field 310 | */ 311 | lastStepModifierFunctions.$unset = function(obj, field, value) { 312 | delete obj[field]; 313 | }; 314 | 315 | /** 316 | * Push an element to the end of an array field 317 | * Optional modifier $each instead of value to push several values 318 | * Optional modifier $slice to slice the resulting array, see https://docs.mongodb.org/manual/reference/operator/update/slice/ 319 | * Différeence with MongoDB: if $slice is specified and not $each, we act as if value is an empty array 320 | */ 321 | lastStepModifierFunctions.$push = function(obj, field, value) { 322 | // Create the array if it doesn't exist 323 | if (!obj.hasOwnProperty(field)) { 324 | obj[field] = []; 325 | } 326 | 327 | if (!util.isArray(obj[field])) { 328 | throw new Error("Can't $push an element on non-array values"); 329 | } 330 | 331 | if (value !== null && typeof value === 'object' && value.$slice && value.$each === undefined) { 332 | value.$each = []; 333 | } 334 | 335 | if (value !== null && typeof value === 'object' && value.$each) { 336 | if ( 337 | Object.keys(value).length >= 3 || 338 | (Object.keys(value).length === 2 && value.$slice === undefined) 339 | ) { 340 | throw new Error('Can only use $slice in cunjunction with $each when $push to array'); 341 | } 342 | if (!util.isArray(value.$each)) { 343 | throw new Error('$each requires an array value'); 344 | } 345 | 346 | value.$each.forEach(function(v) { 347 | obj[field].push(v); 348 | }); 349 | 350 | if (value.$slice === undefined || typeof value.$slice !== 'number') { 351 | return; 352 | } 353 | 354 | if (value.$slice === 0) { 355 | obj[field] = []; 356 | } else { 357 | var start, 358 | end, 359 | n = obj[field].length; 360 | if (value.$slice < 0) { 361 | start = Math.max(0, n + value.$slice); 362 | end = n; 363 | } else if (value.$slice > 0) { 364 | start = 0; 365 | end = Math.min(n, value.$slice); 366 | } 367 | obj[field] = obj[field].slice(start, end); 368 | } 369 | } else { 370 | obj[field].push(value); 371 | } 372 | }; 373 | 374 | /** 375 | * Add an element to an array field only if it is not already in it 376 | * No modification if the element is already in the array 377 | * Note that it doesn't check whether the original array contains duplicates 378 | */ 379 | lastStepModifierFunctions.$addToSet = function(obj, field, value) { 380 | var addToSet = true; 381 
| 382 | // Create the array if it doesn't exist 383 | if (!obj.hasOwnProperty(field)) { 384 | obj[field] = []; 385 | } 386 | 387 | if (!util.isArray(obj[field])) { 388 | throw new Error("Can't $addToSet an element on non-array values"); 389 | } 390 | 391 | if (value !== null && typeof value === 'object' && value.$each) { 392 | if (Object.keys(value).length > 1) { 393 | throw new Error("Can't use another field in conjunction with $each"); 394 | } 395 | if (!util.isArray(value.$each)) { 396 | throw new Error('$each requires an array value'); 397 | } 398 | 399 | value.$each.forEach(function(v) { 400 | lastStepModifierFunctions.$addToSet(obj, field, v); 401 | }); 402 | } else { 403 | obj[field].forEach(function(v) { 404 | if (compareThings(v, value) === 0) { 405 | addToSet = false; 406 | } 407 | }); 408 | if (addToSet) { 409 | obj[field].push(value); 410 | } 411 | } 412 | }; 413 | 414 | /** 415 | * Remove the first or last element of an array 416 | */ 417 | lastStepModifierFunctions.$pop = function(obj, field, value) { 418 | if (!util.isArray(obj[field])) { 419 | throw new Error("Can't $pop an element from non-array values"); 420 | } 421 | if (typeof value !== 'number') { 422 | throw new Error(value + " isn't an integer, can't use it with $pop"); 423 | } 424 | if (value === 0) { 425 | return; 426 | } 427 | 428 | if (value > 0) { 429 | obj[field] = obj[field].slice(0, obj[field].length - 1); 430 | } else { 431 | obj[field] = obj[field].slice(1); 432 | } 433 | }; 434 | 435 | /** 436 | * Removes all instances of a value from an existing array 437 | */ 438 | lastStepModifierFunctions.$pull = function(obj, field, value) { 439 | var arr, i; 440 | 441 | if (!util.isArray(obj[field])) { 442 | throw new Error("Can't $pull an element from non-array values"); 443 | } 444 | 445 | arr = obj[field]; 446 | for (i = arr.length - 1; i >= 0; i -= 1) { 447 | if (match(arr[i], value)) { 448 | arr.splice(i, 1); 449 | } 450 | } 451 | }; 452 | 453 | /** 454 | * Increment a numeric field's value 455 | */ 456 | lastStepModifierFunctions.$inc = function(obj, field, value) { 457 | if (typeof value !== 'number') { 458 | throw new Error(value + ' must be a number'); 459 | } 460 | 461 | if (typeof obj[field] !== 'number') { 462 | if (!_.has(obj, field)) { 463 | obj[field] = value; 464 | } else { 465 | throw new Error("Don't use the $inc modifier on non-number fields"); 466 | } 467 | } else { 468 | obj[field] += value; 469 | } 470 | }; 471 | 472 | /** 473 | * Updates the value of the field, only if specified field is greater than the current value of the field 474 | */ 475 | lastStepModifierFunctions.$max = function(obj, field, value) { 476 | if (typeof obj[field] === 'undefined') { 477 | obj[field] = value; 478 | } else if (value > obj[field]) { 479 | obj[field] = value; 480 | } 481 | }; 482 | 483 | /** 484 | * Updates the value of the field, only if specified field is smaller than the current value of the field 485 | */ 486 | lastStepModifierFunctions.$min = function(obj, field, value) { 487 | if (typeof obj[field] === 'undefined') { 488 | obj[field] = value; 489 | } else if (value < obj[field]) { 490 | obj[field] = value; 491 | } 492 | }; 493 | 494 | // Given its name, create the complete modifier function 495 | function createModifierFunction(modifier) { 496 | return function(obj, field, value) { 497 | var fieldParts = typeof field === 'string' ? 
field.split('.') : field; 498 | 499 | if (fieldParts.length === 1) { 500 | lastStepModifierFunctions[modifier](obj, field, value); 501 | } else { 502 | if (obj[fieldParts[0]] === undefined) { 503 | if (modifier === '$unset') { 504 | return; 505 | } // Bad looking specific fix, needs to be generalized modifiers that behave like $unset are implemented 506 | obj[fieldParts[0]] = {}; 507 | } 508 | modifierFunctions[modifier](obj[fieldParts[0]], fieldParts.slice(1), value); 509 | } 510 | }; 511 | } 512 | 513 | // Actually create all modifier functions 514 | Object.keys(lastStepModifierFunctions).forEach(function(modifier) { 515 | modifierFunctions[modifier] = createModifierFunction(modifier); 516 | }); 517 | 518 | /** 519 | * Modify a DB object according to an update query 520 | */ 521 | function modify(obj, updateQuery) { 522 | var keys = Object.keys(updateQuery), 523 | firstChars = _.map(keys, function(item) { 524 | return item[0]; 525 | }), 526 | dollarFirstChars = _.filter(firstChars, function(c) { 527 | return c === '$'; 528 | }), 529 | newDoc, 530 | modifiers; 531 | 532 | if (keys.indexOf('_id') !== -1 && updateQuery._id !== obj._id) { 533 | throw new Error("You cannot change a document's _id"); 534 | } 535 | 536 | if (dollarFirstChars.length !== 0 && dollarFirstChars.length !== firstChars.length) { 537 | throw new Error('You cannot mix modifiers and normal fields'); 538 | } 539 | 540 | if (dollarFirstChars.length === 0) { 541 | // Simply replace the object with the update query contents 542 | newDoc = deepCopy(updateQuery); 543 | newDoc._id = obj._id; 544 | } else { 545 | // Apply modifiers 546 | modifiers = _.uniq(keys); 547 | newDoc = deepCopy(obj); 548 | modifiers.forEach(function(m) { 549 | var keys; 550 | 551 | if (!modifierFunctions[m]) { 552 | throw new Error('Unknown modifier ' + m); 553 | } 554 | 555 | // Can't rely on Object.keys throwing on non objects since ES6 556 | // Not 100% satisfying as non objects can be interpreted as objects but no false negatives so we can live with it 557 | if (typeof updateQuery[m] !== 'object') { 558 | throw new Error('Modifier ' + m + "'s argument must be an object"); 559 | } 560 | 561 | keys = Object.keys(updateQuery[m]); 562 | keys.forEach(function(k) { 563 | modifierFunctions[m](newDoc, k, updateQuery[m][k]); 564 | }); 565 | }); 566 | } 567 | 568 | // Check result is valid and return it 569 | checkObject(newDoc); 570 | 571 | if (obj._id !== newDoc._id) { 572 | throw new Error("You can't change a document's _id"); 573 | } 574 | return newDoc; 575 | } 576 | 577 | // ============================================================== 578 | // Finding documents 579 | // ============================================================== 580 | 581 | /** 582 | * Get a value from object with dot notation 583 | * @param {Object} obj 584 | * @param {String} field 585 | */ 586 | function getDotValue(obj, field) { 587 | var fieldParts = typeof field === 'string' ? 
field.split('.') : field, 588 | i, 589 | objs; 590 | 591 | if (!obj) { 592 | return undefined; 593 | } // field cannot be empty so that means we should return undefined so that nothing can match 594 | 595 | if (fieldParts.length === 0) { 596 | return obj; 597 | } 598 | 599 | if (fieldParts.length === 1) { 600 | return obj[fieldParts[0]]; 601 | } 602 | 603 | if (util.isArray(obj[fieldParts[0]])) { 604 | // If the next field is an integer, return only this item of the array 605 | i = parseInt(fieldParts[1], 10); 606 | if (typeof i === 'number' && !isNaN(i)) { 607 | return getDotValue(obj[fieldParts[0]][i], fieldParts.slice(2)); 608 | } 609 | 610 | // Return the array of values 611 | objs = new Array(); 612 | for (i = 0; i < obj[fieldParts[0]].length; i += 1) { 613 | objs.push(getDotValue(obj[fieldParts[0]][i], fieldParts.slice(1))); 614 | } 615 | return objs; 616 | } else { 617 | return getDotValue(obj[fieldParts[0]], fieldParts.slice(1)); 618 | } 619 | } 620 | 621 | /** 622 | * Check whether 'things' are equal 623 | * Things are defined as any native types (string, number, boolean, null, date) and objects 624 | * In the case of object, we check deep equality 625 | * Returns true if they are, false otherwise 626 | */ 627 | function areThingsEqual(a, b) { 628 | var aKeys, bKeys, i; 629 | 630 | // Strings, booleans, numbers, null 631 | if ( 632 | a === null || 633 | typeof a === 'string' || 634 | typeof a === 'boolean' || 635 | typeof a === 'number' || 636 | b === null || 637 | typeof b === 'string' || 638 | typeof b === 'boolean' || 639 | typeof b === 'number' 640 | ) { 641 | return a === b; 642 | } 643 | 644 | // Dates 645 | if (util.isDate(a) || util.isDate(b)) { 646 | return util.isDate(a) && util.isDate(b) && a.getTime() === b.getTime(); 647 | } 648 | 649 | // Arrays (no match since arrays are used as a $in) 650 | // undefined (no match since they mean field doesn't exist and can't be serialized) 651 | if ( 652 | (!(util.isArray(a) && util.isArray(b)) && (util.isArray(a) || util.isArray(b))) || 653 | a === undefined || 654 | b === undefined 655 | ) { 656 | return false; 657 | } 658 | 659 | // General objects (check for deep equality) 660 | // a and b should be objects at this point 661 | try { 662 | aKeys = Object.keys(a); 663 | bKeys = Object.keys(b); 664 | } catch (e) { 665 | return false; 666 | } 667 | 668 | if (aKeys.length !== bKeys.length) { 669 | return false; 670 | } 671 | for (i = 0; i < aKeys.length; i += 1) { 672 | if (bKeys.indexOf(aKeys[i]) === -1) { 673 | return false; 674 | } 675 | if (!areThingsEqual(a[aKeys[i]], b[aKeys[i]])) { 676 | return false; 677 | } 678 | } 679 | return true; 680 | } 681 | 682 | /** 683 | * Check that two values are comparable 684 | */ 685 | function areComparable(a, b) { 686 | if ( 687 | typeof a !== 'string' && 688 | typeof a !== 'number' && 689 | !util.isDate(a) && 690 | typeof b !== 'string' && 691 | typeof b !== 'number' && 692 | !util.isDate(b) 693 | ) { 694 | return false; 695 | } 696 | 697 | if (typeof a !== typeof b) { 698 | return false; 699 | } 700 | 701 | return true; 702 | } 703 | 704 | /** 705 | * Arithmetic and comparison operators 706 | * @param {Native value} a Value in the object 707 | * @param {Native value} b Value in the query 708 | */ 709 | comparisonFunctions.$lt = function(a, b) { 710 | return areComparable(a, b) && a < b; 711 | }; 712 | 713 | comparisonFunctions.$lte = function(a, b) { 714 | return areComparable(a, b) && a <= b; 715 | }; 716 | 717 | comparisonFunctions.$gt = function(a, b) { 718 | return areComparable(a, b) 
&& a > b; 719 | }; 720 | 721 | comparisonFunctions.$gte = function(a, b) { 722 | return areComparable(a, b) && a >= b; 723 | }; 724 | 725 | comparisonFunctions.$ne = function(a, b) { 726 | if (a === undefined) { 727 | return true; 728 | } 729 | return !areThingsEqual(a, b); 730 | }; 731 | 732 | comparisonFunctions.$in = function(a, b) { 733 | var i; 734 | 735 | if (!util.isArray(b)) { 736 | throw new Error('$in operator called with a non-array'); 737 | } 738 | 739 | for (i = 0; i < b.length; i += 1) { 740 | if (areThingsEqual(a, b[i])) { 741 | return true; 742 | } 743 | } 744 | 745 | return false; 746 | }; 747 | 748 | comparisonFunctions.$nin = function(a, b) { 749 | if (!util.isArray(b)) { 750 | throw new Error('$nin operator called with a non-array'); 751 | } 752 | 753 | return !comparisonFunctions.$in(a, b); 754 | }; 755 | 756 | comparisonFunctions.$regex = function(a, b) { 757 | if (!util.isRegExp(b)) { 758 | throw new Error('$regex operator called with non regular expression'); 759 | } 760 | 761 | if (typeof a !== 'string') { 762 | return false; 763 | } else { 764 | return b.test(a); 765 | } 766 | }; 767 | 768 | comparisonFunctions.$exists = function(value, exists) { 769 | if (exists || exists === '') { 770 | // This will be true for all values of exists except false, null, undefined and 0 771 | exists = true; // That's strange behaviour (we should only use true/false) but that's the way Mongo does it... 772 | } else { 773 | exists = false; 774 | } 775 | 776 | if (value === undefined) { 777 | return !exists; 778 | } else { 779 | return exists; 780 | } 781 | }; 782 | 783 | // Specific to arrays 784 | comparisonFunctions.$size = function(obj, value) { 785 | if (!util.isArray(obj)) { 786 | return false; 787 | } 788 | if (value % 1 !== 0) { 789 | throw new Error('$size operator called without an integer'); 790 | } 791 | 792 | return obj.length == value; 793 | }; 794 | comparisonFunctions.$elemMatch = function(obj, value) { 795 | if (!util.isArray(obj)) { 796 | return false; 797 | } 798 | var i = obj.length; 799 | var result = false; // Initialize result 800 | while (i--) { 801 | if (match(obj[i], value)) { 802 | // If match for array element, return true 803 | result = true; 804 | break; 805 | } 806 | } 807 | return result; 808 | }; 809 | arrayComparisonFunctions.$size = true; 810 | arrayComparisonFunctions.$elemMatch = true; 811 | 812 | /** 813 | * Match any of the subqueries 814 | * @param {Model} obj 815 | * @param {Array of Queries} query 816 | */ 817 | logicalOperators.$or = function(obj, query) { 818 | var i; 819 | 820 | if (!util.isArray(query)) { 821 | throw new Error('$or operator used without an array'); 822 | } 823 | 824 | for (i = 0; i < query.length; i += 1) { 825 | if (match(obj, query[i])) { 826 | return true; 827 | } 828 | } 829 | 830 | return false; 831 | }; 832 | 833 | /** 834 | * Match all of the subqueries 835 | * @param {Model} obj 836 | * @param {Array of Queries} query 837 | */ 838 | logicalOperators.$and = function(obj, query) { 839 | var i; 840 | 841 | if (!util.isArray(query)) { 842 | throw new Error('$and operator used without an array'); 843 | } 844 | 845 | for (i = 0; i < query.length; i += 1) { 846 | if (!match(obj, query[i])) { 847 | return false; 848 | } 849 | } 850 | 851 | return true; 852 | }; 853 | 854 | /** 855 | * Inverted match of the query 856 | * @param {Model} obj 857 | * @param {Query} query 858 | */ 859 | logicalOperators.$not = function(obj, query) { 860 | return !match(obj, query); 861 | }; 862 | 863 | /** 864 | * Use a function to match 865 | 
* @param {Model} obj 866 | * @param {Query} query 867 | */ 868 | logicalOperators.$where = function(obj, fn) { 869 | var result; 870 | 871 | if (!_.isFunction(fn)) { 872 | throw new Error('$where operator used without a function'); 873 | } 874 | 875 | result = fn.call(obj); 876 | if (!_.isBoolean(result)) { 877 | throw new Error('$where function must return boolean'); 878 | } 879 | 880 | return result; 881 | }; 882 | 883 | /** 884 | * Tell if a given document matches a query 885 | * @param {Object} obj Document to check 886 | * @param {Object} query 887 | */ 888 | function match(obj, query) { 889 | var queryKeys, queryKey, queryValue, i; 890 | 891 | // Primitive query against a primitive type 892 | // This is a bit of a hack since we construct an object with an arbitrary key only to dereference it later 893 | // But I don't have time for a cleaner implementation now 894 | if (isPrimitiveType(obj) || isPrimitiveType(query)) { 895 | return matchQueryPart({ needAKey: obj }, 'needAKey', query); 896 | } 897 | 898 | // Normal query 899 | queryKeys = Object.keys(query); 900 | for (i = 0; i < queryKeys.length; i += 1) { 901 | queryKey = queryKeys[i]; 902 | queryValue = query[queryKey]; 903 | 904 | if (queryKey[0] === '$') { 905 | if (!logicalOperators[queryKey]) { 906 | throw new Error('Unknown logical operator ' + queryKey); 907 | } 908 | if (!logicalOperators[queryKey](obj, queryValue)) { 909 | return false; 910 | } 911 | } else { 912 | if (!matchQueryPart(obj, queryKey, queryValue)) { 913 | return false; 914 | } 915 | } 916 | } 917 | 918 | return true; 919 | } 920 | 921 | /** 922 | * Match an object against a specific { key: value } part of a query 923 | * if the treatObjAsValue flag is set, don't try to match every part separately, but the array as a whole 924 | */ 925 | function matchQueryPart(obj, queryKey, queryValue, treatObjAsValue) { 926 | var objValue = getDotValue(obj, queryKey), 927 | i, 928 | keys, 929 | firstChars, 930 | dollarFirstChars; 931 | 932 | // Check if the value is an array if we don't force a treatment as value 933 | if (util.isArray(objValue) && !treatObjAsValue) { 934 | // If the queryValue is an array, try to perform an exact match 935 | if (util.isArray(queryValue)) { 936 | return matchQueryPart(obj, queryKey, queryValue, true); 937 | } 938 | 939 | // Check if we are using an array-specific comparison function 940 | if (queryValue !== null && typeof queryValue === 'object' && !util.isRegExp(queryValue)) { 941 | keys = Object.keys(queryValue); 942 | for (i = 0; i < keys.length; i += 1) { 943 | if (arrayComparisonFunctions[keys[i]]) { 944 | return matchQueryPart(obj, queryKey, queryValue, true); 945 | } 946 | } 947 | } 948 | 949 | // If not, treat it as an array of { obj, query } where there needs to be at least one match 950 | for (i = 0; i < objValue.length; i += 1) { 951 | if (matchQueryPart({ k: objValue[i] }, 'k', queryValue)) { 952 | return true; 953 | } // k here could be any string 954 | } 955 | return false; 956 | } 957 | 958 | // queryValue is an actual object. Determine whether it contains comparison operators 959 | // or only normal fields. 
Mixed objects are not allowed 960 | if ( 961 | queryValue !== null && 962 | typeof queryValue === 'object' && 963 | !util.isRegExp(queryValue) && 964 | !util.isArray(queryValue) 965 | ) { 966 | keys = Object.keys(queryValue); 967 | firstChars = _.map(keys, function(item) { 968 | return item[0]; 969 | }); 970 | dollarFirstChars = _.filter(firstChars, function(c) { 971 | return c === '$'; 972 | }); 973 | 974 | if (dollarFirstChars.length !== 0 && dollarFirstChars.length !== firstChars.length) { 975 | throw new Error('You cannot mix operators and normal fields'); 976 | } 977 | 978 | // queryValue is an object of this form: { $comparisonOperator1: value1, ... } 979 | if (dollarFirstChars.length > 0) { 980 | for (i = 0; i < keys.length; i += 1) { 981 | if (!comparisonFunctions[keys[i]]) { 982 | throw new Error('Unknown comparison function ' + keys[i]); 983 | } 984 | 985 | if (!comparisonFunctions[keys[i]](objValue, queryValue[keys[i]])) { 986 | return false; 987 | } 988 | } 989 | return true; 990 | } 991 | } 992 | 993 | // Using regular expressions with basic querying 994 | if (util.isRegExp(queryValue)) { 995 | return comparisonFunctions.$regex(objValue, queryValue); 996 | } 997 | 998 | // queryValue is either a native value or a normal object 999 | // Basic matching is possible 1000 | if (!areThingsEqual(objValue, queryValue)) { 1001 | return false; 1002 | } 1003 | 1004 | return true; 1005 | } 1006 | 1007 | // Interface 1008 | module.exports.serialize = serialize; 1009 | module.exports.deserialize = deserialize; 1010 | module.exports.deepCopy = deepCopy; 1011 | module.exports.checkObject = checkObject; 1012 | module.exports.isPrimitiveType = isPrimitiveType; 1013 | module.exports.modify = modify; 1014 | module.exports.getDotValue = getDotValue; 1015 | module.exports.match = match; 1016 | module.exports.areThingsEqual = areThingsEqual; 1017 | module.exports.compareThings = compareThings; 1018 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | **Embedded persistent or in memory database for react-native**. API is a subset of MongoDB's (NeDB). 2 | 3 | ## Installation, tests 4 | Module name on npm is `react-native-local-mongodb`. 5 | 6 | # 7 | > Starting with version 2.0.0, the `insert`,` update`, `remove`,` find`, and `findOne` methods no longer return promises - now they have a corresponding `Async` method (insertAsync, updateAsync, etc ) 8 | # 9 | 10 | ``` 11 | npm install react-native-local-mongodb --save # Put latest version in your package.json 12 | npm test # You'll need the dev dependencies to launch tests 13 | ``` 14 | 15 | ## API 16 | It is a subset of MongoDB's (NeDB) API (the most used operations). 17 | 18 | * Creating/loading a database 19 | * Persistence 20 | * Using a custom storage 21 | * Inserting documents 22 | * Finding documents 23 | * Basic Querying 24 | * Operators ($lt, $lte, $gt, $gte, $in, $nin, $ne, $exists, $regex) 25 | * Array fields 26 | * Logical operators $or, $and, $not, $where 27 | * Sorting and paginating 28 | * Projections 29 | * Counting documents 30 | * Updating documents 31 | * Removing documents 32 | * Indexing 33 | * Browser version 34 | 35 | ### Creating/loading a database 36 | You can use react-native-local-mongodb as an in-memory only datastore or as a persistent datastore. One datastore is the equivalent of a MongoDB collection. 
The constructor is used as follows `new Datastore(options)` where `options` is an object with the following fields: 37 | 38 | * `filename` (optional): the key where AsyncStorage will save data. 39 | * `inMemoryOnly` (optional, defaults to `false`): as the name implies. 40 | * `timestampData` (optional, defaults to `false`): timestamp the insertion and last update of all documents, with the fields `createdAt` and `updatedAt`. User-specified values override automatic generation, usually useful for testing. 41 | * `autoload` (optional, defaults to `false`): if used, the database will automatically be loaded from the datafile upon creation (you don't need to call `loadDatabase`). Any command issued before load is finished is buffered and will be executed when load is done. 42 | * `onload` (optional): if you use autoloading, this is the handler called after `loadDatabase` completes. It takes one `error` argument. If you use autoloading without specifying this handler, and an error happens during load, an error will be thrown. 43 | * `afterSerialization` (optional): hook you can use to transform data after it has been serialized and before it is written to disk. Can be used, for example, to encrypt data before writing the database to disk. This function takes a string as parameter (one line of a react-native-local-mongodb data file) and outputs the transformed string, **which must absolutely not contain a `\n` character** (or data will be lost). 44 | * `beforeDeserialization` (optional): inverse of `afterSerialization`. Make sure to include both and not just one, or you risk data loss. For the same reason, make sure both functions are inverses of one another. Some failsafe mechanisms are in place to prevent data loss if you misuse the serialization hooks: react-native-local-mongodb checks that one is never declared without the other, and checks that they are inverses of one another by testing on random strings of various lengths. In addition, if too much data is detected as corrupt, react-native-local-mongodb will refuse to start as it could mean you're not using the deserialization hook corresponding to the serialization hook used before (see below). 45 | * `corruptAlertThreshold` (optional): between 0 and 1, defaults to 10%. react-native-local-mongodb will refuse to start if more than this percentage of the datafile is corrupt. 0 means you don't tolerate any corruption, 1 means you don't care. 46 | * `compareStrings` (optional): function compareStrings(a, b) compares 47 | strings a and b and returns -1, 0 or 1. If specified, it overrides the 48 | default string comparison, which is not well adapted to non-US characters, 49 | in particular accented letters. The native `localeCompare` will most of the 50 | time be the right choice. 51 | 52 | If you use a persistent datastore without the `autoload` option, you need to call `loadDatabase` manually. 53 | This function fetches the data from the datafile and prepares the database. **Don't forget it!** If you use a 54 | persistent datastore, no command (insert, find, update, remove) will be executed before `loadDatabase` 55 | is called, so make sure to call it yourself or use the `autoload` option. 56 | 57 | Also, if `loadDatabase` fails, all commands registered to the executor afterwards will not be executed. They will be registered and executed, in sequence, only after a successful `loadDatabase`.
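The examples below use the classic callback-based API. Since version 2.0.0 every datastore method also has a promise-returning `Async` counterpart (`loadDatabaseAsync`, `insertAsync`, `findAsync`, and so on; see the note at the top of this README), so the same flow can be written with `async`/`await`. A minimal sketch, assuming an AsyncStorage implementation is installed; the resolved values mirror the corresponding callback results:

```javascript
import Datastore from 'react-native-local-mongodb';
import AsyncStorage from '@react-native-community/async-storage';

const db = new Datastore({ filename: 'asyncStorageKey', storage: AsyncStorage });

async function init() {
  // Promise-based equivalent of db.loadDatabase(callback)
  await db.loadDatabaseAsync();

  // Commands issued after a successful load run normally
  await db.insertAsync({ planet: 'Earth' });
  const docs = await db.findAsync({ planet: 'Earth' });
  return docs;
}
```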
58 | 59 | ```javascript 60 | // Type 1: In-memory only datastore (no need to load the database) 61 | var Datastore = require('react-native-local-mongodb') 62 | , db = new Datastore(); 63 | 64 | 65 | // Type 2: Persistent datastore with manual loading 66 | var Datastore = require('react-native-local-mongodb') 67 | , db = new Datastore({ filename: 'asyncStorageKey', storage: AsyncStorage }); 68 | 69 | db.loadDatabase(function (err) { // Callback is optional 70 | // Now commands will be executed 71 | }); 72 | 73 | 74 | // Type 3: Persistent datastore with automatic loading 75 | var Datastore = require('react-native-local-mongodb') 76 | , db = new Datastore({ filename: 'asyncStorageKey', storage: AsyncStorage, autoload: true }); 77 | // You can issue commands right away 78 | ``` 79 | 80 | ### Persistence 81 | Under the hood, react-native-local-mongodb's persistence uses an append-only format, meaning that all updates and deletes actually result in lines added at the end of the string in AsyncStorage, for performance reasons. The database is automatically compacted (i.e. put back in the one-line-per-document format) each time you load the database within your application. 82 | 83 | You can manually call the compaction function with `yourDatabase.persistence.compactDatafile` which takes no argument. It queues a compaction of the datafile in the executor, to be executed sequentially after all pending operations. The datastore will fire a `compaction.done` event once compaction is finished. 84 | 85 | You can also set automatic compaction at regular intervals with `yourDatabase.persistence.setAutocompactionInterval(interval)`, `interval` in milliseconds (a minimum of 5s is enforced), and stop automatic compaction with `yourDatabase.persistence.stopAutocompaction()`. 86 | 87 | Keep in mind that compaction takes a bit of time (not too much: 130ms for 50k records on a typical development machine) and no other operation can happen when it does, so most projects actually don't need to use it. 88 | 89 | Compaction will also immediately remove any documents whose data line has become corrupted, assuming that the total percentage of all corrupted documents in that database still falls below the specified `corruptAlertThreshold` option's value. 90 | 91 | Durability works similarly to major databases: compaction forces the OS to physically flush data to disk, while appends to the data file do not (the OS is responsible for flushing the data). That guarantees that a server crash can never cause complete data loss, while preserving performance. The worst that can happen is a crash between two syncs, causing a loss of all data between the two syncs. Usually syncs are 30 seconds apart so that's at most 30 seconds of data. This post by Antirez on Redis persistence explains this in more detail, react-native-local-mongodb being very close to Redis AOF persistence with `appendfsync` option set to `no`. 92 | 93 | ### Using a custom Storage 94 | You can pass a custom storage in the Datastore configuration object. Your storage must be an object that matches [this interface](https://github.com/react-native-community/react-native-async-storage/blob/67ae214ad5caebc83bb21e659e898924cb27fe6e/types/index.d.ts#L19).
95 | 96 | ```javascript 97 | import Datastore from 'react-native-local-mongodb'; 98 | import AsyncStorage from '@react-native-community/async-storage'; 99 | 100 | const db = new Datastore({ 101 | filename: 'asyncStorageKey', 102 | storage: AsyncStorage 103 | }); 104 | 105 | // === or === 106 | 107 | const items = {}; 108 | 109 | const db2 = new Datastore({ 110 | filename: 'asyncStorageKey', 111 | 112 | // custom storage 113 | storage: { 114 | setItem: (key, value, cb) => { 115 | items[key] = value; 116 | cb(null, value); 117 | }, 118 | 119 | getItem: (key, cb) => { 120 | const res = items[key]; 121 | cb(null, res); 122 | }, 123 | 124 | removeItem: (key, cb) => { 125 | const res = delete items[key]; 126 | cb(null, res); 127 | }, 128 | 129 | // ... 130 | }, 131 | }); 132 | ``` 133 | 134 | ### Inserting documents 135 | The native types are `String`, `Number`, `Boolean`, `Date` and `null`. You can also use 136 | arrays and subdocuments (objects). If a field is `undefined`, it will not be saved (this is different from 137 | MongoDB which transforms `undefined` into `null`, something I find counter-intuitive). 138 | 139 | If the document does not contain an `_id` field, react-native-local-mongodb will automatically generate one for you (a 16-character alphanumerical string). The `_id` of a document, once set, cannot be modified. 140 | 141 | Field names cannot begin with '$' or contain a '.'. 142 | 143 | ```javascript 144 | var doc = { hello: 'world' 145 | , n: 5 146 | , today: new Date() 147 | , 'react-native-local-mongodbIsAwesome': true 148 | , notthere: null 149 | , notToBeSaved: undefined // Will not be saved 150 | , fruits: [ 'apple', 'orange', 'pear' ] 151 | , infos: { name: 'react-native-local-mongodb' } 152 | }; 153 | 154 | db.insert(doc, function (err, newDoc) { // Callback is optional 155 | // newDoc is the newly inserted document, including its _id 156 | // newDoc has no key called notToBeSaved since its value was undefined 157 | }); 158 | ``` 159 | 160 | You can also bulk-insert an array of documents. This operation is atomic, meaning that if one insert fails due to a unique constraint being violated, all changes are rolled back. 161 | 162 | ```javascript 163 | db.insert([{ a: 5 }, { a: 42 }], function (err, newDocs) { 164 | // Two documents were inserted in the database 165 | // newDocs is an array with these documents, augmented with their _id 166 | }); 167 | 168 | // If there is a unique constraint on field 'a', this will fail 169 | db.insert([{ a: 5 }, { a: 42 }, { a: 5 }], function (err) { 170 | // err is a 'uniqueViolated' error 171 | // The database was not modified 172 | }); 173 | ``` 174 | 175 | ### Finding documents 176 | Use `find` to look for multiple documents matching your query, or `findOne` to look for one specific document. You can select documents based on field equality or use comparison operators (`$lt`, `$lte`, `$gt`, `$gte`, `$in`, `$nin`, `$ne`). You can also use logical operators `$or`, `$and`, `$not` and `$where`. See below for the syntax. 177 | 178 | You can use regular expressions in two ways: in basic querying in place of a string, or with the `$regex` operator. 179 | 180 | You can sort and paginate results using the cursor API (see below). 181 | 182 | You can use standard projections to restrict the fields to appear in the results (see below). 183 | 184 | #### Basic querying 185 | Basic querying means you are looking for documents whose fields match the ones you specify. You can use regular expressions to match strings.
You can use the dot notation to navigate inside nested documents, arrays, arrays of subdocuments and to match a specific element of an array. 187 | 188 | ```javascript 189 | // Let's say our datastore contains the following collection 190 | // { _id: 'id1', planet: 'Mars', system: 'solar', inhabited: false, satellites: ['Phobos', 'Deimos'] } 191 | // { _id: 'id2', planet: 'Earth', system: 'solar', inhabited: true, humans: { genders: 2, eyes: true } } 192 | // { _id: 'id3', planet: 'Jupiter', system: 'solar', inhabited: false } 193 | // { _id: 'id4', planet: 'Omicron Persei 8', system: 'futurama', inhabited: true, humans: { genders: 7 } } 194 | // { _id: 'id5', completeData: { planets: [ { name: 'Earth', number: 3 }, { name: 'Mars', number: 2 }, { name: 'Pluton', number: 9 } ] } } 195 | 196 | // Finding all planets in the solar system 197 | db.find({ system: 'solar' }, function (err, docs) { 198 | // docs is an array containing documents Mars, Earth, Jupiter 199 | // If no document is found, docs is equal to [] 200 | }); 201 | 202 | // Finding all planets whose name contains the substring 'ar' using a regular expression 203 | db.find({ planet: /ar/ }, function (err, docs) { 204 | // docs contains Mars and Earth 205 | }); 206 | 207 | // Finding all inhabited planets in the solar system 208 | db.find({ system: 'solar', inhabited: true }, function (err, docs) { 209 | // docs is an array containing document Earth only 210 | }); 211 | 212 | // Use the dot-notation to match fields in subdocuments 213 | db.find({ "humans.genders": 2 }, function (err, docs) { 214 | // docs contains Earth 215 | }); 216 | 217 | // Use the dot-notation to navigate arrays of subdocuments 218 | db.find({ "completeData.planets.name": "Mars" }, function (err, docs) { 219 | // docs contains document 5 220 | }); 221 | 222 | db.find({ "completeData.planets.name": "Jupiter" }, function (err, docs) { 223 | // docs is empty 224 | }); 225 | 226 | db.find({ "completeData.planets.0.name": "Earth" }, function (err, docs) { 227 | // docs contains document 5 228 | // If we had tested against "Mars", docs would be empty because we are matching against a specific array element 229 | }); 230 | 231 | 232 | // You can also deep-compare objects. Don't confuse this with dot-notation! 233 | db.find({ humans: { genders: 2 } }, function (err, docs) { 234 | // docs is empty, because { genders: 2 } is not equal to { genders: 2, eyes: true } 235 | }); 236 | 237 | // Find all documents in the collection 238 | db.find({}, function (err, docs) { 239 | }); 240 | 241 | // The same rules apply when you want to only find one document 242 | db.findOne({ _id: 'id1' }, function (err, doc) { 243 | // doc is the document Mars 244 | // If no document is found, doc is null 245 | }); 246 | ``` 247 | 248 | #### Operators ($lt, $lte, $gt, $gte, $in, $nin, $ne, $exists, $regex) 249 | The syntax is `{ field: { $op: value } }` where `$op` is any comparison operator: 250 | 251 | * `$lt`, `$lte`: less than, less than or equal 252 | * `$gt`, `$gte`: greater than, greater than or equal 253 | * `$in`: member of. `value` must be an array of values 254 | * `$ne`, `$nin`: not equal, not a member of 255 | * `$exists`: checks whether the document possesses the property `field`. `value` should be true or false 256 | * `$regex`: checks whether a string is matched by the regular expression. Contrary to MongoDB, the use of `$options` with `$regex` is not supported, because it doesn't give you more power than regex flags.
Basic queries are more readable so only use the `$regex` operator when you need to use another operator with it (see example below) 257 | 258 | ```javascript 259 | // $lt, $lte, $gt and $gte work on numbers and strings 260 | db.find({ "humans.genders": { $gt: 5 } }, function (err, docs) { 261 | // docs contains Omicron Persei 8, whose humans have more than 5 genders (7). 262 | }); 263 | 264 | // When used with strings, lexicographical order is used 265 | db.find({ planet: { $gt: 'Mercury' }}, function (err, docs) { 266 | // docs contains Omicron Persei 8 267 | }) 268 | 269 | // Using $in. $nin is used in the same way 270 | db.find({ planet: { $in: ['Earth', 'Jupiter'] }}, function (err, docs) { 271 | // docs contains Earth and Jupiter 272 | }); 273 | 274 | // Using $exists 275 | db.find({ satellites: { $exists: true } }, function (err, docs) { 276 | // docs contains only Mars 277 | }); 278 | 279 | // Using $regex with another operator 280 | db.find({ planet: { $regex: /ar/, $nin: ['Jupiter', 'Earth'] } }, function (err, docs) { 281 | // docs only contains Mars because Earth was excluded from the match by $nin 282 | }); 283 | ``` 284 | 285 | #### Array fields 286 | When a field in a document is an array, react-native-local-mongodb first tries to see if the query value is an array to perform an exact match, then whether an array-specific comparison function (for now only `$size` and `$elemMatch`) is being used. If not, the query is treated as a query on every element and there is a match if at least one element matches. 287 | 288 | * `$size`: match on the size of the array 289 | * `$elemMatch`: matches if at least one array element matches the query entirely 290 | 291 | ```javascript 292 | // Exact match 293 | db.find({ satellites: ['Phobos', 'Deimos'] }, function (err, docs) { 294 | // docs contains Mars 295 | }) 296 | db.find({ satellites: ['Deimos', 'Phobos'] }, function (err, docs) { 297 | // docs is empty 298 | }) 299 | 300 | // Using an array-specific comparison function 301 | // The $elemMatch operator matches a document if at least one element of the array field satisfies all the conditions specified in the `$elemMatch` query 302 | db.find({ completeData: { planets: { $elemMatch: { name: 'Earth', number: 3 } } } }, function (err, docs) { 303 | // docs contains documents with id 5 (completeData) 304 | }); 305 | 306 | db.find({ completeData: { planets: { $elemMatch: { name: 'Earth', number: 5 } } } }, function (err, docs) { 307 | // docs is empty 308 | }); 309 | 310 | // You can use any known document query operator inside an $elemMatch query 311 | db.find({ completeData: { planets: { $elemMatch: { name: 'Earth', number: { $gt: 2 } } } } }, function (err, docs) { 312 | // docs contains documents with id 5 (completeData) 313 | }); 314 | 315 | // Note: you can't use nested comparison functions, e.g. { $size: { $lt: 5 } } will throw an error 316 | db.find({ satellites: { $size: 2 } }, function (err, docs) { 317 | // docs contains Mars 318 | }); 319 | 320 | db.find({ satellites: { $size: 1 } }, function (err, docs) { 321 | // docs is empty 322 | }); 323 | 324 | // If a document's field is an array, matching it means matching any element of the array 325 | db.find({ satellites: 'Phobos' }, function (err, docs) { 326 | // docs contains Mars.
Result would have been the same if query had been { satellites: 'Deimos' } 327 | }); 328 | 329 | // This also works for queries that use comparison operators 330 | db.find({ satellites: { $lt: 'Amos' } }, function (err, docs) { 331 | // docs is empty since Phobos and Deimos are after Amos in lexicographical order 332 | }); 333 | 334 | // This also works with the $in and $nin operators 335 | db.find({ satellites: { $in: ['Moon', 'Deimos'] } }, function (err, docs) { 336 | // docs contains Mars (the Earth document is not complete!) 337 | }); 338 | ``` 339 | 340 | #### Logical operators $or, $and, $not, $where 341 | You can combine queries using logical operators: 342 | 343 | * For `$or` and `$and`, the syntax is `{ $op: [query1, query2, ...] }`. 344 | * For `$not`, the syntax is `{ $not: query }` 345 | * For `$where`, the syntax is `{ $where: function () { /* object is "this", return a boolean */ } }` 346 | 347 | ```javascript 348 | db.find({ $or: [{ planet: 'Earth' }, { planet: 'Mars' }] }, function (err, docs) { 349 | // docs contains Earth and Mars 350 | }); 351 | 352 | db.find({ $not: { planet: 'Earth' } }, function (err, docs) { 353 | // docs contains Mars, Jupiter, Omicron Persei 8 354 | }); 355 | 356 | db.find({ $where: function () { return Object.keys(this).length > 6; } }, function (err, docs) { 357 | // docs contains the documents with more than 6 properties 358 | }); 359 | 360 | // You can mix normal queries, comparison queries and logical operators 361 | db.find({ $or: [{ planet: 'Earth' }, { planet: 'Mars' }], inhabited: true }, function (err, docs) { 362 | // docs contains Earth 363 | }); 364 | 365 | ``` 366 | 367 | #### Sorting and paginating 368 | If you don't specify a callback to `find`, `findOne` or `count`, a `Cursor` object is returned. You can modify the cursor with `sort`, `skip` and `limit` and then execute it with `exec(callback)` (a promise is returned). 369 | 370 | ```javascript 371 | // Let's say the database contains these 4 documents 372 | // doc1 = { _id: 'id1', planet: 'Mars', system: 'solar', inhabited: false, satellites: ['Phobos', 'Deimos'] } 373 | // doc2 = { _id: 'id2', planet: 'Earth', system: 'solar', inhabited: true, humans: { genders: 2, eyes: true } } 374 | // doc3 = { _id: 'id3', planet: 'Jupiter', system: 'solar', inhabited: false } 375 | // doc4 = { _id: 'id4', planet: 'Omicron Persei 8', system: 'futurama', inhabited: true, humans: { genders: 7 } } 376 | 377 | // No query used means all results are returned (before the Cursor modifiers) 378 | db.find({}).sort({ planet: 1 }).skip(1).limit(2).exec(function (err, docs) { 379 | // docs is [doc3, doc1] 380 | }); 381 | 382 | // You can sort in reverse order like this 383 | db.find({ system: 'solar' }).sort({ planet: -1 }).exec(function (err, docs) { 384 | // docs is [doc1, doc3, doc2] 385 | }); 386 | 387 | // You can sort on one field, then another, and so on like this: 388 | db.find({}).sort({ firstField: 1, secondField: -1 }) ... // You understand how this works! 389 | ``` 390 | 391 | #### Projections 392 | You can give `find` and `findOne` an optional second argument, `projections`. The syntax is the same as MongoDB: `{ a: 1, b: 1 }` to return only the `a` and `b` fields, `{ a: 0, b: 0 }` to omit these two fields. You cannot use both modes at the same time, except for `_id` which is by default always returned and which you can choose to omit. You can project on nested documents.
393 | 394 | ```javascript 395 | // Same database as above 396 | 397 | // Keeping only the given fields 398 | db.find({ planet: 'Mars' }, { planet: 1, system: 1 }, function (err, docs) { 399 | // docs is [{ planet: 'Mars', system: 'solar', _id: 'id1' }] 400 | }); 401 | 402 | // Keeping only the given fields but removing _id 403 | db.find({ planet: 'Mars' }, { planet: 1, system: 1, _id: 0 }, function (err, docs) { 404 | // docs is [{ planet: 'Mars', system: 'solar' }] 405 | }); 406 | 407 | // Omitting only the given fields and removing _id 408 | db.find({ planet: 'Mars' }, { planet: 0, system: 0, _id: 0 }, function (err, docs) { 409 | // docs is [{ inhabited: false, satellites: ['Phobos', 'Deimos'] }] 410 | }); 411 | 412 | // Failure: using both modes at the same time 413 | db.find({ planet: 'Mars' }, { planet: 0, system: 1 }, function (err, docs) { 414 | // err is the error message, docs is undefined 415 | }); 416 | 417 | // You can also use it in a Cursor way but this syntax is not compatible with MongoDB 418 | db.find({ planet: 'Mars' }).projection({ planet: 1, system: 1 }).exec(function (err, docs) { 419 | // docs is [{ planet: 'Mars', system: 'solar', _id: 'id1' }] 420 | }); 421 | 422 | // Project on a nested document 423 | db.findOne({ planet: 'Earth' }).projection({ planet: 1, 'humans.genders': 1 }).exec(function (err, doc) { 424 | // doc is { planet: 'Earth', _id: 'id2', humans: { genders: 2 } } 425 | }); 426 | ``` 427 | 428 | 429 | 430 | ### Counting documents 431 | You can use `count` to count documents. It has the same syntax as `find`. For example: 432 | 433 | ```javascript 434 | // Count all planets in the solar system 435 | db.count({ system: 'solar' }, function (err, count) { 436 | // count equals 3 437 | }); 438 | 439 | // Count all documents in the datastore 440 | db.count({}, function (err, count) { 441 | // count equals 4 442 | }); 443 | ``` 444 | 445 | 446 | ### Updating documents 447 | `db.update(query, update, options, callback)` will update all documents matching `query` according to the `update` rules: 448 | * `query` is the same kind of finding query you use with `find` and `findOne` 449 | * `update` specifies how the documents should be modified. It is either a new document or a set of modifiers (you cannot use both together, it doesn't make sense!) 450 | * A new document will replace the matched docs 451 | * The modifiers create the fields they need to modify if they don't exist, and you can apply them to subdocs. Available field modifiers are `$set` to change a field's value, `$unset` to delete a field, `$inc` to increment a field's value and `$min`/`$max` to change a field's value only if the provided value is less/greater than the current value. To work on arrays, you have `$push`, `$pop`, `$addToSet`, `$pull`, and the special `$each` and `$slice`. See examples below for the syntax. 452 | * `options` is an object with the following possible parameters 453 | * `multi` (defaults to `false`) which allows the modification of several documents if set to true 454 | * `upsert` (defaults to `false`) if you want to insert a new document corresponding to the `update` rules if your `query` doesn't match anything. If your `update` is a simple object with no modifiers, it is the inserted document. In the other case, the `query` is stripped of all operators recursively, and the `update` is applied to it.
455 | * `returnUpdatedDocs` (defaults to `false`, not MongoDB-compatible) if set to true and update is not an upsert, will return the array of documents matched by the find query and updated. Updated documents will be returned even if the update did not actually modify them 456 | * `callback` (optional) signature: `(err, numAffected, affectedDocuments, upsert)`. **Warning**: the API was changed between v1.7.4 and v1.8. Please refer to the change log to see the change. 457 | * For an upsert, `affectedDocuments` contains the inserted document and the `upsert` flag is set to `true`. 458 | * For a standard update with `returnUpdatedDocs` flag set to `false`, `affectedDocuments` is not set. 459 | * For a standard update with `returnUpdatedDocs` flag set to `true` and `multi` to `false`, `affectedDocuments` is the updated document. 460 | * For a standard update with `returnUpdatedDocs` flag set to `true` and `multi` to `true`, `affectedDocuments` is the array of updated documents. 461 | 462 | **Note**: you can't change a document's _id. 463 | 464 | ```javascript 465 | // Let's use the same example collection as in the "finding document" part 466 | // { _id: 'id1', planet: 'Mars', system: 'solar', inhabited: false } 467 | // { _id: 'id2', planet: 'Earth', system: 'solar', inhabited: true } 468 | // { _id: 'id3', planet: 'Jupiter', system: 'solar', inhabited: false } 469 | // { _id: 'id4', planet: 'Omicron Persei 8', system: 'futurama', inhabited: true } 470 | 471 | // Replace a document by another 472 | db.update({ planet: 'Jupiter' }, { planet: 'Pluton'}, {}, function (err, numReplaced) { 473 | // numReplaced = 1 474 | // The doc #3 has been replaced by { _id: 'id3', planet: 'Pluton' } 475 | // Note that the _id is kept unchanged, and the document has been replaced 476 | // (the 'system' and inhabited fields are not here anymore) 477 | }); 478 | 479 | // Set an existing field's value 480 | db.update({ system: 'solar' }, { $set: { system: 'solar system' } }, { multi: true }, function (err, numReplaced) { 481 | // numReplaced = 3 482 | // Field 'system' on Mars, Earth, Jupiter now has value 'solar system' 483 | }); 484 | 485 | // Setting the value of a non-existing field in a subdocument by using the dot-notation 486 | db.update({ planet: 'Mars' }, { $set: { "data.satellites": 2, "data.red": true } }, {}, function () { 487 | // Mars document now is { _id: 'id1', system: 'solar', inhabited: false 488 | // , data: { satellites: 2, red: true } 489 | // } 490 | // Note that to set fields in subdocuments, you HAVE to use dot-notation 491 | // Using object-notation will just replace the top-level field 492 | db.update({ planet: 'Mars' }, { $set: { data: { satellites: 3 } } }, {}, function () { 493 | // Mars document now is { _id: 'id1', system: 'solar', inhabited: false 494 | // , data: { satellites: 3 } 495 | // } 496 | // You lost the "data.red" field which is probably not the intended behavior 497 | }); 498 | }); 499 | 500 | // Deleting a field 501 | db.update({ planet: 'Mars' }, { $unset: { planet: true } }, {}, function () { 502 | // Now the document for Mars doesn't contain the planet field 503 | // You can unset nested fields with the dot notation of course 504 | }); 505 | 506 | // Upserting a document 507 | db.update({ planet: 'Pluton' }, { planet: 'Pluton', inhabited: false }, { upsert: true }, function (err, numReplaced, upsert) { 508 | // numReplaced = 1, upsert = { _id: 'id5', planet: 'Pluton', inhabited: false } 509 | // A new document { _id: 'id5', planet: 'Pluton', inhabited: false } has
been added to the collection 510 | }); 511 | 512 | // If you upsert with a modifier, the upserted doc is the query modified by the modifier 513 | // This is simpler than it sounds :) 514 | db.update({ planet: 'Pluton' }, { $inc: { distance: 38 } }, { upsert: true }, function () { 515 | // A new document { _id: 'id5', planet: 'Pluton', distance: 38 } has been added to the collection 516 | }); 517 | 518 | // If we insert a new document { _id: 'id6', fruits: ['apple', 'orange', 'pear'] } in the collection, 519 | // let's see how we can modify the array field atomically 520 | 521 | // $push inserts new elements at the end of the array 522 | db.update({ _id: 'id6' }, { $push: { fruits: 'banana' } }, {}, function () { 523 | // Now the fruits array is ['apple', 'orange', 'pear', 'banana'] 524 | }); 525 | 526 | // $pop removes an element from the end (if used with 1) or the front (if used with -1) of the array 527 | db.update({ _id: 'id6' }, { $pop: { fruits: 1 } }, {}, function () { 528 | // Now the fruits array is ['apple', 'orange'] 529 | // With { $pop: { fruits: -1 } }, it would have been ['orange', 'pear'] 530 | }); 531 | 532 | // $addToSet adds an element to an array only if it isn't already in it 533 | // Equality is deep-checked (i.e. $addToSet will not insert an object in an array already containing the same object) 534 | // Note that it doesn't check whether the array contained duplicates before or not 535 | db.update({ _id: 'id6' }, { $addToSet: { fruits: 'apple' } }, {}, function () { 536 | // The fruits array didn't change 537 | // If we had used a fruit not in the array, e.g. 'banana', it would have been added to the array 538 | }); 539 | 540 | // $pull removes all values matching a value or even any react-native-local-mongodb query from the array 541 | db.update({ _id: 'id6' }, { $pull: { fruits: 'apple' } }, {}, function () { 542 | // Now the fruits array is ['orange', 'pear'] 543 | }); 544 | db.update({ _id: 'id6' }, { $pull: { fruits: { $in: ['apple', 'pear'] } } }, {}, function () { 545 | // Now the fruits array is ['orange'] 546 | }); 547 | 548 | // $each can be used to $push or $addToSet multiple values at once 549 | // This example works the same way with $addToSet 550 | db.update({ _id: 'id6' }, { $push: { fruits: { $each: ['banana', 'orange'] } } }, {}, function () { 551 | // Now the fruits array is ['apple', 'orange', 'pear', 'banana', 'orange'] 552 | }); 553 | 554 | // $slice can be used in conjunction with $push and $each to limit the size of the resulting array. 555 | // A value of 0 will update the array to an empty array. A positive value n will keep only the first n elements. 556 | // A negative value -n will keep only the last n elements.
557 | // If $slice is specified but not $each, $each is set to [] 558 | db.update({ _id: 'id6' }, { $push: { fruits: { $each: ['banana'], $slice: 2 } } }, {}, function () { 559 | // Now the fruits array is ['apple', 'orange'] 560 | }); 561 | 562 | // $min/$max to update only if provided value is less/greater than current value 563 | // Let's say the database contains this document 564 | // doc = { _id: 'id1', name: 'Name', value: 5 } 565 | db.update({ _id: 'id1' }, { $min: { value: 2 } }, {}, function () { 566 | // The document will be updated to { _id: 'id1', name: 'Name', value: 2 } 567 | }); 568 | 569 | db.update({ _id: 'id1' }, { $min: { value: 8 } }, {}, function () { 570 | // The document will not be modified 571 | }); 572 | ``` 573 | 574 | ### Removing documents 575 | `db.remove(query, options, callback)` will remove all documents matching `query` according to `options` 576 | * `query` is the same as the ones used for finding and updating 577 | * `options` only one option for now: `multi` which allows the removal of multiple documents if set to true. Default is false 578 | * `callback` is optional, signature: `(err, numRemoved)` 579 | 580 | ```javascript 581 | // Let's use the same example collection as in the "finding document" part 582 | // { _id: 'id1', planet: 'Mars', system: 'solar', inhabited: false } 583 | // { _id: 'id2', planet: 'Earth', system: 'solar', inhabited: true } 584 | // { _id: 'id3', planet: 'Jupiter', system: 'solar', inhabited: false } 585 | // { _id: 'id4', planet: 'Omicron Persei 8', system: 'futurama', inhabited: true } 586 | 587 | // Remove one document from the collection 588 | // options set to {} since the default for multi is false 589 | db.remove({ _id: 'id2' }, {}, function (err, numRemoved) { 590 | // numRemoved = 1 591 | }); 592 | 593 | // Remove multiple documents 594 | db.remove({ system: 'solar' }, { multi: true }, function (err, numRemoved) { 595 | // numRemoved = 3 596 | // All planets from the solar system were removed 597 | }); 598 | 599 | // Removing all documents with the 'match-all' query 600 | db.remove({}, { multi: true }, function (err, numRemoved) { 601 | }); 602 | ``` 603 | 604 | ### Indexing 605 | react-native-local-mongodb supports indexing. It gives a very nice speed boost and can be used to enforce a unique constraint on a field. You can index any field, including fields in nested documents using the dot notation. For now, indexes are only used to speed up basic queries and queries using `$in`, `$lt`, `$lte`, `$gt` and `$gte`. The indexed values cannot be of type array of object. 606 | 607 | To create an index, use `datastore.ensureIndex(options, cb)`, where the callback is optional and gets passed an error if any (usually a unique constraint that was violated). `ensureIndex` can be called whenever you want, even after some data was inserted, though it's best to call it at application startup. The options are: 608 | 609 | * **fieldName** (required): name of the field to index. Use the dot notation to index a field in a nested document. 610 | * **unique** (optional, defaults to `false`): enforce field uniqueness. Note that a unique index will raise an error if you try to index two documents for which the field is not defined. 611 | * **sparse** (optional, defaults to `false`): don't index documents for which the field is not defined. Use this option along with "unique" if you want to accept multiple documents for which it is not defined.
612 | * **expireAfterSeconds** (number of seconds, optional): if set, the created index is a TTL (time to live) index, that will automatically remove documents when the system date becomes larger than the date on the indexed field plus `expireAfterSeconds`. Documents where the indexed field is not specified or not a `Date` object are ignored 613 | 614 | Note: the `_id` is automatically indexed with a unique constraint, no need to call `ensureIndex` on it. 615 | 616 | You can remove a previously created index with `datastore.removeIndex(fieldName, cb)`. 617 | 618 | If your datastore is persistent, the indexes you created are persisted in the datafile, and when you load the database a second time they are automatically created for you. There is no need to remove your `ensureIndex` calls though: if `ensureIndex` is called on a database that already has the index, nothing happens. 619 | 620 | ```javascript 621 | db.ensureIndex({ fieldName: 'somefield' }, function (err) { 622 | // If there was an error, err is not null 623 | }); 624 | 625 | // Using a unique constraint with the index 626 | db.ensureIndex({ fieldName: 'somefield', unique: true }, function (err) { 627 | }); 628 | 629 | // Using a sparse unique index 630 | db.ensureIndex({ fieldName: 'somefield', unique: true, sparse: true }, function (err) { 631 | }); 632 | 633 | 634 | // Format of the error message when the unique constraint is not met 635 | db.insert({ somefield: 'react-native-local-mongodb' }, function (err) { 636 | // err is null 637 | db.insert({ somefield: 'react-native-local-mongodb' }, function (err) { 638 | // err is { errorType: 'uniqueViolated' 639 | // , key: 'somefield' 640 | // , message: 'Unique constraint violated for key somefield' } 641 | }); 642 | }); 643 | 644 | // Remove index on field somefield 645 | db.removeIndex('somefield', function (err) { 646 | }); 647 | 648 | // Example of using expireAfterSeconds to remove documents 1 hour 649 | // after their creation (db's timestampData option is true here) 650 | db.ensureIndex({ fieldName: 'createdAt', expireAfterSeconds: 3600 }, function (err) { 651 | }); 652 | 653 | // You can also use the option to set an expiration date like so 654 | db.ensureIndex({ fieldName: 'expirationDate', expireAfterSeconds: 0 }, function (err) { 655 | // Now all documents will expire when system time reaches the date in their 656 | // expirationDate field 657 | }); 658 | 659 | ``` 660 | 661 | **Note:** the `ensureIndex` function creates the index synchronously, so it's best to use it at application startup. It's quite fast so it doesn't increase startup time much (35 ms for a collection containing 10,000 documents). 662 | 663 | ## License 664 | 665 | See [License](LICENSE) 666 | --------------------------------------------------------------------------------