├── .gitignore
├── .npmignore
├── LICENSE
├── README.md
├── db
│   └── mongoShell.js
├── dist
│   └── tortoiseDB.min.js
├── index.js
├── package-lock.json
├── package.json
├── server
│   ├── routes
│   │   ├── syncFromRoutes.js
│   │   └── syncToRoutes.js
│   └── server.js
├── syncFrom.js
├── syncTo.js
├── tortoiseDB-logo-1.png
├── tortoiseDB.js
└── webpack.prod.js
/.gitignore:
--------------------------------------------------------------------------------
1 | /node_modules/
2 | /.tmp/
3 | yarn.lock
4 | package-lock.json
5 |
--------------------------------------------------------------------------------
/.npmignore:
--------------------------------------------------------------------------------
1 | /node_modules/
2 | /db/
3 | /server/
4 | index.js
5 | syncFrom.js
6 | syncTo.js
7 | tortoiseDB-logo-1.png
8 | tortoiseDB.js
9 | webpack.prod.js
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2018 tortoiseDB
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |

2 |
3 |
4 | Overview •
5 | Getting Started •
6 | Features •
7 | Contributors •
8 | License
9 |
10 |
11 | # Overview
12 |
13 | An easy-to-set-up Node.js server and mongoDB wrapper for clients to sync to when using [turtleDB](https://github.com/turtle-DB/turtleDB). It enables offline-first applications built with turtleDB to be fully collaborative, with automated document versioning, history merging, and synchronization management.
14 |
15 | Built using [Express](https://github.com/expressjs/express) and [Mongo DB Native NodeJS Driver](https://github.com/mongodb/node-mongodb-native).
16 |
17 | # Getting Started
18 |
19 | ## Install
20 |
21 | Install via npm:
22 |
23 | ```bash
24 | npm i tortoisedb
25 | ```
26 |
27 | ## Usage
28 |
29 | ```javascript
30 | import TortoiseDB from 'tortoisedb';
31 | // or
32 | const TortoiseDB = require('tortoisedb');
33 | ```
34 |
35 | ```javascript
36 | // Create a new instance
37 | const app = new TortoiseDB({
38 | // Choose database name - defaults to 'default' if not provided
39 | name: 'demo',
40 | // Set server port - defaults to process.env.PORT if not provided
41 | port: 3000,
42 | // Provide mongodb URI - defaults to process.env.MONGODB_URI if not provided
43 | mongoURI: 'mongodb://localhost:27017',
44 | // Set batch limit - defaults to 1000 if not provided
45 | batchLimit: 1000
46 | });
47 |
48 | // Start up server
49 | app.start();
50 |
51 | // Drop database
52 | app.dropDB();
53 | ```
54 |
55 | # Features
56 |
57 | - One-line simple setup
58 | - Automatic integration with mongoDB and turtleDB
59 | - Batching during synchronization
60 |
61 | # Contributors
62 |
63 |
64 | Andrew Houston-Floyd - NYC - [Website](https://turtle-db.github.io/)
65 |
66 | Max Appleton - SF/Bay Area - [Website](https://maxiappleton.github.io/)
67 |
68 | Steven Shen - Toronto - [Website](https://rockdinosaur.github.io/)
69 |
70 | # License
71 |
72 | This project is licensed under the MIT License.
73 |
74 |
75 |
--------------------------------------------------------------------------------
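A quick way to verify a running instance: the bundled Express server (see server/server.js below) exposes a bare `GET /connect` route that returns an empty 200. A minimal sketch, assuming the README's demo config on port 3000:

```javascript
// Minimal reachability check against a running tortoiseDB server.
// Assumes the demo config from the README (port 3000); adjust the URL as needed.
const http = require('http');

http.get('http://localhost:3000/connect', (res) => {
  // server.js answers GET /connect with an empty 200 when the server is up
  console.log(`tortoiseDB reachable, status: ${res.statusCode}`);
}).on('error', (err) => {
  console.error('tortoiseDB not reachable:', err.message);
});
```
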
/db/mongoShell.js:
--------------------------------------------------------------------------------
1 | const { MongoClient, ObjectId } = require('mongodb');
2 |
3 | class MongoShell {
4 | constructor(name, url) {
5 | this._store = 'store';
6 | this._meta = 'metaStore';
7 | this._syncFromStore = 'syncFromStore';
8 | this._syncToStore = 'syncToStore';
9 | this._url = url;
10 | this._dbName = `tortoiseDB-${name}`;
11 |
12 | let db;
13 | this.connect()
14 | .then(tempDB => {
15 | db = tempDB;
16 | return db.listCollections().toArray();
17 | })
18 | .then(stores => {
19 | const storeNames = stores.map(store => store.name);
20 | if (!storeNames.includes(this._store)) {
21 | db.createCollection(this._store)
22 | .then(() => db.collection(this._store).createIndex({ _id_rev: 1 }))
23 | }
24 | if (!storeNames.includes(this._meta)) {
25 | db.createCollection(this._meta)
26 | }
27 | if (!storeNames.includes(this._syncFromStore)) {
28 | db.createCollection(this._syncFromStore)
29 | }
30 |         if (!storeNames.includes(this._syncToStore)) {
31 | db.createCollection(this._syncToStore)
32 | }
33 | })
34 | .catch(err => console.log("Error:", err));
35 | }
36 |
37 | connect() {
38 | return MongoClient.connect(this._url, { useNewUrlParser: true })
39 | .then(client => {
40 | this._client = client;
41 | return this._client.db(this._dbName);
42 | })
43 | .catch(err => console.log("error:", err));
44 | }
45 |
46 | // STORE OPERATIONS
47 | command(store, action, query, projection) {
48 | return this.connect()
49 | .then(db => db.collection(store))
50 | .then(collection => {
51 | if (action === "CREATE") {
52 | return collection.insertOne(query);
53 | } else if (action === "CREATE_MANY") {
54 | return collection.insertMany(query);
55 | } else if (action === "READ") {
56 | return collection.find(query, projection).toArray();
57 | } else if (action === 'READ_ALL') {
58 | return collection.find({}).toArray();
59 | } else if (action === 'READ_BETWEEN') {
60 | return collection.find({
61 | _id: {
62 | $gt: ObjectId(query.min),
63 | $lte: ObjectId(query.max)
64 | }
65 | }).toArray();
66 | } else if (action === 'READ_UP_TO') {
67 | return collection.find({
68 | _id: {
69 | $lte: ObjectId(query.max)
70 | }
71 | }).toArray();
72 | } else if (action === 'GET_MAX_ID') {
73 | return collection.find().sort({ _id: -1 }).limit(1).toArray();
74 | } else if (action === 'GET_ALL_IDS') {
75 | return collection.find({}, { _id: 1 }).sort({ _id: 1 }).map(function (item) { return item._id; }).toArray();
76 | } else if (action === 'GET_ALL_IDS_GREATER_THAN') {
77 | return collection.find({
78 | _id: {
79 | $gt: ObjectId(query.min)
80 | }
81 | }, { _id: 1 }).sort({ _id: 1 }).map(function (item) { return item._id; }).toArray();
82 | } else if (action === "UPDATE") {
83 | return collection.update({ _id: query._id }, query, { upsert: true });
84 | } else if (action === "UPDATE_MANY") {
85 | // let result = Promise.resolve();
86 | // query.forEach(doc => {
87 | // result = result.then(() => collection.update({ _id: doc._id }, doc, {upsert: true}));
88 | // });
89 | // return result;
90 | }
91 | })
92 | .then(res => {
93 | this._client.close();
94 | return res;
95 | })
96 | .catch(err => {
97 | this._client.close();
98 | console.log(`${action} error:`, err)
99 | })
100 | }
101 |
102 | updateManyMetaDocs(docs) {
103 | let result = Promise.resolve();
104 | docs.forEach(doc => {
105 | result = result.then(() => this.command(this._meta, "UPDATE", doc));
106 | });
107 |
108 | return result;
109 | }
110 |
111 | getStoreDocsByIdRevs(idRevs) {
112 | return this.command(this._store, 'READ', { _id_rev: { $in: idRevs } });
113 | }
114 |
115 | // METASTORE OPERATIONS
116 |
117 | getMetaDocsByIds(ids) {
118 | return this.command(this._meta, 'READ', { _id: { $in: ids } })
119 | }
120 |
121 | updateMetaDocs(docs) {
122 | return this.command(this._meta, 'UPDATE_MANY', docs);
123 | }
124 |
125 | dropDB() {
126 | return this.connect()
127 | .then(db => db.dropDatabase())
128 | .then(res => this._client.close())
129 | .catch(err => console.log('DropDB Error', err));
130 | }
131 | }
132 |
133 | module.exports = MongoShell;
134 |
--------------------------------------------------------------------------------
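The `command()` method above is the single dispatcher the sync sessions go through: each action string maps to one MongoDB driver call, and the client is closed after every command. A small sketch of driving it directly, mirroring the calls made by syncTo.js and syncFrom.js (the connection URI and document IDs are assumptions for illustration):

```javascript
// Sketch: using MongoShell.command() directly, the way syncTo.js/syncFrom.js do.
// Assumes a local mongod at mongodb://localhost:27017; the IDs below are made up.
const MongoShell = require('./db/mongoShell');

const shell = new MongoShell('demo', 'mongodb://localhost:27017');

// READ meta docs by _id — the same call getMetaDocsByIds() wraps
shell.command(shell._meta, 'READ', { _id: { $in: ['doc1', 'doc2'] } })
  .then(metaDocs => console.log(`found ${metaDocs.length} meta docs`))
  // GET_MAX_ID returns a one-element array with the store doc that has the highest _id
  .then(() => shell.command(shell._store, 'GET_MAX_ID', {}))
  .then(result => console.log('highest key:', result.length ? result[0]._id.toString() : '0'));
```
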
/dist/tortoiseDB.min.js:
--------------------------------------------------------------------------------
1 | [Minified UMD bundle — the output of `npm run build` (webpack --config webpack.prod.js). It packages tortoiseDB.js, db/mongoShell.js, syncTo.js, syncFrom.js and server/ into the single file that package.json exposes as "main"; see the unminified source files elsewhere in this repo for the readable code.]
--------------------------------------------------------------------------------
/index.js:
--------------------------------------------------------------------------------
1 | // const TortoiseDB = require('./tortoiseDB');
2 | const TortoiseDB = require('./dist/tortoiseDB.min.js');
3 |
4 | const db = new TortoiseDB({
5 | name: 'demo',
6 | port: 3000,
7 | mongoURI: 'mongodb://localhost:27017',
8 | batchLimit: 1000
9 | });
10 |
11 | db.start();
12 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "tortoisedb",
3 | "version": "1.0.8",
4 | "main": "dist/tortoiseDB.min.js",
5 | "license": "MIT",
6 | "description": "A simple Node server and mongoDB wrapper for clients to sync to when using turtleDB.",
7 | "homepage": "https://turtle-db.github.io/",
8 | "repository": {
9 | "type": "git",
10 | "url": "https://github.com/turtle-DB/tortoiseDB.git"
11 | },
12 | "keywords": [
13 | "indexeddb",
14 | "offline-first",
15 | "document",
16 | "database",
17 | "JSON",
18 | "versioning"
19 | ],
20 | "contributors": [
21 | {
22 | "name": "Andrew Houston-Floyd",
23 | "email": "andrew.houstonfloyd@gmail.com",
24 | "url": "https://turtle-db.github.io/"
25 | },
26 | {
27 | "name": "Steven Shen",
28 | "email": "steeve.shen@gmail.com",
29 | "url": "https://rockdinosaur.github.io/"
30 | },
31 | {
32 | "name": "Max Appleton",
33 | "email": "maxiappleton@gmail.com",
34 | "url": "https://maxiappleton.github.io/"
35 | }
36 | ],
37 | "scripts": {
38 | "build": "webpack --config webpack.prod.js",
39 | "start": "nodemon index.js",
40 | "debug": "DEBUG=tortoiseDB:* node index.js"
41 | },
42 | "unpkg": "dist/tortoiseDB.min.js",
43 | "dependencies": {
44 | "debug": "^3.1.0",
45 | "express": "^4.16.3",
46 | "mongodb": "^3.1.1",
47 | "supports-color": "^5.4.0"
48 | },
49 | "devDependencies": {
50 | "nodemon": "^1.18.3",
51 | "webpack": "^4.16.5",
52 | "webpack-cli": "^3.1.0",
53 | "webpack-node-externals": "^1.7.2"
54 | }
55 | }
--------------------------------------------------------------------------------
/server/routes/syncFromRoutes.js:
--------------------------------------------------------------------------------
1 | module.exports = function setUpSyncFromRoutes(tortoiseDB) {
2 | var express = require('express');
3 | const debug = require('debug');
4 |
5 | var router = express.Router();
6 |
7 | var log = debug('tortoiseDB:syncFrom');
8 | var logFrom = debug('tortoiseDB:syncFromSummary');
9 |
10 | router.post('/_last_tortoise_key', (req, res) => {
11 | // Initialize new replicateFrom object
12 | tortoiseDB.startSyncSession()
13 | .then(() => {
14 | logFrom('\n\n ------- NEW Turtle ==> Tortoise SYNC ------');
15 | log('\n #1 HTTP POST request <== Turtle requesting checkpoint from last sync session');
16 | })
17 | .then(() => tortoiseDB.syncFromSession.getLastTortoiseKey(req.body))
18 | .then(lastKey => {
19 | // log(`\n Get last Turtle key (${lastKey}) from previous sync session`);
20 | log('\n #2 HTTP response ==> Turtle with last key');
21 | res.send(lastKey.toString())
22 | })
23 | .catch(err => console.log("_last_tortoise_key error:", err))
24 |
25 |
26 | // logFrom('\n\n ------- NEW Turtle ==> Tortoise SYNC ------');
27 | // log('\n #1 HTTP POST request <== Turtle requesting checkpoint from last sync session');
28 |
29 | // // Then begin replication process
30 | // tortoiseDB.syncFromSession.getLastTortoiseKey(req.body)
31 | // .then(lastKey => {
32 | // // log(`\n Get last Turtle key (${lastKey}) from previous sync session`);
33 | // log('\n #2 HTTP response ==> Turtle with last key');
34 | // res.send(lastKey.toString())
35 | // })
36 | // .catch(err => console.log("_last_tortoise_key error:", err))
37 | });
38 |
39 | router.post('/_missing_rev_ids', (req, res) => {
40 |
41 | log(`\n #3 HTTP POST request <== Turtle with (${req.body.metaDocs.length}) changed meta docs`);
42 |
43 | tortoiseDB.syncFromSession.findAllMissingLeafNodes(req.body.metaDocs)
44 | .then(missingRevIds => {
45 | // log('\n Merge revision trees and list all missing records');
46 | log(`\n #4 HTTP response ==> Turtle requesting (${missingRevIds.length}) missing leaf-revs/docs`);
47 | res.send(missingRevIds)
48 | })
49 | .catch(err => console.log("_missing_rev_ids route error:", err));
50 | });
51 |
52 | router.post('/_insert_docs', (req, res) => {
53 | log(`\n #5 HTTP POST request <== Turtle with (${req.body.docs.length}) missing leaf-revs/docs`);
54 |
55 | if (req.body.lastBatch) {
56 | tortoiseDB.syncFromSession.saveDocsBatch(req.body.docs)
57 | .then(() => tortoiseDB.syncFromSession.insertUpdatedMetaDocs())
58 | .then(() => tortoiseDB.syncFromSession.insertNewDocsIntoStore())
59 | .then(() => tortoiseDB.syncFromSession.updateSyncFromTurtleDoc(req.body.newSyncToTortoiseDoc))
60 | .then(() => log('\n #6 HTTP response ==> Turtle with confirmation of insert and sync history'))
61 | .then(() => res.status(200).send())
62 | .then(() => logFrom('\n ------- Turtle ==> Tortoise sync complete ------ '))
63 | .catch(err => console.log("_insert_docs error:", err));
64 |
65 | } else {
66 | tortoiseDB.syncFromSession.saveDocsBatch(req.body.docs)
67 | .then(() => {
68 | log('\n #6 HTTP response ==> Batch saved to Tortoise');
69 | res.status(200).send();
70 | })
71 | .catch(err => console.log("_insert_docs error:", err));
72 | }
73 | });
74 |
75 | return router;
76 | }
77 |
--------------------------------------------------------------------------------
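Reading the handlers above, the Turtle ==> Tortoise replication runs as three POSTs. A hedged sketch of the JSON bodies each route expects; the field names come from the code, while every concrete value is invented:

```javascript
// Payload shapes for the syncFrom routes, as read from the handlers above.
// All concrete values here are hypothetical.

// #1 POST /_last_tortoise_key — the turtle sends its sync checkpoint document
const lastKeyPayload = {
  _id: 'turtle-uuid-123',
  history: [{ lastKey: 'some-previous-max-id', sessionID: '2018-08-01T00:00:00.000Z' }]
};

// #3 POST /_missing_rev_ids — the turtle sends its changed meta documents
const missingRevIdsPayload = { metaDocs: [/* turtle meta docs */] };

// #5 POST /_insert_docs — batches of missing docs; the final batch carries the new checkpoint
const insertDocsPayload = {
  docs: [/* store docs for the missing leaf revisions */],
  lastBatch: true,
  newSyncToTortoiseDoc: lastKeyPayload // only read when lastBatch is true
};
```
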
/server/routes/syncToRoutes.js:
--------------------------------------------------------------------------------
1 | module.exports = function setUpSyncToRoutes(tortoiseDB) {
2 | var express = require('express');
3 | var router = express.Router();
4 |
5 | const debug = require('debug');
6 | var log = debug('tortoiseDB:syncTo');
7 | var logTo = debug('tortoiseDB:syncToSummary');
8 |
9 | router.post('/_changed_meta_docs', (req, res) => {
10 | if (req.body.initial) {
11 | // Initialize new syncTo object
12 | tortoiseDB.syncTo();
13 | logTo('\n\n ------- NEW Tortoise ==> Turtle SYNC ------');
14 | log('\n #1 HTTP POST request <== Initial Turtle requesting any changes');
15 |
16 | tortoiseDB.syncToSession.getChangedMetaDocsForTurtle(req)
17 | .then((metaDocs) => {
18 | log(`\n #2 HTTP response ==> Turtle with (${metaDocs.metaDocs.length}) changed metadocs`);
19 | // If keys are the same and no new metadocs
20 | if (metaDocs.metaDocs.length === 0) {
21 | log('\n No sync needed - last key and highest key are equal');
22 | logTo('\n ------- Tortoise ==> Turtle sync complete ------');
23 | tortoiseDB.syncInProgress = false;
24 | }
25 | res.send(metaDocs)
26 | })
27 | .catch(err => console.log(err));
28 | } else {
29 | log('\n #1 HTTP POST request <== Turtle follow up request for next batch of metadocs');
30 | let metaDocs = tortoiseDB.syncToSession.sendBatchChangedMetaDocsToTurtle();
31 | res.send(metaDocs);
32 | log(`\n #2 HTTP response ==> Turtle with (${metaDocs.metaDocs.length}) changed metadocs`);
33 | }
34 | });
35 |
36 | router.post('/_changed_docs', (req, res) => {
37 |
38 | if (req.body.initial) {
39 | log(`\n #3 HTTP POST request <== Initial Turtle requesting (${req.body.revIds.length}) store docs`);
40 | tortoiseDB.syncToSession.getTortoiseDocsForTurtle(req)
41 | .then(docs => {
42 | log(`\n #4 HTTP response ==> Turtle with (${docs.docs.length}) store docs`);
43 | res.send(docs);
44 | });
45 | } else {
46 | log(`\n #3 HTTP POST request <== Turtle follow up request for store docs`);
47 | let docs = tortoiseDB.syncToSession.sendBatchDocsToTurtle();
48 | log(`\n #4 HTTP response ==> Turtle with (${docs.docs.length}) store docs`);
49 | res.send(docs);
50 | }
51 | });
52 |
53 | router.get('/_confirm_sync', (req, res) => {
54 | log('\n #5 HTTP GET request <== Turtle with confirmation');
55 | tortoiseDB.syncToSession.updateSyncToTurtleDoc()
56 | .then(() => {
57 | log('\n #6 HTTP response ==> Turtle with updated sync history and confirmation');
58 | res.status(200).send();
59 | tortoiseDB.syncInProgress = false;
60 | logTo('\n ------- Tortoise ==> Turtle sync complete ------');
61 | })
62 | // .then(() => log('\n #6 HTTP response ==> Turtle with updated sync history and confirmation'))
63 | // .then(() => res.status(200).send())
64 | // .then(() => tortoiseDB.syncInProgress = false)
65 | // .then(() => logTo('\n ------- Tortoise ==> Turtle sync complete ------'))
66 | .catch(err => console.log(err));
67 | });
68 |
69 | return router;
70 | }
71 |
--------------------------------------------------------------------------------
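The Tortoise ==> Turtle direction mirrors this: two POSTs that page through meta docs and store docs, then a GET to confirm. A sketch of the request and response shapes implied by the handlers above and by syncTo.js below (all concrete values are hypothetical):

```javascript
// Request/response shapes for the syncTo routes, as read from the handlers and syncTo.js.

// #1 POST /_changed_meta_docs — initial request, then follow-ups with initial: false
const changedMetaDocsRequest = { initial: true, lastTurtleKey: '0', turtleID: 'turtle-uuid-123' };
// response: { metaDocs: [...], lastBatch: false }

// #3 POST /_changed_docs — turtle asks for the store docs behind the missing leaf revisions
const changedDocsRequest = { initial: true, revIds: ['doc1::2-abc', 'doc2::1-def'] };
// response: { docs: [...], lastBatch: true, newSyncToTurtleDoc: { /* checkpoint, last batch only */ } }

// #5 GET /_confirm_sync — no body; Tortoise persists newSyncToTurtleDoc and replies 200
```
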
/server/server.js:
--------------------------------------------------------------------------------
1 | module.exports = function setUpServer(tortoiseDB) {
2 | const express = require('express');
3 | const bodyParser = require('body-parser');
4 |
5 | const syncToRoutes = require('./routes/syncToRoutes')(tortoiseDB);
6 | const syncFromRoutes = require('./routes/syncFromRoutes')(tortoiseDB);
7 |
8 | const app = express();
9 |
10 | // Tell the bodyparser middleware to accept more data
11 | app.use(bodyParser.json({ limit: '50mb' }));
12 | // app.use(bodyParser.urlencoded({limit: '50mb', extended: true}));
13 |
14 | app.use((req, res, next) => {
15 | res.header("Access-Control-Allow-Origin", "*");
16 | res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
17 | next();
18 | });
19 |
20 | app.use('/', syncToRoutes);
21 | app.use('/', syncFromRoutes);
22 |
23 | // Check connection
24 | app.get("/connect", (req, res) => {
25 | res.status(200).send();
26 | })
27 |
28 | // Drop DB
29 |
30 | app.get("/dropdb", (req, res) => {
31 | tortoiseDB.dropDB().then(() => res.status(200).send());
32 | });
33 |
34 | return app;
35 | }
36 |
--------------------------------------------------------------------------------
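Since setUpServer() returns a plain Express app (exposed as `tortoiseDB.server` by the TortoiseDB class below), it can be extended before the listener starts. A small sketch, assuming the README's demo options; the `/healthz` route is hypothetical and not part of tortoiseDB:

```javascript
// Attaching extra routes/middleware to the app built by setUpServer() before start().
const TortoiseDB = require('./tortoiseDB');

const db = new TortoiseDB({ name: 'demo', port: 3000, mongoURI: 'mongodb://localhost:27017' });

// Hypothetical extra route, purely for illustration.
db.server.get('/healthz', (req, res) => res.status(200).send('ok'));

db.start(); // listens on port 3000 with the built-in sync routes plus /healthz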
/syncFrom.js:
--------------------------------------------------------------------------------
1 | class SyncFrom {
2 | constructor(mongoShell) {
3 | this.docsFromTurtle = [];
4 | this.updatedMetaDocs = [];
5 | this.newTurtleMetaDocs = [];
6 | this.mongoShell = mongoShell;
7 | }
8 |
9 | // #1 HTTP POST '/_last_tortoise_key'
10 |
11 | getLastTortoiseKey(req) {
12 | const turtleID = req._id;
13 | const turtleSyncToLatestHistory = req.history[0];
14 |
15 | return this.mongoShell.command(this.mongoShell._syncFromStore, "READ", { _id: turtleID })
16 | .then(tortoiseSyncFromDocs => {
17 | const tortoiseSyncFromDoc = tortoiseSyncFromDocs[0];
18 |
19 | // If sync from doc already exists
20 | if (tortoiseSyncFromDoc) {
21 | const tortoiseSyncFromLatestHistory = tortoiseSyncFromDoc.history[0];
22 |
23 | // If doc exists but history never created for some reason
24 | if (!tortoiseSyncFromLatestHistory) {
25 | return 0;
26 | } else {
27 | // If last keys don't match, just start from 0
28 | if (tortoiseSyncFromLatestHistory.lastKey !== turtleSyncToLatestHistory.lastKey) {
29 | return 0;
30 | } else {
31 | return tortoiseSyncFromLatestHistory.lastKey;
32 | }
33 | }
34 | } else {
35 | return this.createSyncFromDoc(turtleID).then(() => 0);
36 | }
37 | })
38 | }
39 |
40 | createSyncFromDoc(turtleID) {
41 | const newHistory = { _id: turtleID, history: [] };
42 | return this.mongoShell.command(this.mongoShell._syncFromStore, "CREATE", newHistory)
43 | }
44 |
45 | // #3 HTTP POST '/_missing_rev_ids'
46 |
47 | findAllMissingLeafNodes(turtleMetaDocs) {
48 | // log(`\n\t --- Begin revision tree merge and conflict identification for ${turtleMetaDocs.length} metadocs --- `);
49 | return this.createMetaDocPairs(turtleMetaDocs)
50 | .then((metaDocPairs) => {
51 | // log(`\n\t\t createMetaDocPairs() - Get all matching tortoise metadocs from Mongo`);
52 | return this.createNewMetaDocs(metaDocPairs)
53 | })
54 | .then((metaDocTrios) => {
55 | return this.findMissingLeafNodes(metaDocTrios)
56 | })
57 | .then((missingLeafNodes) => {
58 | // log(`\n\t\t findMissingLeafNodes() - Make a list of leaf nodes that Tortoise is missing`);
59 | // log(`\n\t --- Complete revision tree merge and conflict identification for ${turtleMetaDocs.length} metadocs --- `);
60 | return missingLeafNodes;
61 | });
62 | }
63 |
64 | createMetaDocPairs(turtleMetaDocs) {
65 | // log(`\n\t\t Begin searching for matching metadocs...`);
66 | let metaDocPairs = {};
67 |
68 | const turtleMetaDocIds = turtleMetaDocs.map(turtleMetaDoc => turtleMetaDoc._id);
69 | return this.mongoShell.getMetaDocsByIds(turtleMetaDocIds)
70 | .then(tortoiseMetaDocs => {
71 | turtleMetaDocs.forEach(turtleMetaDoc => {
72 | metaDocPairs[turtleMetaDoc._id] = { 'turtle': turtleMetaDoc, 'tortoise': null };
73 | })
74 |
75 | tortoiseMetaDocs.forEach(tortoiseMetaDoc => {
76 | metaDocPairs[tortoiseMetaDoc._id]['tortoise'] = tortoiseMetaDoc;
77 | })
78 |
79 | return metaDocPairs;
80 | })
81 | }
82 |
83 | createNewMetaDocs(metaDocPairs) {
84 | this.newRevisionTrees(metaDocPairs);
85 | // log(`\n\t\t newRevisionTrees() - Merge revision trees`);
86 | this.newMetaDocs(metaDocPairs);
87 | // log(`\n\t\t createNewMetaDocs() - Use new tree to update _winningRev, activeLeaf properties; create new metadocs`);
88 | return metaDocPairs;
89 | }
90 |
91 | newRevisionTrees(metaDocPairs) {
92 | let docIDs = Object.keys(metaDocPairs);
93 |
94 | docIDs.forEach(id => {
95 | let metaDocPair = metaDocPairs[id];
96 | let tortoiseMetaDoc = metaDocPair.tortoise;
97 | let turtleMetaDoc = metaDocPair.turtle;
98 |
99 | if (tortoiseMetaDoc) {
100 | const tortoiseRevTree = tortoiseMetaDoc._revisions;
101 | const turtleRevTree = turtleMetaDoc._revisions;
102 | metaDocPair.newRevisionTree = this.mergeRevTrees(tortoiseRevTree, turtleRevTree);
103 | } else {
104 | metaDocPair.new = null;
105 | }
106 | });
107 | }
108 |
109 | newMetaDocs(metaDocPairs) {
110 | let docIDs = Object.keys(metaDocPairs);
111 |
112 | docIDs.forEach(id => {
113 | let metaDocPair = metaDocPairs[id];
114 | let tortoiseMetaDoc = metaDocPair.tortoise;
115 |
116 | if (tortoiseMetaDoc) {
117 | metaDocPair.new = {
118 | _id: tortoiseMetaDoc._id,
119 | _revisions: metaDocPair.newRevisionTree,
120 | _winningRev: this.getWinningRev(metaDocPair.newRevisionTree),
121 | _leafRevs: this.collectActiveLeafRevs(metaDocPair.newRevisionTree)
122 | };
123 | } else {
124 | metaDocPair.new = null;
125 | }
126 | });
127 | }
128 |
129 | findMissingLeafNodes(metaDocTrios) {
130 | let dummymissingLeafNodes = [];
131 | let allLeafNodes = this.collectAllLeafIdRevs(metaDocTrios);
132 |
133 | return this.filterToMissingLeafNodes(allLeafNodes) //this.mongoShell.getStoreDocsByIdRevs
134 | .then(leafNodes => {
135 | dummymissingLeafNodes = leafNodes;
136 | let remainingTurtleLeafNodes = this.getNewTurtleLeafNodes(metaDocTrios);
137 | dummymissingLeafNodes = dummymissingLeafNodes.concat(remainingTurtleLeafNodes);
138 | })
139 | .then(() => {
140 | this.sortMetaDocsForSave(metaDocTrios);
141 | })
142 | .then(() => dummymissingLeafNodes)
143 | }
144 |
145 | collectAllLeafIdRevs(metaDocTrios) {
146 | let docIDs = Object.keys(metaDocTrios);
147 | let leafIdRevs = [];
148 |
149 | docIDs.forEach(id => {
150 | let metaDocTrio = metaDocTrios[id];
151 | let tortoiseMetaDoc = metaDocTrio.tortoise;
152 | let newMetaDoc = metaDocTrio.new;
153 | if (newMetaDoc) {
154 | const leafRevs = this.collectAllLeafRevs(newMetaDoc._revisions);
155 | const docId = newMetaDoc._id;
156 | leafRevs.forEach(rev => {
157 | let idRev = docId + '::' + rev;
158 | leafIdRevs.push(idRev);
159 | });
160 | }
161 | });
162 |
163 | return leafIdRevs;
164 | }
165 |
166 | collectAllLeafRevs(node, leafRevs = []) {
167 | if (node[2].length === 0) {
168 | leafRevs.push(node[0]);
169 | }
170 |
171 | for (let i = 0; i < node[2].length; i++) {
172 | this.collectAllLeafRevs(node[2][i], leafRevs);
173 | }
174 |
175 | return leafRevs;
176 | }
177 |
178 | filterToMissingLeafNodes(allLeafNodes) {
179 | return this.mongoShell.getStoreDocsByIdRevs(allLeafNodes)
180 | .then(tortoiseDocs => {
181 | const existingTortoiseIdRevs = tortoiseDocs.map(doc => doc._id_rev);
182 | return allLeafNodes.filter(idRev => !existingTortoiseIdRevs.includes(idRev));
183 | });
184 | }
185 |
186 | getNewTurtleLeafNodes(metaDocTrios) {
187 | let docIDs = Object.keys(metaDocTrios);
188 | let remainingTurtleLeafNodes = [];
189 |
190 | docIDs.forEach(id => {
191 | let metaDocTrio = metaDocTrios[id];
192 | let newMetaDoc = metaDocTrio.new;
193 | let turtleMetaDoc = metaDocTrio.turtle;
194 |
195 | if (!newMetaDoc && turtleMetaDoc._winningRev) {
196 | for (let i = 0; i < turtleMetaDoc._leafRevs.length; i++) {
197 | remainingTurtleLeafNodes.push(turtleMetaDoc._id + '::' + turtleMetaDoc._leafRevs[i]);
198 | }
199 | // remainingTurtleLeafNodes.push(turtleMetaDoc._id + '::' + turtleMetaDoc._winningRev);
200 | }
201 | });
202 | return remainingTurtleLeafNodes;
203 | }
204 |
205 | sortMetaDocsForSave(metaDocTrios) {
206 | let docIDs = Object.keys(metaDocTrios);
207 |
208 | docIDs.forEach(id => {
209 | let metaDocTrio = metaDocTrios[id];
210 | let newMetaDoc = metaDocTrio.new;
211 | let turtleMetaDoc = metaDocTrio.turtle;
212 |
213 | newMetaDoc ? this.updatedMetaDocs.push(newMetaDoc) : this.newTurtleMetaDocs.push(turtleMetaDoc);
214 | });
215 |
216 | return;
217 | }
218 |
219 | // #5 HTTP POST '/_insert_docs'
220 |
221 | insertUpdatedMetaDocs() {
222 | return Promise.resolve().then(() => {
223 | return this.mongoShell.updateManyMetaDocs(this.updatedMetaDocs);
224 | })
225 | .then(() => {
226 | if (this.newTurtleMetaDocs.length > 0) {
227 | return this.mongoShell.command(this.mongoShell._meta, "CREATE_MANY", this.newTurtleMetaDocs);
228 | }
229 | })
230 | }
231 |
232 | insertNewDocsIntoStore() {
233 |     if (this.docsFromTurtle.length === 0) {
234 | console.log('FYI: No docs were sent over from turtle to insert.');
235 | return Promise.resolve();
236 | } else {
237 | if (this.docsFromTurtle.length > 0) {
238 | return this.mongoShell.command(this.mongoShell._store, "CREATE_MANY", this.docsFromTurtle);
239 | }
240 | }
241 | }
242 |
243 | updateSyncFromTurtleDoc(newSyncFromTurtleDoc) {
244 | return this.mongoShell.command(this.mongoShell._syncFromStore, "UPDATE", newSyncFromTurtleDoc)
245 | }
246 |
247 | saveDocsBatch(docs) {
248 | return Promise.resolve(this.docsFromTurtle.push(...docs));
249 | }
250 |
251 |
252 | // Helpers
253 |
254 | mergeRevTrees(node1, node2) {
255 | const node1Children = node1[2];
256 | const node2Children = node2[2];
257 |
258 | const commonNodes = this.findCommonNodes(node1Children, node2Children);
259 |
260 | const node2ChildrenDiffs = this.getNode2ChildrenDiffs(node1Children, node2Children);
261 | node1[2] = [...node1Children, ...node2ChildrenDiffs];
262 |
263 | for (let i = 0; i < commonNodes.length; i++) {
264 | let commonNodesPair = commonNodes[i];
265 | this.mergeRevTrees(commonNodesPair[0], commonNodesPair[1]);
266 | }
267 |
268 | return node1;
269 | }
270 |
271 | findCommonNodes(node1Children, node2Children) {
272 | let commonNodes = [];
273 |
274 | for (let i = 0; i < node1Children.length; i++) {
275 | let node1Child = node1Children[i];
276 | for (let j = 0; j < node2Children.length; j++) {
277 | let node2Child = node2Children[j];
278 | if (node2Child[0] === node1Child[0]) {
279 | commonNodes.push([node1Child, node2Child]);
280 | }
281 | }
282 | }
283 |
284 | return commonNodes;
285 | }
286 |
287 | getNode2ChildrenDiffs(node1Children, node2Children) {
288 | const node1ChildRevs = node1Children.map(node => node[0]);
289 | return node2Children.filter(node2Child => !node1ChildRevs.includes(node2Child[0]));
290 | }
291 |
292 | getWinningRev(node) {
293 | const leafRevs = this.collectActiveLeafRevs(node);
294 |
295 | return leafRevs.sort((a, b) => {
296 | let [revNumA, revHashA] = a.split('-');
297 | let [revNumB, revHashB] = b.split('-');
298 | revNumA = parseInt(revNumA, 10);
299 | revNumB = parseInt(revNumB, 10);
300 |
301 | if (revNumA > revNumB) {
302 | return -1;
303 | } else if (revNumA < revNumB) {
304 | return 1;
305 | } else {
306 | if (revHashA > revHashB) {
307 | return -1;
308 | } else {
309 | return 1;
310 | }
311 | }
312 | })[0];
313 | }
314 |
315 | collectActiveLeafRevs(node, leafRevs = []) {
316 | if (node[2].length === 0 && !node[1]._deleted) {
317 | leafRevs.push(node[0]);
318 | }
319 |
320 | for (let i = 0; i < node[2].length; i++) {
321 | this.collectActiveLeafRevs(node[2][i], leafRevs);
322 | }
323 |
324 | return leafRevs;
325 | }
326 |
327 | findMissingLeafNodesOfDoc(metaDoc) {
328 | const leafRevs = this.collectAllLeafRevs(metaDoc._revisions);
329 | const docId = metaDoc._id;
330 | const leafIdRevs = leafRevs.map(rev => docId + '::' + rev);
331 |
332 | return this.mongoShell.getStoreDocsByIdRevs(leafIdRevs)
333 | .then(tortoiseDocs => {
334 | const existingTortoiseIdRevs = tortoiseDocs.map(doc => doc._id_rev);
335 | return leafIdRevs.filter(idRev => !existingTortoiseIdRevs.includes(idRev));
336 | });
337 | }
338 | }
339 |
340 | module.exports = SyncFrom;
341 |
--------------------------------------------------------------------------------
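The revision-tree helpers above operate on nodes shaped as `[rev, props, children]` (see `collectActiveLeafRevs`, which reads `node[0]`, `node[1]._deleted`, and `node[2]`). A hedged worked example of merging two conflicting trees; the revision strings are made up:

```javascript
// Worked example of SyncFrom.mergeRevTrees() on the [rev, props, children] node shape.
// No MongoShell is needed for the pure tree helpers.
const SyncFrom = require('./syncFrom');

const sync = new SyncFrom(null);

// Tortoise knows 1-a -> 2-b; Turtle knows 1-a -> 2-c (a conflicting edit)
const tortoiseTree = ['1-a', {}, [['2-b', {}, []]]];
const turtleTree   = ['1-a', {}, [['2-c', {}, []]]];

const merged = sync.mergeRevTrees(tortoiseTree, turtleTree);
// merged keeps both branches: ['1-a', {}, [['2-b', {}, []], ['2-c', {}, []]]]

console.log(sync.collectActiveLeafRevs(merged)); // ['2-b', '2-c']
console.log(sync.getWinningRev(merged));         // '2-c' (equal rev numbers, higher hash wins)
```
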
/syncTo.js:
--------------------------------------------------------------------------------
1 | class SyncTo {
2 | constructor(mongoShell, batchLimit) {
3 | this.sessionID = new Date().toISOString();
4 | this.mongoShell = mongoShell;
5 | this.batchLimit = batchLimit;
6 | }
7 |
8 | // #1 HTTP POST '/_changed_meta_docs'
9 |
10 | getChangedMetaDocsForTurtle(req) {
11 | const lastTurtleKey = req.body.lastTurtleKey;
12 | this.turtleID = req.body.turtleID;
13 |
14 |
15 | return this.getHighestTortoiseKey() // this.highestTortoiseKey
16 | .then(() => {
17 | if (lastTurtleKey === this.highestTortoiseKey) {
18 | return {
19 | lastBatch: true,
20 | metaDocs: []
21 | };
22 | } else {
23 | return this.getMetaDocsBetweenStoreKeys(lastTurtleKey, this.highestTortoiseKey)
24 | .then(docs => this.getUniqueIDs(docs))
25 | .then(ids => this.getMetaDocsByIDs(ids)) // this.changedTortoiseMetaDocs
26 | .then(() => this.sendBatchChangedMetaDocsToTurtle());
27 | }
28 | });
29 | }
30 |
31 | getHighestTortoiseKey() {
32 | return this.mongoShell.command(this.mongoShell._store, "GET_MAX_ID", {})
33 | .then(key => {
34 | if (key.length === 0) {
35 | this.highestTortoiseKey = '0';
36 | } else {
37 | this.highestTortoiseKey = key[0]._id.toString();
38 | }
39 | });
40 | }
41 |
42 | getMetaDocsBetweenStoreKeys(lastTurtleKey, highestTortoiseKey) {
43 | if (lastTurtleKey !== '0') {
44 | return this.mongoShell.command(this.mongoShell._store, "READ_BETWEEN", { min: lastTurtleKey, max: highestTortoiseKey });
45 | } else {
46 | return this.mongoShell.command(this.mongoShell._store, "READ_UP_TO", { max: highestTortoiseKey });
47 | }
48 | }
49 |
50 | getUniqueIDs(docs) {
51 | let ids = {};
52 | for (let i = 0; i < docs.length; i++) {
53 | const id = docs[i]._id_rev.split("::")[0];
54 | if (ids[id]) continue;
55 | ids[id] = true;
56 | }
57 | const uniqueIDs = Object.keys(ids);
58 | return uniqueIDs;
59 | }
60 |
61 | getMetaDocsByIDs(ids) {
62 | return this.mongoShell.command(this.mongoShell._meta, "READ", { _id: { $in: ids } })
63 | .then((metaDocs) => this.changedTortoiseMetaDocs = metaDocs);
64 | }
65 |
66 | sendBatchChangedMetaDocsToTurtle() {
67 | let currentBatch = this.changedTortoiseMetaDocs.splice(0, this.batchLimit);
68 | return {
69 | metaDocs: currentBatch,
70 | lastBatch: this.changedTortoiseMetaDocs.length === 0
71 | };
72 | }
73 |
74 | // #3 HTTP POST '/_changed_docs'
75 |
76 | getTortoiseDocsForTurtle(req) {
77 | const revIds = req.body.revIds;
78 |
79 | return this.getStoreDocsForTurtle(revIds) // this.storeDocsForTurtle
80 | .then(() => this.createNewSyncToTurtleDoc())
81 | .then(() => this.sendBatchDocsToTurtle());
82 | }
83 |
84 | getStoreDocsForTurtle(revIds) {
85 | return this.mongoShell.command(this.mongoShell._store, "READ", { _id_rev: { $in: revIds } }, { fields: { _id: 0 } })
86 | .then(docs => this.storeDocsForTurtle = docs);
87 | }
88 |
89 | sendBatchDocsToTurtle() {
90 | let currentBatch = this.storeDocsForTurtle.splice(0, this.batchLimit);
91 | let lastBatch = this.storeDocsForTurtle.length === 0;
92 | const payload = {
93 | docs: currentBatch,
94 | lastBatch: lastBatch
95 | };
96 | if (lastBatch) { payload.newSyncToTurtleDoc = this.newSyncToTurtleDoc; }
97 |
98 | return payload;
99 | }
100 |
101 | // #5 HTTP GET '/_confirm_sync'
102 |
103 | updateSyncToTurtleDoc() {
104 | return this.mongoShell.command(this.mongoShell._syncToStore, "UPDATE", this.newSyncToTurtleDoc);
105 | }
106 |
107 | // Sync To Turtle Doc Helper Methods...
108 |
109 | createNewSyncToTurtleDoc() {
110 | return this.getSyncToTurtleDoc()
111 | .then(syncToTurtleDoc => {
112 | let newHistory = { lastKey: this.highestTortoiseKey, sessionID: this.sessionID };
113 | this.newSyncToTurtleDoc = Object.assign(
114 | syncToTurtleDoc, { history: [newHistory].concat(syncToTurtleDoc.history) }
115 | );
116 | })
117 | }
118 |
119 | getSyncToTurtleDoc() {
120 | return this.mongoShell.command(this.mongoShell._syncToStore, "READ", { _id: this.turtleID })
121 | .then(docs => {
122 | if (docs.length === 0) {
123 | return this.initializeSyncToTurtleDoc(this.turtleID)
124 | } else {
125 | return docs[0];
126 | }
127 | });
128 | }
129 |
130 | initializeSyncToTurtleDoc(turtleID) {
131 | const newHistory = { _id: turtleID, history: [] }
132 | return this.mongoShell.command(this.mongoShell._syncToStore, "CREATE", newHistory)
133 | .then(() => newHistory)
134 | .catch(err => console.log(err));
135 | }
136 | }
137 |
138 | module.exports = SyncTo;
139 |
--------------------------------------------------------------------------------
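`batchLimit` shows up only in the two `sendBatch...` methods above: each call splices off at most one batch and flags `lastBatch` once nothing is left. A small sketch of that behaviour with hand-rolled data, no MongoDB involved:

```javascript
// Sketch of the batching behaviour in sendBatchChangedMetaDocsToTurtle().
// changedTortoiseMetaDocs is normally filled by getMetaDocsByIDs(); faked here.
const SyncTo = require('./syncTo');

const session = new SyncTo(null /* mongoShell unused here */, 2 /* batchLimit */);
session.changedTortoiseMetaDocs = [{ _id: 'a' }, { _id: 'b' }, { _id: 'c' }];

console.log(session.sendBatchChangedMetaDocsToTurtle());
// { metaDocs: [{ _id: 'a' }, { _id: 'b' }], lastBatch: false }
console.log(session.sendBatchChangedMetaDocsToTurtle());
// { metaDocs: [{ _id: 'c' }], lastBatch: true }
```
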
/tortoiseDB-logo-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/turtle-DB/tortoiseDB/b0972d648bd7a3e76e218df42b83a0eb5de40bdb/tortoiseDB-logo-1.png
--------------------------------------------------------------------------------
/tortoiseDB.js:
--------------------------------------------------------------------------------
1 | const MongoShell = require('./db/mongoShell');
2 | const SyncTo = require('./syncTo');
3 | const SyncFrom = require('./syncFrom');
4 | const setUpServer = require('./server/server');
5 |
6 | class TortoiseDB {
7 | constructor({ name = 'default', port = process.env.PORT, mongoURI = process.env.MONGODB_URI, batchLimit = 1000 } = {}) {
8 | this.port = port;
9 | this.mongoShell = new MongoShell(name, mongoURI);
10 | this.server = setUpServer(this);
11 | this.syncInProgress = false;
12 | this.batchLimit = batchLimit;
13 | }
14 |
15 | start() {
16 | this.server.listen(this.port);
17 |     console.log(`TortoiseDB server ready to go on port ${this.port}!`);
18 | }
19 |
20 | startSyncSession() {
21 | const checkSyncProgress = (resolve) => {
22 | if (!this.syncInProgress) {
23 | clearInterval(this.intervalObj);
24 | this.syncInProgress = true;
25 | this.syncFrom();
26 | resolve();
27 | } else {
28 | console.log('Sorry another sync still in progress.');
29 | }
30 | };
31 |
32 | return new Promise((resolve, reject) => {
33 | if (!this.syncInProgress) {
34 | this.syncInProgress = true;
35 | this.syncFrom();
36 | resolve();
37 | } else {
38 | console.log('Sorry another sync still in progress.');
39 | this.intervalObj = setInterval(checkSyncProgress.bind(this, resolve), 200);
40 | }
41 | });
42 | }
43 |
44 | syncFrom() {
45 | this.syncFromSession = new SyncFrom(this.mongoShell);
46 | }
47 |
48 | syncTo() {
49 | this.syncToSession = new SyncTo(this.mongoShell, this.batchLimit);
50 | }
51 |
52 | dropDB() {
53 | return this.mongoShell.dropDB();
54 | }
55 | }
56 |
57 | module.exports = TortoiseDB;
58 |
--------------------------------------------------------------------------------
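`startSyncSession()` above serializes overlapping Turtle ==> Tortoise syncs: the first caller flips `syncInProgress` and resolves immediately, while a second caller polls every 200 ms until the flag clears. A hedged sketch (assumes a reachable mongod for the MongoShell the constructor creates):

```javascript
// Sketch of startSyncSession() queuing. Assumes mongod at mongodb://localhost:27017.
const TortoiseDB = require('./tortoiseDB');

const db = new TortoiseDB({ name: 'demo', port: 3000, mongoURI: 'mongodb://localhost:27017' });

db.startSyncSession().then(() => console.log('first sync session started'));
db.startSyncSession().then(() => console.log('second session started once the first finished'));

// The sync routes normally clear the flag when a sync completes; simulate that here.
setTimeout(() => { db.syncInProgress = false; }, 1000);
```
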
/webpack.prod.js:
--------------------------------------------------------------------------------
1 | const path = require('path');
2 | const nodeExternals = require('webpack-node-externals');
3 |
4 | module.exports = {
5 | mode: 'production',
6 | entry: './tortoiseDB.js',
7 | target: 'node',
8 | externals: [nodeExternals()],
9 | output: {
10 | path: path.resolve(__dirname, 'dist'),
11 | filename: 'tortoiseDB.min.js',
12 | library: 'TortoiseDB',
13 | libraryTarget: 'umd'
14 | }
15 | };
16 |
--------------------------------------------------------------------------------