├── .gitignore
├── .npmignore
├── .travis.yml
├── CHANGELOG.md
├── Gruntfile.js
├── LICENSE
├── README.md
├── appveyor.yml
├── examples
│   ├── callback.js
│   ├── download.tar
│   ├── drop.js
│   ├── dropCollections.js
│   ├── fs_stream.js
│   ├── simple.js
│   └── tar.js
├── index.js
├── index.min.js
├── package.json
└── test
    ├── 1.js
    ├── directory.js
    ├── empty_directory.js
    ├── error.js
    ├── indexes.js
    ├── parser.js
    ├── stream.js
    ├── tar.js
    ├── z_clear.js
    └── z_int64id.js
/.gitignore: -------------------------------------------------------------------------------- 1 | # OS and IDE 2 | .DS_Store 3 | .project 4 | .settings 5 | 6 | # Logs 7 | *.log 8 | 9 | # Runtime data 10 | pids 11 | *.pid 12 | *.seed 13 | 14 | # Directory for instrumented libs generated by jscoverage/JSCover 15 | lib-cov/ 16 | 17 | # Coverage directory used by tools like istanbul 18 | coverage/ 19 | tmp/ 20 | 21 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 22 | .grunt 23 | 24 | # Compiled binary addons (http://nodejs.org/api/addons.html) 25 | build/ 26 | 27 | # Dependency directory 28 | # Deployed apps should consider commenting this line out: 29 | # see https://npmjs.org/doc/faq.html#Should-I-check-my-node_modules-folder-into-git 30 | node_modules/ 31 | bower_components/ 32 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | # Env configuration 2 | .* 3 | *.yml 4 | Gruntfile.js 5 | bower.json 6 | 7 | # Logs 8 | *.log 9 | 10 | # Coverage directory used by tools like istanbul 11 | coverage/ 12 | tmp/ 13 | 14 | # Repository directory 15 | examples/ 16 | test/ 17 | 18 | # Dependency directory 19 | # Deployed apps should consider commenting this line out: 20 | # see https://npmjs.org/doc/faq.html#Should-I-check-my-node_modules-folder-into-git 21 | node_modules/ 22 | bower_components/ 23 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: false 2 | language: node_js 3 | node_js: 4 | - "4" 5 | - "5" 6 | - "6" 7 | - "7" 8 | matrix: 9 | include: 10 | - node_js: "8" 11 | dist: trusty 12 | env: "NVM_NODEJS_ORG_MIRROR=https://nodejs.org/download/nightly" 13 | allow_failures: 14 | - env: "NVM_NODEJS_ORG_MIRROR=https://nodejs.org/download/nightly" 15 | os: 16 | - linux 17 | - osx 18 | cache: 19 | directories: 20 | - node_modules 21 | before_install: 22 | - "test ! -d node_modules || npm prune" 23 | - "test ! 
-d node_modules || npm rebuild" 24 | script: "npm run-script test-cov" 25 | after_script: "npm i coveralls@~2.11 && cat ./coverage/lcov.info | coveralls" 26 | notifications: 27 | email: 28 | on_success: never 29 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | v1.6.2 / 2016-12-13 2 | ================== 3 | 4 | * Update `bson`@1.0.1 5 | * Update `mongodb`@2.2.16 6 | * Update `graceful-fs`@4.1.11 7 | * Update `logger-request`@3.7.3 8 | 9 | v1.6.1 / 2016-11-08 10 | ================== 11 | 12 | * Remove support for `node`@0 13 | * Update `graceful-fs`@4.1.10 14 | * Update `logger-request`@3.7.2 15 | 16 | v1.6.0 / 2016-10-27 17 | ================== 18 | 19 | * Prevent TOCTOU race condition 20 | * Fix index creation with 2dsphereIndex 21 | * Tested against `node`@7 22 | * Improve pathname join with node API 23 | * Prevent logger crash if err is null 24 | * Update `bson`@0.5.6 25 | * Update `mongodb`@2.2.11 26 | 27 | v1.5.6 / 2016-09-20 28 | ================== 29 | 30 | * Update `bson`@0.5.5 31 | * Update `mongodb`@2.2.10 32 | 33 | v1.5.5 / 2016-09-13 34 | ================== 35 | 36 | * Pass `err` argument to callback function 37 | * Update `bson`@0.5.4 38 | * Update `graceful-fs`@4.1.6 39 | * Update `mongodb`@2.2.9 40 | * Update `logger-request`@3.7.1 41 | 42 | v1.5.4 / 2016-07-31 43 | ================== 44 | 45 | * Tested against `node`@6 46 | * Install `graceful-fs`@4.1.5 for https://github.com/hex7c0/mongodb-backup/issues/16 47 | * Update `logger-request`@3.6.1 48 | * Update `bson`@0.5.2 49 | * Update `mongodb`@2.2.5 50 | 51 | v1.5.3 / 2016-04-24 52 | ================== 53 | 54 | * Change LICENSE to Apache2 from GPL3 55 | * Tested against `node`@5.x 56 | * Update `logger-request`@3.6.1 57 | * Update `bson`@0.4.23 58 | * Update `mongodb`@2.1.16 59 | 60 | v1.5.2 / 2016-02-07 61 | ================== 62 | 63 | * Using "bulkWrite" (insertOne) operations 64 | * Update `mongodb`@2.1.6 65 | * Update `bson`@0.4.21 66 | 67 | v1.5.1 / 2016-01-05 68 | ================== 69 | 70 | * Tested against `node`@5.3 71 | * Update `mongodb`@2.1.3 72 | 73 | v1.5.0 / 2015-12-29 74 | ================== 75 | 76 | * Versions 1.4.x will be released under the `stable` tag 77 | * Update `mongodb`@2.1.2 78 | 79 | v1.4.3 / 2015-12-27 80 | ================== 81 | 82 | * Tested against `node`@5 83 | * Update `logger-request`@3.6.0 84 | * Update `mongodb`@2.0.53 85 | * Update `bson`@0.4.20 86 | 87 | v1.4.2 / 2015-10-07 88 | ================== 89 | 90 | * Update `mongodb`@2.0.45 91 | * Update `bson`@0.4.16 92 | 93 | v1.4.1 / 2015-09-17 94 | ================== 95 | 96 | * Tested against `node`@4 97 | * Update `mongodb`@2.0.43 98 | * Update `tar`@2.2.1 99 | 100 | v1.4.0 / 2015-09-03 101 | ================== 102 | 103 | * Add "dropCollections" (options) 104 | 105 | v1.3.0 / 2015-09-02 106 | ================== 107 | 108 | * Tested against `iojs`@3 109 | * Update `logger-request`@3.4.0 110 | * Update `mongodb`@2.0.42 111 | * Update `bson`@0.4.11 112 | * Update `tar`@2.2.0 113 | 114 | v1.2.2 / 2015-07-18 115 | ================== 116 | 117 | * Update `logger-request`@3.3.5 118 | * Update `mongodb`@2.0.39 119 | * Update `bson`@0.4.8 120 | 121 | v1.2.1 / 2015-06-21 122 | ================== 123 | 124 | * SPDX license 125 | * Update `logger-request`@3.3.4 126 | * Update `mongodb`@2.0.34 127 | 128 | v1.2.0 / 2015-05-14 129 | ================== 130 | 131 | * Tested against `iojs`@2 132 | * Update `tar`@2.1.1 133 
| * Update `logger-request`@3.3.3 134 | * Update `bson`@0.3.2 135 | * Update `mongodb`@2.0.31 136 | 137 | v1.1.1 / 2015-04-13 138 | ================== 139 | 140 | * Update `tar`@2.0.1 141 | * Update `logger-request`@3.3.2 142 | * Update `bson`@0.3.1 143 | * Update `mongodb`@2.0.27 144 | 145 | v1.1.0 / 2015-03-02 146 | ================== 147 | 148 | * Add "stream" (options) 149 | * Add "drop" (options) 150 | * `coveralls` test 151 | * Update `logger-request`@3.3.1 152 | * Update `bson`@0.2.19 153 | * Update `mongodb`@2.0.18 154 | 155 | v1.0.2 / 2015-02-09 156 | ================== 157 | 158 | * `windows` test 159 | * `iojs` test 160 | * Update `logger-request`@3.2.8 161 | * Update `bson`@0.2.18 162 | * Update `mongodb`@2.0.15 163 | 164 | v1.0.1 / 2015-01-03 165 | ================== 166 | 167 | * Remove json formatter from logger 168 | * Update `logger-request`@3.2.7 169 | 170 | v1.0.0 / 2014-12-27 171 | ================== 172 | 173 | * Use mongodb logger 174 | * Use `mongodb` 2 175 | * Add "options" (options) 176 | 177 | v0.1.0 / 2014-12-27 178 | ================== 179 | 180 | * Add custom parser 181 | 182 | v0.0.2 / 2014-12-27 183 | ================== 184 | 185 | * Update documentation 186 | 187 | v0.0.1 / 2014-12-26 188 | ================== 189 | 190 | * Project ready 191 | 192 | v0.0.0 / 2014-12-26 193 | ================== 194 | 195 | * Project start 196 | -------------------------------------------------------------------------------- /Gruntfile.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | /** 3 | * @file gruntfile 4 | * @subpackage main 5 | * @version 0.0.1 6 | * @author hex7c0 7 | * @copyright hex7c0 2015 8 | * @license GPLv3 9 | */ 10 | 11 | module.exports = function(grunt) { 12 | 13 | grunt.initConfig({ 14 | pkg: grunt.file.readJSON('package.json'), 15 | banner: '/*\n' + ' * <%= pkg.name %> v<%= pkg.version %>\n' 16 | + ' * (c) <%= pkg.author.name %> <%= pkg.homepage %>\n' 17 | + ' * Licensed under <%= pkg.license %>\n' + ' */\n', 18 | 19 | clean: [ 'index.min.js', 'min/**/*.js' ], 20 | 21 | uglify: { 22 | target: { 23 | options: { 24 | mangle: false, 25 | beautify: true 26 | }, 27 | files: [ { 28 | expand: true, 29 | src: 'lib/**/*.js', 30 | dest: 'min' 31 | }, { 32 | expand: true, 33 | src: 'module/**/*.js', 34 | dest: 'min' 35 | }, { 36 | 'index.min.js': 'index.js' 37 | } ] 38 | } 39 | }, 40 | 41 | jshint: { 42 | options: { 43 | curly: true, 44 | indent: 2, 45 | quotmark: 'single', 46 | undef: true, 47 | unused: true, 48 | strict: true, 49 | node: true, 50 | // relax 51 | laxbreak: true, 52 | loopfunc: true, 53 | shadow: true 54 | }, 55 | target: { 56 | src: [ 'lib/**/*.js', 'module/**/*.js', 'index.js' ] 57 | } 58 | }, 59 | 60 | shell: { 61 | options: { 62 | failOnError: false 63 | }, 64 | docs: { 65 | command: 'jsdoc ./lib/*.js ./module/*.js -c .jsdoc.json' 66 | } 67 | }, 68 | 69 | safer: { 70 | target: { 71 | files: [ { 72 | src: 'lib/**/*.js' 73 | }, { 74 | src: 'module/**/*.js' 75 | }, { 76 | src: 'index.js', 77 | } ] 78 | } 79 | }, 80 | 81 | endline: { 82 | target: { 83 | files: [ { 84 | src: 'lib/**/*.js' 85 | }, { 86 | src: 'min/**/*.js' 87 | }, { 88 | src: 'module/**/*.js' 89 | }, { 90 | src: '*.js', 91 | } ] 92 | } 93 | } 94 | }); 95 | 96 | grunt.loadNpmTasks('grunt-contrib-uglify'); 97 | grunt.loadNpmTasks('grunt-contrib-jshint'); 98 | grunt.loadNpmTasks('grunt-endline'); 99 | grunt.loadNpmTasks('grunt-safer-regex'); 100 | 101 | grunt.registerTask('lint', [ 'jshint', 'safer' ]); 102 | 
grunt.registerTask('min', [ 'uglify', 'endline' ]); 103 | grunt.registerTask('default', [ 'lint', 'min' ]); 104 | 105 | return; 106 | }; 107 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 
61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 
179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # [mongodb-restore](https://github.com/hex7c0/mongodb-restore) 2 | 3 | [![NPM version](https://img.shields.io/npm/v/mongodb-restore.svg)](https://www.npmjs.com/package/mongodb-restore) 4 | [![Linux Status](https://img.shields.io/travis/hex7c0/mongodb-restore.svg?label=linux)](https://travis-ci.org/hex7c0/mongodb-restore) 5 | [![Windows Status](https://img.shields.io/appveyor/ci/hex7c0/mongodb-restore.svg?label=windows)](https://ci.appveyor.com/project/hex7c0/mongodb-restore) 6 | [![Dependency Status](https://img.shields.io/david/hex7c0/mongodb-restore.svg)](https://david-dm.org/hex7c0/mongodb-restore) 7 | [![Coveralls](https://img.shields.io/coveralls/hex7c0/mongodb-restore.svg)](https://coveralls.io/r/hex7c0/mongodb-restore) 8 | 9 | Restore data from [`mongodb-backup`](https://github.com/hex7c0/mongodb-backup) 10 | 11 | Look at [`mongodb-restore-cli`](https://github.com/hex7c0/mongodb-restore-cli) for command line usage, similar to [mongorestore](http://docs.mongodb.org/manual/reference/program/mongorestore/) 12 | 13 | ## Installation 14 | 15 | Install through NPM 16 | 17 | ```bash 18 | npm install mongodb-restore 19 | ``` 20 | or 21 | ```bash 22 | git clone git://github.com/hex7c0/mongodb-restore.git 23 | ``` 24 | 25 | `bson`@0.4.11 has been pulled from the registry, so versions >= `1.3.0` and <= `1.4.1` are deprecated 26 | 27 | ## API 28 | 29 | Inside a Node.js project 30 | ```js 31 | var restore = require('mongodb-restore'); 32 | 33 | restore({ 34 | uri: 'uri', // mongodb://<user>:<password>@<host>.mongolab.com:<port>/<database> 35 | root: __dirname + '/dbName' 36 | }); 37 | ``` 38 | 39 | ### restore(options) 40 | 41 | #### options 42 | 43 | - `uri` - **String** [URI](http://mongodb.github.io/node-mongodb-native/2.0/tutorials/urls/) for MongoDB connection *(default "required")* 44 | - `root` - **String** Path from which the backup is read *(default "required")* 45 | - `[parser]` - **String | Function** Data parser (bson, json) or custom *(default "bson")* 46 | - `[callback]` - **Function** Callback when done *(default "disabled")* 47 | - `[stream]` - **Object** Read the `.tar` backup from a Node stream *(default "disabled")* 48 | - `[tar]` - **String** Extract files from a `.tar` file *(default "disabled")* 49 | - `[logger]` - **String** Path where a `.log` file is saved *(default "disabled")* 50 | - `[metadata]` - **Boolean** Restore collection metadata, such as indexes *(default "false")* 51 | - `[drop]` - **Boolean** Drop every collection from the target database before restoring *(default "false")* 52 | - `[dropCollections]` - **Boolean|Array** Drop every non-system collection from the target database before restoring if Boolean (similar to the `drop` option), or only the selected collections if Array *(default "false")* 53 | - `[options]` - **Object** MongoDB connection [options](http://mongodb.github.io/node-mongodb-native/2.0/tutorials/connecting/#toc_7) *(default "{}")* 54 | 
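For instance, the options above can be combined to restore a `.tar` archive produced by [`mongodb-backup`](https://github.com/hex7c0/mongodb-backup) and recreate its saved indexes. A sketch (the URI placeholders, the `dump.tar` filename and the `logins` collection name are illustrative, not part of the API):

```js
var restore = require('mongodb-restore');

restore({
  uri: 'mongodb://<user>:<password>@<host>:<port>/<database>',
  root: __dirname, // directory that contains the tar archive
  tar: 'dump.tar', // illustrative archive name, created by mongodb-backup
  metadata: true, // recreate the indexes saved under ".metadata"
  dropCollections: [ 'logins' ], // drop only this collection before restoring
  callback: function(err) { // run once, after the last collection is written

    if (err) {
      console.error(err);
    } else {
      console.log('restore complete');
    }
  }
});
```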
55 | ## Examples 56 | 57 | Take a look at my [examples](examples) 58 | 59 | ### [License Apache2](LICENSE) 60 | -------------------------------------------------------------------------------- /appveyor.yml: -------------------------------------------------------------------------------- 1 | environment: 2 | matrix: 3 | - nodejs_version: "4" 4 | - nodejs_version: "5" 5 | - nodejs_version: "6" 6 | - nodejs_version: "7" 7 | cache: 8 | - node_modules 9 | install: 10 | - ps: Update-NodeJsInstallation (Get-NodeJsLatestBuild $env:nodejs_version) 11 | - if exist node_modules npm prune 12 | - if exist node_modules npm rebuild 13 | - npm install 14 | build: off 15 | test_script: 16 | - node --version 17 | - npm --version 18 | - npm test 19 | -------------------------------------------------------------------------------- /examples/callback.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | /** 3 | * @file callback example 4 | * @module mongodb-restore 5 | * @subpackage examples 6 | * @version 0.0.1 7 | * @author hex7c0 8 | * @license GPLv3 9 | */ 10 | 11 | /* 12 | * initialize module 13 | */ 14 | var restore = require('..'); // use require('mongodb-restore') instead 15 | 16 | /* 17 | * use 18 | */ 19 | restore({ 20 | uri: 'uri', // mongodb://<user>:<password>@<host>.mongolab.com:<port>/<database> 21 | root: __dirname, // read backup files from this dir 22 | callback: function(err) { 23 | 24 | if (err) { 25 | console.error(err); 26 | } else { 27 | console.log('finish'); 28 | } 29 | } 30 | }); 31 | -------------------------------------------------------------------------------- /examples/download.tar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hex7c0/mongodb-restore/f67183bd8fcff89ce9bf2ccc79ab8691488d7667/examples/download.tar -------------------------------------------------------------------------------- /examples/drop.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | /** 3 | * @file drop example 4 | * @module mongodb-restore 5 | * @subpackage examples 6 | * @version 0.0.1 7 | * @author hex7c0 8 | * @license GPLv3 9 | */ 10 | 11 | /* 12 | * initialize module 13 | */ 14 | var restore = require('..'); // use require('mongodb-restore') instead 15 | 16 | /* 17 | * use 18 | */ 19 | restore({ 20 | uri: 'uri', // mongodb://<user>:<password>@<host>.mongolab.com:<port>/<database> 21 | root: __dirname, // read backup(s) file(s) from this dir 22 | drop: true, // drop the entire database before restoring the backup 23 | }); 24 | -------------------------------------------------------------------------------- /examples/dropCollections.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | /** 3 | * @file dropCollections example 4 | * @module mongodb-restore 5 | * @subpackage examples 6 | * @version 0.0.1 7 | * @author hex7c0 8 | * @license GPLv3 9 | */ 
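// note: passing `dropCollections: true` instead of an Array drops every non-system collection (any name not matching /^system\./, see index.js) before restoring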
10 | 11 | /* 12 | * initialize module 13 | */ 14 | var restore = require('..'); // use require('mongodb-restore') instead 15 | 16 | /* 17 | * use 18 | */ 19 | restore({ 20 | uri: 'uri', // mongodb://<user>:<password>@<host>.mongolab.com:<port>/<database> 21 | root: __dirname, // read backup(s) file(s) from this dir 22 | dropCollections: [ 'login' ], // drop these collections before restoring 23 | }); 24 | -------------------------------------------------------------------------------- /examples/fs_stream.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | /** 3 | * @file fs stream example 4 | * @module mongodb-restore 5 | * @subpackage examples 6 | * @version 0.0.1 7 | * @author hex7c0 8 | * @license GPLv3 9 | */ 10 | 11 | /* 12 | * initialize module 13 | */ 14 | var restore = require('..'); // use require('mongodb-restore') instead 15 | var fs = require('fs'); 16 | 17 | /* 18 | * use 19 | */ 20 | var stream = fs.createReadStream('download.tar'); // simulate filesystem stream 21 | 22 | restore({ 23 | uri: 'uri', // mongodb://<user>:<password>@<host>.mongolab.com:<port>/<database> 24 | stream: stream, // send this stream into db 25 | callback: function(err) { // callback after restore 26 | 27 | console.log(err || 'done'); // report a restore error, if any 28 | } 29 | }); 30 | -------------------------------------------------------------------------------- /examples/simple.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | /** 3 | * @file simple example 4 | * @module mongodb-restore 5 | * @subpackage examples 6 | * @version 0.0.1 7 | * @author hex7c0 8 | * @license GPLv3 9 | */ 10 | 11 | /* 12 | * initialize module 13 | */ 14 | var restore = require('..'); // use require('mongodb-restore') instead 15 | 16 | /* 17 | * use 18 | */ 19 | restore({ 20 | uri: 'uri', // mongodb://<user>:<password>@<host>.mongolab.com:<port>/<database> 21 | root: __dirname, // read backup(s) file(s) from this dir 22 | }); 23 | -------------------------------------------------------------------------------- /examples/tar.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | /** 3 | * @file tar example 4 | * @module mongodb-restore 5 | * @subpackage examples 6 | * @version 0.0.1 7 | * @author hex7c0 8 | * @license GPLv3 9 | */ 10 | 11 | /* 12 | * initialize module 13 | */ 14 | var restore = require('..'); // use require('mongodb-restore') instead 15 | 16 | /* 17 | * use 18 | */ 19 | restore({ 20 | uri: 'uri', // mongodb://<user>:<password>@<host>.mongolab.com:<port>/<database> 21 | root: __dirname, // read tar file from this dir 22 | tar: 'dump.tar' // restore backup from this tar file 23 | }); 24 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | /** 3 | * @file mongodb-restore main 4 | * @module mongodb-restore 5 | * @subpackage main 6 | * @version 1.6.0 7 | * @author hex7c0 8 | * @copyright hex7c0 2014 9 | * @license GPLv3 10 | */ 11 | 12 | /* 13 | * initialize module 14 | */ 15 | var systemRegex = /^system\./; 16 | var fs = require('graceful-fs'); 17 | var path = require('path'); 18 | var BSON; 19 | var logger; 20 | var meta; 21 | 22 | /* 23 | * functions 24 | */ 25 | /** 26 | * error handler 27 | * 28 | * @function error 29 | * @param {Object} err - raised error 30 | */ 31 | function error(err) { 32 | 33 | if (err) { 34 | logger(err.message); 35 | } 36 | } 37 | 38 | /** 39 | * read collection metadata from file 40 | * 41 | * @function readMetadata 42 | * @param {Object} collection - db collection 43 |
* @param {String} metadata - path of metadata 44 | * @param {Function} next - callback 45 | */ 46 | function readMetadata(collection, metadata, next) { 47 | 48 | var doc, data; 49 | try { 50 | data = fs.readFileSync(metadata + collection.collectionName); 51 | } catch (err) { 52 | return next(null); 53 | } 54 | try { 55 | doc = JSON.parse(data); 56 | } catch (err) { 57 | return next(err); 58 | } 59 | 60 | var last = ~~doc.length, counter = 0; 61 | if (last === 0) { 62 | return next(null); 63 | } 64 | 65 | doc.forEach(function(index) { 66 | 67 | collection.createIndex(index.key, index, function(err) { 68 | 69 | if (err) { 70 | return last === ++counter ? next(err) : error(err); 71 | } 72 | return last === ++counter ? next(null) : null; 73 | }); 74 | }); 75 | } 76 | 77 | /** 78 | * make dir 79 | * 80 | * @function makeDir 81 | * @param {String} pathname - pathname of dir 82 | * @param {Function} next - callback 83 | */ 84 | function makeDir(pathname, next) { 85 | 86 | fs.stat(pathname, function(err, stats) { 87 | 88 | if (err && err.code === 'ENOENT') { 89 | logger('make dir at ' + pathname); 90 | return fs.mkdir(pathname, function(err) { 91 | 92 | next(err, pathname); 93 | }); 94 | 95 | } else if (stats && stats.isDirectory() === false) { 96 | logger('unlink file at ' + pathname); 97 | return fs.unlink(pathname, function() { 98 | 99 | logger('make dir at ' + pathname); 100 | fs.mkdir(pathname, function(err) { 101 | 102 | next(err, pathname); 103 | }); 104 | }); 105 | } 106 | 107 | next(null, pathname); 108 | }); 109 | } 110 | 111 | /** 112 | * remove dir 113 | * 114 | * @function rmDir 115 | * @param {String} pathname - path of dir 116 | * @param {Function} [next] - callback 117 | */ 118 | function rmDir(pathname, next) { 119 | 120 | fs.readdirSync(pathname).forEach(function(first) { // database 121 | 122 | var database = pathname + first; 123 | if (fs.statSync(database).isDirectory() === false) { 124 | return; 125 | } 126 | 127 | var metadata = ''; 128 | var collections = fs.readdirSync(database); 129 | var metadataPath = path.join(database, '.metadata'); 130 | if (fs.existsSync(metadataPath) === true) { 131 | metadata = metadataPath + path.sep; 132 | delete collections[collections.indexOf('.metadata')]; // undefined is not a dir 133 | } 134 | 135 | collections.forEach(function(second) { // collection 136 | 137 | var collection = path.join(database, second); 138 | if (fs.statSync(collection).isDirectory() === false) { 139 | return; 140 | } 141 | fs.readdirSync(collection).forEach(function(third) { // document 142 | 143 | var document = path.join(collection, third); 144 | fs.unlinkSync(document); 145 | return next ? 
next(null, document) : ''; 146 | }); 147 | 148 | if (metadata !== '') { 149 | fs.unlinkSync(metadata + second); 150 | } 151 | fs.rmdirSync(collection); 152 | }); 153 | 154 | if (metadata !== '') { 155 | fs.rmdirSync(metadata); 156 | } 157 | fs.rmdirSync(database); 158 | }); 159 | } 160 | 161 | /** 162 | * JSON parser 163 | * 164 | * @function fromJson 165 | * @param {Object} collection - collection model 166 | * @param {String} collectionPath - path of collection 167 | * @param {Function} next - callback 168 | */ 169 | function fromJson(collection, collectionPath, next) { 170 | 171 | var docsBulk = []; 172 | var docs = fs.readdirSync(collectionPath); 173 | var last = ~~docs.length, counter = 0; 174 | if (last === 0) { 175 | return next(null); 176 | } 177 | 178 | docs.forEach(function(docName) { 179 | 180 | var doc, data; 181 | try { 182 | data = fs.readFileSync(collectionPath + docName); 183 | } catch (err) { 184 | return last === ++counter ? next(null) : null; 185 | } 186 | try { 187 | doc = JSON.parse(data); 188 | } catch (err) { 189 | return last === ++counter ? next(err) : error(err); 190 | } 191 | 192 | docsBulk.push({ 193 | insertOne: { 194 | document: doc 195 | } 196 | }); 197 | 198 | return last === ++counter ? collection.bulkWrite(docsBulk, next) : null; 199 | }); 200 | } 201 | 202 | /** 203 | * BSON parser 204 | * 205 | * @function fromBson 206 | * @param {Object} collection - collection model 207 | * @param {String} collectionPath - path of collection 208 | * @param {Function} next - callback 209 | */ 210 | function fromBson(collection, collectionPath, next) { 211 | 212 | var docsBulk = []; 213 | var docs = fs.readdirSync(collectionPath); 214 | var last = ~~docs.length, counter = 0; 215 | if (last === 0) { 216 | return next(null); 217 | } 218 | 219 | docs.forEach(function(docName) { 220 | 221 | var doc, data; 222 | try { 223 | data = fs.readFileSync(collectionPath + docName); 224 | } catch (err) { 225 | return last === ++counter ? next(null) : null; 226 | } 227 | try { 228 | doc = BSON.deserialize(data); 229 | } catch (err) { 230 | return last === ++counter ? next(err) : error(err); 231 | } 232 | 233 | docsBulk.push({ 234 | insertOne: { 235 | document: doc 236 | } 237 | }); 238 | 239 | return last === ++counter ? collection.bulkWrite(docsBulk, next) : null; 240 | }); 241 | } 242 | 243 | /** 244 | * set data to all collections available 245 | * 246 | * @function allCollections 247 | * @param {Object} db - database 248 | * @param {String} name - path of database 249 | * @param {String} metadata - path of metadata 250 | * @param {Function} parser - data parser 251 | * @param {Function} next - callback 252 | */ 253 | function allCollections(db, name, metadata, parser, next) { 254 | 255 | var collections = fs.readdirSync(name); 256 | var last = ~~collections.length, counter = 0; 257 | if (last === 0) { // empty set 258 | return next(null); 259 | } 260 | 261 | if (collections.indexOf('.metadata') >= 0) { // undefined is not a dir 262 | delete collections[collections.indexOf('.metadata')]; 263 | last--; 264 | } 265 | 266 | collections.forEach(function(collectionName) { 267 | 268 | var collectionPath = name + collectionName; 269 | if (!fs.statSync(collectionPath).isDirectory()) { 270 | var err = new Error(collectionPath + ' is not a directory'); 271 | return last === ++counter ? next(err) : error(err); 272 | } 273 | db.createCollection(collectionName, function(err, collection) { 274 | 275 | if (err) { 276 | return last === ++counter ? 
next(err) : error(err); 277 | } 278 | logger('select collection ' + collectionName); 279 | meta(collection, metadata, function(err) { 280 | 281 | if (err) { 282 | error(err); 283 | } 284 | parser(collection, collectionPath + path.sep, function(err) { 285 | 286 | if (err) { 287 | return last === ++counter ? next(err) : error(err); 288 | } 289 | return last === ++counter ? next(null) : null; 290 | }); 291 | }); 292 | }); 293 | }); 294 | } 295 | 296 | /** 297 | * drop data from some collections 298 | * 299 | * @function someCollections 300 | * @param {Object} db - database 301 | * @param {Array} collections - selected collections 302 | * @param {Function} next - callback 303 | */ 304 | function someCollections(db, collections, next) { 305 | 306 | var last = ~~collections.length, counter = 0; 307 | if (last === 0) { // empty set 308 | return next(null); 309 | } 310 | 311 | collections.forEach(function(collection) { 312 | 313 | db.collection(collection, function(err, collection) { 314 | 315 | logger('select collection ' + collection.collectionName); 316 | if (err) { 317 | return last === ++counter ? next(err) : error(err); 318 | } 319 | collection.drop(function(err) { 320 | 321 | if (err) { 322 | error(err); // log if missing 323 | } 324 | return last === ++counter ? next(null) : null; 325 | }); 326 | }); 327 | }); 328 | } 329 | 330 | /** 331 | * function wrapper 332 | * 333 | * @function wrapper 334 | * @param {Object} my - parsed options 335 | */ 336 | function wrapper(my) { 337 | 338 | var parser; 339 | if (typeof my.parser === 'function') { 340 | parser = my.parser; 341 | } else { 342 | switch (my.parser.toLowerCase()) { 343 | case 'bson': 344 | BSON = require('bson'); 345 | BSON = new BSON(); 346 | parser = fromBson; 347 | break; 348 | case 'json': 349 | // JSON error on ObjectId and Date 350 | parser = fromJson; 351 | break; 352 | default: 353 | throw new Error('missing parser option'); 354 | } 355 | } 356 | 357 | var discriminator = allCollections; 358 | 359 | if (my.logger === null) { 360 | logger = function() { 361 | 362 | return; 363 | }; 364 | } else { 365 | logger = require('logger-request')({ 366 | filename: my.logger, 367 | standalone: true, 368 | daily: true, 369 | winston: { 370 | logger: '_mongo_r' + my.logger, 371 | level: 'info', 372 | json: false 373 | } 374 | }); 375 | logger('restore start'); 376 | var log = require('mongodb').Logger; 377 | log.setLevel('info'); 378 | log.setCurrentLogger(function(msg) { 379 | 380 | logger(msg); 381 | }); 382 | } 383 | 384 | var metadata = ''; 385 | if (my.metadata === true) { 386 | meta = readMetadata; 387 | } else { 388 | meta = function(a, b, c) { 389 | 390 | return c(); 391 | }; 392 | } 393 | 394 | /** 395 | * latest callback 396 | * 397 | * @return {Null} 398 | */ 399 | function callback(err) { 400 | 401 | logger('restore stop'); 402 | if (my.tar) { 403 | rmDir(my.dir); 404 | } 405 | 406 | if (my.callback !== null) { 407 | logger('callback run'); 408 | my.callback(err); 409 | 410 | } else if (err) { 411 | logger(err); 412 | } 413 | } 414 | 415 | /** 416 | * entry point 417 | * 418 | * @return {Null} 419 | */ 420 | function go(root) { 421 | 422 | if (my.metadata === true) { 423 | metadata = path.join(root, '.metadata', path.sep); 424 | } 425 | require('mongodb').MongoClient.connect(my.uri, my.options, 426 | function(err, db) { 427 | 428 | logger('db open'); 429 | if (err) { 430 | return callback(err); 431 | } 432 | 433 | function next(err) { 434 | 435 | if (err) { 436 | logger('db close'); 437 | db.close(); 438 | return callback(err); 
439 | } 440 | 441 | // waiting for `db.fsyncLock()` on node driver 442 | discriminator(db, root, metadata, parser, function(err) { 443 | 444 | logger('db close'); 445 | db.close(); 446 | callback(err); 447 | }); 448 | } 449 | 450 | if (my.drop === true) { 451 | logger('drop database'); 452 | return db.dropDatabase(next); 453 | 454 | } else if (my.dropCollections) { 455 | logger('drop collections'); 456 | if (Array.isArray(my.dropCollections) === true) { 457 | return someCollections(db, my.dropCollections, next); 458 | } 459 | return db.collections(function(err, collections) { 460 | 461 | if (err) { // log if missing 462 | error(err); 463 | } 464 | my.dropCollections = []; 465 | for (var i = 0, ii = collections.length; i < ii; ++i) { 466 | var collectionName = collections[i].collectionName; 467 | if (systemRegex.test(collectionName) === false) { 468 | my.dropCollections.push(collectionName); 469 | } 470 | } 471 | someCollections(db, my.dropCollections, next); 472 | }); 473 | } 474 | 475 | next(null); 476 | }); 477 | } 478 | 479 | if (!my.tar) { 480 | return go(my.root); 481 | } 482 | 483 | makeDir(my.dir, function() { 484 | 485 | var extractor = require('tar').Extract({ 486 | path: my.dir 487 | }).on('error', callback).on('end', function() { 488 | 489 | var dirs = fs.readdirSync(my.dir); 490 | for (var i = 0, ii = dirs.length; i < ii; ++i) { 491 | var t = my.dir + dirs[i]; 492 | if (fs.statSync(t).isFile() === false) { 493 | return go(t + path.sep); 494 | } 495 | } 496 | }); 497 | 498 | if (my.stream !== null) { // user stream 499 | logger('get tar file from stream'); 500 | my.stream.pipe(extractor); 501 | 502 | } else { // filesystem stream 503 | logger('open tar file at ' + my.root + my.tar); 504 | fs.createReadStream(my.root + my.tar).on('error', callback).pipe( 505 | extractor); 506 | } 507 | }); 508 | } 509 | 510 | /** 511 | * option setting 512 | * 513 | * @exports restore 514 | * @function restore 515 | * @param {Object} options - various options. Check README.md 516 | */ 517 | function restore(options) { 518 | 519 | var opt = options || Object.create(null); 520 | if (!opt.uri) { 521 | throw new Error('missing uri option'); 522 | } 523 | if (!opt.stream) { 524 | if (!opt.root) { 525 | throw new Error('missing root option'); 526 | } else if (!fs.existsSync(opt.root) || !fs.statSync(opt.root).isDirectory()) { 527 | throw new Error('root option is not a directory'); 528 | } 529 | } 530 | 531 | var my = { 532 | dir: path.join(__dirname, 'dump', path.sep), 533 | uri: String(opt.uri), 534 | root: path.resolve(String(opt.root)) + path.sep, 535 | stream: opt.stream || null, 536 | parser: opt.parser || 'bson', 537 | callback: typeof opt.callback === 'function' ? opt.callback : null, 538 | tar: typeof opt.tar === 'string' ? opt.tar : null, 539 | logger: typeof opt.logger === 'string' ? path.resolve(opt.logger) : null, 540 | metadata: Boolean(opt.metadata), 541 | drop: Boolean(opt.drop), 542 | dropCollections: Boolean(opt.dropCollections) ? opt.dropCollections : null, 543 | options: typeof opt.options === 'object' ? 
opt.options : {} 544 | }; 545 | if (my.stream) { 546 | my.tar = true; // override 547 | } 548 | wrapper(my); 549 | } 550 | module.exports = restore; 551 | -------------------------------------------------------------------------------- /index.min.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | function error(err) { 4 | err && logger(err.message); 5 | } 6 | 7 | function readMetadata(collection, metadata, next) { 8 | var doc, data; 9 | try { 10 | data = fs.readFileSync(metadata + collection.collectionName); 11 | } catch (err) { 12 | return next(null); 13 | } 14 | try { 15 | doc = JSON.parse(data); 16 | } catch (err) { 17 | return next(err); 18 | } 19 | var last = ~~doc.length, counter = 0; 20 | if (0 === last) return next(null); 21 | doc.forEach(function(index) { 22 | collection.createIndex(index.key, index, function(err) { 23 | return err ? last === ++counter ? next(err) : error(err) : last === ++counter ? next(null) : null; 24 | }); 25 | }); 26 | } 27 | 28 | function makeDir(pathname, next) { 29 | fs.stat(pathname, function(err, stats) { 30 | return err && "ENOENT" === err.code ? (logger("make dir at " + pathname), fs.mkdir(pathname, function(err) { 31 | next(err, pathname); 32 | })) : stats && !1 === stats.isDirectory() ? (logger("unlink file at " + pathname), 33 | fs.unlink(pathname, function() { 34 | logger("make dir at " + pathname), fs.mkdir(pathname, function(err) { 35 | next(err, pathname); 36 | }); 37 | })) : void next(null, pathname); 38 | }); 39 | } 40 | 41 | function rmDir(pathname, next) { 42 | fs.readdirSync(pathname).forEach(function(first) { 43 | var database = pathname + first; 44 | if (!1 !== fs.statSync(database).isDirectory()) { 45 | var metadata = "", collections = fs.readdirSync(database), metadataPath = path.join(database, ".metadata"); 46 | !0 === fs.existsSync(metadataPath) && (metadata = metadataPath + path.sep, delete collections[collections.indexOf(".metadata")]), 47 | collections.forEach(function(second) { 48 | var collection = path.join(database, second); 49 | !1 !== fs.statSync(collection).isDirectory() && (fs.readdirSync(collection).forEach(function(third) { 50 | var document = path.join(collection, third); 51 | return fs.unlinkSync(document), next ? next(null, document) : ""; 52 | }), "" !== metadata && fs.unlinkSync(metadata + second), fs.rmdirSync(collection)); 53 | }), "" !== metadata && fs.rmdirSync(metadata), fs.rmdirSync(database); 54 | } 55 | }); 56 | } 57 | 58 | function fromJson(collection, collectionPath, next) { 59 | var docsBulk = [], docs = fs.readdirSync(collectionPath), last = ~~docs.length, counter = 0; 60 | if (0 === last) return next(null); 61 | docs.forEach(function(docName) { 62 | var doc, data; 63 | try { 64 | data = fs.readFileSync(collectionPath + docName); 65 | } catch (err) { 66 | return last === ++counter ? next(null) : null; 67 | } 68 | try { 69 | doc = JSON.parse(data); 70 | } catch (err) { 71 | return last === ++counter ? next(err) : error(err); 72 | } 73 | return docsBulk.push({ 74 | insertOne: { 75 | document: doc 76 | } 77 | }), last === ++counter ? 
collection.bulkWrite(docsBulk, next) : null; 78 | }); 79 | } 80 | 81 | function fromBson(collection, collectionPath, next) { 82 | var docsBulk = [], docs = fs.readdirSync(collectionPath), last = ~~docs.length, counter = 0; 83 | if (0 === last) return next(null); 84 | docs.forEach(function(docName) { 85 | var doc, data; 86 | try { 87 | data = fs.readFileSync(collectionPath + docName); 88 | } catch (err) { 89 | return last === ++counter ? next(null) : null; 90 | } 91 | try { 92 | doc = BSON.deserialize(data); 93 | } catch (err) { 94 | return last === ++counter ? next(err) : error(err); 95 | } 96 | return docsBulk.push({ 97 | insertOne: { 98 | document: doc 99 | } 100 | }), last === ++counter ? collection.bulkWrite(docsBulk, next) : null; 101 | }); 102 | } 103 | 104 | function allCollections(db, name, metadata, parser, next) { 105 | var collections = fs.readdirSync(name), last = ~~collections.length, counter = 0; 106 | if (0 === last) return next(null); 107 | collections.indexOf(".metadata") >= 0 && (delete collections[collections.indexOf(".metadata")], 108 | last--), collections.forEach(function(collectionName) { 109 | var collectionPath = name + collectionName; 110 | if (!fs.statSync(collectionPath).isDirectory()) { 111 | var err = new Error(collectionPath + " is not a directory"); 112 | return last === ++counter ? next(err) : error(err); 113 | } 114 | db.createCollection(collectionName, function(err, collection) { 115 | if (err) return last === ++counter ? next(err) : error(err); 116 | logger("select collection " + collectionName), meta(collection, metadata, function(err) { 117 | err && error(err), parser(collection, collectionPath + path.sep, function(err) { 118 | return err ? last === ++counter ? next(err) : error(err) : last === ++counter ? next(null) : null; 119 | }); 120 | }); 121 | }); 122 | }); 123 | } 124 | 125 | function someCollections(db, collections, next) { 126 | var last = ~~collections.length, counter = 0; 127 | if (0 === last) return next(null); 128 | collections.forEach(function(collection) { 129 | db.collection(collection, function(err, collection) { 130 | if (logger("select collection " + collection.collectionName), err) return last === ++counter ? next(err) : error(err); 131 | collection.drop(function(err) { 132 | return err && error(err), last === ++counter ? next(null) : null; 133 | }); 134 | }); 135 | }); 136 | } 137 | 138 | function wrapper(my) { 139 | function callback(err) { 140 | logger("restore stop"), my.tar && rmDir(my.dir), null !== my.callback ? (logger("callback run"), 141 | my.callback(err)) : err && logger(err); 142 | } 143 | function go(root) { 144 | !0 === my.metadata && (metadata = path.join(root, ".metadata", path.sep)), require("mongodb").MongoClient.connect(my.uri, my.options, function(err, db) { 145 | function next(err) { 146 | if (err) return logger("db close"), db.close(), callback(err); 147 | discriminator(db, root, metadata, parser, function(err) { 148 | logger("db close"), db.close(), callback(err); 149 | }); 150 | } 151 | return logger("db open"), err ? callback(err) : !0 === my.drop ? (logger("drop database"), 152 | db.dropDatabase(next)) : my.dropCollections ? (logger("drop collections"), !0 === Array.isArray(my.dropCollections) ? 
someCollections(db, my.dropCollections, next) : db.collections(function(err, collections) { 153 | err && error(err), my.dropCollections = []; 154 | for (var i = 0, ii = collections.length; i < ii; ++i) { 155 | var collectionName = collections[i].collectionName; 156 | !1 === systemRegex.test(collectionName) && my.dropCollections.push(collectionName); 157 | } 158 | someCollections(db, my.dropCollections, next); 159 | })) : void next(null); 160 | }); 161 | } 162 | var parser; 163 | if ("function" == typeof my.parser) parser = my.parser; else switch (my.parser.toLowerCase()) { 164 | case "bson": 165 | BSON = require("bson"), BSON = new BSON(), parser = fromBson; 166 | break; 167 | 168 | case "json": 169 | parser = fromJson; 170 | break; 171 | 172 | default: 173 | throw new Error("missing parser option"); 174 | } 175 | var discriminator = allCollections; 176 | if (null === my.logger) logger = function() {}; else { 177 | (logger = require("logger-request")({ 178 | filename: my.logger, 179 | standalone: !0, 180 | daily: !0, 181 | winston: { 182 | logger: "_mongo_r" + my.logger, 183 | level: "info", 184 | json: !1 185 | } 186 | }))("restore start"); 187 | var log = require("mongodb").Logger; 188 | log.setLevel("info"), log.setCurrentLogger(function(msg) { 189 | logger(msg); 190 | }); 191 | } 192 | var metadata = ""; 193 | if (meta = !0 === my.metadata ? readMetadata : function(a, b, c) { 194 | return c(); 195 | }, !my.tar) return go(my.root); 196 | makeDir(my.dir, function() { 197 | var extractor = require("tar").Extract({ 198 | path: my.dir 199 | }).on("error", callback).on("end", function() { 200 | for (var dirs = fs.readdirSync(my.dir), i = 0, ii = dirs.length; i < ii; ++i) { 201 | var t = my.dir + dirs[i]; 202 | if (!1 === fs.statSync(t).isFile()) return go(t + path.sep); 203 | } 204 | }); 205 | null !== my.stream ? (logger("get tar file from stream"), my.stream.pipe(extractor)) : (logger("open tar file at " + my.root + my.tar), 206 | fs.createReadStream(my.root + my.tar).on("error", callback).pipe(extractor)); 207 | }); 208 | } 209 | 210 | function restore(options) { 211 | var opt = options || Object.create(null); 212 | if (!opt.uri) throw new Error("missing uri option"); 213 | if (!opt.stream) { 214 | if (!opt.root) throw new Error("missing root option"); 215 | if (!fs.existsSync(opt.root) || !fs.statSync(opt.root).isDirectory()) throw new Error("root option is not a directory"); 216 | } 217 | var my = { 218 | dir: path.join(__dirname, "dump", path.sep), 219 | uri: String(opt.uri), 220 | root: path.resolve(String(opt.root)) + path.sep, 221 | stream: opt.stream || null, 222 | parser: opt.parser || "bson", 223 | callback: "function" == typeof opt.callback ? opt.callback : null, 224 | tar: "string" == typeof opt.tar ? opt.tar : null, 225 | logger: "string" == typeof opt.logger ? path.resolve(opt.logger) : null, 226 | metadata: Boolean(opt.metadata), 227 | drop: Boolean(opt.drop), 228 | dropCollections: Boolean(opt.dropCollections) ? opt.dropCollections : null, 229 | options: "object" == typeof opt.options ? 
opt.options : {} 230 | }; 231 | my.stream && (my.tar = !0), wrapper(my); 232 | } 233 | 234 | var systemRegex = /^system\./, fs = require("graceful-fs"), path = require("path"), BSON, logger, meta; 235 | 236 | module.exports = restore; 237 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "1.6.2", 3 | "name": "mongodb-restore", 4 | "description": "restore data from mongodb-backup", 5 | "keywords": [ 6 | "mongodb", 7 | "restore", 8 | "dump" 9 | ], 10 | "preferGlobal": false, 11 | "homepage": "https://github.com/hex7c0/mongodb-restore", 12 | "author": { 13 | "name": "hex7c0", 14 | "email": "hex7c0@gmail.com", 15 | "url": "https://hex7c0.github.io/" 16 | }, 17 | "repository": { 18 | "type": "git", 19 | "url": "https://github.com/hex7c0/mongodb-restore.git" 20 | }, 21 | "bugs": { 22 | "url": "https://github.com/hex7c0/mongodb-restore/issues", 23 | "email": "hex7c0@gmail.com" 24 | }, 25 | "main": "index.min.js", 26 | "dependencies": { 27 | "bson": "1.0.4", 28 | "graceful-fs": "4.1.11", 29 | "logger-request": "3.8.0", 30 | "mongodb": "2.2.26", 31 | "tar": "2.2.1" 32 | }, 33 | "devDependencies": { 34 | "grunt": "~1.0", 35 | "grunt-contrib-uglify": "~3.0", 36 | "grunt-contrib-jshint": "~1.1", 37 | "grunt-endline": "~0.6", 38 | "grunt-safer-regex": "~0.0", 39 | "istanbul": "~0.4", 40 | "mocha": "~3.4", 41 | "mongodb-backup": "~1.6" 42 | }, 43 | "engines": { 44 | "node": ">=4" 45 | }, 46 | "scripts": { 47 | "prepublish": "npm prune", 48 | "test": "mocha --bail --check-leaks --globals Promise --timeout 15000", 49 | "test-cov": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --timeout 15000" 50 | }, 51 | "license": "Apache-2.0" 52 | } 53 | -------------------------------------------------------------------------------- /test/1.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | /** 3 | * @file 1 test 4 | * @module mongodb-restore 5 | * @subpackage test 6 | * @version 0.0.1 7 | * @author hex7c0 8 | * @license GPLv3 9 | */ 10 | 11 | /* 12 | * initialize module 13 | */ 14 | var backup = require('mongodb-backup'); 15 | var assert = require('assert'); 16 | var fs = require('fs'); 17 | var client = require('mongodb').MongoClient; 18 | var URI = process.env.URI; 19 | var URI2 = process.env.URI2; 20 | 21 | /* 22 | * test module 23 | */ 24 | describe('start', function() { 25 | 26 | var ROOT = __dirname + '/dump'; 27 | 28 | describe('mongodb-backup', function() { 29 | 30 | describe('directory', function() { 31 | 32 | it('should build 1 directory and drop database', function(done) { 33 | 34 | backup({ 35 | uri: URI2, 36 | root: ROOT, 37 | collections: [ 'auths' ], 38 | metadata: true, 39 | callback: function(err) { 40 | 41 | assert.ifError(err); 42 | fs.readdirSync(ROOT).forEach(function(first) { // database 43 | 44 | var database = ROOT + '/' + first; 45 | assert.equal(fs.statSync(database).isDirectory(), true); 46 | var second = fs.readdirSync(database); 47 | assert.equal(second.indexOf('auths') >= 0, true); 48 | }); 49 | done(); 50 | } 51 | }); 52 | }); 53 | }); 54 | 55 | describe('tar', function() { 56 | 57 | var path0 = ROOT + '/t1.tar'; 58 | var path1 = ROOT + '/t_stream.tar'; 59 | 60 | it('should make a tar file', function(done) { 61 | 62 | backup({ 63 | uri: URI2, 64 | root: ROOT, 65 | tar: 't1.tar', 66 | callback: function(err) { 67 | 68 | assert.ifError(err); 69 | 
assert.equal(fs.existsSync(path0), true); 70 | done(); 71 | } 72 | }); 73 | }); 74 | it('should make a tar file for stream', function(done) { 75 | 76 | backup({ 77 | uri: URI2, 78 | root: ROOT, 79 | collections: [ 'logins' ], 80 | tar: 't_stream.tar', 81 | callback: function(err) { 82 | 83 | assert.ifError(err); 84 | assert.equal(fs.existsSync(path1), true); 85 | done(); 86 | } 87 | }); 88 | }); 89 | }); 90 | }); 91 | }); 92 | -------------------------------------------------------------------------------- /test/directory.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | /** 3 | * @file directory test 4 | * @module mongodb-restore 5 | * @subpackage test 6 | * @version 0.0.1 7 | * @author hex7c0 8 | * @license GPLv3 9 | */ 10 | 11 | /* 12 | * initialize module 13 | */ 14 | var restore = require('..'); 15 | var assert = require('assert'); 16 | var fs = require('fs'); 17 | var client = require('mongodb').MongoClient; 18 | var URI = process.env.URI; 19 | var URI2 = process.env.URI2; 20 | 21 | var pad = function(val, len) { 22 | 23 | var val = String(val); 24 | var len = len || 2; 25 | while (val.length < len) { 26 | val = '0' + val; 27 | } 28 | return val; 29 | }; 30 | 31 | /* 32 | * test module 33 | */ 34 | describe('directory', function() { 35 | 36 | var DOCS = {}; 37 | var ROOT = __dirname + '/dump/'; 38 | var COLLECTION = ''; 39 | var INDEX = []; 40 | 41 | it('should get db directory, not tar file', function(done) { 42 | 43 | fs.readdirSync(ROOT).forEach(function(first) { // database 44 | 45 | var t = ROOT + first; 46 | if (!fs.existsSync(t) || !fs.statSync(t).isDirectory()) { 47 | return; 48 | } 49 | ROOT += first; 50 | done(); 51 | }); 52 | }); 53 | it('should get original data from db', function(done) { 54 | 55 | var second = fs.readdirSync(ROOT); 56 | assert.equal(second.length, 2); // .metadata + collection 57 | assert.equal(second[1], 'auths'); 58 | COLLECTION = second[1]; 59 | client.connect(URI2, function(err, db) { 60 | 61 | db.collection(COLLECTION, function(err, collection) { 62 | 63 | assert.ifError(err); 64 | collection.indexes(function(err, index) { 65 | 66 | assert.ifError(err); 67 | INDEX = index; 68 | collection.find({}, { 69 | sort: { 70 | _id: 1 71 | } 72 | }).toArray(function(err, docs) { 73 | 74 | assert.equal(Array.isArray(docs), true); 75 | assert.equal(docs.length > 0, true); 76 | DOCS = docs; 77 | done(); 78 | }); 79 | }); 80 | }); 81 | }); 82 | }); 83 | 84 | describe('restore', function() { 85 | 86 | var l = 'l1.log'; 87 | var date = new Date(); 88 | var dailyF = date.getUTCFullYear() + '-' + pad(date.getUTCMonth() + 1) 89 | + '-' + pad(date.getUTCDate()) + '.' 
+ l; 90 | it('should check that log file does not exist before test', function(done) { 91 | 92 | assert.equal(fs.existsSync(l), false); 93 | assert.equal(fs.existsSync(dailyF), false); 94 | done(); 95 | }); 96 | it('should save data to db, with duplicated _id', function(done) { 97 | 98 | restore({ 99 | uri: URI, 100 | root: ROOT, 101 | logger: l, 102 | metadata: true, 103 | drop: false, // for coverage 104 | callback: function(err) { 105 | 106 | assert.notEqual(err, null); 107 | assert.ok(/duplicate key error index/.test(err.message)); 108 | setTimeout(done, 500); // time for mongod 109 | } 110 | }); 111 | }); 112 | it('should test original data and saved data', function(done) { 113 | 114 | client.connect(URI, function(err, db) { 115 | 116 | db.listCollections({}).toArray(function(err, items) { 117 | 118 | assert.ifError(err); 119 | // assert.equal(items.length, 2); // collection + indexes 120 | assert.equal(items.length >= 2, true); // travis same workspace 121 | db.collection(COLLECTION, function(err, collection) { 122 | 123 | assert.ifError(err); 124 | collection.indexes(function(err, index) { 125 | 126 | assert.ifError(err); 127 | assert.equal(index.length, INDEX.length); 128 | for (var i = 0, ii = index.length; i < ii; i++) { // remove db related data 129 | delete index[i].ns; 130 | delete INDEX[i].ns; 131 | } 132 | assert.equal(index[0].name, INDEX[0].name); 133 | // assert.deepEqual(index, INDEX); // does not work on travis, but it's ok in a local instance 134 | collection.find({}, { 135 | sort: { 136 | _id: 1 137 | } 138 | }).toArray(function(err, docs) { 139 | 140 | assert.ifError(err); 141 | assert.deepEqual(docs, DOCS); // same as above 142 | done(); 143 | }); 144 | }); 145 | }); 146 | }); 147 | }); 148 | }); 149 | it('should remove log', function(done) { 150 | 151 | fs.unlink(dailyF, done); 152 | }); 153 | }); 154 | 155 | describe('restore - drop', function() { 156 | 157 | var l = 'ld1.log'; 158 | var date = new Date(); 159 | var dailyF = date.getUTCFullYear() + '-' + pad(date.getUTCMonth() + 1) 160 | + '-' + pad(date.getUTCDate()) + '.' + l; 161 | it('should check that log file does not exist before test', function(done) { 162 | 163 | assert.equal(fs.existsSync(l), false); 164 | assert.equal(fs.existsSync(dailyF), false); 165 | done(); 166 | }); 167 | it('should save data to db', function(done) { 168 | 169 | restore({ 170 | uri: URI, 171 | root: ROOT, 172 | logger: l, 173 | metadata: true, 174 | drop: true, 175 | callback: function(err) { 176 | 177 | assert.ifError(err); 178 | setTimeout(done, 500); // time for mongod 179 | } 180 | }); 181 | }); 182 | it('should test original data and saved data', function(done) { 183 | 184 | client.connect(URI, function(err, db) { 185 | 186 | db.listCollections({}).toArray(function(err, items) { 187 | 188 | assert.ifError(err); 189 | // assert.equal(items.length, 2); // collection + indexes 190 | assert.equal(items.length >= 2, true); // travis same workspace 191 | db.collection(COLLECTION, function(err, collection) { 192 | 193 | assert.ifError(err); 194 | collection.indexes(function(err, index) { 195 | 196 | assert.ifError(err); 197 | assert.equal(index.length, INDEX.length); 198 | for (var i = 0, ii = index.length; i < ii; i++) { // remove db related data 199 | delete index[i].ns; 200 | delete INDEX[i].ns; 201 | } 202 | assert.equal(index[0].name, INDEX[0].name); 203 | // assert.deepEqual(index, INDEX); // does not work on travis, but it's ok in a local instance
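// Note on the `ns` deletions above: an index's `ns` field is
// "<database>.<collection>", and this suite restores into a different
// database (URI) than the one it backed up from (URI2), so `ns` can never
// match and is stripped from both sides before any comparison.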
204 | collection.find({}, { 205 | sort: { 206 | _id: 1 207 | } 208 | }).toArray(function(err, docs) { 209 | 210 | assert.ifError(err); 211 | assert.deepEqual(docs, DOCS); // same as above 212 | done(); 213 | }); 214 | }); 215 | }); 216 | }); 217 | }); 218 | }); 219 | it('should remove log', function(done) { 220 | 221 | fs.unlink(dailyF, done); 222 | }); 223 | }); 224 | 225 | describe('restore - dropCollections without Array', function() { 226 | 227 | var l = 'ldc1.log'; 228 | var date = new Date(); 229 | var dailyF = date.getUTCFullYear() + '-' + pad(date.getUTCMonth() + 1) 230 | + '-' + pad(date.getUTCDate()) + '.' + l; 231 | it('should check that log file does not exist before test', function(done) { 232 | 233 | assert.equal(fs.existsSync(l), false); 234 | assert.equal(fs.existsSync(dailyF), false); 235 | done(); 236 | }); 237 | it('should save data to db', function(done) { 238 | 239 | restore({ 240 | uri: URI, 241 | root: ROOT, 242 | logger: l, 243 | metadata: true, 244 | dropCollections: true, 245 | callback: function(err) { 246 | 247 | assert.ifError(err); 248 | setTimeout(done, 500); // time for mongod 249 | } 250 | }); 251 | }); 252 | it('should test original data and saved data', function(done) { 253 | 254 | client.connect(URI, function(err, db) { 255 | 256 | db.listCollections({}).toArray(function(err, items) { 257 | 258 | assert.ifError(err); 259 | // assert.equal(items.length, 2); // collection + indexes 260 | assert.equal(items.length >= 2, true); // travis same workspace 261 | db.collection(COLLECTION, function(err, collection) { 262 | 263 | assert.ifError(err); 264 | collection.indexes(function(err, index) { 265 | 266 | assert.ifError(err); 267 | assert.equal(index.length, INDEX.length); 268 | for (var i = 0, ii = index.length; i < ii; i++) { // remove db related data 269 | delete index[i].ns; 270 | delete INDEX[i].ns; 271 | } 272 | assert.equal(index[0].name, INDEX[0].name); 273 | // assert.deepEqual(index, INDEX); // does not work on travis, but it's ok in a local instance 274 | collection.find({}, { 275 | sort: { 276 | _id: 1 277 | } 278 | }).toArray(function(err, docs) { 279 | 280 | assert.ifError(err); 281 | assert.deepEqual(docs, DOCS); // same as above 282 | done(); 283 | }); 284 | }); 285 | }); 286 | }); 287 | }); 288 | }); 289 | it('should remove log', function(done) { 290 | 291 | fs.unlink(dailyF, done); 292 | }); 293 | }); 294 | 295 | describe('restore - dropCollections with Array', function() { 296 | 297 | var l = 'lda1.log'; 298 | var date = new Date(); 299 | var dailyF = date.getUTCFullYear() + '-' + pad(date.getUTCMonth() + 1) 300 | + '-' + pad(date.getUTCDate()) + '.' + l;
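// `logger: l` routes restore's logging through logger-request, which (as
// these tests assume) writes one file per UTC day: a logger named
// 'lda1.log' lands in '2016-12-13.lda1.log' on 2016-12-13. `dailyF`
// rebuilds that name with the zero-padding helper `pad` so the cleanup
// steps can unlink it afterwards.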
301 | it('should check that log file does not exist before test', function(done) { 302 | 303 | assert.equal(fs.existsSync(l), false); 304 | assert.equal(fs.existsSync(dailyF), false); 305 | done(); 306 | }); 307 | it('should test original data and saved data', function(done) { 308 | 309 | client.connect(URI, function(err, db) { 310 | 311 | db.listCollections({}).toArray(function(err, items) { 312 | 313 | assert.ifError(err); 314 | // assert.equal(items.length, 2); // collection + indexes 315 | assert.equal(items.length >= 2, true); // travis same workspace 316 | db.collection(COLLECTION, function(err, collection) { 317 | 318 | assert.ifError(err); 319 | collection.indexes(function(err, index) { 320 | 321 | assert.ifError(err); 322 | assert.equal(index.length, INDEX.length); 323 | for (var i = 0, ii = index.length; i < ii; i++) { // remove db related data 324 | delete index[i].ns; 325 | delete INDEX[i].ns; 326 | } 327 | assert.equal(index[0].name, INDEX[0].name); 328 | // assert.deepEqual(index, INDEX); // does not work on travis, but it's ok in a local instance 329 | collection.find({}, { 330 | sort: { 331 | _id: 1 332 | } 333 | }).toArray(function(err, docs) { 334 | 335 | assert.ifError(err); 336 | assert.deepEqual(docs, DOCS); // same as above 337 | done(); 338 | }); 339 | }); 340 | }); 341 | }); 342 | }); 343 | }); 344 | }); 345 | 346 | describe('restore - dropCollections wrong Array', function() { 347 | 348 | var l = 'ldw1.log'; 349 | var date = new Date(); 350 | var dailyF = date.getUTCFullYear() + '-' + pad(date.getUTCMonth() + 1) 351 | + '-' + pad(date.getUTCDate()) + '.' + l; 352 | it('should check that log file does not exist before test', function(done) { 353 | 354 | assert.equal(fs.existsSync(l), false); 355 | assert.equal(fs.existsSync(dailyF), false); 356 | done(); 357 | }); 358 | it('should save data to db', function(done) { 359 | 360 | var path = require('path'); 361 | 362 | restore({ 363 | uri: URI, 364 | root: ROOT + path.sep, 365 | logger: l, 366 | metadata: true, 367 | dropCollections: {}, 368 | callback: function(err) { 369 | 370 | assert.ifError(err); 371 | setTimeout(done, 500); // time for mongod 372 | } 373 | }); 374 | }); 375 | it('should test original data and saved data', function(done) { 376 | 377 | client.connect(URI, function(err, db) { 378 | 379 | db.listCollections({}).toArray(function(err, items) { 380 | 381 | assert.ifError(err); 382 | // assert.equal(items.length, 2); // collection + indexes 383 | assert.equal(items.length >= 2, true); // travis same workspace 384 | db.collection(COLLECTION, function(err, collection) { 385 | 386 | assert.ifError(err); 387 | collection.indexes(function(err, index) { 388 | 389 | assert.ifError(err); 390 | assert.equal(index.length, INDEX.length); 391 | for (var i = 0, ii = index.length; i < ii; i++) { // remove db related data 392 | delete index[i].ns; 393 | delete INDEX[i].ns; 394 | } 395 | assert.equal(index[0].name, INDEX[0].name); 396 | // assert.deepEqual(index, INDEX); // does not work on travis, but it's ok in a local instance
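// Taken together, these describe blocks pin down the `dropCollections`
// contract: `true` drops every dumped collection before restoring it, an
// Array limits the drop to the listed collections, and a value of the
// wrong type (the `{}` passed above) is tolerated rather than fatal --
// the callback still completes without an error.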
397 | collection.find({}, { 398 | sort: { 399 | _id: 1 400 | } 401 | }).toArray(function(err, docs) { 402 | 403 | assert.ifError(err); 404 | assert.deepEqual(docs, DOCS); // same as above 405 | done(); 406 | }); 407 | }); 408 | }); 409 | }); 410 | }); 411 | }); 412 | it('should remove log', function(done) { 413 | 414 | fs.unlink(dailyF, done); 415 | }); 416 | }); 417 | }); 418 | -------------------------------------------------------------------------------- /test/empty_directory.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | /** 3 | * @file empty directory test 4 | * @module mongodb-restore 5 | * @subpackage test 6 | * @version 0.0.1 7 | * @author hex7c0 8 | * @license GPLv3 9 | */ 10 | 11 | /* 12 | * initialize module 13 | */ 14 | var restore = require('..'); 15 | var assert = require('assert'); 16 | var fs = require('fs'); 17 | var client = require('mongodb').MongoClient; 18 | var URI = process.env.URI; 19 | var URI2 = process.env.URI2; 20 | 21 | /* 22 | * test module 23 | */ 24 | describe('empty directory', function() { 25 | 26 | var ROOT = __dirname; 27 | 28 | describe('issue10 - error handling crash', function() { 29 | 30 | it('should create another dir inside ROOT path', function(done) { 31 | 32 | fs.mkdir(ROOT + '/foobar', done); 33 | }); 34 | it('should save nothing, because path is empty (corrupt bson message)', 35 | function(done) { 36 | 37 | restore({ 38 | uri: URI, 39 | root: ROOT, 40 | metadata: true, 41 | callback: function(err) { 42 | 43 | assert.equal(err, null); 44 | done(); 45 | } 46 | }); 47 | }); 48 | it('should save nothing, because path is empty (corrupt json message)', 49 | function(done) { 50 | 51 | restore({ 52 | uri: URI, 53 | root: ROOT, 54 | parser: 'json', 55 | metadata: true, 56 | callback: function(err) { 57 | 58 | assert.equal(err, null); 59 | done(); 60 | } 61 | }); 62 | }); 63 | it('should delete dir inside ROOT path', function(done) { 64 | 65 | fs.rmdir(ROOT + '/foobar', done); 66 | }); 67 | }); 68 | }); 69 | -------------------------------------------------------------------------------- /test/error.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | /** 3 | * @file error test 4 | * @module mongodb-restore 5 | * @subpackage test 6 | * @version 0.0.1 7 | * @author hex7c0 8 | * @license GPLv3 9 | */ 10 | 11 | /* 12 | * initialize module 13 | */ 14 | var restore = require('..'); 15 | var assert = require('assert'); 16 | 17 | /* 18 | * test module 19 | */ 20 | describe('error', function() { 21 | 22 | it('should return missing uri', function(done) { 23 | 24 | var mex = /missing uri option/; 25 | assert.throws(function() { 26 | 27 | restore(); 28 | }, mex); 29 | assert.throws(function() { 30 | 31 | restore({}); 32 | }, mex); 33 | assert.throws(function() { 34 | 35 | restore({ 36 | root: 'ciao' 37 | }); 38 | }, mex); 39 | done(); 40 | }); 41 | it('should return missing parser', function(done) { 42 | 43 | var mex = /missing parser option/; 44 | assert.throws(function() { 45 | 46 | restore({ 47 | uri: 'ciao', 48 | root: __dirname, 49 | parser: 'ciao' 50 | }); 51 | }, mex); 52 | done(); 53 | }); 54 | it('should return wrong uri', function(done) { 55 | 56 | var mex = /invalid schema, expected mongodb/; 57 | assert.throws(function() { 58 | 59 | restore({ 60 | uri: 'ciao', 61 | root: __dirname 62 | }); 63 | }, mex); 64 | done(); 65 | }); 66 | 67 | describe('root', function() { 68 | 69 | it('should return missing root', function(done) {
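// restore() validates its options synchronously and throws, hence the
// assert.throws pattern throughout this file: `uri` is required first,
// then `root` (checked below), and `parser` must name a built-in parser
// ('json', or the default 'bson') or be a function -- the unknown string
// 'ciao' above is rejected with /missing parser option/.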
70 | 71 | var mex = /missing root option/; 72 | assert.throws(function() { 73 | 74 | restore({ 75 | uri: 'ciao' 76 | }); 77 | }, mex); 78 | done(); 79 | }); 80 | it('should return wrong root (not exists)', function(done) { 81 | 82 | var mex = /root option is not a directory/; 83 | assert.throws(function() { 84 | 85 | restore({ 86 | uri: 'ciao', 87 | root: 'ciao' 88 | }); 89 | }, mex); 90 | done(); 91 | }); 92 | it('should return wrong root (malformed path, not exists)', function(done) { 93 | 94 | var mex = /root option is not a directory/; 95 | assert.throws(function() { 96 | 97 | restore({ 98 | uri: 'ciao', 99 | root: __dirname + 'error.js' 100 | }); 101 | }, mex); 102 | done(); 103 | }); 104 | it('should return wrong root (not dir)', function(done) { 105 | 106 | var mex = /root option is not a directory/; 107 | assert.throws(function() { 108 | 109 | restore({ 110 | uri: 'ciao', 111 | root: __dirname + '/error.js' 112 | }); 113 | }, mex); 114 | done(); 115 | }); 116 | }); 117 | }); 118 | -------------------------------------------------------------------------------- /test/indexes.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | /** 3 | * @file indexes test 4 | * @module mongodb-restore 5 | * @subpackage test 6 | * @version 0.0.1 7 | * @author hex7c0 8 | * @license GPLv3 9 | */ 10 | 11 | /* 12 | * initialize module 13 | */ 14 | var restore = require('..'); 15 | var backup = require('mongodb-backup'); 16 | var assert = require('assert'); 17 | var fs = require('fs'); 18 | var client = require('mongodb').MongoClient; 19 | var URI = process.env.URI; // restore to here 20 | var URI2 = process.env.URI2; // backup from here 21 | 22 | /* 23 | * test module 24 | */ 25 | describe('indexes', function() { 26 | 27 | var ROOT = __dirname + '/foobar/'; 28 | var COLLECTION = 'foobar_ix'; 29 | var INDEXES = []; 30 | var FILES = []; 31 | var DIRS = []; 32 | 33 | describe('init', function() { 34 | 35 | it('should create another dir inside ROOT path if missing', function(done) { 36 | 37 | fs.stat(ROOT, function(err) { 38 | 39 | if (err) { // ROOT is missing: create it and finish 40 | return fs.mkdir(ROOT, done); 41 | } 42 | done(); }); // ROOT already exists 43 | }); 44 | it('should drop collection and create another', function(done) { 45 | 46 | client.connect(URI2, function(err, db) { 47 | 48 | db.dropCollection(COLLECTION, function(err, result) { 49 | 50 | // err is raised if the collection is missing (already deleted); safe to ignore 51 | 52 | db.createCollection(COLLECTION, function(err, collection) { 53 | 54 | assert.ifError(err); 55 | var docs = [ { 56 | loc: { 57 | type: 'Point', 58 | coordinates: [ -73.97, 40.77 ] 59 | }, 60 | name: 'Central Park', 61 | category: 'Parks' 62 | }, { 63 | loc: { 64 | type: 'Point', 65 | coordinates: [ -73.88, 40.78 ] 66 | }, 67 | name: 'La Guardia Airport', 68 | category: 'Airport' 69 | } ]; 70 | 71 | collection.insertMany(docs, { 72 | w: 1 73 | }, function(err, result) { 74 | 75 | assert.ifError(err); 76 | assert.equal(result.insertedCount, 2); 77 | 78 | collection.createIndex({ 79 | loc: '2dsphere' 80 | }, function(err, indexName) { 81 | 82 | assert.ifError(err); 83 | assert.equal(indexName, 'loc_2dsphere'); 84 | 85 | collection.indexes(function(err, indexes) { 86 | 87 | assert.ifError(err); 88 | assert.equal(indexes.length, 2); 89 | assert.equal(indexes[0].name, '_id_'); 90 | assert.equal(indexes[0].key['_id'], '1'); 91 | delete (indexes[0].ns); // different dbName 92 | assert.deepEqual(Object.keys(indexes[0].key), [ '_id' ]); 93 | assert.equal(indexes[1].name, 'loc_2dsphere'); 94 | assert.equal(indexes[1].key['loc'], '2dsphere');
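// The 2dsphere index built here is the interesting part of this fixture:
// with `metadata: true` the dump's .metadata/<collection> JSON is replayed
// at restore time, so both `_id_` and `loc_2dsphere` should reappear,
// while deleting that file (the "restore without metadata" block further
// down) leaves only the default `_id_` index.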
95 | assert.deepEqual(Object.keys(indexes[1].key), [ 'loc' ]); 96 | delete (indexes[1].ns); // different dbName 97 | 98 | INDEXES = indexes; 99 | done(); 100 | }); 101 | }); 102 | }); 103 | }); 104 | }); 105 | }); 106 | }); 107 | it('should backup this collection', function(done) { 108 | 109 | backup({ 110 | uri: URI2, 111 | root: ROOT, 112 | collections: [ COLLECTION ], 113 | metadata: true, 114 | callback: function(err) { 115 | 116 | assert.ifError(err); 117 | var dirs = fs.readdirSync(ROOT); 118 | assert.equal(dirs.length, 1); 119 | 120 | /* 121 | * path 122 | */ 123 | var database = ROOT + dirs[0]; 124 | assert.equal(fs.statSync(database).isDirectory(), true); 125 | 126 | var collections = fs.readdirSync(database); 127 | assert.equal(collections.length, 2, 'collection + .metadata'); 128 | assert.equal(collections[0], '.metadata'); 129 | assert.equal(collections[1], COLLECTION); 130 | 131 | var metadata = database + '/' + collections[0]; 132 | var collection = database + '/' + collections[1]; 133 | 134 | /* 135 | * metadata 136 | */ 137 | assert.equal(fs.statSync(metadata).isDirectory(), true); 138 | var collectionsMetadata = fs.readdirSync(metadata); 139 | assert.equal(collectionsMetadata.length, 1); 140 | assert.equal(collectionsMetadata[0], COLLECTION); 141 | assert.equal(fs.statSync(metadata + '/' + collectionsMetadata[0]) 142 | .isFile(), true); 143 | 144 | try { 145 | var doc = JSON.parse(fs.readFileSync(metadata + '/' + COLLECTION)); 146 | } catch (err) { 147 | assert.ifError(err); 148 | } 149 | delete (doc[0].ns); // different dbName 150 | delete (doc[1].ns); // different dbName 151 | assert.deepEqual(doc, INDEXES); 152 | 153 | /* 154 | * data 155 | */ 156 | assert.equal(fs.statSync(collection).isDirectory(), true); 157 | var collectionData = fs.readdirSync(collection); 158 | assert.equal(collectionData.length, 2); 159 | assert.equal(fs.statSync(collection + '/' + collectionData[0]) 160 | .isFile(), true); 161 | assert.equal(collectionData[0].substr(-5), '.bson'); 162 | assert.equal(fs.statSync(collection + '/' + collectionData[1]) 163 | .isFile(), true); 164 | assert.equal(collectionData[1].substr(-5), '.bson'); 165 | 166 | DIRS.push(collection); 167 | DIRS.push(metadata); 168 | DIRS.push(database); 169 | DIRS.push(ROOT); 170 | ROOT = database; 171 | 172 | FILES.push(collection + '/' + collectionData[1]); 173 | FILES.push(collection + '/' + collectionData[0]); 174 | FILES.push(metadata + '/' + collectionsMetadata[0]); 175 | 176 | done(); 177 | } 178 | }); 179 | }); 180 | }); 181 | 182 | describe('restore with metadata', function() { 183 | 184 | it('should drop collection before restore', function(done) { 185 | 186 | client.connect(URI, function(err, db) { 187 | 188 | assert.ifError(err); 189 | db.dropCollection(COLLECTION, function(err, result) { 190 | 191 | // raise err if missing (already deleted) 192 | done(); 193 | }); 194 | }); 195 | }); 196 | it('should restore this collection', function(done) { 197 | 198 | restore({ 199 | uri: URI, 200 | root: ROOT, 201 | metadata: true, 202 | dropCollections: [ COLLECTION ], 203 | callback: function(err) { 204 | 205 | assert.ifError(err, null); 206 | setTimeout(done, 500); // time for mongod 207 | } 208 | }); 209 | }); 210 | it('should have 2 indexes', function(done) { 211 | 212 | client.connect(URI, function(err, db) { 213 | 214 | assert.ifError(err); 215 | db.collection(COLLECTION, function(err, collection) { 216 | 217 | assert.ifError(err); 218 | collection.indexes(function(err, indexes) { 219 | 220 | assert.ifError(err); 221 | 
assert.equal(indexes.length, 2); 222 | assert.equal(indexes[0].name, '_id_'); 223 | assert.equal(indexes[0].key['_id'], '1'); 224 | assert.deepEqual(Object.keys(indexes[0].key), [ '_id' ]); 225 | delete (indexes[0].ns); // different dbName 226 | assert.equal(indexes[1].name, 'loc_2dsphere'); 227 | assert.equal(indexes[1].key['loc'], '2dsphere'); 228 | assert.deepEqual(Object.keys(indexes[1].key), [ 'loc' ]); 229 | delete (indexes[1].ns); // different dbName 230 | 231 | assert.deepEqual(indexes, INDEXES); 232 | 233 | done(); 234 | }); 235 | }); 236 | }); 237 | }); 238 | }); 239 | 240 | describe('restore without metadata', function() { 241 | 242 | it('should delete metadata file before restore', function(done) { 243 | 244 | fs.unlinkSync(FILES.pop()); // the metadata file was pushed last 245 | done(); 246 | }); 247 | it('should restore this collection', function(done) { 248 | 249 | restore({ 250 | uri: URI, 251 | root: ROOT, 252 | metadata: true, 253 | dropCollections: [ COLLECTION ], 254 | callback: function(err) { 255 | 256 | assert.ifError(err); 257 | setTimeout(done, 500); // time for mongod 258 | } 259 | }); 260 | }); 261 | it('should have 1 index (default)', function(done) { 262 | 263 | client.connect(URI, function(err, db) { 264 | 265 | assert.ifError(err); 266 | db.collection(COLLECTION, function(err, collection) { 267 | 268 | assert.ifError(err); 269 | collection.indexes(function(err, indexes) { 270 | 271 | assert.ifError(err); 272 | assert.equal(indexes.length, 1); 273 | assert.equal(indexes[0].name, '_id_'); 274 | assert.equal(indexes[0].key['_id'], '1'); 275 | assert.deepEqual(Object.keys(indexes[0].key), [ '_id' ]); 276 | delete (indexes[0].ns); // different dbName 277 | 278 | assert.notDeepEqual(indexes, INDEXES); 279 | 280 | done(); 281 | }); 282 | }); 283 | }); 284 | }); 285 | }); 286 | 287 | describe('clear', function() { 288 | 289 | it('should remove all files', function(done) { 290 | 291 | for (var i = 0, ii = FILES.length; i < ii; ++i) { 292 | fs.unlinkSync(FILES[i]); 293 | } 294 | done(); 295 | }); 296 | it('should remove all dirs', function(done) { 297 | 298 | for (var i = 0, ii = DIRS.length; i < ii; ++i) { 299 | fs.rmdirSync(DIRS[i]); 300 | } 301 | done(); 302 | }); 303 | }); 304 | }); 305 | -------------------------------------------------------------------------------- /test/parser.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | /** 3 | * @file parser test 4 | * @module mongodb-restore 5 | * @subpackage test 6 | * @version 0.0.1 7 | * @author hex7c0 8 | * @license GPLv3 9 | */ 10 | 11 | /* 12 | * initialize module 13 | */ 14 | var restore = require('..'); 15 | var assert = require('assert'); 16 | var URI = process.env.URI; 17 | 18 | /* 19 | * test module 20 | */ 21 | describe('parser', function() { 22 | 23 | var ROOT = __dirname + '/dump'; 24 | 25 | it('should check custom parser', function(done) { 26 | 27 | var c = 0; 28 | restore({ 29 | uri: URI, 30 | root: ROOT, 31 | collections: [ 'logins' ], 32 | parser: function(collections, name, next) { 33 | 34 | c++; 35 | assert.equal(typeof collections, 'object'); 36 | assert.equal(typeof name, 'string'); 37 | assert.equal(typeof next, 'function'); 38 | next(); 39 | }, 40 | callback: function(err) { 41 | 42 | assert.ifError(err); 43 | assert.equal(c > 0, true); 44 | done(); 45 | } 46 | }); 47 | }); 48 | }); 49 | -------------------------------------------------------------------------------- /test/stream.js: -------------------------------------------------------------------------------- 1 |
'use strict'; 2 | /** 3 | * @file stream test 4 | * @module mongodb-restore 5 | * @subpackage test 6 | * @version 0.0.1 7 | * @author hex7c0 8 | * @license GPLv3 9 | */ 10 | 11 | /* 12 | * initialize module 13 | */ 14 | var restore = require('..'); 15 | var assert = require('assert'); 16 | var fs = require('fs'); 17 | var client = require('mongodb').MongoClient; 18 | var URI = process.env.URI; 19 | var URI2 = process.env.URI2; 20 | 21 | var pad = function(val, len) { 22 | 23 | var val = String(val); 24 | var len = len || 2; 25 | while (val.length < len) { 26 | val = '0' + val; 27 | } 28 | return val; 29 | }; 30 | 31 | /* 32 | * test module 33 | */ 34 | describe('stream', function() { 35 | 36 | var DOCS = {}; 37 | var ROOT = __dirname + '/dump/'; 38 | var COLLECTION = 'logins'; 39 | var INDEX = []; 40 | 41 | it('should get tar file, not db directory', function(done) { 42 | 43 | fs.readdirSync(ROOT).forEach(function(first) { // database 44 | 45 | var t = ROOT + first; 46 | if (!fs.existsSync(t) || !fs.statSync(t).isFile()) { 47 | return; 48 | } 49 | if (first == 't_stream.tar') done(); 50 | }); 51 | }); 52 | it('should get original data from db', function(done) { 53 | 54 | client.connect(URI2, function(err, db) { 55 | 56 | db.collection(COLLECTION, function(err, collection) { 57 | 58 | assert.ifError(err); 59 | collection.indexes(function(err, index) { 60 | 61 | assert.ifError(err); 62 | INDEX = index; 63 | collection.find({}, { 64 | sort: { 65 | _id: 1 66 | } 67 | }).toArray(function(err, docs) { 68 | 69 | assert.equal(Array.isArray(docs), true); 70 | assert.equal(docs.length > 0, true); 71 | DOCS = docs; 72 | done(); 73 | }); 74 | }); 75 | }); 76 | }); 77 | }); 78 | 79 | describe('restore', function() { 80 | 81 | var l = 'l3.log'; 82 | var date = new Date(); 83 | var dailyF = date.getUTCFullYear() + '-' + pad(date.getUTCMonth() + 1) 84 | + '-' + pad(date.getUTCDate()) + '.' + l; 85 | var stream; 86 | it('should check that log file does not exist before test', function(done) { 87 | 88 | assert.equal(fs.existsSync(l), false); 89 | assert.equal(fs.existsSync(dailyF), false); 90 | done(); 91 | }); 92 | it('should create file system stream', function(done) { 93 | 94 | stream = fs.createReadStream(ROOT + 't_stream.tar'); 95 | done(); 96 | }); 97 | it('should save data to db with stream', function(done) { 98 | 99 | restore({ 100 | uri: URI, 101 | logger: l, 102 | stream: stream, 103 | callback: function(err) { 104 | 105 | assert.ifError(err); 106 | setTimeout(done, 500); // time for mongod 107 | } 108 | }); 109 | }); 110 | it('should test original data and saved data', function(done) { 111 | 112 | client.connect(URI, function(err, db) { 113 | 114 | db.listCollections({}).toArray(function(err, items) { 115 | 116 | assert.ifError(err); 117 | db.collection(COLLECTION, function(err, collection) { 118 | 119 | assert.ifError(err); 120 | collection.indexes(function(err, index) { 121 | 122 | assert.ifError(err); 123 | assert.equal(index.length, INDEX.length); 124 | for (var i = 0, ii = index.length; i < ii; i++) { // remove db related data 125 | delete index[i].ns; 126 | delete INDEX[i].ns; 127 | } 128 | assert.equal(index[0].name, INDEX[0].name); 129 | // assert.deepEqual(index, INDEX); // does not work on travis, but it's ok in a local instance
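// Restoring from a stream has no `root` option to read from: the tar
// stream appears to be buffered through the package's own dump directory
// (the `__dirname + '/../dump'` path asserted on below), which must be
// present but left empty again once the restore completes.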
130 | collection.find({}, { 131 | sort: { 132 | _id: 1 133 | } 134 | }).toArray(function(err, docs) { 135 | 136 | assert.ifError(err); 137 | assert.deepEqual(docs, DOCS); // same as above 138 | done(); 139 | }); 140 | }); 141 | }); 142 | }); 143 | }); 144 | }); 145 | it('should check that buffer dir is empty', function(done) { 146 | 147 | var paths = __dirname + '/../dump'; 148 | assert.equal(fs.existsSync(paths), true); // stay alive 149 | assert.equal(fs.readdirSync(paths).length, 0, 'empty dir'); 150 | done(); 151 | }); 152 | it('should remove log', function(done) { 153 | 154 | fs.unlink(dailyF, done); 155 | }); 156 | }); 157 | }); 158 | -------------------------------------------------------------------------------- /test/tar.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | /** 3 | * @file tar test 4 | * @module mongodb-restore 5 | * @subpackage test 6 | * @version 0.0.1 7 | * @author hex7c0 8 | * @license GPLv3 9 | */ 10 | 11 | /* 12 | * initialize module 13 | */ 14 | var restore = require('..'); 15 | var assert = require('assert'); 16 | var fs = require('fs'); 17 | var client = require('mongodb').MongoClient; 18 | var URI = process.env.URI; 19 | var URI2 = process.env.URI2; 20 | 21 | var pad = function(val, len) { 22 | 23 | var val = String(val); 24 | var len = len || 2; 25 | while (val.length < len) { 26 | val = '0' + val; 27 | } 28 | return val; 29 | }; 30 | 31 | /* 32 | * test module 33 | */ 34 | describe('tar', function() { 35 | 36 | var DOCS = {}; 37 | var ROOT = __dirname + '/dump/'; 38 | var COLLECTION = 'logins'; 39 | var INDEX = []; 40 | 41 | it('should get tar file, not db directory', function(done) { 42 | 43 | fs.readdirSync(ROOT).forEach(function(first) { // database 44 | 45 | var t = ROOT + first; 46 | if (!fs.existsSync(t) || !fs.statSync(t).isFile()) { 47 | return; 48 | } 49 | if (first == 't1.tar') done(); 50 | }); 51 | }); 52 | it('should get original data from db', function(done) { 53 | 54 | client.connect(URI2, function(err, db) { 55 | 56 | db.collection(COLLECTION, function(err, collection) { 57 | 58 | assert.ifError(err); 59 | collection.indexes(function(err, index) { 60 | 61 | assert.ifError(err); 62 | INDEX = index; 63 | collection.find({}, { 64 | sort: { 65 | _id: 1 66 | } 67 | }).toArray(function(err, docs) { 68 | 69 | assert.equal(Array.isArray(docs), true); 70 | assert.equal(docs.length > 0, true); 71 | DOCS = docs; 72 | done(); 73 | }); 74 | }); 75 | }); 76 | }); 77 | }); 78 | 79 | describe('restore', function() { 80 | 81 | var l = 'l2.log'; 82 | var date = new Date(); 83 | var dailyF = date.getUTCFullYear() + '-' + pad(date.getUTCMonth() + 1) 84 | + '-' + pad(date.getUTCDate()) + '.' + l;
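// This mirrors the tar backup made in test/1.js: given `root` plus the
// same `tar: 't1.tar'` file name, restore reads the archive instead of
// walking a directory tree of .bson files.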
85 | it('should check that log file does not exist before test', function(done) { 86 | 87 | assert.equal(fs.existsSync(l), false); 88 | assert.equal(fs.existsSync(dailyF), false); 89 | done(); 90 | }); 91 | it('should save data to db', function(done) { 92 | 93 | restore({ 94 | uri: URI, 95 | root: ROOT, 96 | logger: l, 97 | tar: 't1.tar', 98 | callback: function(err) { 99 | 100 | assert.ifError(err); 101 | setTimeout(done, 500); // time for mongod 102 | } 103 | }); 104 | }); 105 | it('should test original data and saved data', function(done) { 106 | 107 | client.connect(URI, function(err, db) { 108 | 109 | db.listCollections({}).toArray(function(err, items) { 110 | 111 | assert.ifError(err); 112 | db.collection(COLLECTION, function(err, collection) { 113 | 114 | assert.ifError(err); 115 | collection.indexes(function(err, index) { 116 | 117 | assert.ifError(err); 118 | assert.equal(index.length, INDEX.length); 119 | for (var i = 0, ii = index.length; i < ii; i++) { // remove db related data 120 | delete index[i].ns; 121 | delete INDEX[i].ns; 122 | } 123 | assert.equal(index[0].name, INDEX[0].name); 124 | // assert.deepEqual(index, INDEX); // does not work on travis, but it's ok in a local instance 125 | collection.find({}, { 126 | sort: { 127 | _id: 1 128 | } 129 | }).toArray(function(err, docs) { 130 | 131 | assert.ifError(err); 132 | assert.deepEqual(docs, DOCS); // same as above 133 | done(); 134 | }); 135 | }); 136 | }); 137 | }); 138 | }); 139 | }); 140 | it('should check that buffer dir is empty', function(done) { 141 | 142 | var paths = __dirname + '/../dump'; 143 | assert.equal(fs.existsSync(paths), true); // stay alive 144 | assert.equal(fs.readdirSync(paths).length, 0, 'empty dir'); 145 | done(); 146 | }); 147 | it('should remove log', function(done) { 148 | 149 | fs.unlink(dailyF, done); 150 | }); 151 | }); 152 | }); 153 | -------------------------------------------------------------------------------- /test/z_clear.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | /** 3 | * @file z test 4 | * @module mongodb-restore 5 | * @subpackage test 6 | * @version 0.0.1 7 | * @author hex7c0 8 | * @license GPLv3 9 | */ 10 | 11 | /* 12 | * initialize module 13 | */ 14 | var assert = require('assert'); 15 | var fs = require('fs'); 16 | 17 | /* 18 | * test module 19 | */ 20 | describe('last', function() { 21 | 22 | var ROOT = __dirname + '/dump/'; 23 | 24 | describe('tar', function() { 25 | 26 | it('should unlink tar0 file', function(done) { 27 | 28 | fs.unlink(ROOT + 't1.tar', done); 29 | }); 30 | it('should unlink tar1 file', function(done) { 31 | 32 | fs.unlink(ROOT + 't_stream.tar', done); 33 | }); 34 | }); 35 | 36 | describe('directory', function() { 37 | 38 | function rmDir(path, next) { 39 | 40 | fs.readdirSync(path).forEach(function(first) { // database 41 | 42 | var database = path + first; 43 | assert.equal(fs.statSync(database).isDirectory(), true); 44 | var metadata = ''; 45 | var collections = fs.readdirSync(database); 46 | if (fs.existsSync(database + '/.metadata') === true) { 47 | metadata = database + '/.metadata/'; 48 | delete collections[collections.indexOf('.metadata')]; // delete leaves a hole that forEach skips 49 | } 50 | collections.forEach(function(second) { // collection 51 | 52 | var collection = database + '/' + second; 53 | if (fs.statSync(collection).isDirectory() === false) { 54 | return; 55 | } 56 | fs.readdirSync(collection).forEach(function(third) { // document 57 | 58 | var document = collection + '/' + third; 59 | if (next !== 
undefined) { 60 | next(null, document); 61 | } 62 | fs.unlinkSync(document); 63 | }); 64 | if (metadata !== '') { 65 | fs.unlinkSync(metadata + second); 66 | } 67 | fs.rmdirSync(collection); 68 | }); 69 | if (metadata !== '') { 70 | fs.rmdirSync(metadata); 71 | } 72 | fs.rmdirSync(database); 73 | }); 74 | } 75 | it('should rm db directory', function(done) { 76 | 77 | rmDir(ROOT); 78 | done(); 79 | }); 80 | }); 81 | }); 82 | -------------------------------------------------------------------------------- /test/z_int64id.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | /** 3 | * @file issue #8 test 4 | * @module mongodb-restore 5 | * @subpackage test 6 | * @version 0.0.1 7 | * @author hex7c0 8 | * @license GPLv3 9 | */ 10 | 11 | /* 12 | * initialize module 13 | */ 14 | var backup = require('mongodb-backup'); 15 | var restore = require('..'); 16 | var assert = require('assert'); 17 | var fs = require('fs'); 18 | var extname = require('path').extname; 19 | var mongodb = require('mongodb'); 20 | var bson = require('bson'); 21 | 22 | var client = mongodb.MongoClient; 23 | var BSON = new bson.BSON(); 24 | var MLong = mongodb.Long; 25 | var BLong = bson.Long; 26 | var URI = process.env.URI; 27 | var URI2 = process.env.URI2; 28 | var Root = __dirname + '/dump'; 29 | var Collection = 'test_8'; 30 | 31 | /* 32 | * test module 33 | */ 34 | describe('int64 id', function() { 35 | 36 | describe('issue8 - parsed as a TimeStamp', function() { 37 | 38 | var NInt64, SInt64, NLong, SLong; 39 | 40 | describe('create new collection', function() { 41 | 42 | it('should create long number', function(done) { 43 | 44 | var long1 = MLong.fromNumber(100); 45 | var long2 = BLong.fromNumber(100); 46 | assert.deepEqual(long1, long2); 47 | var long1 = MLong.fromString('100'); 48 | var long2 = BLong.fromString('100'); 49 | assert.deepEqual(long1, long2); 50 | 51 | NInt64 = 1000576093407275579; 52 | SInt64 = '1000576093407275579'; 53 | NLong = MLong.fromNumber(NInt64); 54 | SLong = MLong.fromString(SInt64); 55 | 56 | done(); 57 | }); 58 | it('should create "' + Collection + '" collection', function(done) { 59 | 60 | client.connect(URI2, function(err, db) { 61 | 62 | assert.ifError(err); 63 | db.createCollection(Collection, function(err, collection) { 64 | 65 | assert.ifError(err); 66 | collection.remove({}, function(err, result) { // remove previous data 67 | 68 | assert.ifError(err); 69 | collection.insert([ { 70 | _id: 'nint64', 71 | d: NInt64, 72 | t: 'foo1' 73 | }, { 74 | _id: 'sint64', 75 | d: SInt64, 76 | t: 'foo2' 77 | }, { 78 | _id: 'nlong', 79 | d: NLong, 80 | t: 'foo3' 81 | }, { 82 | _id: 'slong', 83 | d: SLong, 84 | t: 'foo4' 85 | } ], function(err, result) { 86 | 87 | assert.ifError(err); 88 | assert.equal(result.result.ok, 1); 89 | assert.equal(result.result.n, 4); 90 | db.close(); 91 | done(); 92 | }); 93 | }); 94 | }); 95 | }); 96 | }); 97 | }); 98 | 99 | describe('backup', function() { 100 | 101 | it('should build 1 directory and 4 files', function(done) { 102 | 103 | backup({ 104 | uri: URI2, 105 | root: Root, 106 | collections: [ Collection ], 107 | callback: function(err) { 108 | 109 | assert.ifError(err); 110 | setTimeout(done, 500); // time for mongod 111 | } 112 | }); 113 | }); 114 | }); 115 | 116 | describe('deserialize', function() { 117 | 118 | var database, collection; 119 | var nint64_file, nlong_file, sint64_file, slong_file; 120 | 121 | it('should find 2 files', function(done) { 122 | 123 | var first = fs.readdirSync(Root); 124 | 
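// Why four variants of the same value: 1000576093407275579 is well above
// Number.MAX_SAFE_INTEGER (2^53 - 1), so the plain-number fixtures cannot
// be represented exactly as a JS double, while the Long fixtures keep all
// 64 bits. Issue #8 reported such int64 values coming back as Timestamps
// after a backup/restore round trip; the assertions below check that each
// encoding survives deserialization unchanged.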
assert.equal(first.length, 1, 'database'); 125 | 126 | database = Root + '/' + first[0]; 127 | assert.equal(fs.statSync(database).isDirectory(), true); 128 | 129 | var second = fs.readdirSync(database); 130 | assert.equal(second.length, 1, 'collection'); 131 | assert.equal(second[0], Collection); 132 | 133 | collection = database + '/' + second[0]; 134 | assert.equal(fs.statSync(collection).isDirectory(), true); 135 | 136 | var docs = fs.readdirSync(collection); 137 | assert.equal(docs.length, 4); 138 | nint64_file = collection + '/' + docs[0]; 139 | nlong_file = collection + '/' + docs[1]; 140 | sint64_file = collection + '/' + docs[2]; 141 | slong_file = collection + '/' + docs[3]; 142 | 143 | docs.forEach(function(file) { 144 | 145 | var p = collection + '/' + file; 146 | assert.equal(fs.statSync(p).isFile(), true); 147 | assert.equal(extname(p), '.bson'); 148 | }); 149 | 150 | done(); 151 | }); 152 | it('should deserialize nint64 file', function(done) { 153 | 154 | var data = BSON.deserialize(fs.readFileSync(nint64_file)); 155 | assert.strictEqual(data._id, 'nint64'); 156 | assert.deepEqual(data.d, NInt64); 157 | assert.strictEqual(data.t, 'foo1'); 158 | done(); 159 | }); 160 | it('should deserialize sint64 file', function(done) { 161 | 162 | var data = BSON.deserialize(fs.readFileSync(sint64_file)); 163 | assert.strictEqual(data._id, 'sint64'); 164 | assert.deepEqual(data.d, SInt64); 165 | assert.strictEqual(data.t, 'foo2'); 166 | done(); 167 | }); 168 | it('should deserialize nlong file', function(done) { 169 | 170 | var data = BSON.deserialize(fs.readFileSync(nlong_file)); 171 | assert.strictEqual(data._id, 'nlong'); 172 | assert.deepEqual(data.d, NLong); 173 | assert.strictEqual(data.t, 'foo3'); 174 | done(); 175 | }); 176 | it('should deserialize slong file', function(done) { 177 | 178 | var data = BSON.deserialize(fs.readFileSync(slong_file)); 179 | assert.strictEqual(data._id, 'slong'); 180 | assert.deepEqual(data.d, SLong); 181 | assert.strictEqual(data.t, 'foo4'); 182 | done(); 183 | }); 184 | 185 | describe('restore', function() { 186 | 187 | var ROOT = __dirname + '/dump/'; 188 | 189 | it('should save data to db', function(done) { 190 | 191 | restore({ 192 | uri: URI, 193 | root: ROOT, 194 | callback: function(err) { 195 | 196 | assert.ifError(err); 197 | setTimeout(done, 500); // time for mongod 198 | } 199 | }); 200 | }); 201 | it('should check "' + Collection + '" collection', function(done) { 202 | 203 | client.connect(URI, function(err, db) { 204 | 205 | assert.ifError(err); 206 | db.collection(Collection, function(err, collection) { 207 | 208 | assert.ifError(err); 209 | collection.find({ 210 | _id: 'nint64' 211 | }).limit(1).next(function(err, doc) { 212 | 213 | assert.ifError(err); 214 | assert.ok(doc); 215 | assert.deepEqual(doc.d, NInt64); 216 | assert.strictEqual(doc.t, 'foo1'); 217 | fs.unlinkSync(nint64_file); 218 | 219 | collection.find({ 220 | _id: 'sint64' 221 | }).limit(1).next(function(err, doc) { 222 | 223 | assert.ifError(err); 224 | assert.ok(doc); 225 | assert.deepEqual(doc.d, SInt64); 226 | assert.strictEqual(doc.t, 'foo2'); 227 | fs.unlinkSync(sint64_file); 228 | }); 229 | 230 | collection.find({ 231 | _id: 'nlong' 232 | }).limit(1).next(function(err, doc) { 233 | 234 | assert.ifError(err); 235 | assert.ok(doc); 236 | assert.deepEqual(doc.d, NLong); 237 | assert.strictEqual(doc.t, 'foo3'); 238 | fs.unlinkSync(nlong_file); 239 | }); 240 | 241 | collection.find({ 242 | _id: 'slong' 243 | }).limit(1).next(function(err, doc) { 244 | 245 | 
assert.ifError(err); 246 | assert.ok(doc); 247 | assert.deepEqual(doc.d, SLong); 248 | assert.strictEqual(doc.t, 'foo4'); 249 | fs.unlinkSync(slong_file); 250 | 251 | db.close(); 252 | done(); 253 | }); 254 | }); 255 | }); 256 | }); 257 | }); 258 | it('should remove dirs', function(done) { 259 | 260 | fs.rmdirSync(collection); 261 | fs.rmdirSync(database); 262 | done(); 263 | }); 264 | }); 265 | }); 266 | }); 267 | }); 268 | --------------------------------------------------------------------------------
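Taken together, the suite above exercises the main options of restore() against its sibling mongodb-backup: directory dumps, tar archives, raw streams, metadata-driven index rebuilds, drop/dropCollections semantics, custom parsers, and int64 round trips. For reference, a minimal backup/restore round trip over the options the tests rely on looks like this (a sketch, not a file from the repository; the URIs and the ./dump path are illustrative):

var backup = require('mongodb-backup');
var restore = require('mongodb-restore');

backup({
  uri: 'mongodb://127.0.0.1:27017/source', // dump from here
  root: __dirname + '/dump', // writes <db>/<collection>/<_id>.bson per document
  metadata: true, // also writes <db>/.metadata/<collection> with index definitions
  callback: function(err) {
    if (err) return console.error(err);
    restore({
      uri: 'mongodb://127.0.0.1:27017/target', // restore to here
      root: __dirname + '/dump', // the tree written by backup above
      metadata: true, // replay indexes (e.g. loc_2dsphere) from .metadata
      dropCollections: true, // drop each restored collection first
      callback: function(err) {
        if (err) return console.error(err);
        console.log('round trip complete');
      }
    });
  }
});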