├── .gitignore
├── LICENSE
├── README.md
├── package.json
└── src
    └── firebase-import.js

/.gitignore:
--------------------------------------------------------------------------------
1 | *.swp
2 | bin/
3 | node_modules/
4 | package-lock.json
5 | 
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 | 
3 | Copyright (c) 2016 Firebase
4 | 
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 | 
23 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Firebase-Import
2 | Firebase-import is a helper utility for importing large JSON files into your
3 | [Firebase Realtime Database](https://firebase.google.com/docs/database/). It breaks the JSON into smaller
4 | chunks and uploads them individually through the Firebase API.
5 | 
6 | This utility is designed and tested for imports of files up to 400MB.
7 | 
8 | ## Installing
9 | 
10 | Install the firebase-import module globally:
11 | 
12 |     $ npm install -g firebase-import
13 | 
14 | or install it locally and add it to your path:
15 | 
16 |     $ npm install firebase-import
17 |     $ export PATH=$PATH:`npm bin`
18 | 
19 | ## Obtaining a service account file
20 | Using your Google account, log in to the Firebase console: https://console.firebase.google.com/
21 | 
22 | See the example below:
23 | 
24 | 1. Project Settings ->
25 | 2. Service Accounts ->
26 | 3. Generate new private key
27 | 
28 | ![Service Account Image](https://image.ibb.co/cBuuo9/service_account.png)
29 | 
30 | 
31 | ## Usage
32 | 
33 |     $ firebase-import
34 |     Usage: firebase-import
35 | 
36 |     Options:
37 |       --database_url     Firebase database URL (e.g. https://databaseName.firebaseio.com).  [required]
38 |       --path             Database path (e.g. /products).  [required]
39 |       --json             The JSON file to import.  [required]
40 |       --merge            Write the top-level children without overwriting the whole parent.
41 |       --force            Don't prompt before overwriting data.
42 |       --service_account  Path to a JSON file with your service account credentials.
43 | 
44 | ## Example
45 | 
46 |     $ firebase-import --database_url https://test.firebaseio-demo.com --path / --json test.json --service_account /path/to/your/service_account.json
47 |     All data at https://test.firebaseio-demo.com/ will be overwritten.
48 |     Press <enter> to proceed, Ctrl-C to abort.
49 | 
50 |     Reading /Users/michael/tmp/test.json... (may take a minute)
51 |     Preparing JSON for import... (may take a minute)
52 |     Importing [=================================================] 100% (9431/9431)
53 |     Import completed.
54 | 
55 | Or an example of merging the contents of test.json with what's already in Firebase:
56 | 
57 |     $ firebase-import --database_url https://test.firebaseio-demo.com --path / --json test.json --merge --service_account /path/to/your/service_account.json
58 |     Each top-level child in test.json will be written under https://test.firebaseio-demo.com/.
59 |     If a child already exists, it will be overwritten.
60 |     Press <enter> to proceed, Ctrl-C to abort.
61 | 
62 |     Reading /Users/michael/tmp/test.json... (may take a minute)
63 |     Preparing JSON for import... (may take a minute)
64 |     Importing [=================================================] 100% (9431/9431)
65 |     Import completed.
66 | 
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 |   "name": "firebase-import",
3 |   "description": "npm config for Firebase Import",
4 |   "version": "2.2.2",
5 |   "dependencies": {
6 |     "JSONStream": "^1.2.1",
7 |     "firebase": "^3.4.0",
8 |     "firebase-admin": "^6.0.0",
9 |     "optimist": "^0.6.1",
10 |     "progress": "^1.1.8"
11 |   },
12 |   "devDependencies": {
13 |     "copyfiles": "^1.0.0"
14 |   },
15 |   "repository": {
16 |     "type": "git",
17 |     "url": "git://github.com/firebase/firebase-import.git"
18 |   },
19 |   "keywords": [
20 |     "firebase",
21 |     "import"
22 |   ],
23 |   "files": [
24 |     "bin/**",
25 |     "LICENSE",
26 |     "README.md",
27 |     "package.json"
28 |   ],
29 |   "author": "Firebase",
30 |   "license": "MIT",
31 |   "bin": {
32 |     "firebase-import": "./bin/firebase-import.js"
33 |   },
34 |   "scripts": {
35 |     "build": "copyfiles -f src/firebase-import.js bin"
36 |   }
37 | }
38 | 
--------------------------------------------------------------------------------
/src/firebase-import.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 | var firebase = require('firebase'),
3 |     optimist = require('optimist'),
4 |     ProgressBar = require('progress'),
5 |     assert = require('assert'),
6 |     path = require('path'),
7 |     fs = require('fs'),
8 |     JSONStream = require('JSONStream'),
9 |     util = require('util'),
10 |     admin = require('firebase-admin');
11 | 
12 | // We try to write data in ~1MB chunks (in reality this often ends up being much smaller, due to the JSON structure).
13 | var CHUNK_SIZE = 1024*1024;
14 | 
15 | // Keep ~50 writes outstanding at a time (this increases throughput, so we're not delayed by server round-trips).
16 | var OUTSTANDING_WRITE_COUNT = 50;
17 | 
18 | var argv = require('optimist')
19 |     .usage('Usage: $0')
20 | 
21 |     .demand('database_url')
22 |     .describe('database_url', 'Firebase database URL (e.g. https://databaseName.firebaseio.com).')
23 |     .alias('d', 'database_url')
24 | 
25 |     .demand('path')
26 |     .describe('path', 'Database path (e.g. /products).')
27 |     .alias('p', 'path')
28 | 
29 |     .demand('json')
30 |     .describe('json', 'The JSON file to import.')
31 |     .alias('j', 'json')
32 | 
33 |     .boolean('merge')
34 |     .describe('merge', 'Write the top-level children without overwriting the whole parent.')
35 |     .alias('m', 'merge')
36 | 
37 |     .boolean('force')
38 |     .describe('force', 'Don\'t prompt before overwriting data.')
39 | 
40 |     .describe('service_account', 'Path to a JSON file with your service account credentials.')
41 |     .alias('s', 'service_account')
42 | 
43 |     .argv;
44 | 
45 | function main() {
46 |   admin.initializeApp({
47 |     credential: argv.service_account ? admin.credential.cert(argv.service_account) : admin.credential.applicationDefault(),
48 |     databaseURL: argv.database_url
49 |   });
50 | 
51 |   var db = admin.database();
52 |   var ref = db.ref(argv.path);
53 | 
54 |   var connFailTimeout = setTimeout(function() {
55 |     console.log('Failed to connect to Firebase.');
56 |     process.exit();
57 |   }, 10000);
58 | 
59 |   function ready() {
60 |     clearTimeout(connFailTimeout);
61 |     promptToContinue(ref, function() { start(ref); });
62 |   }
63 | 
64 |   var connFunc = db.ref('.info/connected').on('value', function(s) {
65 |     if (s.val() === true) {
66 |       db.ref('.info/connected').off('value', connFunc);
67 |       ready();
68 |     }
69 |   });
70 | }
71 | 
72 | function promptToContinue(ref, next) {
73 |   if (argv.force) {
74 |     next();
75 |   } else {
76 |     if (argv.merge) {
77 |       console.log('Each top-level child in ' + argv.json + ' will be written under ' + ref.toString() + '. If a child already exists, it will be overwritten.');
78 |     } else {
79 |       console.log('All data at ' + ref.toString() + ' will be overwritten.');
80 |     }
81 |     console.log('Press <enter> to proceed, Ctrl-C to abort.');
82 |     process.stdin.resume();
83 |     process.stdin.once('data', next);
84 |   }
85 | }
86 | 
87 | function readFirstNonWhitespaceChar(file, callback) {
88 |   var firstChar;
89 |   var rs = fs.createReadStream(file);
90 |   rs.on('data', function(chunk) {
91 |     var s = chunk.toString().trim();
92 |     if (s !== "") {
93 |       firstChar = s[0];
94 |       rs.close();
95 |     }
96 |   })
97 |   .on('error', callback)
98 |   .on('close', function() {
99 |     return callback(null, firstChar);
100 |   });
101 | }
102 | 
103 | function getJsonFromFile(file, callback) {
104 |   readFirstNonWhitespaceChar(file, function(err, firstChar) {
105 |     var json;
106 |     if (firstChar === "[" || firstChar === "{") {
107 |       var jsonStream;
108 |       var onFunc;
109 |       if (firstChar === "[") {
110 |         json = [];
111 |         jsonStream = JSONStream.parse("*"); // stream each element of the top-level array
112 |         onFunc = function(r) {
113 |           json.push(r);
114 |         };
115 |       } else {
116 |         json = {};
117 |         jsonStream = JSONStream.parse("$*"); // stream each top-level key/value pair of the object
118 |         onFunc = function(r) {
119 |           json[r.key] = r.value;
120 |         };
121 |       }
122 |       fs.createReadStream(file)
123 |         .pipe(jsonStream)
124 |         .on('data', onFunc)
125 |         .on('error', callback)
126 |         .on('close', function() {
127 |           return callback(null, json);
128 |         });
129 |     } else {
130 |       json = require(file);
131 |       return callback(null, json);
132 |     }
133 |   });
134 | }
135 | 
136 | function start(ref) {
137 |   var file = path.resolve(argv.json);
138 |   console.log('Reading ' + file + '... (may take a minute)');
139 | 
140 |   getJsonFromFile(file, function(err, json) {
141 |     var clearFirst = true, splitTopLevel = false;
142 |     if (argv.merge) {
143 |       clearFirst = false;
144 |       // Need to split into chunks at the top level to ensure we don't overwrite the parent.
145 |       splitTopLevel = true;
146 |     }
147 | 
148 |     console.log('Preparing JSON for import... (may take a minute)');
149 |     var chunks = createChunks(ref, json, splitTopLevel);
150 | 
151 |     if (clearFirst) {
152 |       ref.remove(function(error) {
153 |         if (error) throw error;
154 |         uploadChunks(chunks);
155 |       });
156 |     } else {
157 |       uploadChunks(chunks);
158 |     }
159 |   });
160 | }
161 | 
162 | function uploadChunks(chunks) {
163 |   var uploader = new ChunkUploader(chunks);
164 |   uploader.go(function() {
165 |     console.log('\nImport completed.');
166 |     process.exit();
167 |   });
168 | }
169 | 
170 | function createChunks(ref, json, forceSplit) {
171 |   var chunkRes = chunkInternal(ref, json, forceSplit);
172 |   if (!chunkRes.chunks) {
173 |     return [{ref: ref, json: json}];
174 |   } else {
175 |     return chunkRes.chunks;
176 |   }
177 | }
178 | 
179 | function chunkInternal(ref, json, forceSplit) {
180 |   var size = 0;
181 |   var priority = null;
182 |   var jsonIsObject = json !== null && typeof json === 'object';
183 |   if (jsonIsObject) {
184 |     size += 2; // {}
185 |   }
186 | 
187 |   if (jsonIsObject && ('.priority' in json)) {
188 |     size += 12; // ".priority":
189 |     priority = json['.priority'];
190 |     size += json['.priority'].toString().length;
191 |   }
192 | 
193 |   var value = json;
194 |   if (jsonIsObject && ('.value' in json)) {
195 |     size += 9; // ".value":
196 |     value = json['.value'];
197 |   }
198 | 
199 |   if (value === null || typeof value !== 'object') {
200 |     // It's a leaf; it can't be chunked.
201 |     size += JSON.stringify(value).length;
202 |     return { chunks: null, size: size };
203 |   } else {
204 |     // It's a node with children; consider chunking each child.
205 |     var chunks = [];
206 |     var splitUp = false;
207 |     for (var key in json) {
208 |       if (key !== '.priority') {
209 |         size += key.length + 3;
210 | 
211 |         var chunkRes = chunkInternal(ref.child(key), json[key]); // forceSplit only applies at the top level
212 |         size += chunkRes.size;
213 | 
214 |         if (chunkRes.chunks) {
215 |           for (var i = 0; i < chunkRes.chunks.length; i++) {
216 |             chunks.push(chunkRes.chunks[i]);
217 |           }
218 |           // One of the children had to be broken into chunks. We have to break all of them.
219 |           splitUp = true;
220 |         } else {
221 |           chunks.push({ref: ref.child(key), json: json[key]});
222 |         }
223 |       }
224 |     }
225 | 
226 |     // Add priority last since it must be added after at least one child.
227 |     if (priority !== null) {
228 |       chunks.push({ref: ref, priority: priority});
229 |     }
230 | 
231 |     if (forceSplit || splitUp || size >= CHUNK_SIZE) {
232 |       return { chunks: chunks, size: size };
233 |     } else {
234 |       return { chunks: null, size: size };
235 |     }
236 |   }
237 | }
238 | 
239 | function ChunkUploader(chunks) {
240 |   this.next = 0;
241 |   this.chunks = chunks;
242 |   if (process.stdout.isTTY) {
243 |     this.bar = new ProgressBar('Importing [:bar] :percent (:current/:total)', { width: 50, total: chunks.length, incomplete: ' ' });
244 |   } else {
245 |     console.log('Importing... (may take a while)');
246 |   }
247 | }
248 | 
249 | ChunkUploader.prototype.go = function(onComplete) {
250 |   this.onComplete = onComplete;
251 | 
252 |   for (var i = 0; i < OUTSTANDING_WRITE_COUNT && i < this.chunks.length; i++) {
253 |     this.uploadNext();
254 |   }
255 | };
256 | 
257 | ChunkUploader.prototype.uploadNext = function() {
258 |   var chunkNum = this.next, chunk = this.chunks[chunkNum];
259 |   assert(chunkNum < this.chunks.length);
260 |   this.next++;
261 | 
262 |   var self = this;
263 |   var onComplete = function(error) {
264 |     if (error) {
265 |       console.log('Error uploading to ' + chunk.ref.toString() + ': ' + util.inspect(chunk.json));
266 |       console.error(error);
267 |       throw error;
268 |     }
269 | 
270 |     if (process.stdout.isTTY && self.bar) {
271 |       self.bar.tick();
272 |     }
273 | 
274 |     if (chunkNum === self.chunks.length - 1) {
275 |       self.onComplete();
276 |     } else {
277 |       // Upload the next chunk.
278 |       assert(self.next === self.chunks.length || self.next === chunkNum + OUTSTANDING_WRITE_COUNT);
279 |       if (self.next < self.chunks.length)
280 |         self.uploadNext();
281 |     }
282 |   };
283 | 
284 |   if ('json' in chunk) {
285 |     chunk.ref.set(chunk.json, onComplete);
286 |   } else {
287 |     assert('priority' in chunk);
288 |     chunk.ref.setPriority(chunk.priority, onComplete);
289 |   }
290 | };
291 | 
292 | main();
293 | 
--------------------------------------------------------------------------------
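
A note for readers (not a file in the repository): the README above says the tool "breaks the JSON into smaller chunks and uploads them individually through the Firebase API." The sketch below distills that pattern with firebase-admin so it can be tried outside the CLI. It is a simplified, hypothetical stand-in for the ChunkUploader class in src/firebase-import.js, not code shipped by the project; the database URL, service account path, and sample data are placeholders.

// sketch.js (hypothetical, not part of this repository)
var admin = require('firebase-admin');

admin.initializeApp({
  credential: admin.credential.cert('/path/to/service_account.json'), // placeholder path
  databaseURL: 'https://databaseName.firebaseio.com'                  // placeholder URL
});

var db = admin.database();

// A "chunk" is just a {ref, json} pair: a database location plus the value to write there.
var chunks = [
  { ref: db.ref('/products/1'), json: { name: 'widget', price: 10 } },
  { ref: db.ref('/products/2'), json: { name: 'gadget', price: 25 } }
];

// Keep a bounded number of set() calls in flight, the same idea as OUTSTANDING_WRITE_COUNT above.
var OUTSTANDING_WRITE_COUNT = 50;

function uploadChunks(chunks, done) {
  if (chunks.length === 0) return done();
  var next = 0, completed = 0;

  function uploadOne() {
    if (next >= chunks.length) return;   // nothing left to start
    var chunk = chunks[next++];
    chunk.ref.set(chunk.json, function(error) {
      if (error) throw error;
      completed++;
      if (completed === chunks.length) return done();
      uploadOne();                       // start another write as soon as one finishes
    });
  }

  for (var i = 0; i < OUTSTANDING_WRITE_COUNT && i < chunks.length; i++) {
    uploadOne();
  }
}

uploadChunks(chunks, function() {
  console.log('Import completed.');
  process.exit();
});

Bounding the number of in-flight writes is the design point: it keeps several server round-trips overlapping for throughput without issuing every write at once, which is the same reason the real importer uses OUTSTANDING_WRITE_COUNT.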