├── .gitignore ├── README.md ├── package-lock.json ├── package.json ├── samples ├── csv-indexed │ ├── test.7aM3oA2Vey.csv │ ├── test.INDEX.csv │ ├── test.WrZcYDcSEK.csv │ ├── test.ZAeXu3t9C1.csv │ ├── test.bqXnVStK7.csv │ ├── test.iIuPVhg34y.csv │ ├── test.pdLpX1tVHd.csv │ ├── test.tCyFRILto0.csv │ └── test.wba2gQC7m-.csv ├── test-db.json ├── test-obj-subcoll.json ├── test-obj.json ├── test.csv ├── test.indexed.xlsx ├── test.json ├── test.simple-with-empty-fields.csv ├── test.simple-with-empty-fields.xlsx └── test.simple.xlsx ├── src ├── exportCollection │ └── index.ts ├── importCollection │ └── index.ts ├── index.ts └── shared.ts └── tsconfig.json /.gitignore: -------------------------------------------------------------------------------- 1 | /node_modules 2 | credentials.json 3 | /scraps/* 4 | /dist/* 5 | 6 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Fire Migrate 2 | 3 | 👀 Firestore now has an [official import/export process](https://firebase.google.com/docs/firestore/manage-data/export-import). Consider that your first option if it suits your needs. If it does not, continue reading... 4 | 5 | CLI tool for moving data in-n-out of [Cloud Firestore](https://firebase.google.com/docs/firestore/). 6 | 7 | - Import/Export CSV, Excel, or JSON files to/from Firestore. 8 | - Encode/Decode Firestore data types such as GeoPoint, Reference, Timestamp, etc. 9 | 10 | Watch the [screencast](https://angularfirebase.com/lessons/import-csv-json-or-excel-to-firestore/) 11 | 12 | ## Install 13 | 14 | - Clone and run `npm install` 15 | - Download the service account from your Firebase project settings, then save it as `credentials.json` in the project root. 16 | - `npm run build` and you're off and running. 17 | 18 | ## Import Data to Firestore 19 | 20 | - Push your local data to the Firestore database. 21 | - Selectively import [collections...] 
from source file to Firestore. 22 | - Omitting [collections...], or specifying root "/" will import all collections. 23 | 24 | ``` 25 | import|i [options] [collections...] 26 | ``` 27 | 28 | Options: 29 | ``` 30 | -i, --id [field] Field to use for Document IDs. (default: doc_id) 31 | -a, --auto-id [str] Document ID token specifying auto generated Document ID. (default: Auto-ID) 32 | -m, --merge Merge Firestore documents. Default is Replace. 33 | -k, --chunk [size] Split upload into batches. Max 500 by Firestore constraints. (default: 500) 34 | -p, --coll-prefix [prefix] (Sub-)Collection prefix. (default: collection) 35 | 36 | -s, --sheet [#] Single mode XLSX Sheet # to import. 37 | 38 | -T, --truncate Delete all documents from target collections before import. 39 | 40 | -d, --dry-run Perform a dry run, without committing data. Implies --verbose. 41 | -v, --verbose Output document insert paths 42 | -h, --help output usage information 43 | ``` 44 | 45 | Examples: 46 | ``` 47 | fire-migrate import --dry-run test.json myCollection 48 | fire-migrate import --merge test.INDEX.csv myCollection 49 | fire-migrate i -m --id docid test.xlsx myCollection 50 | ``` 51 | 52 | ## Export Data from Firestore 53 | 54 | - Pull data from Firestore to a JSON, CSV or XLSX file. 55 | - Selectively export [collections...], or entire database with root "/". 56 | - Exports Sub-Collections by default, optionally disabled. 57 | - Splits CSV/XLSX collections into separate files/sheets with an INDEX. 58 | 59 | ``` 60 | export|e [options] [collections...] 61 | ``` 62 | 63 | Options: 64 | ``` 65 | 66 | -n, --no-subcolls Do not export sub-collections. 
67 | -p, --coll-prefix [prefix] Collection prefix (default: collection) 68 | -i, --id-field [id] Field name to use for document IDs (default: doc_id) 69 | 70 | -v, --verbose Output traversed document paths 71 | -h, --help output usage information 72 | ``` 73 | 74 | Examples: 75 | ``` 76 | fire-migrate export --verbose --no-subcolls myCollectionRootLevel.json myCollection 77 | fire-migrate export users-posts.json users posts 78 | fire-migrate e -v firestore-dump.xlsx 79 | ``` 80 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "firestoremigrate", 3 | "version": "0.3.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "build": "tsc", 8 | "build-link": "tsc && npm link", 9 | "migrate": "node dist/index.js" 10 | }, 11 | "author": "", 12 | "license": "ISC", 13 | "devDependencies": { 14 | "@types/csvtojson": "^1.1.5", 15 | "@types/fs-extra": "^5.0.1", 16 | "@types/lodash": "^4.14.106", 17 | "@types/node": "^10.5.7", 18 | "typescript": "^3.0.1" 19 | }, 20 | "dependencies": { 21 | "commander": "^2.15.1", 22 | "dot-object": "^1.7.0", 23 | "firebase-admin": "^7.1.1", 24 | "fs-extra": "^7.0.0", 25 | "lodash": "^4.17.5", 26 | "shortid": "^2.2.12", 27 | "xlsx": "^0.13.3" 28 | }, 29 | "bin": { 30 | "fire-migrate": "dist/index.js" 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /samples/csv-indexed/test.7aM3oA2Vey.csv: -------------------------------------------------------------------------------- 1 | doc_id,timestamp,text 2 | aaaa,2018-03-30T14:01:03.652Z,hello 3 | bbbb,2018-03-30T14:01:03.652Z,goodbye 4 | -------------------------------------------------------------------------------- /samples/csv-indexed/test.INDEX.csv: -------------------------------------------------------------------------------- 1 | Sheet Name,Collection,Depth,Documents,Link 2 | 
bqXnVStK7,test4,1,3,link 3 | WrZcYDcSEK,test5,1,1,link 4 | iIuPVhg34y,test4/22222/friends,3,4,link 5 | tCyFRILto0,test4/22222/posts,3,4,link 6 | 7aM3oA2Vey,test4/33333/posts,3,2,link 7 | pdLpX1tVHd,test5/t5d1/subcolHere,3,1,link 8 | ZAeXu3t9C1,test4/22222/posts/1X8WqVY2NTw96Dzsazyi/replies,5,1,link 9 | wba2gQC7m-,test4/22222/posts/ITy7d47LrYE5FDQUwyyv/replies,5,1,link 10 | -------------------------------------------------------------------------------- /samples/csv-indexed/test.WrZcYDcSEK.csv: -------------------------------------------------------------------------------- 1 | doc_id,a.0,a.1,a.2.arr.0,a.2.arr.1.obj.msg,a.2.arr.1.obj.things.0,a.2.num,a.2.obj.str,a.3,ref,b,s,n,o.ts,o.n,o.s2,gp,meNull,ts 2 | t5d1,String 1 in Array,"{""type"":""number"",""data"":12}",nested here,Look down here,"{""type"":""geopoint"",""data"":{""_latitude"":45,""_longitude"":23}}","{""type"":""number"",""data"":13}",123,"{""type"":""geopoint"",""data"":{""_latitude"":14,""_longitude"":15}}","{""type"":""ref"",""data"":""test5/t5d2""}","{""type"":""bool"",""data"":true}",This is a string,"{""type"":""number"",""data"":99.956}","{""type"":""timestamp"",""data"":""2018-08-01T21:59:59.000Z""}","{""type"":""number"",""data"":567}",String in Object,"{""type"":""geopoint"",""data"":{""_latitude"":20,""_longitude"":21}}","{""type"":""null""}","{""type"":""timestamp"",""data"":""2018-08-16T09:12:13.000Z""}" 3 | -------------------------------------------------------------------------------- /samples/csv-indexed/test.ZAeXu3t9C1.csv: -------------------------------------------------------------------------------- 1 | doc_id,text,timestamp 2 | qQELC2QgHbD31ohBSo46,hi,018-03-30T14:01:03.652Z 3 | -------------------------------------------------------------------------------- /samples/csv-indexed/test.bqXnVStK7.csv: -------------------------------------------------------------------------------- 1 | doc_id,posts.0.timestamp,posts.0.text,posts.1.text,posts.1.timestamp,last_name,age,first_name 2 | 
11111,"{""type"":""timestamp"",""data"":""2018-06-30T22:00:00.000Z""}",hello,goodbye,"{""type"":""timestamp"",""data"":""2018-03-30T12:01:03.000Z""}",Phisher,"{""type"":""number"",""data"":22}",Wendy 3 | 22222,,,,,Bagelson,"{""type"":""number"",""data"":27}",Magen 4 | 33333,,,,,Muffty,"{""type"":""number"",""data"":55}",Doug 5 | -------------------------------------------------------------------------------- /samples/csv-indexed/test.iIuPVhg34y.csv: -------------------------------------------------------------------------------- 1 | doc_id,id 2 | 46xcZVeiqnQCWgBVZUrJ,33333 3 | OOpXHouoR4MjyareCxC4,11111 4 | WvRtxN1cSOnYbHkWi7Pc,11111 5 | ilf0GLsxKRncW1PjiFjI,33333 6 | -------------------------------------------------------------------------------- /samples/csv-indexed/test.pdLpX1tVHd.csv: -------------------------------------------------------------------------------- 1 | doc_id,gp 2 | subDoc1,"{""type"":""geopoint"",""data"":{""_latitude"":90,""_longitude"":180}}" 3 | -------------------------------------------------------------------------------- /samples/csv-indexed/test.tCyFRILto0.csv: -------------------------------------------------------------------------------- 1 | doc_id,timestamp,text 2 | 1X8WqVY2NTw96Dzsazyi,2018-03-30T14:01:03.652Z,hello 3 | Acc56NmfZqn4QczPllCE,2018-03-30T14:01:03.652Z,goodbye 4 | C1buCBxe6MttJnKfKCIm,2018-03-30T14:01:03.652Z,goodbye 5 | ITy7d47LrYE5FDQUwyyv,2018-03-30T14:01:03.652Z,hello 6 | -------------------------------------------------------------------------------- /samples/csv-indexed/test.wba2gQC7m-.csv: -------------------------------------------------------------------------------- 1 | doc_id,timestamp,text 2 | GXkDFRSeMIhcEuMZ9GTH,018-03-30T14:01:03.652Z,hi 3 | -------------------------------------------------------------------------------- /samples/test-db.json: -------------------------------------------------------------------------------- 1 | 
{"collection:test-csv":{"88pKzZ7jn3g4OQrRP7eQ":{"Name":"Rug","Price":"99.98","SKU":"n232ej23m","Color":"Orange"},"SsUczOfBOlCyUD7UBCvf":{"Price":"23.34","SKU":"2123m12mxi","Color":"Blue","Name":"Lamp"}},"collection:test-json":{"7HilQv0zeat1Z1KhXCxX":{"last_name":"Phisher","age":"{\"type\":\"number\",\"data\":22}","first_name":"Wendy","id":"{\"type\":\"number\",\"data\":11111}"},"dP6PcSziRVf2IlOVExdH":{"last_name":"Bagelson","age":"{\"type\":\"number\",\"data\":27}","first_name":"Magen","id":"{\"type\":\"number\",\"data\":22222}"},"rvu5fUpFa410ie9W6efF":{"last_name":"Muffty","age":"{\"type\":\"number\",\"data\":55}","first_name":"Doug","id":"{\"type\":\"number\",\"data\":33333}"},"viCuQP0c0iRQAisxy5js":{"last_name":"Jones","age":"{\"type\":\"number\",\"data\":31}","first_name":"Bob","id":"{\"type\":\"number\",\"data\":44444}"}},"collection:test-obj-json":{"11111":{"last_name":"Phisher","age":"{\"type\":\"number\",\"data\":22}","first_name":"Wendy"},"22222":{"last_name":"Bagelson","age":"{\"type\":\"number\",\"data\":27}","first_name":"Magen"},"33333":{"age":"{\"type\":\"number\",\"data\":55}","first_name":"Doug","last_name":"Muffty"},"44444":{"last_name":"Jones","age":"{\"type\":\"number\",\"data\":31}","first_name":"Bob"}},"collection:test-obj-subcoll-json":{"11111":{"last_name":"Phisher","age":"{\"type\":\"number\",\"data\":22}","first_name":"Wendy","posts":[{"timestamp":"2018-03-30T14:01:03.652Z","text":"hello"},{"text":"goodbye","timestamp":"2018-03-30T14:01:03.652Z"}]},"22222":{"last_name":"Bagelson","age":"{\"type\":\"number\",\"data\":27}","first_name":"Magen","collection:friends":{"1kifmmEHtiuNjbrkPBG5":{"id":"11111"},"TAV2ZLfcF9jzLKucjH2N":{"id":"33333"}},"collection:posts":{"Ma4lB7YZH4AWfO4RFzWM":{"timestamp":"2018-03-30T14:01:03.652Z","text":"goodbye"},"WETWHci2Pj203tGUYEwL":{"text":"hello","timestamp":"2018-03-30T14:01:03.652Z","collection:replies":{"z8STAygJkesu1gKRS3FF":{"timestamp":"018-03-30T14:01:03.652Z","text":"hi"}}}}},"33333":{"last_name":"Muffty
","age":"{\"type\":\"number\",\"data\":55}","first_name":"Doug","collection:posts":{"aaaa":{"text":"hello","timestamp":"2018-03-30T14:01:03.652Z"},"bbbb":{"timestamp":"2018-03-30T14:01:03.652Z","text":"goodbye"}}}},"collection:test4":{"11111":{"posts":[{"text":"hello","timestamp":"{\"type\":\"timestamp\",\"data\":\"2018-06-30T22:00:00.000Z\"}"},{"timestamp":"{\"type\":\"timestamp\",\"data\":\"2018-03-30T12:01:03.000Z\"}","text":"goodbye"}],"last_name":"Phisher","age":"{\"type\":\"number\",\"data\":22}","first_name":"Wendy"},"22222":{"last_name":"Bagelson","age":"{\"type\":\"number\",\"data\":27}","first_name":"Magen","collection:friends":{"46xcZVeiqnQCWgBVZUrJ":{"id":"33333"},"OOpXHouoR4MjyareCxC4":{"id":"11111"},"WvRtxN1cSOnYbHkWi7Pc":{"id":"11111"},"ilf0GLsxKRncW1PjiFjI":{"id":"33333"}},"collection:posts":{"1X8WqVY2NTw96Dzsazyi":{"timestamp":"3/30/18","text":"hello","collection:replies":{"qQELC2QgHbD31ohBSo46":{"timestamp":"018-03-30T14:01:03.652Z","text":"hi"}}},"Acc56NmfZqn4QczPllCE":{"text":"goodbye","timestamp":"3/30/18"},"C1buCBxe6MttJnKfKCIm":{"text":"goodbye","timestamp":"3/30/18"},"ITy7d47LrYE5FDQUwyyv":{"text":"hello","timestamp":"3/30/18","collection:replies":{"GXkDFRSeMIhcEuMZ9GTH":{"text":"hi","timestamp":"018-03-30T14:01:03.652Z"}}}}},"33333":{"last_name":"Muffty","age":"{\"type\":\"number\",\"data\":55}","first_name":"Doug","collection:posts":{"aaaa":{"text":"hello","timestamp":"3/30/18"},"bbbb":{"text":"goodbye","timestamp":"3/30/18"}}}},"collection:test5":{"t5d1":{"a":["String 1 in Array","{\"type\":\"number\",\"data\":12}",{"obj":{"str":"123"},"arr":["nested here",{"obj":{"msg":"Look down here","things":["{\"type\":\"geopoint\",\"data\":{\"_latitude\":45,\"_longitude\":23}}"]}}],"num":"{\"type\":\"number\",\"data\":13}"},"{\"type\":\"geopoint\",\"data\":{\"_latitude\":14,\"_longitude\":15}}"],"ref":"{\"type\":\"ref\",\"data\":\"test5/t5d2\"}","b":"{\"type\":\"bool\",\"data\":true}","s":"This is a 
string","n":"{\"type\":\"number\",\"data\":99.956}","o":{"ts":"{\"type\":\"timestamp\",\"data\":\"2018-08-01T21:59:59.000Z\"}","n":"{\"type\":\"number\",\"data\":567}","s2":"String in Object"},"gp":"{\"type\":\"geopoint\",\"data\":{\"_latitude\":20,\"_longitude\":21}}","meNull":"{\"type\":\"null\"}","ts":"{\"type\":\"timestamp\",\"data\":\"2018-08-16T09:12:13.000Z\"}","collection:subcolHere":{"subDoc1":{"gp":"{\"type\":\"geopoint\",\"data\":{\"_latitude\":90,\"_longitude\":180}}"}}}}} 2 | -------------------------------------------------------------------------------- /samples/test-obj-subcoll.json: -------------------------------------------------------------------------------- 1 | { 2 | "11111":{ 3 | "first_name":"Wendy", 4 | "last_name":"Phisher", 5 | "age":22, 6 | "posts":[ 7 | { 8 | "timestamp":"2018-03-30T14:01:03.652Z", 9 | "text":"hello" 10 | }, 11 | { 12 | "timestamp":"2018-03-30T14:01:03.652Z", 13 | "text":"goodbye" 14 | } 15 | ] 16 | }, 17 | "22222":{ 18 | "first_name":"Magen", 19 | "last_name":"Bagelson", 20 | "age":27, 21 | "collection:posts":[ 22 | { 23 | "timestamp":"2018-03-30T14:01:03.652Z", 24 | "text":"hello", 25 | "collection:replies": [ 26 | { 27 | "timestamp": "018-03-30T14:01:03.652Z", 28 | "text": "hi" 29 | } 30 | ] 31 | }, 32 | { 33 | "timestamp":"2018-03-30T14:01:03.652Z", 34 | "text":"goodbye" 35 | } 36 | ], 37 | "collection:friends":[ 38 | { "id": "11111" }, 39 | { "id": "33333" } 40 | ] 41 | }, 42 | "33333":{ 43 | "first_name":"Doug", 44 | "last_name":"Muffty", 45 | "age":55, 46 | "collection:posts":{ 47 | "aaaa":{ 48 | "timestamp":"2018-03-30T14:01:03.652Z", 49 | "text":"hello" 50 | }, 51 | "bbbb":{ 52 | "timestamp":"2018-03-30T14:01:03.652Z", 53 | "text":"goodbye" 54 | } 55 | } 56 | } 57 | } -------------------------------------------------------------------------------- /samples/test-obj.json: -------------------------------------------------------------------------------- 1 | { 2 | "11111": { 3 | "first_name": "Wendy", 4 | 
"last_name": "Phisher", 5 | "age": 22 6 | }, 7 | "22222": { 8 | "first_name": "Magen", 9 | "last_name": "Bagelson", 10 | "age": 27 11 | }, 12 | "33333": { 13 | "first_name": "Doug", 14 | "last_name": "Muffty", 15 | "age": 55 16 | }, 17 | "44444": { 18 | "first_name": "Bob", 19 | "last_name": "Jones", 20 | "age": 31 21 | } 22 | } -------------------------------------------------------------------------------- /samples/test.csv: -------------------------------------------------------------------------------- 1 | Name,Price,SKU,Color 2 | Lamp,23.34,2123m12mxi,Blue 3 | Rug,99.98,n232ej23m,Orange -------------------------------------------------------------------------------- /samples/test.indexed.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codediodeio/firestore-migrator/a24c851009150b7905231ca8308be913e5c7ef69/samples/test.indexed.xlsx -------------------------------------------------------------------------------- /samples/test.json: -------------------------------------------------------------------------------- 1 | [{ 2 | "id": 11111, 3 | "first_name": "Wendy", 4 | "last_name": "Phisher", 5 | "age": 22 6 | }, { 7 | "id": 22222, 8 | "first_name": "Magen", 9 | "last_name": "Bagelson", 10 | "age": 27 11 | }, { 12 | "id": 33333, 13 | "first_name": "Doug", 14 | "last_name": "Muffty", 15 | "age": 55 16 | }, { 17 | "id": 44444, 18 | "first_name": "Bob", 19 | "last_name": "Jones", 20 | "age": 31 21 | }] -------------------------------------------------------------------------------- /samples/test.simple-with-empty-fields.csv: -------------------------------------------------------------------------------- 1 | Name,Price,SKU,Color,Cat 2 | Lamp,23.34,2123m12mxi,Blue,Lights 3 | Rug,99.98,n232ej23m,Orange, 4 | Mug,5.20,lkj123lke,White,Mugs -------------------------------------------------------------------------------- /samples/test.simple-with-empty-fields.xlsx: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/codediodeio/firestore-migrator/a24c851009150b7905231ca8308be913e5c7ef69/samples/test.simple-with-empty-fields.xlsx -------------------------------------------------------------------------------- /samples/test.simple.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codediodeio/firestore-migrator/a24c851009150b7905231ca8308be913e5c7ef69/samples/test.simple.xlsx -------------------------------------------------------------------------------- /src/exportCollection/index.ts: -------------------------------------------------------------------------------- 1 | import * as admin from 'firebase-admin'; 2 | import * as fs from 'fs-extra'; 3 | import * as _ from 'lodash'; 4 | import * as XLSX from 'xlsx'; 5 | import * as shortid from 'shortid'; 6 | import * as dot from 'dot-object'; 7 | 8 | import { sortByKeysFn, decodeDoc } from '../shared'; 9 | 10 | const db = admin.firestore(); 11 | let args; 12 | 13 | export const execute = async (file: string, collectionPaths: string[], options) => { 14 | args = options; 15 | let json = {}; 16 | 17 | // If no collection arguments, select all root collections 18 | if (collectionPaths.length === 0) { 19 | console.log('Selecting root collections...'); 20 | collectionPaths = await db.getCollections().then(colls => colls.map(coll => coll.path)); 21 | } 22 | 23 | console.log('Getting selected collections...'); 24 | getCollections(collectionPaths) 25 | .then(collections => { 26 | 27 | if (file.endsWith('.xlsx')) { 28 | console.log('Writing to Excel:', file); 29 | 30 | const book = json2book(collections); 31 | XLSX.writeFile(book, file); 32 | 33 | } else if (file.endsWith('.csv')) { 34 | console.log('Writing to CSV:', file); 35 | 36 | const book = json2book(collections); 37 | bookWriteCSV(book, file); 38 | 39 | } else { 40 | console.log('Writing to JSON:', 
file); 41 | 42 | return fs.writeJson(file, collections); 43 | } 44 | }) 45 | .then(() => { 46 | console.log('Download was a success!'); 47 | }) 48 | .catch(err => { 49 | console.log('Failure: ', err); 50 | }); 51 | 52 | } 53 | 54 | function getCollections(paths): Promise { 55 | return new Promise(async (resolve, reject) =>{ 56 | let collections = {}; 57 | 58 | try { 59 | // A heavily nested sub-collection-tree will cause a parallel promise explosion, 60 | // so we rather request them sequentially. Might be worth allowing parallel 61 | // recursion upon user request, for smaller trees and faster execution. 62 | for (const path of paths) { 63 | const collection = await getCollection(path); 64 | _.assign(collections, collection); 65 | } 66 | 67 | resolve(collections); 68 | } catch (err) { 69 | reject(err); 70 | } 71 | }); 72 | } 73 | 74 | function getCollection(path): Promise { 75 | let collection = {}; 76 | 77 | return db.collection(path).get().then( async snaps => { 78 | // try { 79 | 80 | if (snaps.size === 0) { 81 | throw `No ducuments in collection: ${path}`; 82 | }; 83 | 84 | for (let snap of snaps.docs) { 85 | let doc = { [snap.id]: snap.data() }; 86 | 87 | // log if requested 88 | args.verbose && console.log(snap.ref.path); 89 | 90 | // Decode Doc 91 | decodeDoc(doc[snap.id]); 92 | 93 | // process sub-collections 94 | if (args.subcolls) { 95 | const subCollPaths = await snap.ref.getCollections().then(colls => colls.map(coll => coll.path)); 96 | if (subCollPaths.length) { 97 | const subCollections = await getCollections(subCollPaths); 98 | _.assign(doc[snap.id], subCollections); 99 | } 100 | } 101 | 102 | // doc to collection 103 | _.assign(collection, doc); 104 | } 105 | // } catch (error) { 106 | // console.log(error); 107 | // } 108 | }).then(() =>{ 109 | const collId = path.split('/').pop(); 110 | const collPath = `${args.collPrefix}:${collId}`; 111 | return ({[collPath]: collection }); 112 | }); 113 | } 114 | 115 | function bookWriteCSV(book: 
XLSX.WorkBook, file: string) { 116 | const fileParts = file.split('.'); 117 | const indexSheet = book.Sheets['INDEX']; 118 | const indexJson = XLSX.utils.sheet_to_json(indexSheet); 119 | 120 | // If only one collection, write single file 121 | const single = book.SheetNames.length === 2; 122 | if (single) { 123 | const sheet = book.Sheets[indexJson[0]['Sheet Name']]; 124 | XLSX.writeFile(book, file, { bookType: 'csv' }); 125 | return; 126 | } 127 | // Otherwise write an index file and csv per collection 128 | 129 | // write index file 130 | const filename = [...fileParts]; 131 | filename.splice(-1, 0, 'INDEX'); 132 | XLSX.writeFile(book, filename.join('.'), { bookType: 'csv', sheet: 'INDEX' }); 133 | 134 | // write collection files 135 | indexJson.forEach(index => { 136 | const sheetName = index['Sheet Name']; 137 | const sheet = book.Sheets[sheetName]; 138 | const filename = [...fileParts]; 139 | filename.splice(-1, 0, sheetName ); 140 | XLSX.writeFile(book, filename.join('.'), { bookType: 'csv', sheet: sheetName }); 141 | }); 142 | } 143 | 144 | function json2book(json): XLSX.WorkBook { 145 | let book = XLSX.utils.book_new(); 146 | const collPrefixSliceLength = (args.collPrefix).length + 1; 147 | const collectionIndex = []; 148 | 149 | book.Props = { 150 | ...book.Props, 151 | Title: 'FireStore Export', 152 | Author: 'firestore-migrator', 153 | CreatedDate: new Date() 154 | } 155 | 156 | const addCollection = (coll, path:string) => { 157 | const sheetName = shortid.generate(); 158 | const docs = []; 159 | 160 | // Turn key'd document objects into an array of flat documents objects, each with a document id 161 | _.forEach(coll, (doc, id:string) => { 162 | // process any sub-collections 163 | const subCollFields = Object.keys(doc).filter(key => key.startsWith(args.collPrefix+':')); 164 | subCollFields.forEach(name => { 165 | addCollection(doc[name], [path, id, name.slice(collPrefixSliceLength)].join('/')); 166 | delete(doc[name]); 167 | }); 168 | 169 | // flatten 
objects 170 | const flatDoc = dot.dot(doc); 171 | 172 | docs.push({ [args.idField]: id, ...flatDoc }); 173 | }); 174 | 175 | // add collection sheet to book 176 | const sheet = XLSX.utils.json_to_sheet(docs); 177 | XLSX.utils.book_append_sheet(book, sheet, sheetName); 178 | 179 | // add an index entry 180 | collectionIndex.push({ 181 | sheetName, 182 | path, 183 | depth: path.split('/').length, 184 | count: docs.length 185 | }); 186 | 187 | }; 188 | 189 | // process collections 190 | _.forEach(json, (coll, key) => { 191 | addCollection(coll, key.slice(collPrefixSliceLength)); 192 | }); 193 | 194 | // index sheet 195 | const indexSheet = XLSX.utils.aoa_to_sheet([ 196 | ['Sheet Name', 'Collection', 'Depth', 'Documents', 'Link'] 197 | ]); 198 | collectionIndex.sort(sortByKeysFn(['depth', 'path'])); 199 | collectionIndex.forEach((coll, index) => { 200 | const n = index + 2; 201 | indexSheet['!ref'] = `A1:E${n}`; 202 | indexSheet[`A${n}`] = { t: 's', v: coll.sheetName }; 203 | indexSheet[`B${n}`] = { t: 's', v: coll.path }; 204 | indexSheet[`C${n}`] = { t: 'n', v: +coll.depth }; 205 | indexSheet[`D${n}`] = { t: 'n', v: +coll.count }; 206 | indexSheet[`E${n}`] = { t: 's', v: 'link', l: { Target: `#${coll.sheetName}!A1` }}; 207 | }); 208 | XLSX.utils.book_append_sheet(book, indexSheet, 'INDEX'); 209 | 210 | 211 | return book; 212 | } 213 | 214 | -------------------------------------------------------------------------------- /src/importCollection/index.ts: -------------------------------------------------------------------------------- 1 | import * as admin from 'firebase-admin'; 2 | import * as fs from 'fs-extra'; 3 | import * as _ from 'lodash'; 4 | import * as XLSX from 'xlsx'; 5 | import * as dot from 'dot-object'; 6 | 7 | import { encodeDoc, cleanCollectionPath, isCollectionPath, isDocumentPath } from '../shared'; 8 | 9 | 10 | const db = admin.firestore(); 11 | let batch = db.batch(); 12 | let batchCount = 0; 13 | let totalSetCount = 0; 14 | let totalDelCount = 0; 15 
| let args; 16 | let delPaths = []; 17 | 18 | export const execute = async (file: string, collections: string[], options) => { 19 | args = options; 20 | if( args.dryRun ) args.verbose = true; 21 | 22 | try { 23 | 24 | if( collections.length === 0 ) { 25 | // root if no paths 26 | collections = ['/']; 27 | } else { 28 | // clean all collection paths 29 | collections.map(cleanCollectionPath); 30 | // root overrides all other selections 31 | if (collections.includes('/')) { 32 | collections = ['/']; 33 | } 34 | } 35 | 36 | let data = {}; 37 | 38 | if (file.endsWith(".json")) { 39 | data = await readJSON(file, collections); 40 | } 41 | 42 | else if (file.endsWith('.xlsx')) { 43 | data = await readXLSXBook(file, collections); 44 | } 45 | 46 | else if (file.endsWith(".csv")) { 47 | data = await readCSV(file, collections); 48 | } 49 | 50 | else { 51 | throw "Unknown file extension. Supports .json, .csv or .xlsx!"; 52 | } 53 | 54 | await writeCollections(data); 55 | 56 | // Final Batch commit and completion message. 57 | await batchCommit(false); 58 | console.log(args.dryRun 59 | ? 'Dry-Run complete, Firestore was not updated.' 60 | : 'Import success, Firestore updated!' 
61 | ); 62 | args.truncate && console.log(`Total documents deleted: ${totalDelCount}`); 63 | console.log(`Total documents written: ${totalSetCount}`); 64 | 65 | } catch (error) { 66 | console.log("Import failed: ", error); 67 | } 68 | 69 | 70 | } 71 | 72 | 73 | // Firestore Write/Batch Handlers 74 | async function batchDel(ref: FirebaseFirestore.DocumentReference) { 75 | // Log if requested 76 | args.verbose && console.log(`Deleting: ${ref.path}`); 77 | 78 | // Mark for batch delete 79 | ++totalDelCount; 80 | await batch.delete(ref); 81 | 82 | // Commit batch on chunk size 83 | if (++batchCount % args.chunk === 0) { 84 | await batchCommit() 85 | } 86 | 87 | } 88 | 89 | async function batchSet(ref: FirebaseFirestore.DocumentReference, item, options) { 90 | // Log if requested 91 | args.verbose && console.log(`Writing: ${ref.path}`); 92 | 93 | // Set the Document Data 94 | ++totalSetCount; 95 | await batch.set(ref, item, options); 96 | 97 | // Commit batch on chunk size 98 | if (++batchCount % args.chunk === 0) { 99 | await batchCommit() 100 | } 101 | } 102 | 103 | async function batchCommit(recycle:boolean = true) { 104 | // Nothing to commit 105 | if (!batchCount) return; 106 | // Don't commit on Dry Run 107 | if (args.dryRun) return; 108 | 109 | // Log if requested 110 | args.verbose && console.log('Committing write batch...') 111 | 112 | // Commit batch 113 | await batch.commit(); 114 | 115 | // Get a new batch 116 | if (recycle) { 117 | batch = db.batch(); 118 | batchCount = 0; 119 | } 120 | } 121 | 122 | function writeCollections(data): Promise { 123 | const promises = []; 124 | _.forEach(data, (docs, coll) => { 125 | promises.push( 126 | writeCollection(docs, coll) 127 | ); 128 | }); 129 | return Promise.all(promises); 130 | } 131 | 132 | function writeCollection(data:JSON, path: string): Promise { 133 | return new Promise(async (resolve, reject) => { 134 | const colRef = db.collection(path); 135 | 136 | if (args.truncate) { 137 | await 
truncateCollection(colRef); 138 | } 139 | 140 | const mode = (data instanceof Array) ? 'array' : 'object'; 141 | for ( let [id, item] of Object.entries(data)) { 142 | 143 | // doc-id preference: object key, invoked --id field, auto-id 144 | if (mode === 'array') { 145 | id = args.autoId; 146 | } 147 | if (_.hasIn(item, args.id)) { 148 | id = item[args.id].toString(); 149 | delete(item[args.id]); 150 | } 151 | if (!id || (id.toLowerCase() === args.autoId.toLowerCase()) ) { 152 | id = colRef.doc().id; 153 | } 154 | 155 | // Look for and process sub-collections 156 | const subColKeys = Object.keys(item).filter(k => k.startsWith(args.collPrefix+':')); 157 | for ( let key of subColKeys ) { 158 | const subPath = [path, id, key.slice(args.collPrefix.length + 1) ].join('/'); 159 | await writeCollection(item[key], subPath); 160 | delete item[key]; 161 | } 162 | 163 | // Encode item to Firestore 164 | encodeDoc(item); 165 | 166 | // set document data into path/id 167 | const docRef = colRef.doc(id); 168 | await batchSet(docRef, item, { merge: !!(args.merge) }); 169 | 170 | } 171 | 172 | resolve(); 173 | }); 174 | } 175 | 176 | async function truncateCollection(colRef: FirebaseFirestore.CollectionReference) { 177 | // TODO: Consider firebase-tools:delete 178 | 179 | const path = colRef.path; 180 | if (delPaths.includes(path)) { 181 | // Collection Path already processed 182 | return; 183 | } 184 | delPaths.push(path); 185 | 186 | await colRef.get().then(async (snap) => { 187 | for (let doc of snap.docs) { 188 | // recurse sub-collections 189 | const subCollPaths = await doc.ref.getCollections(); 190 | for (let subColRef of subCollPaths) { 191 | await truncateCollection(subColRef); 192 | } 193 | // mark doc for deletion 194 | await batchDel(doc.ref); 195 | } 196 | }); 197 | } 198 | 199 | // File Handling Helpers 200 | function dataFromJSON(json) { 201 | _.forEach(json, row => { 202 | dot.object(row); 203 | }); 204 | return json; 205 | } 206 | 207 | function 
// NOTE(review): this chunk begins mid-file — the `function` keyword for
// dataFromSheet is on the preceding (unseen) line.
// Converts one XLSX sheet into decoded document data via dataFromJSON().
dataFromSheet(sheet) {
    const json = XLSX.utils.sheet_to_json(sheet);
    return dataFromJSON(json);
}

// Parses a CSV file (read through the XLSX library, which exposes CSV
// content as a single sheet named 'Sheet1') into raw JSON rows.
function JSONfromCSV(file:string) {
    const book = XLSX.readFile(file);
    const sheet = book.Sheets['Sheet1'];
    return XLSX.utils.sheet_to_json(sheet);
}

// CSV file -> decoded document data (rows run through dataFromJSON()).
function datafromCSV(file:string) {
    const json = JSONfromCSV(file);
    return dataFromJSON(json);
}



// File Handlers

// Reads a JSON import file and maps its contents onto the requested
// collection paths. Supported input shapes:
//   - JSON array of docs                  -> one explicitly named collection
//   - keyed object of docs                -> one explicitly named collection
//   - object with '<collPrefix>:' keys    -> selected collections, or all ('/')
// Resolves with { collectionPath: docs } or rejects with a message string.
// NOTE(review): async executor inside `new Promise` — if fs.readJSON throws,
// the error surfaces as an unhandled rejection instead of via reject().
function readJSON(path: string, collections: string[]): Promise {
    return new Promise(async (resolve, reject) => {
        const json = await fs.readJSON(path);
        const data = {};

        // Top-level array vs. object decides which import mode applies.
        const mode = (json instanceof Array) ? 'array' : 'object';

        // Array of Docs, Single Anonymous Collection;
        if (mode === 'array') {
            const coll = collections[0];
            if (coll === '/' || collections.length > 1 || isDocumentPath(coll)) {
                reject('Specify single target collection path for import of JSON array of documents.');
                return;
            }
            data[coll] = json;
            resolve(data);
            return;
        }

        // Keys labelled '<collPrefix>:<name>' mark exported collections.
        const rootJsonCollections = Object.keys(json).filter(k => k.startsWith(args.collPrefix + ':'));

        // Docs of Keyed Objects, Single Anonymous Collection;
        if (rootJsonCollections.length === 0) {
            const coll = collections[0];
            if (coll === '/' || collections.length > 1 || isDocumentPath(coll)) {
                reject('Specify single target collection path for import of JSON keyed object documents.');
                return;
            }

            data[collections[0]] = json;
            resolve(data);
            return;
        }

        // Selected Collections;
        if (collections[0] !== '/') {
            collections.forEach(collection => {
                if (isDocumentPath(collection)) {
                    console.log('ISDOC'); // NOTE(review): leftover debug output — TODO remove
                    reject(`Invalid collection path: ${collection}`);
                    // NOTE(review): returns only from the forEach callback; the
                    // loop keeps going and resolve(data) below is still reached
                    // (a no-op once the promise has been rejected).
                    return;
                };

                // Re-label path segments for dot-path lookup: even indices are
                // collection names, odd indices are document ids, e.g.
                // 'users/u1/posts' -> 'collection:users.u1.collection:posts'.
                const labelledPath = collection.split('/').map((segment, index) => {
                    return (index % 2 === 0) ? args.collPrefix + ':' + segment : segment;
                }).join('.');

                const coll = dot.pick(labelledPath, json);
                if (!coll) {
                    reject(`Source JSON file contains no collection named: ${collection}`);
                    return;
                }

                data[collection] = coll;
            });
            resolve(data);
            return;
        }

        // All Collections from JSON file
        if (collections[0] === '/') {
            rootJsonCollections.forEach(coll => {
                // Strip the '<collPrefix>:' label to recover the raw path.
                // NOTE(review): String.prototype.substr is deprecated; slice()
                // is the modern equivalent.
                const path = coll.substr(args.collPrefix.length + 1);
                data[path] = json[coll];
            })
            resolve(data);
            return;
        }

        // Import options exhausted
        // NOTE(review): unreachable — the two branches above cover both
        // collections[0] !== '/' and === '/'.
        reject(`Invalid import options`);
    });
}


// Reads a CSV import file. Two modes:
//   - plain CSV        -> exactly one explicitly named collection
//   - '*.INDEX.csv'    -> index file mapping collection paths to sibling
//                         per-collection CSV files (see samples/csv-indexed/)
function readCSV(file: string, collections: string[]): Promise {
    return new Promise((resolve, reject) => {
        let lineCount = 0; // NOTE(review): never read or written again — unused
        let data = {};

        // Single Mode CSV, single collection
        if (!file.endsWith('INDEX.csv')) {
            args.verbose && console.log(`Mode: Single CSV Collection`);

            if (collections.length > 1) {
                reject('Multiple collection import from CSV requires an *.INDEX.csv file.');
                return;
            }
            const collection = collections[0];
            if (collection === '/') {
                reject('Specify a collection for single mode CSV import.');
                return;
            }
            data[collection] = datafromCSV(file);
            resolve(data);
            return;
        }

        const index = JSONfromCSV(file);

        // Indexed Mode CSV, selected collections and sub-cols
        if (collections[0] !== '/') {
            args.verbose && console.log(`Mode: Selected collections from Indexed CSV`);
            collections.forEach(collection => {
                // Prefix match on whole path segments: the appended '/' makes
                // 'users' match 'users/u1/posts' but not 'users2'.
                const colls = index.filter(coll => (coll['Collection'] + '/').startsWith(collection + '/'));
                if (colls.length) {
                    colls.forEach(coll => {
                        const colPath = coll['Collection'];
                        const sheetName = coll['Sheet Name'];
                        // Swap the next-to-last filename segment for the sheet
                        // name: test.INDEX.csv -> test.<sheetName>.csv
                        const fileParts = file.split('.');
                        fileParts.splice(-2,1,sheetName);
                        const fileName = fileParts.join('.');
                        data[colPath] = datafromCSV(fileName);
                    });
                } else {
                    reject(`INDEX contains no paths matching: ${collection}`);
                    // NOTE(review): exits the callback only; resolve(data)
                    // below still runs (a no-op after rejection).
                    return;
                }
            });
            resolve(data);
            return;
        }

        // Indexed Mode CSV, all collections
        if (collections[0] === '/') {
            args.verbose && console.log(`Mode: All collections from Indexed CSV`);
            const collection = collections[0]; // NOTE(review): unused
            _.forEach(index, coll => {
                const colPath = coll['Collection'];
                const sheetName = coll['Sheet Name'];
                const fileParts = file.split('.');
                fileParts.splice(-2,1,sheetName);
                const fileName = fileParts.join('.');
                data[colPath] = datafromCSV(fileName);
            });
            resolve(data);
            return;
        }

        // Import options exhausted
        // NOTE(review): unreachable — see the two branches above.
        reject(`Invalid collections or CSV`);

    });
}


// Reads an XLSX workbook. A single sheet (--sheet, or a one-sheet book)
// imports into one named collection; otherwise the 'INDEX' sheet maps
// collection paths to sheet names, for selected collections or all ('/').
function readXLSXBook(path, collections: string[]): Promise {

    return new Promise((resolve, reject) => {
        const book = XLSX.readFile(path);
        const sheetCount = book.SheetNames.length;
        const indexSheet = book.Sheets['INDEX'];
        let data = {};

        // --sheet option; a one-sheet workbook implies sheet #1.
        let sheetNum = args.sheet;
        if ((sheetCount === 1) && (sheetNum == undefined)) {
            sheetNum = 1;
        }

        // Single Sheet as Collection, typically from Non-Indexed Workbook
        if (sheetNum !== undefined) {
            args.verbose && console.log(`Mode: Single XLSX Sheet #${sheetNum}`);
            const collection = collections[0];
            if(isDocumentPath(collection)) {
                reject(`Invalid collection path for single collection: ${collection}`);
                return;
            }
            // Sheet numbers are 1-based on the CLI; SheetNames is 0-based.
            const sheetName = book.SheetNames[+sheetNum - 1];
            const sheet = book.Sheets[sheetName];
            if (!sheet) {
                reject(`Sheet #${sheetNum} not found in workbook`);
                return;
            }
            data[collection] = dataFromSheet(sheet);
            resolve(data);
            return;
        }

        const index = XLSX.utils.sheet_to_json(indexSheet);

        // Selected Collections and Sub Colls from Indexed Workbook
        if (collections[0] !== '/') {
            args.verbose && console.log('Mode: Selected Sheets from indexed XLSX Workbook');
            collections.forEach(collection => {
                const colls = index.filter(coll => (coll['Collection'] + '/').startsWith(collection + '/'));
                if (colls.length) {
                    colls.forEach(coll => {
                        const colPath = coll['Collection'];
                        const sheetName = coll['Sheet Name'];
                        const sheet = book.Sheets[sheetName];
                        data[colPath] = dataFromSheet(sheet);
                    });
                } else {
                    reject(`INDEX contains no paths matching: ${collection}`);
                    // NOTE(review): exits the callback only — same pattern as readCSV.
                    return;
                }
            });
            resolve(data);
            return;
        }

        // All Collections from Indexed Workbook
        if (collections[0] === '/') {
            args.verbose && console.log('Mode: All Sheets from indexed XLSX Workbook');
            const collection = collections[0];
            _.forEach(index, coll => {
                const sheetName = coll['Sheet Name'];
                const path = cleanCollectionPath([collection, coll['Collection']]);
                const sheet = book.Sheets[sheetName];
                data[path] = dataFromSheet(sheet);
            });
            resolve(data);
            return;
        }

        // Import options exhausted
        // NOTE(review): unreachable — see the two branches above.
        reject(`Invalid collections`);
    });
}
--------------------------------------------------------------------------------
/src/index.ts:
--------------------------------------------------------------------------------
#!/usr/bin/env node

import * as admin from 'firebase-admin';
import * as args from 'commander';
import * as _ from 'lodash';

// Firebase App Initialization
var serviceAccount = require("../credentials.json");
admin.initializeApp({
    credential: admin.credential.cert(serviceAccount)
});

import * as importCollection from
'./importCollection';
import * as exportCollection from './exportCollection';

// Help Descriptions
const rootDescription = [
    'Import/Export JSON data to/from a Firestore Database'
].join('\n').replace(/^/gm, ' ');

const rootHelp = [
    '',
    'For command specific help try:',
    ' fire-migrate import -h',
    ' fire-migrate export -h',
    ''
].join('\n').replace(/^/gm, ' ');

const importDescription = [
    'Import JSON data to a Firestore collection',
    ' Optionally converts Excel or CSV to JSON before import.'
].join('\n').replace(/^/gm, ' ');; // NOTE(review): stray second semicolon

const importHelp = [
    '','Examples:','',
    ' fire-migrate import --dry-run test.json myCollection',
    ' fire-migrate import --merge test.INDEX.csv myCollection',
    ' fire-migrate i -m --id docid test.xlsx myCollection',
    ''
].join('\n').replace(/^/gm, ' ');

const exportDescription =
    'Export Firestore collection(s) to a JSON file';

const exportHelp = [
    '','Examples:','',
    ' fire-migrate export --verbose --no-subcolls myCollectionRootLevel.json myCollection',
    ' fire-migrate export users-posts.json users posts',
    ' fire-migrate e -v firestore-dump.xlsx',
    ''
].join('\n').replace(/^/gm, ' ');


// Some option helper functions

// Clamp --chunk to Firestore's 500-operations-per-batch limit.
// NOTE(review): commander passes option values as strings; lodash's clamp
// coerces via toNumber so this works, but an explicit `+v` would be clearer.
function parseChunk(v:number) {
    return _.clamp(v, 1, 500);
}


// Base options
args.version('0.3.0')
    .description(rootDescription)
    .on('--help', () => {
        console.log(rootHelp);
    });


// Import options
// NOTE(review): the extraction of this file appears to have stripped an
// angle-bracketed `<file>` token from .arguments() below (the examples in
// importHelp all take a file argument) — confirm against the original source.
args.command('import')
    .alias('i')
    .description(importDescription)
    .arguments(' [collections...]')
    .option('-i, --id [field]', 'Field to use for Document IDs.', 'doc_id')
    .option('-a, --auto-id [str]', 'Document ID token specifying auto generated Document ID.', 'Auto-ID')
    .option('-m, --merge', 'Merge Firestore documents. Default is Replace.')
    .option('-k, --chunk [size]', 'Split upload into batches. Max 500 by Firestore constraints.', parseChunk, 500 )
    .option('-p, --coll-prefix [prefix]', '(Sub-)Collection prefix.', 'collection')
    .option('') // blank option: renders as a spacer line in --help output
    .option('-s, --sheet [#]', 'Single mode XLSX Sheet # to import.')
    .option('')
    .option('-T, --truncate', 'Delete all documents from target collections before import.')
    .option('')
    .option('-d, --dry-run', 'Perform a dry run, without committing data. Implies --verbose.')
    .option('-v, --verbose', 'Output document insert paths')
    .action((file, collections, options) => {
        importCollection.execute(file, collections, options);
    }).on('--help', () => {
        console.log(importHelp);
    });


// Export options
// NOTE(review): same suspected `<file>` stripping in the command signature.
args.command('export [collections...]')
    .alias('e')
    .description('Export Firestore collection(s) to a JSON/XLSX/CSV file')
    .option('-n, --no-subcolls', 'Do not export sub-collections.')
    .option('-p, --coll-prefix [prefix]', 'Collection prefix', 'collection')
    .option('-i, --id-field [id]', 'Field name to use for document IDs', 'doc_id')
    // .option('')
    // .option('-x, --separator [/]', 'Collection/Document path separator', '/' )
    .option('')
    .option('-v, --verbose', 'Output traversed document paths')
    .action((file, collections, options) => {
        exportCollection.execute(file, collections, options);
    }).on('--help', () => {
        console.log(exportHelp)
    });


args.parse(process.argv);
--------------------------------------------------------------------------------
/src/shared.ts:
--------------------------------------------------------------------------------
import * as admin from 'firebase-admin';
import * as _ from 'lodash';
import { DocumentReference } from '@google-cloud/firestore';
import { isNull } from 'util';

// Shared Firestore handle, used by ReferenceFH to rebuild DocumentReferences.
const db = admin.firestore();
| const toType = function(obj) { 11 | return ({}).toString.call(obj).match(/\s([a-zA-Z]+)/)[1].toLowerCase(); 12 | } 13 | 14 | const arrayOrObject = function(test) { 15 | return ((test instanceof Array) || (toType(test) === 'object')); 16 | } 17 | 18 | 19 | 20 | export const cleanCollectionPath = (path:string | string[]): string => { 21 | if (typeof path === 'string') { 22 | const p = ('/' + path + '/').replace(/\/{2,}/,'/').split('/').filter(v=>!!v).join('/'); 23 | return p || '/'; 24 | } else { 25 | return cleanCollectionPath(path.join('/')); 26 | } 27 | } 28 | 29 | export const isCollectionPath = (path: string | string[]): boolean => { 30 | const sections = cleanCollectionPath(path).split('/').length; 31 | return sections % 2 === 1; 32 | } 33 | 34 | export const isDocumentPath = (path: string | string[]): boolean => { 35 | const sections = cleanCollectionPath(path).split('/').length; 36 | return sections % 2 === 0; 37 | } 38 | 39 | 40 | // Field Handlers, en/decoders, etc 41 | 42 | class FieldHandler { 43 | 44 | constructor(protected prefix: string = 'prefix') {} 45 | 46 | 47 | public isDecodeType = (key: string, val, doc): boolean => { 48 | return false; 49 | }; 50 | protected decodeFn = (key: string, val, doc) => { 51 | return val; 52 | }; 53 | public decode = (key: string, val, doc) => { 54 | return JSON.stringify({ type: this.prefix, data: this.decodeFn(key, val, doc) }); 55 | }; 56 | 57 | 58 | public isEncodeType = (key: string, val, doc): boolean => { 59 | if (typeof val !== 'string') return false; 60 | return val.startsWith(`{"type":"${this.prefix}"`); 61 | }; 62 | protected encodeFn = (key: string, val, doc) => { 63 | const {type, data} = val; 64 | return data; 65 | 66 | }; 67 | public encode = (key: string, val, doc) => { 68 | return this.encodeFn(key, JSON.parse(val), doc); 69 | } 70 | } 71 | 72 | class GeoPointFH extends FieldHandler { 73 | isDecodeType = (key: string, val, doc) => { 74 | return (val instanceof admin.firestore.GeoPoint); 75 | }; 76 | 
encodeFn = (key: string, val, doc) => { 77 | const {data} = val; 78 | return new admin.firestore.GeoPoint(data._latitude, data._longitude); 79 | } 80 | } 81 | 82 | class BooleanFH extends FieldHandler { 83 | isDecodeType = (key, val, doc) => { 84 | return (typeof val === 'boolean'); 85 | }; 86 | encodeFn = (key: string, val, doc) => { 87 | const {data} = val; 88 | return data; 89 | }; 90 | } 91 | 92 | class TimeStampFH extends FieldHandler { 93 | isDecodeType = (key, val, doc) => { 94 | return (val instanceof Date); 95 | }; 96 | encodeFn = (key: string, val, doc) => { 97 | const {data} = val; 98 | return new Date(data); 99 | }; 100 | } 101 | 102 | class NumberFH extends FieldHandler { 103 | isDecodeType = (key, val, doc) => { 104 | return (typeof val === 'number'); 105 | }; 106 | 107 | public isEncodeType = (key: string, val, doc): boolean => { 108 | // simple numbers, or number-like strings 109 | if (+val === +val) return true; 110 | if (typeof val !== 'string') return false; 111 | return val.startsWith(`{"type":"${this.prefix}"`); 112 | }; 113 | public encode = (key: string, val, doc) => { 114 | if (+val === +val) { 115 | return +val; 116 | } 117 | return this.encodeFn(key, JSON.parse(val), doc); 118 | } 119 | } 120 | 121 | class ReferenceFH extends FieldHandler { 122 | isDecodeType = (key: string, val, doc) => { 123 | return (val instanceof admin.firestore.DocumentReference); 124 | }; 125 | decodeFn = (key: string, val:DocumentReference, doc) => { 126 | return val.path; 127 | }; 128 | encodeFn = (key: string, val, doc) => { 129 | const {data} = val; 130 | return db.doc(data); 131 | } 132 | } 133 | 134 | class NullFH extends FieldHandler { 135 | isDecodeType = (key: string, val, doc) => { 136 | return isNull(val); 137 | }; 138 | decode = (key: string, val, doc) => { 139 | return JSON.stringify({ type: this.prefix }); 140 | }; 141 | encode = (key: string, val, doc) => { 142 | return null; 143 | } 144 | } 145 | 146 | 147 | class ArrayOrObjectFH extends FieldHandler 
{ 148 | isDecodeType = (key: string, val, doc) => { 149 | return arrayOrObject(val); 150 | }; 151 | decode = (key: string, val, doc) => { 152 | decodeDoc(val); 153 | return val; 154 | }; 155 | isEncodeType = (key: string, val, doc): boolean => { 156 | return arrayOrObject(val); 157 | }; 158 | encode = (key: string, val, doc) => { 159 | encodeDoc(val); 160 | return val; 161 | }; 162 | } 163 | 164 | class TestFH extends FieldHandler { 165 | isDecodeType = (key, val, doc) => { 166 | if (['_a'].includes(key)) { 167 | // if (1) { 168 | console.log(`Test isDecode on ${key} = ${toType(val)}`); 169 | console.log('typeof', typeof val); 170 | console.log('instanceof', val instanceof Object); 171 | console.log('isNull', isNull(val)); 172 | console.log('val', val); 173 | } 174 | return false; 175 | }; 176 | isEncodeType = (key, val, doc) => { 177 | if (key==='o') { 178 | console.log(`Test isEncode on ${key} = ${toType(val)}`); 179 | console.log('typeof', typeof val); 180 | console.log('instanceof', val instanceof Object); 181 | console.log('val', val); 182 | } 183 | return false; 184 | } 185 | } 186 | 187 | // decodeDoc() and encodeDoc() traverses specialFieldTypes[] in order of appearance for every field (and nested fields) 188 | // of every document of every collection, and uses only the first matched handler per field. 189 | // So list FieldHandlers by descending order of your typical field type use. But always keep ArrayOrObjectFH last, since matching 190 | // objects is tricky and will result in a false positives if moved up the chain. 
191 | const specialFieldTypes: FieldHandler[] = [ 192 | // new TestFH('test'), 193 | new GeoPointFH('geopoint'), 194 | new BooleanFH('bool'), 195 | new TimeStampFH('timestamp'), 196 | new NumberFH('number'), 197 | new ReferenceFH('ref'), 198 | new NullFH('null'), 199 | new ArrayOrObjectFH() 200 | ]; 201 | 202 | // Decode from Firestore field 203 | export function decodeDoc(doc) { 204 | _.forEach(doc, (fieldValue, fieldName) => { 205 | const fieldHandler = specialFieldTypes.find(fieldHandler => fieldHandler.isDecodeType(fieldName, fieldValue, doc)); 206 | if (!fieldHandler) return; 207 | doc[fieldName] = fieldHandler.decode(fieldName, fieldValue, doc); 208 | }); 209 | } 210 | 211 | // Encode to Firestore field 212 | export function encodeDoc(doc) { 213 | _.forEach(doc, (fieldValue, fieldName) => { 214 | const fieldHandler = specialFieldTypes.find(fieldHandler => fieldHandler.isEncodeType(fieldName, fieldValue, doc)); 215 | if (!fieldHandler) return; 216 | doc[fieldName] = fieldHandler.encode(fieldName, fieldValue, doc); 217 | }); 218 | } 219 | 220 | 221 | 222 | 223 | 224 | // Sorting utils 225 | 226 | export interface SortTuple { 227 | [key: string]: 'ASC' | 'DSC'; 228 | } 229 | 230 | export function sortByKeysFn(keys: string | (string | SortTuple)[] ): (a:any, b:any) => number { 231 | const sortTuples: SortTuple[] = []; 232 | const dir = 'ASC'; 233 | 234 | if (typeof keys === 'string') { 235 | sortTuples.push({ [keys]: dir }); 236 | } else { 237 | _.forEach(keys, key => sortTuples.push( typeof key === 'string' ? {[key]: dir} : key )); 238 | } 239 | 240 | return (a, b) => { 241 | let sort = 0; 242 | for(let tuple of sortTuples) { 243 | if (sort !== 0) break; 244 | for(let key in tuple) { 245 | if (tuple[key] === 'ASC') { 246 | sort= a[key] > b[key] ? 1 247 | : a[key] < b[key] ? -1 248 | : 0; 249 | } else { 250 | sort= a[key] > b[key] ? -1 251 | : a[key] < b[key] ? 
1 252 | : 0; 253 | } 254 | } 255 | } 256 | return sort; 257 | }; 258 | } -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "outDir": "./dist/", 4 | "noImplicitAny": false, 5 | "module": "commonjs", 6 | "target": "es5", 7 | "allowJs": true, 8 | "sourceMap": true, 9 | "moduleResolution": "node", 10 | "lib": [ 11 | "es2017" 12 | ], 13 | "types": [ 14 | "node" 15 | ] 16 | }, 17 | "include": [ 18 | "src/**/*" 19 | ] 20 | } --------------------------------------------------------------------------------