├── .gitignore ├── .eslintignore ├── .DS_Store ├── preview.png ├── .eslintrc.json ├── tests └── createXlsx.js ├── package.json ├── template-html-diagram.html ├── README.md ├── extract-mongo-schema.js └── cli.js /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | *.xlsx 3 | 4 | -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | node_modules/* 2 | **/node_modules/* 3 | -------------------------------------------------------------------------------- /.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/perak/extract-mongo-schema/HEAD/.DS_Store -------------------------------------------------------------------------------- /preview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/perak/extract-mongo-schema/HEAD/preview.png -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "airbnb-base", 3 | "env": { 4 | "es6": true, 5 | "node": true, 6 | "mocha": true 7 | } 8 | } -------------------------------------------------------------------------------- /tests/createXlsx.js: -------------------------------------------------------------------------------- 1 | const xlsx = require("xlsx"); 2 | 3 | var wb = xlsx.utils.book_new(); 4 | var wsName = "SheetJS"; 5 | var wsData = [["a", "b", "c"], ["1", "2", "3"]]; 6 | var ws = xlsx.utils.aoa_to_sheet(wsData); 7 | xlsx.utils.book_append_sheet(wb, ws, wsName); 8 | xlsx.writeFile(wb, "out.xlsx"); -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "extract-mongo-schema", 3 | "version": "0.2.12", 4 | "description": "Extract (and visualize) schema from Mongo database (including foreign keys)", 5 | "main": "extract-mongo-schema.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "repository": { 10 | "type": "git", 11 | "url": "https://github.com/perak/extract-mongo-schema.git" 12 | }, 13 | "keywords": [ 14 | "mongodb", 15 | "mongo", 16 | "schema", 17 | "data model", 18 | "foreign keys", 19 | "erm", 20 | "er diagram" 21 | ], 22 | "preferGlobal": "true", 23 | "bin": { 24 | "extract-mongo-schema": "cli.js" 25 | }, 26 | "author": "Petar Korponaić ", 27 | "license": "MIT", 28 | "dependencies": { 29 | "command-line-args": "^3.0.5", 30 | "fs": "0.0.1-security", 31 | "mongodb": "^3.5.8", 32 | "path": "^0.12.7", 33 | "xlsx": "^0.17.0" 34 | }, 35 | "devDependencies": { 36 | "eslint": "^5.14.0", 37 | "eslint-config-airbnb": "^17.1.0", 38 | "eslint-config-airbnb-base": "^13.1.0", 39 | "eslint-plugin-import": "^2.16.0" 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /template-html-diagram.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | Mongo Schema 4 | 5 | 37 | 38 | 39 | 40 |
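<!-- cli.js injects the extracted schema into this template by replacing its {/*DATA_HERE*/} placeholder with the schema JSON; the page then renders it as an entity-relationship diagram using dagre/d3 (see README.md and preview.png). -->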
41 |
42 | 43 |
44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 202 | 203 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Extract Mongo Schema 2 | 3 | Extract (and visualize) schema from Mongo database, including foreign keys. Output is simple json file or html with dagre/d3.js diagram (depending on command line options). 4 | 5 | ## Installation 6 | 7 | ```sh 8 | npm -g install extract-mongo-schema 9 | ``` 10 | 11 | ## Usage 12 | 13 | ```sh 14 | 15 | Usage: 16 | extract-mongo-schema -d connection_string -o schema.json -f json 17 | -u, --authSource Database for authentication. Example: "admin". 18 | -d, --database Database connection string. Example: "mongodb://localhost:3001/meteor". 19 | -o, --output Output file 20 | -f, --format Output file format. Can be "json", "html-diagram" or "xlsx". Default is "json". 21 | -i, --inputJson Input JSON file, to be used instead of --database. NOTE: this will ignore the remainder of input params and use a previously generated JSON file to generate the diagram. 22 | -c, --collection Comma separated list of collections to analyze. Example: "collection1,collection2". 23 | -a, --array Comma separated list of types of arrays to analyze. Example: "Uint8Array,ArrayBuffer,Array". 24 | -r, --raw Shows the exact list of types with frequency instead of the most frequent type only. 25 | -l, --limit Number of records to parse to get the schema, default is 100. 26 | -n, --dont-follow-fk Don't follow specified foreign key. Can be simply "fieldName" (all collections) or "collectionName:fieldName" (only for given collection). 27 | -s, --include-system string Analyzes system collections as well. 28 | 29 | ``` 30 | 31 | ## Example usage 32 | 33 | **Extract schema into json** 34 | 35 | ``` 36 | extract-mongo-schema -d "mongodb://localhost:3001/meteor" -o schema.json 37 | ``` 38 | 39 | 40 | **Extract schema into html** 41 | 42 | ``` 43 | extract-mongo-schema -d "mongodb://localhost:3001/meteor" -o schema.html -f html-diagram 44 | ``` 45 | 46 | **Extract schema into xlsx** 47 | 48 | ``` 49 | extract-mongo-schema -d "mongodb://localhost:3001/meteor" -o schema.xlsx -f xlsx 50 | ``` 51 | 52 | **Convert json to html** 53 | 54 | ``` 55 | extract-mongo-schema -i schema.json -o schema.html -f html-diagram 56 | ``` 57 | 58 | **Extract specific collections in raw format and analyze Array items** 59 | 60 | ``` 61 | extract-mongo-schema -d "mongodb://localhost:3001/meteor" -o schema.json -c "collection1,collection2,collection3" -a "Array" -r 62 | ``` 63 | 64 | Open html in your browser and you'll see rendered ER diagram. 65 | 66 | 67 | **Ignore some foreign keys** 68 | 69 | Use `-n` switch to prevent detecting and drawing links for specified fields. You can specify simply `fieldName` (that applies to all collections) or `collectionName:fieldName` (foreign key is ignored only in given collection). 70 | 71 | Example: 72 | 73 | ``` 74 | extract-mongo-schema -d "mongodb://localhost:3001/meteor" -o schema.html -f html-diagram -n createdBy -n users:modifiedBy 75 | ``` 76 | *(in this example: any foreign key named "createdBy" will be ignored. 
Also "modifiedBy" but only in users collection)* 77 | 78 | 79 | ## Example output .html (screenshot) 80 | 81 | ![Alt text](/preview.png?raw=true "Preview") 82 | 83 | 84 | ## Example output .json 85 | 86 | **schema.json** 87 | 88 | ```json 89 | { 90 | "customers": { 91 | "_id": { 92 | "primaryKey": true, 93 | "type": "string", 94 | "required": true 95 | }, 96 | "name": { 97 | "type": "string", 98 | "required": true 99 | }, 100 | "phone": { 101 | "type": "string", 102 | "required": true 103 | }, 104 | "email": { 105 | "type": "string", 106 | "required": true 107 | }, 108 | "note": { 109 | "type": "string", 110 | "required": true 111 | }, 112 | "createdAt": { 113 | "type": "Date", 114 | "required": true 115 | }, 116 | "createdBy": { 117 | "key": true, 118 | "type": "string", 119 | "required": true 120 | }, 121 | "modifiedAt": { 122 | "type": "Date", 123 | "required": true 124 | }, 125 | "modifiedBy": { 126 | "key": true, 127 | "type": "string", 128 | "required": true 129 | }, 130 | "ownerId": { 131 | "key": true, 132 | "type": "string", 133 | "required": true 134 | } 135 | }, 136 | "invoices": { 137 | "_id": { 138 | "primaryKey": true, 139 | "type": "string", 140 | "required": true 141 | }, 142 | "invoiceNumber": { 143 | "type": "string", 144 | "required": true 145 | }, 146 | "date": { 147 | "type": "Date", 148 | "required": true 149 | }, 150 | "customerId": { 151 | "foreignKey": true, 152 | "references": "customers", 153 | "key": true, 154 | "type": "string", 155 | "required": true 156 | }, 157 | "createdAt": { 158 | "type": "Date", 159 | "required": true 160 | }, 161 | "createdBy": { 162 | "key": true, 163 | "type": "string", 164 | "required": true 165 | }, 166 | "modifiedAt": { 167 | "type": "Date", 168 | "required": true 169 | }, 170 | "modifiedBy": { 171 | "key": true, 172 | "type": "string", 173 | "required": true 174 | }, 175 | "ownerId": { 176 | "key": true, 177 | "type": "string", 178 | "required": true 179 | }, 180 | "totalAmount": { 181 | "type": "number", 182 | "required": true 183 | } 184 | }, 185 | "users": { 186 | "_id": { 187 | "primaryKey": true, 188 | "type": "string", 189 | "required": true 190 | }, 191 | "createdAt": { 192 | "type": "Date", 193 | "required": true 194 | }, 195 | "services": { 196 | "type": "Object", 197 | "structure": { 198 | "password": { 199 | "type": "Object", 200 | "structure": { 201 | "bcrypt": { 202 | "type": "string", 203 | "required": true 204 | } 205 | }, 206 | "required": true 207 | }, 208 | "resume": { 209 | "type": "Object", 210 | "structure": { 211 | "loginTokens": { 212 | "type": "Array", 213 | "required": true 214 | } 215 | }, 216 | "required": true 217 | } 218 | }, 219 | "required": true 220 | }, 221 | "emails": { 222 | "type": "Array", 223 | "required": true 224 | }, 225 | "roles": { 226 | "type": "Array", 227 | "required": true 228 | }, 229 | "profile": { 230 | "type": "Object", 231 | "structure": { 232 | "name": { 233 | "type": "string", 234 | "required": true 235 | }, 236 | "email": { 237 | "type": "string", 238 | "required": true 239 | }, 240 | "facebook": { 241 | "type": "string", 242 | "required": true 243 | }, 244 | "google": { 245 | "type": "string", 246 | "required": true 247 | }, 248 | "twitter": { 249 | "type": "string", 250 | "required": true 251 | }, 252 | "website": { 253 | "type": "string", 254 | "required": true 255 | } 256 | }, 257 | "required": true 258 | } 259 | }, 260 | "meteor_accounts_loginServiceConfiguration": {}, 261 | "invoice_items": { 262 | "_id": { 263 | "primaryKey": true, 264 | "type": "string", 265 | 
"required": true 266 | }, 267 | "description": { 268 | "type": "string", 269 | "required": true 270 | }, 271 | "quantity": { 272 | "type": "number", 273 | "required": true 274 | }, 275 | "price": { 276 | "type": "number", 277 | "required": true 278 | }, 279 | "invoiceId": { 280 | "key": true, 281 | "foreignKey": true, 282 | "references": "invoices", 283 | "type": "string", 284 | "required": true 285 | }, 286 | "createdAt": { 287 | "type": "Date", 288 | "required": true 289 | }, 290 | "createdBy": { 291 | "key": true, 292 | "foreignKey": true, 293 | "references": "users", 294 | "type": "string", 295 | "required": true 296 | }, 297 | "modifiedAt": { 298 | "type": "Date", 299 | "required": true 300 | }, 301 | "modifiedBy": { 302 | "key": true, 303 | "foreignKey": true, 304 | "references": "users", 305 | "type": "string", 306 | "required": true 307 | }, 308 | "ownerId": { 309 | "key": true, 310 | "foreignKey": true, 311 | "references": "users", 312 | "type": "string", 313 | "required": true 314 | }, 315 | "amount": { 316 | "type": "number", 317 | "required": true 318 | } 319 | } 320 | } 321 | ``` 322 | 323 | 324 | That's all folks. 325 | Enjoy! :) 326 | -------------------------------------------------------------------------------- /extract-mongo-schema.js: -------------------------------------------------------------------------------- 1 | const { MongoClient, ObjectId } = require('mongodb'); 2 | 3 | const connect = async (connectionURL, authSource) => new Promise((resolve, reject) => { 4 | const client = new MongoClient(connectionURL, { authSource, useNewUrlParser: true, useUnifiedTopology: true }); 5 | client.connect((err) => { 6 | if (err) throw err; 7 | const db = client.db(); 8 | return resolve({ client, db }); 9 | }); 10 | }); 11 | 12 | const getSchema = async (url, opts) => { 13 | //console.log(opts); 14 | const { client, db } = await connect(url, opts.authSource); 15 | 16 | const l = await db.listCollections(); 17 | const collectionInfos = await l.toArray(); 18 | const schema = {}; 19 | const collections = {}; 20 | const relations = {}; 21 | 22 | const findRelatedCollection = async (value, field) => { 23 | const valueToString = value.toString(); 24 | if (relations[valueToString]) { 25 | for (const collectionName in collections) { 26 | if (relations[valueToString].collectionName === collectionName) { 27 | delete field.key; 28 | field.foreignKey = true; 29 | field.references = collectionName; 30 | } else { 31 | field.key = true; 32 | } 33 | } 34 | return; 35 | } 36 | for (const collectionName in collections) { 37 | const related = await collections[collectionName].collection.findOne({ _id: ObjectId(valueToString) }, { projection: { _id: 1 } }); 38 | if (related) { 39 | delete field.key; 40 | field.foreignKey = true; 41 | field.references = collectionName; 42 | relations[valueToString] = { collectionName }; 43 | } else { 44 | field.key = true; 45 | relations[valueToString] = { collectionName: '' }; 46 | } 47 | } 48 | }; 49 | 50 | const setTypeName = (item) => { 51 | let typeName = typeof item; 52 | if (typeName === 'object') { 53 | typeName = Object.prototype.toString.call(item); 54 | } 55 | typeName = typeName.replace('[object ', ''); 56 | typeName = typeName.replace(']', ''); 57 | return typeName; 58 | }; 59 | 60 | const getDocSchema = async (collectionName, doc, docSchema) => { 61 | for (const key in doc) { 62 | if(opts.excludeFields.includes(key)) continue; 63 | 64 | if (!docSchema[key]) { 65 | docSchema[key] = { types: {} }; 66 | } 67 | 68 | if (!docSchema[key].types) { 69 | 
docSchema[key].types = {}; 70 | } 71 | let typeName = setTypeName(doc[key]); 72 | 73 | if (!docSchema[key].types[typeName]) { 74 | docSchema[key].types[typeName] = { frequency: 0 }; 75 | } 76 | docSchema[key].types[typeName].frequency++; 77 | 78 | if (typeName === 'Object' && ObjectId.isValid(doc[key])) { 79 | typeName = 'string'; 80 | doc[key] = doc[key].toString(); 81 | } 82 | 83 | if (typeName === 'string' && ObjectId.isValid(doc[key])) { 84 | if (key === '_id') { 85 | docSchema[key].primaryKey = true; 86 | } else { 87 | // only if is not already processes 88 | if (!docSchema[key].foreignKey || !docSchema[key].references) { 89 | // only if is not ignored 90 | if (!(opts.dontFollowFK.__ANY__[key] || (opts.dontFollowFK[collectionName] && opts.dontFollowFK[collectionName][key]))) { 91 | await findRelatedCollection(doc[key], docSchema[key]); 92 | } 93 | } 94 | } 95 | } 96 | 97 | if (typeName === 'Object') { 98 | if (!docSchema[key].types[typeName].structure) { 99 | docSchema[key].types[typeName].structure = {}; 100 | } 101 | await getDocSchema(collectionName, doc[key], docSchema[key].types[typeName].structure); 102 | } 103 | 104 | if (opts.arrayList && opts.arrayList.indexOf(typeName) !== -1) { 105 | if (!docSchema[key].types[typeName].structure) { 106 | docSchema[key].types[typeName].structure = { types: {} }; 107 | } 108 | 109 | if (!docSchema[key].types[typeName].structure.types) { 110 | docSchema[key].types[typeName].structure.types = {}; 111 | } 112 | for (let i = 0; i < doc[key].length; i++) { 113 | const typeNameArray = setTypeName(doc[key][i]); 114 | if (typeNameArray === 'Object') { 115 | if (!docSchema[key].types[typeName].structure.types[typeNameArray]) { 116 | docSchema[key].types[typeName].structure.types[typeNameArray] = { structure: {} }; 117 | } 118 | 119 | if (!docSchema[key].types[typeName].structure.types[typeNameArray].structure) { 120 | docSchema[key].types[typeName].structure.types[typeNameArray].structure = {}; 121 | } 122 | await getDocSchema(collectionName, doc[key][i], docSchema[key].types[typeName].structure.types[typeNameArray].structure); 123 | } else { 124 | if (!docSchema[key].types[typeName].structure.types[typeNameArray]) { 125 | docSchema[key].types[typeName].structure.types[typeNameArray] = { frequency: 0 }; 126 | } 127 | docSchema[key].types[typeName].structure.types[typeNameArray].frequency++; 128 | } 129 | } 130 | } 131 | } 132 | }; 133 | 134 | const setMostFrequentType = (field, processed) => { 135 | let max = 0; 136 | let notNull = true; 137 | for (const typeName in field.types) { 138 | if (typeName === 'Null') { 139 | notNull = false; 140 | } 141 | field.types[typeName].frequency = field.types[typeName].frequency / processed; 142 | if (field.types[typeName].frequency > max) { 143 | max = field.types[typeName].frequency; 144 | if (typeName !== 'undefined' && typeName !== 'Null') { 145 | field.type = typeName; 146 | } 147 | } 148 | } 149 | return notNull; 150 | }; 151 | 152 | const mostFrequentType = (docSchema, processed) => { 153 | if (processed) { 154 | for (const fieldName in docSchema) { 155 | if (docSchema[fieldName]) { 156 | let notNull = setMostFrequentType(docSchema[fieldName], processed); 157 | if (!docSchema[fieldName].type) { 158 | docSchema[fieldName].type = 'undefined'; 159 | notNull = false; 160 | } 161 | 162 | const dataType = docSchema[fieldName].type; 163 | if (dataType === 'Object') { 164 | mostFrequentType(docSchema[fieldName].types[dataType].structure, processed); 165 | docSchema[fieldName].structure = 
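// hoist the winning Object type's nested structure up beside "type", matching the shape of the schema.json sample in the README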
docSchema[fieldName].types[dataType].structure; 166 | } 167 | 168 | if (opts.arrayList && opts.arrayList.indexOf(dataType) !== -1) { 169 | if (Object.keys(docSchema[fieldName].types[dataType].structure.types)[0] === 'Object') { 170 | mostFrequentType(docSchema[fieldName].types[dataType].structure.types.Object.structure, processed); 171 | docSchema[fieldName].types[dataType].structure.type = 'Object'; 172 | docSchema[fieldName].types[dataType].structure.structure = docSchema[fieldName].types[dataType].structure.types.Object.structure; 173 | delete docSchema[fieldName].types[dataType].structure.types; 174 | } else { 175 | mostFrequentType(docSchema[fieldName].types[dataType], processed); 176 | } 177 | docSchema[fieldName].structure = docSchema[fieldName].types[dataType].structure; 178 | } 179 | 180 | delete docSchema[fieldName].types; 181 | 182 | docSchema[fieldName].required = notNull; 183 | } 184 | } 185 | } 186 | }; 187 | 188 | if (opts.collectionList !== null) { 189 | for (let i = collectionInfos.length - 1; i >= 0; i--) { 190 | if (opts.collectionList.indexOf(collectionInfos[i].name) === -1) { 191 | collectionInfos.splice(i, 1); 192 | } 193 | } 194 | } 195 | 196 | if (!opts.includeSystem) { 197 | for (let i = collectionInfos.length - 1; i >= 0; i--) { 198 | if (collectionInfos[i].name.startsWith('system.')) { 199 | collectionInfos.splice(i, 1); 200 | } 201 | } 202 | } 203 | 204 | await Promise.all(collectionInfos.map(async (collectionInfo, index) => { 205 | collections[collectionInfo.name] = {}; 206 | schema[collectionInfo.name] = {}; 207 | collections[collectionInfo.name].collection = await db.collection(collectionInfo.name); 208 | const docs = await collections[collectionInfo.name].collection.find({}, { limit: opts.limit }).toArray(); 209 | await Promise.all(docs.map(async doc => await getDocSchema(collectionInfo.name, doc, schema[collectionInfo.name]))); 210 | if (!opts.raw) mostFrequentType(schema[collectionInfo.name], docs.length); 211 | })); 212 | 213 | await client.close(); 214 | return schema; 215 | }; 216 | 217 | const extractMongoSchema = async (url, opts) => getSchema(url, opts); 218 | 219 | 220 | if (typeof module !== 'undefined' && module.exports) { 221 | module.exports.extractMongoSchema = extractMongoSchema; 222 | } else { 223 | this.extractMongoSchema = extractMongoSchema; 224 | } 225 | -------------------------------------------------------------------------------- /cli.js: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env node 2 | 3 | const commandLineArgs = require('command-line-args'); 4 | const fs = require('fs'); 5 | const path = require('path'); 6 | const extractMongoSchema = require('./extract-mongo-schema'); 7 | const xlsx = require("xlsx"); 8 | 9 | const optionDefinitions = [ 10 | { name: 'database', alias: 'd', type: String }, 11 | { name: 'authSource', alias: 'u', type: String }, 12 | { name: 'inputJson', alias: 'i', type: String }, 13 | { name: 'output', alias: 'o', type: String }, 14 | { name: 'format', alias: 'f', type: String }, 15 | { name: 'collection', alias: 'c', type: String }, 16 | { name: 'array', alias: 'a', type: String }, 17 | { 18 | name: 'raw', alias: 'r', type: Boolean, defaultValue: false, 19 | }, 20 | { 21 | name: 'limit', alias: 'l', type: Number, defaultValue: 100, 22 | }, 23 | { 24 | name: 'dont-follow-fk', alias: 'n', multiple: true, type: String, 25 | }, 26 | { 27 | name: 'include-system', alias: 's', type: Boolean, defaultValue: false, 28 | }, 29 | { 30 | name: 'exclude-field', alias: 'e', type: String 31 | }, 32 | ]; 33 | 34 | const args = commandLineArgs(optionDefinitions); 35 | if(args.output !== "-") { 36 | console.log(''); 37 | console.log('Extract schema from Mongo database (including foreign keys)'); 38 | } 39 | const printUsage = function () { 40 | console.log(''); 41 | console.log('Usage:'); 42 | console.log('\textract-mongo-schema -d connection_string -o schema.json'); 43 | console.log('\t\t-u, --authSource string\tDatabase for authentication. Example: "admin".'); 44 | console.log('\t\t-d, --database string\tDatabase connection string. Example: "mongodb://localhost:3001/meteor".'); 45 | console.log('\t\t-o, --output string\tOutput file Use - to output to STDOUT'); 46 | console.log('\t\t-f, --format string\tOutput file format. Can be "json", "html-diagram" or "xlsx".'); 47 | console.log('\t\t-i, --inputJson string\tInput JSON file, to be used instead of --database. NOTE: this will ignore the remainder of input params and use a previously generated JSON file to generate the diagram.'); 48 | console.log('\t\t-c, --collection\tComma separated list of collections to analyze. Example: "collection1,collection2".'); 49 | console.log('\t\t-a, --array\tComma separated list of types of arrays to analyze. Example: "Uint8Array,ArrayBuffer,Array".'); 50 | console.log('\t\t-r, --raw\tShows the exact list of types with frequency instead of the most frequent type only.'); 51 | console.log('\t\t-l, --limit\tChanges the amount of items to parse from the collections. Default is 100.'); 52 | console.log('\t\t-n, --dont-follow-fk string\tDon\'t follow specified foreign key. Can be simply "fieldName" (all collections) or "collectionName:fieldName" (only for given collection).'); 53 | console.log('\t\t-s, --include-system string\tAnalyzes system collections as well.'); 54 | console.log('\t\t-e, --exclude-field string\tExcludes a field from being included in the output schema. Example: -e "_id".'); 55 | console.log(''); 56 | console.log('Enjoy! 
:)'); 57 | console.log(''); 58 | }; 59 | 60 | if (args.database && args.inputJson) { 61 | console.log(''); 62 | console.log('Cannot provide both database connection string and input JSON path.'); 63 | printUsage(); 64 | process.exit(1); 65 | } 66 | 67 | if (!args.database && !args.inputJson) { 68 | console.log(''); 69 | console.log('Database connection string or input JSON path is missing.'); 70 | printUsage(); 71 | process.exit(1); 72 | } 73 | 74 | if (!args.output) { 75 | console.log(''); 76 | console.log('Output path is missing.'); 77 | printUsage(); 78 | process.exit(1); 79 | } 80 | 81 | if (fs.existsSync(args.output)) { 82 | const outputStat = fs.lstatSync(args.output); 83 | 84 | if (outputStat.isDirectory()) { 85 | console.log(`Error: output "${args.output}" is not a file.`); 86 | process.exit(1); 87 | } 88 | } 89 | 90 | let collectionList = null; 91 | if (args.collection) { 92 | collectionList = args.collection.split(','); 93 | } 94 | 95 | let arrayList = null; 96 | if (args.array) { 97 | arrayList = args.array.split(','); 98 | } 99 | 100 | let fieldExclusionList = []; 101 | if(args["exclude-field"]) { 102 | fieldExclusionList = args["exclude-field"].split(','); 103 | } 104 | 105 | const outputFormat = args.format || 'json'; 106 | 107 | const dontFollowTMP = args['dont-follow-fk'] || []; 108 | 109 | const dontFollowFK = { 110 | __ANY__: {}, 111 | }; 112 | 113 | dontFollowTMP.forEach((df) => { 114 | const dfArray = df.split(':'); 115 | 116 | let collection = ''; 117 | let field = ''; 118 | 119 | if (dfArray.length > 1) { 120 | collection = dfArray[0]; 121 | field = dfArray[1]; 122 | } else { 123 | collection = '__ANY__'; 124 | field = dfArray[0]; 125 | } 126 | dontFollowFK[collection] = dontFollowFK[collection] || {}; dontFollowFK[collection][field] = true; 127 | }); 128 | 129 | if(args.output !== "-") { 130 | console.log(''); 131 | console.log('Extracting...'); 132 | } 133 | 134 | const opts = { 135 | authSource: args.authSource, 136 | collectionList, 137 | arrayList, 138 | raw: args.raw, 139 | limit: args.limit, 140 | dontFollowFK, 141 | includeSystem: args['include-system'], 142 | excludeFields: fieldExclusionList 143 | }; 144 | 145 | 146 | (async () => { 147 | try { 148 | let schema; 149 | if (args.inputJson) { 150 | // read input json (resolved relative to the current working directory) 151 | const inputJsonPath = path.resolve(args.inputJson); 152 | try { 153 | const inputJsonString = fs.readFileSync(inputJsonPath, 'utf8'); 154 | schema = JSON.parse(inputJsonString); 155 | } catch (e) { 156 | console.log(`Error: cannot read input json file "${inputJsonPath}". ${e.message}`); 157 | process.exit(1); 158 | } 159 | } 160 | else { 161 | schema = await extractMongoSchema.extractMongoSchema(args.database, opts); 162 | } 163 | 164 | if (outputFormat === 'json') { 165 | try { 166 | if(args.output === "-") 167 | console.log(JSON.stringify(schema, null, '\t')); 168 | else 169 | fs.writeFileSync(args.output, JSON.stringify(schema, null, '\t'), 'utf8'); 170 | } catch (e) { 171 | console.log(`Error: cannot write output "${args.output}". ${e.message}`); 172 | process.exit(1); 173 | } 174 | } 175 | 176 | if (outputFormat === 'html-diagram') { 177 | const templateFileName = path.join(__dirname, '/template-html-diagram.html'); 178 | 179 | // read input file 180 | let templateHTML = ''; 181 | try { 182 | templateHTML = fs.readFileSync(templateFileName, 'utf8'); 183 | } catch (e) { 184 | console.log(`Error: cannot read template file "${templateFileName}". 
${e.message}`); 185 | process.exit(1); 186 | } 187 | 188 | templateHTML = templateHTML.replace('{/*DATA_HERE*/}', JSON.stringify(schema, null, '\t')); 189 | 190 | try { 191 | fs.writeFileSync(args.output, templateHTML, 'utf8'); 192 | } catch (e) { 193 | console.log(`Error: cannot write output "${args.output}". ${e.message}`); 194 | process.exit(1); 195 | } 196 | } 197 | if(outputFormat == "xlsx"){ 198 | if(!args.output.endsWith(".xlsx")){ 199 | console.log('Error: xlsx output file name must end with ".xlsx".'); 200 | process.exit(1); 201 | } 202 | //get all collections 203 | var collections = Object.keys(schema); 204 | var wb = xlsx.utils.book_new(); 205 | //one worksheet per collection 206 | collections.forEach(element => { 207 | var wsName = element; 208 | 209 | // console.log(element); 210 | var wsData = [["Collection", "primaryKey", "type", "structure", "require"]]; 211 | var items = Object.keys(schema[element]);//items in collection 212 | items.forEach( item => { 213 | var props = Object.keys(schema[element][item]); 214 | var itemProperties = { 215 | primaryKey: schema[element][item]["primaryKey"] !== undefined ? schema[element][item]["primaryKey"] : false, 216 | type: schema[element][item]["type"] !== undefined ? schema[element][item]["type"] : "undefined", 217 | structure: schema[element][item]["structure"] !== undefined ? schema[element][item]["structure"] : "undefined", 218 | require: schema[element][item]["required"] !== undefined ? schema[element][item]["required"] : "undefined" 219 | }; 220 | if(itemProperties.type != "undefined" && itemProperties.type == "Object"){ 221 | itemProperties.structure = JSON.stringify(itemProperties.structure); 222 | } 223 | var data = []; 224 | data.push(item); 225 | data.push(itemProperties.primaryKey); 226 | data.push(itemProperties.type); 227 | data.push(itemProperties.structure); 228 | data.push(itemProperties.require); 229 | wsData.push(data); 230 | }); 231 | // console.log(wsData); 232 | var ws = xlsx.utils.aoa_to_sheet(wsData); 233 | xlsx.utils.book_append_sheet(wb, ws, wsName); 234 | }); 235 | xlsx.writeFile(wb, args.output); 236 | } 237 | if(args.output !== "-") { 238 | console.log('Success.'); 239 | console.log(''); 240 | } 241 | } catch (err) { 242 | console.log(err); 243 | process.exit(1); 244 | } 245 | })(); 246 | 247 | 248 | 249 | --------------------------------------------------------------------------------
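For reference, a minimal sketch of calling the extractor programmatically rather than through cli.js, assuming the module is required from its local path; the option object mirrors the `opts` that cli.js builds, and the connection string and option values below are placeholders:

```js
const { extractMongoSchema } = require('./extract-mongo-schema');

(async () => {
  // Option names mirror the `opts` object assembled in cli.js.
  const schema = await extractMongoSchema('mongodb://localhost:3001/meteor', {
    authSource: undefined,           // -u, --authSource
    collectionList: null,            // -c (null = analyze all collections)
    arrayList: null,                 // -a (array types whose items should also be analyzed)
    raw: false,                      // -r (keep per-type frequencies instead of collapsing them)
    limit: 100,                      // -l (documents sampled per collection)
    dontFollowFK: { __ANY__: {} },   // -n (foreign keys to ignore; __ANY__ applies to all collections)
    includeSystem: false,            // -s
    excludeFields: [],               // -e
  });
  console.log(JSON.stringify(schema, null, '\t'));
})();
```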