├── .gitignore ├── LICENSE ├── README.md ├── index.js └── package.json /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | 5 | # Runtime data 6 | pids 7 | *.pid 8 | *.seed 9 | 10 | # Directory for instrumented libs generated by jscoverage/JSCover 11 | lib-cov 12 | 13 | # Coverage directory used by tools like istanbul 14 | coverage 15 | 16 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 17 | .grunt 18 | 19 | # Compiled binary addons (http://nodejs.org/api/addons.html) 20 | build/Release 21 | 22 | # Dependency directory 23 | # Commenting this out is preferred by some people, see 24 | # https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git- 25 | node_modules 26 | 27 | # Users Environment Variables 28 | .lock-wscript 29 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2015 Daniel Cohen Gindi 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | 23 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | node-optimize 2 | ============= 3 | 4 | [![npm Version](https://badge.fury.io/js/node-optimize.png)](https://npmjs.org/package/node-optimize) 5 | 6 | We all need a tool to optimize a node.js project and create a single `js` file from it, 7 | taking care of `require`s and leaving out `node_modules`. 8 | 9 | Well I needed one too, and there wasn't one, so I built it! 10 | 11 | Usage: 12 | ```javascript 13 | 14 | var NodeOptimizer = require('node-optimize'); 15 | var optimizer = new NodeOptimizer({ 16 | ignore: [ 17 | 'config/db.js', 18 | 'private/some-other-file.js', 19 | ] 20 | }); 21 | 22 | var mergedJs = optimizer.merge('main.js'); // node-optimize will automatically resolve that path for 'main.js' using path.resolve(...) 23 | 24 | require('fs').writeFile(require('path').resolve('main.optimized.js'), mergedJs); 25 | 26 | ``` 27 | 28 | ## What's in the bag 29 | 30 | * `options.ignore` -> Tell it which files to ignore in the process of expanding the `require` calls. 31 | * Automatically ignores core modules, or modules from `node_modules`. 32 | * Currently handles `*.js`, `*.json`, and directory modules (with or without package.json/main). 33 | * Functionality of `require` statements stays the same - loading on demand, loading once, and synthesizing the `module` global object. 
34 | * Handling of cyclic references same as node.js's native implementation 35 | * Using `include` option to include files which are not automatically detected (because of dynamic `require`s using variables and other complex loading mechanisms) 36 | * Loading modules which were specified using complex `require` statement (i.e. `require(moduleName + '_' + index)`) 37 | 38 | *Note*: Support for `require` of module folders (with parsing of `package.json` etc.) will be added in the future. 39 | 40 | ## CoffeeScript 41 | 42 | If you need support for CoffeeScript, simply use Grunt to "compile" your Coffee files, and then run the optimizer on a pure JS project. 43 | 44 | ## Binary modules 45 | 46 | There's no simple way to embed native binary modules (*.node), or modules that depend on other local raw files. 47 | In case you have a module which is known to have binary files, you should exclude it from optimization, and put it in a known path, or on a private NPM etc. 48 | 49 | I've tried to also support squashing `node_modules` for cases where one wants to eliminate the need of an `npm install` in a production project, 50 | but I have abandoned those trials, as it makes no sense: 51 | In 99% of the cases one of the modules in the `node_modules` tree will have binaries, and `npm install`/`npm update` is a strength anyway as it allows for bugfixes even in a production project. 52 | 53 | ## Grunt 54 | 55 | See [https://github.com/danielgindi/grunt-node-optimize](https://github.com/danielgindi/grunt-node-optimize) 56 | 57 | 58 | ## Contributing 59 | 60 | If you have anything to contribute, or functionality that you lack - you are more than welcome to participate in this! 61 | If anyone wishes to contribute unit tests - that also would be great :-) 62 | 63 | ## Me 64 | * Hi! I am Daniel Cohen Gindi. Or in short- Daniel. 65 | * danielgindi@gmail.com is my email address. 66 | * That's all you need to know. 
67 | 68 | ## Help 69 | 70 | If you want to buy me a beer, you are very welcome to 71 | [![Donate](https://www.paypalobjects.com/en_US/i/btn/btn_donate_LG.gif)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=G6CELS3E997ZE) 72 | Thanks :-) 73 | 74 | ## License 75 | 76 | All the code here is under MIT license. Which means you could do virtually anything with the code. 77 | I will appreciate it very much if you keep an attribution where appropriate. 78 | 79 | The MIT License (MIT) 80 | 81 | Copyright (c) 2013 Daniel Cohen Gindi (danielgindi@gmail.com) 82 | 83 | Permission is hereby granted, free of charge, to any person obtaining a copy 84 | of this software and associated documentation files (the "Software"), to deal 85 | in the Software without restriction, including without limitation the rights 86 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 87 | copies of the Software, and to permit persons to whom the Software is 88 | furnished to do so, subject to the following conditions: 89 | 90 | The above copyright notice and this permission notice shall be included in all 91 | copies or substantial portions of the Software. 
92 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var Fs = require('fs'), 4 | Path = require('path'), 5 | UglifyJS = require('uglify-js'); 6 | 7 | /** 8 | * Checks if a path exists and is a file (not a directory), without throwing any error 9 | * @param {String} path 10 | * @returns {Boolean} 11 | */ 12 | var isFileSync = function (path) { 13 | var stat = null; 14 | try { 15 | stat = Fs.statSync(path); 16 | } catch (e) { 17 | 18 | } 19 | return !!stat && stat.isFile(); 20 | }; 21 | 22 | /** 23 | * Checks if a path exists and is directory, without throwing any error 24 | * @param {String} path 25 | * @returns {Boolean} 26 | */ 27 | var isDirectorySync = function (path) { 28 | var stat = null; 29 | try { 30 | stat = Fs.statSync(path); 31 | } catch (e) { 32 | 33 | } 34 | return !!stat && stat.isDirectory(); 35 | }; 36 | 37 | /** 38 | * @const 39 | */ 40 | var CORE_MODULE_LIST = (function () { 41 | var core = {}; 42 | ['assert', 'buffer', 'child_process', 'cluster', 43 | 'crypto', 'dgram', 'dns', 'events', 'fs', 'http', 'https', 'net', 44 | 'os', 'path', 'punycode', 'querystring', 'readline', 'repl', 45 | 'string_decoder', 'tls', 'tty', 'url', 'util', 'vm', 'zlib'].forEach(function (key) { 46 | core[key] = true; 47 | }); 48 | return core; 49 | })(); 50 | 51 | /** 52 | * @constructor 53 | * @param options 54 | */ 55 | var optimizer = function (options) { 56 | 57 | this.options = { 58 | ignore: (options ? options.ignore || options.ignoreRequired : null) || [], 59 | include: (options ? options.include : null) || [] 60 | }; 61 | 62 | }; 63 | 64 | /** 65 | * Resolves a path relative to another path 66 | * I.e. what is the meaning of Y in `require(Y)` when called within a specific module? 
67 | * @param {String} from - base path 68 | * @param {String} to - relative path to normalize 69 | * @returns {String} 70 | */ 71 | var resolveRelativePath = function (from, to) { 72 | 73 | var relPath = Path.relative(from, to); 74 | if (!/^[\./\\]/.test(relPath) && !/:\//.test(relPath)) { 75 | relPath = './' + relPath; 76 | } 77 | 78 | // Normalize path if possible 79 | if (relPath.indexOf(':') === -1) { 80 | relPath = relPath.replace(/\\/g, '/'); 81 | } 82 | 83 | return relPath; 84 | 85 | }; 86 | 87 | /** 88 | * Enumerate files in `rootDir`, using `filters` 89 | * @param {String} rootDir 90 | * @param {String} filters 91 | * @returns {Array.} array of absolute paths 92 | */ 93 | var getMatchingFilesSync = function(rootDir, filters) { 94 | var results = []; 95 | 96 | filters.forEach(function (filter) { 97 | var destination = Path.resolve(rootDir, filter), 98 | file = null; 99 | 100 | try { 101 | file = Fs.lstatSync(destination); 102 | } catch (e) { 103 | } 104 | 105 | if (file && file.isDirectory()) { 106 | Fs.readdirSync(destination).reduce((function(prev, curr) { 107 | prev.push(Path.join(destination, curr)); 108 | return prev; 109 | }), results); 110 | } else { 111 | if (Path.extname(destination) === '') { 112 | var fileName = Path.basename(destination); 113 | Fs.readdirSync(Path.dirname(destination)).filter(function(fileNameLoc) { 114 | return fileNameLoc.indexOf(fileName) !== -1; 115 | }).reduce((function(prev, curr) { 116 | prev.push(Path.join(destination, curr)); 117 | return prev; 118 | }), results); 119 | } else { 120 | results.push(destination); 121 | } 122 | } 123 | }); 124 | 125 | return results; 126 | 127 | }; 128 | 129 | /** 130 | * Performs a `JSON.parse(...)` on `data`, without throwing an exception 131 | * @param {String?} data 132 | * @returns {*} the parsed data, or null if failed 133 | */ 134 | var tryJsonParse = function (data) { 135 | try { 136 | return JSON.parse(data); 137 | } catch (e) { 138 | return null; 139 | } 140 | }; 141 | 142 | /** 
143 | * Searches for all `require` statements inside `sourceCode`, and returns a normalized set of data about it 144 | * @param {String} sourceCode - the source code body of the file being investigated 145 | * @param {String} filePath - path of the file being investigated 146 | * @returns {Array.<{statement: String, statementArguments: String, text: String, path: String}>} 147 | */ 148 | var getRequireStatements = function(sourceCode, filePath) { 149 | 150 | // Replace newlines in the same way that UglifyJS does 151 | // So we'll have correct `pos` properties 152 | 153 | sourceCode = sourceCode.replace(/\r\n?|[\n\u2028\u2029]/g, "\n").replace(/\uFEFF/g, ''); 154 | var ast = UglifyJS.parse(sourceCode); 155 | 156 | var results = []; 157 | var MODULE_PATH_Y = Path.dirname(filePath); 158 | 159 | var processRequireNode = function(originalText, text, args) { 160 | if (args.length !== 1) { 161 | return 'unknown'; 162 | } 163 | 164 | var CREATE_RESULT = function (path, type) { 165 | var result = { 166 | statement: originalText, 167 | statementArguments: originalText.match(/^require\s*\(([\s\S]*)\)/)[1], 168 | text: tryJsonParse(text), 169 | path: path, 170 | type: type 171 | }; 172 | results.push(result); 173 | return result; 174 | }; 175 | 176 | /** 177 | * Implements the LOAD_AS_FILE function 178 | * @param {String} X - the path 179 | * @returns {Boolean|String} `true` if processed and fine, `false` if not found, 'node' if it's a binary module 180 | */ 181 | var LOAD_AS_FILE = function (X) { 182 | 183 | // 1. If X is a file, load X as JavaScript text. STOP 184 | if (isFileSync(X)) { 185 | CREATE_RESULT(X, 'js'); 186 | return true; 187 | } 188 | 189 | // 2. If X.js is a file, load X.js as JavaScript text. STOP 190 | if (isFileSync(X + '.js')) { 191 | CREATE_RESULT(X + '.js', 'js'); 192 | return true; 193 | } 194 | 195 | // 3. If X.json is a file, parse X.json to a JavaScript Object. 
STOP 196 | if (isFileSync(X + '.json')) { 197 | CREATE_RESULT(X + '.json', 'json'); 198 | return true; 199 | } 200 | 201 | // 4. If X.node is a file, load X.node as binary addon. STOP 202 | if (isFileSync(X + '.node')) { 203 | return 'node'; 204 | } 205 | 206 | return false; 207 | }; 208 | 209 | /** 210 | * Implements the LOAD_AS_DIRECTORY function 211 | * @param {String} X - the path 212 | * @returns {Boolean|String} `true` if processed and fine, `false` if not found, 'node' if it's a binary module 213 | */ 214 | var LOAD_AS_DIRECTORY = function (X) { 215 | 216 | if (!isDirectorySync(X)) return false; 217 | 218 | // 1. If X/package.json is a file, 219 | var packageJson = null; 220 | try { 221 | // 1. a. Parse X/package.json, and look for "main" field. 222 | packageJson = JSON.parse(Fs.readFileSync(Path.join(X, 'package.json'), { encoding: 'utf8' }).toString()); 223 | } catch (e) { 224 | 225 | } 226 | 227 | if (packageJson && packageJson.main) { 228 | // 1. b. let M = X + (json main field) 229 | var M = Path.join(X, packageJson.main); 230 | 231 | // 1. c. LOAD_AS_FILE(M) 232 | var loadedAsFile = LOAD_AS_FILE(M); 233 | if (loadedAsFile) { 234 | return loadedAsFile; 235 | } 236 | } 237 | 238 | // 2. If X/index.js is a file, load X/index.js as JavaScript text. STOP 239 | if (isFileSync(Path.join(X, 'index.js'))) { 240 | CREATE_RESULT(Path.join(X, 'index.js'), 'js'); 241 | return true; 242 | } 243 | 244 | // 3. If X/index.json is a file, parse X/index.json to a JavaScript object. STOP 245 | if (isFileSync(Path.join(X, 'index.json'))) { 246 | CREATE_RESULT(Path.join(X, 'index.json'), 'json'); 247 | return true; 248 | } 249 | 250 | // 4. If X/index.node is a file, load X/index.node as binary addon. STOP 251 | if (isFileSync(Path.join(X, 'index.node'))) { 252 | return 'node'; 253 | } 254 | 255 | return false; 256 | }; 257 | 258 | // require(X) from module at path Y 259 | var REQUIRE_X = args[0].value; 260 | 261 | if (REQUIRE_X) { 262 | 263 | // 1. 
If X is a core module 264 | if (CORE_MODULE_LIST.hasOwnProperty(REQUIRE_X)) { 265 | return 'core'; 266 | } 267 | 268 | // 2. If X begins with './' or '/' or '../' (Windows: OR [DRIVE LETTER]:/ OR [DRIVE LETTER]:\) 269 | if (/^(\.{0,2}[/\\]|[a-zA-Z]:)/.test(REQUIRE_X)) { 270 | return LOAD_AS_FILE(Path.resolve(MODULE_PATH_Y, REQUIRE_X)) /* 2. a. LOAD_AS_FILE(Y + X) */ 271 | || LOAD_AS_DIRECTORY(Path.resolve(MODULE_PATH_Y, REQUIRE_X)); /* 2. b. LOAD_AS_DIRECTORY(Y + X) */ 272 | } 273 | 274 | // 3. LOAD_NODE_MODULES(X, dirname(Y)) 275 | // We ignore node_modules, as it makes no sense. 276 | // They will most probably contain binaries, and will probably benefit from `npm update`s... 277 | 278 | // 4. THROW "not found" 279 | return 'not-exists'; 280 | } else { 281 | 282 | // The expression inside the `require` is too complex, we can't parse it. 283 | 284 | CREATE_RESULT(REQUIRE_X, 'complex'); 285 | 286 | return 'complex'; 287 | } 288 | }; 289 | 290 | ast.walk(new UglifyJS.TreeWalker(function(node) { 291 | 292 | if (node instanceof UglifyJS.AST_Call) { 293 | 294 | while (/\brequire\b/.test(node.print_to_string()) && node.expression && node.expression.print_to_string() !== 'require') { 295 | node = node.expression; 296 | } 297 | 298 | if (node.expression && node.expression.print_to_string() === 'require') { 299 | 300 | var originalText = sourceCode.substring(node.start.pos, node.end.pos + 1); 301 | var text = node.print_to_string({ beautify: false }); 302 | 303 | var ret = processRequireNode(originalText, text, node.args); 304 | 305 | if (ret !== true && 306 | ret !== 'core' && 307 | ret !== 'node' && 308 | ret !== 'not-exists') { 309 | 310 | console.log('Ignoring complex require statement in:\n' + 311 | ' file : ' + filePath + '\n' + 312 | ' statement : ' + originalText + '\n' + 313 | ' You may want to add that file to options.include.'); 314 | } 315 | 316 | return true; 317 | 318 | } 319 | } 320 | })); 321 | 322 | return results; 323 | }; 324 | 325 | var 
regexEscapePattern = /[-\/()[\]?{}|*+\\:\.$^#|]/g; 326 | var regexEscape = function (string) { 327 | return string.replace(regexEscapePattern, "\\$&"); 328 | }; 329 | 330 | /** 331 | * Do the actual optimization process 332 | * @param {String} mainFilePath - path for the main file of the project 333 | * @returns {string} optimized project file 334 | */ 335 | optimizer.prototype.merge = function(mainFilePath) { 336 | 337 | mainFilePath = Path.resolve(mainFilePath) || Path.resolve(process.cwd(), mainFilePath); 338 | var rootDir = Fs.lstatSync(mainFilePath).isDirectory() ? Path.resolve(mainFilePath) : Path.dirname(Path.resolve(mainFilePath)); 339 | rootDir += /\\/.test(Path.resolve('/path/to')) ? '\\' : '/'; 340 | 341 | if (!isFileSync(mainFilePath)) { 342 | throw new Error("Main file not found " + mainFilePath); 343 | } 344 | 345 | var filteredOutFiles = getMatchingFilesSync(rootDir, this.options.ignore); 346 | var includedFiles = getMatchingFilesSync(rootDir, this.options.include); 347 | 348 | var requiredMap = {}; 349 | 350 | var requireFileMode = function (filePath) { 351 | 352 | // This is a complex `required` statement which is not a simple script, leave that to runtime 353 | if (filePath === false) return 'complex'; 354 | 355 | // These will surely be included 356 | if (includedFiles.filter(function(filter) { 357 | return Path.normalize(filter) === Path.normalize(filePath); 358 | }).length > 0) return true; 359 | 360 | // These will be excluded, but we know that we still need to normalize paths of require to those 361 | if (filteredOutFiles.filter(function(filter) { 362 | return Path.normalize(filter) === Path.normalize(filePath); 363 | }).length > 0) return 'normalize_path'; 364 | 365 | // These are not in the scope of the project, and should not be included 366 | if (filePath.substr(0, rootDir.length).toLowerCase() !== rootDir.toLowerCase()) { 367 | return false; 368 | } 369 | 370 | // Now we only need the path without the project dir prefix 371 | filePath = 
filePath.substr(rootDir.length); 372 | 373 | // The file is in node_modules under current project - exclude 374 | if (/^node_modules$|\/node_modules$|^node_modules\/|\\node_modules$|^node_modules\\/.test(filePath)) { 375 | return false; 376 | } 377 | 378 | return true; 379 | }; 380 | 381 | var recursiveSourceGrabber = function(filePath) { 382 | 383 | if (requiredMap.hasOwnProperty(filePath)) return; 384 | 385 | var required = {}; 386 | 387 | var sourceCode = required.source = Fs.readFileSync(filePath, { encoding: 'utf8' }).toString(); 388 | requiredMap[filePath] = required; 389 | 390 | var requireStatements = getRequireStatements(sourceCode, filePath); 391 | 392 | requireStatements.forEach(function (requireStatement) { 393 | if (requireStatement.path) { 394 | requireStatement.path = Path.resolve(filePath, requireStatement.path); 395 | } 396 | }); 397 | 398 | requireStatements.forEach(function (requireStatement) { 399 | if (requireStatement.path) { 400 | requireStatement.mode = requireFileMode(requireStatement.path); 401 | } else if (requireStatement.type === 'complex') { 402 | requireStatement.mode = 'complex'; 403 | } 404 | }); 405 | 406 | requireStatements = requireStatements.filter(function (requireStatement) { 407 | return requireStatement.mode; 408 | }); 409 | 410 | required.required = requireStatements; 411 | 412 | requireStatements.forEach(function (requireStatement) { 413 | 414 | if (requireStatement.mode !== true) return; // Ignore files that do not need to be dealt with deeply 415 | 416 | recursiveSourceGrabber(requireStatement.path); 417 | 418 | }); 419 | 420 | }; 421 | 422 | // Recurse through the main file 423 | recursiveSourceGrabber(mainFilePath); 424 | 425 | // Now include any files that were specifically included using options.include 426 | includedFiles.forEach(function (includedFile) { 427 | recursiveSourceGrabber(includedFile); 428 | }); 429 | 430 | // Assign module keys and prepare for storing in the 'required' container 431 | 
Object.keys(requiredMap).forEach(function (modulePath) { 432 | if (modulePath === mainFilePath) return; 433 | 434 | var moduleToInline = requiredMap[modulePath]; 435 | 436 | // Figure out the relative path of said module, relative to the main file 437 | moduleToInline.relativePath = resolveRelativePath(rootDir, modulePath); 438 | 439 | if (/\.json$/i.test(moduleToInline.relativePath)) { 440 | // Generate the json's data to inline later 441 | moduleToInline.source = '__JSON_LOADER__(' + JSON.stringify(moduleToInline.source) + ')'; 442 | } else { 443 | // Generate the module's data to inline later 444 | moduleToInline.source = '\ 445 | __MODULE_LOADER__(function(module, exports){\n\n' + moduleToInline.source + '\n\n})'; 446 | } 447 | 448 | }); 449 | 450 | // Replace require calls 451 | Object.keys(requiredMap).forEach(function (modulePath) { 452 | var moduleToInline = requiredMap[modulePath]; 453 | moduleToInline.required.forEach(function (requiredStatement) { 454 | 455 | if (requiredStatement.mode) { 456 | /** 457 | * In the past we were only normalizing paths of excluded modules, 458 | * And replacing `require` calls of included user modules. 
459 | * 460 | * Now we replace all require calls which are not core modules 461 | */ 462 | 463 | // Prepare a replacing statement 464 | var regex = regexEscape(requiredStatement.statement); 465 | 466 | if (requiredStatement.text) { 467 | var relativePath = resolveRelativePath(mainFilePath, requiredStatement.text); 468 | moduleToInline.source = moduleToInline.source.replace(new RegExp(regex), '__FAKE_REQUIRE__(' + JSON.stringify(relativePath) + ')'); 469 | } else { 470 | moduleToInline.source = moduleToInline.source.replace(new RegExp(regex), '__FAKE_REQUIRE__(' + requiredStatement.statementArguments + ', ' + JSON.stringify(modulePath) + ')'); 471 | } 472 | 473 | } 474 | 475 | }); 476 | }); 477 | 478 | // Prepare this for cases when we do a "soft" lookup (adding .js or .json to pathnames) and paths differ in case 479 | // So we simulate the behavior on real FSes with case insensitivity 480 | // On a case sensitive FS, if node.js looks for MODULEPATH + .js, I do not know if it will find .JS files too. 481 | var caseInsensitivePathMap = {}; 482 | Object.keys(requiredMap).forEach(function (modulePath) { 483 | caseInsensitivePathMap[modulePath.toLowerCase()] = modulePath; 484 | }); 485 | 486 | // Start writing the actual output 487 | 488 | var source = '', isFirstRequired; 489 | 490 | // Write "required" wrapper beginning 491 | // NOTE: It is mandatory that `fakeModule.loaded = true` is done before `moduleLoadFunction`, 492 | // in order to support cyclic requires. 
493 | source += '\ 494 | (function(){ \ 495 | \ 496 | var __CORE_MODULE_LIST__ = ' + JSON.stringify(CORE_MODULE_LIST) + '; \ 497 | \ 498 | var __MODULE_LOADER__ = function (moduleLoadFunction) {\ 499 | var fakeModule = { \ 500 | id: module.id, \ 501 | parent: module.parent, \ 502 | filename: module.filename, \ 503 | loaded: false, \ 504 | children: [], \ 505 | paths: module.paths, \ 506 | exports: {} \ 507 | }; \ 508 | \ 509 | return function () { \ 510 | if (!fakeModule.loaded && !fakeModule.__isLoading) { \ 511 | fakeModule.__isLoading = true; \ 512 | try {\ 513 | moduleLoadFunction(fakeModule, fakeModule.exports); \ 514 | fakeModule.__isLoading = false;\ 515 | } catch (e) {\ 516 | fakeModule.__isLoading = false;\ 517 | throw e;\ 518 | }\ 519 | fakeModule.loaded = true; \ 520 | } \ 521 | return fakeModule.exports; \ 522 | }; \ 523 | }; \ 524 | \ 525 | var __JSON_LOADER__ = function (json) {\ 526 | return function () { \ 527 | return JSON.parse(json); \ 528 | }; \ 529 | }; \ 530 | \ 531 | var __REQUIRED_NODE_MODULES__ = { \ 532 | '; 533 | 534 | // Write known modules 535 | isFirstRequired = true; 536 | Object.keys(requiredMap).forEach(function (modulePath) { 537 | if (modulePath === mainFilePath) return; 538 | 539 | var moduleToInline = requiredMap[modulePath]; 540 | 541 | if (isFirstRequired) isFirstRequired = false; 542 | else source += ', '; 543 | 544 | source += JSON.stringify(moduleToInline.relativePath) + ': \n' + moduleToInline.source + '\n'; 545 | }); 546 | 547 | // Write "required" wrapper end 548 | source += '\ 549 | }; \ 550 | \ 551 | var __CI_MODULE_PATH_MAP__ = { \ 552 | '; 553 | 554 | // Write known modules 555 | isFirstRequired = true; 556 | Object.keys(caseInsensitivePathMap).forEach(function (ciPath) { 557 | if (isFirstRequired) isFirstRequired = false; 558 | else source += ', '; 559 | source += JSON.stringify(ciPath) + ': \n' + JSON.stringify(caseInsensitivePathMap[ciPath]) + '\n'; 560 | }); 561 | 562 | // Write "required" wrapper end 563 | 
source += '\ 564 | }; \ 565 | \ 566 | var Path = require("path");\ 567 | var resolveRelativePath = ' + resolveRelativePath.toString() + '; \ 568 | var __MAIN_ORIGINAL_PATH__ = ' + JSON.stringify(mainFilePath) + ';\ 569 | \ 570 | var __LOOK_FOR_FILE__ = function (relPath) {\ 571 | var module = __REQUIRED_NODE_MODULES__.hasOwnProperty(relPath) ? __REQUIRED_NODE_MODULES__[relPath] : null;\ 572 | if (!module) {\ 573 | relPath = __CI_MODULE_PATH_MAP__[relPath];\ 574 | if (relPath) {\ 575 | module = __REQUIRED_NODE_MODULES__.hasOwnProperty(relPath) ? __REQUIRED_NODE_MODULES__[relPath] : null;\ 576 | }\ 577 | }\ 578 | return module;\ 579 | };\ 580 | \ 581 | var __FAKE_REQUIRE__ = function (modulePath, originalModulePath) {\ 582 | if (!__CORE_MODULE_LIST__.hasOwnProperty(modulePath)) {\ 583 | /* Transform path to distribution path */\ 584 | var relPath;\ 585 | if (originalModulePath) {\ 586 | relPath = Path.resolve(Path.dirname(originalModulePath), modulePath);\ 587 | relPath = resolveRelativePath(Path.dirname(__MAIN_ORIGINAL_PATH__), relPath);\ 588 | } else {\ 589 | relPath = resolveRelativePath(__dirname, modulePath);\ 590 | }\ 591 | \ 592 | /* Try inlined modules */\ 593 | var module = __LOOK_FOR_FILE__(relPath) || __LOOK_FOR_FILE__(relPath + \'.js\') || __LOOK_FOR_FILE__(relPath + \'.json\');\ 594 | if (module) return module();\ 595 | \ 596 | /* Try original `require` with transformed path */\ 597 | try {\ 598 | return require(relPath);\ 599 | } catch (e) {\ 600 | }\ 601 | }\ 602 | \ 603 | /* Try original `require` with original statement */\ 604 | return require(modulePath);\ 605 | };\ 606 | \ 607 | this.__FAKE_REQUIRE__ = __FAKE_REQUIRE__; \ 608 | \ 609 | })();'; 610 | 611 | // Write main file source 612 | source += requiredMap[mainFilePath].source; 613 | 614 | console.log('Optimized node project starting with ' + mainFilePath); 615 | 616 | return source; 617 | }; 618 | 619 | module.exports = optimizer; 620 | 621 | 622 | /** 623 | Node's require algorithm: 624 | 625 
| require(X) from module at path Y 626 | 1. If X is a core module, (IN: module.exports._builtinLibs) 627 | 1. a. return the core module 628 | 1. b. STOP 629 | 2. If X begins with './' or '/' or '../' (Windows: OR [DRIVE LETTER]:/ OR [DRIVE LETTER]:\) 630 | 2. a. LOAD_AS_FILE(Y + X) 631 | 2. b. LOAD_AS_DIRECTORY(Y + X) 632 | 3. LOAD_NODE_MODULES(X, dirname(Y)) 633 | 4. THROW "not found" 634 | 635 | LOAD_AS_FILE(X) 636 | 1. If X is a file, load X as JavaScript text. STOP 637 | 2. If X.js is a file, load X.js as JavaScript text. STOP 638 | 3. If X.json is a file, parse X.json to a JavaScript Object. STOP 639 | 4. If X.node is a file, load X.node as binary addon. STOP 640 | 641 | LOAD_AS_DIRECTORY(X) 642 | 1. If X/package.json is a file, 643 | 1. a. Parse X/package.json, and look for "main" field. 644 | 1. b. let M = X + (json main field) 645 | 1. c. LOAD_AS_FILE(M) 646 | 2. If X/index.js is a file, load X/index.js as JavaScript text. STOP 647 | 3. If X/index.json is a file, parse X/index.json to a JavaScript object. STOP 648 | 4. If X/index.node is a file, load X/index.node as binary addon. STOP 649 | 650 | LOAD_NODE_MODULES(X, START) 651 | 1. let DIRS=NODE_MODULES_PATHS(START) 652 | 2. for each DIR in DIRS: 653 | 2. a. LOAD_AS_FILE(DIR/X) 654 | 2. b. LOAD_AS_DIRECTORY(DIR/X) 655 | 656 | NODE_MODULES_PATHS(START) 657 | 1. let PARTS = path split(START) 658 | 2. let I = count of PARTS - 1 659 | 3. let DIRS = [] 660 | 4. while I >= 0, 661 | 4. a. if PARTS[I] = "node_modules" CONTINUE 662 | 4. c. DIR = path join(PARTS[0 .. I] + "node_modules") 663 | 4. b. DIRS = DIRS + DIR 664 | 4. c. let I = I - 1 665 | 5. return DIRS 666 | 667 | Things we may want to simulate: 668 | 1. module.cache 669 | 2. module.require 670 | 3. module.id: The identifier for the module. Typically this is the fully resolved filename. 671 | 4. module.filename: The fully resolved filename to the module. 672 | 5. module.loaded: Whether or not the module is done loading, or is in the process of loading. 
673 | 6. module.parent: The module that first required this one. 674 | 7. module.children: The module objects required by this one. 675 | 676 | */ 677 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "node-optimize", 3 | "version": "1.2.3", 4 | "description": "Optimize a node project to a single JS file for distribution release.", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "repository": { 10 | "type": "git", 11 | "url": "https://github.com/danielgindi/node-optimize" 12 | }, 13 | "keywords": [ 14 | "node.js", 15 | "optimize", 16 | "compact", 17 | "distribute", 18 | "production", 19 | "uglify", 20 | "uglify-js" 21 | ], 22 | "author": "Daniel Cohen Gindi (https://github.com/danielgindi)", 23 | "license": "MIT", 24 | "bugs": { 25 | "url": "https://github.com/danielgindi/node-optimize/issues" 26 | }, 27 | "homepage": "https://github.com/danielgindi/node-optimize", 28 | "dependencies": { 29 | "uglify-js": "^2.8.0" 30 | } 31 | } 32 | --------------------------------------------------------------------------------