├── .gitignore ├── .jshintignore ├── .jshintrc ├── .npmignore ├── Makefile ├── bin ├── get.js ├── gluejs ├── usage-amd.txt └── usage.txt ├── command.md ├── index.js ├── lib ├── file-tasks │ ├── capture.js │ ├── spawn.js │ ├── stream-size.js │ ├── wrap-amd-vendor.js │ ├── wrap-commonjs-web.js │ └── wrap-json-web.js ├── list-tasks │ ├── annotate-basepath.js │ ├── annotate-lookup.js │ ├── annotate-stat.js │ ├── annotate-structured.js │ ├── annotate-with-task.js │ ├── filter-npm.js │ ├── filter-packages.js │ ├── filter-regex.js │ ├── infer-packages.js │ └── watch.js ├── list │ ├── amd.js │ └── detective.js ├── require │ ├── Makefile │ ├── index.js │ ├── readme.md │ ├── require.js │ ├── require.min.js │ ├── umd.js │ └── umd.min.js ├── runner │ ├── amd │ │ ├── deps.js │ │ ├── index.js │ │ └── load-config.js │ ├── commonjs │ │ ├── get-commands.js │ │ ├── get-file-tasks.js │ │ ├── index.js │ │ ├── report-package.js │ │ └── style.js │ ├── concat.js │ └── static-server.js └── util │ ├── amd-vendor-wrap-global.js │ ├── amd-vendor-wrap.js │ └── sort-dependencies.js ├── package.json ├── readme.md ├── test ├── command-integration │ ├── Makefile │ ├── es6-module.js │ ├── jade-module.js │ ├── robot.html │ ├── test.brfs.js │ ├── test.coffee │ ├── test.es6.js │ ├── test.hbs │ └── test.jade ├── fixtures │ ├── complex-package │ │ ├── lib │ │ │ └── index.js │ │ └── package.json │ ├── empty-file │ │ └── empty.js │ ├── jade-file │ │ ├── foo.jade │ │ └── index.js │ ├── json-file │ │ ├── foo.json │ │ └── index.js │ └── single-file │ │ ├── has_dependency.js │ │ └── simple.js ├── integration.test.js ├── list-tasks │ ├── annotate-stat.test.js │ ├── filter-npm.test.js │ ├── filter-package.test.js │ ├── infer-packages.test.js │ └── tmp │ │ ├── Makefile │ │ ├── index.html │ │ ├── index.js │ │ ├── lib │ │ └── bar.js │ │ └── package.json ├── node_modules │ └── gluejs.js ├── package.json ├── require.test.js ├── runner │ ├── concat.test.js │ ├── package.test.js │ └── server.test.js ├── tmp │ └── placeholder.txt └── tools │ └── file-task.js └── todo.md /.gitignore: -------------------------------------------------------------------------------- 1 | /node_modules/ 2 | test/tmp/ 3 | npm-debug.log 4 | test/list-tasks/tmp/node_modules/ 5 | test/node_modules/ 6 | -------------------------------------------------------------------------------- /.jshintignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | test/node_modules 3 | test/tmp 4 | lib/runner/commonjs/resources 5 | test/command-integration 6 | -------------------------------------------------------------------------------- /.jshintrc: -------------------------------------------------------------------------------- 1 | { 2 | // double !! is allowed 3 | "-W018": true, 4 | // and I don't care what jshint thinks is better written in dot notation, because using string notation 5 | // is nice for things that are configuration options. 
6 | "-W069": true 7 | } 8 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | /test/ 2 | todo.md 3 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | TESTS += test/*.test.js 2 | TESTS += test/list-tasks/*.test.js 3 | 4 | test: 5 | @mocha \ 6 | --ui exports \ 7 | --reporter spec \ 8 | --slow 2000ms \ 9 | --bail \ 10 | $(TESTS) 11 | @echo "Note: you need to go run npm install in ./test to install the test dependencies..." 12 | 13 | .PHONY: test lint 14 | 15 | # Note: use latest gjslint e.g 2.3.13 16 | lint: 17 | jshint . 18 | gjslint \ 19 | --nojsdoc \ 20 | --jslint_error=all \ 21 | --disable=6 \ 22 | --max_line_length=120 \ 23 | --custom_jsdoc_tags=api \ 24 | --exclude_directories=node_modules,lib/runner/commonjs/resources,test,lib/require \ 25 | --max_line_length=120 --disable=0131,300,2,1,6 \ 26 | -r . 27 | 28 | test-lint: 29 | ./bin/gluejs \ 30 | --no-cache \ 31 | --include ./lib \ 32 | --basepath ./ \ 33 | --out ./test/tmp/lint.js 34 | gjslint --nojsdoc --custom_jsdoc_tags=api --max_line_length=120 --disable=0131,300,2,1,6 ./test/tmp/lint.js 35 | -------------------------------------------------------------------------------- /bin/get.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | var fs = require('fs'), 3 | os = require('os'), 4 | path = require('path'), 5 | util = require('util'), 6 | Minilog = require('minilog'), 7 | AmdList = require('../lib/list/amd.js'), 8 | loadAMDConfig = require('../lib/runner/amd/load-config.js'), 9 | runner = require('../lib/runner/amd'), 10 | Cache = require('minitask').Cache, 11 | nodeResolve = require('resolve'); 12 | 13 | var optimist = require('optimist') 14 | .usage('Usage: $0 --include ') 15 | .options({ 16 | 'amd': { }, 17 | 'cache': { default: true }, 18 | 'include': { }, 19 | 'main': { }, 20 | }) 21 | .boolean('amd'), 22 | argv = optimist.parse(process.argv); 23 | 24 | if(!argv['include']) { 25 | console.log('Usage: --include '); 26 | console.log('Options:'); 27 | console.log(' --amd'); 28 | console.log(' --config'); 29 | console.log(' --vendor'); 30 | console.log(' --main '); 31 | process.exit(1); 32 | } 33 | 34 | 35 | Minilog.enable(); 36 | 37 | var homePath = process.env[(process.platform == 'win32') ? 'USERPROFILE' : 'HOME']; 38 | homePath = (typeof homePath === 'string' ? 
path.normalize(homePath) : process.cwd()); 39 | 40 | if(!argv['cache-path']) { 41 | argv['cache-path'] = homePath + path.sep + '.gluejs-cache' + path.sep; 42 | } 43 | 44 | // if the cache is disabled, then use a temp path 45 | if(!argv.cache) { 46 | argv['cache-path'] = os.tmpDir() + '/gluejs-' + new Date().getTime(); 47 | } 48 | 49 | var opts = { 50 | 'cache-method': argv['cache-method'] || 'stat', 51 | 'cache-path': argv['cache-path'] 52 | }; 53 | 54 | if(!Array.isArray(argv.include)) { 55 | argv.include = [ argv.include ]; 56 | } 57 | 58 | // determine main 59 | var main = argv.main || argv.include[0], 60 | basepath = path.resolve(process.cwd(), argv.basepath) || path.dirname(main); 61 | 62 | // resolve paths relative to process.cwd 63 | ['list-files', 'out', 'vendor-base'].forEach(function(key) { 64 | if(argv[key]) { 65 | argv[key] = path.resolve(process.cwd(), argv[key]); 66 | } 67 | }); 68 | 69 | // resolve paths relative to basepath 70 | ['config', 'vendor'].forEach(function(key) { 71 | if(argv[key]) { 72 | argv[key] = path.resolve(basepath, argv[key]); 73 | } 74 | }); 75 | 76 | argv.include = argv.include.map(function(p) { 77 | return path.resolve(basepath, p); 78 | }); 79 | 80 | // load resources 81 | 82 | if(argv.amd) { 83 | opts.amdresolve = loadAMDConfig(argv.config); 84 | } 85 | 86 | if(argv.amd && main) { 87 | // baseDir is required for AMD 88 | opts.amdresolve.baseDir = basepath; 89 | } 90 | 91 | function findModule(name) { 92 | var result = ''; 93 | try { 94 | result = nodeResolve.sync(name, { basedir: process.cwd() }); 95 | } catch(e) { 96 | try { 97 | result = nodeResolve.sync(name, { basedir: __dirname }); 98 | } catch(e) { 99 | console.error('Cannot find module ' + name + ' from ' + process.cwd() + ' or ' + __dirname); 100 | throw e; 101 | } 102 | } 103 | return result; 104 | } 105 | 106 | var list = new AmdList(opts); 107 | 108 | var cache = Cache.instance({ 109 | method: opts['cache-method'], 110 | path: opts['cache-path'] 111 | }); 112 | cache.begin(); 113 | 114 | console.log('Reading files: '); 115 | argv.include.forEach(function(filepath) { 116 | console.log(' ' + filepath); 117 | list.add(filepath); 118 | }); 119 | 120 | list.exec(function(err, files) { 121 | console.log('Processing ' + files.length + ' files.'); 122 | 123 | var vendor = require(path.resolve(process.cwd(), argv.vendor)); 124 | var vendorMap = vendor.paths; 125 | 126 | // resolve relative to --vendor-base 127 | Object.keys(vendorMap).forEach(function(name) { 128 | var value = vendorMap[name]; 129 | if (typeof value === 'string' && value.charAt(0) == '.') { 130 | vendorMap[name] = path.resolve(argv['vendor-base'], value); 131 | } 132 | }); 133 | Object.keys(vendorMap).forEach(function(name) { 134 | var value = vendorMap[name]; 135 | if(!fs.existsSync(value)) { 136 | vendorMap[name] = false; 137 | } 138 | }); 139 | 140 | // prefix: function(name, filepath) {} 141 | var plugins = {}; 142 | 143 | Object.keys(argv).forEach(function(name) { 144 | var matched = (typeof name === 'string' ? 
name.match(/plugin\-(.*)/) : false); 145 | if(matched) { 146 | var ext = matched[1]; 147 | argv[name] = findModule(argv[name]); 148 | plugins[ext] = require(argv[name]); 149 | } 150 | }); 151 | 152 | runner({ files: files }, { 153 | main: argv.main, 154 | basepath: basepath, 155 | configjs: opts.amdresolve, 156 | errs: list.resolveErrors(), 157 | 'cache-method': opts['cache-method'], 158 | 'cache-path': opts['cache-path'], 159 | cache: true, 160 | jobs: require('os').cpus().length * 2, 161 | vendor: vendorMap, 162 | exclude: vendor.exclude, 163 | extras: ['underscore'], 164 | command: argv.command, 165 | nomin: vendor.nomin || [], 166 | plugins: plugins, 167 | // set this so that builds are invalidated as the version changes 168 | 'gluejs-version': require('../package.json').version 169 | }, fs.createWriteStream(argv['out']), function(err, processedFiles) { 170 | if(argv['list-files']) { 171 | fs.appendFileSync(argv['list-files'], processedFiles.join('\n')); 172 | } 173 | cache.end(); 174 | }); 175 | }); 176 | -------------------------------------------------------------------------------- /bin/gluejs: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | var fs = require('fs'), 3 | path = require('path'), 4 | Glue = require('../index.js'), 5 | Minilog = require('minilog'); 6 | 7 | var opts = require('optimist') 8 | .usage('Usage: $0 --include --out filename.js') 9 | .options({ 10 | 'amd': { }, 11 | 'basepath': { 'default': process.cwd() }, 12 | 'cache': { default: true }, 13 | 'cache-path': { }, 14 | 'command': { }, 15 | 'exclude': { }, 16 | 'global': { }, 17 | 'global-require': { }, 18 | 'include': { }, 19 | 'jobs': { }, 20 | 'main': { 'default': 'index.js' }, 21 | 'out': { }, 22 | 'parse': { }, 23 | 'remap': { }, 24 | 'replace': { }, 25 | 'reset-exclude': { }, 26 | 'silent': { }, 27 | 'source-url': { }, 28 | 'verbose': { }, 29 | 'version': { } 30 | }) 31 | .boolean('cache') 32 | .boolean('report') 33 | .boolean('progress') 34 | .boolean('verbose') 35 | .boolean('parse'), 36 | argv = opts.parse(process.argv); 37 | 38 | if(argv['version'] || argv['v'] ) { 39 | console.log(require('../package.json').version); 40 | process.exit(); 41 | } 42 | 43 | if(!argv['include']) { 44 | return fs.createReadStream(__dirname + '/usage.txt').pipe(process.stdout).on('close', function () { process.exit(1) }); 45 | } 46 | 47 | var g = new Glue(); 48 | 49 | // set verbose early, so that all messages are shown 50 | if(argv['verbose']) { 51 | g.set('report', true); 52 | g.set('verbose', true); 53 | argv['progress'] = false; 54 | } 55 | 56 | // Booleans: 57 | // --reset-exclude 58 | // --source-url 59 | // --global-require 60 | // --parse 61 | [ 'reset-exclude', 'source-url', 'global-require', 'parse' ].forEach(function(key) { 62 | if(argv[key]) { 63 | g.set(key, true); 64 | } 65 | }); 66 | 67 | // Strings: 68 | // --exclude 69 | // --amd 70 | // --umd 71 | // --cache-method 72 | // --command 73 | // --transform 74 | // --jobs 75 | [ 'exclude', 'amd', 'umd', 'cache-method', 'command', 'transform', 'jobs' ].forEach(function(key) { 76 | if(argv[key]) { 77 | g.set(key, argv[key]); 78 | } 79 | }); 80 | 81 | // --include 82 | g.include(argv['include']); 83 | 84 | // --global 85 | g.export(argv['global']); 86 | 87 | // --basepath 88 | g.basepath(argv['basepath']); 89 | 90 | // --main 91 | g.main(argv['main']) 92 | 93 | // --replace 94 | if(argv['replace']) { 95 | // allow --replace foo=bar,baz=abc or --replace foo=bar --replace bar=abc 96 | 
if(Array.isArray(argv['replace'])) { 97 | argv['replace'].forEach(function(item) { 98 | var pos = item.indexOf('='), 99 | name = item.substring(0, pos), 100 | value = item.substring(pos+1); 101 | g.replace(name, value); 102 | }); 103 | } else { 104 | argv['replace'].split(',').forEach(function(item) { 105 | var pos = item.indexOf('='), 106 | name = item.substring(0, pos), 107 | value = item.substring(pos+1); 108 | g.replace(name, value); 109 | }); 110 | } 111 | 112 | } 113 | if(argv['remap']) { 114 | (Array.isArray(argv['remap']) ? argv['remap'] : [argv['remap']]).forEach(function(item) { 115 | var pos = item.indexOf('='), 116 | name = item.substring(0, pos), 117 | value = item.substring(pos+1); 118 | g.remap(name, value); 119 | }); 120 | } 121 | 122 | // if not explicitly disabled and output is defined 123 | if(argv['out']) { 124 | g.set('progress', true); 125 | argv['progress'] = true; 126 | } 127 | 128 | if(typeof argv['report'] !== 'undefined') { 129 | g.set('report', argv['report']); 130 | } 131 | 132 | if(typeof argv['progress'] !== 'undefined') { 133 | g.set('progress', argv['progress']); 134 | } 135 | 136 | // --silent 137 | if(argv['silent']) { 138 | g.set('report', false); 139 | g.set('verbose', false); 140 | g.set('progress', false); 141 | } 142 | 143 | g.set('cache', argv['cache']); 144 | 145 | if(argv['cache-path']) { 146 | g.set('cache-path', path.resolve(process.cwd(), argv['cache-path'])); 147 | } 148 | 149 | if(argv['npm']) { 150 | process.stderr.write('Warning: the --npm flag is deprecated. ' + 151 | 'Directly include the file path to module, e.g. --include ./node_modules/foo instead.\n'); 152 | } 153 | 154 | // set this so that builds are invalidated as the version changes 155 | g.set('gluejs-version', require('../package.json').version); 156 | 157 | // -- out 158 | if(argv['out']) { 159 | g.render(fs.createWriteStream(argv['out'])); 160 | } else { 161 | g.render(process.stdout); 162 | } 163 | -------------------------------------------------------------------------------- /bin/usage-amd.txt: -------------------------------------------------------------------------------- 1 | BASIC 2 | 3 | --include Path to import. 4 | 5 | --out File to write. Default: stdout 6 | 7 | --basepath Base path for the list of files. Default: process.cwd() 8 | 9 | --main Name of the main file/module to export. Default: index.js 10 | 11 | --config Path to AMD config file. 12 | 13 | --vendor Path to vendor file. 14 | 15 | --vendor-base Basepath for the paths defined in the vendor file. 16 | 17 | MINIFICATION / SOURCE TRANSFORMS 18 | 19 | --command Pipe each file through a shell command and capture the 20 | output (e.g. --command "uglifyjs --no-copyright"). 21 | 22 | REPORTING 23 | 24 | --list-files List included files into a single file. If the file exists, 25 | append to the file. Useful for removing minified files. 26 | 27 | --silent Disable all output, including the reporter. 28 | 29 | --verbose More verbose output. 30 | -------------------------------------------------------------------------------- /bin/usage.txt: -------------------------------------------------------------------------------- 1 | USAGE 2 | 3 | gluejs --include {OPTIONS} 4 | 5 | BASIC 6 | 7 | --include Path to import. 8 | 9 | --exclude JS regular expression string to match against 10 | the included paths. 11 | 12 | --out File to write. Default: stdout 13 | 14 | --global Name of the global to export. Default: "App" 15 | 16 | --basepath Base path for the list of files. 
Default: process.cwd() 17 | 18 | --main Name of the main file/module to export. Default: index.js 19 | 20 | REPLACE / REMAP 21 | 22 | --replace Bind require("name") to an expression. 23 | e.g. --replace jQuery=window.$ 24 | 25 | --remap Remap require("name") to another (internal) module. 26 | e.g. --remap "assert=require('chai').assert" 27 | 28 | BUILD OPTIONS 29 | 30 | --source-url Add source URL annotations to the files. Useful for 31 | development, but note that this is not compatible with IE. 32 | 33 | --global-require Export the require() implementation into the global space. 34 | 35 | --amd Export the module via the require.js AMD 36 | define("name", ...) using the name specified in --global. 37 | Note that require.js will not pick up modules defined 38 | like this unless you do at least one asynchronous require() 39 | call. 40 | 41 | MINIFICATION / SOURCE TRANSFORMS 42 | 43 | --command Pipe each file through a shell command and capture the 44 | output (e.g. --command "uglifyjs --no-copyright"). 45 | 46 | --transform Activates a source transformation module. 47 | 48 | PERFORMANCE 49 | 50 | --cache-path

Use a cache directory to store file builds. The cache 51 | speeds up large builds (and minified builds) significantly 52 | since only source files that have changed are updated. 53 | 54 | --jobs Sets the maximum level of parallelism for the task 55 | execution pipeline. Default: `os.cpus().length * 2` 56 | 57 | --cache-method Sets the cache method: stat | hash algorithm name. 58 | 59 | REPORTING 60 | 61 | --report Display the file size report. 62 | 63 | --silent Disable all output, including the reporter. 64 | 65 | --verbose More verbose output. 66 | 67 | --version Version info 68 | 69 | ADVANCED 70 | 71 | --reset-exclude Advanced: do not apply the default exclusions 72 | (/dist/, /example/, /benchmark/, .min.js). 73 | -------------------------------------------------------------------------------- /command.md: -------------------------------------------------------------------------------- 1 | ## Support for template pre-compilation 2 | 3 | I've included examples for the following: 4 | 5 | - Coffeescript (directly via `coffee`) 6 | - DoT (directly via `dottojs`) 7 | - EJS (plugin) 8 | - Handlebars (directly via `handlebars`) 9 | - Jade (directly via `jade`) 10 | - Mustache (plugin) 11 | - Underscore templates (plugin) 12 | 13 | These are generally triggered by passing a JSON descriptor to the `--command` option: 14 | 15 | `--command ` / `.set('command', [ { expr: new RegExp('\.foo$'), cmd: '' }, ... ])`: Advanced usage. You can apply a command to files with a particular extension. 16 | 17 | JSON options: 18 | 19 | - `cmd`: The command to run. 20 | - `ext`: The command is run on files matching this extension. 21 | - `expr`: The command is run on files matching this regular expression (passed to `new RegExp` and matched). 22 | - `wrap`: Optional wrapping. Right now, only `exports` is supported; it wraps the result in `module.exports = ;`. 23 | 24 | For example, for Coffeescript (e.g. files matching `.coffee`): 25 | 26 | --command '{ "ext":".coffee", "cmd": "coffee --compile --stdio" }' 27 | 28 | Or for Jade (e.g. files matching `.jade`): 29 | 30 | --command '{ "ext":".jade", "wrap": "exports", "cmd": "jade --client --no-debug" }' 31 | 32 | Sadly, some commands are not unixy: they don't support pipes. For those commands, you can use the automatic placeholders `` and ``. This alters how `glue` runs: it will replace the `` string with the actual filename, and `` with a temporary directory, from which the file is included into the build and then later removed. 33 | 34 | For example, for Handlebars (e.g. files matching `.hbs`): 35 | 36 | --command '{ "ext":".hbs", "wrap": "exports", "cmd": "handlebars --simple" }' 37 | 38 | Handlebars requires an input file, but does not need an output file (can write to stdout but not read from stdin). 39 | 40 | For example, for DoT (e.g. `.dot`): 41 | 42 | --command-dot "dottojs -s -d " 43 | 44 | ## Plugins for pre-compilation 45 | 46 | What about [templating libraries](http://garann.github.io/template-chooser/) that don't have builtin precompilation support? 
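A generic bridge (a minimal sketch, not part of gluejs itself) is a small wrapper script that reads the template source from stdin, runs the library's one-line compile call shown in the snippets below, and writes a CommonJS module to stdout so that the `--command` pipe convention can capture it. The file name `precompile-ejs.js` and the `module.exports = ...` wrapping are assumptions for illustration:

    #!/usr/bin/env node
    // Hypothetical ~/precompile-ejs.js: the template source arrives on stdin,
    // a CommonJS module wrapping the compiled client-side function goes out on stdout.
    var ejs = require('ejs');

    var str = '';
    process.stdin.setEncoding('utf8');
    process.stdin.on('data', function(chunk) { str += chunk; });
    process.stdin.on('end', function() {
      // client: true returns a standalone function that can be serialized
      var fn = ejs.compile(str, { client: true });
      process.stdout.write('module.exports = ' + fn.toString() + ';\n');
    });
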
47 | 48 | ejs: 49 | 50 | --command-ejs "~/precompile-ejs.js" 51 | 52 | console.log(ejs.compile(str, { client: true })); 53 | 54 | underscore.js templates: 55 | 56 | console.log(_.template(str)); 57 | 58 | Mustache.js: 59 | 60 | console.log(Mustache.compile(stringTemplate)); 61 | 62 | ## Using packages from other package managers 63 | 64 | AMD/RequireJS to CommonJS conversion: 65 | 66 | Component: 67 | 68 | Bower: 69 | 70 | ## Exporting to AMD etc 71 | 72 | UMD support documentation 73 | 74 | ## Generating obfuscated server side code 75 | 76 | ## --no-json 77 | 78 | By default `.json` files are included - this is how Node's native `require` implementation works. 79 | 80 | This means that something like `var config = require('./config.json')` works. 81 | 82 | Specify `--no-json` to exclude json files. 83 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | var os = require('os'), 2 | path = require('path'), 3 | List = require('minitask').list, 4 | DetectiveList = require('./lib/list/detective.js'), 5 | packageCommonJs = require('./lib/runner/commonjs/index.js'), 6 | Capture = require('./lib/file-tasks/capture.js'), 7 | Minilog = require('minilog'), 8 | Cache = require('minitask').Cache, 9 | log = require('minilog')('api'); 10 | 11 | var homePath = process.env[(process.platform == 'win32') ? 'USERPROFILE' : 'HOME']; 12 | homePath = (typeof homePath === 'string' ? path.normalize(homePath) : process.cwd()); 13 | 14 | // API wrapper 15 | function API() { 16 | // default options 17 | this.options = { 18 | replaced: {}, 19 | remap: {}, 20 | cache: true, 21 | 'cache-path': homePath + path.sep + '.gluejs-cache' + path.sep, 22 | include: [], 23 | _rename: {}, 24 | // set options here so that the cache hash does not change 25 | jobs: require('os').cpus().length * 2 26 | }; 27 | } 28 | 29 | API.prototype.include = function(filepath) { 30 | if(!filepath) return this; 31 | this.options.include.push(filepath); 32 | return this; 33 | }; 34 | 35 | API.prototype.render = function(dest) { 36 | var self = this; 37 | // if the cache is disabled, then use a temp path 38 | if(!this.options.cache) { 39 | this.options['cache-path'] = os.tmpDir() + '/gluejs-' + new Date().getTime(); 40 | } 41 | 42 | if(!this.options['cache-method']) { 43 | this.options['cache-method'] = 'stat'; 44 | } 45 | 46 | // LIST 47 | // only instantiate the list just before running the code 48 | // this avoids issues with the order between `.set('parse', true)` and .include() calls 49 | var opts = { 50 | 'cache-path': this.options['cache-path'], 51 | 'cache-method': this.options['cache-method'], 52 | 'cache-hash': Cache.hash(JSON.stringify(this.options)) 53 | }, 54 | list = (this.options['parse'] ? 
new DetectiveList(opts) : new List()); 55 | 56 | // set the cache mode to transactional and begin a single cache scope 57 | var cache = Cache.instance({ 58 | method: opts['cache-method'], 59 | path: opts['cache-path'] 60 | }); 61 | cache.begin(); 62 | 63 | // --reset-exclude should also reset the pre-processing exclusion 64 | if(this.options['reset-exclude']) { 65 | list.exclude(null); 66 | } 67 | // --basepath 68 | if(this.options['basepath']) { 69 | list.basepath(this.options['basepath']); 70 | } 71 | 72 | this.options['include'].map(function(filepath) { 73 | list.add(filepath); 74 | }); 75 | 76 | list.onRename = function(canonical, normalized) { 77 | self.options._rename[normalized] = canonical; 78 | }; 79 | // END LIST 80 | 81 | // console.time('list enum'); 82 | 83 | list.exec(function(err, files) { 84 | 85 | // console.timeEnd('list enum'); 86 | 87 | var capture; 88 | if(typeof dest == 'function') { 89 | capture = new Capture(); 90 | 91 | capture.on('error', function(err) { 92 | console.error('Error in the capture stream: ', err); 93 | console.trace(); 94 | }); 95 | 96 | capture.once('finish', function() { 97 | dest(null, capture.get()); 98 | }); 99 | } 100 | 101 | // console.time('package files'); 102 | packageCommonJs({ files: files }, self.options, capture ? capture : dest, function() { 103 | 104 | // console.timeEnd('package files'); 105 | 106 | cache.end(); 107 | }); 108 | }); 109 | }; 110 | 111 | // setters 112 | API.prototype.set = function(key, value) { 113 | this.options[key] = value; 114 | if(key == 'verbose' && value) { 115 | Minilog.enable(); 116 | } 117 | if(key == 'jobs') { 118 | log.info('Maximum number of parallel tasks:', this.options.jobs); 119 | } 120 | return this; 121 | }; 122 | 123 | ['export', 'main'].forEach(function(key) { 124 | API.prototype[key] = function(value) { 125 | this.options[key] = value; 126 | return this; 127 | }; 128 | }); 129 | 130 | API.prototype.basepath = function(value) { 131 | this.options.basepath = path.resolve(process.cwd(), value); 132 | return this; 133 | }; 134 | 135 | // other 136 | API.prototype.replace = function(module, code) { 137 | if(arguments.length == 1 && module === Object(module)) { 138 | Object.keys(module).forEach(function(k) { 139 | this.replace(k, module[k]); 140 | }, this); 141 | } else { 142 | // TODO: exclude the module with the same name 143 | if(typeof code == 'object') { 144 | this.options.replaced[module] = JSON.stringify(code); 145 | } else { 146 | // function / number / boolean / undefined all convert to string already 147 | this.options.replaced[module] = code; 148 | } 149 | } 150 | 151 | return this; 152 | }; 153 | 154 | API.prototype.remap = function(module, code) { 155 | if(arguments.length == 1 && module === Object(module)) { 156 | Object.keys(module).forEach(function(k) { 157 | this.remap(k, module[k]); 158 | }, this); 159 | } else { 160 | if(typeof code == 'object') { 161 | this.options.remap[module] = JSON.stringify(code); 162 | } else { 163 | // function / number / boolean / undefined all convert to string already 164 | this.options.remap[module] = code; 165 | } 166 | } 167 | return this; 168 | }; 169 | 170 | API.prototype.exclude = function(path) { 171 | if(!this.options['exclude']) { 172 | this.options['exclude'] = []; 173 | } 174 | this.options['exclude'].push((path instanceof RegExp ? 
path : new RegExp(path))); 175 | return this; 176 | }; 177 | 178 | // Express Middleware 179 | API.middleware = function (opts) { 180 | 181 | // -- Set some sane defaults 182 | opts = opts || {}; 183 | opts.include = opts.include || './lib'; 184 | if(!opts.basepath) { 185 | opts.basepath = Array.isArray(opts.include) ? opts.include[0] : opts.include; 186 | } 187 | opts.main = opts.main || 'index.js'; 188 | 189 | // -- Create an instance of the API to use 190 | var glue = new API() 191 | .include(opts.include) 192 | .basepath(opts.basepath); 193 | 194 | // -- All other options are set by clobbering the glue.options hash 195 | Object.keys(opts).forEach(function (key) { 196 | glue.set(key, opts[key]); 197 | }); 198 | 199 | // -- Middleware to return 200 | return function (req, res, next) { 201 | 202 | // -- Return all non GET requests 203 | if('GET' !== req.method) return next(); 204 | 205 | // -- Set content-type 206 | res.setHeader('Content-Type', 'application/javascript'); 207 | 208 | // -- Render file and pipe to response 209 | glue.render(res); 210 | }; 211 | }; 212 | 213 | module.exports = API; 214 | -------------------------------------------------------------------------------- /lib/file-tasks/capture.js: -------------------------------------------------------------------------------- 1 | // use readable-stream to use Node 0.10.x streams in Node 0.8.x 2 | var Writable = require('readable-stream').Writable, 3 | util = require('util'); 4 | 5 | function Wrap(options) { 6 | Writable.call(this, options); 7 | this.buffer = ''; 8 | } 9 | 10 | util.inherits(Wrap, Writable); 11 | 12 | Wrap.prototype._write = function(chunk, encoding, done) { 13 | // marked cannot stream input, so we need to accumulate it here. 14 | this.buffer += chunk; 15 | done(); 16 | }; 17 | 18 | Wrap.prototype.get = function() { 19 | return this.buffer; 20 | }; 21 | 22 | module.exports = Wrap; 23 | -------------------------------------------------------------------------------- /lib/file-tasks/spawn.js: -------------------------------------------------------------------------------- 1 | var spawn = require('child_process').spawn; 2 | 3 | // Return a duplex stream, or alternatively an object which has a readable stream called .stdout 4 | // and a writable stream called .stdin. 5 | // This function is called for each file in the list of tasks. 6 | // The output of the preceding task is piped into the writable/duplex stream, 7 | // and the output of the readable/duplex stream is captured or piped forward 8 | module.exports = function(options) { 9 | if(typeof options.task === 'string') { 10 | // parse quoted string variants: 11 | // 1) not whitespace, not quote followed by not whitespace OR 12 | // 2) double quote followed by not double quote or escaped double quote followed by double OR 13 | // 3) single quote followed by not single quote or escaped single quote followed by single 14 | options.task = options.task.match( 15 | /([^ \t\r\n"'][^ \t\r\n]*)|"(?:[\]["]|[^"])+"|'(?:[\][']|[^'])+'/g 16 | ); 17 | // since we're using uv_spawn which calls out to exec(3), we need to exclude the beginning quotes 18 | options.task = options.task.map(function(i, index) { 19 | if (index > 0 && 20 | i.charAt(0) == i.charAt(i.length-1) && 21 | (i.charAt(0) == '"' || i.charAt(0) == "'") 22 | ) { 23 | return i.substring(1, i.length-1); 24 | } 25 | return i; 26 | }); 27 | } 28 | var task = spawn.call(this, options.task[0], options.task.slice(1)); 29 | 30 | // The child_process API only emits errors when invoking the task fails. 
31 | // To more closely match normal streams, listen for "exit" with exit status != 0 and emit 32 | // a error. 33 | task.on('exit', function(code) { 34 | if(code !== 0) { 35 | console.log(''); 36 | console.log('spawn-task: "' + options.task.join(' ') + '" on "' + 37 | options.name+ '" exited with nonzero exit code: '+ code); 38 | task.emit('error', new Error('Child process exited with nonzero exit code: '+ code)); 39 | } 40 | }); 41 | return task; 42 | }; 43 | -------------------------------------------------------------------------------- /lib/file-tasks/stream-size.js: -------------------------------------------------------------------------------- 1 | var Transform = require('readable-stream').Transform; 2 | 3 | function WrapCJS(options) { 4 | Transform.call(this, options); 5 | this.opts = options || {}; 6 | this.length = 0; 7 | } 8 | 9 | WrapCJS.prototype = Object.create(Transform.prototype, { constructor: { value: WrapCJS }}); 10 | 11 | WrapCJS.prototype._transform = function(chunk, encoding, done) { 12 | this.length += chunk.length; 13 | done(null, chunk); 14 | }; 15 | 16 | WrapCJS.prototype._flush = function(done) { 17 | if(this.opts.onDone) { 18 | this.opts.onDone(this.length); 19 | } 20 | done(); 21 | }; 22 | 23 | module.exports = function(options) { 24 | var instance = new WrapCJS(options); 25 | return { 26 | stdin: instance, 27 | stdout: instance 28 | }; 29 | }; 30 | -------------------------------------------------------------------------------- /lib/file-tasks/wrap-amd-vendor.js: -------------------------------------------------------------------------------- 1 | var fs = require('fs'), 2 | template = fs.readFileSync(__dirname + '/../util/amd-vendor-wrap-global.js').toString(), 3 | template2 = fs.readFileSync(__dirname + '/../util/amd-vendor-wrap.js').toString(); 4 | 5 | module.exports = function wrapAMDVendor(name, content, deps, globalName) { 6 | var result = ''; 7 | 8 | // globalName is defined for modules which are "shimmed" e.g. they export a global which 9 | // is then wrapped as AMD 10 | if(globalName) { 11 | // newline here is necessary since files may not end with a newline and may end with a comment 12 | result += ('\n' + 'define(%name%, %deps%, (function (global) {\n' + 13 | ' return function () {\n') 14 | .replace(/%name%/g, JSON.stringify(name)) 15 | .replace(/%deps%/g, JSON.stringify(deps)); 16 | // the content is bundled within the define callback so that it is executed with AMD semantics 17 | result += content; 18 | result += template.replace('%global%', globalName); 19 | } else { 20 | // the r.js optimizer does a bunch of source transformations which are 21 | // basically impossible to extract without importing the whole damn thing 22 | // however, since the end result of those transformations is just to map to a specific form 23 | // of define() call, we can just wrap the module and deal with the four (!) or so variants 24 | // of define calls that r.js allows. 
25 | result += '\n' + '(function(define) {\ndefine.amd = {};\n'; 26 | result += content; 27 | result += template2.replace(/%name%/g, JSON.stringify(name)) 28 | .replace(/%deps%/g, JSON.stringify(deps)); 29 | } 30 | return result; 31 | }; 32 | -------------------------------------------------------------------------------- /lib/file-tasks/wrap-commonjs-web.js: -------------------------------------------------------------------------------- 1 | var Transform = require('readable-stream').Transform; 2 | 3 | function WrapCJS(options) { 4 | Transform.call(this, options); 5 | this.opts = options || {}; 6 | this.first = true; 7 | } 8 | 9 | WrapCJS.prototype = Object.create(Transform.prototype, { constructor: { value: WrapCJS }}); 10 | 11 | WrapCJS.prototype.writeFirst = function() { 12 | this.push('function(module, exports, require){\n'); 13 | if(this.opts['source-url']) { 14 | this.push('eval('); 15 | } 16 | this.first = false; 17 | }; 18 | 19 | WrapCJS.prototype._transform = function(chunk, encoding, done) { 20 | if(this.first) { 21 | this.writeFirst(); 22 | } 23 | if(this.opts['source-url']) { 24 | this.push(JSON.stringify(chunk.toString())+'+'); 25 | } else { 26 | this.push(chunk); 27 | } 28 | done(); 29 | }; 30 | 31 | WrapCJS.prototype._flush = function(done) { 32 | // for 0-length files, only _flush is called 33 | if(this.first) { 34 | this.writeFirst(); 35 | } 36 | if(this.opts['source-url']) { 37 | // Chrome's inspector has a bug which eats some characters 38 | // (e.g. lib -> ib and example -> xample) 39 | // https://code.google.com/p/chromium/issues/detail?id=210421 40 | // Work around that by prepending / 41 | this.push(JSON.stringify('\n\/\/@ sourceURL= '+ 42 | (this.opts['name'].charAt(0) != '/' ? '/' : '') + this.opts['name'])+');'); 43 | } 44 | // newline here is important as the last line may be a unterminated comment 45 | this.push('\n}'); 46 | done(); 47 | }; 48 | 49 | module.exports = function(options) { 50 | var instance = new WrapCJS(options); 51 | return { 52 | stdin: instance, 53 | stdout: instance 54 | }; 55 | }; 56 | -------------------------------------------------------------------------------- /lib/file-tasks/wrap-json-web.js: -------------------------------------------------------------------------------- 1 | var Transform = require('readable-stream').Transform; 2 | 3 | function WrapCJS(options) { 4 | Transform.call(this, options); 5 | this.opts = options || {}; 6 | this.first = true; 7 | this.buffer = ''; 8 | } 9 | 10 | WrapCJS.prototype = Object.create(Transform.prototype, { constructor: { value: WrapCJS }}); 11 | 12 | WrapCJS.prototype.writeFirst = function() { 13 | this.push('function(module, exports, require){\n'); 14 | this.push('module.exports = '); 15 | this.first = false; 16 | }; 17 | 18 | WrapCJS.prototype._transform = function(chunk, encoding, done) { 19 | if(this.first) { 20 | this.writeFirst(); 21 | } 22 | this.buffer += chunk; 23 | done(); 24 | }; 25 | 26 | WrapCJS.prototype._flush = function(done) { 27 | // for 0-length files, only _flush is called 28 | if(this.first) { 29 | this.writeFirst(); 30 | this.push('{};}'); 31 | } else { 32 | this.push(this.buffer.trim() + ';\n}'); 33 | } 34 | done(); 35 | }; 36 | 37 | module.exports = function(options) { 38 | var instance = new WrapCJS(options); 39 | return { 40 | stdin: instance, 41 | stdout: instance 42 | }; 43 | }; 44 | -------------------------------------------------------------------------------- /lib/list-tasks/annotate-basepath.js: 
-------------------------------------------------------------------------------- 1 | // This task calculates the longest common substring among the file paths 2 | 3 | function check(obj) { 4 | return (obj.name.charAt(index) == first.name.charAt(index)); 5 | } 6 | 7 | module.exports = function(list) { 8 | var index = 0, 9 | first = list.files[0]; 10 | 11 | if(!first) return; 12 | 13 | while(list.files.every(check) && index < first.name.length) { 14 | index++; 15 | } 16 | 17 | list.basepath = first.name.substr(0, index); 18 | }; 19 | -------------------------------------------------------------------------------- /lib/list-tasks/annotate-lookup.js: -------------------------------------------------------------------------------- 1 | // creates list.lookup, which is list.files but indexed by filename rather than an array 2 | module.exports = function(list) { 3 | list.lookup = {}; 4 | list.files.forEach(function(item) { 5 | list.lookup[item.name] = item; 6 | }); 7 | }; 8 | -------------------------------------------------------------------------------- /lib/list-tasks/annotate-stat.js: -------------------------------------------------------------------------------- 1 | var fs = require('fs'); 2 | 3 | // This task adds a .stat property to every file in the list 4 | module.exports = function(list) { 5 | list.files.forEach(function(obj, i) { 6 | list.files[i].stat = fs.statSync(obj.name); 7 | }); 8 | }; 9 | -------------------------------------------------------------------------------- /lib/list-tasks/annotate-structured.js: -------------------------------------------------------------------------------- 1 | var path = require('path'); 2 | 3 | // This task takes something that looks like this: 4 | // { files: [ { name: /path/to/foo }]} 5 | // into: 6 | // { path: { to: { ".": [ 'foo' ] }}} 7 | // 8 | // which makes many tasks a lot easier since you can recurse into directories in a JSON structure 9 | // files are always stored in the special key "." 10 | 11 | module.exports = function(list){ 12 | var structured = {}; 13 | // regroup the files by splitting each name on the directory separator 14 | 15 | list.files.forEach(function(obj) { 16 | var pathParts = obj.name.split(path.sep).filter(function(item) { return item.length > 0; }), 17 | current = structured, 18 | i, part; 19 | 20 | for(i = 0; i < pathParts.length - 1; i++) { 21 | part = pathParts[i]; 22 | if(!current[part]) { 23 | current[part] = {}; 24 | } 25 | current = current[part]; 26 | } 27 | 28 | // clone the object 29 | var item = JSON.parse(JSON.stringify(obj)); 30 | item.relname = pathParts[pathParts.length - 1 ]; 31 | 32 | // the last part is the file name - store under { ".": [ filename, filename ] } 33 | current['.'] = (current['.'] ? current['.'] : [] ).concat(item); 34 | 35 | }); 36 | 37 | list.structured = structured; 38 | }; 39 | -------------------------------------------------------------------------------- /lib/list-tasks/annotate-with-task.js: -------------------------------------------------------------------------------- 1 | module.exports = function(list, options) { 2 | var expression = options.expression, 3 | tasks = options.tasks; 4 | 5 | list.files.forEach(function(file) { 6 | if(expression.test(file)) { 7 | file.tasks = (file.tasks ? 
file.tasks : []).concat(tasks); 8 | } 9 | }); 10 | }; 11 | 12 | -------------------------------------------------------------------------------- /lib/list-tasks/filter-npm.js: -------------------------------------------------------------------------------- 1 | var log = require('minilog')('filter-npm'); 2 | // This task prunes a file list based on three things: 3 | // 1) npm's built-in ignore list <== Applied by filter-npm 4 | // 2) package.json's files field <=\ 5 | // 3) .npmignore files <== Application is scoped to a project - applied by filter-project after infer-packages 6 | // 4) .gitignore files <=/ 7 | // 8 | // The end result is that files that would be excluded by npm publish are dropped from the list. 9 | 10 | module.exports = function(list) { 11 | list.files = list.files.filter(function(obj, i) { 12 | var name = obj.name; 13 | if( 14 | name.match(new RegExp('/node_modules/[.]bin/')) || // this one is nonstandard but useful 15 | name.match(new RegExp('/[.]git/')) || 16 | name.match(new RegExp('[.]lock-wscript$')) || 17 | name.match(/\/[.]wafpickle-[0-9]+$/) || 18 | name.match(new RegExp('/CVS/')) || 19 | name.match(new RegExp('/[.]svn/')) || 20 | name.match(new RegExp('/[.]hg/')) || 21 | name.match(/\/[.].*[.]swp$/) || 22 | name.match(new RegExp('[.]DS_Store$')) || 23 | name.match(/\/[.]_/) || 24 | name.match(new RegExp('npm-debug[.]log$')) 25 | ) { 26 | // 1) npm's built-in ignore list 27 | log.info('Excluded by npm\'s built-in ignore list:', name); 28 | return false; 29 | } 30 | return true; 31 | }); 32 | }; 33 | -------------------------------------------------------------------------------- /lib/list-tasks/filter-packages.js: -------------------------------------------------------------------------------- 1 | var fs = require('fs'), 2 | util = require('util'), 3 | path = require('path'), 4 | Minimatch = require("minimatch").Minimatch, 5 | log = require('minilog')('filter-packages'); 6 | 7 | // This task prunes a file list based on three things: 8 | // 1) npm's built-in ignore list <== Applied by filter-npm 9 | // 2) package.json's files field <=\ 10 | // 3) .npmignore files <== Application is scoped to a project - applied by filter-project after infer-packages 11 | // 4) .gitignore files <=/ 12 | // 13 | // The end result is that files that would be excluded by npm publish are dropped from the list. 14 | 15 | // This function is ported from isaacs/fstream-ignore 16 | function match(rules, fullpath) { 17 | var included = true; 18 | 19 | // console.log('--', fullpath); 20 | 21 | // it looks like the base logic in fstream-ignore basically relies on nested `parent.applyIgnores` 22 | // calls to exhaustively match the paths. 23 | // so we'll do the same thing here: split the entry by path parts, 24 | // then retry with every single alternative: 25 | // e.g. /a/b/c => [ c, 'b/c', 'a/b/c'].foreach(match) 26 | var parts = fullpath.substr(1).split('/').reverse(), entry = ''; 27 | // note: [ c, b, a ] rather than [ a, b ] because reversed 28 | parts.some(function(next){ 29 | entry = (entry === '' ? 
next : next + '/') + entry; 30 | 31 | rules.forEach(function(rule) { 32 | // negation means inclusion 33 | if (rule.negate && included || !rule.negate && !included) { 34 | // unnecessary 35 | return; 36 | } 37 | 38 | // first, match against /foo/bar 39 | var match = rule.match('/' + entry); 40 | // console.log('/' + entry, rule.pattern, match); 41 | 42 | if (!match) { 43 | // try with the leading / trimmed off the test 44 | // eg: foo/bar instead of /foo/bar 45 | match = rule.match(entry); 46 | // console.log(entry, rule.pattern, match); 47 | 48 | // Note: only try cases where entry is prefixed with a "/" 49 | // => otherwise, non-directories will match here (e.g. /examples/file.js 50 | // should not be matched as /examples/file.js/) 51 | } 52 | 53 | // since unlike fstream-ignore, we see the full paths for directories, 54 | // need to try against the basename as well 55 | // There are 4 options: plain, prefix, postfix, pre+postfix 56 | 57 | if (!match) { 58 | var dirname = path.dirname(entry); 59 | if(dirname != '.') { 60 | match = [ 61 | dirname, 62 | '/'+dirname, 63 | dirname + '/', 64 | '/' + dirname + '/' 65 | ].some(function(permutation) { 66 | // console.log(permutation, rule.pattern); 67 | return rule.match(permutation); 68 | }); 69 | } 70 | } 71 | 72 | if (match) { 73 | included = rule.negate; 74 | } 75 | }); 76 | // quick return 77 | if(!included) return false; 78 | }); 79 | 80 | return included; 81 | } 82 | 83 | module.exports = function(list) { 84 | // package.json, .npmignore and .gitignore are applied on a per-project basis 85 | // note that there can be multiple .gitignores inside a project 86 | 87 | function removePackageFiles(item) { 88 | item.files.forEach(function(file) { 89 | removed.push(file.name); 90 | }); 91 | } 92 | 93 | function removePackages(list, packageObj, removeById) { 94 | var paths = Object.keys(removeById).map(function(name) { 95 | return new RegExp('^'+packageObj.basepath+'node_modules/'+name); 96 | }); 97 | var depById = packageObj.dependenciesById; 98 | 99 | list.packages = list.packages.filter(function(pack) { 100 | var match = paths.some(function(basepath) { 101 | // e.g. a single file under node_modules will not match by base path 102 | // but will via uid 103 | if(pack.name && depById[pack.name] && removeById[pack.name] && pack.uid == depById[pack.name]) { 104 | delete packageObj.dependenciesById[pack.name]; 105 | return true; 106 | } 107 | // e.g. 
modules and submodules of modules will match by base path prefix 108 | return basepath.test(pack.basepath); 109 | }); 110 | // remove packages that match the name 111 | if(match) { 112 | // remove each file in that package 113 | removePackageFiles(pack); 114 | } 115 | // filter = only include non-matching 116 | return !match; 117 | }); 118 | } 119 | 120 | var removed = []; 121 | // find the ignore files (applying them in the correct order) 122 | function traverse(packageObj) { 123 | // sort by length 124 | packageObj.files = packageObj.files.sort(function(a, b) { return a.name.length - b.name.length; } ); 125 | var rules = [], raw, 126 | files = packageObj.files; 127 | 128 | // handle files 129 | if(files) { 130 | // scan for files matching 131 | files.forEach(function(item) { 132 | var fullpath = item.name; 133 | 134 | if(/\/package.json$/.test(fullpath)) { 135 | var p = JSON.parse(fs.readFileSync(fullpath)); 136 | 137 | if(p.files && Array.isArray(p.files)) { 138 | // from https://github.com/isaacs/fstream-npm/blob/master/fstream-npm.js#L213 139 | raw = ["*"].concat(p.files.map(function (f) { 140 | return "!" + f; 141 | })).concat(p.files.map(function (f) { 142 | return "!" + f.replace(/\/+$/, "") + "/**"; 143 | })); 144 | 145 | raw = raw.map(function (s) { 146 | var m = new Minimatch(s, { matchBase: true, dot: true, flipNegate: true }); 147 | m.ignoreFile = fullpath; 148 | return m; 149 | }); 150 | 151 | rules = rules.concat(raw); 152 | } 153 | 154 | if(p.devDependencies) { 155 | removePackages(list, packageObj, p.devDependencies); 156 | } 157 | } 158 | 159 | 160 | if(/\/\.npmignore$/.test(fullpath) || /\/\.gitignore$/.test(fullpath)) { 161 | // ported from isaacs/fstream-ignore 162 | var set = fs.readFileSync(fullpath).toString().split(/\r?\n/); 163 | // filter comments and empty lines 164 | set = set.filter(function (s) { 165 | s = s.trim(); 166 | return s && !s.match(/^#/); 167 | }); 168 | 169 | raw = set.map(function (s) { 170 | var m = new Minimatch(s, { matchBase: true, dot: true, flipNegate: true }); 171 | m.ignoreFile = fullpath; 172 | return m; 173 | }); 174 | 175 | rules = rules.concat(raw); 176 | } 177 | }); 178 | 179 | 180 | if(rules.length > 0) { 181 | packageObj.files = files.filter(function(item) { 182 | var name = item.name, 183 | result = match(rules, name); 184 | // also update list.files 185 | if(!result) { 186 | if(name.charAt(0) == '/' ) { 187 | removed.push(name); 188 | } else { 189 | removed.push((packageObj.basepath ? 
packageObj.basepath : '') + name); 190 | } 191 | } 192 | return result; 193 | }); 194 | } 195 | } 196 | } 197 | 198 | list.packages.forEach(traverse); 199 | 200 | // console.log(removed); 201 | removed.forEach(function(file) { 202 | log.info('Excluded by .{git,npm}ignore:', file); 203 | }); 204 | 205 | // update files 206 | list.files = list.files.filter(function(obj) { 207 | return removed.indexOf(obj.name) == -1; 208 | }); 209 | 210 | }; 211 | 212 | 213 | // to override the fs module, which is only used for reading in package.json files 214 | module.exports._setFS = function(newFs) { 215 | fs = newFs; 216 | }; 217 | -------------------------------------------------------------------------------- /lib/list-tasks/filter-regex.js: -------------------------------------------------------------------------------- 1 | var log = require('minilog')('filter-regex'); 2 | 3 | // Filter out files from a list by a blacklist of regular expressions 4 | module.exports = function(list, expressions) { 5 | list.files = list.files.filter(function(obj, i) { 6 | var name = obj.name, 7 | matchedExpr, 8 | match = expressions.some(function(expr) { 9 | var result = name.match(expr); 10 | if(result) { 11 | matchedExpr = expr; 12 | } 13 | return result; 14 | }); 15 | if(match) { 16 | log.info('Excluded by regexp ', matchedExpr, ':', name); 17 | } 18 | return !match; 19 | }); 20 | }; 21 | -------------------------------------------------------------------------------- /lib/list-tasks/infer-packages.js: -------------------------------------------------------------------------------- 1 | var fs = require('fs'), 2 | path = require('path'), 3 | annotateStructured = require('./annotate-structured.js'); 4 | 5 | // assigns unique ids to packages to disabiguate between packages with the same name 6 | // without referring to packages directly by their index in the structure 7 | var uid = 0; 8 | 9 | // Applies multiple annotations 10 | // 11 | // Takes `list.files` as the input and generates `list.packages` 12 | 13 | module.exports = function(list, options) { 14 | uid = 0; 15 | if(!options) { 16 | options = {}; 17 | } 18 | // regroup the files by splitting each name on the directory separator 19 | annotateStructured(list); 20 | var structured = list.structured; 21 | 22 | function exists(arr, path) { 23 | // console.log('exists', path); 24 | if(typeof path == 'string') { 25 | return arr.some(function(item) { return item.name == path; }); 26 | } else if(path instanceof RegExp) { 27 | return arr.some(function(item) { return path.test(item.name); }); 28 | } 29 | } 30 | 31 | function getMainFile(basepath, currentDirFiles) { 32 | var mainFile, 33 | packageJsonPath = path.normalize(basepath+'/package.json'); 34 | // use existsSync; for example, underscore has a .gitignore 35 | // that filters out the package.json file; we need to treat it as existing 36 | // even if it is excluded 37 | 38 | if(fs.existsSync(packageJsonPath)) { 39 | // 1. package.json 40 | var data = JSON.parse(fs.readFileSync(packageJsonPath)); 41 | 42 | // Note: lookups from list.files are intentional: need to look at all the available 43 | // paths, rather than just the ones in the current directory. 
44 | 45 | if(data.main) { 46 | var guess = path.resolve(basepath, data.main); 47 | if(exists(list.files, guess)) { 48 | mainFile = data.main; 49 | } else if(exists(list.files, guess + '.js')) { 50 | mainFile = data.main + '.js'; 51 | } else if(exists(list.files, guess + '.json')) { 52 | mainFile = data.main + '.json'; 53 | } else if(exists(list.files, guess + '/index.js')) { 54 | mainFile = path.normalize(data.main + '/index.js'); 55 | } 56 | } 57 | } 58 | if(!mainFile && currentDirFiles && exists(currentDirFiles, /\/index.js$/)) { 59 | // 2. index.js 60 | mainFile = 'index.js'; 61 | } 62 | // remove ./ prefix for main files, since the require() implementation does not run 63 | // path processing when switching between contexts 64 | if(mainFile && mainFile.substr(0, 2) == './') { 65 | mainFile = mainFile.substr(2); 66 | } 67 | 68 | // 3. index.node (unsupported - binary addition) 69 | return mainFile; 70 | } 71 | 72 | // console.log(structured); 73 | 74 | var packages = [ { files: [], dependenciesById: {} } ]; 75 | 76 | // we cannot auto-detect the basepath (since the base package might consist of multiple 77 | // different directories) but we can read it in 78 | if(options.basepath) { 79 | packages[0].basepath = options.basepath; 80 | } 81 | 82 | function getPackage(root, currentPath, packageIndex) { 83 | // handle files 84 | if(root['.']) { 85 | root['.'].forEach(function(file) { 86 | // the relname was relative to the current path - it will be misleading for a package 87 | delete file.relname; 88 | // packages[packageIndex].files.push( relPath + file.name ); 89 | packages[packageIndex].files.push( file ); 90 | }); 91 | } 92 | 93 | Object.keys(root).forEach(function(dirname) { 94 | var packageName, packageBase, index, 95 | mainFile; 96 | if(dirname != 'node_modules' && dirname != '.') { 97 | getPackage(root[dirname], currentPath + dirname + '/', packageIndex); 98 | } else if(dirname == 'node_modules') { 99 | // single file packages 100 | if(root['node_modules']['.']) { 101 | root['node_modules']['.'].forEach(function(file) { 102 | // add single-file package 103 | mainFile = path.basename(file.name); 104 | packageName = mainFile.replace(/(\.js|\.json)$/, ''); 105 | packageBase = path.dirname(file.name) + '/'; 106 | index = packages.length; 107 | 108 | // the relname was relative to the current path - it will be misleading for a package 109 | delete file.relname; 110 | 111 | packages[index] = { 112 | name: packageName, 113 | uid: ++uid, 114 | basepath: packageBase, 115 | main: mainFile, 116 | files: [ file ], 117 | dependenciesById: {} 118 | }; 119 | // add parent dependency 120 | packages[packageIndex].dependenciesById[packageName] = packages[index].uid; 121 | }); 122 | } 123 | // handle modules 124 | Object.keys(root['node_modules']).forEach(function(dirname) { 125 | if(dirname != '.') { 126 | // create a new package 127 | index = packages.length; 128 | packageName = dirname; 129 | packageBase = currentPath + 'node_modules/'+ dirname +'/'; 130 | var files = root['node_modules'][dirname]['.']; 131 | 132 | // detect the main file 133 | mainFile = getMainFile(packageBase, files); 134 | 135 | packages[index] = { 136 | name: packageName, 137 | uid: ++uid, 138 | basepath: packageBase, 139 | main: mainFile, 140 | files: [], 141 | dependenciesById: {} 142 | }; 143 | // add parent dependency 144 | packages[packageIndex].dependenciesById[packageName] = packages[index].uid; 145 | // traverse 146 | getPackage(root.node_modules[dirname], packageBase, index); 147 | } 148 | }); 149 | } 150 | }); 151 | 
} 152 | 153 | // the first package contains all files until we reach the first 'node_modules' 154 | // all other packages are delineated by a node_modules transition 155 | getPackage(structured, '/', 0); 156 | 157 | // set main path from options if given 158 | if(options.main) { 159 | packages[0].main = options.main; 160 | } 161 | // after completing the packagification 162 | // detect the main file for the root package (but only if it has a basepath) 163 | if(packages[0].basepath && !packages[0].main) { 164 | packages[0].main = getMainFile(packages[0].basepath, packages[0].files); 165 | } 166 | 167 | // var util = require('util'); 168 | // console.log(util.inspect(list.packages, null, 5, true), 169 | // util.inspect(packages, null, 5, true)); 170 | 171 | list.packages = packages; 172 | }; 173 | 174 | // to override the fs module, which is only used for reading in package.json files 175 | module.exports._setFS = function(newFs) { 176 | fs = newFs; 177 | }; 178 | -------------------------------------------------------------------------------- /lib/list-tasks/watch.js: -------------------------------------------------------------------------------- 1 | // monitors a directory list, and triggers a user defined action whenever an observed file is modified 2 | -------------------------------------------------------------------------------- /lib/list/amd.js: -------------------------------------------------------------------------------- 1 | var fs = require('fs'), 2 | path = require('path'), 3 | amdresolve = require('amd-resolve'), 4 | amdDeps = require('../runner/amd/deps.js'), 5 | List = require('minitask').list, 6 | Cache = require('minitask').Cache, 7 | log = require('minilog')('parse'); 8 | 9 | function DetectiveList(opts) { 10 | List.apply(this, Array.prototype.slice.call(arguments)); 11 | 12 | this._resolveErrors = []; 13 | 14 | var cache = Cache.instance({ 15 | method: opts['cache-method'], 16 | path: opts['cache-path'] 17 | }); 18 | 19 | // replace the find function to use node-detective 20 | this.find(function(filepath, stat, onDone) { 21 | var self = this; 22 | // only .js files 23 | if(path.extname(filepath) != '.js') { 24 | return onDone(null, []); 25 | } 26 | 27 | // log.info('Parsing:', filepath); 28 | 29 | var deps = amdDeps(cache, filepath, function(err) { 30 | console.log('parse error: ', filepath, err); 31 | }); 32 | 33 | // console.log(deps); 34 | 35 | if(!deps || deps.length === 0) { 36 | return onDone(null, []); 37 | } 38 | 39 | var queue = [], 40 | expected = deps.length, 41 | complete = 0; 42 | 43 | deps.filter(function(dep) { 44 | return !amdresolve.isSpecial(dep); 45 | }).forEach(function(dep) { 46 | var normalized, 47 | currOpts = opts.amdresolve || {}; 48 | 49 | // override relDir for each file 50 | currOpts.relDir = path.dirname(filepath); 51 | 52 | try { 53 | normalized = amdresolve.sync(dep, currOpts); 54 | } catch(err) { 55 | // console.log('resolve error: ', err, dep, filepath); 56 | self._resolveErrors.push({ err: err, dep: dep, filepath: filepath }); 57 | return; 58 | } 59 | // console.log('RESOLVE', dep, normalized); 60 | 61 | queue.push(path.normalize(normalized)); 62 | }); 63 | 64 | return onDone(null, queue); 65 | }); 66 | } 67 | 68 | DetectiveList.prototype = new List(); 69 | 70 | DetectiveList.prototype.resolveErrors = function() { 71 | return this._resolveErrors; 72 | }; 73 | 74 | var oldExec = DetectiveList.prototype.exec; 75 | DetectiveList.prototype.exec = function() { 76 | this._resolveErrors = []; 77 | oldExec.apply(this, 
Array.prototype.slice.apply(arguments)); 78 | }; 79 | 80 | module.exports = DetectiveList; 81 | -------------------------------------------------------------------------------- /lib/list/detective.js: -------------------------------------------------------------------------------- 1 | var fs = require('fs'), 2 | path = require('path'), 3 | detective = require('detective'), 4 | resolve = require('browser-resolve'), 5 | nodeResolve = require('resolve'), 6 | List = require('minitask').list, 7 | Cache = require('minitask').Cache, 8 | log = require('minilog')('parse'); 9 | 10 | function DetectiveList(opts) { 11 | List.apply(this, Array.prototype.slice.call(arguments)); 12 | 13 | this._resolveErrors = []; 14 | 15 | var cache = Cache.instance({ 16 | method: opts['cache-method'], 17 | path: opts['cache-path'] 18 | }); 19 | 20 | var depKey = opts['cache-hash'] + '-dependencies', 21 | resultKey = opts['cache-hash'] + '-dependencies-norm', 22 | renameKey = opts['cache-hash'] + '-dependencies-rename', 23 | noCache = false; // for easy dev 24 | 25 | // replace the find function to use node-detective 26 | this.find(function(filepath, stat, onDone) { 27 | var self = this; 28 | // only .js files 29 | if(path.extname(filepath) != '.js') { 30 | return onDone(null, []); 31 | } 32 | // cache the whole operation! 33 | var result = cache.file(filepath).data(resultKey); 34 | 35 | if (!noCache && Array.isArray(result)) { 36 | // console.log('using cached result', filepath, result); 37 | var renames = cache.file(filepath).data(renameKey); 38 | if(renames && Array.isArray(renames)) { 39 | self.onRename(renames[0], renames[1]); 40 | } 41 | return onDone(null, result); 42 | } 43 | 44 | var deps; 45 | 46 | log.info('Parsing:', filepath, result); 47 | 48 | // check the cache 49 | deps = cache.file(filepath).data(depKey); 50 | if (noCache || typeof deps === 'undefined') { 51 | try { 52 | deps = detective(fs.readFileSync(filepath).toString()); 53 | } catch(e) { 54 | console.log('parse error: ', filepath, e); 55 | cache.file(filepath).data(depKey, []); 56 | return []; 57 | } 58 | // store result 59 | cache.file(filepath).data(depKey, deps); 60 | } else { 61 | // console.log('using cached result', filepath, deps); 62 | } 63 | 64 | // console.log(deps); 65 | 66 | if(!deps || deps.length === 0) { 67 | // store result 68 | cache.file(filepath).data(resultKey, []); 69 | return onDone(null, []); 70 | } 71 | 72 | var expected = deps.length, 73 | complete = 0; 74 | 75 | result = []; 76 | 77 | // return deps.filter(function(dep) { 78 | // return !resolve.isCore(dep); 79 | // }).map(function(dep) { 80 | 81 | deps.forEach(function(dep) { 82 | resolve(dep, { filename: filepath }, function(err, normalized) { 83 | complete++; 84 | if(err) { 85 | // console.log('resolve error: ', err, dep, filepath, result); 86 | self._resolveErrors.push({ err: err, dep: dep, filepath: filepath }); 87 | if(complete == expected) { 88 | // store result 89 | cache.file(filepath).data(resultKey, result); 90 | return onDone(null, result); 91 | } 92 | return; 93 | } 94 | 95 | // browser-resolve may replace specific files with different names 96 | if(self.onRename) { 97 | var canonical = nodeResolve.sync(dep, { basedir: path.dirname(filepath) }); 98 | if(canonical != normalized) { 99 | self.onRename(canonical, normalized); 100 | cache.file(filepath).data(renameKey, [ canonical, normalized ]); 101 | } 102 | } 103 | 104 | // console.log('RESOLVE', normalized); 105 | 106 | result.push(path.normalize(normalized)); 107 | if(complete == expected) { 108 | // store 
result 109 | cache.file(filepath).data(resultKey, result); 110 | 111 | return onDone(null, result); 112 | } 113 | }); 114 | }); 115 | }); 116 | } 117 | 118 | DetectiveList.prototype = new List(); 119 | 120 | DetectiveList.prototype.resolveErrors = function() { 121 | return this._resolveErrors; 122 | }; 123 | 124 | var oldExec = DetectiveList.prototype.exec; 125 | DetectiveList.prototype.exec = function() { 126 | this._resolveErrors = []; 127 | oldExec.apply(this, Array.prototype.slice.apply(arguments)); 128 | }; 129 | 130 | module.exports = DetectiveList; 131 | -------------------------------------------------------------------------------- /lib/require/Makefile: -------------------------------------------------------------------------------- 1 | COMPRESS := ../../node_modules/.bin/uglifyjs 2 | 3 | all: uglify 4 | uglify: 5 | @echo "Require shim char count:" 6 | @$(COMPRESS) --compress --mangle sort ./require.js 2> /dev/null | wc -c 7 | @$(COMPRESS) --compress --mangle sort ./require.js 2> /dev/null | $(COMPRESS) --beautify 2> /dev/null > require.min.js 8 | @echo "UMD shim char count:" 9 | @$(COMPRESS) --compress --mangle sort ./umd.js 2> /dev/null | wc -c 10 | @$(COMPRESS) --compress --mangle sort ./umd.js 2> /dev/null | $(COMPRESS) --beautify 2> /dev/null > umd.min.js 11 | 12 | build: 13 | @$(COMPRESS) --compress --mangle sort ./require.js 2> /dev/null > require.min.js 14 | @$(COMPRESS) --compress --mangle sort ./umd.js 2> /dev/null > umd.min.js 15 | @ls -lah 16 | 17 | debug: 18 | cp -v ./require.js require.min.js 19 | cp -v ./umd.js umd.min.js 20 | 21 | .PHONY: uglify build 22 | -------------------------------------------------------------------------------- /lib/require/index.js: -------------------------------------------------------------------------------- 1 | var fs = require('fs'); 2 | 3 | // `root-file`: the expression 4 | // Type: 5 | // - `global` (plain global) 6 | // - `node` (module.exports) 7 | // - `umd` (node/amd/global) 8 | // 9 | // Option: `global-require` 10 | 11 | function check(opts) { 12 | if(!opts.type || 13 | !opts['root-file'] || 14 | typeof opts['global-require'] === 'undefined' || 15 | !opts['export']) { 16 | throw new Error('Invalid opts: ' + JSON.stringify(opts, 2)); 17 | } 18 | } 19 | 20 | exports.prelude = function(opts) { 21 | var str = ''; 22 | check(opts); 23 | str += '(function(){\n'; 24 | // 1) require implementation 25 | if(opts.require === 'max') { 26 | str += fs.readFileSync(__dirname +'/require.js'); 27 | } else { 28 | str += fs.readFileSync(__dirname +'/require.min.js'); 29 | } 30 | return str; 31 | }; 32 | 33 | exports.postlude = function(opts) { 34 | var str = ''; 35 | check(opts); 36 | // 3) code block to export values 37 | switch(opts.type) { 38 | case 'node': 39 | str += 'module.exports = r(' + JSON.stringify(opts['root-file']) +');'; 40 | break; 41 | case 'global': 42 | str += opts['export'] + ' = r(' + JSON.stringify(opts['root-file']) +');'; 43 | break; 44 | case 'umd': 45 | if(opts.require === 'max') { 46 | str += fs.readFileSync(__dirname +'/umd.js'); 47 | } else { 48 | str += fs.readFileSync(__dirname +'/umd.min.js'); 49 | } 50 | str += 'umd(r(' + JSON.stringify(opts['root-file']) +'), ' + JSON.stringify(opts['export']) + ');'; 51 | break; 52 | } 53 | if(opts['global-require']) { 54 | str += 'require = r.relative("", 0);\n'; 55 | } 56 | str += '}());\n'; 57 | return str; 58 | }; 59 | -------------------------------------------------------------------------------- /lib/require/readme.md: 
-------------------------------------------------------------------------------- 1 | Contains the require shim and UMD shim. 2 | 3 | ## require.js 4 | 5 | The require() shim is based on the stuff that visionmedia's require() shim, though with a bunch of additions to make it support nested packages and chained requires as well as to improve the minified footprint. 6 | 7 | ## umd.js 8 | 9 | The UMD shim is via https://github.com/ForbesLindesay/umd. 10 | 11 | I kind of wish I could use that, but it pulls in a lot of dependencies (stream wrappers) including running uglifyjs when called (rather than statically), and I can get a slightly smaller output by building the UMDification into the same IIFE. 12 | -------------------------------------------------------------------------------- /lib/require/require.js: -------------------------------------------------------------------------------- 1 | /* 2 | This immediately invoked function expression is needed to prevent chained 3 | require functions from sharing the same previousRequire. 4 | 5 | It returns a require() function in the root context. 6 | */ 7 | var r = (function() { 8 | var previousRequire = typeof require == "function" && require; 9 | 10 | /* Require the given `path`; returns {Object} exports; */ 11 | var r = function(p, context, parent) { 12 | 13 | if(!context) { 14 | context = 0; 15 | } 16 | /* Note: cannot inline here as resolve both resolves and sets the path var; 17 | in normal usage path is passed on to require.relative. 18 | */ 19 | var path = r.resolve(p, context), 20 | mod = r.m[context][path]; 21 | // if the module is not loaded and there is a top level require, try using it 22 | if (!mod && previousRequire) { 23 | mod = previousRequire(path); 24 | if (mod) { 25 | return mod; 26 | } 27 | } else if (mod && mod.c) { 28 | // the module was found, but it is a submodule, load the actual module 29 | context = mod.c; 30 | path = mod.m; 31 | mod = r.m[context][mod.m]; 32 | if(!mod) { 33 | throw new Error('failed to require "' + path + '" from ' + context); 34 | } 35 | } 36 | if (!mod) { 37 | // the initial load failed, or the previous require failed 38 | throw new Error('failed to require "' + p + '" from ' + parent); 39 | } 40 | if (!mod.exports) { 41 | mod.exports = {}; 42 | mod.call(mod.exports, mod, mod.exports, r.relative(path, context)); 43 | } 44 | return mod.exports; 45 | }; 46 | 47 | r.resolve = function(path, context){ 48 | var orig = path, 49 | reg = path + '.js', 50 | index = path + '/index.js'; 51 | if(r.m[context][reg] && reg) { 52 | return reg; 53 | } else if(r.m[context][index] && index) { 54 | return index; 55 | } 56 | return orig; 57 | }; 58 | 59 | /* Return a require function relative to the `relativeTo` path. 60 | * Note that this is needed to export a global require function. 61 | */ 62 | 63 | r.relative = function(relativeTo, context) { 64 | return function(p){ 65 | if ('.' != p.charAt(0)) { 66 | return r(p, context, relativeTo); 67 | } 68 | var path = relativeTo.split('/'), segs = p.split('/'); 69 | path.pop(); 70 | 71 | for (var i = 0; i < segs.length; i++) { 72 | var seg = segs[i]; 73 | if ('..' == seg) path.pop(); 74 | else if ('.' 
!= seg) path.push(seg); 75 | } 76 | 77 | return r(path.join('/'), context, relativeTo); 78 | }; 79 | }; 80 | return r; 81 | }()); 82 | -------------------------------------------------------------------------------- /lib/require/require.min.js: -------------------------------------------------------------------------------- 1 | var r=function(){var e="function"==typeof require&&require,r=function(i,o,u){o||(o=0);var n=r.resolve(i,o),t=r.m[o][n];if(!t&&e){if(t=e(n))return t}else if(t&&t.c&&(o=t.c,n=t.m,t=r.m[o][t.m],!t))throw new Error('failed to require "'+n+'" from '+o);if(!t)throw new Error('failed to require "'+i+'" from '+u);return t.exports||(t.exports={},t.call(t.exports,t,t.exports,r.relative(n,o))),t.exports};return r.resolve=function(e,n){var i=e,t=e+".js",o=e+"/index.js";return r.m[n][t]&&t?t:r.m[n][o]&&o?o:i},r.relative=function(e,t){return function(n){if("."!=n.charAt(0))return r(n,t,e);var o=e.split("/"),f=n.split("/");o.pop();for(var i=0;i 1; 59 | if (hasExclamationMark && plugin[0]) { 60 | var pluginName = plugin[0], 61 | pluginPath = vendorPaths[name]; 62 | 63 | if (opts.plugins && opts.plugins[pluginName]) { 64 | if(opts.plugins[pluginName].load) { 65 | vendorPaths[name] = pluginPath = opts.plugins[pluginName].load(name); 66 | console.log(name, vendorPaths[name]); 67 | } 68 | // can return false from the load() resolution to skip 69 | if (!vendorPaths[name]) { 70 | return; 71 | } 72 | vendorComplete[name] = opts.plugins[pluginName](name, pluginPath); 73 | return; 74 | } 75 | } 76 | // is it defined 77 | if(vendorPaths[name]) { 78 | return; 79 | } 80 | var fullpath = ''; 81 | 82 | configjs.relDir = opts.basepath; 83 | configjs.baseDir = opts.basepath; 84 | 85 | try { 86 | fullpath = amdresolve.sync(name, configjs); 87 | } catch(err) { } 88 | 89 | 90 | if(fullpath) { 91 | fullpath = path.normalize(fullpath); 92 | stat = fs.statSync(fullpath); 93 | if(stat.isFile()) { 94 | vendorPaths[name] = fullpath; 95 | } 96 | } 97 | }); 98 | 99 | missing = missing.filter(function(name) { 100 | var hasPath = vendorPaths[name], 101 | isIgnored = (typeof vendorPaths[name] === 'boolean' && vendorPaths[name]), 102 | hasExternal = vendorComplete[name]; 103 | return !isIgnored && !hasPath && !hasExternal; 104 | }); 105 | 106 | // list the still not found items 107 | var failed = missing.sort().filter(uniq()); 108 | 109 | if(failed.length > 0) { 110 | console.log('Failed to resolve vendor files:'); 111 | var tmplObj = {}; 112 | failed.map(function(name) { 113 | tmplObj[name] = ''; 114 | }); 115 | console.log(JSON.stringify(tmplObj, null, 2)); 116 | //return onDone(); 117 | } 118 | 119 | function vendorDeps(name) { 120 | var items = []; 121 | if(configjs.shim && configjs.shim[name]) { 122 | if(Array.isArray(configjs.shim[name])) { 123 | items = configjs.shim[name]; 124 | } 125 | if (configjs.shim[name].deps) { 126 | items = configjs.shim[name].deps; 127 | } 128 | } else if (vendorPaths[name]) { 129 | items = amdDeps(cache, vendorPaths[name]); 130 | } 131 | var result = []; 132 | // some modules return a legacy format: { name: 'jquery', deps: [] } 133 | items.forEach(function(dep) { 134 | if(dep.deps) { 135 | dep.deps.forEach(function(d) { 136 | result.push(d); 137 | }); 138 | } else { 139 | result.push(dep); 140 | } 141 | }); 142 | return result; 143 | } 144 | 145 | // figure out the actual 3rd party dependencies (from the list) and extract names from paths key 146 | // Note: the CANONICAL names are from the paths array 147 | var allDeps = vendorNames 148 | .concat(opts.extras, 
desired.filter(function(i) { return !missing[i]; })) 149 | .sort() 150 | .filter(uniq()); 151 | allDeps.forEach(function(name) { 152 | sorter.add({ name: name, deps: vendorDeps(name) }); 153 | }); 154 | 155 | // Use the resolver to output the vendor files first 156 | 157 | sorter.resolve('require'); 158 | sorter.resolve('exports'); 159 | sorter.resolve('module'); 160 | 161 | // produce the file 162 | var packageTasks = []; 163 | 164 | var exclude = opts.exclude || []; 165 | 166 | console.log('Vendor'); 167 | while(!sorter.isEmpty()) { 168 | (function() { 169 | var next = sorter.next(), 170 | moduleName = next.name, 171 | filepath = vendorPaths[moduleName]; 172 | if(exclude.indexOf(moduleName) == -1 && filepath) { 173 | console.log(' ' + moduleName + ' (' + vendorDeps(moduleName).join(', ') + ') ' + sorter.verify(next)); 174 | 175 | packageTasks.push(function(out, done) { 176 | //out.write('/* ' + moduleName + ' */\n'); 177 | processedFiles.push(filepath); 178 | done(); 179 | }); 180 | 181 | if(vendorComplete[moduleName]) { 182 | packageTasks.push(function(out, done) { 183 | out.write(vendorComplete[moduleName]); 184 | done(); 185 | }); 186 | return; 187 | } 188 | 189 | var tasks = []; 190 | 191 | // "simple mode": one --command which only applies to .js files 192 | if(opts.command && path.extname(filepath) == '.js' && opts.nomin.indexOf(moduleName) == -1) { 193 | tasks.push(function() { 194 | return spawn({ 195 | name: filepath, // full path 196 | task: opts.command 197 | }); 198 | }); 199 | } 200 | 201 | tasks.push(function(input, done) { 202 | done(null, wrapAMDVendor( 203 | moduleName, 204 | input, 205 | vendorDeps(moduleName), 206 | (configjs.shim && configjs.shim[moduleName] ? configjs.shim[moduleName].exports : undefined) || '' 207 | )); 208 | }); 209 | 210 | var task = new Task(tasks).input(function() { 211 | return fs.createReadStream(filepath); 212 | }); 213 | 214 | // these are used to disambiguate cached results 215 | task.inputFilePath = filepath; 216 | task.taskHash = optsHash; 217 | 218 | task.once('hit', function() { 219 | }); 220 | 221 | task.once('miss', function() { 222 | log.info(' Processing file', filepath); 223 | }); 224 | 225 | packageTasks.push(task); 226 | } 227 | sorter.resolve(moduleName); 228 | }()); 229 | } 230 | 231 | // now add the other package files 232 | 233 | files.forEach(function(file) { 234 | sorter.add({ name: file.name, deps: amdDeps(cache, file.name) || [] }); 235 | }); 236 | 237 | sorter.resolve('require'); 238 | 239 | console.log('App'); 240 | while(!sorter.isEmpty()) { 241 | (function() { 242 | var next = sorter.next(), 243 | filepath = next.name, 244 | // the substr here will not be correct for files under folders which have been mapped unless the path length 245 | // happens to be identical e.g. 
app and lib 246 | moduleName = (path.dirname(filepath) + '/' + path.basename(filepath, path.extname(filepath))).substr(opts.basepath.length + 1); 247 | console.log(' ' + path.relative(opts.basepath, filepath) + ' ' + sorter.verify(next)); 248 | 249 | packageTasks.push(function(out, done) { 250 | // out.write('/* ' + moduleName + ' */\n'); 251 | processedFiles.push(filepath); 252 | done(); 253 | }); 254 | 255 | if(vendorComplete[filepath]) { 256 | packageTasks.push(function(out, done) { 257 | out.write(vendorComplete[filepath]); 258 | done(); 259 | }); 260 | return; 261 | } 262 | 263 | var tasks = []; 264 | 265 | // "simple mode": one --command which only applies to .js files 266 | if(opts.command && path.extname(filepath) == '.js' && opts.nomin.indexOf(moduleName) == -1) { 267 | tasks.push(function() { 268 | return spawn({ 269 | name: filepath, // full path 270 | task: opts.command 271 | }); 272 | }); 273 | } 274 | 275 | tasks.push(function(input, done) { 276 | done(null, wrapAMDVendor(moduleName, input, amdDeps(cache, filepath), false)); 277 | }); 278 | 279 | var task = new Task(tasks).input(function() { 280 | return fs.createReadStream(filepath); 281 | }); 282 | 283 | // these are used to disambiguate cached results 284 | task.inputFilePath = filepath; 285 | task.taskHash = optsHash; 286 | 287 | task.once('hit', function() { 288 | }); 289 | 290 | task.once('miss', function() { 291 | log.info(' Processing file', filepath); 292 | }); 293 | 294 | packageTasks.push(task); 295 | 296 | sorter.resolve(moduleName); 297 | }()); 298 | } 299 | 300 | runner.parallel(packageTasks, { 301 | cacheEnabled: (opts.cache ? true : false), 302 | cachePath: opts['cache-path'], 303 | cacheMethod: opts['cache-method'], 304 | output: out, 305 | limit: opts.jobs, 306 | end: (out !== process.stdout ? true : false), // e.g. 
no "end" for process.stdout 307 | onDone: function() { 308 | if(typeof onDone === 'function') { 309 | onDone(null, processedFiles); 310 | } 311 | } 312 | }); 313 | }; 314 | -------------------------------------------------------------------------------- /lib/runner/amd/load-config.js: -------------------------------------------------------------------------------- 1 | var fs = require('fs'), 2 | vm = require('vm'); 3 | 4 | module.exports = function loadAMDConfig(filepath) { 5 | // the config specification for RJS is painful to parse as it's not a JSON file 6 | // but rather a JS file that defines as specifically named variable 7 | var sandbox = {}; 8 | vm.runInNewContext(fs.readFileSync(filepath).toString(), sandbox); 9 | 10 | if (typeof sandbox.require === 'object') { 11 | return sandbox.require; 12 | } else if (typeof sandbox.requirejs === 'object') { 13 | return sandbox.requirejs; 14 | } 15 | }; 16 | -------------------------------------------------------------------------------- /lib/runner/commonjs/get-commands.js: -------------------------------------------------------------------------------- 1 | var path = require('path'); 2 | 3 | var spawn = require('../../file-tasks/spawn.js'), 4 | streamSize = require('../../file-tasks/stream-size.js'), 5 | wrapCommonJs = require('../../file-tasks/wrap-commonjs-web.js'), 6 | wrapJson = require('../../file-tasks/wrap-json-web.js'); 7 | 8 | module.exports = function(options) { 9 | var result = [], 10 | noBrowserify = true; 11 | 12 | // 1st: any custom commands (in array order) 13 | // 2nd: any wrapping 14 | // 3rd: any reporting 15 | 16 | // the expected result is one of (sync(input), async(input, done), fn() { return stream | child process } ) 17 | // getFileTasks will call the function once with the item as the param 18 | // --> TODO in the future might want to just combine these two as the syntax is a bit awkward 19 | 20 | if (Array.isArray(options.command)) { 21 | var isObjectArray = options.command.every(function(item) { 22 | return typeof item === 'function'; 23 | }); 24 | 25 | if(isObjectArray) { 26 | result = options.command; 27 | } else { 28 | // basically, an array of strings 29 | throw new Error('Unknown --command format.'); 30 | } 31 | } else if(options.command) { 32 | // "simple mode": one --command which only applies to .js files 33 | result.push(function(filename) { 34 | if(path.extname(filename) != '.js') { 35 | return; 36 | } 37 | // extra level of nesting is annoying, but it avoids having to instantiate the task resources immediately 38 | return function() { 39 | return spawn({ 40 | name: filename, // full path 41 | task: options.command 42 | }); 43 | }; 44 | }); 45 | } 46 | 47 | var exportVariableName = options['export'] || 'App'; 48 | 49 | // transforms 50 | // Maybe this should not be global? not sure, need feedback. 51 | if(options['transform']) { 52 | var nodeResolve = require('resolve'), 53 | modulePath = nodeResolve.sync(options['transform'], { basedir: process.cwd() }), 54 | mod = require(modulePath); 55 | 56 | if(mod.gluejs) { 57 | // if the module exports '.gluejs = true' (hacky) then we'll assume it's a gluejs module, 58 | // that means it should accept function(filename, package) { } and 59 | // return false or a Minitask-compatible task. 60 | result.push(mod); 61 | } else { 62 | // otherwise, assume it's a browserify module 63 | // the problem with those is that browserify assumes it's safe to instantiate all 64 | // resources (e.g. file handles) immediately. 
That doesn't work when you queue 65 | // up work early on like gluejs does, so we wrap the module in an additional function. 66 | // This also prevents us from doing useful pre filtering so these are applied on all files 67 | // (since the match is done after calling the function in browserify and in browserify, 68 | // you return a plain through-stream to indicate a no-op). 69 | 70 | result.push( 71 | function(filename, pkg) { 72 | // filter (return false to skip this task) 73 | return function() { 74 | // wrapper fn (called only when the task is actually executed to pre-allocating file handles) 75 | return mod(filename); 76 | }; 77 | } 78 | ); 79 | } 80 | noBrowserify = false; 81 | // push a "wildcard exports" wrapper since the browserify stuff applies to all files 82 | result.push(function(filename, packageObj) { 83 | if(path.extname(filename) == '.json') { 84 | return; 85 | } 86 | var relname = path.relative(packageObj.basepath, filename); 87 | return function() { 88 | return wrapCommonJs({ 89 | 'source-url': options['source-url'], 90 | 'name': (packageObj.name ? exportVariableName+'/' + packageObj.name + '/' : exportVariableName+'/') + relname 91 | }); 92 | }; 93 | }); 94 | } 95 | 96 | if (noBrowserify) { 97 | // default task for wrapping .js 98 | result.push(function(filename, packageObj) { 99 | if(path.extname(filename) != '.js') { 100 | return; 101 | } 102 | var relname = path.relative(packageObj.basepath, filename); 103 | return function() { 104 | return wrapCommonJs({ 105 | 'source-url': options['source-url'], 106 | 'name': (packageObj.name ? exportVariableName+'/' + packageObj.name + '/' : exportVariableName+'/') + relname 107 | }); 108 | }; 109 | }); 110 | } 111 | // default task for wrapping .json 112 | result.push(function(filename) { 113 | if(path.extname(filename) != '.json') { 114 | return; 115 | } 116 | return function() { 117 | return wrapJson({ }); 118 | }; 119 | }); 120 | // if we are reporting, add the stream size capture task at the end 121 | // so we can report on results (e.g. 
of minification) 122 | if (options.report) { 123 | result.push(function(filename, packageObj) { 124 | if(path.extname(filename) != '.js') { 125 | return; 126 | } 127 | return streamSize({ 128 | onDone: function(size) { 129 | var match = packageObj.files.some(function(file, i) { 130 | var result = (file.name == filename); 131 | if(result) { 132 | packageObj.files[i].sizeAfter = size; 133 | } 134 | return result; 135 | }); 136 | if(!match) { 137 | throw new Error('File not found by index: ' + filename); 138 | } 139 | } 140 | }); 141 | }); 142 | } 143 | return result; 144 | }; 145 | -------------------------------------------------------------------------------- /lib/runner/commonjs/get-file-tasks.js: -------------------------------------------------------------------------------- 1 | module.exports = function(file, pkg, commands) { 2 | var result = []; 3 | if(typeof file.name !== 'string' || 4 | !Array.isArray(commands)) { 5 | throw new Error('Invalid params to getFileCommands ' + file + commands); 6 | } 7 | 8 | // task selection from commands 9 | commands.forEach(function(command) { 10 | // to be compatible with browserify's source transforms, 11 | // let commands be function(filename, [packageObj]) { } 12 | // which return either 13 | // 1) a function which returns a valid minitask obj (syncfn, asyncfn, duplex, child_process or through-stream) 14 | // 2) or a falsey value (= skip) 15 | 16 | // note that this not the exact API that browserify follows, since 17 | // we return functions that return functions - rather than directly returning a stream 18 | // this is to avoid running out of resources when setting up the queue of tasks to run 19 | var task = command(file.name, pkg); 20 | if(task) { 21 | // console.log('queue', file.name, task.toString()); 22 | result.push(task); 23 | } 24 | }); 25 | return result; 26 | }; 27 | 28 | 29 | -------------------------------------------------------------------------------- /lib/runner/commonjs/index.js: -------------------------------------------------------------------------------- 1 | var fs = require('fs'), 2 | path = require('path'), 3 | util = require('util'), 4 | runner = require('minitask').runner, 5 | Task = require('minitask').Task, 6 | Cache = require('minitask').Cache, 7 | // tasks 8 | annotateStat = require('../../list-tasks/annotate-stat.js'), 9 | inferPackages = require('../../list-tasks/infer-packages.js'), 10 | filterNpm = require('../../list-tasks/filter-npm.js'), 11 | filterRegex = require('../../list-tasks/filter-regex.js'), 12 | filterPackages = require('../../list-tasks/filter-packages.js'), 13 | getFileTasks = require('./get-file-tasks.js'), 14 | getCommands = require('./get-commands.js'), 15 | reqWrap = require('../../require/index.js'), 16 | 17 | log = require('minilog')('commonjs'), 18 | ProgressBar = require('progress'); 19 | 20 | // this runner concatenates the files to stdout after running wrap-commonjs-web 21 | module.exports = function(list, options, out, onDone) { 22 | if(!options) { 23 | options = {}; 24 | } 25 | // unpack options 26 | var packageRootFileName, 27 | // normalize basepath 28 | basepath = (options.basepath ? path.normalize(options.basepath) : ''), 29 | // replaced modules (e.g. jquery => window.jquery) 30 | replaced = Object.keys(options.replaced || {}).map(function(key) { 31 | return JSON.stringify(key) + ': '+ '{ exports: ' + options.replaced[key] + ' }'; 32 | }).join(',\n'), 33 | // remapped modules (e.g. 
assert => require('chai').assert 34 | remapped = Object.keys(options.remap || {}).map(function(key) { 35 | return JSON.stringify(key) + ': '+ 36 | 'function(module, exports, require) { module.exports = ' + options.remap[key] + ' }'; 37 | }).join(',\n'), 38 | // commands 39 | commands = getCommands(options), 40 | // cache hit filepaths for reporting 41 | cacheHits = [], 42 | optsHash = Cache.hash(JSON.stringify(options)), 43 | progress; 44 | 45 | // console.log(util.inspect(list.files.map(function(i) { return i.name; }), false, 20, true)); 46 | 47 | // exclude files using the npmjs defaults for file and path exclusions 48 | filterNpm(list); 49 | // exclude files matching specific expressions 50 | // - because .npmignores often do not cover all the files to exclude 51 | var excludeList = [ 52 | new RegExp('\/dist\/'), 53 | new RegExp('\/example\/'), 54 | new RegExp('\/benchmark\/'), 55 | new RegExp('[-.]min.js$') 56 | ]; 57 | 58 | // allow --reset-exclude 59 | if(options['reset-exclude']) { 60 | excludeList = []; 61 | } 62 | 63 | // allow adding in expressions 64 | if(options['exclude']) { 65 | excludeList = excludeList.concat( 66 | (Array.isArray(options['exclude']) ? options['exclude'] : [ options['exclude'] ]) 67 | .map(function(expr) { 68 | return new RegExp(expr); 69 | }) 70 | ); 71 | } 72 | 73 | filterRegex(list, excludeList); 74 | 75 | annotateStat(list); 76 | 77 | // run list level tasks 78 | 79 | // - generate `.packages` from `.files` 80 | // (by grouping the set of `.files` into distinct dependencies) 81 | // ... and infer the package main file 82 | inferPackages(list, { main: options.main, basepath: basepath }); 83 | // - for each package, apply excludes (package.json.files, .npmignore, .gitignore) 84 | filterPackages(list); 85 | 86 | // console.log(util.inspect(list, false, 20, true)); 87 | 88 | // if the main package is empty, use the next one 89 | // TODO FIXME: this occurs when you are in a ./node_modules/ directory and run 90 | // a regular build via the command line. Suddenly, the folder you are in is detected as a 91 | // separate package! Need a better test for this in the long run... 92 | if(list.packages[0].files.length === 0) { 93 | list.packages.shift(); 94 | } 95 | if(list.packages.length === 0) { 96 | throw new Error('No files were included in the build. Check your `.include()` call paths.'); 97 | } 98 | 99 | // pluck the main file for the first package 100 | packageRootFileName = list.packages[0].main || options.main; 101 | 102 | if(typeof packageRootFileName === 'undefined') { 103 | throw new Error('You need to set the package root file explicitly, ' + 104 | 'e.g.: `.main(\'index.js\')` or `--main index.js`. This is the file that\'s exported ' + 105 | 'as the root of the package.'); 106 | } 107 | 108 | // filter out non-JS files (more accurately, files that have no tasks that match them) 109 | var removed = []; 110 | // find the ignore files (applying them in the correct order) 111 | 112 | delete list.structured; 113 | 114 | // produce the file 115 | var packageTasks = [], 116 | wrapOpts = { 117 | 'export': options['export'] || 'App', 118 | 'root-file': packageRootFileName, 119 | // `--amd` and `--umd` are synonyms (since umd provides a superset of the amd features) 120 | type: (options['amd'] || options['umd'] ? 'umd' : (options['node'] ? 'node' : 'global')), 121 | // options: global-require: export the require() implementation into the global space 122 | 'global-require': options['global-require'] || false, 123 | require: (options.require !== false ? 
'min' : 'max') 124 | }; 125 | 126 | packageTasks.push(function(out, done) { 127 | // top level boundary + require() implementation 128 | out.write(reqWrap.prelude(wrapOpts)); 129 | // the registry definition 130 | out.write('r.m = [];\n'); 131 | done(); 132 | }); 133 | 134 | // for each module, write `r.m[n] = { normalizedName: .. code .. , };` 135 | 136 | list.packages.forEach(function(packageObj, current) { 137 | 138 | // package header 139 | packageTasks.push(function header(out, done) { 140 | // out.write('/* -- ' + (packageObj.name ? packageObj.name : 'root') + ' -- */\n'); 141 | log.info('Processing package:', (packageObj.name ? packageObj.name : 'root')); 142 | out.write('r.m['+current+'] = {\n'); 143 | // store replaced and remapped for all packages 144 | if(replaced) { 145 | out.write(replaced + ',\n'); 146 | } 147 | if(remapped) { 148 | out.write(remapped + ',\n'); 149 | } 150 | 151 | // store dependency references 152 | Object.keys(packageObj.dependenciesById).forEach(function(name) { 153 | var uid = packageObj.dependenciesById[name], 154 | index; 155 | 156 | // find the package in the (possibly altered) packages list by unique id 157 | list.packages.some(function(item, itemIndex) { 158 | var match = (item.uid == uid); 159 | if(match) { 160 | index = itemIndex; 161 | } 162 | return match; 163 | }); 164 | 165 | // r.m[n]['foo'] = { c: 1, m: 'lib/index.js' } 166 | out.write( 167 | JSON.stringify(name) + ': ' + JSON.stringify({ 168 | c: index, 169 | m: list.packages[index].main 170 | })); 171 | out.write(',\n'); 172 | }); 173 | 174 | done(); 175 | }); 176 | 177 | // filter files (and generate tasks) 178 | packageObj.files = packageObj.files.filter(function(item) { 179 | if(!fs.existsSync(item.name)) { 180 | throw new Error('File not found: ' + item.name + ' Basepath = "' + 181 | packageObj.basepath + '", filename="' + item.name + '"'); 182 | } 183 | 184 | item.tasks = getFileTasks(item, packageObj, commands); 185 | if(item.tasks.length === 0) { 186 | log.info('Excluded non-js/non-json file:', path.relative(packageObj.basepath, item.name)); 187 | // also update list.files 188 | removed.push(item.name); 189 | return false; // exclude from package.files 190 | } 191 | return true; // do not filter out this file 192 | }); 193 | 194 | // stream each file in serial order 195 | var totalFiles = packageObj.files.length; 196 | packageObj.files.forEach(function(item, index) { 197 | var exportVariableName = options['export'] || 'App', 198 | filePath = item.name, 199 | relativeName = path.relative(packageObj.basepath, filePath), 200 | moduleName = relativeName; 201 | 202 | // check for renames via options._rename 203 | if(options._rename[filePath]) { 204 | moduleName = path.relative(packageObj.basepath, options._rename[filePath]); 205 | } 206 | 207 | // all dependencies already have a basepath and the names are 208 | // already relative to it, but this is not true for the main package 209 | if(current === 0 && moduleName.substr(0, basepath.length) == basepath) { 210 | moduleName = moduleName.substr(basepath.length); 211 | } 212 | 213 | // add the first task 214 | packageTasks.push( 215 | function(out, done) { 216 | out.write(JSON.stringify(moduleName) + ': '); 217 | done(); 218 | }); 219 | 220 | // wrap in a function to reduce file handle usage 221 | var task = new Task(item.tasks).input(function() { return fs.createReadStream(filePath); } ); 222 | 223 | // these are used to disambiguate cached results 224 | task.inputFilePath = filePath; 225 | task.taskHash = optsHash; 226 | 227 | 
task.once('hit', function() { 228 | cacheHits.push(filePath); 229 | if(options.progress) { 230 | progress.tick(); 231 | } 232 | }); 233 | 234 | task.once('miss', function() { 235 | if(options.progress) { 236 | progress.tick(); 237 | } else { 238 | log.info(' Processing file', filePath); 239 | } 240 | }); 241 | 242 | packageTasks.push(task); 243 | 244 | packageTasks.push( 245 | function(out, done) { 246 | // determining when to write the last common becomes easy 247 | // when files are processed last 248 | if(index == totalFiles - 1) { 249 | out.write('\n'); 250 | } else { 251 | out.write(',\n'); 252 | } 253 | done(); 254 | }); 255 | }); 256 | 257 | // package footer 258 | packageTasks.push(function(out, done) { 259 | out.write('};\n'); 260 | done(); 261 | }); 262 | }); 263 | 264 | packageTasks.push(function(out, done) { 265 | // finally, close the package file 266 | out.write(reqWrap.postlude(wrapOpts)); 267 | 268 | delete list.structured; 269 | 270 | // tj's progress can make the process hang (!) if the total count is off due to exclusions 271 | if(progress && progress.rl && progress.rl.close) { 272 | progress.rl.close(); 273 | } 274 | 275 | // if any reporting is explicitly enabled 276 | if(options.report || options.verbose || options.progress) { 277 | if(cacheHits.length > 0) { 278 | console.log('Cache hits (' + options['cache-path'] + '):', 279 | cacheHits.length, '/', list.files.length, 'files'); 280 | // exclude cached files 281 | list.packages.forEach(function(pack, index) { 282 | list.packages[index].files = list.packages[index].files.filter(function(item) { 283 | return cacheHits.indexOf(item.name) == -1; 284 | }); 285 | }); 286 | } 287 | } 288 | if(options.report) { 289 | require('./report-package.js')(list); 290 | } 291 | 292 | done(); 293 | }); 294 | 295 | // update files by removing files in removed 296 | list.files = list.files.filter(function(obj) { 297 | return removed.indexOf(obj.name) == -1; 298 | }); 299 | 300 | if(options.progress) { 301 | progress = new ProgressBar('[:bar] :current / :total :percent :etas', { 302 | complete: '=', incomplete: ' ', width: 20, total: list.files.length 303 | }); 304 | } 305 | 306 | runner.parallel(packageTasks, { 307 | cacheEnabled: (options.cache ? true : false), 308 | cachePath: options['cache-path'], 309 | cacheMethod: options['cache-method'], 310 | output: (out ? out : process.stdout), 311 | limit: options.jobs, 312 | end: (out !== process.stdout ? true : false), // e.g. 
no "end" for process.stdout 313 | onDone: function() { 314 | if(typeof onDone === 'function') { 315 | onDone(); 316 | } 317 | } 318 | }); 319 | 320 | }; 321 | -------------------------------------------------------------------------------- /lib/runner/commonjs/report-package.js: -------------------------------------------------------------------------------- 1 | var path = require('path'), 2 | bytes = require('bytes'), 3 | style = require('./style.js'); 4 | 5 | // takes a list.packages object and produces a summary of packages 6 | // their relative sizes and total size 7 | 8 | function percentage(size, total) { 9 | if(total === 0) return 100; 10 | return Math.floor(size / total * 100 ); 11 | } 12 | 13 | function rpad(str, length) { 14 | if(str.toString().length >= length) return str; 15 | return str + new Array(length - str.toString().length).join(' '); 16 | } 17 | 18 | function compare(before, after) { 19 | if(after < before) { 20 | return [ 21 | style(' (' + bytes(after - before), 'green'), ' ', 22 | style((percentage(after, before) - 100) + '%)', 'green')]; 23 | } else if(after - before < 200) { 24 | return [ 25 | style(' (' + bytes(after - before), 'yellow'), ' ', 26 | style((percentage(after, before) - 100) + '%)', 'yellow')]; 27 | } 28 | return [ 29 | style(' (+ ' + bytes(after - before), 'red'), ' ', 30 | style((percentage(after, before) - 100) + '%)', 'red')]; 31 | } 32 | 33 | module.exports = function(list) { 34 | 35 | // calculate the totals - need this information to display percentage sizes of files 36 | var cwd = process.cwd(), 37 | totalsByPackageIndex = [], 38 | total = 0, 39 | totalAfterByPackageIndex = [], 40 | totalAfter = 0; 41 | 42 | list.packages.forEach(function(pack, index) { 43 | totalsByPackageIndex[index] = pack.files.reduce(function(prev, item) { 44 | if(!totalAfterByPackageIndex[index]) { 45 | totalAfterByPackageIndex[index] = 0; 46 | } 47 | totalAfterByPackageIndex[index] += (item.sizeAfter || 0); 48 | return prev + item.stat.size; 49 | }, 0); 50 | total += totalsByPackageIndex[index]; 51 | totalAfter += totalAfterByPackageIndex[index]; 52 | }); 53 | 54 | var rows = []; 55 | 56 | list.packages.forEach(function(pack, index) { 57 | rows.push('# ' + (pack.name ? 
pack.name : 'Root package')); 58 | 59 | pack.files.sort(function(a, b) { 60 | return b.stat.size - a.stat.size; 61 | }); 62 | 63 | pack.files.forEach(function(item) { 64 | var relpath = path.relative(cwd, item.name); 65 | var row = [' ', style(path.dirname(relpath)+'/', 'gray')+path.basename(relpath), ' ', bytes(item.stat.size), 66 | ' ', percentage(item.stat.size, total), '%' ]; 67 | if(item.sizeAfter) { 68 | row = row.concat([ 69 | ' -> ', bytes(item.sizeAfter)], 70 | compare(item.stat.size, item.sizeAfter) 71 | ); 72 | } 73 | rows.push(row); 74 | }); 75 | 76 | var line = [ 77 | 'Package total: ', bytes(totalsByPackageIndex[index]), 78 | ' ', percentage(totalsByPackageIndex[index], total), '%' 79 | ]; 80 | 81 | if(totalAfter > 0) { 82 | line = line.concat([ ' -> ', bytes(totalAfterByPackageIndex[index])], 83 | compare(totalsByPackageIndex[index], totalAfterByPackageIndex[index])); 84 | } 85 | rows.push(line.join('')); 86 | 87 | if(Object.keys(pack.dependenciesById).length > 0) { 88 | rows.push('Package dependencies: ' +Object.keys(pack.dependenciesById).join(', ')); 89 | } 90 | }); 91 | 92 | 93 | var colSizes = []; 94 | rows.forEach(function(row) { 95 | // ignore "log lines" 96 | if(!Array.isArray(row)) return; 97 | row.forEach(function(col, index) { 98 | colSizes[index] = Math.max(colSizes[index] || 0, col.toString().length + 1); 99 | }); 100 | }); 101 | 102 | rows.forEach(function(row) { 103 | if(!Array.isArray(row)) { 104 | console.log(row); 105 | return; 106 | } 107 | console.log( 108 | row.reduce(function(prev, curr, index) { 109 | return prev += rpad(curr, colSizes[index]); 110 | }, '')); 111 | }); 112 | 113 | if(totalAfter > 0) { 114 | console.log('Total size: ' + bytes(totalAfter) + (totalAfter > 0 ? compare(total, totalAfter).join('') : '')); 115 | } else { 116 | console.log('Total size: ' + bytes(total)); 117 | } 118 | 119 | }; 120 | -------------------------------------------------------------------------------- /lib/runner/commonjs/style.js: -------------------------------------------------------------------------------- 1 | var styles = { 2 | //styles 3 | 'bold' : ['\033[1m', '\033[22m'], 4 | 'italic' : ['\033[3m', '\033[23m'], 5 | 'underline' : ['\033[4m', '\033[24m'], 6 | 'inverse' : ['\033[7m', '\033[27m'], 7 | //grayscale 8 | 'white' : ['\033[37m', '\033[39m'], 9 | 'gray' : ['\033[90m', '\033[39m'], 10 | 'black' : ['\033[30m', '\033[39m'], 11 | //colors 12 | 'blue' : ['\033[34m', '\033[39m'], 13 | 'cyan' : ['\033[36m', '\033[39m'], 14 | 'green' : ['\033[32m', '\033[39m'], 15 | 'magenta' : ['\033[35m', '\033[39m'], 16 | 'red' : ['\033[31m', '\033[39m'], 17 | 'yellow' : ['\033[33m', '\033[39m'] 18 | }; 19 | 20 | module.exports = function(str, style) { 21 | return styles[style][0] + str + styles[style][1]; 22 | }; 23 | -------------------------------------------------------------------------------- /lib/runner/concat.js: -------------------------------------------------------------------------------- 1 | var fs = require('fs'), 2 | runner = require('minitask').runner; 3 | 4 | // this runner concatenates the files to stdout after running wrap-commonjs-web 5 | module.exports = function(list, options) { 6 | var current = 0; 7 | 8 | // output the header 9 | console.log('/* header */'); 10 | 11 | function next() { 12 | var last = runner( 13 | { stdout: fs.createReadStream(list.files[current].name) }, 14 | [ require('../file-tasks/wrap-commonjs-web.js')] 15 | ); 16 | last.stdout.on('end', function() { 17 | if(current == list.files.length){ 18 | console.log('/* footer */'); 19 | 
return; 20 | } 21 | next(); 22 | }); 23 | 24 | // log a file header 25 | console.log('\n/* file: '+list.files[current].name+' */'); 26 | 27 | // need to do this here so we can catch the second-to-last stream's "end" event; 28 | last.stdout.pipe(process.stdout); 29 | current++; 30 | } 31 | 32 | next(); 33 | }; 34 | -------------------------------------------------------------------------------- /lib/runner/static-server.js: -------------------------------------------------------------------------------- 1 | var http = require('http'), 2 | annotateBasepath = require('../list-tasks/annotate-basepath.js'); 3 | 4 | module.exports = function(list, options) { 5 | // infer the basepath (longest common string) 6 | annotateBasepath(list); 7 | 8 | http.createServer(function(req, res) { 9 | if(req.url == '/') { 10 | res.end('

<ul><li>'+ list.files.map(function(file) { 11 | return file.name.substr(list.basepath.length); 12 | }).join('</li><li>') +'</li></ul>
'); 13 | } else { 14 | res.end('Unknown: ' + req.url); 15 | } 16 | }).listen(8000).on('listening', function() { 17 | console.log('Listening on localhost:8000'); 18 | }); 19 | }; 20 | -------------------------------------------------------------------------------- /lib/util/amd-vendor-wrap-global.js: -------------------------------------------------------------------------------- 1 | var ret, fn; 2 | return ret || global.%global%; 3 | }; 4 | }(this))); 5 | -------------------------------------------------------------------------------- /lib/util/amd-vendor-wrap.js: -------------------------------------------------------------------------------- 1 | }(function(a,b,c) { 2 | var args = arguments; 3 | return define(args.length == 3 ? a : %name%, (b && b instanceof Array ? b : %deps%), args[args.length - 1]); 4 | })); 5 | -------------------------------------------------------------------------------- /lib/util/sort-dependencies.js: -------------------------------------------------------------------------------- 1 | function SortDependencies() { 2 | this.items = []; 3 | this.resolved = {}; 4 | } 5 | 6 | SortDependencies.prototype.add = function(item) { 7 | this.items.push(item); 8 | }; 9 | 10 | SortDependencies.prototype.resolve = function(name) { 11 | if(typeof name === 'object' && name.name) { 12 | name = name.name; 13 | } 14 | this.resolved[name] = true; 15 | }; 16 | 17 | SortDependencies.prototype.isEmpty = function() { 18 | return this.items.length === 0; 19 | }; 20 | 21 | SortDependencies.prototype.ignoreResolved = function(item) { 22 | var self = this; 23 | return item.deps.reduce(function(prev, current) { 24 | return prev + (!self.resolved[current] ? 1 : 0); 25 | }, 0); 26 | }; 27 | 28 | SortDependencies.prototype.next = function() { 29 | var self = this; 30 | this.items.sort(function(a, b) { 31 | var diff = self.ignoreResolved(b) - self.ignoreResolved(a); 32 | if (diff === 0) { 33 | return a.name.localeCompare(b.name); 34 | } 35 | return diff; 36 | }); 37 | return this.items.pop(); 38 | }; 39 | 40 | SortDependencies.prototype.verify = function(item) { 41 | var self = this; 42 | return item.deps.filter(function(name) { 43 | return !self.resolved[name]; 44 | }); 45 | }; 46 | 47 | module.exports = SortDependencies; 48 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gluejs", 3 | "description": "Build CommonJS modules for the browser via a chainable API", 4 | "version": "2.4.0", 5 | "author": { 6 | "name": "Mikito Takada", 7 | "email": "mikito.takada@gmail.com", 8 | "url": "http://blog.mixu.net/" 9 | }, 10 | "license": "BSD-3-Clause", 11 | "bin": { 12 | "gluejs": "./bin/gluejs" 13 | }, 14 | "keywords": [ 15 | "browser", 16 | "require", 17 | "bundle", 18 | "commonjs", 19 | "npm", 20 | "module", 21 | "package" 22 | ], 23 | "repository": { 24 | "type": "git", 25 | "url": "git://github.com/mixu/gluejs.git" 26 | }, 27 | "main": "index.js", 28 | "dependencies": { 29 | "amd-resolve": "~0.1.1", 30 | "amdetective": "0.0.1", 31 | "browser-resolve": "~1.2.2", 32 | "bytes": "~2.2.0", 33 | "detective": "~4.3.1", 34 | "minilog": "~2.1.0", 35 | "minimatch": "~0.2.12", 36 | "minitask": "~0.2.0", 37 | "optimist": "~0.6.1", 38 | "progress": "~1.1.2", 39 | "readable-stream": "1.1.9", 40 | "resolve": "~0.6.1" 41 | }, 42 | "devDependencies": { 43 | "mocha": "~2.3.4", 44 | "uglify-js": "~2.6.1" 45 | } 46 | } 47 | 
-------------------------------------------------------------------------------- /readme.md: -------------------------------------------------------------------------------- 1 | # gluejs V2 2 | 3 | Package Node/CommonJS modules for the browser 4 | 5 | New version! gluejs v2 is now out with a bunch of new features ([v1 branch](https://github.com/mixu/gluejs/tree/master)) 6 | 7 | 8 | - Converts code written for Node.js to run in the browser 9 | - Lightweight require shim (~400 characters, minified but not gzipped) 10 | - Easy to connect to intermediate shell tasks (e.g. minifiers) due to streams2 support 11 | - Fast (can use caching to avoid rebuilding unchanged files) 12 | - Programmable: use the Node API to serve packages directly, or build static packages using the command line tool 13 | - render() to console, or directly to a HTTP request 14 | - include() files or full directories, blacklist using exclude(regexp) 15 | - Bind variables under window.* to require() statements using replace() 16 | - Compile templating language files to JS via a custom handler 17 | - Source url support 18 | 19 | ## Usage example: console 20 | 21 | gluejs \ 22 | --include ./lib/ \ 23 | --include ./node_modules/microee/ \ 24 | --global App \ 25 | --main lib/index.js \ 26 | --out app.js \ 27 | --command 'uglifyjs --no-copyright --mangle-toplevel' 28 | 29 | All of these options are also available via a Node API (e.g. `require('gluejs')`). 30 | 31 | ## Usage example: express middleware (new in v2.2!) 32 | 33 | var express = require('express'), 34 | glue = require('gluejs'), 35 | app = express(); 36 | 37 | app.use(express.static(__dirname)); 38 | 39 | app.use('/app.js', glue.middleware({ 40 | basepath: __dirname, 41 | include: [ './lib', '../node_modules/jade/' ] 42 | })); 43 | 44 | app.listen(3000); 45 | console.log('Listening on port 3000'); 46 | 47 | `glue.middleware()` can accept most of the options supported by the Node API. 48 | 49 | ## Using the resulting file 50 | 51 | The build result is a standalone file, which is exported as a global (`lib/index.js` is exposed as `App`): 52 | 53 | 54 | 57 | 58 | The require() statements inside the package work just like under Node, yet none of the internals are leaked into the global namespace. 59 | 60 | gluejs does not export a global "require()" function in the browser; this means that it is compatible with other code since all details are hidden and only a single interface is exported (main file's ```module.exports```). The reasons behind this are documented in much more detail in my book, "[Single page applications in depth](http://singlepageappbook.com/maintainability1.html)". If you want to export the require implementation, you can use `--global-require`. 61 | 62 | An additional benefit is that you only need one HTTP request to load a package, and that the resulting files can be redistributed (e.g. to a non-Node web application) without worry. If you need to set breakpoints inside files, use `--source-url` to enable source urls. 63 | 64 | ## Installation 65 | 66 | To install the command line tool globally, run 67 | 68 | npm install -g gluejs 69 | 70 | Alternatively, you can run the tool (e.g. via a Makefile) as `./node_modules/gluejs/bin/gluejs`. 71 | 72 | # What's new in v2.3 73 | 74 | gluejs v2.3 adds UMD support and performance / robustness improvements. 75 | 76 | - UMD support: you can now run the same build result in Node and AMD and in the browser. 
This enables three use cases: 77 | - you can use gluejs bundles in AMD/Require.js (via config.js, see the relevant section below) 78 | - you can share the same file between AMD and Node 79 | - you can use gluejs to produce a minified/obfuscated version of your codebase that's usable in Node 80 | - chained require() resolution. The gluejs `require()` shim has been redesigned so that if a `require` function is already defined, then it will fall back to that function. This has two implications: 81 | - if `--global-require` is set (exporting the `require()` function), you can split your app into multiple bundles loaded separately in the browser and they will appropriately chain require() calls as long as they are loaded in prerequisite order 82 | - UMD bundles running under Node will fall back to using Node's native `require` for modules that are not in the bundle 83 | - Added pre-filters to skip .git / svn / hg / cvs directories for better performance 84 | - Improved the behavior of the cache when the metadata is corrupted or in an unexpected format 85 | 86 | ## What's new in v2.2 87 | 88 | Note: if you are upgrading from an older version: the default value for `--global` is now `App` rather than `Foo`. 89 | 90 | gluejs v2.2 adds Express middleware for serving gluejs packages, thanks to [@JibSales](https://github.com/JibSales). 91 | 92 | ## What's new in v2.1 93 | 94 | Note: if you are upgrading from v2.0: `--cache` is now called `--cache-path`. 95 | 96 | gluejs v2.1 adds significant performance improvements over v2.0! In addition, it adds support for custom transformations, including ones that were written for [browserify](https://github.com/substack/node-browserify#list-of-source-transforms). 97 | 98 | - the task execution engine now supports running multiple tasks concurrently while producing a single output file. Most build systems only use a single output stream, which means that expensive tasks such as `uglifyjs` are run on each file in serial order. gluejs v2.1's new engine executes all tasks in parallel, kind of like MapReduce at a small scale (configurable via `--jobs`). 99 | - anecdotally, this has reduced build time for CPU-intensive builds (e.g. minifying a large number of files) by ~50% by making use of all the available CPU cores. 100 | - the system now enables caching by default; if you run the same gluejs task twice, only the changed files are re-processed. Changes are detected either using md5 hashing or filesize + modification time. Caching used to be an advanced option, but it helps a lot in practice so I figured I'd enable it by default. You can opt out via `--no-cache`, but why? 101 | - the cache supports multiple versions of the same input file (e.g. if you have a gluejs task for a debug build and a production build, switching between the two no longer invalidates the cache). 102 | - added support for custom transformations, such as compiling template files and other compile-to-JS files. 103 | 104 | For example, on a MacBook Pro using a ~1.2Mb input with ~600 files and applying minification (which is CPU-intensive), `--no-cache --jobs 1` (i.e. forcing serial execution): 105 | 106 | 0:56.75 wall clock time, 39.90 user, 21.18 system 107 | 108 | and `--no-cache` (i.e. parallel execution with default options): 109 | 110 | 0:18.89 wall clock time, 72.78 user, 29.04 system 111 | 112 | In other words, the build completes almost 3x faster than before. 
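For reference, the two timings above correspond to invocations along these lines (the paths and uglifyjs flags here are illustrative, not the exact benchmark setup):

    # force serial execution, cache disabled
    gluejs --include ./lib --command 'uglifyjs --no-copyright' --no-cache --jobs 1 --main lib/index.js --out app.js
    # default parallel execution, cache disabled
    gluejs --include ./lib --command 'uglifyjs --no-copyright' --no-cache --main lib/index.js --out app.js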
113 | 114 | ## What's new in v2 115 | 116 | gluejs (v2) is a comprehensive refactoring to make use of Node 0.10.x -style streams (under 0.8.x via [readable-stream](https://github.com/isaacs/readable-stream)). 117 | 118 | - internals refactored to make working with unix pipes (e.g. minification, obfuscation etc.) much easier via Node 0.10.x streams 119 | - internals refactored to make file operation easier to apply (e.g. each task is separated into it's own pipe) 120 | - faster repeated builds via file caching 121 | - more accurate npmignore/gitignore matching 122 | 123 | ## Neat new features 124 | 125 | Easier minification (or other processing) via `--command`: 126 | 127 | gluejs \ 128 | --include ./lib \ 129 | --replace jQuery=window.jQuery \ 130 | --command 'uglifyjs --no-copyright' \ 131 | --global App \ 132 | --main lib/index.js \ 133 | --out app.js 134 | 135 | With that option, all files are piped through `uglifyjs` before writing to disk. 136 | 137 | Gorgeous new reporter (enable via `--report`), with stats on savings from minification: 138 | 139 | # Root package 140 | lib/web/shim.js 12.94kb 38% -> 3.84kb (-9324b -71%) 141 | lib/common/shim.util.js 4.65kb 13% -> 1.21kb (-3524b -75%) 142 | lib/common/outlet.js 4.07kb 12% -> 2.05kb (-2074b -50%) 143 | lib/common/view.js 3.94kb 11% -> 1.93kb (-2054b -51%) 144 | lib/common/collection_view.js 2.09kb 6 % -> 1.03kb (-1082b -51%) 145 | lib/common/collection.js 1.18kb 3 % -> 494b (-716b -60%) 146 | lib/common/table_view.js 458b 1 % -> 38b (-420b -92%) 147 | lib/web/index.js 271b 0 % -> 280b (9b 3%) 148 | Package total: 29.59kb 88% -> 10.85kb (-19185b -64%) 149 | Package dependencies: htmlparser-to-html, microee 150 | # htmlparser-to-html 151 | node_modules/htmlparser-to-html/index.js 2.43kb 7 % -> 1.26kb (-1190b -48%) 152 | Package total: 2.43kb 7% -> 1.26kb (-1190b -48%) 153 | # microee 154 | node_modules/microee/index.js 1.24kb 3 % -> 900b (-366b -29%) 155 | Package total: 1.24kb 3% -> 900b (-366b -29%) 156 | Total size: 12.99kb (-20741b -61%) 157 | 158 | Report explained: 159 | 160 | lib/web/shim.js 12.94kb 38% -> 3.84kb (-9324b -71%) 161 | [filename] [original size] [% of total] -> [minified size] (savings in bytes and %) 162 | 163 | The `.npmignore` and `package.json` exclude logic is now more accurate, leading to smaller builds. 164 | 165 | ## Upgrading from gluejs v1 166 | 167 | The command line option syntax has changed: `gluejs --include foo bar` has to be written as `gluejs --include foo --include bar`. 168 | 169 | The `--npm foo` option no longer exists. Instead, just `--include ./node_modules/foo`, the package inference engine will figure out that the target is a npm module and handle it correctly. 170 | 171 | The `.concat(packageA, packageB)`, `.define(module, code)`, `.defaults()` features are deprecated (use bash or string concatenation; use different --include statements). 172 | 173 | ## Usage 174 | 175 | ````markdown 176 | Usage: gluejs --include {OPTIONS} 177 | 178 | ## Basic 179 | 180 | --include Path to import. 181 | --exclude JS regular expression string to match against the included paths 182 | --out File to write. Default: stdout 183 | --global Name of the global to export. Default: "App" 184 | --basepath Base path for relative file paths. Default: process.cwd() 185 | --main Name of the main file/module to export. Default: index.js 186 | 187 | ## Replace / remap 188 | 189 | --replace foo=bar Bind require("name") to an expression, e.g. jQuery to window.$. 
190 | --remap foo=bar Remap a name to another name (within the same package). See the docs. 191 | 192 | ## Build options 193 | 194 | --source-url Add source URL annotations to the files. Useful for development, 195 | but note that this is not compatible with IE. 196 | --global-require Export the require() implementation into the global space. 197 | --amd Export the module via the require.js AMD define("name", ...) using 198 | the name specified in --global. Note that the requirejs will not 199 | pick up modules defined like this unless you do at least one 200 | asynchronous require() call. 201 | 202 | ## Minification / source transforms 203 | 204 | --command Pipe each file through a shell command and capture the output 205 | (e.g. --command "uglifyjs --no-copyright"). 206 | --transform Activates a source transformation module. 207 | 208 | ## Performance 209 | 210 | --cache-path Use a cache directory to store file builds. The cache speeds up 211 | large builds (and minified builds) significantly since only source 212 | files that have changed are updated. 213 | --jobs Sets the maximum level of parallelism for the task 214 | execution pipeline. Default: `os.cpus().length * 2` 215 | --cache-method Sets the cache method: stat | hash algorighm name. 216 | 217 | ## Reporting 218 | 219 | --report Display the file size report. 220 | --silent Disable all output, including the reporter. 221 | --verbose More verbose output, such as files being filtered out and processed. 222 | --version Version info 223 | 224 | ## Advanced 225 | 226 | --reset-exclude Advanced: do not apply the default exclusions 227 | (/dist/, /example/, /benchmark/, .min.js). 228 | ```` 229 | 230 | ## API usage example 231 | 232 | ```javascript 233 | var Glue = require('gluejs'); 234 | new Glue() 235 | .basepath('./lib') // output paths are relative to this 236 | .main('index.js') // the file that's exported as the root of the package 237 | .include('./lib') // includes all files in the dir 238 | .exclude(new RegExp('.+\\.test\\.js')) // excludes .test.js 239 | .replace({ 240 | 'jquery': 'window.$ ', // binds require('jquery') to window.$ 241 | 'Chat': 'window.Chat' 242 | }) 243 | .export('App') // the package is output as window.App 244 | .render(fs.createWriteStream('./out.js')); 245 | ``` 246 | 247 | You can also render e.g. to a http response: 248 | 249 | ```javascript 250 | .render(function (err, txt) { 251 | // send the package as a response to a HTTP request 252 | res.setHeader('content-type', 'application/javascript'); 253 | res.end(txt); 254 | }); 255 | ``` 256 | 257 | ## --include 258 | 259 | `--include ` (console) / `.include(path)` (API). 260 | 261 | - If the path is a file, include it. 262 | - If the path is a directory, include all files in it recursively. 263 | - If the path is a node module, include all files in it and all subdependencies in the build. 264 | 265 | Sub-dependencies are also automatically bundled, as long as they've been installed by npm. Since the require() semantics are the same as in Node, subdependencies can depend on different versions of the same module without conflicting with each other. 266 | 267 | `.json` files are also supported; just like in Node, you can use `require('./foo.json')` within the resulting bundle. 268 | 269 | ## --exclude 270 | 271 | `--exclude ` / `.exclude(regexp)`: Excludes all files matching the regexp from the build. Evaluated just before rendering the build so it applies to all files. 272 | 273 | `--reset-exclude`: **New advanced option**. 
Removes the default exclusions (matching /dist/, /example/, /benchmark/, [-.]min.js$). For example: `--reset-exclude --exclude '/foo/'`.
274 |
275 | ## --global
276 |
277 | `--global <name>` / `.export(name)`: Name of the global to export. Default: `App` (e.g. `window.App`)
278 |
279 | ## --basepath
280 |
281 | `--basepath <path>` / `.basepath(path)`: Base path for relative file paths. All relative paths are appended to this value. Default: process.cwd().
282 |
283 | ## --main
284 |
285 | `--main <file>` / `.main('filename')`: Name of the main file/module to export. Default: index.js.
286 |
287 | ## --out
288 |
289 | `--out <file>` / `.render(destination)`: Write to the target path.
290 |
291 | For `.render`, the destination can be either a Writable Stream or a callback `function(err, output){}`. See the API usage example above.
292 |
293 | ## .middleware
294 |
295 | `.middleware({ include: ... })`: Returns an Express/Connect-compatible request handler.
296 |
297 | For example:
298 |
299 | app.use('/js/app.js', glue.middleware({
300 | include: __dirname + '/lib'
301 | }));
302 |
303 | Or at the route level:
304 |
305 | app.use(app.router);
306 | app.get('/js/app.js', glue.middleware({
307 | include: __dirname + '/lib'
308 | }));
309 |
310 | Using full paths is recommended to avoid ambiguity. `basepath` defaults to the `include` path, and `main` defaults to `index.js`.
311 |
312 | ## --replace
313 |
314 | `--replace name=expr` / `.replace(name, value)` / `.replace({ name: ... })`: Replace the return value of a `require()` call.
315 |
316 | For example, to bind `require('underscore')` to `window._`:
317 |
318 | --replace underscore=window._
319 |
320 | To bind `require('fs')` to an empty object:
321 |
322 | --replace fs={}
323 |
324 | Using a global require (e.g. to bind to the value of an AMD module):
325 |
326 | --replace sha1="window.require('sha1')"
327 |
328 | ## --remap
329 |
330 | `--remap name=expr` / `.remap(key, value)`: Remap a name to another name (within the same package).
331 |
332 | For example, to remap `require('assert')` to `require('chai').assert`:
333 |
334 | --remap "assert=require('chai').assert"
335 |
336 | When you are binding to an external module, use `--replace`. When the module is internal to the package (e.g. fs, assert, ...), use `--remap`. Basically, the difference is that `--remap` dependencies are only resolved when they are first required, whereas `--replace` is a direct assignment / evaluation. The delayed evaluation is needed for internal modules to prevent circular dependencies from causing issues during load time.
337 |
338 | ## --source-url
339 |
340 | `--source-url` / `.set('source-url', true)`: Source URLs are additional annotations that make it possible to show the directory tree when looking at scripts (instead of just the one compiled file):
341 |
342 | ![screenshot](https://github.com/mixu/gluejs/raw/master/test/sample/sourceurl.png)
343 |
344 | To enable source URLs, set the following option:
345 |
346 | ```javascript
347 | .set('source-url', true)
348 | ```
349 |
350 | Note that source URLs require that scripts are wrapped in an eval block with a special comment, which is not supported by IE, so don't use source URLs for production builds.
351 |
352 | ## --command
353 |
354 | `--command <cmd>` / `.set('command', <cmd>)`: Pipe each file through a shell command and capture the output. For example:
355 |
356 | --command "uglifyjs --no-copyright"
357 |
358 | For more complicated use cases, you'll probably want to use `--transform`.
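Before moving on, note that the same `--command` pipeline can also be set up through the API. A sketch using the documented `.set('command', ...)` setter (the paths and output filename are only examples):

```javascript
var fs = require('fs'),
    Glue = require('gluejs');

new Glue()
  .basepath('./lib')
  .include('./lib')
  .main('index.js')
  .export('App')
  // each file is piped through this shell command before it is written to the bundle
  .set('command', 'uglifyjs --no-copyright')
  .render(fs.createWriteStream('./app.min.js'));
```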
359 |
360 | ## --transform (v2.1)
361 |
362 | `--transform <module>`: Activates a source transformation module. This enables 3rd party extensions for things that are more complex than just piping through via `--command`. API-compatible with browserify's [source transformation modules](https://github.com/substack/node-browserify#list-of-source-transforms).
363 |
364 | This feature is new, so let me know if you run into issues with it.
365 |
366 | For example, using coffeeify:
367 |
368 | npm install coffeeify
369 | gluejs --transform coffeeify --include index.coffee > bundle.js
370 |
371 | gluejs uses [minitask](https://github.com/mixu/minitask) internally, so you can also write modules that return sync / async functions, Node core duplex / transform streams or Node core child_process objects.
372 |
373 | See the [section on writing transform modules](#writing_transform_modules) as well as [this example which uses Square's ES6-module-compiler](https://github.com/mixu/gluejs/blob/glue2/test/command-integration/es6-module.js) and [Jade example](https://github.com/mixu/gluejs/blob/glue2/test/command-integration/jade-module.js) for examples.
374 |
375 | If you write a transformation, file a PR against the readme so I can feature it here. I've tested functionality using the examples above, but I haven't published them as modules as it's hard to maintain something I'm not using.
376 |
377 | ## --report
378 |
379 | Display the summary report. Particularly useful if you are minifying files, since the report will show the file size after transformation.
380 |
381 | ## --jobs (v2.1)
382 |
383 | `--jobs <n>` / `.set('jobs', <n>)`: Sets the maximum level of parallelism for the task execution pipeline. Default: `os.cpus().length * 2`.
384 |
385 | ## --cache-path (v2.1)
386 |
387 | `--cache-path <path>` / `.set('cache-path', <path>)`: Use a specific directory for caching. This is a directory where the results of the previous builds are stored along with metadata. Caching is enabled by default in v2.1. If a path is not set, then `~/.gluejs-cache` is used for storing cache results. You can just delete the directory to invalidate the cache.
388 |
389 | The cache speeds up large builds (and minified builds) significantly since only source files that have changed are updated.
390 |
391 | Use a directory with a dot in front to hide the cached files (remember to also gitignore the directory). The path is relative to the working directory. For example:
392 |
393 | --cache-path .cache
394 |
395 | When the cache is in use, the number of cache hits is shown:
396 |
397 | Cache hits: 2 / 2 files
398 |
399 | To get even more info, enable `--verbose`.
400 |
401 | ## --cache-method (v2.1)
402 |
403 | `--cache-method <method>` / `.set('cache-method', <method>)`: Sets the cache invalidation method. `stat` uses the file size and last modified date of the input file. `md5` (and other hash algorithms supported by `crypto.createHash`) uses hashes to verify that the input file has not changed. Default: stat.
404 |
405 | ## --no-cache (v2.1)
406 |
407 | `--no-cache` / `.set('cache', false)`: Disables the cache; sets the cache directory to a temporary directory.
408 |
409 | ## --global-require
410 |
411 | `--global-require` / `.set('global-require', true)`: Overwrites / exports the require implementation from the package, allowing you to call `require()` from outside the package as if you were inside the package.
412 |
413 | One use case for this feature is when you want to package and load a fixed set of files and npm dependencies via `require()` calls in the browser.
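In API terms, that kind of build might look roughly like this sketch (the file names mirror the command line walkthrough below and are only examples):

```javascript
var fs = require('fs'),
    Glue = require('gluejs');

new Glue()
  .basepath('./')
  .include('./index.js')
  .include('./node_modules')
  .main('index.js')
  .export('App')
  // export the bundle's require() so that the page can call require('foo') directly
  .set('global-require', true)
  .render(fs.createWriteStream('./package.js'));
```

The equivalent command line walkthrough follows.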
414 |
415 | Dummy index.js:
416 |
417 | ```javascript
418 | module.exports = {};
419 | ```
420 |
421 | Build command:
422 |
423 | ```bash
424 | gluejs \
425 | --include index.js \
426 | --include node_modules/ \
427 | --global-require \
428 | --global App \
429 | --out package.js
430 | ```
431 |
432 | HTML page (assuming "foo" is a node module):
433 |
434 | ```html
435 | <script src="package.js"></script>
436 | <script>
437 | var foo = require('foo');
438 | </script>
439 | ```
440 |
441 | With `--global-require`, `require()` statements are resolved as if they were inside index.js.
442 |
443 | ## --umd (new in v2.3)
444 |
445 | `--umd` / `.set('umd', true)`: UMD compatible export.
446 |
447 | The resulting bundle can be loaded in Node (directly via require()), in AMD (as an external module) or alternatively as a global (in the browser). All you need to do is add `--umd` to your build to include the UMD wrapper.
448 |
449 | Creating the bundle:
450 |
451 | gluejs \
452 | --umd \
453 | --include ./lib/ \
454 | --include ./node_modules/microee/ \
455 | --global App \
456 | --main lib/index.js \
457 | --out app.js
458 |
459 | In node:
460 |
461 | node -e "console.log(require('./app.js'));"
462 |
463 | In AMD/Require.js, via `config.js` (assuming `--global` was set to `App`):
464 |
465 | {
466 | paths: { "myapp": "/app.js" },
467 | myapp: {
468 | deps: [ ... ],
469 | exports: 'App'
470 | }
471 | }
472 |
473 | after which the module is accessible as `myapp`.
474 |
475 | Note that Require.js might not pick up modules defined like this unless you do at least one asynchronous require() call, i.e. you need to run the no-op code `require(['foo'], function(foo){ });` before `require('foo')` will work. This seems to be a quirk in the Require.js AMD shim.
476 |
477 | Upgrade note: `--amd`, an older option which was only compatible with AMD/requirejs, is now equivalent to `--umd`.
478 |
479 | ## --verbose
480 |
481 | `--verbose` / `.set('verbose', true)`: More verbose output, such as files being filtered out and processed.
482 |
483 | ## --silent
484 |
485 | `--silent` / `.set('silent', true)`: Disables all output, including the reporter.
486 |
487 | ## A few notes about npm dependencies
488 |
489 | The main file is determined by looking at the "main" key in package.json and resolution follows the require() rules as documented in the Node API docs.
490 |
491 | Only files ending with .js are included in the builds, since require() only works with .js, .json and .node files (the last one being for compiled native modules).
492 |
493 | The .npmignore file is honored. It works like a .gitignore file. This is the preferred way of excluding files and directories from npm dependencies according to `npm help developers`.
494 |
495 | ## Writing transform modules
496 |
497 | By default, gluejs only handles files that end with ".js".
498 |
499 | You can create custom transform modules that handle other types of files, such as templates for your favorite templating language.
500 | 501 | Here is an example: 502 | 503 | var path = require('path'), 504 | jade = require('jade'); 505 | 506 | module.exports = function(filename) { 507 | // gluejs modules can be skipped by returning false 508 | if(path.extname(filename) != '.jade') { 509 | return; 510 | } 511 | 512 | // Minitask "sync" function 513 | return function(input) { 514 | return 'var jade = require(\'jade\').runtime;\n' + 515 | 'module.exports = ' + 516 | jade.compile(input, { filename: filename }).toString() + ';'; 517 | }; 518 | }; 519 | 520 | // indicate that this is a gluejs module rather than a browserify module 521 | module.exports.gluejs = true; 522 | 523 | ### Benchmark methodology 524 | 525 | Ran this: 526 | 527 | /usr/bin/time -f "\n%E wall clock,\n%U user mode CPU seconds,\n%S kernel mode CPU seconds" \ 528 | gluejs \ 529 | --no-cache \ 530 | --jobs 1 \ 531 | --command 'uglifyjs --no-copyright' \ 532 | --no-report \ 533 | --progress \ 534 | ... 535 | 536 | ## License 537 | 538 | BSD 539 | -------------------------------------------------------------------------------- /test/command-integration/Makefile: -------------------------------------------------------------------------------- 1 | all: brfs coffee hbsfy es6 jade 2 | 3 | brfs: 4 | @printf "\n\n" 5 | @printf "**** brfs ****\n" 6 | @printf "\n" 7 | ../../bin/gluejs --transform brfs --include ./test.brfs.js 8 | 9 | coffee: 10 | @printf "\n\n" 11 | @printf "**** coffee ****\n" 12 | @printf "\n" 13 | ../../bin/gluejs --transform coffeeify --include ./test.coffee 14 | 15 | hbsfy: 16 | @printf "\n\n" 17 | @printf "**** hbsfy ****\n" 18 | @printf "\n" 19 | ../../bin/gluejs --transform hbsfy --include ./test.hbs 20 | 21 | es6: 22 | @printf "\n\n" 23 | @printf "**** es6 ****\n" 24 | @printf "\n" 25 | ../../bin/gluejs --transform ./es6-module.js --include ./test.es6.js 26 | 27 | jade: 28 | @printf "\n\n" 29 | @printf "**** jade ****\n" 30 | @printf "\n" 31 | ../../bin/gluejs --transform ./jade-module.js --include ./test.jade 32 | -------------------------------------------------------------------------------- /test/command-integration/es6-module.js: -------------------------------------------------------------------------------- 1 | var path = require('path'), 2 | Compiler = require("es6-module-transpiler").Compiler; 3 | 4 | module.exports = function(filename) { 5 | // gluejs modules can be skipped by returning false 6 | if(path.extname(filename) != '.js') { 7 | return; 8 | } 9 | 10 | // Minitask "sync" function 11 | return function(input) { 12 | var compiler = new Compiler(input, path.basename(filename, '.js')); 13 | return compiler.toCJS(); 14 | }; 15 | }; 16 | 17 | // indicate that this is a gluejs module rather than a browserify module 18 | // this is needed because gluejs/Minitask features are a superset of browserify's features 19 | // see get-commands.js in gluejs for the details 20 | module.exports.gluejs = true; 21 | -------------------------------------------------------------------------------- /test/command-integration/jade-module.js: -------------------------------------------------------------------------------- 1 | var path = require('path'), 2 | jade = require('jade'); 3 | 4 | module.exports = function(filename) { 5 | // gluejs modules can be skipped by returning false 6 | if(path.extname(filename) != '.jade') { 7 | return; 8 | } 9 | 10 | // Minitask "sync" function 11 | return function(input) { 12 | return 'var jade = require(\'jade\').runtime;\n' + 13 | 'module.exports = ' + jade.compile(input, { filename: filename }).toString() + ';'; 
14 | }; 15 | }; 16 | 17 | // indicate that this is a gluejs module rather than a browserify module 18 | // this is needed because gluejs/Minitask features are a superset of browserify's features 19 | // see get-commands.js in gluejs for the details 20 | module.exports.gluejs = true; 21 | -------------------------------------------------------------------------------- /test/command-integration/robot.html: -------------------------------------------------------------------------------- 1 | beep boop 2 | -------------------------------------------------------------------------------- /test/command-integration/test.brfs.js: -------------------------------------------------------------------------------- 1 | var fs = require('fs'); 2 | var html = fs.readFileSync(__dirname + '/robot.html'); 3 | console.log(html); 4 | -------------------------------------------------------------------------------- /test/command-integration/test.coffee: -------------------------------------------------------------------------------- 1 | square = (x) -> x * x 2 | 3 | module.exports = square 4 | -------------------------------------------------------------------------------- /test/command-integration/test.es6.js: -------------------------------------------------------------------------------- 1 | import { inspect } from "util"; 2 | 3 | function log(foo) { 4 | console.log(inspect(foo)); 5 | } 6 | 7 | export { log }; 8 | -------------------------------------------------------------------------------- /test/command-integration/test.hbs: -------------------------------------------------------------------------------- 1 |
2 |   <h1>{{title}}</h1>
3 |   <div class="body">
4 |     {{body}}
5 |   </div>
6 | </div>
7 | -------------------------------------------------------------------------------- /test/command-integration/test.jade: -------------------------------------------------------------------------------- 1 | h1 Jade! 2 | -------------------------------------------------------------------------------- /test/fixtures/complex-package/lib/index.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | text: 'lib/index.js from the root of the complex package', 3 | 'l1_dep': require('l1_dep') 4 | }; 5 | -------------------------------------------------------------------------------- /test/fixtures/complex-package/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "main": "lib/index.js", 3 | "dependencies": { 4 | "l1_dep": "*" 5 | }, 6 | "devDependencies": { 7 | "l1_dev_only": "~1.2.3" 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /test/fixtures/empty-file/empty.js: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mixu/gluejs/9ac8cf6cb03b8a254cde7379d5d47935d1323127/test/fixtures/empty-file/empty.js -------------------------------------------------------------------------------- /test/fixtures/jade-file/foo.jade: -------------------------------------------------------------------------------- 1 | h1 2 | | Hello 3 | = ' ' + name 4 | -------------------------------------------------------------------------------- /test/fixtures/jade-file/index.js: -------------------------------------------------------------------------------- 1 | module.exports = 'index.js'; 2 | -------------------------------------------------------------------------------- /test/fixtures/json-file/foo.json: -------------------------------------------------------------------------------- 1 | { 2 | "foo": "bar" 3 | } 4 | -------------------------------------------------------------------------------- /test/fixtures/json-file/index.js: -------------------------------------------------------------------------------- 1 | module.exports = require('./foo.json'); 2 | -------------------------------------------------------------------------------- /test/fixtures/single-file/has_dependency.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | has_dependency: true, 3 | dependency: require('dependency') 4 | }; 5 | -------------------------------------------------------------------------------- /test/fixtures/single-file/simple.js: -------------------------------------------------------------------------------- 1 | exports.simple = true; 2 | 3 | -------------------------------------------------------------------------------- /test/integration.test.js: -------------------------------------------------------------------------------- 1 | var fs = require('fs'), 2 | path = require('path'), 3 | assert = require('assert'), 4 | Glue = require('gluejs'); 5 | 6 | exports['integration tests'] = { 7 | 8 | // ADD TESTS FOR: 9 | // - the stream size reporter 10 | 11 | 12 | 'can build a json file': function(done) { 13 | var file = fs.createWriteStream(__dirname + '/tmp/temp.js'); 14 | 15 | file.once('close', function() { 16 | var result = require(__dirname + '/tmp/temp.js'); 17 | assert.deepEqual(result, { foo: "bar" }); 18 | done(); 19 | }); 20 | 21 | new Glue() 22 | .basepath(__dirname +'/fixtures/json-file/') 23 | .include('./') 24 | .set('cache', false) 25 | 
.export('module.exports') 26 | .render(file); 27 | }, 28 | 29 | '--command with unix pipe': function(done) { 30 | new Glue() 31 | .basepath(__dirname +'/fixtures/jade-file/') 32 | .include('./') 33 | .set('cache', false) 34 | .set('command', 'bash -c "echo \'module.exports = \"bar\";\'"') 35 | .export('module.exports') 36 | .render(function(err, txt) { 37 | console.log(txt); 38 | done(); 39 | }); 40 | }, 41 | 42 | '--command with specific extension': function(done) { 43 | var file = fs.createWriteStream(__dirname + '/tmp/temp2.js'); 44 | 45 | file.once('close', function() { 46 | var name = new Date().getTime(); 47 | // use standard require 48 | var result = require(__dirname + '/tmp/temp2.js')({ name: name }); 49 | console.log(result); 50 | assert.deepEqual(result, '

<h1>Hello '+name+'</h1>

'); 51 | done(); 52 | }); 53 | 54 | var spawn = require('../lib/file-tasks/spawn.js'), 55 | wrapCommonJs = require('../lib/file-tasks/wrap-commonjs-web.js'); 56 | 57 | // There are way too many internals exposed here ... must encapsulate these better. 58 | 59 | new Glue() 60 | .basepath(__dirname +'/fixtures/jade-file/') 61 | .include('./') 62 | .set('cache', false) 63 | .set('require', false) 64 | .set('command', [ 65 | function(filename, pkg) { 66 | if(path.extname(filename) != '.jade') { 67 | return; 68 | } 69 | return function() { 70 | return spawn({ 71 | name: filename, // full path 72 | task: __dirname + '/node_modules/.bin/jade --client --no-debug' 73 | }); 74 | }; 75 | }, 76 | // NOTE: run the uglify beautify on the jade output (not on the partial produced by the 77 | // CJS wrapper... 78 | function(filename, pkg) { 79 | if(path.extname(filename) != '.jade') { 80 | return; 81 | } 82 | return function() { 83 | return spawn({ 84 | name: filename, // full path 85 | task: __dirname + '/node_modules/.bin/uglifyjs --no-copyright --beautify' 86 | }); 87 | }; 88 | }, 89 | // wrapper: 90 | // var jade = require("jade").runtime; module.exports = ; 91 | function(filename, pkg) { 92 | if(path.extname(filename) != '.jade') { 93 | return; 94 | } 95 | return function(input) { 96 | return 'function(module, exports, require){' + 97 | 'var jade = require(\'jade\').runtime;\n' + 98 | 'module.exports = ' + (input.length === 0 ? '{}' : input) + 99 | '}'; 100 | }; 101 | } 102 | ]) 103 | .main('foo.jade') 104 | .export('module.exports') 105 | .render(file); 106 | }, 107 | 108 | 'try brfs': function(done) { 109 | new Glue() 110 | .basepath(__dirname +'/command-integration/') 111 | .include('./test.brfs.js') 112 | .set('cache', false) 113 | .set('require', false) 114 | .set('report', true) 115 | .set('command', [ 116 | function(filename, pkg) { 117 | return function() { 118 | // note that brfs seems to only use the filename for resolving the fs calls 119 | return require('brfs')(filename); 120 | }; 121 | } 122 | ]) 123 | .main('test.brfs.js') 124 | .export('module.exports') 125 | .render(function(err, txt) { 126 | console.log(txt); 127 | done(); 128 | }); 129 | 130 | }, 131 | 132 | 'try coffeeify': function(done) { 133 | new Glue() 134 | .basepath(__dirname +'/command-integration/') 135 | .include('./test.coffee') 136 | .set('cache', false) 137 | .set('require', false) 138 | .set('report', true) 139 | .set('command', [ 140 | function(filename, pkg) { 141 | return function() { 142 | return require('coffeeify')(filename); 143 | }; 144 | } 145 | ]) 146 | .main('test.brfs.js') 147 | .export('module.exports') 148 | .render(function(err, txt) { 149 | console.log(txt); 150 | done(); 151 | }); 152 | 153 | } 154 | 155 | 156 | }; 157 | 158 | // if this module is the script being run, then run the tests: 159 | if (module == require.main) { 160 | var mocha = require('child_process').spawn('mocha', [ 161 | '--colors', '--ui', 'exports', '--reporter', 'spec', __filename 162 | ]); 163 | mocha.stderr.on('data', function (data) { 164 | if (/^execvp\(\)/.test(data)) { 165 | console.log('Failed to start child process. 
You need mocha: `npm install -g mocha`'); 166 | } 167 | }); 168 | mocha.stdout.pipe(process.stdout); 169 | mocha.stderr.pipe(process.stderr); 170 | } 171 | 172 | -------------------------------------------------------------------------------- /test/list-tasks/annotate-stat.test.js: -------------------------------------------------------------------------------- 1 | var assert = require('assert'); 2 | util = require('util'); 3 | 4 | var stat = require('../../lib/list-tasks/annotate-stat.js'), 5 | List = require('minitask').list; 6 | 7 | var list = new List(); 8 | 9 | list.add(__dirname+'/../fixtures/single-file/'); 10 | 11 | exports['annotate-stat'] = { 12 | 13 | 'can stat the list': function(done) { 14 | list.exec(function(err, files) { 15 | 16 | stat({ files: files }); 17 | //console.log(util.inspect(files, null, 10, true)); 18 | // each file has a stat property now 19 | assert.ok(files.every(function(file) { 20 | return typeof file.stat == 'object'; 21 | })); 22 | done(); 23 | }); 24 | } 25 | }; 26 | 27 | 28 | -------------------------------------------------------------------------------- /test/list-tasks/filter-npm.test.js: -------------------------------------------------------------------------------- 1 | var assert = require('assert'), 2 | util = require('util'); 3 | 4 | var filter = require('../../lib/list-tasks/filter-npm.js'); 5 | 6 | var cases = { 7 | 'built-in ignores': { 8 | 9 | files: [ 10 | '/a/.git/config', 11 | '/a/.git/foo/bar', 12 | '/a/.lock-wscript', 13 | '/a/.lock-wscript-keepme', 14 | '/a/.wafpickle-1', 15 | '/a/.wafpickle-2-keepme', 16 | '/a/CVS/foo', 17 | '/a/.svn/foo', 18 | '/a/.hg/foo', 19 | '/a/.foobar.swp', 20 | '/a/keepme.swp', 21 | '/a/.DS_Store', 22 | '/a/.DS_Store/keepme', 23 | '/a/.DS_Store-keepme', 24 | '/a/._', 25 | '/a/npm-debug.log', 26 | '/a/npm-debug.log/keepme', 27 | '/a/npm-debug.log-keepme' 28 | ] 29 | 30 | } 31 | 32 | }; 33 | 34 | Object.keys(cases).forEach(function(name) { 35 | cases[name].files = cases[name].files.map(function(file) { return { name: file }; }); 36 | }); 37 | 38 | exports['filter-npm'] = { 39 | 40 | 'can exclude using the npm built-in ignore list': function() { 41 | var list = cases['built-in ignores']; 42 | filter(list); 43 | //console.log(util.inspect(list, null, 10, true)); 44 | assert.deepEqual(list.files, [ 45 | { name: '/a/.lock-wscript-keepme' }, 46 | { name: '/a/.wafpickle-2-keepme' }, 47 | { name: '/a/keepme.swp' }, 48 | { name: '/a/.DS_Store/keepme' }, 49 | { name: '/a/.DS_Store-keepme' }, 50 | { name: '/a/npm-debug.log/keepme' }, 51 | { name: '/a/npm-debug.log-keepme' } 52 | ]); 53 | } 54 | }; 55 | 56 | // if this module is the script being run, then run the tests: 57 | if (module == require.main) { 58 | var mocha = require('child_process').spawn('mocha', [ '--colors', '--ui', 'exports', '--reporter', 'spec', __filename ]); 59 | mocha.stderr.on('data', function (data) { 60 | if (/^execvp\(\)/.test(data)) { 61 | console.log('Failed to start child process. 
You need mocha: `npm install -g mocha`'); 62 | } 63 | }); 64 | mocha.stdout.pipe(process.stdout); 65 | mocha.stderr.pipe(process.stderr); 66 | } 67 | -------------------------------------------------------------------------------- /test/list-tasks/filter-package.test.js: -------------------------------------------------------------------------------- 1 | var assert = require('assert'), 2 | util = require('util'); 3 | 4 | var infer = require('../../lib/list-tasks/infer-packages.js'), 5 | filter = require('../../lib/list-tasks/filter-packages.js'); 6 | 7 | var cases = { 8 | 9 | 'package.json whitelist' : { files: [ 10 | '/a/excludeme.js', 11 | '/a/included_file.js', 12 | '/a/included_file.foobar', 13 | '/a/package.json', 14 | '/a/excluded_directory/aaa.js', 15 | '/a/included_directory/bbb.js', 16 | '/a/included_directory/ccc/ddd.js' 17 | ], 18 | fakeFS: { 19 | existsSync: function(name) { 20 | return !!(name == '/a/package.json'); 21 | }, 22 | '/a/package.json': JSON.stringify({ 23 | files: [ 24 | '/a/included_directory', 25 | '/a/included_file.js', 26 | '/a/included_file.foobar' 27 | ] 28 | }) 29 | } 30 | }, 31 | 32 | 'npmignore blacklist': { 33 | files: [ 34 | '/a/excludeme.js', 35 | '/a/foo.excludeme', 36 | '/a/included_file.js', 37 | '/a/included_file.foobar', 38 | '/a/.npmignore', 39 | '/a/included_directory/bbb.js', 40 | '/a/included_directory/excluded_sub/ddd.js', 41 | '/a/examples/file', 42 | '/a/exclude/file', 43 | '/a/test/file', 44 | '/a/docs/file', 45 | '/a/glob/file' 46 | ], 47 | // to be honest, the way in which glob matching works in npm is quite nutty 48 | // https://github.com/isaacs/fstream-ignore/blob/master/ignore.js 49 | // e.g. trying multiple variants of the same path, recursively against the same rule 50 | // 51 | // My goal here is to 1) use the same underlying expression compiler (isaacs/minimatch) 52 | // and 2) to test that the most common cases are covered. 
53 | fakeFS: { 54 | '/a/.npmignore': 55 | // should strip comments 56 | '# Comment\n' + 57 | // specific file 58 | '/a/excludeme.js\n' + 59 | // specific extension 60 | '*.excludeme\n'+ 61 | // specific directory (all four permutations) 62 | 'test\n' + 63 | '/exclude\n' + 64 | 'docs/\n' + 65 | '/examples/\n' + 66 | // glob "dir/*" 67 | 'glob/*\n' + 68 | // glob "dir/*/**" 69 | 'included_directory/*/**\n' 70 | } 71 | }, 72 | 73 | 'package.json devDependencies': { 74 | files: [ 75 | '/a/index.js', 76 | '/a/node_modules/bar/index.js', 77 | '/a/node_modules/bar/package.json', 78 | '/a/node_modules/bar/node_modules/include_me.js', 79 | '/a/node_modules/bar/node_modules/exclude_me.js', 80 | '/a/node_modules/bar/node_modules/include/second.js', 81 | '/a/node_modules/bar/node_modules/exclude/second.js', 82 | '/a/node_modules/bar/node_modules/exclude/node_modules/subdependency.js', 83 | ], 84 | fakeFS: { 85 | existsSync: function(name) { 86 | return !!(name == '/a/node_modules/bar/package.json'); 87 | }, 88 | '/a/node_modules/bar/package.json': JSON.stringify({ 89 | devDependencies: { 90 | exclude_me: '*', 91 | exclude: '*' 92 | } 93 | }) 94 | } 95 | } 96 | }; 97 | 98 | Object.keys(cases).forEach(function(name) { 99 | cases[name].files = cases[name].files.map(function(file) { return { name: file }; }); 100 | }); 101 | 102 | exports['filter-package'] = { 103 | 104 | before: function() { 105 | var self = this; 106 | 107 | function mock(filename) { 108 | if(self.fakeFS[filename]) { 109 | return self.fakeFS[filename]; 110 | } 111 | console.log('fs.readFileSync', filename); 112 | return '{}'; 113 | } 114 | 115 | infer._setFS({ 116 | existsSync: function(filename) { 117 | return self.fakeFS.existsSync(filename); 118 | }, 119 | readFileSync: mock 120 | }); 121 | 122 | filter._setFS({ 123 | readFileSync: mock 124 | }); 125 | }, 126 | 127 | 'can exclude via package.json whitelist': function() { 128 | var list = cases['package.json whitelist']; 129 | this.fakeFS = list.fakeFS; 130 | // first, infer the package structure 131 | infer(list); 132 | // now apply the filter 133 | filter(list); 134 | // console.log(util.inspect(list, null, 10, true)); 135 | assert.deepEqual(list.files, [ 136 | { name: '/a/included_file.js' }, 137 | { name: '/a/included_file.foobar' }, 138 | { name: '/a/included_directory/bbb.js' }, 139 | { name: '/a/included_directory/ccc/ddd.js' }, 140 | ]); 141 | }, 142 | 143 | 'can exclude via .npmignore': function() { 144 | var list = cases['npmignore blacklist']; 145 | this.fakeFS = list.fakeFS; 146 | // first, infer the package structure 147 | infer(list); 148 | // now apply the filter 149 | filter(list); 150 | // console.log(util.inspect(list, null, 10, true)); 151 | assert.deepEqual(list.files, [ 152 | { name: '/a/included_file.js' }, 153 | { name: '/a/included_file.foobar' }, 154 | { name: '/a/.npmignore' }, 155 | { name: '/a/included_directory/bbb.js' } 156 | ]); 157 | 158 | }, 159 | 160 | 'packages listed in package.json as devDependencies are ignored': function() { 161 | var list = cases['package.json devDependencies']; 162 | this.fakeFS = list.fakeFS; 163 | // first, infer the package structure 164 | infer(list); 165 | // now apply the filter 166 | filter(list); 167 | // console.log(util.inspect(list, null, 10, true)); 168 | assert.deepEqual(list.files, [ 169 | { name: '/a/index.js' }, 170 | { name: '/a/node_modules/bar/index.js' }, 171 | { name: '/a/node_modules/bar/package.json' }, 172 | { name: '/a/node_modules/bar/node_modules/include_me.js' }, 173 | { name: 
'/a/node_modules/bar/node_modules/include/second.js' } 174 | ]); 175 | // base package, bar, include and include_me = 4 packages 176 | assert.equal(list.packages.length, 4); 177 | } 178 | 179 | /* 180 | 'test minimatch': function() { 181 | var minimatch = require("minimatch"); 182 | var expr = 'a/*' + '/**'; 183 | console.log(minimatch('a/b', expr, { matchBase: true, dot: true, flipNegate: true })); 184 | console.log(minimatch('a/exlc/c', expr, { matchBase: true, dot: true, flipNegate: true })); 185 | console.log(minimatch('a/exlc/two/c', expr, { matchBase: true, dot: true, flipNegate: true })); 186 | expr = 'a/'; 187 | console.log(minimatch('a/b', expr, { matchBase: true, dot: true, flipNegate: true })); 188 | console.log(minimatch('a/exlc/c', expr, { matchBase: true, dot: true, flipNegate: true })); 189 | console.log(minimatch('a/exlc/two/c', expr, { matchBase: true, dot: true, flipNegate: true })); 190 | } 191 | */ 192 | }; 193 | 194 | // if this module is the script being run, then run the tests: 195 | if (module == require.main) { 196 | var mocha = require('child_process').spawn('mocha', [ '--colors', '--ui', 'exports', '--reporter', 'spec', __filename ]); 197 | mocha.stderr.on('data', function (data) { 198 | if (/^execvp\(\)/.test(data)) { 199 | console.log('Failed to start child process. You need mocha: `npm install -g mocha`'); 200 | } 201 | }); 202 | mocha.stdout.pipe(process.stdout); 203 | mocha.stderr.pipe(process.stderr); 204 | } 205 | -------------------------------------------------------------------------------- /test/list-tasks/infer-packages.test.js: -------------------------------------------------------------------------------- 1 | var assert = require('assert'), 2 | util = require('util'); 3 | 4 | var infer = require('../../lib/list-tasks/infer-packages.js'); 5 | 6 | function pluck(key, obj) { 7 | var o = { }; 8 | o[key] = obj[key]; 9 | return o; 10 | } 11 | 12 | var cases = { 13 | 14 | 'can infer a single-file package': { 15 | files: [ '/fixtures/simple.js' ] 16 | }, 17 | 18 | 'has-node-module-file': { 19 | files: [ 20 | '/fixtures/index.js', 21 | '/fixtures/node_modules/foo.js' 22 | ] 23 | }, 24 | 25 | 'has-node-module-folder': { 26 | files: [ 27 | '/fixtures/index.js', 28 | '/fixtures/node_modules/foo/index.js', 29 | '/fixtures/node_modules/foo/lib/sub.js' 30 | ], 31 | fakeFS: { 32 | existsSync: function(name) { 33 | return !!cases['has-node-module-folder'].files[name]; 34 | }, 35 | } 36 | }, 37 | 38 | 'has-node-module-folder-mainfile-via-package-json': { 39 | files: [ 40 | '/fixtures/index.js', 41 | '/fixtures/node_modules/foo/main.js', 42 | '/fixtures/node_modules/foo/lib/sub.js', 43 | '/fixtures/node_modules/foo/package.json' 44 | ], 45 | fakeFS: { 46 | existsSync: function(name) { 47 | return !!(name == '/fixtures/node_modules/foo/package.json'); 48 | }, 49 | '/fixtures/node_modules/foo/package.json': JSON.stringify({ 50 | main: 'main.js' 51 | }) 52 | } 53 | }, 54 | 55 | 'has-sub-sub-sub-module': { 56 | files: [ 57 | '/fixtures/index.js', 58 | '/fixtures/node_modules/aa/index.js', 59 | '/fixtures/node_modules/aa/node_modules/bb.js', 60 | '/fixtures/node_modules/aa/node_modules/cc/differentfile.js', 61 | '/fixtures/node_modules/aa/node_modules/cc/package.json' 62 | ], 63 | fakeFS: { 64 | existsSync: function(name) { 65 | return !!(name == '/fixtures/node_modules/aa/node_modules/cc/package.json'); 66 | }, 67 | '/fixtures/node_modules/aa/node_modules/cc/package.json': JSON.stringify({ 68 | main: 'differentfile.js' 69 | }) 70 | } 71 | }, 72 | 73 | 'json-node-module': 
{ 74 | files: [ 75 | '/a/index.js', 76 | '/a/node_modules/b.json' 77 | ], 78 | fakeFS: { 79 | existsSync: function(name) { 80 | return false; 81 | } 82 | } 83 | }, 84 | 85 | 'package-json-guess-extension': { 86 | files: [ 87 | '/a/index.js', 88 | '/a/node_modules/b/alt.js', 89 | '/a/node_modules/b/package.json' 90 | ], 91 | fakeFS: { 92 | existsSync: function(name) { 93 | return !!(name == '/a/node_modules/b/package.json'); 94 | }, 95 | '/a/node_modules/b/package.json': JSON.stringify({ 96 | main: 'alt' 97 | }) 98 | } 99 | }, 100 | 101 | 'package-json-guess-directory': { 102 | files: [ 103 | '/a/index.js', 104 | '/a/node_modules/b/lib/index.js', 105 | '/a/node_modules/b/package.json' 106 | ], 107 | fakeFS: { 108 | existsSync: function(name) { 109 | return !!(name == '/a/node_modules/b/package.json'); 110 | }, 111 | '/a/node_modules/b/package.json': JSON.stringify({ 112 | main: './lib/' 113 | }) 114 | } 115 | }, 116 | 117 | 'package-json-relpath': { 118 | files: [ 119 | '/a/index.js', 120 | '/a/node_modules/b/lib/foo/bar/alt.js', 121 | '/a/node_modules/b/package.json' 122 | ], 123 | fakeFS: { 124 | existsSync: function(name) { 125 | return !!(name == '/a/node_modules/b/package.json'); 126 | }, 127 | '/a/node_modules/b/package.json': JSON.stringify({ 128 | main: './lib/foo/../foo/bar/alt.js' 129 | }) 130 | } 131 | }, 132 | 133 | 'if the main path is a relative path, it should be normalized': { 134 | files: [ 135 | '/a/index.js', 136 | '/a/node_modules/b/url.js', 137 | '/a/node_modules/b/package.json' 138 | ], 139 | fakeFS: { 140 | existsSync: function(name) { 141 | return !!(name == '/a/node_modules/b/package.json'); 142 | }, 143 | '/a/node_modules/b/package.json': JSON.stringify({ 144 | main: './url.js' 145 | }) 146 | } 147 | } 148 | }; 149 | 150 | Object.keys(cases).forEach(function(name) { 151 | cases[name].files = cases[name].files.map(function(file) { return { name: file }; }); 152 | }); 153 | 154 | exports['infer-packages'] = { 155 | 156 | before: function() { 157 | var self = this; 158 | infer._setFS({ 159 | existsSync: function(filename) { 160 | return self.fakeFS.existsSync(filename); 161 | }, 162 | readFileSync: function(filename) { 163 | if(self.fakeFS[filename]) { 164 | return self.fakeFS[filename]; 165 | } 166 | console.log('fs.readFileSync', filename); 167 | throw new Error('Unknown FakeFS read ' + filename); 168 | } 169 | }); 170 | }, 171 | 172 | 'can infer a single-file package': function() { 173 | var list = cases['can infer a single-file package']; 174 | infer(list); 175 | // console.log(util.inspect(list, null, 10, true)); 176 | assert.equal(list.packages.length, 1); 177 | // the root (or base) package should be anonymous (=> name is given by the user) 178 | assert.ok(typeof list.packages[0].name == 'undefined'); 179 | assert.ok(typeof list.packages[0].basepath == 'undefined'); 180 | assert.ok(typeof list.packages[0].main == 'undefined'); 181 | // the package files should be correct 182 | assert.deepEqual(list.packages[0].files, [ { name: '/fixtures/simple.js' } ]); 183 | }, 184 | 185 | 'can infer two packages from module-file and detect the right main file': function() { 186 | var list = cases['has-node-module-file']; 187 | infer(list); 188 | // console.log(util.inspect(list, null, 10, true)); 189 | assert.equal(list.packages.length, 2); 190 | // the root (or base) package should be anonymous (=> name is given by the user) 191 | assert.ok(typeof list.packages[0].name == 'undefined'); 192 | // the package files should be correct 193 | 
assert.deepEqual(list.packages[0].files, [ { name: '/fixtures/index.js' } ]); 194 | assert.deepEqual(list.packages[0].dependenciesById, { foo: 1 }); 195 | 196 | // foo package 197 | assert.equal(list.packages[1].name, 'foo'); 198 | assert.equal(list.packages[1].basepath, '/fixtures/node_modules/'); 199 | assert.equal(list.packages[1].main, 'foo.js'); 200 | assert.deepEqual(list.packages[1].files, [ { name: '/fixtures/node_modules/foo.js' } ]); 201 | assert.deepEqual(list.packages[1].dependenciesById, { }); 202 | }, 203 | 204 | 'can infer two packages from module-folder': function() { 205 | var list = cases['has-node-module-folder']; 206 | // set up fakeFS 207 | this.fakeFS = list.fakeFS; 208 | infer(list); 209 | // console.log(util.inspect(list, null, 10, true)); 210 | assert.equal(list.packages.length, 2); 211 | // the root (or base) package should be anonymous (=> name is given by the user) 212 | assert.ok(typeof list.packages[0].name == 'undefined'); 213 | // the package files should be correct 214 | assert.deepEqual(list.packages[0].files, [ { name: '/fixtures/index.js' } ]); 215 | assert.deepEqual(list.packages[0].dependenciesById, { foo: 1 }); 216 | 217 | // foo package 218 | assert.equal(list.packages[1].name, 'foo'); 219 | assert.equal(list.packages[1].basepath, '/fixtures/node_modules/foo/'); 220 | assert.equal(list.packages[1].main, 'index.js'); 221 | assert.deepEqual(list.packages[1].files, [ 222 | { name: '/fixtures/node_modules/foo/index.js' }, 223 | { name: '/fixtures/node_modules/foo/lib/sub.js' } 224 | ]); 225 | assert.deepEqual(list.packages[1].dependenciesById, { }); 226 | }, 227 | 228 | 'can pick up main file name from package.json': function() { 229 | var list = cases['has-node-module-folder-mainfile-via-package-json']; 230 | 231 | // set up fakeFS 232 | this.fakeFS = list.fakeFS; 233 | 234 | infer(list); 235 | // console.log(util.inspect(list, null, 10, true)); 236 | assert.equal(list.packages.length, 2); 237 | // the root (or base) package should be anonymous (=> name is given by the user) 238 | assert.ok(typeof list.packages[0].name == 'undefined'); 239 | // the package files should be correct 240 | assert.deepEqual(list.packages[0].files, [ { name: '/fixtures/index.js' } ]); 241 | assert.deepEqual(list.packages[0].dependenciesById, { foo: 1 }); 242 | 243 | // foo package 244 | assert.equal(list.packages[1].name, 'foo'); 245 | assert.equal(list.packages[1].basepath, '/fixtures/node_modules/foo/'); 246 | assert.equal(list.packages[1].main, 'main.js'); 247 | assert.deepEqual(list.packages[1].files, [ { name: '/fixtures/node_modules/foo/main.js' }, { name: '/fixtures/node_modules/foo/package.json' } , { name: '/fixtures/node_modules/foo/lib/sub.js' } ]); 248 | assert.deepEqual(list.packages[1].dependenciesById, { }); 249 | }, 250 | 251 | 'can pick up recursive node_modules': function(){ 252 | var list = cases['has-sub-sub-sub-module']; 253 | // set up fakeFS 254 | this.fakeFS = list.fakeFS; 255 | infer(list); 256 | // console.log(util.inspect(list.packages, null, 10, true)); 257 | assert.equal(list.packages.length, 4); 258 | assert.deepEqual(list.packages, 259 | [ 260 | { files: [ { name: '/fixtures/index.js' } ], 261 | dependenciesById: { aa: 1 } }, 262 | { name: 'aa', 263 | uid: 1, 264 | basepath: '/fixtures/node_modules/aa/', 265 | main: 'index.js', 266 | files: [ { name: '/fixtures/node_modules/aa/index.js' } ], 267 | dependenciesById: { bb: 2, cc: 3 } }, 268 | { name: 'bb', 269 | uid: 2, 270 | basepath: '/fixtures/node_modules/aa/node_modules/', 271 | main: 
'bb.js', 272 | files: [ { name: '/fixtures/node_modules/aa/node_modules/bb.js' } ], 273 | dependenciesById: {} }, 274 | { name: 'cc', 275 | uid: 3, 276 | basepath: '/fixtures/node_modules/aa/node_modules/cc/', 277 | main: 'differentfile.js', 278 | files: 279 | [ { name: '/fixtures/node_modules/aa/node_modules/cc/differentfile.js' }, 280 | { name: '/fixtures/node_modules/aa/node_modules/cc/package.json' } ], 281 | dependenciesById: {} } ]); 282 | }, 283 | 284 | 'can resolve single .json file npm module': function() { 285 | var list = cases['json-node-module']; 286 | infer(list); 287 | // console.log(util.inspect(list, null, 10, true)); 288 | assert.equal(list.packages.length, 2); 289 | assert.deepEqual(list.packages, 290 | [ 291 | { files: [ { name: '/a/index.js' } ], 292 | dependenciesById: { b: 1 } }, 293 | { name: 'b', 294 | uid: 1, 295 | basepath: '/a/node_modules/', 296 | main: 'b.json', 297 | files: [ { name: '/a/node_modules/b.json' } ], 298 | dependenciesById: {} } ]); 299 | }, 300 | 301 | 'it should be OK to define the main file without the .js extension': function() { 302 | var list = cases['package-json-guess-extension']; 303 | // set up fakeFS 304 | this.fakeFS = list.fakeFS; 305 | infer(list); 306 | // console.log(util.inspect(list, null, 10, true)); 307 | assert.equal(list.packages.length, 2); 308 | assert.deepEqual(list.packages, [ 309 | { files: [ { name: '/a/index.js' } ], 310 | dependenciesById: { b: 1 } }, 311 | { name: 'b', 312 | uid: 1, 313 | basepath: '/a/node_modules/b/', 314 | main: 'alt.js', 315 | files: 316 | [ { name: '/a/node_modules/b/alt.js' }, 317 | { name: '/a/node_modules/b/package.json' } ], 318 | dependenciesById: {} } ]); 319 | }, 320 | 321 | 'it should be OK to define the main file as just a directory': function() { 322 | var list = cases['package-json-guess-directory']; 323 | // set up fakeFS 324 | this.fakeFS = list.fakeFS; 325 | infer(list); 326 | // console.log(util.inspect(list, null, 10, true)); 327 | assert.equal(list.packages.length, 2); 328 | assert.deepEqual(list.packages, [ { files: [ { name: '/a/index.js' } ], 329 | dependenciesById: { b: 1 } }, 330 | { name: 'b', 331 | uid: 1, 332 | basepath: '/a/node_modules/b/', 333 | main: 'lib/index.js', 334 | files: 335 | [ { name: '/a/node_modules/b/package.json' }, 336 | { name: '/a/node_modules/b/lib/index.js' } ], 337 | dependenciesById: {} } ]); 338 | }, 339 | 340 | 'if the main path is a relative path, it should be normalized': function() { 341 | var list = cases['if the main path is a relative path, it should be normalized']; 342 | // set up fakeFS 343 | this.fakeFS = list.fakeFS; 344 | infer(list); 345 | // console.log(util.inspect(list, null, 10, true)); 346 | assert.equal(list.packages.length, 2); 347 | assert.deepEqual(list.packages, [ { files: [ { name: '/a/index.js' } ], 348 | dependenciesById: { b: 1 } }, 349 | { name: 'b', 350 | uid: 1, 351 | basepath: '/a/node_modules/b/', 352 | main: 'url.js', 353 | files: 354 | [ { name: '/a/node_modules/b/url.js' }, 355 | { name: '/a/node_modules/b/package.json' } ], 356 | dependenciesById: {} } ]); 357 | } 358 | 359 | }; 360 | 361 | // if this module is the script being run, then run the tests: 362 | if (module == require.main) { 363 | var mocha = require('child_process').spawn('mocha', [ '--colors', '--ui', 'exports', '--reporter', 'spec', __filename ]); 364 | mocha.stderr.on('data', function (data) { 365 | if (/^execvp\(\)/.test(data)) { 366 | console.log('Failed to start child process. 
You need mocha: `npm install -g mocha`'); 367 | } 368 | }); 369 | mocha.stdout.pipe(process.stdout); 370 | mocha.stderr.pipe(process.stderr); 371 | } 372 | -------------------------------------------------------------------------------- /test/list-tasks/tmp/Makefile: -------------------------------------------------------------------------------- 1 | build: 2 | ../../../bin/gluejs \ 3 | --include ./index.js \ 4 | --include ./node_modules \ 5 | --exclude /tests/ \ 6 | --include lib \ 7 | --global foo \ 8 | --main index.js \ 9 | --out foo.js \ 10 | --remap "assert=require('chai').assert" \ 11 | --remap "abc=require('./lib/bar')" \ 12 | --global-require 13 | -------------------------------------------------------------------------------- /test/list-tasks/tmp/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /test/list-tasks/tmp/index.js: -------------------------------------------------------------------------------- 1 | module.exports = require('url'); 2 | -------------------------------------------------------------------------------- /test/list-tasks/tmp/lib/bar.js: -------------------------------------------------------------------------------- 1 | module.exports = function() { 2 | console.log('bar'); 3 | return 'bar'; 4 | }; 5 | -------------------------------------------------------------------------------- /test/list-tasks/tmp/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "tmp", 3 | "version": "0.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "Mikito Takada (http://mixu.net/)", 10 | "license": "BSD" 11 | } 12 | -------------------------------------------------------------------------------- /test/node_modules/gluejs.js: -------------------------------------------------------------------------------- 1 | module.exports = require('../../index.js'); 2 | -------------------------------------------------------------------------------- /test/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gluejs-tests", 3 | "description": "Tests for gluejs", 4 | "dependencies": { 5 | "coffee-script": "~1.6.3", 6 | "jade": "~0.35.0", 7 | "brfs": "0.0.8", 8 | "coffeeify": "~0.5.2", 9 | "es6-module-transpiler": "~0.3.1", 10 | "handlebars-runtime": "~1.0.12", 11 | "hbsfy": "~0.1.5", 12 | "uglify-js": "~2.4.8" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /test/require.test.js: -------------------------------------------------------------------------------- 1 | var fs = require('fs'), 2 | vm = require('vm'), 3 | path = require('path'), 4 | assert = require('assert'), 5 | reqWrap = require('../lib/require/index.js'); 6 | 7 | function stringify(value) { 8 | if(typeof value === 'function') { 9 | return value.toString(); 10 | } 11 | if(value.toString() === '[object Object]') { 12 | return JSON.stringify(value, null, 2); 13 | } 14 | return value.toString(); 15 | } 16 | 17 | function convert(arr) { 18 | var result = '[\n'; 19 | result += arr.map(function(pack) { 20 | return ' {\n' + Object.keys(pack).map(function(key) { 21 | var value = pack[key]; 22 | return ' ' + JSON.stringify(key) + ': ' + stringify(value); 23 | }).join(',\n') + '\n }'; 24 | }).join(',\n'); 25 | result += '\n]'; 26 | return 
result; 27 | } 28 | 29 | 30 | function createCode(opts) { 31 | var str = reqWrap.prelude(opts); 32 | // 2) module definitions 33 | str += 'r.m = ' + convert(opts.packages) + ';\n'; 34 | str += reqWrap.postlude(opts); 35 | return str; 36 | } 37 | 38 | function box() { 39 | var sandbox = { exports: {}, console: console }; 40 | sandbox.module = sandbox; 41 | sandbox.global = sandbox; 42 | return sandbox; 43 | } 44 | 45 | // test matrix: 46 | // - `node` and `umd` should work equally well 47 | // - `min` and `max` versions should work equally well 48 | ['node', 'umd'].forEach(function(exportType) { 49 | ['max', 'min'].forEach(function(minType) { 50 | exports[exportType + ' ' + minType + ' require tests'] = tests({ 51 | type: exportType, 52 | require: minType 53 | }); 54 | }); 55 | }); 56 | 57 | function tests(defaultOpts) { 58 | return { 59 | 'can require() a local file': function() { 60 | var code = createCode({ 61 | 62 | type: defaultOpts.type, 63 | 'root-file': 'index.js', 64 | 'global-require': false, 65 | 'export': 'App', 66 | require: defaultOpts.require, 67 | 68 | packages: [{ 69 | "index.js": function(module, exports, require){ 70 | module.exports = 'index.js'; 71 | } 72 | }] 73 | }); 74 | 75 | var sandbox = box(); 76 | // console.log(code); 77 | vm.runInNewContext(code, sandbox); 78 | 79 | assert.equal(sandbox.exports, 'index.js'); 80 | }, 81 | 82 | 'can require() a file in a different package': function() { 83 | var code = createCode({ 84 | type: defaultOpts.type, 85 | 'root-file': 'index.js', 86 | 'global-require': false, 87 | 'export': 'App', 88 | require: defaultOpts.require, 89 | 90 | packages: [{ 91 | "underscore": {"c":1,"m":"underscore.js"}, 92 | "index.js": function(module, exports, require){ 93 | module.exports = require('underscore'); 94 | } 95 | }, 96 | { 97 | "underscore.js": function(module, exports, require){ 98 | module.exports = 'Underscore'; 99 | } 100 | }] 101 | }); 102 | 103 | var sandbox = box(); 104 | //console.log(code); 105 | vm.runInNewContext(code, sandbox); 106 | 107 | assert.equal(sandbox.exports, 'Underscore'); 108 | 109 | }, 110 | 111 | 'try to use the previous require function for unknown modules': function() { 112 | var code = createCode({ 113 | type: defaultOpts.type, 114 | 'root-file': 'index.js', 115 | 'global-require': false, 116 | 'export': 'App', 117 | require: defaultOpts.require, 118 | 119 | packages: [{ 120 | "index.js": function(module, exports, require){ 121 | module.exports = require('foobar'); 122 | } 123 | }] 124 | }); 125 | 126 | var calls = 0; 127 | 128 | var sandbox = box(); 129 | sandbox.require = function() { 130 | calls++; 131 | return 'called'; 132 | }; 133 | vm.runInNewContext(code, sandbox); 134 | 135 | assert.equal(sandbox.exports, 'called'); 136 | assert.equal(calls, 1); 137 | }, 138 | 139 | 'can chain requires': function() { 140 | 141 | // first block should fall back to the root require() 142 | // first block should export a require() implementation 143 | // second block should export a require() implementation 144 | // second block should fall back to the first require() 145 | 146 | // this requires moving from code that runs inside the anon func 147 | // to moving to window.foo = (function() { return require('main'); }()); 148 | 149 | var code; 150 | 151 | // code = "function require(name) { console.log('ROOT: ' + name); return 'OK ' + name; };"; 152 | code = "function require(name) { return 'OK ' + name; };"; 153 | 154 | code += createCode({ 155 | type: defaultOpts.type, 156 | 'root-file': 'index.js', 157 | 
'global-require': true,
158 |       'export': 'App',
159 |       require: defaultOpts.require,
160 | 
161 |       packages: [{
162 |         "index.js": function(module, exports, require){
163 |           module.exports = require('abc');
164 |         }
165 |       }]
166 | 
167 |     });
168 | 
169 |     var calls = 0;
170 |     var sandbox = box();
171 | 
172 |     code += createCode({
173 |       type: defaultOpts.type,
174 |       'root-file': 'index.js',
175 |       'global-require': true,
176 |       'export': 'App',
177 |       require: defaultOpts.require,
178 | 
179 |       packages: [{
180 |         "index.js": function(module, exports, require){
181 |           module.exports = require('def');
182 |         }
183 |       }]
184 | 
185 |     });
186 | 
187 |     var sandbox2 = box();
188 | 
189 |     code += "result = require('foobar');";
190 | 
191 |     // console.log(code);
192 | 
193 |     vm.runInNewContext(code, sandbox2);
194 | 
195 |     assert.equal(sandbox2.result, 'OK foobar');
196 |   }
197 | 
198 |   // TODO: test various code paths when exporting a require function
199 |   // - require('./index.js')
200 |   // - require('innermodule')
201 |   // - require('./long/path/../foo.js')
202 | };
203 | }
204 | 
205 | // if this module is the script being run, then run the tests:
206 | if (module == require.main) {
207 |   var mocha = require('child_process').spawn('mocha', [
208 |     '--colors', '--ui', 'exports', '--reporter', 'spec', __filename
209 |   ]);
210 |   mocha.stderr.on('data', function (data) {
211 |     if (/^execvp\(\)/.test(data)) {
212 |       console.log('Failed to start child process. You need mocha: `npm install -g mocha`');
213 |     }
214 |   });
215 |   mocha.stdout.pipe(process.stdout);
216 |   mocha.stderr.pipe(process.stderr);
217 | }
218 | 
219 | 
--------------------------------------------------------------------------------
/test/runner/concat.test.js:
--------------------------------------------------------------------------------
1 | var util = require('util'),
2 |     List = require('minitask').list;
3 | 
4 | var concat = require('../../lib/runner/concat.js'),
5 |     list = new List();
6 | 
7 | list.add(__dirname+'/../fixtures/single-file/');
8 | 
9 | concat(list);
10 | 
11 | 
--------------------------------------------------------------------------------
/test/runner/package.test.js:
--------------------------------------------------------------------------------
1 | var util = require('util'),
2 |     List = require('minitask').list;
3 | 
4 | var task = require('../../lib/runner/commonjs'),
5 |     list = new List();
6 | 
7 | list.add(__dirname+'/../fixtures/complex-package/');
8 | 
9 | task(list, { basepath: __dirname+'/../fixtures/complex-package/' });
10 | 
11 | 
--------------------------------------------------------------------------------
/test/runner/server.test.js:
--------------------------------------------------------------------------------
1 | var util = require('util'),
2 |     List = require('minitask').list;
3 | 
4 | var task = require('../../lib/runner/static-server.js'),
5 |     list = new List();
6 | 
7 | list.add(__dirname+'/../fixtures/single-file/');
8 | 
9 | task(list);
10 | 
--------------------------------------------------------------------------------
/test/tmp/placeholder.txt:
--------------------------------------------------------------------------------
1 | placeholder for git
2 | 
--------------------------------------------------------------------------------
/test/tools/file-task.js:
--------------------------------------------------------------------------------
1 | var fs = require('fs'),
2 |     uglify = require('../../lib/file-tasks/uglify.js'),
3 |     wrap = require('../../lib/file-tasks/wrap-commonjs-web.js');
4 | 
5 | var tasks = [
6 |   wrap,
7 |   uglify,
8 |   process
9 | ];
10 | 
11 | var names = [ 'wrap', 'uglify', 'process' ];
12 | 
13 | var stream = { stdout: fs.createReadStream(__dirname + '/../fixtures/single-file/simple.js') };
14 | 
15 | tasks.forEach(function(task, i) {
16 |   var instance = task;
17 |   // item is either an object (e.g. process) with .stdout/.stdin
18 |   // or a function that returns an object
19 |   if(typeof instance == 'function') {
20 |     instance = task();
21 |   }
22 |   stream.stdout.pipe(instance.stdin);
23 |   // if there is a stderr, pipe it - this avoids issues where the task reports a failure on stderr
24 |   // and its stdout is not flushed due to buffering
25 |   if(instance.stderr) {
26 |     instance.stderr.pipe(process.stderr);
27 | 
28 |     instance.stderr.on('error', function() {
29 |       console.log('stderr.error', i, names[i]);
30 |     });
31 | 
32 |     instance.stderr.on('finish', function() {
33 |       console.log('stderr.finish', i, names[i]);
34 |     });
35 |   }
36 | 
37 |   if(instance.on) {
38 |     instance.on('error', function() {
39 |       console.log('instance.error', i, names[i]);
40 |     });
41 |     instance.on('exit', function(code) {
42 |       console.log('instance.exit', i, names[i], code);
43 |     });
44 |   }
45 | 
46 |   // writable side (stdin): error, finish and end events
47 | 
48 |   instance.stdin.on('error', function() {
49 |     console.log('stdin.error', i, names[i]);
50 |   });
51 | 
52 |   instance.stdin.on('finish', function() {
53 |     console.log('stdin.finish', i, names[i]);
54 |   });
55 | 
56 |   instance.stdin.on('end', function() {
57 |     console.log('stdin.end', i, names[i]);
58 |   });
59 | 
60 | 
61 |   // readable side (stdout): error, close and end events
62 | 
63 |   instance.stdout.on('error', function() {
64 |     console.log('stdout.error', i, names[i]);
65 |   });
66 | 
67 |   instance.stdout.on('close', function() {
68 |     console.log('stdout.close', i, names[i]);
69 |   });
70 | 
71 |   instance.stdout.on('end', function() {
72 |     console.log('stdout.end', i, names[i]);
73 |   });
74 | 
75 | 
76 |   stream = instance;
77 | });
78 | 
79 | 
80 | 
81 | 
82 | 
--------------------------------------------------------------------------------
/todo.md:
--------------------------------------------------------------------------------
1 | # What's new in v2.next
2 | 
3 | gluejs v2.next adds optional dependency parsing support:
4 | 
5 | - Passing `--parse` enables dependency parsing, which can figure out the full dependency graph given just a single entry point. More accurate exclusions may result in smaller output size but come with a performance cost: parsing dependencies is slow (closer to browserify's performance).
6 | - The caching system has been significantly improved, with some minor performance gains and reduced file handle usage.
7 | - The [browser field](https://gist.github.com/defunctzombie/4339901) in package.json files is supported via [browser-resolve](https://github.com/defunctzombie/node-browser-resolve).
8 | 
9 | -----
10 | 
11 | # Todo
12 | 
13 | - add `cache clean`
14 | - improve the autodetection code so that people don't need to supply a --main argument in default cases (e.g. when there is an index.js or there is just one file in the package)
15 | 
16 | ## use detective and amdetective
17 | 
18 | Steps:
19 | 
20 | - implement `--compare`: compares the trees from using the detective strategy vs the default strategy
21 | - apply compatibility fixes:
22 | - allow including directories using the detective strategy
23 | - apply later stage optimizations:
24 | - minimize the list of --replace shimmed modules in each package output based on the real set of dependencies from all files within that package
25 | - add core module shimming support
26 | - add support for replacing modules with other files (parsed)
27 | - add support for excluding dependents of a module
28 | - amd:
29 | - config.js loading
30 | - better resolve error reporting
31 | - resolve value overrides
32 | - use the almond shim (?). Alternative is to just output a bundle of define()s
33 | 
34 | Benefits:
35 | 
36 | - provides more accurate exclusion information (e.g. modules not connected from the main file can be ignored; files like package.json can often be safely excluded)
37 | - allows the user to only specify `--main` without any includes
38 | - allows us to auto-detect node_modules dependencies without explicit include management (making live reload possible / nice)
39 | - paves the way for efficient node core module support
40 | 
41 | Test cases:
42 | 
43 | - exclude unused file like package.json
44 | - include unmentioned file
45 | - include unmentioned module
46 | - apply .npmignore last
47 | - perf test: load a large directory a couple of hundred times and ensure caching works
48 | 
49 | ## Production / dev
50 | 
51 | - allow using the same code paths for production and dev
52 | - use a specific staging area folder
53 | - in dev, check the upstream folders for changes
54 | - in production, simply serve the staging area contents
55 | 
56 | ## Inclusion optimization
57 | 
58 | - Allow coercion of dependencies (e.g. backbone with underscore + plain underscore => one of each):
59 | - `--dedupe-modules` should use package.json data to reduce duplication
60 | - `--dedupe-force modulename` should force only a single instance of a module, in spite of conflicting package.json data
61 | 
62 | ## Tiny module optimizations
63 | 
64 | - add support for fully static resolution: given the full set of require() call dependencies, convert them into static lookups
65 | 
66 | ## Implicit global support
67 | 
68 | - can detect naively via regex
69 | 
70 | ## Docs todo
71 | 
72 | - need a good example of applying a transformation via the API
73 | - need a good example of using a transformation with the Express middleware
74 | - local dev server example post detective support
75 | - document the grunt task options that are available
76 | - Express middleware dev config example (e.g. if(DEV) { build() } else { express.static('..'); })
77 | - Using libraries with globals such as jQuery and underscore (and/or how to use --replace with big libs for greater efficiency)
78 | - how --replace works in this case
79 | - how cascading works when the desired target is a global var
80 | - how cascading works when the desired target is a require() function
81 | - how cascading works when the desired target is AMD
82 | - Making use of --amd with RequireJS
83 | 
84 | ## --cache-clean
85 | 
86 | TODO
87 | 
88 | `--cache-clean`: Clears the global cache folder by deleting it. This is always done before processing anything else.
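As a rough sketch (not the actual implementation), clearing the cache boils down to recursively deleting the cache directory before any other work starts; the `.gluejs-cache` location used below is only an assumed example:

    var fs = require('fs'),
        path = require('path');

    // assumed location of the global cache folder - the real path is an implementation detail
    var cacheDir = path.join(process.env.HOME || process.env.USERPROFILE, '.gluejs-cache');

    // recursively delete the cache folder and everything in it
    function removeDir(dir) {
      if (!fs.existsSync(dir)) { return; }
      fs.readdirSync(dir).forEach(function(name) {
        var item = path.join(dir, name);
        if (fs.statSync(item).isDirectory()) {
          removeDir(item);
        } else {
          fs.unlinkSync(item);
        }
      });
      fs.rmdirSync(dir);
    }

    removeDir(cacheDir);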
89 | 
90 | # Known issues
91 | 
92 | - setting basepath to ./node_modules/foo causes the root to be empty rather than being based inside the package directory. E.g. you need to do require('foo') to get the result rather than require('./index.js');
93 | - replace('foo', 'window.foo') applies to all subdependencies indiscriminately. Need a better syntax to control this. Old behavior was to only replace top level dependencies.
94 | 
95 | ## --watch
96 | 
97 | TODO
98 | 
99 | Watch files for changes.
100 | 
101 | .watch(function(err, text){ ...}): renders and adds file watchers on the files.
102 | 
103 | Note that this API is a bit clunky:
104 | 
105 | there is no way to unwatch a file other than terminating the program
106 | on each watched file change, a console.log() message is shown
107 | the API uses fs.watchFile(), so you do not get notification of newly added files in directories; watches are registered on the files that were used on the first render
108 | But it works fine for automatically rebuilding, e.g. when doing development locally.
109 | 
110 | # Tasks
111 | 
112 | - A better big lib handling system (e.g. --external backbone --external underscore)
113 | - ETag support for build results (e.g. shortcutting repeated loads even further)
114 | - return a meaningful result from middleware if an error occurs
115 | (e.g. either a status code or perhaps even a snippet that prints the error into the page)
116 | - better metadata fetching from package.json
117 | - it should be possible to pre-filter the packages (before infer packages),
118 | so that devDependencies are not included in the processing
119 | - version numbers should be fetched
120 | - ability to remap paths
121 | - e.g. add a file to the root from outside the root, with a different virtual filename
122 | - e.g. swap out the content of a directory for the content of another one (shimming)
123 | 
124 | # Evaluation
125 | 
126 | - empirically based packaging / dynamic loading **
127 | - Source maps support
128 | - Fix issues with interrupted cached data
129 | - Remapping cross-environment dependencies / dependency injection
130 | - Mocking out dependencies during testing/runtime **
131 | - RequireJS to CommonJS conversion
132 | - Easier conventions for adding a module both on the client and server side, e.g. only a node_modules entry => client side
133 | 
134 | # Internals
135 | 
136 | Phases:
137 | 
138 | - acquire file paths (e.g. via calls to .include and .npm)
139 | => result is a tree { files: [] } (under basepath)
140 | => note that nothing is excluded here
141 | - filter file paths (e.g. remove excluded, remove .npmignore and so on)
142 | - attach tasks
143 | - run tasks
144 | - squash result (or save to a directory)
145 | 
146 | File tasks:
147 | 
148 | - run-shell (e.g. run uglify)
149 | - wrap-commonjs-web (e.g. wrap inside a function call)
150 | - wrap-commonjs-web-sourceurl
151 | - wrap-commonjs-amd (e.g. take CommonJS files, resolve their direct dependencies, wrap into an AMD module)
152 | - wrap-amd-commonjs-web (e.g. take an AMD file, look at the dependency declaration, convert that to a string, wrap into a web-commonjs module)
153 | 
154 | New features:
155 | 
156 | - vastly better conventions and support for writing dual-platform (browser + server) code that has some functions replaced with platform-specific equivalents
157 | - Mocha test server and conventions to make it easy to repackage and run your Mocha tests inside a browser
158 | - static file serving and Connect middleware
159 | - better AMD interoperability: hook into AMD if that's the system being used; optionally export a `require()` implementation
160 | 
161 | Minor, but cool features:
162 | 
163 | - interoperate with libraries that use globals by binding variables under window.*
164 | - continuous rebuild via watcher
165 | - build non-JS resources, for example compile Jade and Handlebars templates
166 | - sourceURL support
167 | - custom build task support
168 | - better support for .npmignore files and filtering in general
169 | - choice between throwing or returning undefined in the require shim
170 | 
171 | # How do I ...?
172 | 
173 | ## How do I package tests for the browser?
174 | 
175 | There is a built-in Mocha test server task, which takes a set of tests as input and creates a server specifically for running one or more tests.
176 | 
177 | If you're not using Mocha, you can still use the API to create a package and serve it (with test-framework wrapping).
178 | 
179 | ## How do I write dual-platform (browser + server) code using gluejs?
180 | 
181 | You have probably already been targeting both platforms - by writing code that can run in either environment. However, this is just one part of dual-platform coding.
182 | 
183 | Let me expand your horizon. All code falls into three categories:
184 | 
185 | - Code that works anywhere
186 | - Code that is native on one platform, and needs to be shimmed / emulated / translated into remote procedure calls on the other
187 | - Code that can only run on one platform
188 | 
189 | You've been writing code that works anywhere: code where you explicitly avoid using APIs that are not cross-platform, or where you use 3rd party modules to hide the native APIs.
190 | 
191 | The second category - code that is native to one platform (node / browser) and shimmed on the other - is what gluejs v2's conventions target. This is the basic idea:
192 | 
193 | - Think of a single file (module) as a set of exports. The core file needs to be code that works anywhere - for example, data validation code in a Model usually doesn't depend on any external native APIs.
194 | - Take the second category code - code that is native on one platform and shimmed on another - and split it into its own file. Now have that file require the reusable core, and extend that core.
195 | 
196 | Here is an example core file `./lib/common/user.js`:
197 | 
198 |     function User(age) { this.age = age; }
199 | 
200 |     User.prototype.validate = function() {
201 |       return this.age < 200 && this.age > 0;
202 |     };
203 | 
204 |     module.exports = User;
205 | 
206 | and a shim file that extends that core `./lib/user.js`:
207 | 
208 |     var fs = require('fs'),
209 |         User = require('./common/user.js');
210 | 
211 |     User.prototype.save = function() {
212 |       fs.writeFileSync(process.env.HOME + '/userdata.js', JSON.stringify({ age: this.age }));
213 |     };
214 | 
215 |     module.exports = User;
216 | 
217 | 
218 | 
219 | 
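For symmetry, the browser-side counterpart of that shim might look something like the sketch below. The `./lib/client/user.js` path and the localStorage key are illustrative only; how the two shims get swapped in (for example via the package.json browser field mentioned earlier) is left to the build configuration:

    // ./lib/client/user.js - illustrative browser-side shim (path and storage key are made up)
    var User = require('./common/user.js');

    User.prototype.save = function() {
      // persist in the browser instead of the filesystem
      localStorage.setItem('userdata', JSON.stringify({ age: this.age }));
    };

    module.exports = User;

--------------------------------------------------------------------------------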