├── .gitignore ├── .jshintrc ├── AUTHORS ├── CHANGELOG.md ├── CONTRIBUTING.md ├── History.md ├── LICENSE ├── README.md ├── addon.gypi ├── bin └── node-ninja.js ├── gyp ├── .gitignore ├── AUTHORS ├── DEPS ├── LICENSE ├── OWNERS ├── PRESUBMIT.py ├── buildbot │ ├── aosp_manifest.xml │ ├── buildbot_run.py │ └── commit_queue │ │ ├── OWNERS │ │ ├── README │ │ └── cq_config.json ├── codereview.settings ├── data │ └── win │ │ └── large-pdb-shim.cc ├── gyp ├── gyp.bat ├── gyp_main.py ├── gyptest.py ├── pylib │ └── gyp │ │ ├── MSVSNew.py │ │ ├── MSVSProject.py │ │ ├── MSVSSettings.py │ │ ├── MSVSSettings_test.py │ │ ├── MSVSToolFile.py │ │ ├── MSVSUserFile.py │ │ ├── MSVSUtil.py │ │ ├── MSVSVersion.py │ │ ├── __init__.py │ │ ├── common.py │ │ ├── common_test.py │ │ ├── easy_xml.py │ │ ├── easy_xml_test.py │ │ ├── flock_tool.py │ │ ├── generator │ │ ├── __init__.py │ │ ├── analyzer.py │ │ ├── android.py │ │ ├── cmake.py │ │ ├── dump_dependency_json.py │ │ ├── eclipse.py │ │ ├── gypd.py │ │ ├── gypsh.py │ │ ├── make.py │ │ ├── msvs.py │ │ ├── msvs_test.py │ │ ├── ninja.py │ │ ├── ninja_test.py │ │ ├── xcode.py │ │ └── xcode_test.py │ │ ├── input.py │ │ ├── input_test.py │ │ ├── mac_tool.py │ │ ├── msvs_emulation.py │ │ ├── ninja_syntax.py │ │ ├── ordered_dict.py │ │ ├── simple_copy.py │ │ ├── win_tool.py │ │ ├── xcode_emulation.py │ │ ├── xcode_ninja.py │ │ ├── xcodeproj_file.py │ │ └── xml_fix.py ├── samples │ ├── samples │ └── samples.bat ├── setup.py └── tools │ ├── README │ ├── Xcode │ ├── README │ └── Specifications │ │ ├── gyp.pbfilespec │ │ └── gyp.xclangspec │ ├── emacs │ ├── README │ ├── gyp-tests.el │ ├── gyp.el │ ├── run-unit-tests.sh │ └── testdata │ │ ├── media.gyp │ │ └── media.gyp.fontified │ ├── graphviz.py │ ├── pretty_gyp.py │ ├── pretty_sln.py │ └── pretty_vcproj.py ├── lib ├── build.js ├── clean.js ├── configure.js ├── find-node-directory.js ├── install.js ├── list.js ├── node-ninja.js ├── process-release.js ├── rebuild.js └── remove.js ├── package.json ├── 
src └── win_delay_load_hook.c └── test ├── docker.sh ├── fixtures ├── ca-bundle.crt ├── ca.crt ├── server.crt └── server.key ├── simple-proxy.js ├── test-download.js ├── test-find-node-directory.js ├── test-find-python.js ├── test-options.js └── test-process-release.js /.gitignore: -------------------------------------------------------------------------------- 1 | gyp/test 2 | node_modules 3 | test/.node-gyp 4 | -------------------------------------------------------------------------------- /.jshintrc: -------------------------------------------------------------------------------- 1 | { 2 | "asi": true, 3 | "laxcomma": true, 4 | "es5": true, 5 | "node": true, 6 | "strict": false 7 | } 8 | -------------------------------------------------------------------------------- /AUTHORS: -------------------------------------------------------------------------------- 1 | Nathan Rajlich 2 | Pieter Hintjens 3 | 4 | 5 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | v3.1.0 2015-11-14 2 | 3 | * [[`9049241f91`](https://github.com/nodejs/node-gyp/commit/9049241f91)] - **gyp**: don't use links at all, just copy the files instead (Nathan Zadoks) 4 | * [[`8ef90348d1`](https://github.com/nodejs/node-gyp/commit/8ef90348d1)] - **gyp**: apply https://codereview.chromium.org/11361103/ (Nathan Rajlich) 5 | * [[`a2ed0df84e`](https://github.com/nodejs/node-gyp/commit/a2ed0df84e)] - **gyp**: always install into $PRODUCT_DIR (Nathan Rajlich) 6 | * [[`cc8b2fa83e`](https://github.com/nodejs/node-gyp/commit/cc8b2fa83e)] - Update gyp to b3cef02. (Imran Iqbal) [#781](https://github.com/nodejs/node-gyp/pull/781) 7 | * [[`f5d86eb84e`](https://github.com/nodejs/node-gyp/commit/f5d86eb84e)] - Update to tar@2.0.0. 
(Edgar Muentes) [#797](https://github.com/nodejs/node-gyp/pull/797) 8 | * [[`2ac7de02c4`](https://github.com/nodejs/node-gyp/commit/2ac7de02c4)] - Fix infinite loop with zero-length options. (Ben Noordhuis) [#745](https://github.com/nodejs/node-gyp/pull/745) 9 | * [[`101bed639b`](https://github.com/nodejs/node-gyp/commit/101bed639b)] - This platform value came from debian package, and now the value (Jérémy Lal) [#738](https://github.com/nodejs/node-gyp/pull/738) 10 | 11 | v3.0.3 2015-09-14 12 | 13 | * [[`ad827cda30`](https://github.com/nodejs/node-gyp/commit/ad827cda30)] - tarballUrl global and && when checking for iojs (Lars-Magnus Skog) [#729](https://github.com/nodejs/node-gyp/pull/729) 14 | 15 | v3.0.2 2015-09-12 16 | 17 | * [[`6e8c3bf3c6`](https://github.com/nodejs/node-gyp/commit/6e8c3bf3c6)] - add back support for passing additional cmdline args (Rod Vagg) [#723](https://github.com/nodejs/node-gyp/pull/723) 18 | * [[`ff82f2f3b9`](https://github.com/nodejs/node-gyp/commit/ff82f2f3b9)] - fixed broken link in docs to Visual Studio 2013 download (simon-p-r) [#722](https://github.com/nodejs/node-gyp/pull/722) 19 | 20 | v3.0.1 2015-09-08 21 | 22 | * [[`846337e36b`](https://github.com/nodejs/node-gyp/commit/846337e36b)] - normalise versions for target == this comparison (Rod Vagg) [#716](https://github.com/nodejs/node-gyp/pull/716) 23 | 24 | v3.0.0 2015-09-08 25 | 26 | * [[`9720d0373c`](https://github.com/nodejs/node-gyp/commit/9720d0373c)] - remove node_modules from tree (Rod Vagg) [#711](https://github.com/nodejs/node-gyp/pull/711) 27 | * [[`6dcf220db7`](https://github.com/nodejs/node-gyp/commit/6dcf220db7)] - test version major directly, don't use semver.satisfies() (Rod Vagg) [#711](https://github.com/nodejs/node-gyp/pull/711) 28 | * [[`938dd18d1c`](https://github.com/nodejs/node-gyp/commit/938dd18d1c)] - refactor for clarity, fix dist-url, add env var dist-url functionality (Rod Vagg) [#711](https://github.com/nodejs/node-gyp/pull/711) 29 | * 
[[`9e9df66a06`](https://github.com/nodejs/node-gyp/commit/9e9df66a06)] - use process.release, make aware of io.js & node v4 differences (Rod Vagg) [#711](https://github.com/nodejs/node-gyp/pull/711) 30 | * [[`1ea7ed01f4`](https://github.com/nodejs/node-gyp/commit/1ea7ed01f4)] - **deps**: update graceful-fs dependency to the latest (Sakthipriyan Vairamani) [#714](https://github.com/nodejs/node-gyp/pull/714) 31 | * [[`0fbc387b35`](https://github.com/nodejs/node-gyp/commit/0fbc387b35)] - Update repository URLs. (Ben Noordhuis) [#715](https://github.com/nodejs/node-gyp/pull/715) 32 | * [[`bbedb8868b`](https://github.com/nodejs/node-gyp/commit/bbedb8868b)] - **(SEMVER-MAJOR)** **win**: enable delay-load hook by default (Jeremiah Senkpiel) [#708](https://github.com/nodejs/node-gyp/pull/708) 33 | * [[`85ed107565`](https://github.com/nodejs/node-gyp/commit/85ed107565)] - Merge pull request #664 from othiym23/othiym23/allow-semver-5 (Nathan Rajlich) 34 | * [[`0c720d234c`](https://github.com/nodejs/node-gyp/commit/0c720d234c)] - allow semver@5 (Forrest L Norvell) 35 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to This Project 2 | 3 | The contributors are listed in AUTHORS (add yourself). This project uses the MPL v2 license, see LICENSE. 4 | 5 | Please read these documents BEFORE you send a patch: 6 | 7 | * This project uses the [C4.1 (Collective Code Construction Contract)](http://rfc.zeromq.org/spec:22) process for contributions. Please read this if you are unfamiliar with it. 8 | 9 | -------------------------------------------------------------------------------- /History.md: -------------------------------------------------------------------------------- 1 | 0.2.0 / 2016/02/15 2 | ================== 3 | 4 | * Forked from node-gyp and fixed to workaround Gyp dependency path bug. 
5 | 6 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | node-ninja 2 | ========== 3 | 4 | ## Goals 5 | 6 | This is a fork of node-gyp, with the long term goal of removing the dependency on 7 | GYP and targeting the Ninja build system instead. 8 | 9 | The current version is different from node-gyp in these significant ways: 10 | 11 | * It has better support for native stacks with multiple gyp files. 12 | 13 | * It supports incremental builds when pre-building against multiple Node.js ABI versions, when using a package like `prebuild`. 14 | 15 | * It is licensed under Mozilla Public License version 2.0. 16 | 17 | * It uses the [C4](http://rfc.zeromq.org/spec:22) contribution process, meaning your patches will be accepted rapidly, if they conform to some basic rules. 18 | 19 | ## How to Contribute 20 | 21 | * Be a node-ninja user. 22 | * Read [CONTRIBUTING.md](CONTRIBUTING.md) for details. 23 | * Fork https://github.com/codejockey/node-ninja to your own account. 24 | * Solve small problems with minimal, neat solutions. 25 | * Send us your pull requests, and we will merge them. 26 | 27 | ### Node.js native addon build tool 28 | 29 | `node-ninja` is a cross-platform command-line tool written in Node.js for compiling 30 | native addon modules for Node.js. It bundles the [gyp](https://code.google.com/p/gyp/) 31 | project used by the Chromium team and takes away the pain of dealing with the 32 | various differences in build platforms. 33 | 34 | Multiple target versions of node are supported (i.e. `0.8`, `0.9`, `0.10`, ..., `1.0`, 35 | etc.), regardless of what version of node is actually installed on your system 36 | (`node-ninja` downloads the necessary development files for the target version). 
37 | 38 | #### Features: 39 | 40 | * Easy to use, consistent interface 41 | * Same commands to build your module on every platform 42 | * Supports multiple target versions of Node 43 | 44 | 45 | Installation 46 | ------------ 47 | 48 | You can install with `npm`: 49 | 50 | ``` bash 51 | $ npm install -g node-ninja 52 | ``` 53 | 54 | TODO: review this section, it is out of date. 55 | 56 | You will also need to install: 57 | 58 | * On Unix: 59 | * `python` (`v2.7` recommended, `v3.x.x` is __*not*__ supported) 60 | * `make` 61 | * A proper C/C++ compiler toolchain, like [GCC](https://gcc.gnu.org) 62 | * On Mac OS X: 63 | * `python` (`v2.7` recommended, `v3.x.x` is __*not*__ supported) (already installed on Mac OS X) 64 | * [Xcode](https://developer.apple.com/xcode/download/) 65 | * You also need to install the `Command Line Tools` via Xcode. You can find this under the menu `Xcode -> Preferences -> Downloads` 66 | * This step will install `gcc` and the related toolchain containing `make` 67 | * On Windows: 68 | * Python ([`v2.7.10`][python-v2.7.10] recommended, `v3.x.x` is __*not*__ supported) 69 | * Make sure that you have a PYTHON environment variable, and it is set to drive:\path\to\python.exe not to a folder 70 | * Windows XP/Vista/7: 71 | * Microsoft Visual Studio C++ 2013 ([Express][msvc2013] version works well) 72 | * If the install fails, try uninstalling any C++ 2010 x64&x86 Redistributable that you have installed first 73 | * If you get errors that the 64-bit compilers are not installed you may also need the [compiler update for the Windows SDK 7.1] 74 | * Windows 7/8/10: 75 | * Install the latest version of npm (3.3.6 at the time of writing) 76 | * Install Python 2.7 from https://www.python.org/download/releases/2.7/ and make sure its on the System Path 77 | * Install Visual Studio Community 2015 Edition. 
(Custom Install, Select Visual C++ during the installation) 78 | * Set the environment variable GYP_MSVS_VERSION=2015 79 | * Run the command prompt as Administrator 80 | * $ npm install (--msvs_version=2015) <-- Shouldn't be needed if you have set GYP_MSVS_VERSION env 81 | * All Windows Versions 82 | * For 64-bit builds of node and native modules you will _**also**_ need the [Windows 7 64-bit SDK][win7sdk] 83 | * You may need to run one of the following commands if your build complains about WindowsSDKDir not being set, and you are sure you have already installed the SDK: 84 | 85 | ``` 86 | call "C:\Program Files\Microsoft SDKs\Windows\v7.1\bin\Setenv.cmd" /Release /x86 87 | call "C:\Program Files\Microsoft SDKs\Windows\v7.1\bin\Setenv.cmd" /Release /x64 88 | ``` 89 | 90 | If you have multiple Python versions installed, you can identify which Python 91 | version `node-ninja` uses by setting the '--python' variable: 92 | 93 | ``` 94 | node-ninja --python /path/to/python2.7 95 | ``` 96 | 97 | If `node-ninja` is called by way of `npm` *and* you have multiple versions of 98 | Python installed, then you can set `npm`'s 'python' config key to the appropriate 99 | value: 100 | 101 | ``` 102 | npm config set python /path/to/executable/python2.7 103 | ``` 104 | 105 | Note that OS X is just a flavour of Unix and so needs `python`, `make`, and C/C++. 106 | An easy way to obtain these is to install XCode from Apple, 107 | and then use it to install the command line tools (under Preferences -> Downloads). 108 | 109 | How to Use 110 | ---------- 111 | 112 | To compile your native addon, first go to its root directory: 113 | 114 | ``` 115 | cd my_node_addon 116 | ``` 117 | 118 | The next step is to generate the appropriate project build files for the current 119 | platform. Use `configure` for that: 120 | 121 | ``` 122 | node-ninja configure 123 | ``` 124 | 125 | __Note__: The `configure` step looks for the `binding.gyp` file in the current 126 | directory to process. 
See below for instructions on creating the `binding.gyp` file. 127 | 128 | Now you will have either a `Makefile` (on Unix platforms) or a `vcxproj` file 129 | (on Windows) in the `build/` directory. Next invoke the `build` command: 130 | 131 | ``` 132 | node-ninja build 133 | ``` 134 | 135 | Now you have your compiled `.node` bindings file! The compiled bindings end up 136 | in `build/Debug/` or `build/Release/`, depending on the build mode. At this point 137 | you can require the `.node` file with Node and run your tests! 138 | 139 | __Note:__ To create a _Debug_ build of the bindings file, pass the `--debug` (or 140 | `-d`) switch when running either the `configure`, `build` or `rebuild` command. 141 | 142 | 143 | The "binding.gyp" file 144 | ---------------------- 145 | 146 | The `binding.gyp` file describes the configuration to build your module in [a JSON-like format](https://gyp.gsrc.io/docs/LanguageSpecification.md). This file gets placed in the root of 147 | your package, alongside the `package.json` file. 148 | 149 | A barebones `gyp` file appropriate for building a node addon looks like: 150 | 151 | ``` python 152 | # A minimal gyp file 153 | { 154 | "targets": [ 155 | { 156 | "target_name": "binding", 157 | "sources": [ "src/binding.cc" ], 158 | }, 159 | ], 160 | } 161 | ``` 162 | 163 | Note that the file format allows trailing commas, and `#` comments. 
164 | 165 | Commands 166 | -------- 167 | 168 | `node-ninja` accepts the following commands: 169 | 170 | | **Command** | **Description** 171 | |:--------------|:--------------------------------------------------------------- 172 | | `build` | Invokes `make`/`msbuild.exe` and builds the native addon 173 | | `clean` | Removes the `build` directory if it exists 174 | | `configure` | Generates project build files for the current platform 175 | | `rebuild` | Runs `clean`, `configure` and `build` all in a row 176 | | `install` | Installs node development header files for the given version 177 | | `list` | Lists the currently installed node development file versions 178 | | `remove` | Removes the node development header files for the given version 179 | 180 | `node-gyp` accepts the following command options: 181 | 182 | | **Option** | **Description** 183 | |:-------------------|:------------------------------------------------ 184 | | `-j n`, `--jobs n` | For `build`: run parallel builds 185 | | `--silly`, `--loglevel=silly` | Log all progress to console 186 | | `--verbose`, `--loglevel=verbose` | Log most progress to console 187 | | `--silent`, `--loglevel=silent` | Don't log anything 188 | | `--debug` | Make Debug build (default=Release) 189 | | `--release`, `--no-debug` | Make Release build 190 | | `--builddir x` | Build in this directory (default=build) 191 | 192 | License 193 | ------- 194 | Node-ninja is licensed under the Mozilla Public License version 2.0. See LICENSE. 
195 | 196 | Part of this code coming from node-gyp are licensed under the MIT license as follows: 197 | 198 | Copyright (c) 2012 Nathan Rajlich <nathan@tootallnate.net> 199 | 200 | Permission is hereby granted, free of charge, to any person obtaining 201 | a copy of this software and associated documentation files (the 202 | 'Software'), to deal in the Software without restriction, including 203 | without limitation the rights to use, copy, modify, merge, publish, 204 | distribute, sublicense, and/or sell copies of the Software, and to 205 | permit persons to whom the Software is furnished to do so, subject to 206 | the following conditions: 207 | 208 | The above copyright notice and this permission notice shall be 209 | included in all copies or substantial portions of the Software. 210 | 211 | THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, 212 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 213 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 214 | IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 215 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, 216 | TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 217 | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
218 | 219 | 220 | [python-v2.7.10]: https://www.python.org/downloads/release/python-2710/ 221 | [msvc2013]: https://www.microsoft.com/en-gb/download/details.aspx?id=44914 222 | [win7sdk]: https://www.microsoft.com/en-us/download/details.aspx?id=8279 223 | [compiler update for the Windows SDK 7.1]: https://www.microsoft.com/en-us/download/details.aspx?id=4422 224 | -------------------------------------------------------------------------------- /addon.gypi: -------------------------------------------------------------------------------- 1 | { 2 | 'target_defaults': { 3 | 'type': 'loadable_module', 4 | 'win_delay_load_hook': 'true', 5 | 'product_prefix': '', 6 | 7 | 'include_dirs': [ 8 | '<(node_root_dir)/include/node', 9 | '<(node_root_dir)/src', 10 | '<(node_root_dir)/deps/uv/include', 11 | '<(node_root_dir)/deps/v8/include' 12 | ], 13 | 'defines': [ 14 | 'NODE_GYP_MODULE_NAME=>(_target_name)' 15 | ], 16 | 17 | 'target_conditions': [ 18 | ['_type=="loadable_module"', { 19 | 'product_extension': 'node', 20 | 'defines': [ 21 | 'BUILDING_NODE_EXTENSION' 22 | ], 23 | 'xcode_settings': { 24 | 'OTHER_LDFLAGS': [ 25 | '-undefined dynamic_lookup' 26 | ], 27 | }, 28 | }], 29 | 30 | ['_type=="static_library"', { 31 | # set to `1` to *disable* the -T thin archive 'ld' flag. 32 | # older linkers don't support this flag. 33 | 'standalone_static_library': '<(standalone_static_library)' 34 | }], 35 | 36 | ['_win_delay_load_hook=="true"', { 37 | # If the addon specifies `'win_delay_load_hook': 'true'` in its 38 | # binding.gyp, link a delay-load hook into the DLL. This hook ensures 39 | # that the addon will work regardless of whether the node/iojs binary 40 | # is named node.exe, iojs.exe, or something else. 
41 | 'conditions': [ 42 | [ 'OS=="win"', { 43 | 'sources': [ 44 | '<(node_gyp_dir)/src/win_delay_load_hook.c', 45 | ], 46 | 'msvs_settings': { 47 | 'VCLinkerTool': { 48 | 'DelayLoadDLLs': [ 'iojs.exe', 'node.exe' ], 49 | # Don't print a linker warning when no imports from either .exe 50 | # are used. 51 | 'AdditionalOptions': [ '/ignore:4199' ], 52 | }, 53 | }, 54 | }], 55 | ], 56 | }], 57 | ], 58 | 59 | 'conditions': [ 60 | [ 'OS=="mac"', { 61 | 'defines': [ 62 | '_DARWIN_USE_64_BIT_INODE=1' 63 | ], 64 | 'xcode_settings': { 65 | 'DYLIB_INSTALL_NAME_BASE': '@rpath' 66 | }, 67 | }], 68 | [ 'OS=="aix"', { 69 | 'ldflags': [ 70 | '-Wl,-bimport:<(node_exp_file)' 71 | ], 72 | }], 73 | [ 'OS=="win"', { 74 | 'libraries': [ 75 | '-lkernel32.lib', 76 | '-luser32.lib', 77 | '-lgdi32.lib', 78 | '-lwinspool.lib', 79 | '-lcomdlg32.lib', 80 | '-ladvapi32.lib', 81 | '-lshell32.lib', 82 | '-lole32.lib', 83 | '-loleaut32.lib', 84 | '-luuid.lib', 85 | '-lodbc32.lib', 86 | '-lDelayImp.lib', 87 | '-l"<(node_root_dir)/$(ConfigurationName)/<(node_lib_file)"' 88 | ], 89 | 'msvs_disabled_warnings': [ 90 | # warning C4251: 'node::ObjectWrap::handle_' : class 'v8::Persistent' 91 | # needs to have dll-interface to be used by 92 | # clients of class 'node::ObjectWrap' 93 | 4251 94 | ], 95 | }, { 96 | # OS!="win" 97 | 'defines': [ 98 | '_LARGEFILE_SOURCE', 99 | '_FILE_OFFSET_BITS=64' 100 | ], 101 | }], 102 | [ 'OS in "freebsd openbsd netbsd solaris" or \ 103 | (OS=="linux" and target_arch!="ia32")', { 104 | 'cflags': [ '-fPIC' ], 105 | }] 106 | ] 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /bin/node-ninja.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | /** 4 | * Copyright (c) the Contributors as noted in the AUTHORS file. 5 | * This file is part of node-ninja. 6 | * 7 | * This Source Code Form is subject to the terms of the Mozilla Public 8 | * License, v. 2.0. 
If a copy of the MPL was not distributed with this 9 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 10 | */ 11 | 12 | /** 13 | * Set the title. 14 | */ 15 | 16 | process.title = 'node-ninja' 17 | 18 | /** 19 | * Module dependencies. 20 | */ 21 | 22 | var gyp = require('../') 23 | var log = require('npmlog') 24 | 25 | /** 26 | * Process and execute the selected commands. 27 | */ 28 | 29 | var prog = gyp() 30 | var completed = false 31 | prog.parseArgv(process.argv) 32 | 33 | if (prog.todo.length === 0) { 34 | if (~process.argv.indexOf('-v') || ~process.argv.indexOf('--version')) { 35 | console.log('v%s', prog.version) 36 | } else { 37 | console.log('%s', prog.usage()) 38 | } 39 | return process.exit(0) 40 | } 41 | 42 | log.info('it worked if it ends with', 'ok') 43 | log.verbose('cli', process.argv) 44 | log.info('using', 'node-ninja@%s', prog.version) 45 | log.info('using', 'node@%s | %s | %s', process.versions.node, process.platform, process.arch) 46 | 47 | 48 | /** 49 | * Change dir if -C/--directory was passed. 50 | */ 51 | 52 | var dir = prog.opts.directory 53 | if (dir) { 54 | var fs = require('fs') 55 | try { 56 | var stat = fs.statSync(dir) 57 | if (stat.isDirectory()) { 58 | log.info('chdir', dir) 59 | process.chdir(dir) 60 | } else { 61 | log.warn('chdir', dir + ' is not a directory') 62 | } 63 | } catch (e) { 64 | if (e.code === 'ENOENT') { 65 | log.warn('chdir', dir + ' is not a directory') 66 | } else { 67 | log.warn('chdir', 'error during chdir() "%s"', e.message) 68 | } 69 | } 70 | } 71 | 72 | function run () { 73 | var command = prog.todo.shift() 74 | if (!command) { 75 | // done! 
76 | completed = true 77 | log.info('ok') 78 | return 79 | } 80 | 81 | prog.commands[command.name](command.args, function (err) { 82 | if (err) { 83 | log.error(command.name + ' error') 84 | log.error('stack', err.stack) 85 | errorMessage() 86 | log.error('not ok') 87 | return process.exit(1) 88 | } 89 | if (command.name == 'list') { 90 | var versions = arguments[1] 91 | if (versions.length > 0) { 92 | versions.forEach(function (version) { 93 | console.log(version) 94 | }) 95 | } else { 96 | console.log('No node development files installed. Use `node-ninja install` to install a version.') 97 | } 98 | } else if (arguments.length >= 2) { 99 | console.log.apply(console, [].slice.call(arguments, 1)) 100 | } 101 | 102 | // now run the next command in the queue 103 | process.nextTick(run) 104 | }) 105 | } 106 | 107 | process.on('exit', function (code) { 108 | if (!completed && !code) { 109 | log.error('Completion callback never invoked!') 110 | issueMessage() 111 | process.exit(6) 112 | } 113 | }) 114 | 115 | process.on('uncaughtException', function (err) { 116 | log.error('UNCAUGHT EXCEPTION') 117 | log.error('stack', err.stack) 118 | issueMessage() 119 | process.exit(7) 120 | }) 121 | 122 | function errorMessage () { 123 | // copied from npm's lib/util/error-handler.js 124 | var os = require('os') 125 | log.error('System', os.type() + ' ' + os.release()) 126 | log.error('command', process.argv 127 | .map(JSON.stringify).join(' ')) 128 | log.error('cwd', process.cwd()) 129 | log.error('node -v', process.version) 130 | log.error('node-ninja -v', 'v' + prog.package.version) 131 | } 132 | 133 | function issueMessage () { 134 | errorMessage() 135 | log.error('', [ 'This is a bug in `node-ninja`.' 136 | , 'Try to update node-ninja and file an Issue if it does not help:' 137 | , ' ' 138 | ].join('\n')) 139 | } 140 | 141 | // start running the given commands! 
142 | run() 143 | -------------------------------------------------------------------------------- /gyp/.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | -------------------------------------------------------------------------------- /gyp/AUTHORS: -------------------------------------------------------------------------------- 1 | # Names should be added to this file like so: 2 | # Name or Organization 3 | 4 | Google Inc. 5 | Bloomberg Finance L.P. 6 | Yandex LLC 7 | 8 | Steven Knight 9 | Ryan Norton 10 | David J. Sankel 11 | Eric N. Vander Weele 12 | Tom Freudenberg 13 | -------------------------------------------------------------------------------- /gyp/DEPS: -------------------------------------------------------------------------------- 1 | # DEPS file for gclient use in buildbot execution of gyp tests. 2 | # 3 | # (You don't need to use gclient for normal GYP development work.) 4 | 5 | vars = { 6 | "chrome_trunk": "http://src.chromium.org/svn/trunk", 7 | "googlecode_url": "http://%s.googlecode.com/svn", 8 | } 9 | 10 | deps = { 11 | } 12 | 13 | deps_os = { 14 | "win": { 15 | "third_party/cygwin": 16 | Var("chrome_trunk") + "/deps/third_party/cygwin@66844", 17 | 18 | "third_party/python_26": 19 | Var("chrome_trunk") + "/tools/third_party/python_26@89111", 20 | 21 | "src/third_party/pefile": 22 | (Var("googlecode_url") % "pefile") + "/trunk@63", 23 | }, 24 | } 25 | -------------------------------------------------------------------------------- /gyp/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2009 Google Inc. All rights reserved. 2 | 3 | Redistribution and use in source and binary forms, with or without 4 | modification, are permitted provided that the following conditions are 5 | met: 6 | 7 | * Redistributions of source code must retain the above copyright 8 | notice, this list of conditions and the following disclaimer. 
9 | * Redistributions in binary form must reproduce the above 10 | copyright notice, this list of conditions and the following disclaimer 11 | in the documentation and/or other materials provided with the 12 | distribution. 13 | * Neither the name of Google Inc. nor the names of its 14 | contributors may be used to endorse or promote products derived from 15 | this software without specific prior written permission. 16 | 17 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 18 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 19 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 20 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 21 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 22 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 23 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 24 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 25 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 26 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 27 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 28 | -------------------------------------------------------------------------------- /gyp/OWNERS: -------------------------------------------------------------------------------- 1 | * 2 | -------------------------------------------------------------------------------- /gyp/PRESUBMIT.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2012 Google Inc. All rights reserved. 2 | # Use of this source code is governed by a BSD-style license that can be 3 | # found in the LICENSE file. 4 | 5 | 6 | """Top-level presubmit script for GYP. 7 | 8 | See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts 9 | for more details about the presubmit API built into gcl. 
10 | """ 11 | 12 | 13 | PYLINT_BLACKLIST = [ 14 | # TODO: fix me. 15 | # From SCons, not done in google style. 16 | 'test/lib/TestCmd.py', 17 | 'test/lib/TestCommon.py', 18 | 'test/lib/TestGyp.py', 19 | ] 20 | 21 | 22 | PYLINT_DISABLED_WARNINGS = [ 23 | # TODO: fix me. 24 | # Many tests include modules they don't use. 25 | 'W0611', 26 | # Possible unbalanced tuple unpacking with sequence. 27 | 'W0632', 28 | # Attempting to unpack a non-sequence. 29 | 'W0633', 30 | # Include order doesn't properly include local files? 31 | 'F0401', 32 | # Some use of built-in names. 33 | 'W0622', 34 | # Some unused variables. 35 | 'W0612', 36 | # Operator not preceded/followed by space. 37 | 'C0323', 38 | 'C0322', 39 | # Unnecessary semicolon. 40 | 'W0301', 41 | # Unused argument. 42 | 'W0613', 43 | # String has no effect (docstring in wrong place). 44 | 'W0105', 45 | # map/filter on lambda could be replaced by comprehension. 46 | 'W0110', 47 | # Use of eval. 48 | 'W0123', 49 | # Comma not followed by space. 50 | 'C0324', 51 | # Access to a protected member. 52 | 'W0212', 53 | # Bad indent. 54 | 'W0311', 55 | # Line too long. 56 | 'C0301', 57 | # Undefined variable. 58 | 'E0602', 59 | # Not exception type specified. 60 | 'W0702', 61 | # No member of that name. 62 | 'E1101', 63 | # Dangerous default {}. 64 | 'W0102', 65 | # Cyclic import. 66 | 'R0401', 67 | # Others, too many to sort. 68 | 'W0201', 'W0232', 'E1103', 'W0621', 'W0108', 'W0223', 'W0231', 69 | 'R0201', 'E0101', 'C0321', 70 | # ************* Module copy 71 | # W0104:427,12:_test.odict.__setitem__: Statement seems to have no effect 72 | 'W0104', 73 | ] 74 | 75 | 76 | def CheckChangeOnUpload(input_api, output_api): 77 | report = [] 78 | report.extend(input_api.canned_checks.PanProjectChecks( 79 | input_api, output_api)) 80 | return report 81 | 82 | 83 | def CheckChangeOnCommit(input_api, output_api): 84 | report = [] 85 | 86 | # Accept any year number from 2009 to the current year. 
87 | current_year = int(input_api.time.strftime('%Y')) 88 | allowed_years = (str(s) for s in reversed(xrange(2009, current_year + 1))) 89 | years_re = '(' + '|'.join(allowed_years) + ')' 90 | 91 | # The (c) is deprecated, but tolerate it until it's removed from all files. 92 | license = ( 93 | r'.*? Copyright (\(c\) )?%(year)s Google Inc\. All rights reserved\.\n' 94 | r'.*? Use of this source code is governed by a BSD-style license that ' 95 | r'can be\n' 96 | r'.*? found in the LICENSE file\.\n' 97 | ) % { 98 | 'year': years_re, 99 | } 100 | 101 | report.extend(input_api.canned_checks.PanProjectChecks( 102 | input_api, output_api, license_header=license)) 103 | report.extend(input_api.canned_checks.CheckTreeIsOpen( 104 | input_api, output_api, 105 | 'http://gyp-status.appspot.com/status', 106 | 'http://gyp-status.appspot.com/current')) 107 | 108 | import os 109 | import sys 110 | old_sys_path = sys.path 111 | try: 112 | sys.path = ['pylib', 'test/lib'] + sys.path 113 | blacklist = PYLINT_BLACKLIST 114 | if sys.platform == 'win32': 115 | blacklist = [os.path.normpath(x).replace('\\', '\\\\') 116 | for x in PYLINT_BLACKLIST] 117 | report.extend(input_api.canned_checks.RunPylint( 118 | input_api, 119 | output_api, 120 | black_list=blacklist, 121 | disabled_warnings=PYLINT_DISABLED_WARNINGS)) 122 | finally: 123 | sys.path = old_sys_path 124 | return report 125 | 126 | 127 | TRYBOTS = [ 128 | 'linux_try', 129 | 'mac_try', 130 | 'win_try', 131 | ] 132 | 133 | 134 | def GetPreferredTryMasters(_, change): 135 | return { 136 | 'client.gyp': { t: set(['defaulttests']) for t in TRYBOTS }, 137 | } 138 | -------------------------------------------------------------------------------- /gyp/buildbot/buildbot_run.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Copyright (c) 2012 Google Inc. All rights reserved. 
3 | # Use of this source code is governed by a BSD-style license that can be 4 | # found in the LICENSE file. 5 | 6 | """Argument-less script to select what to run on the buildbots.""" 7 | 8 | import os 9 | import shutil 10 | import subprocess 11 | import sys 12 | 13 | 14 | BUILDBOT_DIR = os.path.dirname(os.path.abspath(__file__)) 15 | TRUNK_DIR = os.path.dirname(BUILDBOT_DIR) 16 | ROOT_DIR = os.path.dirname(TRUNK_DIR) 17 | CMAKE_DIR = os.path.join(ROOT_DIR, 'cmake') 18 | CMAKE_BIN_DIR = os.path.join(CMAKE_DIR, 'bin') 19 | OUT_DIR = os.path.join(TRUNK_DIR, 'out') 20 | 21 | 22 | def CallSubProcess(*args, **kwargs): 23 | """Wrapper around subprocess.call which treats errors as build exceptions.""" 24 | with open(os.devnull) as devnull_fd: 25 | retcode = subprocess.call(stdin=devnull_fd, *args, **kwargs) 26 | if retcode != 0: 27 | print '@@@STEP_EXCEPTION@@@' 28 | sys.exit(1) 29 | 30 | 31 | def PrepareCmake(): 32 | """Build CMake 2.8.8 since the version in Precise is 2.8.7.""" 33 | if os.environ['BUILDBOT_CLOBBER'] == '1': 34 | print '@@@BUILD_STEP Clobber CMake checkout@@@' 35 | shutil.rmtree(CMAKE_DIR) 36 | 37 | # We always build CMake 2.8.8, so no need to do anything 38 | # if the directory already exists. 39 | if os.path.isdir(CMAKE_DIR): 40 | return 41 | 42 | print '@@@BUILD_STEP Initialize CMake checkout@@@' 43 | os.mkdir(CMAKE_DIR) 44 | 45 | print '@@@BUILD_STEP Sync CMake@@@' 46 | CallSubProcess( 47 | ['git', 'clone', 48 | '--depth', '1', 49 | '--single-branch', 50 | '--branch', 'v2.8.8', 51 | '--', 52 | 'git://cmake.org/cmake.git', 53 | CMAKE_DIR], 54 | cwd=CMAKE_DIR) 55 | 56 | print '@@@BUILD_STEP Build CMake@@@' 57 | CallSubProcess( 58 | ['/bin/bash', 'bootstrap', '--prefix=%s' % CMAKE_DIR], 59 | cwd=CMAKE_DIR) 60 | 61 | CallSubProcess( ['make', 'cmake'], cwd=CMAKE_DIR) 62 | 63 | 64 | def GypTestFormat(title, format=None, msvs_version=None, tests=[]): 65 | """Run the gyp tests for a given format, emitting annotator tags. 
66 | 67 | See annotator docs at: 68 | https://sites.google.com/a/chromium.org/dev/developers/testing/chromium-build-infrastructure/buildbot-annotations 69 | Args: 70 | format: gyp format to test. 71 | Returns: 72 | 0 for sucesss, 1 for failure. 73 | """ 74 | if not format: 75 | format = title 76 | 77 | print '@@@BUILD_STEP ' + title + '@@@' 78 | sys.stdout.flush() 79 | env = os.environ.copy() 80 | if msvs_version: 81 | env['GYP_MSVS_VERSION'] = msvs_version 82 | command = ' '.join( 83 | [sys.executable, 'gyp/gyptest.py', 84 | '--all', 85 | '--passed', 86 | '--format', format, 87 | '--path', CMAKE_BIN_DIR, 88 | '--chdir', 'gyp'] + tests) 89 | retcode = subprocess.call(command, cwd=ROOT_DIR, env=env, shell=True) 90 | if retcode: 91 | # Emit failure tag, and keep going. 92 | print '@@@STEP_FAILURE@@@' 93 | return 1 94 | return 0 95 | 96 | 97 | def GypBuild(): 98 | # Dump out/ directory. 99 | print '@@@BUILD_STEP cleanup@@@' 100 | print 'Removing %s...' % OUT_DIR 101 | shutil.rmtree(OUT_DIR, ignore_errors=True) 102 | print 'Done.' 
103 | 104 | retcode = 0 105 | if sys.platform.startswith('linux'): 106 | retcode += GypTestFormat('ninja') 107 | retcode += GypTestFormat('make') 108 | PrepareCmake() 109 | retcode += GypTestFormat('cmake') 110 | elif sys.platform == 'darwin': 111 | retcode += GypTestFormat('ninja') 112 | retcode += GypTestFormat('xcode') 113 | retcode += GypTestFormat('make') 114 | elif sys.platform == 'win32': 115 | retcode += GypTestFormat('ninja') 116 | if os.environ['BUILDBOT_BUILDERNAME'] == 'gyp-win64': 117 | retcode += GypTestFormat('msvs-ninja-2013', format='msvs-ninja', 118 | msvs_version='2013', 119 | tests=[ 120 | r'test\generator-output\gyptest-actions.py', 121 | r'test\generator-output\gyptest-relocate.py', 122 | r'test\generator-output\gyptest-rules.py']) 123 | retcode += GypTestFormat('msvs-2013', format='msvs', msvs_version='2013') 124 | else: 125 | raise Exception('Unknown platform') 126 | if retcode: 127 | # TODO(bradnelson): once the annotator supports a postscript (section for 128 | # after the build proper that could be used for cumulative failures), 129 | # use that instead of this. This isolates the final return value so 130 | # that it isn't misattributed to the last stage. 131 | print '@@@BUILD_STEP failures@@@' 132 | sys.exit(retcode) 133 | 134 | 135 | if __name__ == '__main__': 136 | GypBuild() 137 | -------------------------------------------------------------------------------- /gyp/buildbot/commit_queue/OWNERS: -------------------------------------------------------------------------------- 1 | set noparent 2 | bradnelson@chromium.org 3 | bradnelson@google.com 4 | iannucci@chromium.org 5 | scottmg@chromium.org 6 | thakis@chromium.org 7 | -------------------------------------------------------------------------------- /gyp/buildbot/commit_queue/README: -------------------------------------------------------------------------------- 1 | cq_config.json describes the trybots that must pass in order 2 | to land a change through the commit queue. 
3 | Comments are here as the file is strictly JSON. 4 | -------------------------------------------------------------------------------- /gyp/buildbot/commit_queue/cq_config.json: -------------------------------------------------------------------------------- 1 | { 2 | "trybots": { 3 | "launched": { 4 | "tryserver.nacl": { 5 | "gyp-presubmit": ["defaulttests"], 6 | "gyp-linux": ["defaulttests"], 7 | "gyp-mac": ["defaulttests"], 8 | "gyp-win32": ["defaulttests"], 9 | "gyp-win64": ["defaulttests"] 10 | } 11 | }, 12 | "triggered": { 13 | } 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /gyp/codereview.settings: -------------------------------------------------------------------------------- 1 | # This file is used by gcl to get repository specific information. 2 | CODE_REVIEW_SERVER: codereview.chromium.org 3 | CC_LIST: gyp-developer@googlegroups.com 4 | VIEW_VC: https://chromium.googlesource.com/external/gyp/+/ 5 | TRY_ON_UPLOAD: False 6 | TRYSERVER_PROJECT: gyp 7 | TRYSERVER_PATCHLEVEL: 1 8 | TRYSERVER_ROOT: gyp 9 | TRYSERVER_SVN_URL: svn://svn.chromium.org/chrome-try/try-nacl 10 | PROJECT: gyp 11 | -------------------------------------------------------------------------------- /gyp/data/win/large-pdb-shim.cc: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2013 Google Inc. All rights reserved. 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 4 | 5 | // This file is used to generate an empty .pdb -- with a 4KB pagesize -- that is 6 | // then used during the final link for modules that have large PDBs. Otherwise, 7 | // the linker will generate a pdb with a page size of 1KB, which imposes a limit 8 | // of 1GB on the .pdb. By generating an initial empty .pdb with the compiler 9 | // (rather than the linker), this limit is avoided. With this in place PDBs may 10 | // grow to 2GB. 
11 | // 12 | // This file is referenced by the msvs_large_pdb mechanism in MSVSUtil.py. 13 | -------------------------------------------------------------------------------- /gyp/gyp: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Copyright 2013 The Chromium Authors. All rights reserved. 3 | # Use of this source code is governed by a BSD-style license that can be 4 | # found in the LICENSE file. 5 | 6 | set -e 7 | base=$(dirname "$0") 8 | exec python "${base}/gyp_main.py" "$@" 9 | -------------------------------------------------------------------------------- /gyp/gyp.bat: -------------------------------------------------------------------------------- 1 | @rem Copyright (c) 2009 Google Inc. All rights reserved. 2 | @rem Use of this source code is governed by a BSD-style license that can be 3 | @rem found in the LICENSE file. 4 | 5 | @python "%~dp0gyp_main.py" %* 6 | -------------------------------------------------------------------------------- /gyp/gyp_main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # Copyright (c) 2009 Google Inc. All rights reserved. 4 | # Use of this source code is governed by a BSD-style license that can be 5 | # found in the LICENSE file. 6 | 7 | import os 8 | import sys 9 | 10 | # Make sure we're using the version of pylib in this repo, not one installed 11 | # elsewhere on the system. 12 | sys.path.insert(0, os.path.join(os.path.dirname(sys.argv[0]), 'pylib')) 13 | import gyp 14 | 15 | if __name__ == '__main__': 16 | sys.exit(gyp.script_main()) 17 | -------------------------------------------------------------------------------- /gyp/gyptest.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # Copyright (c) 2012 Google Inc. All rights reserved. 
4 | # Use of this source code is governed by a BSD-style license that can be 5 | # found in the LICENSE file. 6 | 7 | __doc__ = """ 8 | gyptest.py -- test runner for GYP tests. 9 | """ 10 | 11 | import os 12 | import optparse 13 | import subprocess 14 | import sys 15 | 16 | class CommandRunner(object): 17 | """ 18 | Executor class for commands, including "commands" implemented by 19 | Python functions. 20 | """ 21 | verbose = True 22 | active = True 23 | 24 | def __init__(self, dictionary={}): 25 | self.subst_dictionary(dictionary) 26 | 27 | def subst_dictionary(self, dictionary): 28 | self._subst_dictionary = dictionary 29 | 30 | def subst(self, string, dictionary=None): 31 | """ 32 | Substitutes (via the format operator) the values in the specified 33 | dictionary into the specified command. 34 | 35 | The command can be an (action, string) tuple. In all cases, we 36 | perform substitution on strings and don't worry if something isn't 37 | a string. (It's probably a Python function to be executed.) 38 | """ 39 | if dictionary is None: 40 | dictionary = self._subst_dictionary 41 | if dictionary: 42 | try: 43 | string = string % dictionary 44 | except TypeError: 45 | pass 46 | return string 47 | 48 | def display(self, command, stdout=None, stderr=None): 49 | if not self.verbose: 50 | return 51 | if type(command) == type(()): 52 | func = command[0] 53 | args = command[1:] 54 | s = '%s(%s)' % (func.__name__, ', '.join(map(repr, args))) 55 | if type(command) == type([]): 56 | # TODO: quote arguments containing spaces 57 | # TODO: handle meta characters? 58 | s = ' '.join(command) 59 | else: 60 | s = self.subst(command) 61 | if not s.endswith('\n'): 62 | s += '\n' 63 | sys.stdout.write(s) 64 | sys.stdout.flush() 65 | 66 | def execute(self, command, stdout=None, stderr=None): 67 | """ 68 | Executes a single command. 
69 | """ 70 | if not self.active: 71 | return 0 72 | if type(command) == type(''): 73 | command = self.subst(command) 74 | cmdargs = shlex.split(command) 75 | if cmdargs[0] == 'cd': 76 | command = (os.chdir,) + tuple(cmdargs[1:]) 77 | if type(command) == type(()): 78 | func = command[0] 79 | args = command[1:] 80 | return func(*args) 81 | else: 82 | if stdout is sys.stdout: 83 | # Same as passing sys.stdout, except python2.4 doesn't fail on it. 84 | subout = None 85 | else: 86 | # Open pipe for anything else so Popen works on python2.4. 87 | subout = subprocess.PIPE 88 | if stderr is sys.stderr: 89 | # Same as passing sys.stderr, except python2.4 doesn't fail on it. 90 | suberr = None 91 | elif stderr is None: 92 | # Merge with stdout if stderr isn't specified. 93 | suberr = subprocess.STDOUT 94 | else: 95 | # Open pipe for anything else so Popen works on python2.4. 96 | suberr = subprocess.PIPE 97 | p = subprocess.Popen(command, 98 | shell=(sys.platform == 'win32'), 99 | stdout=subout, 100 | stderr=suberr) 101 | p.wait() 102 | if stdout is None: 103 | self.stdout = p.stdout.read() 104 | elif stdout is not sys.stdout: 105 | stdout.write(p.stdout.read()) 106 | if stderr not in (None, sys.stderr): 107 | stderr.write(p.stderr.read()) 108 | return p.returncode 109 | 110 | def run(self, command, display=None, stdout=None, stderr=None): 111 | """ 112 | Runs a single command, displaying it first. 
113 | """ 114 | if display is None: 115 | display = command 116 | self.display(display) 117 | return self.execute(command, stdout, stderr) 118 | 119 | 120 | class Unbuffered(object): 121 | def __init__(self, fp): 122 | self.fp = fp 123 | def write(self, arg): 124 | self.fp.write(arg) 125 | self.fp.flush() 126 | def __getattr__(self, attr): 127 | return getattr(self.fp, attr) 128 | 129 | sys.stdout = Unbuffered(sys.stdout) 130 | sys.stderr = Unbuffered(sys.stderr) 131 | 132 | 133 | def is_test_name(f): 134 | return f.startswith('gyptest') and f.endswith('.py') 135 | 136 | 137 | def find_all_gyptest_files(directory): 138 | result = [] 139 | for root, dirs, files in os.walk(directory): 140 | if '.svn' in dirs: 141 | dirs.remove('.svn') 142 | result.extend([ os.path.join(root, f) for f in files if is_test_name(f) ]) 143 | result.sort() 144 | return result 145 | 146 | 147 | def main(argv=None): 148 | if argv is None: 149 | argv = sys.argv 150 | 151 | usage = "gyptest.py [-ahlnq] [-f formats] [test ...]" 152 | parser = optparse.OptionParser(usage=usage) 153 | parser.add_option("-a", "--all", action="store_true", 154 | help="run all tests") 155 | parser.add_option("-C", "--chdir", action="store", default=None, 156 | help="chdir to the specified directory") 157 | parser.add_option("-f", "--format", action="store", default='', 158 | help="run tests with the specified formats") 159 | parser.add_option("-G", '--gyp_option', action="append", default=[], 160 | help="Add -G options to the gyp command line") 161 | parser.add_option("-l", "--list", action="store_true", 162 | help="list available tests and exit") 163 | parser.add_option("-n", "--no-exec", action="store_true", 164 | help="no execute, just print the command line") 165 | parser.add_option("--passed", action="store_true", 166 | help="report passed tests") 167 | parser.add_option("--path", action="append", default=[], 168 | help="additional $PATH directory") 169 | parser.add_option("-q", "--quiet", action="store_true", 
170 | help="quiet, don't print test command lines") 171 | opts, args = parser.parse_args(argv[1:]) 172 | 173 | if opts.chdir: 174 | os.chdir(opts.chdir) 175 | 176 | if opts.path: 177 | extra_path = [os.path.abspath(p) for p in opts.path] 178 | extra_path = os.pathsep.join(extra_path) 179 | os.environ['PATH'] = extra_path + os.pathsep + os.environ['PATH'] 180 | 181 | if not args: 182 | if not opts.all: 183 | sys.stderr.write('Specify -a to get all tests.\n') 184 | return 1 185 | args = ['test'] 186 | 187 | tests = [] 188 | for arg in args: 189 | if os.path.isdir(arg): 190 | tests.extend(find_all_gyptest_files(os.path.normpath(arg))) 191 | else: 192 | if not is_test_name(os.path.basename(arg)): 193 | print >>sys.stderr, arg, 'is not a valid gyp test name.' 194 | sys.exit(1) 195 | tests.append(arg) 196 | 197 | if opts.list: 198 | for test in tests: 199 | print test 200 | sys.exit(0) 201 | 202 | CommandRunner.verbose = not opts.quiet 203 | CommandRunner.active = not opts.no_exec 204 | cr = CommandRunner() 205 | 206 | os.environ['PYTHONPATH'] = os.path.abspath('test/lib') 207 | if not opts.quiet: 208 | sys.stdout.write('PYTHONPATH=%s\n' % os.environ['PYTHONPATH']) 209 | 210 | passed = [] 211 | failed = [] 212 | no_result = [] 213 | 214 | if opts.format: 215 | format_list = opts.format.split(',') 216 | else: 217 | # TODO: not duplicate this mapping from pylib/gyp/__init__.py 218 | format_list = { 219 | 'aix5': ['make'], 220 | 'freebsd7': ['make'], 221 | 'freebsd8': ['make'], 222 | 'openbsd5': ['make'], 223 | 'cygwin': ['msvs'], 224 | 'win32': ['msvs', 'ninja'], 225 | 'linux2': ['make', 'ninja'], 226 | 'linux3': ['make', 'ninja'], 227 | 'darwin': ['make', 'ninja', 'xcode', 'xcode-ninja'], 228 | }[sys.platform] 229 | 230 | for format in format_list: 231 | os.environ['TESTGYP_FORMAT'] = format 232 | if not opts.quiet: 233 | sys.stdout.write('TESTGYP_FORMAT=%s\n' % format) 234 | 235 | gyp_options = [] 236 | for option in opts.gyp_option: 237 | gyp_options += ['-G', option] 
238 | if gyp_options and not opts.quiet: 239 | sys.stdout.write('Extra Gyp options: %s\n' % gyp_options) 240 | 241 | for test in tests: 242 | status = cr.run([sys.executable, test] + gyp_options, 243 | stdout=sys.stdout, 244 | stderr=sys.stderr) 245 | if status == 2: 246 | no_result.append(test) 247 | elif status: 248 | failed.append(test) 249 | else: 250 | passed.append(test) 251 | 252 | if not opts.quiet: 253 | def report(description, tests): 254 | if tests: 255 | if len(tests) == 1: 256 | sys.stdout.write("\n%s the following test:\n" % description) 257 | else: 258 | fmt = "\n%s the following %d tests:\n" 259 | sys.stdout.write(fmt % (description, len(tests))) 260 | sys.stdout.write("\t" + "\n\t".join(tests) + "\n") 261 | 262 | if opts.passed: 263 | report("Passed", passed) 264 | report("Failed", failed) 265 | report("No result from", no_result) 266 | 267 | if failed: 268 | return 1 269 | else: 270 | return 0 271 | 272 | 273 | if __name__ == "__main__": 274 | sys.exit(main()) 275 | -------------------------------------------------------------------------------- /gyp/pylib/gyp/MSVSProject.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2012 Google Inc. All rights reserved. 2 | # Use of this source code is governed by a BSD-style license that can be 3 | # found in the LICENSE file. 4 | 5 | """Visual Studio project reader/writer.""" 6 | 7 | import gyp.common 8 | import gyp.easy_xml as easy_xml 9 | 10 | #------------------------------------------------------------------------------ 11 | 12 | 13 | class Tool(object): 14 | """Visual Studio tool.""" 15 | 16 | def __init__(self, name, attrs=None): 17 | """Initializes the tool. 18 | 19 | Args: 20 | name: Tool name. 21 | attrs: Dict of tool attributes; may be None. 22 | """ 23 | self._attrs = attrs or {} 24 | self._attrs['Name'] = name 25 | 26 | def _GetSpecification(self): 27 | """Creates an element for the tool. 
28 | 29 | Returns: 30 | A new xml.dom.Element for the tool. 31 | """ 32 | return ['Tool', self._attrs] 33 | 34 | class Filter(object): 35 | """Visual Studio filter - that is, a virtual folder.""" 36 | 37 | def __init__(self, name, contents=None): 38 | """Initializes the folder. 39 | 40 | Args: 41 | name: Filter (folder) name. 42 | contents: List of filenames and/or Filter objects contained. 43 | """ 44 | self.name = name 45 | self.contents = list(contents or []) 46 | 47 | 48 | #------------------------------------------------------------------------------ 49 | 50 | 51 | class Writer(object): 52 | """Visual Studio XML project writer.""" 53 | 54 | def __init__(self, project_path, version, name, guid=None, platforms=None): 55 | """Initializes the project. 56 | 57 | Args: 58 | project_path: Path to the project file. 59 | version: Format version to emit. 60 | name: Name of the project. 61 | guid: GUID to use for project, if not None. 62 | platforms: Array of string, the supported platforms. If null, ['Win32'] 63 | """ 64 | self.project_path = project_path 65 | self.version = version 66 | self.name = name 67 | self.guid = guid 68 | 69 | # Default to Win32 for platforms. 70 | if not platforms: 71 | platforms = ['Win32'] 72 | 73 | # Initialize the specifications of the various sections. 74 | self.platform_section = ['Platforms'] 75 | for platform in platforms: 76 | self.platform_section.append(['Platform', {'Name': platform}]) 77 | self.tool_files_section = ['ToolFiles'] 78 | self.configurations_section = ['Configurations'] 79 | self.files_section = ['Files'] 80 | 81 | # Keep a dict keyed on filename to speed up access. 82 | self.files_dict = dict() 83 | 84 | def AddToolFile(self, path): 85 | """Adds a tool file to the project. 86 | 87 | Args: 88 | path: Relative path from project to tool file. 
89 | """ 90 | self.tool_files_section.append(['ToolFile', {'RelativePath': path}]) 91 | 92 | def _GetSpecForConfiguration(self, config_type, config_name, attrs, tools): 93 | """Returns the specification for a configuration. 94 | 95 | Args: 96 | config_type: Type of configuration node. 97 | config_name: Configuration name. 98 | attrs: Dict of configuration attributes; may be None. 99 | tools: List of tools (strings or Tool objects); may be None. 100 | Returns: 101 | """ 102 | # Handle defaults 103 | if not attrs: 104 | attrs = {} 105 | if not tools: 106 | tools = [] 107 | 108 | # Add configuration node and its attributes 109 | node_attrs = attrs.copy() 110 | node_attrs['Name'] = config_name 111 | specification = [config_type, node_attrs] 112 | 113 | # Add tool nodes and their attributes 114 | if tools: 115 | for t in tools: 116 | if isinstance(t, Tool): 117 | specification.append(t._GetSpecification()) 118 | else: 119 | specification.append(Tool(t)._GetSpecification()) 120 | return specification 121 | 122 | 123 | def AddConfig(self, name, attrs=None, tools=None): 124 | """Adds a configuration to the project. 125 | 126 | Args: 127 | name: Configuration name. 128 | attrs: Dict of configuration attributes; may be None. 129 | tools: List of tools (strings or Tool objects); may be None. 130 | """ 131 | spec = self._GetSpecForConfiguration('Configuration', name, attrs, tools) 132 | self.configurations_section.append(spec) 133 | 134 | def _AddFilesToNode(self, parent, files): 135 | """Adds files and/or filters to the parent node. 136 | 137 | Args: 138 | parent: Destination node 139 | files: A list of Filter objects and/or relative paths to files. 140 | 141 | Will call itself recursively, if the files list contains Filter objects. 
142 | """ 143 | for f in files: 144 | if isinstance(f, Filter): 145 | node = ['Filter', {'Name': f.name}] 146 | self._AddFilesToNode(node, f.contents) 147 | else: 148 | node = ['File', {'RelativePath': f}] 149 | self.files_dict[f] = node 150 | parent.append(node) 151 | 152 | def AddFiles(self, files): 153 | """Adds files to the project. 154 | 155 | Args: 156 | files: A list of Filter objects and/or relative paths to files. 157 | 158 | This makes a copy of the file/filter tree at the time of this call. If you 159 | later add files to a Filter object which was passed into a previous call 160 | to AddFiles(), it will not be reflected in this project. 161 | """ 162 | self._AddFilesToNode(self.files_section, files) 163 | # TODO(rspangler) This also doesn't handle adding files to an existing 164 | # filter. That is, it doesn't merge the trees. 165 | 166 | def AddFileConfig(self, path, config, attrs=None, tools=None): 167 | """Adds a configuration to a file. 168 | 169 | Args: 170 | path: Relative path to the file. 171 | config: Name of configuration to add. 172 | attrs: Dict of configuration attributes; may be None. 173 | tools: List of tools (strings or Tool objects); may be None. 174 | 175 | Raises: 176 | ValueError: Relative path does not match any file added via AddFiles(). 177 | """ 178 | # Find the file node with the right relative path 179 | parent = self.files_dict.get(path) 180 | if not parent: 181 | raise ValueError('AddFileConfig: file "%s" not in project.' 
% path) 182 | 183 | # Add the config to the file node 184 | spec = self._GetSpecForConfiguration('FileConfiguration', config, attrs, 185 | tools) 186 | parent.append(spec) 187 | 188 | def WriteIfChanged(self): 189 | """Writes the project file.""" 190 | # First create XML content definition 191 | content = [ 192 | 'VisualStudioProject', 193 | {'ProjectType': 'Visual C++', 194 | 'Version': self.version.ProjectVersion(), 195 | 'Name': self.name, 196 | 'ProjectGUID': self.guid, 197 | 'RootNamespace': self.name, 198 | 'Keyword': 'Win32Proj' 199 | }, 200 | self.platform_section, 201 | self.tool_files_section, 202 | self.configurations_section, 203 | ['References'], # empty section 204 | self.files_section, 205 | ['Globals'] # empty section 206 | ] 207 | easy_xml.WriteXmlIfChanged(content, self.project_path, 208 | encoding="Windows-1252") 209 | -------------------------------------------------------------------------------- /gyp/pylib/gyp/MSVSToolFile.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2012 Google Inc. All rights reserved. 2 | # Use of this source code is governed by a BSD-style license that can be 3 | # found in the LICENSE file. 4 | 5 | """Visual Studio project reader/writer.""" 6 | 7 | import gyp.common 8 | import gyp.easy_xml as easy_xml 9 | 10 | 11 | class Writer(object): 12 | """Visual Studio XML tool file writer.""" 13 | 14 | def __init__(self, tool_file_path, name): 15 | """Initializes the tool file. 16 | 17 | Args: 18 | tool_file_path: Path to the tool file. 19 | name: Name of the tool file. 20 | """ 21 | self.tool_file_path = tool_file_path 22 | self.name = name 23 | self.rules_section = ['Rules'] 24 | 25 | def AddCustomBuildRule(self, name, cmd, description, 26 | additional_dependencies, 27 | outputs, extensions): 28 | """Adds a rule to the tool file. 29 | 30 | Args: 31 | name: Name of the rule. 32 | description: Description of the rule. 33 | cmd: Command line of the rule. 
34 | additional_dependencies: other files which may trigger the rule. 35 | outputs: outputs of the rule. 36 | extensions: extensions handled by the rule. 37 | """ 38 | rule = ['CustomBuildRule', 39 | {'Name': name, 40 | 'ExecutionDescription': description, 41 | 'CommandLine': cmd, 42 | 'Outputs': ';'.join(outputs), 43 | 'FileExtensions': ';'.join(extensions), 44 | 'AdditionalDependencies': 45 | ';'.join(additional_dependencies) 46 | }] 47 | self.rules_section.append(rule) 48 | 49 | def WriteIfChanged(self): 50 | """Writes the tool file.""" 51 | content = ['VisualStudioToolFile', 52 | {'Version': '8.00', 53 | 'Name': self.name 54 | }, 55 | self.rules_section 56 | ] 57 | easy_xml.WriteXmlIfChanged(content, self.tool_file_path, 58 | encoding="Windows-1252") 59 | -------------------------------------------------------------------------------- /gyp/pylib/gyp/MSVSUserFile.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2012 Google Inc. All rights reserved. 2 | # Use of this source code is governed by a BSD-style license that can be 3 | # found in the LICENSE file. 4 | 5 | """Visual Studio user preferences file writer.""" 6 | 7 | import os 8 | import re 9 | import socket # for gethostname 10 | 11 | import gyp.common 12 | import gyp.easy_xml as easy_xml 13 | 14 | 15 | #------------------------------------------------------------------------------ 16 | 17 | def _FindCommandInPath(command): 18 | """If there are no slashes in the command given, this function 19 | searches the PATH env to find the given command, and converts it 20 | to an absolute path. We have to do this because MSVS is looking 21 | for an actual file to launch a debugger on, not just a command 22 | line. 
Note that this happens at GYP time, so anything needing to 23 | be built needs to have a full path.""" 24 | if '/' in command or '\\' in command: 25 | # If the command already has path elements (either relative or 26 | # absolute), then assume it is constructed properly. 27 | return command 28 | else: 29 | # Search through the path list and find an existing file that 30 | # we can access. 31 | paths = os.environ.get('PATH','').split(os.pathsep) 32 | for path in paths: 33 | item = os.path.join(path, command) 34 | if os.path.isfile(item) and os.access(item, os.X_OK): 35 | return item 36 | return command 37 | 38 | def _QuoteWin32CommandLineArgs(args): 39 | new_args = [] 40 | for arg in args: 41 | # Replace all double-quotes with double-double-quotes to escape 42 | # them for cmd shell, and then quote the whole thing if there 43 | # are any. 44 | if arg.find('"') != -1: 45 | arg = '""'.join(arg.split('"')) 46 | arg = '"%s"' % arg 47 | 48 | # Otherwise, if there are any spaces, quote the whole arg. 49 | elif re.search(r'[ \t\n]', arg): 50 | arg = '"%s"' % arg 51 | new_args.append(arg) 52 | return new_args 53 | 54 | class Writer(object): 55 | """Visual Studio XML user user file writer.""" 56 | 57 | def __init__(self, user_file_path, version, name): 58 | """Initializes the user file. 59 | 60 | Args: 61 | user_file_path: Path to the user file. 62 | version: Version info. 63 | name: Name of the user file. 64 | """ 65 | self.user_file_path = user_file_path 66 | self.version = version 67 | self.name = name 68 | self.configurations = {} 69 | 70 | def AddConfig(self, name): 71 | """Adds a configuration to the project. 72 | 73 | Args: 74 | name: Configuration name. 75 | """ 76 | self.configurations[name] = ['Configuration', {'Name': name}] 77 | 78 | def AddDebugSettings(self, config_name, command, environment = {}, 79 | working_directory=""): 80 | """Adds a DebugSettings node to the user file for a particular config. 81 | 82 | Args: 83 | command: command line to run. 
First element in the list is the 84 | executable. All elements of the command will be quoted if 85 | necessary. 86 | working_directory: other files which may trigger the rule. (optional) 87 | """ 88 | command = _QuoteWin32CommandLineArgs(command) 89 | 90 | abs_command = _FindCommandInPath(command[0]) 91 | 92 | if environment and isinstance(environment, dict): 93 | env_list = ['%s="%s"' % (key, val) 94 | for (key,val) in environment.iteritems()] 95 | environment = ' '.join(env_list) 96 | else: 97 | environment = '' 98 | 99 | n_cmd = ['DebugSettings', 100 | {'Command': abs_command, 101 | 'WorkingDirectory': working_directory, 102 | 'CommandArguments': " ".join(command[1:]), 103 | 'RemoteMachine': socket.gethostname(), 104 | 'Environment': environment, 105 | 'EnvironmentMerge': 'true', 106 | # Currently these are all "dummy" values that we're just setting 107 | # in the default manner that MSVS does it. We could use some of 108 | # these to add additional capabilities, I suppose, but they might 109 | # not have parity with other platforms then. 110 | 'Attach': 'false', 111 | 'DebuggerType': '3', # 'auto' debugger 112 | 'Remote': '1', 113 | 'RemoteCommand': '', 114 | 'HttpUrl': '', 115 | 'PDBPath': '', 116 | 'SQLDebugging': '', 117 | 'DebuggerFlavor': '0', 118 | 'MPIRunCommand': '', 119 | 'MPIRunArguments': '', 120 | 'MPIRunWorkingDirectory': '', 121 | 'ApplicationCommand': '', 122 | 'ApplicationArguments': '', 123 | 'ShimCommand': '', 124 | 'MPIAcceptMode': '', 125 | 'MPIAcceptFilter': '' 126 | }] 127 | 128 | # Find the config, and add it if it doesn't exist. 129 | if config_name not in self.configurations: 130 | self.AddConfig(config_name) 131 | 132 | # Add the DebugSettings onto the appropriate config. 
133 | self.configurations[config_name].append(n_cmd) 134 | 135 | def WriteIfChanged(self): 136 | """Writes the user file.""" 137 | configs = ['Configurations'] 138 | for config, spec in sorted(self.configurations.iteritems()): 139 | configs.append(spec) 140 | 141 | content = ['VisualStudioUserFile', 142 | {'Version': self.version.ProjectVersion(), 143 | 'Name': self.name 144 | }, 145 | configs] 146 | easy_xml.WriteXmlIfChanged(content, self.user_file_path, 147 | encoding="Windows-1252") 148 | -------------------------------------------------------------------------------- /gyp/pylib/gyp/MSVSUtil.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2013 Google Inc. All rights reserved. 2 | # Use of this source code is governed by a BSD-style license that can be 3 | # found in the LICENSE file. 4 | 5 | """Utility functions shared amongst the Windows generators.""" 6 | 7 | import copy 8 | import os 9 | 10 | 11 | # A dictionary mapping supported target types to extensions. 12 | TARGET_TYPE_EXT = { 13 | 'executable': 'exe', 14 | 'loadable_module': 'dll', 15 | 'shared_library': 'dll', 16 | 'static_library': 'lib', 17 | } 18 | 19 | 20 | def _GetLargePdbShimCcPath(): 21 | """Returns the path of the large_pdb_shim.cc file.""" 22 | this_dir = os.path.abspath(os.path.dirname(__file__)) 23 | src_dir = os.path.abspath(os.path.join(this_dir, '..', '..')) 24 | win_data_dir = os.path.join(src_dir, 'data', 'win') 25 | large_pdb_shim_cc = os.path.join(win_data_dir, 'large-pdb-shim.cc') 26 | return large_pdb_shim_cc 27 | 28 | 29 | def _DeepCopySomeKeys(in_dict, keys): 30 | """Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|. 31 | 32 | Arguments: 33 | in_dict: The dictionary to copy. 34 | keys: The keys to be copied. If a key is in this list and doesn't exist in 35 | |in_dict| this is not an error. 36 | Returns: 37 | The partially deep-copied dictionary. 
38 | """ 39 | d = {} 40 | for key in keys: 41 | if key not in in_dict: 42 | continue 43 | d[key] = copy.deepcopy(in_dict[key]) 44 | return d 45 | 46 | 47 | def _SuffixName(name, suffix): 48 | """Add a suffix to the end of a target. 49 | 50 | Arguments: 51 | name: name of the target (foo#target) 52 | suffix: the suffix to be added 53 | Returns: 54 | Target name with suffix added (foo_suffix#target) 55 | """ 56 | parts = name.rsplit('#', 1) 57 | parts[0] = '%s_%s' % (parts[0], suffix) 58 | return '#'.join(parts) 59 | 60 | 61 | def _ShardName(name, number): 62 | """Add a shard number to the end of a target. 63 | 64 | Arguments: 65 | name: name of the target (foo#target) 66 | number: shard number 67 | Returns: 68 | Target name with shard added (foo_1#target) 69 | """ 70 | return _SuffixName(name, str(number)) 71 | 72 | 73 | def ShardTargets(target_list, target_dicts): 74 | """Shard some targets apart to work around the linkers limits. 75 | 76 | Arguments: 77 | target_list: List of target pairs: 'base/base.gyp:base'. 78 | target_dicts: Dict of target properties keyed on target pair. 79 | Returns: 80 | Tuple of the new sharded versions of the inputs. 81 | """ 82 | # Gather the targets to shard, and how many pieces. 83 | targets_to_shard = {} 84 | for t in target_dicts: 85 | shards = int(target_dicts[t].get('msvs_shard', 0)) 86 | if shards: 87 | targets_to_shard[t] = shards 88 | # Shard target_list. 89 | new_target_list = [] 90 | for t in target_list: 91 | if t in targets_to_shard: 92 | for i in range(targets_to_shard[t]): 93 | new_target_list.append(_ShardName(t, i)) 94 | else: 95 | new_target_list.append(t) 96 | # Shard target_dict. 
97 | new_target_dicts = {} 98 | for t in target_dicts: 99 | if t in targets_to_shard: 100 | for i in range(targets_to_shard[t]): 101 | name = _ShardName(t, i) 102 | new_target_dicts[name] = copy.copy(target_dicts[t]) 103 | new_target_dicts[name]['target_name'] = _ShardName( 104 | new_target_dicts[name]['target_name'], i) 105 | sources = new_target_dicts[name].get('sources', []) 106 | new_sources = [] 107 | for pos in range(i, len(sources), targets_to_shard[t]): 108 | new_sources.append(sources[pos]) 109 | new_target_dicts[name]['sources'] = new_sources 110 | else: 111 | new_target_dicts[t] = target_dicts[t] 112 | # Shard dependencies. 113 | for t in new_target_dicts: 114 | for deptype in ('dependencies', 'dependencies_original'): 115 | dependencies = copy.copy(new_target_dicts[t].get(deptype, [])) 116 | new_dependencies = [] 117 | for d in dependencies: 118 | if d in targets_to_shard: 119 | for i in range(targets_to_shard[d]): 120 | new_dependencies.append(_ShardName(d, i)) 121 | else: 122 | new_dependencies.append(d) 123 | new_target_dicts[t][deptype] = new_dependencies 124 | 125 | return (new_target_list, new_target_dicts) 126 | 127 | 128 | def _GetPdbPath(target_dict, config_name, vars): 129 | """Returns the path to the PDB file that will be generated by a given 130 | configuration. 131 | 132 | The lookup proceeds as follows: 133 | - Look for an explicit path in the VCLinkerTool configuration block. 134 | - Look for an 'msvs_large_pdb_path' variable. 135 | - Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is 136 | specified. 137 | - Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'. 138 | 139 | Arguments: 140 | target_dict: The target dictionary to be searched. 141 | config_name: The name of the configuration of interest. 142 | vars: A dictionary of common GYP variables with generator-specific values. 143 | Returns: 144 | The path of the corresponding PDB file. 
145 | """ 146 | config = target_dict['configurations'][config_name] 147 | msvs = config.setdefault('msvs_settings', {}) 148 | 149 | linker = msvs.get('VCLinkerTool', {}) 150 | 151 | pdb_path = linker.get('ProgramDatabaseFile') 152 | if pdb_path: 153 | return pdb_path 154 | 155 | variables = target_dict.get('variables', {}) 156 | pdb_path = variables.get('msvs_large_pdb_path', None) 157 | if pdb_path: 158 | return pdb_path 159 | 160 | 161 | pdb_base = target_dict.get('product_name', target_dict['target_name']) 162 | pdb_base = '%s.%s.pdb' % (pdb_base, TARGET_TYPE_EXT[target_dict['type']]) 163 | pdb_path = vars['PRODUCT_DIR'] + '/' + pdb_base 164 | 165 | return pdb_path 166 | 167 | 168 | def InsertLargePdbShims(target_list, target_dicts, vars): 169 | """Insert a shim target that forces the linker to use 4KB pagesize PDBs. 170 | 171 | This is a workaround for targets with PDBs greater than 1GB in size, the 172 | limit for the 1KB pagesize PDBs created by the linker by default. 173 | 174 | Arguments: 175 | target_list: List of target pairs: 'base/base.gyp:base'. 176 | target_dicts: Dict of target properties keyed on target pair. 177 | vars: A dictionary of common GYP variables with generator-specific values. 178 | Returns: 179 | Tuple of the shimmed version of the inputs. 180 | """ 181 | # Determine which targets need shimming. 182 | targets_to_shim = [] 183 | for t in target_dicts: 184 | target_dict = target_dicts[t] 185 | 186 | # We only want to shim targets that have msvs_large_pdb enabled. 187 | if not int(target_dict.get('msvs_large_pdb', 0)): 188 | continue 189 | # This is intended for executable, shared_library and loadable_module 190 | # targets where every configuration is set up to produce a PDB output. 191 | # If any of these conditions is not true then the shim logic will fail 192 | # below. 
193 | targets_to_shim.append(t) 194 | 195 | large_pdb_shim_cc = _GetLargePdbShimCcPath() 196 | 197 | for t in targets_to_shim: 198 | target_dict = target_dicts[t] 199 | target_name = target_dict.get('target_name') 200 | 201 | base_dict = _DeepCopySomeKeys(target_dict, 202 | ['configurations', 'default_configuration', 'toolset']) 203 | 204 | # This is the dict for copying the source file (part of the GYP tree) 205 | # to the intermediate directory of the project. This is necessary because 206 | # we can't always build a relative path to the shim source file (on Windows 207 | # GYP and the project may be on different drives), and Ninja hates absolute 208 | # paths (it ends up generating the .obj and .obj.d alongside the source 209 | # file, polluting GYPs tree). 210 | copy_suffix = 'large_pdb_copy' 211 | copy_target_name = target_name + '_' + copy_suffix 212 | full_copy_target_name = _SuffixName(t, copy_suffix) 213 | shim_cc_basename = os.path.basename(large_pdb_shim_cc) 214 | shim_cc_dir = vars['SHARED_INTERMEDIATE_DIR'] + '/' + copy_target_name 215 | shim_cc_path = shim_cc_dir + '/' + shim_cc_basename 216 | copy_dict = copy.deepcopy(base_dict) 217 | copy_dict['target_name'] = copy_target_name 218 | copy_dict['type'] = 'none' 219 | copy_dict['sources'] = [ large_pdb_shim_cc ] 220 | copy_dict['copies'] = [{ 221 | 'destination': shim_cc_dir, 222 | 'files': [ large_pdb_shim_cc ] 223 | }] 224 | 225 | # This is the dict for the PDB generating shim target. It depends on the 226 | # copy target. 
227 | shim_suffix = 'large_pdb_shim' 228 | shim_target_name = target_name + '_' + shim_suffix 229 | full_shim_target_name = _SuffixName(t, shim_suffix) 230 | shim_dict = copy.deepcopy(base_dict) 231 | shim_dict['target_name'] = shim_target_name 232 | shim_dict['type'] = 'static_library' 233 | shim_dict['sources'] = [ shim_cc_path ] 234 | shim_dict['dependencies'] = [ full_copy_target_name ] 235 | 236 | # Set up the shim to output its PDB to the same location as the final linker 237 | # target. 238 | for config_name, config in shim_dict.get('configurations').iteritems(): 239 | pdb_path = _GetPdbPath(target_dict, config_name, vars) 240 | 241 | # A few keys that we don't want to propagate. 242 | for key in ['msvs_precompiled_header', 'msvs_precompiled_source', 'test']: 243 | config.pop(key, None) 244 | 245 | msvs = config.setdefault('msvs_settings', {}) 246 | 247 | # Update the compiler directives in the shim target. 248 | compiler = msvs.setdefault('VCCLCompilerTool', {}) 249 | compiler['DebugInformationFormat'] = '3' 250 | compiler['ProgramDataBaseFileName'] = pdb_path 251 | 252 | # Set the explicit PDB path in the appropriate configuration of the 253 | # original target. 254 | config = target_dict['configurations'][config_name] 255 | msvs = config.setdefault('msvs_settings', {}) 256 | linker = msvs.setdefault('VCLinkerTool', {}) 257 | linker['GenerateDebugInformation'] = 'true' 258 | linker['ProgramDatabaseFile'] = pdb_path 259 | 260 | # Add the new targets. They must go to the beginning of the list so that 261 | # the dependency generation works as expected in ninja. 262 | target_list.insert(0, full_copy_target_name) 263 | target_list.insert(0, full_shim_target_name) 264 | target_dicts[full_copy_target_name] = copy_dict 265 | target_dicts[full_shim_target_name] = shim_dict 266 | 267 | # Update the original target to depend on the shim target. 
268 | target_dict.setdefault('dependencies', []).append(full_shim_target_name) 269 | 270 | return (target_list, target_dicts) 271 | -------------------------------------------------------------------------------- /gyp/pylib/gyp/common_test.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # Copyright (c) 2012 Google Inc. All rights reserved. 4 | # Use of this source code is governed by a BSD-style license that can be 5 | # found in the LICENSE file. 6 | 7 | """Unit tests for the common.py file.""" 8 | 9 | import gyp.common 10 | import unittest 11 | import sys 12 | 13 | 14 | class TestTopologicallySorted(unittest.TestCase): 15 | def test_Valid(self): 16 | """Test that sorting works on a valid graph with one possible order.""" 17 | graph = { 18 | 'a': ['b', 'c'], 19 | 'b': [], 20 | 'c': ['d'], 21 | 'd': ['b'], 22 | } 23 | def GetEdge(node): 24 | return tuple(graph[node]) 25 | self.assertEqual( 26 | gyp.common.TopologicallySorted(graph.keys(), GetEdge), 27 | ['a', 'c', 'd', 'b']) 28 | 29 | def test_Cycle(self): 30 | """Test that an exception is thrown on a cyclic graph.""" 31 | graph = { 32 | 'a': ['b'], 33 | 'b': ['c'], 34 | 'c': ['d'], 35 | 'd': ['a'], 36 | } 37 | def GetEdge(node): 38 | return tuple(graph[node]) 39 | self.assertRaises( 40 | gyp.common.CycleError, gyp.common.TopologicallySorted, 41 | graph.keys(), GetEdge) 42 | 43 | 44 | class TestGetFlavor(unittest.TestCase): 45 | """Test that gyp.common.GetFlavor works as intended""" 46 | original_platform = '' 47 | 48 | def setUp(self): 49 | self.original_platform = sys.platform 50 | 51 | def tearDown(self): 52 | sys.platform = self.original_platform 53 | 54 | def assertFlavor(self, expected, argument, param): 55 | sys.platform = argument 56 | self.assertEqual(expected, gyp.common.GetFlavor(param)) 57 | 58 | def test_platform_default(self): 59 | self.assertFlavor('freebsd', 'freebsd9' , {}) 60 | self.assertFlavor('freebsd', 'freebsd10', {}) 61 | 
self.assertFlavor('openbsd', 'openbsd5' , {}) 62 | self.assertFlavor('solaris', 'sunos5' , {}); 63 | self.assertFlavor('solaris', 'sunos' , {}); 64 | self.assertFlavor('linux' , 'linux2' , {}); 65 | self.assertFlavor('linux' , 'linux3' , {}); 66 | 67 | def test_param(self): 68 | self.assertFlavor('foobar', 'linux2' , {'flavor': 'foobar'}) 69 | 70 | 71 | if __name__ == '__main__': 72 | unittest.main() 73 | -------------------------------------------------------------------------------- /gyp/pylib/gyp/easy_xml.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2011 Google Inc. All rights reserved. 2 | # Use of this source code is governed by a BSD-style license that can be 3 | # found in the LICENSE file. 4 | 5 | import re 6 | import os 7 | 8 | 9 | def XmlToString(content, encoding='utf-8', pretty=False): 10 | """ Writes the XML content to disk, touching the file only if it has changed. 11 | 12 | Visual Studio files have a lot of pre-defined structures. This function makes 13 | it easy to represent these structures as Python data structures, instead of 14 | having to create a lot of function calls. 15 | 16 | Each XML element of the content is represented as a list composed of: 17 | 1. The name of the element, a string, 18 | 2. The attributes of the element, a dictionary (optional), and 19 | 3+. The content of the element, if any. Strings are simple text nodes and 20 | lists are child elements. 21 | 22 | Example 1: 23 | 24 | becomes 25 | ['test'] 26 | 27 | Example 2: 28 | 29 | This is 30 | it! 31 | 32 | 33 | becomes 34 | ['myelement', {'a':'value1', 'b':'value2'}, 35 | ['childtype', 'This is'], 36 | ['childtype', 'it!'], 37 | ] 38 | 39 | Args: 40 | content: The structured content to be converted. 41 | encoding: The encoding to report on the first XML line. 42 | pretty: True if we want pretty printing with indents and new lines. 43 | 44 | Returns: 45 | The XML content as a string. 
46 | """ 47 | # We create a huge list of all the elements of the file. 48 | xml_parts = ['' % encoding] 49 | if pretty: 50 | xml_parts.append('\n') 51 | _ConstructContentList(xml_parts, content, pretty) 52 | 53 | # Convert it to a string 54 | return ''.join(xml_parts) 55 | 56 | 57 | def _ConstructContentList(xml_parts, specification, pretty, level=0): 58 | """ Appends the XML parts corresponding to the specification. 59 | 60 | Args: 61 | xml_parts: A list of XML parts to be appended to. 62 | specification: The specification of the element. See EasyXml docs. 63 | pretty: True if we want pretty printing with indents and new lines. 64 | level: Indentation level. 65 | """ 66 | # The first item in a specification is the name of the element. 67 | if pretty: 68 | indentation = ' ' * level 69 | new_line = '\n' 70 | else: 71 | indentation = '' 72 | new_line = '' 73 | name = specification[0] 74 | if not isinstance(name, str): 75 | raise Exception('The first item of an EasyXml specification should be ' 76 | 'a string. Specification was ' + str(specification)) 77 | xml_parts.append(indentation + '<' + name) 78 | 79 | # Optionally in second position is a dictionary of the attributes. 80 | rest = specification[1:] 81 | if rest and isinstance(rest[0], dict): 82 | for at, val in sorted(rest[0].iteritems()): 83 | xml_parts.append(' %s="%s"' % (at, _XmlEscape(val, attr=True))) 84 | rest = rest[1:] 85 | if rest: 86 | xml_parts.append('>') 87 | all_strings = reduce(lambda x, y: x and isinstance(y, str), rest, True) 88 | multi_line = not all_strings 89 | if multi_line and new_line: 90 | xml_parts.append(new_line) 91 | for child_spec in rest: 92 | # If it's a string, append a text node. 
93 | # Otherwise recurse over that child definition 94 | if isinstance(child_spec, str): 95 | xml_parts.append(_XmlEscape(child_spec)) 96 | else: 97 | _ConstructContentList(xml_parts, child_spec, pretty, level + 1) 98 | if multi_line and indentation: 99 | xml_parts.append(indentation) 100 | xml_parts.append('%s' % (name, new_line)) 101 | else: 102 | xml_parts.append('/>%s' % new_line) 103 | 104 | 105 | def WriteXmlIfChanged(content, path, encoding='utf-8', pretty=False, 106 | win32=False): 107 | """ Writes the XML content to disk, touching the file only if it has changed. 108 | 109 | Args: 110 | content: The structured content to be written. 111 | path: Location of the file. 112 | encoding: The encoding to report on the first line of the XML file. 113 | pretty: True if we want pretty printing with indents and new lines. 114 | """ 115 | xml_string = XmlToString(content, encoding, pretty) 116 | if win32 and os.linesep != '\r\n': 117 | xml_string = xml_string.replace('\n', '\r\n') 118 | 119 | try: 120 | xml_string = xml_string.encode(encoding) 121 | except Exception: 122 | xml_string = unicode(xml_string, 'latin-1').encode(encoding) 123 | 124 | # Get the old content 125 | try: 126 | f = open(path, 'r') 127 | existing = f.read() 128 | f.close() 129 | except: 130 | existing = None 131 | 132 | # It has changed, write it 133 | if existing != xml_string: 134 | f = open(path, 'w') 135 | f.write(xml_string) 136 | f.close() 137 | 138 | 139 | _xml_escape_map = { 140 | '"': '"', 141 | "'": ''', 142 | '<': '<', 143 | '>': '>', 144 | '&': '&', 145 | '\n': ' ', 146 | '\r': ' ', 147 | } 148 | 149 | 150 | _xml_escape_re = re.compile( 151 | "(%s)" % "|".join(map(re.escape, _xml_escape_map.keys()))) 152 | 153 | 154 | def _XmlEscape(value, attr=False): 155 | """ Escape a string for inclusion in XML.""" 156 | def replace(match): 157 | m = match.string[match.start() : match.end()] 158 | # don't replace single quotes in attrs 159 | if attr and m == "'": 160 | return m 161 | return 
_xml_escape_map[m] 162 | return _xml_escape_re.sub(replace, value) 163 | -------------------------------------------------------------------------------- /gyp/pylib/gyp/easy_xml_test.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # Copyright (c) 2011 Google Inc. All rights reserved. 4 | # Use of this source code is governed by a BSD-style license that can be 5 | # found in the LICENSE file. 6 | 7 | """ Unit tests for the easy_xml.py file. """ 8 | 9 | import gyp.easy_xml as easy_xml 10 | import unittest 11 | import StringIO 12 | 13 | 14 | class TestSequenceFunctions(unittest.TestCase): 15 | 16 | def setUp(self): 17 | self.stderr = StringIO.StringIO() 18 | 19 | def test_EasyXml_simple(self): 20 | self.assertEqual( 21 | easy_xml.XmlToString(['test']), 22 | '') 23 | 24 | self.assertEqual( 25 | easy_xml.XmlToString(['test'], encoding='Windows-1252'), 26 | '') 27 | 28 | def test_EasyXml_simple_with_attributes(self): 29 | self.assertEqual( 30 | easy_xml.XmlToString(['test2', {'a': 'value1', 'b': 'value2'}]), 31 | '') 32 | 33 | def test_EasyXml_escaping(self): 34 | original = '\'"\r&\nfoo' 35 | converted = '<test>\'" & foo' 36 | converted_apos = converted.replace("'", ''') 37 | self.assertEqual( 38 | easy_xml.XmlToString(['test3', {'a': original}, original]), 39 | '%s' % 40 | (converted, converted_apos)) 41 | 42 | def test_EasyXml_pretty(self): 43 | self.assertEqual( 44 | easy_xml.XmlToString( 45 | ['test3', 46 | ['GrandParent', 47 | ['Parent1', 48 | ['Child'] 49 | ], 50 | ['Parent2'] 51 | ] 52 | ], 53 | pretty=True), 54 | '\n' 55 | '\n' 56 | ' \n' 57 | ' \n' 58 | ' \n' 59 | ' \n' 60 | ' \n' 61 | ' \n' 62 | '\n') 63 | 64 | 65 | def test_EasyXml_complex(self): 66 | # We want to create: 67 | target = ( 68 | '' 69 | '' 70 | '' 71 | '{D2250C20-3A94-4FB9-AF73-11BC5B73884B}' 72 | 'Win32Proj' 73 | 'automated_ui_tests' 74 | '' 75 | '' 76 | '' 79 | 'Application' 80 | 'Unicode' 81 | '' 82 | '') 83 | 84 | xml = 
easy_xml.XmlToString( 85 | ['Project', 86 | ['PropertyGroup', {'Label': 'Globals'}, 87 | ['ProjectGuid', '{D2250C20-3A94-4FB9-AF73-11BC5B73884B}'], 88 | ['Keyword', 'Win32Proj'], 89 | ['RootNamespace', 'automated_ui_tests'] 90 | ], 91 | ['Import', {'Project': '$(VCTargetsPath)\\Microsoft.Cpp.props'}], 92 | ['PropertyGroup', 93 | {'Condition': "'$(Configuration)|$(Platform)'=='Debug|Win32'", 94 | 'Label': 'Configuration'}, 95 | ['ConfigurationType', 'Application'], 96 | ['CharacterSet', 'Unicode'] 97 | ] 98 | ]) 99 | self.assertEqual(xml, target) 100 | 101 | 102 | if __name__ == '__main__': 103 | unittest.main() 104 | -------------------------------------------------------------------------------- /gyp/pylib/gyp/flock_tool.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Copyright (c) 2011 Google Inc. All rights reserved. 3 | # Use of this source code is governed by a BSD-style license that can be 4 | # found in the LICENSE file. 5 | 6 | """These functions are executed via gyp-flock-tool when using the Makefile 7 | generator. 
Used on systems that don't have a built-in flock.""" 8 | 9 | import fcntl 10 | import os 11 | import struct 12 | import subprocess 13 | import sys 14 | 15 | 16 | def main(args): 17 | executor = FlockTool() 18 | executor.Dispatch(args) 19 | 20 | 21 | class FlockTool(object): 22 | """This class emulates the 'flock' command.""" 23 | def Dispatch(self, args): 24 | """Dispatches a string command to a method.""" 25 | if len(args) < 1: 26 | raise Exception("Not enough arguments") 27 | 28 | method = "Exec%s" % self._CommandifyName(args[0]) 29 | getattr(self, method)(*args[1:]) 30 | 31 | def _CommandifyName(self, name_string): 32 | """Transforms a tool name like copy-info-plist to CopyInfoPlist""" 33 | return name_string.title().replace('-', '') 34 | 35 | def ExecFlock(self, lockfile, *cmd_list): 36 | """Emulates the most basic behavior of Linux's flock(1).""" 37 | # Rely on exception handling to report errors. 38 | # Note that the stock python on SunOS has a bug 39 | # where fcntl.flock(fd, LOCK_EX) always fails 40 | # with EBADF, that's why we use this F_SETLK 41 | # hack instead. 42 | fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0666) 43 | if sys.platform.startswith('aix'): 44 | # Python on AIX is compiled with LARGEFILE support, which changes the 45 | # struct size. 
46 | op = struct.pack('hhIllqq', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0) 47 | else: 48 | op = struct.pack('hhllhhl', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0) 49 | fcntl.fcntl(fd, fcntl.F_SETLK, op) 50 | return subprocess.call(cmd_list) 51 | 52 | 53 | if __name__ == '__main__': 54 | sys.exit(main(sys.argv[1:])) 55 | -------------------------------------------------------------------------------- /gyp/pylib/gyp/generator/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CodeJockey/node-ninja/c921357cc402a01097aa9c9aa2ad260af129d935/gyp/pylib/gyp/generator/__init__.py -------------------------------------------------------------------------------- /gyp/pylib/gyp/generator/dump_dependency_json.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2012 Google Inc. All rights reserved. 2 | # Use of this source code is governed by a BSD-style license that can be 3 | # found in the LICENSE file. 4 | 5 | import collections 6 | import os 7 | import gyp 8 | import gyp.common 9 | import gyp.msvs_emulation 10 | import json 11 | import sys 12 | 13 | generator_supports_multiple_toolsets = True 14 | 15 | generator_wants_static_library_dependencies_adjusted = False 16 | 17 | generator_filelist_paths = { 18 | } 19 | 20 | generator_default_variables = { 21 | } 22 | for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR', 23 | 'LIB_DIR', 'SHARED_LIB_DIR']: 24 | # Some gyp steps fail if these are empty(!). 
25 | generator_default_variables[dirname] = 'dir' 26 | for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME', 27 | 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT', 28 | 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX', 29 | 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX', 30 | 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX', 31 | 'CONFIGURATION_NAME']: 32 | generator_default_variables[unused] = '' 33 | 34 | 35 | def CalculateVariables(default_variables, params): 36 | generator_flags = params.get('generator_flags', {}) 37 | for key, val in generator_flags.items(): 38 | default_variables.setdefault(key, val) 39 | default_variables.setdefault('OS', gyp.common.GetFlavor(params)) 40 | 41 | flavor = gyp.common.GetFlavor(params) 42 | if flavor =='win': 43 | # Copy additional generator configuration data from VS, which is shared 44 | # by the Windows Ninja generator. 45 | import gyp.generator.msvs as msvs_generator 46 | generator_additional_non_configuration_keys = getattr(msvs_generator, 47 | 'generator_additional_non_configuration_keys', []) 48 | generator_additional_path_sections = getattr(msvs_generator, 49 | 'generator_additional_path_sections', []) 50 | 51 | gyp.msvs_emulation.CalculateCommonVariables(default_variables, params) 52 | 53 | 54 | def CalculateGeneratorInputInfo(params): 55 | """Calculate the generator specific info that gets fed to input (called by 56 | gyp).""" 57 | generator_flags = params.get('generator_flags', {}) 58 | if generator_flags.get('adjust_static_libraries', False): 59 | global generator_wants_static_library_dependencies_adjusted 60 | generator_wants_static_library_dependencies_adjusted = True 61 | 62 | toplevel = params['options'].toplevel_dir 63 | generator_dir = os.path.relpath(params['options'].generator_output or '.') 64 | # output_dir: relative path from generator_dir to the build directory. 
65 | output_dir = generator_flags.get('output_dir', 'out') 66 | qualified_out_dir = os.path.normpath(os.path.join( 67 | toplevel, generator_dir, output_dir, 'gypfiles')) 68 | global generator_filelist_paths 69 | generator_filelist_paths = { 70 | 'toplevel': toplevel, 71 | 'qualified_out_dir': qualified_out_dir, 72 | } 73 | 74 | def GenerateOutput(target_list, target_dicts, data, params): 75 | # Map of target -> list of targets it depends on. 76 | edges = {} 77 | 78 | # Queue of targets to visit. 79 | targets_to_visit = target_list[:] 80 | 81 | while len(targets_to_visit) > 0: 82 | target = targets_to_visit.pop() 83 | if target in edges: 84 | continue 85 | edges[target] = [] 86 | 87 | for dep in target_dicts[target].get('dependencies', []): 88 | edges[target].append(dep) 89 | targets_to_visit.append(dep) 90 | 91 | try: 92 | filepath = params['generator_flags']['output_dir'] 93 | except KeyError: 94 | filepath = '.' 95 | filename = os.path.join(filepath, 'dump.json') 96 | f = open(filename, 'w') 97 | json.dump(edges, f) 98 | f.close() 99 | print 'Wrote json to %s.' % filename 100 | -------------------------------------------------------------------------------- /gyp/pylib/gyp/generator/gypd.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2011 Google Inc. All rights reserved. 2 | # Use of this source code is governed by a BSD-style license that can be 3 | # found in the LICENSE file. 4 | 5 | """gypd output module 6 | 7 | This module produces gyp input as its output. Output files are given the 8 | .gypd extension to avoid overwriting the .gyp files that they are generated 9 | from. Internal references to .gyp files (such as those found in 10 | "dependencies" sections) are not adjusted to point to .gypd files instead; 11 | unlike other paths, which are relative to the .gyp or .gypd file, such paths 12 | are relative to the directory from which gyp was run to create the .gypd file. 
13 | 14 | This generator module is intended to be a sample and a debugging aid, hence 15 | the "d" for "debug" in .gypd. It is useful to inspect the results of the 16 | various merges, expansions, and conditional evaluations performed by gyp 17 | and to see a representation of what would be fed to a generator module. 18 | 19 | It's not advisable to rename .gypd files produced by this module to .gyp, 20 | because they will have all merges, expansions, and evaluations already 21 | performed and the relevant constructs not present in the output; paths to 22 | dependencies may be wrong; and various sections that do not belong in .gyp 23 | files such as such as "included_files" and "*_excluded" will be present. 24 | Output will also be stripped of comments. This is not intended to be a 25 | general-purpose gyp pretty-printer; for that, you probably just want to 26 | run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip 27 | comments but won't do all of the other things done to this module's output. 28 | 29 | The specific formatting of the output generated by this module is subject 30 | to change. 31 | """ 32 | 33 | 34 | import gyp.common 35 | import errno 36 | import os 37 | import pprint 38 | 39 | 40 | # These variables should just be spit back out as variable references. 41 | _generator_identity_variables = [ 42 | 'CONFIGURATION_NAME', 43 | 'EXECUTABLE_PREFIX', 44 | 'EXECUTABLE_SUFFIX', 45 | 'INTERMEDIATE_DIR', 46 | 'LIB_DIR', 47 | 'PRODUCT_DIR', 48 | 'RULE_INPUT_ROOT', 49 | 'RULE_INPUT_DIRNAME', 50 | 'RULE_INPUT_EXT', 51 | 'RULE_INPUT_NAME', 52 | 'RULE_INPUT_PATH', 53 | 'SHARED_INTERMEDIATE_DIR', 54 | 'SHARED_LIB_DIR', 55 | 'SHARED_LIB_PREFIX', 56 | 'SHARED_LIB_SUFFIX', 57 | 'STATIC_LIB_PREFIX', 58 | 'STATIC_LIB_SUFFIX', 59 | ] 60 | 61 | # gypd doesn't define a default value for OS like many other generator 62 | # modules. Specify "-D OS=whatever" on the command line to provide a value. 
63 | generator_default_variables = { 64 | } 65 | 66 | # gypd supports multiple toolsets 67 | generator_supports_multiple_toolsets = True 68 | 69 | # TODO(mark): This always uses <, which isn't right. The input module should 70 | # notify the generator to tell it which phase it is operating in, and this 71 | # module should use < for the early phase and then switch to > for the late 72 | # phase. Bonus points for carrying @ back into the output too. 73 | for v in _generator_identity_variables: 74 | generator_default_variables[v] = '<(%s)' % v 75 | 76 | 77 | def GenerateOutput(target_list, target_dicts, data, params): 78 | output_files = {} 79 | for qualified_target in target_list: 80 | [input_file, target] = \ 81 | gyp.common.ParseQualifiedTarget(qualified_target)[0:2] 82 | 83 | if input_file[-4:] != '.gyp': 84 | continue 85 | input_file_stem = input_file[:-4] 86 | output_file = input_file_stem + params['options'].suffix + '.gypd' 87 | 88 | if not output_file in output_files: 89 | output_files[output_file] = input_file 90 | 91 | for output_file, input_file in output_files.iteritems(): 92 | output = open(output_file, 'w') 93 | pprint.pprint(data[input_file], output) 94 | output.close() 95 | -------------------------------------------------------------------------------- /gyp/pylib/gyp/generator/gypsh.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2011 Google Inc. All rights reserved. 2 | # Use of this source code is governed by a BSD-style license that can be 3 | # found in the LICENSE file. 4 | 5 | """gypsh output module 6 | 7 | gypsh is a GYP shell. It's not really a generator per se. All it does is 8 | fire up an interactive Python session with a few local variables set to the 9 | variables passed to the generator. Like gypd, it's intended as a debugging 10 | aid, to facilitate the exploration of .gyp structures after being processed 11 | by the input module. 
12 | 13 | The expected usage is "gyp -f gypsh -D OS=desired_os". 14 | """ 15 | 16 | 17 | import code 18 | import sys 19 | 20 | 21 | # All of this stuff about generator variables was lovingly ripped from gypd.py. 22 | # That module has a much better description of what's going on and why. 23 | _generator_identity_variables = [ 24 | 'EXECUTABLE_PREFIX', 25 | 'EXECUTABLE_SUFFIX', 26 | 'INTERMEDIATE_DIR', 27 | 'PRODUCT_DIR', 28 | 'RULE_INPUT_ROOT', 29 | 'RULE_INPUT_DIRNAME', 30 | 'RULE_INPUT_EXT', 31 | 'RULE_INPUT_NAME', 32 | 'RULE_INPUT_PATH', 33 | 'SHARED_INTERMEDIATE_DIR', 34 | ] 35 | 36 | generator_default_variables = { 37 | } 38 | 39 | for v in _generator_identity_variables: 40 | generator_default_variables[v] = '<(%s)' % v 41 | 42 | 43 | def GenerateOutput(target_list, target_dicts, data, params): 44 | locals = { 45 | 'target_list': target_list, 46 | 'target_dicts': target_dicts, 47 | 'data': data, 48 | } 49 | 50 | # Use a banner that looks like the stock Python one and like what 51 | # code.interact uses by default, but tack on something to indicate what 52 | # locals are available, and identify gypsh. 53 | banner='Python %s on %s\nlocals.keys() = %s\ngypsh' % \ 54 | (sys.version, sys.platform, repr(sorted(locals.keys()))) 55 | 56 | code.interact(banner, local=locals) 57 | -------------------------------------------------------------------------------- /gyp/pylib/gyp/generator/msvs_test.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Copyright (c) 2012 Google Inc. All rights reserved. 3 | # Use of this source code is governed by a BSD-style license that can be 4 | # found in the LICENSE file. 5 | 6 | """ Unit tests for the msvs.py file. 
""" 7 | 8 | import gyp.generator.msvs as msvs 9 | import unittest 10 | import StringIO 11 | 12 | 13 | class TestSequenceFunctions(unittest.TestCase): 14 | 15 | def setUp(self): 16 | self.stderr = StringIO.StringIO() 17 | 18 | def test_GetLibraries(self): 19 | self.assertEqual( 20 | msvs._GetLibraries({}), 21 | []) 22 | self.assertEqual( 23 | msvs._GetLibraries({'libraries': []}), 24 | []) 25 | self.assertEqual( 26 | msvs._GetLibraries({'other':'foo', 'libraries': ['a.lib']}), 27 | ['a.lib']) 28 | self.assertEqual( 29 | msvs._GetLibraries({'libraries': ['-la']}), 30 | ['a.lib']) 31 | self.assertEqual( 32 | msvs._GetLibraries({'libraries': ['a.lib', 'b.lib', 'c.lib', '-lb.lib', 33 | '-lb.lib', 'd.lib', 'a.lib']}), 34 | ['c.lib', 'b.lib', 'd.lib', 'a.lib']) 35 | 36 | if __name__ == '__main__': 37 | unittest.main() 38 | -------------------------------------------------------------------------------- /gyp/pylib/gyp/generator/ninja_test.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # Copyright (c) 2012 Google Inc. All rights reserved. 4 | # Use of this source code is governed by a BSD-style license that can be 5 | # found in the LICENSE file. 6 | 7 | """ Unit tests for the ninja.py file. """ 8 | 9 | import gyp.generator.ninja as ninja 10 | import unittest 11 | import StringIO 12 | import sys 13 | import TestCommon 14 | 15 | 16 | class TestPrefixesAndSuffixes(unittest.TestCase): 17 | def test_BinaryNamesWindows(self): 18 | # These cannot run on non-Windows as they require a VS installation to 19 | # correctly handle variable expansion. 20 | if sys.platform.startswith('win'): 21 | writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.', 22 | 'build.ninja', 'win') 23 | spec = { 'target_name': 'wee' } 24 | self.assertTrue(writer.ComputeOutputFileName(spec, 'executable'). 25 | endswith('.exe')) 26 | self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library'). 
  def test_BinaryNamesLinux(self):
    # Unlike the Windows test above, no VS installation is needed, so this
    # runs on any host platform.
    writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
                               'build.ninja', 'linux')
    spec = { 'target_name': 'wee' }
    # Executables get no extension on Linux.
    self.assertTrue('.' not in writer.ComputeOutputFileName(spec,
                                                            'executable'))
    # Libraries are named lib<name>.so / lib<name>.a.
    self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
                    startswith('lib'))
    self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
                    startswith('lib'))
    self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
                    endswith('.so'))
    self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
                    endswith('.a'))
class TestFindCycles(unittest.TestCase):
  """Tests for DependencyGraphNode.FindCycles in gyp.input.

  Uses the modern assertEqual spelling; assertEquals is a deprecated
  alias in unittest.
  """

  def setUp(self):
    # One unconnected graph node per label; each test wires up edges.
    self.nodes = {}
    for x in ('a', 'b', 'c', 'd', 'e'):
      self.nodes[x] = gyp.input.DependencyGraphNode(x)

  def _create_dependency(self, dependent, dependency):
    """Record that `dependent` depends on `dependency` (links both ways)."""
    dependent.dependencies.append(dependency)
    dependency.dependents.append(dependent)

  def test_no_cycle_empty_graph(self):
    for label, node in self.nodes.iteritems():
      self.assertEqual([], node.FindCycles())

  def test_no_cycle_line(self):
    # a -> b -> c -> d: a simple chain has no cycles.
    self._create_dependency(self.nodes['a'], self.nodes['b'])
    self._create_dependency(self.nodes['b'], self.nodes['c'])
    self._create_dependency(self.nodes['c'], self.nodes['d'])

    for label, node in self.nodes.iteritems():
      self.assertEqual([], node.FindCycles())

  def test_no_cycle_dag(self):
    # a -> b, a -> c, b -> c: a diamond-ish DAG has no cycles.
    self._create_dependency(self.nodes['a'], self.nodes['b'])
    self._create_dependency(self.nodes['a'], self.nodes['c'])
    self._create_dependency(self.nodes['b'], self.nodes['c'])

    for label, node in self.nodes.iteritems():
      self.assertEqual([], node.FindCycles())

  def test_cycle_self_reference(self):
    self._create_dependency(self.nodes['a'], self.nodes['a'])

    self.assertEqual([[self.nodes['a'], self.nodes['a']]],
                     self.nodes['a'].FindCycles())

  def test_cycle_two_nodes(self):
    self._create_dependency(self.nodes['a'], self.nodes['b'])
    self._create_dependency(self.nodes['b'], self.nodes['a'])

    # The reported cycle starts and ends at the queried node.
    self.assertEqual([[self.nodes['a'], self.nodes['b'], self.nodes['a']]],
                     self.nodes['a'].FindCycles())
    self.assertEqual([[self.nodes['b'], self.nodes['a'], self.nodes['b']]],
                     self.nodes['b'].FindCycles())

  def test_two_cycles(self):
    self._create_dependency(self.nodes['a'], self.nodes['b'])
    self._create_dependency(self.nodes['b'], self.nodes['a'])

    self._create_dependency(self.nodes['b'], self.nodes['c'])
    self._create_dependency(self.nodes['c'], self.nodes['b'])

    # Both cycles reachable from 'a' are reported, in unspecified order.
    cycles = self.nodes['a'].FindCycles()
    self.assertTrue(
       [self.nodes['a'], self.nodes['b'], self.nodes['a']] in cycles)
    self.assertTrue(
       [self.nodes['b'], self.nodes['c'], self.nodes['b']] in cycles)
    self.assertEqual(2, len(cycles))

  def test_big_cycle(self):
    # a -> b -> c -> d -> e -> a: one five-node cycle.
    self._create_dependency(self.nodes['a'], self.nodes['b'])
    self._create_dependency(self.nodes['b'], self.nodes['c'])
    self._create_dependency(self.nodes['c'], self.nodes['d'])
    self._create_dependency(self.nodes['d'], self.nodes['e'])
    self._create_dependency(self.nodes['e'], self.nodes['a'])

    self.assertEqual([[self.nodes['a'],
                       self.nodes['b'],
                       self.nodes['c'],
                       self.nodes['d'],
                       self.nodes['e'],
                       self.nodes['a']]],
                     self.nodes['a'].FindCycles())
def escape_path(word):
    # Escape a path for a ninja file: existing '$ ' first (so the space
    # escape below does not double-escape it), then spaces and colons.
    return word.replace('$ ','$$ ').replace(' ','$ ').replace(':', '$:')

class Writer(object):
    """Emits ninja build-file syntax to `output`, wrapping at `width` cols."""

    def __init__(self, output, width=78):
        self.output = output
        self.width = width

    def newline(self):
        """Write a blank line."""
        self.output.write('\n')

    def comment(self, text):
        """Write `text` as '# '-prefixed comment lines, word-wrapped."""
        for line in textwrap.wrap(text, self.width - 2):
            self.output.write('# ' + line + '\n')

    def variable(self, key, value, indent=0):
        """Write a 'key = value' binding.  None is skipped entirely; a list
        value is joined with spaces (empty entries dropped)."""
        if value is None:
            return
        if isinstance(value, list):
            value = ' '.join(filter(None, value))  # Filter out empty strings.
        self._line('%s = %s' % (key, value), indent)

    def pool(self, name, depth):
        """Write a pool declaration with its depth variable."""
        self._line('pool %s' % name)
        self.variable('depth', depth, indent=1)

    def rule(self, name, command, description=None, depfile=None,
             generator=False, pool=None, restat=False, rspfile=None,
             rspfile_content=None, deps=None):
        """Write a rule declaration; each truthy keyword argument becomes an
        indented variable binding under the rule."""
        self._line('rule %s' % name)
        self.variable('command', command, indent=1)
        if description:
            self.variable('description', description, indent=1)
        if depfile:
            self.variable('depfile', depfile, indent=1)
        if generator:
            self.variable('generator', '1', indent=1)
        if pool:
            self.variable('pool', pool, indent=1)
        if restat:
            self.variable('restat', '1', indent=1)
        if rspfile:
            self.variable('rspfile', rspfile, indent=1)
        if rspfile_content:
            self.variable('rspfile_content', rspfile_content, indent=1)
        if deps:
            self.variable('deps', deps, indent=1)

    def build(self, outputs, rule, inputs=None, implicit=None, order_only=None,
              variables=None):
        """Write a build statement.

        `implicit` deps are appended after a '|' separator, `order_only`
        deps after '||', per ninja syntax.  `variables` (dict or pair
        iterable) become indented bindings.  Returns outputs as a list.
        """
        outputs = self._as_list(outputs)
        all_inputs = self._as_list(inputs)[:]
        out_outputs = list(map(escape_path, outputs))
        all_inputs = list(map(escape_path, all_inputs))

        if implicit:
            implicit = map(escape_path, self._as_list(implicit))
            all_inputs.append('|')
            all_inputs.extend(implicit)
        if order_only:
            order_only = map(escape_path, self._as_list(order_only))
            all_inputs.append('||')
            all_inputs.extend(order_only)

        self._line('build %s: %s' % (' '.join(out_outputs),
                                     ' '.join([rule] + all_inputs)))

        if variables:
            if isinstance(variables, dict):
                iterator = iter(variables.items())
            else:
                iterator = iter(variables)

            for key, val in iterator:
                self.variable(key, val, indent=1)

        return outputs

    def include(self, path):
        """Write an 'include' directive (included rules share scope)."""
        self._line('include %s' % path)

    def subninja(self, path):
        """Write a 'subninja' directive (included file gets its own scope)."""
        self._line('subninja %s' % path)

    def default(self, paths):
        """Write the default-targets line."""
        self._line('default %s' % ' '.join(self._as_list(paths)))

    def _count_dollars_before_index(self, s, i):
        """Returns the number of '$' characters right in front of s[i]."""
        dollar_count = 0
        dollar_index = i - 1
        while dollar_index > 0 and s[dollar_index] == '$':
            dollar_count += 1
            dollar_index -= 1
        return dollar_count

    def _line(self, text, indent=0):
        """Write 'text' word-wrapped at self.width characters."""
        leading_space = '  ' * indent
        while len(leading_space) + len(text) > self.width:
            # The text is too wide; wrap if possible.

            # Find the rightmost space that would obey our width constraint and
            # that's not an escaped space.
            available_space = self.width - len(leading_space) - len(' $')
            space = available_space
            while True:
                space = text.rfind(' ', 0, space)
                # An even number of preceding '$' means the space itself is
                # unescaped and therefore a legal break point.
                if space < 0 or \
                   self._count_dollars_before_index(text, space) % 2 == 0:
                    break

            if space < 0:
                # No such space; just use the first unescaped space we can find.
                space = available_space - 1
                while True:
                    space = text.find(' ', space + 1)
                    if space < 0 or \
                       self._count_dollars_before_index(text, space) % 2 == 0:
                        break
            if space < 0:
                # Give up on breaking.
                break

            # '$' at end-of-line is ninja's line-continuation marker.
            self.output.write(leading_space + text[0:space] + ' $\n')
            text = text[space+1:]

            # Subsequent lines are continuations, so indent them.
            leading_space = '  ' * (indent+2)

        self.output.write(leading_space + text + '\n')

    def _as_list(self, input):
        # Normalize None / scalar / list into a list.
        if input is None:
            return []
        if isinstance(input, list):
            return input
        return [input]


def escape(string):
    """Escape a string such that it can be embedded into a Ninja file without
    further interpretation."""
    assert '\n' not in string, 'Ninja syntax does not allow newlines'
    # We only have one special metacharacter: '$'.
    return string.replace('$', '$$')
20 | # 21 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 22 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 23 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 24 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 25 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 26 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 27 | # THE SOFTWARE. 28 | 29 | # Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. 30 | # Passes Python2.7's test suite and incorporates all the latest updates. 31 | 32 | try: 33 | from thread import get_ident as _get_ident 34 | except ImportError: 35 | from dummy_thread import get_ident as _get_ident 36 | 37 | try: 38 | from _abcoll import KeysView, ValuesView, ItemsView 39 | except ImportError: 40 | pass 41 | 42 | 43 | class OrderedDict(dict): 44 | 'Dictionary that remembers insertion order' 45 | # An inherited dict maps keys to values. 46 | # The inherited dict provides __getitem__, __len__, __contains__, and get. 47 | # The remaining methods are order-aware. 48 | # Big-O running times for all methods are the same as for regular dictionaries. 49 | 50 | # The internal self.__map dictionary maps keys to links in a doubly linked list. 51 | # The circular doubly linked list starts and ends with a sentinel element. 52 | # The sentinel element never gets deleted (this simplifies the algorithm). 53 | # Each link is stored as a list of length three: [PREV, NEXT, KEY]. 54 | 55 | def __init__(self, *args, **kwds): 56 | '''Initialize an ordered dictionary. Signature is the same as for 57 | regular dictionaries, but keyword arguments are not recommended 58 | because their insertion order is arbitrary. 
59 | 60 | ''' 61 | if len(args) > 1: 62 | raise TypeError('expected at most 1 arguments, got %d' % len(args)) 63 | try: 64 | self.__root 65 | except AttributeError: 66 | self.__root = root = [] # sentinel node 67 | root[:] = [root, root, None] 68 | self.__map = {} 69 | self.__update(*args, **kwds) 70 | 71 | def __setitem__(self, key, value, dict_setitem=dict.__setitem__): 72 | 'od.__setitem__(i, y) <==> od[i]=y' 73 | # Setting a new item creates a new link which goes at the end of the linked 74 | # list, and the inherited dictionary is updated with the new key/value pair. 75 | if key not in self: 76 | root = self.__root 77 | last = root[0] 78 | last[1] = root[0] = self.__map[key] = [last, root, key] 79 | dict_setitem(self, key, value) 80 | 81 | def __delitem__(self, key, dict_delitem=dict.__delitem__): 82 | 'od.__delitem__(y) <==> del od[y]' 83 | # Deleting an existing item uses self.__map to find the link which is 84 | # then removed by updating the links in the predecessor and successor nodes. 85 | dict_delitem(self, key) 86 | link_prev, link_next, key = self.__map.pop(key) 87 | link_prev[1] = link_next 88 | link_next[0] = link_prev 89 | 90 | def __iter__(self): 91 | 'od.__iter__() <==> iter(od)' 92 | root = self.__root 93 | curr = root[1] 94 | while curr is not root: 95 | yield curr[2] 96 | curr = curr[1] 97 | 98 | def __reversed__(self): 99 | 'od.__reversed__() <==> reversed(od)' 100 | root = self.__root 101 | curr = root[0] 102 | while curr is not root: 103 | yield curr[2] 104 | curr = curr[0] 105 | 106 | def clear(self): 107 | 'od.clear() -> None. Remove all items from od.' 108 | try: 109 | for node in self.__map.itervalues(): 110 | del node[:] 111 | root = self.__root 112 | root[:] = [root, root, None] 113 | self.__map.clear() 114 | except AttributeError: 115 | pass 116 | dict.clear(self) 117 | 118 | def popitem(self, last=True): 119 | '''od.popitem() -> (k, v), return and remove a (key, value) pair. 
120 | Pairs are returned in LIFO order if last is true or FIFO order if false. 121 | 122 | ''' 123 | if not self: 124 | raise KeyError('dictionary is empty') 125 | root = self.__root 126 | if last: 127 | link = root[0] 128 | link_prev = link[0] 129 | link_prev[1] = root 130 | root[0] = link_prev 131 | else: 132 | link = root[1] 133 | link_next = link[1] 134 | root[1] = link_next 135 | link_next[0] = root 136 | key = link[2] 137 | del self.__map[key] 138 | value = dict.pop(self, key) 139 | return key, value 140 | 141 | # -- the following methods do not depend on the internal structure -- 142 | 143 | def keys(self): 144 | 'od.keys() -> list of keys in od' 145 | return list(self) 146 | 147 | def values(self): 148 | 'od.values() -> list of values in od' 149 | return [self[key] for key in self] 150 | 151 | def items(self): 152 | 'od.items() -> list of (key, value) pairs in od' 153 | return [(key, self[key]) for key in self] 154 | 155 | def iterkeys(self): 156 | 'od.iterkeys() -> an iterator over the keys in od' 157 | return iter(self) 158 | 159 | def itervalues(self): 160 | 'od.itervalues -> an iterator over the values in od' 161 | for k in self: 162 | yield self[k] 163 | 164 | def iteritems(self): 165 | 'od.iteritems -> an iterator over the (key, value) items in od' 166 | for k in self: 167 | yield (k, self[k]) 168 | 169 | # Suppress 'OrderedDict.update: Method has no argument': 170 | # pylint: disable=E0211 171 | def update(*args, **kwds): 172 | '''od.update(E, **F) -> None. Update od from dict/iterable E and F. 
173 | 174 | If E is a dict instance, does: for k in E: od[k] = E[k] 175 | If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] 176 | Or if E is an iterable of items, does: for k, v in E: od[k] = v 177 | In either case, this is followed by: for k, v in F.items(): od[k] = v 178 | 179 | ''' 180 | if len(args) > 2: 181 | raise TypeError('update() takes at most 2 positional ' 182 | 'arguments (%d given)' % (len(args),)) 183 | elif not args: 184 | raise TypeError('update() takes at least 1 argument (0 given)') 185 | self = args[0] 186 | # Make progressively weaker assumptions about "other" 187 | other = () 188 | if len(args) == 2: 189 | other = args[1] 190 | if isinstance(other, dict): 191 | for key in other: 192 | self[key] = other[key] 193 | elif hasattr(other, 'keys'): 194 | for key in other.keys(): 195 | self[key] = other[key] 196 | else: 197 | for key, value in other: 198 | self[key] = value 199 | for key, value in kwds.items(): 200 | self[key] = value 201 | 202 | __update = update # let subclasses override update without breaking __init__ 203 | 204 | __marker = object() 205 | 206 | def pop(self, key, default=__marker): 207 | '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. 208 | If key is not found, d is returned if given, otherwise KeyError is raised. 209 | 210 | ''' 211 | if key in self: 212 | result = self[key] 213 | del self[key] 214 | return result 215 | if default is self.__marker: 216 | raise KeyError(key) 217 | return default 218 | 219 | def setdefault(self, key, default=None): 220 | 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' 221 | if key in self: 222 | return self[key] 223 | self[key] = default 224 | return default 225 | 226 | def __repr__(self, _repr_running={}): 227 | 'od.__repr__() <==> repr(od)' 228 | call_key = id(self), _get_ident() 229 | if call_key in _repr_running: 230 | return '...' 
231 | _repr_running[call_key] = 1 232 | try: 233 | if not self: 234 | return '%s()' % (self.__class__.__name__,) 235 | return '%s(%r)' % (self.__class__.__name__, self.items()) 236 | finally: 237 | del _repr_running[call_key] 238 | 239 | def __reduce__(self): 240 | 'Return state information for pickling' 241 | items = [[k, self[k]] for k in self] 242 | inst_dict = vars(self).copy() 243 | for k in vars(OrderedDict()): 244 | inst_dict.pop(k, None) 245 | if inst_dict: 246 | return (self.__class__, (items,), inst_dict) 247 | return self.__class__, (items,) 248 | 249 | def copy(self): 250 | 'od.copy() -> a shallow copy of od' 251 | return self.__class__(self) 252 | 253 | @classmethod 254 | def fromkeys(cls, iterable, value=None): 255 | '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S 256 | and values equal to v (which defaults to None). 257 | 258 | ''' 259 | d = cls() 260 | for key in iterable: 261 | d[key] = value 262 | return d 263 | 264 | def __eq__(self, other): 265 | '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive 266 | while comparison to a regular mapping is order-insensitive. 
class Error(Exception):
  """Raised when deepcopy meets a type it does not support."""
  pass

__all__ = ["Error", "deepcopy"]

def deepcopy(x):
  """Deep copy operation on gyp objects such as strings, ints, dicts
  and lists. More than twice as fast as copy.deepcopy but much less
  generic."""

  try:
    return _deepcopy_dispatch[type(x)](x)
  except KeyError:
    # BUG FIX: the original used "'...' + '...' % type(x)"; '%' binds
    # tighter than '+', so the format applied only to the second literal
    # (which has no '%s') and raising this error itself raised TypeError.
    # Implicit string-literal concatenation applies '%' to the whole text.
    raise Error('Unsupported type %s for deepcopy. Use copy.deepcopy '
                'or expand simple_copy support.' % type(x))

_deepcopy_dispatch = d = {}

def _deepcopy_atomic(x):
  # Immutable values are shared, not copied.
  return x

# NOTE: `long` and `unicode` are Python-2-only builtins; this module
# predates Python 3 support.
for x in (type(None), int, long, float,
          bool, str, unicode, type):
  d[x] = _deepcopy_atomic

def _deepcopy_list(x):
  return [deepcopy(a) for a in x]
d[list] = _deepcopy_list

def _deepcopy_dict(x):
  # Copies both keys and values (keys may be any supported type).
  y = {}
  for key, value in x.iteritems():
    y[deepcopy(key)] = deepcopy(value)
  return y
d[dict] = _deepcopy_dict

# Drop the short alias; the dispatch table stays as _deepcopy_dispatch.
del d
def _Replacement_write_data(writer, data, is_attrib=False):
  """Writes datachars to writer.

  Restored from HTML-mangled source: the entity-escape string literals had
  been stripped to no-ops (e.g. replace("&", "&")); the real replacements
  escape XML metacharacters and, for attributes, encode CR/LF/TAB as
  character references so they survive a parse/serialize round trip.
  """
  data = data.replace("&", "&amp;").replace("<", "&lt;")
  data = data.replace("\"", "&quot;").replace(">", "&gt;")
  if is_attrib:
    data = data.replace(
        "\r", "&#xD;").replace(
        "\n", "&#xA;").replace(
        "\t", "&#x9;")
  writer.write(data)


def _Replacement_writexml(self, writer, indent="", addindent="", newl=""):
  """Element.writexml replacement routing text through
  _Replacement_write_data so attribute whitespace is preserved."""
  # indent = current indentation
  # addindent = indentation to add to higher levels
  # newl = newline string
  writer.write(indent+"<" + self.tagName)

  attrs = self._get_attributes()
  # sorted() gives deterministic attribute order and, unlike list.sort(),
  # also works when keys() returns a view rather than a list.
  for a_name in sorted(attrs.keys()):
    writer.write(" %s=\"" % a_name)
    _Replacement_write_data(writer, attrs[a_name].value, is_attrib=True)
    writer.write("\"")
  if self.childNodes:
    writer.write(">%s" % newl)
    for node in self.childNodes:
      node.writexml(writer, indent + addindent, addindent, newl)
    # Restored close tag: the mangled source had "%s%s" with three
    # arguments, which raises TypeError; "</%s>" was eaten by the dump.
    writer.write("%s</%s>%s" % (indent, self.tagName, newl))
  else:
    writer.write("/>%s" % newl)


class XmlFix(object):
  """Object to manage temporary patching of xml.dom.minidom."""

  def __init__(self):
    # Preserve current xml.dom.minidom functions.
    self.write_data = xml.dom.minidom._write_data
    self.writexml = xml.dom.minidom.Element.writexml
    # Inject replacement versions of a function and a method.
    xml.dom.minidom._write_data = _Replacement_write_data
    xml.dom.minidom.Element.writexml = _Replacement_writexml

  def Cleanup(self):
    """Restore the saved functions; safe to call more than once."""
    if self.write_data:
      xml.dom.minidom._write_data = self.write_data
      xml.dom.minidom.Element.writexml = self.writexml
      self.write_data = None

  def __del__(self):
    self.Cleanup()
def Main(argv):
  """Copies the sample .gyp files between this directory and a Chrome tree.

  argv: [prog, 'push'|'pull', path_to_chrome].  'push' copies the local
  copies into the Chrome tree; 'pull' copies the tree's files here.
  Returns a process exit code (0 on success, 1 on usage error).
  """
  if len(argv) != 3 or argv[1] not in ['push', 'pull']:
    print 'Usage: %s push/pull PATH_TO_CHROME' % argv[0]
    return 1

  path_to_chrome = argv[2]

  for g in gyps:
    chrome_file = os.path.join(path_to_chrome, g)
    # The local copy keeps only the basename, stored next to this script.
    local_file = os.path.join(os.path.dirname(argv[0]), os.path.split(g)[1])
    if argv[1] == 'push':
      print 'Copying %s to %s' % (local_file, chrome_file)
      shutil.copyfile(local_file, chrome_file)
    elif argv[1] == 'pull':
      print 'Copying %s to %s' % (chrome_file, local_file)
      shutil.copyfile(chrome_file, local_file)
    else:
      assert False  # Unreachable: argv[1] was validated above.

  return 0
The key/value pairs are used to resolve vsprops names.
-------------------------------------------------------------------------------- /gyp/tools/Xcode/Specifications/gyp.pbfilespec: -------------------------------------------------------------------------------- 1 | /* 2 | gyp.pbfilespec 3 | GYP source file spec for Xcode 3 4 | 5 | There is not much documentation available regarding the format 6 | of .pbfilespec files. As a starting point, see for instance the 7 | outdated documentation at: 8 | http://maxao.free.fr/xcode-plugin-interface/specifications.html 9 | and the files in: 10 | /Developer/Library/PrivateFrameworks/XcodeEdit.framework/Versions/A/Resources/ 11 | 12 | Place this file in directory: 13 | ~/Library/Application Support/Developer/Shared/Xcode/Specifications/ 14 | */ 15 | 16 | ( 17 | { 18 | Identifier = sourcecode.gyp; 19 | BasedOn = sourcecode; 20 | Name = "GYP Files"; 21 | Extensions = ("gyp", "gypi"); 22 | MIMETypes = ("text/gyp"); 23 | Language = "xcode.lang.gyp"; 24 | IsTextFile = YES; 25 | IsSourceFile = YES; 26 | } 27 | ) 28 | -------------------------------------------------------------------------------- /gyp/tools/Xcode/Specifications/gyp.xclangspec: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) 2011 Google Inc. All rights reserved. 3 | Use of this source code is governed by a BSD-style license that can be 4 | found in the LICENSE file. 5 | 6 | gyp.xclangspec 7 | GYP language specification for Xcode 3 8 | 9 | There is not much documentation available regarding the format 10 | of .xclangspec files. 
As a starting point, see for instance the 11 | outdated documentation at: 12 | http://maxao.free.fr/xcode-plugin-interface/specifications.html 13 | and the files in: 14 | /Developer/Library/PrivateFrameworks/XcodeEdit.framework/Versions/A/Resources/ 15 | 16 | Place this file in directory: 17 | ~/Library/Application Support/Developer/Shared/Xcode/Specifications/ 18 | */ 19 | 20 | ( 21 | 22 | { 23 | Identifier = "xcode.lang.gyp.keyword"; 24 | Syntax = { 25 | Words = ( 26 | "and", 27 | "or", 28 | " "%s"' % (src, dst) 82 | 83 | print '}' 84 | 85 | 86 | def main(): 87 | if len(sys.argv) < 2: 88 | print >>sys.stderr, __doc__ 89 | print >>sys.stderr 90 | print >>sys.stderr, 'usage: %s target1 target2...' % (sys.argv[0]) 91 | return 1 92 | 93 | edges = LoadEdges('dump.json', sys.argv[1:]) 94 | 95 | WriteGraph(edges) 96 | return 0 97 | 98 | 99 | if __name__ == '__main__': 100 | sys.exit(main()) 101 | -------------------------------------------------------------------------------- /gyp/tools/pretty_gyp.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # Copyright (c) 2012 Google Inc. All rights reserved. 4 | # Use of this source code is governed by a BSD-style license that can be 5 | # found in the LICENSE file. 6 | 7 | """Pretty-prints the contents of a GYP file.""" 8 | 9 | import sys 10 | import re 11 | 12 | 13 | # Regex to remove comments when we're counting braces. 14 | COMMENT_RE = re.compile(r'\s*#.*') 15 | 16 | # Regex to remove quoted strings when we're counting braces. 17 | # It takes into account quoted quotes, and makes sure that the quotes match. 18 | # NOTE: It does not handle quotes that span more than one line, or 19 | # cases where an escaped quote is preceeded by an escaped backslash. 20 | QUOTE_RE_STR = r'(?P[\'"])(.*?)(? 
0: 104 | after = True 105 | 106 | # This catches the special case of a closing brace having something 107 | # other than just whitespace ahead of it -- we don't want to 108 | # unindent that until after this line is printed so it stays with 109 | # the previous indentation level. 110 | if cnt < 0 and closing_prefix_re.match(stripline): 111 | after = True 112 | return (cnt, after) 113 | 114 | 115 | def prettyprint_input(lines): 116 | """Does the main work of indenting the input based on the brace counts.""" 117 | indent = 0 118 | basic_offset = 2 119 | last_line = "" 120 | for line in lines: 121 | if COMMENT_RE.match(line): 122 | print line 123 | else: 124 | line = line.strip('\r\n\t ') # Otherwise doesn't strip \r on Unix. 125 | if len(line) > 0: 126 | (brace_diff, after) = count_braces(line) 127 | if brace_diff != 0: 128 | if after: 129 | print " " * (basic_offset * indent) + line 130 | indent += brace_diff 131 | else: 132 | indent += brace_diff 133 | print " " * (basic_offset * indent) + line 134 | else: 135 | print " " * (basic_offset * indent) + line 136 | else: 137 | print "" 138 | last_line = line 139 | 140 | 141 | def main(): 142 | if len(sys.argv) > 1: 143 | data = open(sys.argv[1]).read().splitlines() 144 | else: 145 | data = sys.stdin.read().splitlines() 146 | # Split up the double braces. 147 | lines = split_double_braces(data) 148 | 149 | # Indent and print the output. 150 | prettyprint_input(lines) 151 | return 0 152 | 153 | 154 | if __name__ == '__main__': 155 | sys.exit(main()) 156 | -------------------------------------------------------------------------------- /gyp/tools/pretty_sln.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # Copyright (c) 2012 Google Inc. All rights reserved. 4 | # Use of this source code is governed by a BSD-style license that can be 5 | # found in the LICENSE file. 6 | 7 | """Prints the information in a sln file in a diffable way. 
8 | 9 | It first outputs each projects in alphabetical order with their 10 | dependencies. 11 | 12 | Then it outputs a possible build order. 13 | """ 14 | 15 | __author__ = 'nsylvain (Nicolas Sylvain)' 16 | 17 | import os 18 | import re 19 | import sys 20 | import pretty_vcproj 21 | 22 | def BuildProject(project, built, projects, deps): 23 | # if all dependencies are done, we can build it, otherwise we try to build the 24 | # dependency. 25 | # This is not infinite-recursion proof. 26 | for dep in deps[project]: 27 | if dep not in built: 28 | BuildProject(dep, built, projects, deps) 29 | print project 30 | built.append(project) 31 | 32 | def ParseSolution(solution_file): 33 | # All projects, their clsid and paths. 34 | projects = dict() 35 | 36 | # A list of dependencies associated with a project. 37 | dependencies = dict() 38 | 39 | # Regular expressions that matches the SLN format. 40 | # The first line of a project definition. 41 | begin_project = re.compile(r'^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942' 42 | r'}"\) = "(.*)", "(.*)", "(.*)"$') 43 | # The last line of a project definition. 44 | end_project = re.compile('^EndProject$') 45 | # The first line of a dependency list. 46 | begin_dep = re.compile( 47 | r'ProjectSection\(ProjectDependencies\) = postProject$') 48 | # The last line of a dependency list. 49 | end_dep = re.compile('EndProjectSection$') 50 | # A line describing a dependency. 51 | dep_line = re.compile(' *({.*}) = ({.*})$') 52 | 53 | in_deps = False 54 | solution = open(solution_file) 55 | for line in solution: 56 | results = begin_project.search(line) 57 | if results: 58 | # Hack to remove icu because the diff is too different. 59 | if results.group(1).find('icu') != -1: 60 | continue 61 | # We remove "_gyp" from the names because it helps to diff them. 
62 | current_project = results.group(1).replace('_gyp', '') 63 | projects[current_project] = [results.group(2).replace('_gyp', ''), 64 | results.group(3), 65 | results.group(2)] 66 | dependencies[current_project] = [] 67 | continue 68 | 69 | results = end_project.search(line) 70 | if results: 71 | current_project = None 72 | continue 73 | 74 | results = begin_dep.search(line) 75 | if results: 76 | in_deps = True 77 | continue 78 | 79 | results = end_dep.search(line) 80 | if results: 81 | in_deps = False 82 | continue 83 | 84 | results = dep_line.search(line) 85 | if results and in_deps and current_project: 86 | dependencies[current_project].append(results.group(1)) 87 | continue 88 | 89 | # Change all dependencies clsid to name instead. 90 | for project in dependencies: 91 | # For each dependencies in this project 92 | new_dep_array = [] 93 | for dep in dependencies[project]: 94 | # Look for the project name matching this cldis 95 | for project_info in projects: 96 | if projects[project_info][1] == dep: 97 | new_dep_array.append(project_info) 98 | dependencies[project] = sorted(new_dep_array) 99 | 100 | return (projects, dependencies) 101 | 102 | def PrintDependencies(projects, deps): 103 | print "---------------------------------------" 104 | print "Dependencies for all projects" 105 | print "---------------------------------------" 106 | print "-- --" 107 | 108 | for (project, dep_list) in sorted(deps.items()): 109 | print "Project : %s" % project 110 | print "Path : %s" % projects[project][0] 111 | if dep_list: 112 | for dep in dep_list: 113 | print " - %s" % dep 114 | print "" 115 | 116 | print "-- --" 117 | 118 | def PrintBuildOrder(projects, deps): 119 | print "---------------------------------------" 120 | print "Build order " 121 | print "---------------------------------------" 122 | print "-- --" 123 | 124 | built = [] 125 | for (project, _) in sorted(deps.items()): 126 | if project not in built: 127 | BuildProject(project, built, projects, deps) 128 | 
129 | print "-- --" 130 | 131 | def PrintVCProj(projects): 132 | 133 | for project in projects: 134 | print "-------------------------------------" 135 | print "-------------------------------------" 136 | print project 137 | print project 138 | print project 139 | print "-------------------------------------" 140 | print "-------------------------------------" 141 | 142 | project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]), 143 | projects[project][2])) 144 | 145 | pretty = pretty_vcproj 146 | argv = [ '', 147 | project_path, 148 | '$(SolutionDir)=%s\\' % os.path.dirname(sys.argv[1]), 149 | ] 150 | argv.extend(sys.argv[3:]) 151 | pretty.main(argv) 152 | 153 | def main(): 154 | # check if we have exactly 1 parameter. 155 | if len(sys.argv) < 2: 156 | print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0] 157 | return 1 158 | 159 | (projects, deps) = ParseSolution(sys.argv[1]) 160 | PrintDependencies(projects, deps) 161 | PrintBuildOrder(projects, deps) 162 | 163 | if '--recursive' in sys.argv: 164 | PrintVCProj(projects) 165 | return 0 166 | 167 | 168 | if __name__ == '__main__': 169 | sys.exit(main()) 170 | -------------------------------------------------------------------------------- /gyp/tools/pretty_vcproj.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # Copyright (c) 2012 Google Inc. All rights reserved. 4 | # Use of this source code is governed by a BSD-style license that can be 5 | # found in the LICENSE file. 6 | 7 | """Make the format of a vcproj really pretty. 8 | 9 | This script normalize and sort an xml. It also fetches all the properties 10 | inside linked vsprops and include them explicitly in the vcproj. 11 | 12 | It outputs the resulting xml to stdout. 
13 | """ 14 | 15 | __author__ = 'nsylvain (Nicolas Sylvain)' 16 | 17 | import os 18 | import sys 19 | 20 | from xml.dom.minidom import parse 21 | from xml.dom.minidom import Node 22 | 23 | REPLACEMENTS = dict() 24 | ARGUMENTS = None 25 | 26 | 27 | class CmpTuple(object): 28 | """Compare function between 2 tuple.""" 29 | def __call__(self, x, y): 30 | return cmp(x[0], y[0]) 31 | 32 | 33 | class CmpNode(object): 34 | """Compare function between 2 xml nodes.""" 35 | 36 | def __call__(self, x, y): 37 | def get_string(node): 38 | node_string = "node" 39 | node_string += node.nodeName 40 | if node.nodeValue: 41 | node_string += node.nodeValue 42 | 43 | if node.attributes: 44 | # We first sort by name, if present. 45 | node_string += node.getAttribute("Name") 46 | 47 | all_nodes = [] 48 | for (name, value) in node.attributes.items(): 49 | all_nodes.append((name, value)) 50 | 51 | all_nodes.sort(CmpTuple()) 52 | for (name, value) in all_nodes: 53 | node_string += name 54 | node_string += value 55 | 56 | return node_string 57 | 58 | return cmp(get_string(x), get_string(y)) 59 | 60 | 61 | def PrettyPrintNode(node, indent=0): 62 | if node.nodeType == Node.TEXT_NODE: 63 | if node.data.strip(): 64 | print '%s%s' % (' '*indent, node.data.strip()) 65 | return 66 | 67 | if node.childNodes: 68 | node.normalize() 69 | # Get the number of attributes 70 | attr_count = 0 71 | if node.attributes: 72 | attr_count = node.attributes.length 73 | 74 | # Print the main tag 75 | if attr_count == 0: 76 | print '%s<%s>' % (' '*indent, node.nodeName) 77 | else: 78 | print '%s<%s' % (' '*indent, node.nodeName) 79 | 80 | all_attributes = [] 81 | for (name, value) in node.attributes.items(): 82 | all_attributes.append((name, value)) 83 | all_attributes.sort(CmpTuple()) 84 | for (name, value) in all_attributes: 85 | print '%s %s="%s"' % (' '*indent, name, value) 86 | print '%s>' % (' '*indent) 87 | if node.nodeValue: 88 | print '%s %s' % (' '*indent, node.nodeValue) 89 | 90 | for sub_node in 
node.childNodes: 91 | PrettyPrintNode(sub_node, indent=indent+2) 92 | print '%s' % (' '*indent, node.nodeName) 93 | 94 | 95 | def FlattenFilter(node): 96 | """Returns a list of all the node and sub nodes.""" 97 | node_list = [] 98 | 99 | if (node.attributes and 100 | node.getAttribute('Name') == '_excluded_files'): 101 | # We don't add the "_excluded_files" filter. 102 | return [] 103 | 104 | for current in node.childNodes: 105 | if current.nodeName == 'Filter': 106 | node_list.extend(FlattenFilter(current)) 107 | else: 108 | node_list.append(current) 109 | 110 | return node_list 111 | 112 | 113 | def FixFilenames(filenames, current_directory): 114 | new_list = [] 115 | for filename in filenames: 116 | if filename: 117 | for key in REPLACEMENTS: 118 | filename = filename.replace(key, REPLACEMENTS[key]) 119 | os.chdir(current_directory) 120 | filename = filename.strip('"\' ') 121 | if filename.startswith('$'): 122 | new_list.append(filename) 123 | else: 124 | new_list.append(os.path.abspath(filename)) 125 | return new_list 126 | 127 | 128 | def AbsoluteNode(node): 129 | """Makes all the properties we know about in this node absolute.""" 130 | if node.attributes: 131 | for (name, value) in node.attributes.items(): 132 | if name in ['InheritedPropertySheets', 'RelativePath', 133 | 'AdditionalIncludeDirectories', 134 | 'IntermediateDirectory', 'OutputDirectory', 135 | 'AdditionalLibraryDirectories']: 136 | # We want to fix up these paths 137 | path_list = value.split(';') 138 | new_list = FixFilenames(path_list, os.path.dirname(ARGUMENTS[1])) 139 | node.setAttribute(name, ';'.join(new_list)) 140 | if not value: 141 | node.removeAttribute(name) 142 | 143 | 144 | def CleanupVcproj(node): 145 | """For each sub node, we call recursively this function.""" 146 | for sub_node in node.childNodes: 147 | AbsoluteNode(sub_node) 148 | CleanupVcproj(sub_node) 149 | 150 | # Normalize the node, and remove all extranous whitespaces. 
151 | for sub_node in node.childNodes: 152 | if sub_node.nodeType == Node.TEXT_NODE: 153 | sub_node.data = sub_node.data.replace("\r", "") 154 | sub_node.data = sub_node.data.replace("\n", "") 155 | sub_node.data = sub_node.data.rstrip() 156 | 157 | # Fix all the semicolon separated attributes to be sorted, and we also 158 | # remove the dups. 159 | if node.attributes: 160 | for (name, value) in node.attributes.items(): 161 | sorted_list = sorted(value.split(';')) 162 | unique_list = [] 163 | for i in sorted_list: 164 | if not unique_list.count(i): 165 | unique_list.append(i) 166 | node.setAttribute(name, ';'.join(unique_list)) 167 | if not value: 168 | node.removeAttribute(name) 169 | 170 | if node.childNodes: 171 | node.normalize() 172 | 173 | # For each node, take a copy, and remove it from the list. 174 | node_array = [] 175 | while node.childNodes and node.childNodes[0]: 176 | # Take a copy of the node and remove it from the list. 177 | current = node.childNodes[0] 178 | node.removeChild(current) 179 | 180 | # If the child is a filter, we want to append all its children 181 | # to this same list. 182 | if current.nodeName == 'Filter': 183 | node_array.extend(FlattenFilter(current)) 184 | else: 185 | node_array.append(current) 186 | 187 | 188 | # Sort the list. 189 | node_array.sort(CmpNode()) 190 | 191 | # Insert the nodes in the correct order. 192 | for new_node in node_array: 193 | # But don't append empty tool node. 194 | if new_node.nodeName == 'Tool': 195 | if new_node.attributes and new_node.attributes.length == 1: 196 | # This one was empty. 197 | continue 198 | if new_node.nodeName == 'UserMacro': 199 | continue 200 | node.appendChild(new_node) 201 | 202 | 203 | def GetConfiguationNodes(vcproj): 204 | #TODO(nsylvain): Find a better way to navigate the xml. 
205 | nodes = [] 206 | for node in vcproj.childNodes: 207 | if node.nodeName == "Configurations": 208 | for sub_node in node.childNodes: 209 | if sub_node.nodeName == "Configuration": 210 | nodes.append(sub_node) 211 | 212 | return nodes 213 | 214 | 215 | def GetChildrenVsprops(filename): 216 | dom = parse(filename) 217 | if dom.documentElement.attributes: 218 | vsprops = dom.documentElement.getAttribute('InheritedPropertySheets') 219 | return FixFilenames(vsprops.split(';'), os.path.dirname(filename)) 220 | return [] 221 | 222 | def SeekToNode(node1, child2): 223 | # A text node does not have properties. 224 | if child2.nodeType == Node.TEXT_NODE: 225 | return None 226 | 227 | # Get the name of the current node. 228 | current_name = child2.getAttribute("Name") 229 | if not current_name: 230 | # There is no name. We don't know how to merge. 231 | return None 232 | 233 | # Look through all the nodes to find a match. 234 | for sub_node in node1.childNodes: 235 | if sub_node.nodeName == child2.nodeName: 236 | name = sub_node.getAttribute("Name") 237 | if name == current_name: 238 | return sub_node 239 | 240 | # No match. We give up. 241 | return None 242 | 243 | 244 | def MergeAttributes(node1, node2): 245 | # No attributes to merge? 246 | if not node2.attributes: 247 | return 248 | 249 | for (name, value2) in node2.attributes.items(): 250 | # Don't merge the 'Name' attribute. 251 | if name == 'Name': 252 | continue 253 | value1 = node1.getAttribute(name) 254 | if value1: 255 | # The attribute exist in the main node. If it's equal, we leave it 256 | # untouched, otherwise we concatenate it. 257 | if value1 != value2: 258 | node1.setAttribute(name, ';'.join([value1, value2])) 259 | else: 260 | # The attribute does nto exist in the main node. We append this one. 261 | node1.setAttribute(name, value2) 262 | 263 | # If the attribute was a property sheet attributes, we remove it, since 264 | # they are useless. 
265 | if name == 'InheritedPropertySheets': 266 | node1.removeAttribute(name) 267 | 268 | 269 | def MergeProperties(node1, node2): 270 | MergeAttributes(node1, node2) 271 | for child2 in node2.childNodes: 272 | child1 = SeekToNode(node1, child2) 273 | if child1: 274 | MergeProperties(child1, child2) 275 | else: 276 | node1.appendChild(child2.cloneNode(True)) 277 | 278 | 279 | def main(argv): 280 | """Main function of this vcproj prettifier.""" 281 | global ARGUMENTS 282 | ARGUMENTS = argv 283 | 284 | # check if we have exactly 1 parameter. 285 | if len(argv) < 2: 286 | print ('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] ' 287 | '[key2=value2]' % argv[0]) 288 | return 1 289 | 290 | # Parse the keys 291 | for i in range(2, len(argv)): 292 | (key, value) = argv[i].split('=') 293 | REPLACEMENTS[key] = value 294 | 295 | # Open the vcproj and parse the xml. 296 | dom = parse(argv[1]) 297 | 298 | # First thing we need to do is find the Configuration Node and merge them 299 | # with the vsprops they include. 300 | for configuration_node in GetConfiguationNodes(dom.documentElement): 301 | # Get the property sheets associated with this configuration. 302 | vsprops = configuration_node.getAttribute('InheritedPropertySheets') 303 | 304 | # Fix the filenames to be absolute. 305 | vsprops_list = FixFilenames(vsprops.strip().split(';'), 306 | os.path.dirname(argv[1])) 307 | 308 | # Extend the list of vsprops with all vsprops contained in the current 309 | # vsprops. 310 | for current_vsprops in vsprops_list: 311 | vsprops_list.extend(GetChildrenVsprops(current_vsprops)) 312 | 313 | # Now that we have all the vsprops, we need to merge them. 314 | for current_vsprops in vsprops_list: 315 | MergeProperties(configuration_node, 316 | parse(current_vsprops).documentElement) 317 | 318 | # Now that everything is merged, we need to cleanup the xml. 
319 | CleanupVcproj(dom.documentElement) 320 | 321 | # Finally, we use the prett xml function to print the vcproj back to the 322 | # user. 323 | #print dom.toprettyxml(newl="\n") 324 | PrettyPrintNode(dom.documentElement) 325 | return 0 326 | 327 | 328 | if __name__ == '__main__': 329 | sys.exit(main(sys.argv)) 330 | -------------------------------------------------------------------------------- /lib/build.js: -------------------------------------------------------------------------------- 1 | // Copyright (c) the Contributors as noted in the AUTHORS file. 2 | // This file is part of node-ninja. 3 | // 4 | // This Source Code Form is subject to the terms of the Mozilla Public 5 | // License, v. 2.0. If a copy of the MPL was not distributed with this 6 | // file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | 8 | module.exports = exports = build 9 | 10 | /** 11 | * Module dependencies. 12 | */ 13 | 14 | var fs = require('graceful-fs') 15 | , rm = require('rimraf') 16 | , path = require('path') 17 | , glob = require('glob') 18 | , log = require('npmlog') 19 | , which = require('which') 20 | , mkdirp = require('mkdirp') 21 | , exec = require('child_process').exec 22 | , processRelease = require('./process-release') 23 | , win = process.platform == 'win32' 24 | 25 | exports.usage = 'Invokes `' + (win ? 'msbuild' : 'make') + '` and builds the module' 26 | 27 | function build (gyp, argv, callback) { 28 | var platformMake = 'make' 29 | if (process.platform === 'aix') { 30 | platformMake = 'gmake' 31 | } else if (process.platform.indexOf('bsd') !== -1) { 32 | platformMake = 'gmake' 33 | } 34 | var builddir = path.resolve(gyp.opts.builddir || 'build'); 35 | 36 | var release = processRelease(argv, gyp, process.version, process.release) 37 | , makeCommand = gyp.opts.make || process.env.MAKE || platformMake 38 | , command = win ? 
'msbuild' : makeCommand 39 | , configPath = path.resolve(builddir, 'config.gypi') 40 | , jobs = gyp.opts.jobs || process.env.JOBS 41 | , buildType 42 | , config 43 | , arch 44 | , nodeDir 45 | , copyDevLib 46 | 47 | loadConfigGypi() 48 | 49 | /** 50 | * Load the "config.gypi" file that was generated during "configure". 51 | */ 52 | 53 | function loadConfigGypi () { 54 | fs.readFile(configPath, 'utf8', function (err, data) { 55 | if (err) { 56 | if (err.code == 'ENOENT') { 57 | callback(new Error('Run `node-ninja configure` first!')) 58 | } else { 59 | callback(err) 60 | } 61 | return 62 | } 63 | config = JSON.parse(data.replace(/\#.+\n/, '')) 64 | 65 | // get the 'arch', 'buildType', and 'nodeDir' vars from the config 66 | buildType = config.target_defaults.default_configuration 67 | arch = config.variables.target_arch 68 | nodeDir = config.variables.nodedir 69 | copyDevLib = config.variables.copy_dev_lib == 'true' 70 | 71 | if ('debug' in gyp.opts) { 72 | buildType = gyp.opts.debug ? 'Debug' : 'Release' 73 | } 74 | if (!buildType) { 75 | buildType = 'Release' 76 | } 77 | 78 | log.verbose('build type', buildType) 79 | log.verbose('architecture', arch) 80 | log.verbose('node dev dir', nodeDir) 81 | 82 | if (win) { 83 | findSolutionFile() 84 | } else { 85 | doWhich() 86 | } 87 | }) 88 | } 89 | 90 | /** 91 | * On Windows, find the first build/*.sln file. 92 | */ 93 | 94 | function findSolutionFile () { 95 | glob('*.sln', function (err, files) { 96 | if (err) return callback(err) 97 | if (files.length === 0) { 98 | return callback(new Error('Could not find *.sln file. Did you run "configure"?')) 99 | } 100 | guessedSolution = files[0] 101 | log.verbose('found first Solution file', guessedSolution) 102 | doWhich() 103 | }) 104 | } 105 | 106 | /** 107 | * Uses node-which to locate the msbuild / make executable. 
108 | */ 109 | 110 | function doWhich () { 111 | // First make sure we have the build command in the PATH 112 | which(command, function (err, execPath) { 113 | if (err) { 114 | if (win && /not found/.test(err.message)) { 115 | // On windows and no 'msbuild' found. Let's guess where it is 116 | findMsbuild() 117 | } else { 118 | // Some other error or 'make' not found on Unix, report that to the user 119 | callback(err) 120 | } 121 | return 122 | } 123 | log.verbose('`which` succeeded for `' + command + '`', execPath) 124 | copyNodeLib() 125 | }) 126 | } 127 | 128 | /** 129 | * Search for the location of "msbuild.exe" file on Windows. 130 | */ 131 | 132 | function findMsbuild () { 133 | log.verbose('could not find "msbuild.exe" in PATH - finding location in registry') 134 | var notfoundErr = new Error('Can\'t find "msbuild.exe". Do you have Microsoft Visual Studio C++ 2008+ installed?') 135 | var cmd = 'reg query "HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions" /s' 136 | if (process.arch !== 'ia32') 137 | cmd += ' /reg:32' 138 | exec(cmd, function (err, stdout, stderr) { 139 | var reVers = /ToolsVersions\\([^\\]+)$/i 140 | , rePath = /\r\n[ \t]+MSBuildToolsPath[ \t]+REG_SZ[ \t]+([^\r]+)/i 141 | , msbuilds = [] 142 | , r 143 | , msbuildPath 144 | if (err) { 145 | return callback(notfoundErr) 146 | } 147 | stdout.split('\r\n\r\n').forEach(function(l) { 148 | if (!l) return 149 | l = l.trim() 150 | if (r = reVers.exec(l.substring(0, l.indexOf('\r\n')))) { 151 | var ver = parseFloat(r[1], 10) 152 | if (ver >= 3.5) { 153 | if (r = rePath.exec(l)) { 154 | msbuilds.push({ 155 | version: ver, 156 | path: r[1] 157 | }) 158 | } 159 | } 160 | } 161 | }) 162 | msbuilds.sort(function (x, y) { 163 | return (x.version < y.version ? 
-1 : 1) 164 | }) 165 | ;(function verifyMsbuild () { 166 | if (!msbuilds.length) return callback(notfoundErr) 167 | msbuildPath = path.resolve(msbuilds.pop().path, 'msbuild.exe') 168 | fs.stat(msbuildPath, function (err, stat) { 169 | if (err) { 170 | if (err.code == 'ENOENT') { 171 | if (msbuilds.length) { 172 | return verifyMsbuild() 173 | } else { 174 | callback(notfoundErr) 175 | } 176 | } else { 177 | callback(err) 178 | } 179 | return 180 | } 181 | command = msbuildPath 182 | copyNodeLib() 183 | }) 184 | })() 185 | }) 186 | } 187 | 188 | /** 189 | * Copies the node.lib file for the current target architecture into the 190 | * current proper dev dir location. 191 | */ 192 | 193 | function copyNodeLib () { 194 | if (!win || !copyDevLib) return doBuild() 195 | 196 | var buildDir = path.resolve(nodeDir, buildType) 197 | , archNodeLibPath = path.resolve(nodeDir, arch, release.name + '.lib') 198 | , buildNodeLibPath = path.resolve(buildDir, release.name + '.lib') 199 | 200 | mkdirp(buildDir, function (err, isNew) { 201 | if (err) return callback(err) 202 | log.verbose('"' + buildType + '" dir needed to be created?', isNew) 203 | var rs = fs.createReadStream(archNodeLibPath) 204 | , ws = fs.createWriteStream(buildNodeLibPath) 205 | log.verbose('copying "' + release.name + '.lib" for ' + arch, buildNodeLibPath) 206 | rs.pipe(ws) 207 | rs.on('error', callback) 208 | ws.on('error', callback) 209 | rs.on('end', doBuild) 210 | }) 211 | } 212 | 213 | /** 214 | * Actually spawn the process and compile the module. 
215 | */ 216 | 217 | function doBuild () { 218 | 219 | // Enable Verbose build 220 | var verbose = log.levels[log.level] <= log.levels.verbose 221 | if (!win && verbose) { 222 | argv.push('V=1') 223 | } 224 | if (win && !verbose) { 225 | argv.push('/clp:Verbosity=minimal') 226 | } 227 | 228 | if (win) { 229 | // Turn off the Microsoft logo on Windows 230 | argv.push('/nologo') 231 | } 232 | 233 | // Specify the build type, Release by default 234 | if (win) { 235 | var p = arch === 'x64' ? 'x64' : 'Win32' 236 | argv.push('/p:Configuration=' + buildType + ';Platform=' + p) 237 | if (jobs) { 238 | var j = parseInt(jobs, 10) 239 | if (!isNaN(j) && j > 0) { 240 | argv.push('/m:' + j) 241 | } else if (jobs.toUpperCase() === 'MAX') { 242 | argv.push('/m:' + require('os').cpus().length) 243 | } 244 | } else { 245 | argv.push('/m') 246 | } 247 | } else { 248 | argv.push('BUILDTYPE=' + buildType) 249 | if (jobs) { 250 | var j = parseInt(jobs, 10) 251 | if (!isNaN(j) && j > 0) { 252 | argv.push('--jobs') 253 | argv.push(j) 254 | } else if (jobs.toUpperCase() === 'MAX') { 255 | argv.push('--jobs') 256 | argv.push(require('os').cpus().length) 257 | } 258 | } 259 | } 260 | 261 | if (win) { 262 | // did the user specify their own .sln file? 263 | var hasSln = argv.some(function (arg) { 264 | return path.extname(arg) == '.sln' 265 | }) 266 | if (!hasSln) { 267 | argv.unshift(gyp.opts.solution || guessedSolution) 268 | } 269 | } 270 | 271 | var proc = gyp.spawn(command, argv) 272 | proc.on('exit', onExit) 273 | } 274 | 275 | /** 276 | * Invoked after the make/msbuild command exits. 
277 | */ 278 | 279 | function onExit (code, signal) { 280 | if (code !== 0) { 281 | return callback(new Error('`' + command + '` failed with exit code: ' + code)) 282 | } 283 | if (signal) { 284 | return callback(new Error('`' + command + '` got signal: ' + signal)) 285 | } 286 | callback() 287 | } 288 | 289 | } 290 | -------------------------------------------------------------------------------- /lib/clean.js: -------------------------------------------------------------------------------- 1 | // Copyright (c) the Contributors as noted in the AUTHORS file. 2 | // This file is part of node-ninja. 3 | // 4 | // This Source Code Form is subject to the terms of the Mozilla Public 5 | // License, v. 2.0. If a copy of the MPL was not distributed with this 6 | // file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | 8 | module.exports = exports = clean 9 | 10 | exports.usage = 'Removes the build directory and any generated build files' 11 | 12 | /** 13 | * Module dependencies. 14 | */ 15 | 16 | var rm = require('rimraf') 17 | var log = require('npmlog') 18 | var path = require('path') 19 | 20 | function clean (gyp, argv, callback) { 21 | var builddir = path.resolve(gyp.opts.builddir || 'build'); 22 | log.verbose('clean', 'removing "%s" directory', builddir) 23 | rm(builddir, callback) 24 | } 25 | -------------------------------------------------------------------------------- /lib/find-node-directory.js: -------------------------------------------------------------------------------- 1 | // Copyright (c) the Contributors as noted in the AUTHORS file. 2 | // This file is part of node-ninja. 3 | // 4 | // This Source Code Form is subject to the terms of the Mozilla Public 5 | // License, v. 2.0. If a copy of the MPL was not distributed with this 6 | // file, You can obtain one at http://mozilla.org/MPL/2.0/. 
7 | 8 | var path = require('path') 9 | , log = require('npmlog') 10 | 11 | function findNodeDirectory(scriptLocation, processObj) { 12 | // set dirname and process if not passed in 13 | // this facilitates regression tests 14 | if (scriptLocation === undefined) { 15 | scriptLocation = __dirname 16 | } 17 | if (processObj === undefined) { 18 | processObj = process 19 | } 20 | 21 | // Have a look to see what is above us, to try and work out where we are 22 | npm_parent_directory = path.join(scriptLocation, '../../../..') 23 | log.verbose('node-ninja root', 'npm_parent_directory is ' 24 | + path.basename(npm_parent_directory)) 25 | node_root_dir = "" 26 | 27 | log.verbose('node-ninja root', 'Finding node root directory') 28 | if (path.basename(npm_parent_directory) === 'deps') { 29 | // We are in a build directory where this script lives in 30 | // deps/npm/node_modules/node-ninja/lib 31 | node_root_dir = path.join(npm_parent_directory, '..') 32 | log.verbose('node-ninja root', 'in build directory, root = ' 33 | + node_root_dir) 34 | } else if (path.basename(npm_parent_directory) === 'node_modules') { 35 | // We are in a node install directory where this script lives in 36 | // lib/node_modules/npm/node_modules/node-ninja/lib or 37 | // node_modules/npm/node_modules/node-ninja/lib depending on the 38 | // platform 39 | if (processObj.platform === 'win32') { 40 | node_root_dir = path.join(npm_parent_directory, '..') 41 | } else { 42 | node_root_dir = path.join(npm_parent_directory, '../..') 43 | } 44 | log.verbose('node-ninja root', 'in install directory, root = ' 45 | + node_root_dir) 46 | } else { 47 | // We don't know where we are, try working it out from the location 48 | // of the node binary 49 | var node_dir = path.dirname(processObj.execPath) 50 | var directory_up = path.basename(node_dir) 51 | if (directory_up === 'bin') { 52 | node_root_dir = path.join(node_dir, '..') 53 | } else if (directory_up === 'Release' || directory_up === 'Debug') { 54 | // If we are 
a recently built node, and the directory structure 55 | // is that of a repository. If we are on Windows then we only need 56 | // to go one level up, everything else, two 57 | if (processObj.platform === 'win32') { 58 | node_root_dir = path.join(node_dir, '..') 59 | } else { 60 | node_root_dir = path.join(node_dir, '../..') 61 | } 62 | } 63 | // Else return the default blank, "". 64 | } 65 | return node_root_dir 66 | } 67 | 68 | module.exports = findNodeDirectory 69 | -------------------------------------------------------------------------------- /lib/list.js: -------------------------------------------------------------------------------- 1 | // Copyright (c) the Contributors as noted in the AUTHORS file. 2 | // This file is part of node-ninja. 3 | // 4 | // This Source Code Form is subject to the terms of the Mozilla Public 5 | // License, v. 2.0. If a copy of the MPL was not distributed with this 6 | // file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | 8 | module.exports = exports = list 9 | 10 | exports.usage = 'Prints a listing of the currently installed node development files' 11 | 12 | /** 13 | * Module dependencies. 
14 | */ 15 | 16 | var fs = require('graceful-fs') 17 | , path = require('path') 18 | , log = require('npmlog') 19 | 20 | function list (gyp, args, callback) { 21 | 22 | var devDir = gyp.devDir 23 | log.verbose('list', 'using node-ninja dir:', devDir) 24 | 25 | // readdir() the node-ninja dir 26 | fs.readdir(devDir, onreaddir) 27 | 28 | function onreaddir (err, versions) { 29 | if (err && err.code != 'ENOENT') { 30 | return callback(err) 31 | } 32 | if (Array.isArray(versions)) { 33 | versions = versions.filter(function (v) { return v != 'current' }) 34 | } else { 35 | versions = [] 36 | } 37 | callback(null, versions) 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /lib/node-ninja.js: -------------------------------------------------------------------------------- 1 | // Copyright (c) the Contributors as noted in the AUTHORS file. 2 | // This file is part of node-ninja. 3 | // 4 | // This Source Code Form is subject to the terms of the Mozilla Public 5 | // License, v. 2.0. If a copy of the MPL was not distributed with this 6 | // file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | 8 | /** 9 | * Module exports. 10 | */ 11 | 12 | module.exports = exports = gyp 13 | 14 | /** 15 | * Module dependencies. 16 | */ 17 | 18 | var fs = require('graceful-fs') 19 | , path = require('path') 20 | , nopt = require('nopt') 21 | , log = require('npmlog') 22 | , child_process = require('child_process') 23 | , EE = require('events').EventEmitter 24 | , inherits = require('util').inherits 25 | , commands = [ 26 | // Module build commands 27 | 'build' 28 | , 'clean' 29 | , 'configure' 30 | , 'rebuild' 31 | // Development Header File management commands 32 | , 'install' 33 | , 'list' 34 | , 'remove' 35 | ] 36 | , aliases = { 37 | 'ls': 'list' 38 | , 'rm': 'remove' 39 | } 40 | 41 | // Differentiate our logs from npm's 42 | log.heading = 'gyp' 43 | 44 | /** 45 | * The `gyp` function. 
46 | */ 47 | 48 | function gyp () { 49 | return new Gyp() 50 | } 51 | 52 | function Gyp () { 53 | var self = this 54 | 55 | // set the dir where node-ninja dev files get installed 56 | var homeDir = process.env.HOME || process.env.USERPROFILE 57 | if (!homeDir) { 58 | throw new Error( 59 | "node-ninja requires that the user's home directory is specified " + 60 | "in either of the environmental variables HOME or USERPROFILE" 61 | ); 62 | } 63 | this.devDir = path.resolve(homeDir, '.node-ninja') 64 | 65 | this.commands = {} 66 | 67 | commands.forEach(function (command) { 68 | self.commands[command] = function (argv, callback) { 69 | log.verbose('command', command, argv) 70 | return require('./' + command)(self, argv, callback) 71 | } 72 | }) 73 | } 74 | inherits(Gyp, EE) 75 | exports.Gyp = Gyp 76 | var proto = Gyp.prototype 77 | 78 | /** 79 | * Export the contents of the package.json. 80 | */ 81 | 82 | proto.package = require('../package') 83 | 84 | /** 85 | * nopt configuration definitions 86 | */ 87 | 88 | proto.configDefs = { 89 | help: Boolean // everywhere 90 | , arch: String // 'configure' 91 | , cafile: String // 'install' 92 | , debug: Boolean // 'build' 93 | , directory: String // bin 94 | , make: String // 'build' 95 | , msvs_version: String // 'configure' 96 | , ensure: Boolean // 'install' 97 | , solution: String // 'build' (windows only) 98 | , proxy: String // 'install' 99 | , nodedir: String // 'configure' 100 | , loglevel: String // everywhere 101 | , python: String // 'configure' 102 | , 'dist-url': String // 'install' 103 | , 'tarball': String // 'install' 104 | , jobs: String // 'build' 105 | , thin: String // 'configure' 106 | , builddir: String // everywhere 107 | } 108 | 109 | /** 110 | * nopt shorthands 111 | */ 112 | 113 | proto.shorthands = { 114 | release: '--no-debug' 115 | , C: '--directory' 116 | , debug: '--debug' 117 | , j: '--jobs' 118 | , silly: '--loglevel=silly' 119 | , verbose: '--loglevel=verbose' 120 | } 121 | 122 | /** 123 | * 
expose the command aliases for the bin file to use. 124 | */ 125 | 126 | proto.aliases = aliases 127 | 128 | /** 129 | * Parses the given argv array and sets the 'opts', 130 | * 'argv' and 'command' properties. 131 | */ 132 | 133 | proto.parseArgv = function parseOpts (argv) { 134 | this.opts = nopt(this.configDefs, this.shorthands, argv) 135 | this.argv = this.opts.argv.remain.slice() 136 | 137 | var commands = this.todo = [] 138 | 139 | // create a copy of the argv array with aliases mapped 140 | argv = this.argv.map(function (arg) { 141 | // is this an alias? 142 | if (arg in this.aliases) { 143 | arg = this.aliases[arg] 144 | } 145 | return arg 146 | }, this) 147 | 148 | // process the mapped args into "command" objects ("name" and "args" props) 149 | argv.slice().forEach(function (arg) { 150 | if (arg in this.commands) { 151 | var args = argv.splice(0, argv.indexOf(arg)) 152 | argv.shift() 153 | if (commands.length > 0) { 154 | commands[commands.length - 1].args = args 155 | } 156 | commands.push({ name: arg, args: [] }) 157 | } 158 | }, this) 159 | if (commands.length > 0) { 160 | commands[commands.length - 1].args = argv.splice(0) 161 | } 162 | 163 | // support for inheriting config env variables from npm 164 | var npm_config_prefix = 'npm_config_' 165 | Object.keys(process.env).forEach(function (name) { 166 | if (name.indexOf(npm_config_prefix) !== 0) return 167 | var val = process.env[name] 168 | if (name === npm_config_prefix + 'loglevel') { 169 | log.level = val 170 | } else { 171 | // add the user-defined options to the config 172 | name = name.substring(npm_config_prefix.length) 173 | // gyp@741b7f1 enters an infinite loop when it encounters 174 | // zero-length options so ensure those don't get through. 175 | if (name) this.opts[name] = val 176 | } 177 | }, this) 178 | 179 | if (this.opts.loglevel) { 180 | log.level = this.opts.loglevel 181 | } 182 | log.resume() 183 | } 184 | 185 | /** 186 | * Spawns a child process and emits a 'spawn' event. 
187 | */ 188 | 189 | proto.spawn = function spawn (command, args, opts) { 190 | if (!opts) opts = {} 191 | if (!opts.silent && !opts.stdio) { 192 | opts.stdio = [ 0, 1, 2 ] 193 | } 194 | var cp = child_process.spawn(command, args, opts) 195 | log.info('spawn', command) 196 | log.info('spawn args', args) 197 | return cp 198 | } 199 | 200 | /** 201 | * Returns the usage instructions 202 | */ 203 | 204 | proto.usage = function usage () { 205 | var str = [ 206 | '' 207 | , ' Usage: node-ninja [options]' 208 | , '' 209 | , ' where is one of:' 210 | , commands.map(function (c) { 211 | return ' - ' + c + ' - ' + require('./' + c).usage 212 | }).join('\n') 213 | , '' 214 | , 'node-ninja@' + this.version + ' ' + path.resolve(__dirname, '..') 215 | , 'node@' + process.versions.node 216 | ].join('\n') 217 | return str 218 | } 219 | 220 | /** 221 | * Version number getter. 222 | */ 223 | 224 | Object.defineProperty(proto, 'version', { 225 | get: function () { 226 | return this.package.version 227 | } 228 | , enumerable: true 229 | }) 230 | 231 | -------------------------------------------------------------------------------- /lib/process-release.js: -------------------------------------------------------------------------------- 1 | // Copyright (c) the Contributors as noted in the AUTHORS file. 2 | // This file is part of node-ninja. 3 | // 4 | // This Source Code Form is subject to the terms of the Mozilla Public 5 | // License, v. 2.0. If a copy of the MPL was not distributed with this 6 | // file, You can obtain one at http://mozilla.org/MPL/2.0/. 
var semver = require('semver')
  , url = require('url')
  , path = require('path')

// versions where -headers.tar.gz started shipping
  , headersTarballRange = '>= 3.0.0 || ~0.12.10 || ~0.10.42'
  , bitsre = /\/win-(x86|x64)\//
  , bitsreV3 = /\/win-(x86|ia32|x64)\// // io.js v3.x.x shipped with "ia32" but should
                                        // have been "x86"

// Captures all the logic required to determine download URLs, local directory and
// file names. Inputs come from command-line switches (--target, --dist-url),
// `process.version` and `process.release` where it exists.
function processRelease (argv, gyp, defaultVersion, defaultRelease) {
  var version = (semver.valid(argv[0]) && argv[0]) || gyp.opts.target || defaultVersion
    , versionSemver = semver.parse(version)
    , overrideDistUrl = gyp.opts['dist-url'] || gyp.opts.disturl
    , isDefaultVersion
    , isIojs
    , name
    , distBaseUrl
    , baseUrl
    , libUrl32
    , libUrl64
    , tarballUrl
    , canGetHeaders

  if (!versionSemver) {
    // not a valid semver string, nothing we can do
    return { version: version }
  }
  // flatten version into String
  version = versionSemver.version

  // defaultVersion should come from process.version so ought to be valid semver
  isDefaultVersion = version === semver.parse(defaultVersion).version

  // can't use process.release if we're using --target=x.y.z
  if (!isDefaultVersion)
    defaultRelease = null

  if (defaultRelease) {
    // v3 onward, has process.release
    name = defaultRelease.name.replace(/io\.js/, 'iojs') // remove the '.' for directory naming purposes
    isIojs = name === 'iojs'
  } else {
    // old node or alternative --target=
    // semver.satisfies() doesn't like prerelease tags so test major directly
    isIojs = versionSemver.major >= 1 && versionSemver.major < 4
    name = isIojs ? 'iojs' : 'node'
  }

  // check for the nvm.sh standard mirror env variables
  if (!overrideDistUrl) {
    if (isIojs && process.env.IOJS_ORG_MIRROR)
      overrideDistUrl = process.env.IOJS_ORG_MIRROR
    else if (process.env.NODEJS_ORG_MIRROR)
      overrideDistUrl = process.env.NODEJS_ORG_MIRROR
  }

  // strip any trailing slashes from an override so the '/vX.Y.Z/' suffix
  // below produces a clean URL
  if (overrideDistUrl)
    distBaseUrl = overrideDistUrl.replace(/\/+$/, '')
  else
    distBaseUrl = isIojs ? 'https://iojs.org/download/release' : 'https://nodejs.org/dist'
  distBaseUrl += '/v' + version + '/'

  // new style, based on process.release so we have a lot of the data we need
  if (defaultRelease && defaultRelease.headersUrl && !overrideDistUrl) {
    baseUrl = url.resolve(defaultRelease.headersUrl, './')
    libUrl32 = resolveLibUrl(name, defaultRelease.libUrl || baseUrl || distBaseUrl, 'x86', versionSemver.major)
    libUrl64 = resolveLibUrl(name, defaultRelease.libUrl || baseUrl || distBaseUrl, 'x64', versionSemver.major)

    return {
      version: version,
      semver: versionSemver,
      name: name,
      baseUrl: baseUrl,
      tarballUrl: defaultRelease.headersUrl,
      shasumsUrl: url.resolve(baseUrl, 'SHASUMS256.txt'),
      // io.js dirs get a name prefix ('iojs-1.8.4'); node dirs are bare
      versionDir: (name !== 'node' ? name + '-' : '') + version,
      libUrl32: libUrl32,
      libUrl64: libUrl64,
      libPath32: normalizePath(path.relative(url.parse(baseUrl).path, url.parse(libUrl32).path)),
      libPath64: normalizePath(path.relative(url.parse(baseUrl).path, url.parse(libUrl64).path))
    }
  }

  // older versions without process.release are captured here and we have to make
  // a lot of assumptions, additionally if you --target=x.y.z then we can't use the
  // current process.release

  baseUrl = distBaseUrl
  libUrl32 = resolveLibUrl(name, baseUrl, 'x86', versionSemver.major)
  libUrl64 = resolveLibUrl(name, baseUrl, 'x64', versionSemver.major)
  // making the bold assumption that anything with a version number >3.0.0 will
  // have a *-headers.tar.gz file in its dist location, even some frankenstein
  // custom version
  canGetHeaders = semver.satisfies(versionSemver, headersTarballRange)
  tarballUrl = url.resolve(baseUrl, name + '-v' + version + (canGetHeaders ? '-headers' : '') + '.tar.gz')

  return {
    version: version,
    semver: versionSemver,
    name: name,
    baseUrl: baseUrl,
    tarballUrl: tarballUrl,
    shasumsUrl: url.resolve(baseUrl, 'SHASUMS256.txt'),
    versionDir: (name !== 'node' ? name + '-' : '') + version,
    libUrl32: libUrl32,
    libUrl64: libUrl64,
    libPath32: normalizePath(path.relative(url.parse(baseUrl).path, url.parse(libUrl32).path)),
    libPath64: normalizePath(path.relative(url.parse(baseUrl).path, url.parse(libUrl64).path))
  }
}

// Normalize to forward slashes so paths are usable in gyp files on Windows too
function normalizePath (p) {
  return path.normalize(p).replace(/\\/g, '/')
}

// Work out the URL of the Windows import library (.lib) for the given arch.
// `defaultUrl` may already point at a .lib (just fix the arch segment) or be
// a base directory URL (append the conventional path for that node era).
function resolveLibUrl (name, defaultUrl, arch, versionMajor) {
  var base = url.resolve(defaultUrl, './')
    , hasLibUrl = bitsre.test(defaultUrl) || (versionMajor === 3 && bitsreV3.test(defaultUrl))

  if (!hasLibUrl) {
    // let's assume it's a baseUrl then
    if (versionMajor >= 1)
      return url.resolve(base, 'win-' + arch +'/' + name + '.lib')
    // prior to io.js@1.0.0 32-bit node.lib lives in /, 64-bit lives in /x64/
    return url.resolve(base, (arch === 'x64' ? 'x64/' : '') + name + '.lib')
  }

  // else we have a proper url to a .lib, just make sure it's the right arch
  return defaultUrl.replace(versionMajor === 3 ? bitsreV3 : bitsre, '/win-' + arch + '/')
}

module.exports = processRelease
7 | 8 | module.exports = exports = rebuild 9 | 10 | exports.usage = 'Runs "clean", "configure" and "build" all at once' 11 | 12 | function rebuild (gyp, argv, callback) { 13 | 14 | gyp.todo.push( 15 | { name: 'clean', args: [] } 16 | , { name: 'configure', args: argv } 17 | , { name: 'build', args: [] } 18 | ) 19 | process.nextTick(callback) 20 | } 21 | -------------------------------------------------------------------------------- /lib/remove.js: -------------------------------------------------------------------------------- 1 | // Copyright (c) the Contributors as noted in the AUTHORS file. 2 | // This file is part of node-ninja. 3 | // 4 | // This Source Code Form is subject to the terms of the Mozilla Public 5 | // License, v. 2.0. If a copy of the MPL was not distributed with this 6 | // file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | 8 | module.exports = exports = remove 9 | 10 | exports.usage = 'Removes the node development files for the specified version' 11 | 12 | /** 13 | * Module dependencies. 14 | */ 15 | 16 | var fs = require('fs') 17 | , rm = require('rimraf') 18 | , path = require('path') 19 | , log = require('npmlog') 20 | , semver = require('semver') 21 | 22 | function remove (gyp, argv, callback) { 23 | 24 | var devDir = gyp.devDir 25 | log.verbose('remove', 'using node-ninja dir:', devDir) 26 | 27 | // get the user-specified version to remove 28 | var version = argv[0] || gyp.opts.target 29 | log.verbose('remove', 'removing target version:', version) 30 | 31 | if (!version) { 32 | return callback(new Error('You must specify a version number to remove. 
Ex: "' + process.version + '"')) 33 | } 34 | 35 | var versionSemver = semver.parse(version) 36 | if (versionSemver) { 37 | // flatten the version Array into a String 38 | version = versionSemver.version 39 | } 40 | 41 | var versionPath = path.resolve(gyp.devDir, version) 42 | log.verbose('remove', 'removing development files for version:', version) 43 | 44 | // first check if its even installed 45 | fs.stat(versionPath, function (err, stat) { 46 | if (err) { 47 | if (err.code == 'ENOENT') { 48 | callback(null, 'version was already uninstalled: ' + version) 49 | } else { 50 | callback(err) 51 | } 52 | return 53 | } 54 | // Go ahead and delete the dir 55 | rm(versionPath, callback) 56 | }) 57 | 58 | } 59 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "node-ninja", 3 | "description": "Node.js native addon build tool", 4 | "license": "MPL-2.0", 5 | "keywords": [ 6 | "native", 7 | "addon", 8 | "module", 9 | "c", 10 | "c++", 11 | "bindings", 12 | "gyp" 13 | ], 14 | "version": "1.0.2", 15 | "installVersion": 0, 16 | "author": "Pieter Hintjens ", 17 | "repository": { 18 | "type": "git", 19 | "url": "git://github.com/codejockey/node-ninja.git" 20 | }, 21 | "preferGlobal": true, 22 | "bin": "./bin/node-ninja.js", 23 | "main": "./lib/node-ninja.js", 24 | "dependencies": { 25 | "fstream": "^1.0.0", 26 | "glob": "3 || 4 || 5 || 6 || 7", 27 | "graceful-fs": "^4.1.2", 28 | "minimatch": "3", 29 | "mkdirp": "^0.5.0", 30 | "nopt": "2 || 3", 31 | "npmlog": "0 || 1 || 2", 32 | "osenv": "0", 33 | "path-array": "^1.0.0", 34 | "request": "2", 35 | "rimraf": "2", 36 | "semver": "2.x || 3.x || 4 || 5", 37 | "tar": "^6.1.12", 38 | "which": "1" 39 | }, 40 | "engines": { 41 | "node": ">= 0.8.0" 42 | }, 43 | "devDependencies": { 44 | "tape": "~4.2.0" 45 | }, 46 | "scripts": { 47 | "test": "tape test/test-*" 48 | } 49 | } 50 | 
-------------------------------------------------------------------------------- /src/win_delay_load_hook.c: -------------------------------------------------------------------------------- 1 | /* 2 | * When this file is linked to a DLL, it sets up a delay-load hook that 3 | * intervenes when the DLL is trying to load 'node.exe' or 'iojs.exe' 4 | * dynamically. Instead of trying to locate the .exe file it'll just return 5 | * a handle to the process image. 6 | * 7 | * This allows compiled addons to work when node.exe or iojs.exe is renamed. 8 | */ 9 | 10 | #ifdef _MSC_VER 11 | 12 | #ifndef WIN32_LEAN_AND_MEAN 13 | #define WIN32_LEAN_AND_MEAN 14 | #endif 15 | 16 | #include 17 | 18 | #include 19 | #include 20 | 21 | static FARPROC WINAPI load_exe_hook(unsigned int event, DelayLoadInfo* info) { 22 | HMODULE m; 23 | if (event != dliNotePreLoadLibrary) 24 | return NULL; 25 | 26 | if (_stricmp(info->szDll, "iojs.exe") != 0 && 27 | _stricmp(info->szDll, "node.exe") != 0) 28 | return NULL; 29 | 30 | m = GetModuleHandle(NULL); 31 | return (FARPROC) m; 32 | } 33 | 34 | PfnDliHook __pfnDliNotifyHook2 = load_exe_hook; 35 | 36 | #endif 37 | -------------------------------------------------------------------------------- /test/docker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | #set -e 4 | 5 | test_node_versions="0.8.28 0.10.40 0.12.7" 6 | test_iojs_versions="1.8.4 2.4.0 3.3.0" 7 | 8 | __dirname="$(CDPATH= cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" 9 | dot_node_gyp=${__dirname}/.node-ninja/ 10 | 11 | # borrows from https://github.com/rvagg/dnt/ 12 | 13 | # Simple setup function for a container: 14 | # setup_container(image id, base image, commands to run to set up) 15 | setup_container() { 16 | local container_id="$1" 17 | local base_container="$2" 18 | local run_cmd="$3" 19 | 20 | # Does this image exist? If yes, ignore 21 | docker inspect "$container_id" &> /dev/null 22 | if [[ $? 
-eq 0 ]]; then 23 | echo "Found existing container [$container_id]" 24 | else 25 | # No such image, so make it 26 | echo "Did not find container [$container_id], creating..." 27 | docker run -i $base_container /bin/bash -c "$run_cmd" 28 | sleep 2 29 | docker commit $(docker ps -l -q) $container_id 30 | fi 31 | } 32 | 33 | # Run tests inside each of the versioned containers, copy cwd into npm's copy of node-ninja 34 | # so it'll be invoked by npm when a compile is needed 35 | # run_tests(version, test-commands) 36 | run_tests() { 37 | local version="$1" 38 | local run_cmd="$2" 39 | 40 | run_cmd="rsync -aAXx --delete --exclude .git --exclude build /node-ninja-src/ /usr/lib/node_modules/npm/node_modules/node-ninja/; 41 | /bin/su -s /bin/bash node-ninja -c 'cd && ${run_cmd}'" 42 | 43 | rm -rf $dot_node_gyp 44 | 45 | docker run \ 46 | --rm -i \ 47 | -v ~/.npm/:/node-ninja/.npm/ \ 48 | -v ${dot_node_gyp}:/node-ninja/.node-ninja/ \ 49 | -v $(pwd):/node-ninja-src/:ro \ 50 | node-ninja-test/${version} /bin/bash -c "${run_cmd}" 51 | } 52 | 53 | # A base image with build tools and a user account 54 | setup_container "node-ninja-test/base" "ubuntu:14.04" " 55 | apt-get update && 56 | apt-get install -y build-essential python git rsync curl && 57 | adduser --gecos node-ninja --home /node-ninja/ --disabled-login node-ninja && 58 | echo "node-ninja:node-ninja" | chpasswd 59 | " 60 | 61 | # An image on top of the base containing clones of repos we want to use for testing 62 | setup_container "node-ninja-test/clones" "node-ninja-test/base" " 63 | cd /node-ninja/ && git clone https://github.com/justmoon/node-bignum.git && 64 | cd /node-ninja/ && git clone https://github.com/bnoordhuis/node-buffertools.git && 65 | chown -R node-ninja.node-ninja /node-ninja/ 66 | " 67 | 68 | # An image for each of the node versions we want to test with that version installed and the latest npm 69 | for v in $test_node_versions; do 70 | setup_container "node-ninja-test/${v}" "node-ninja-test/clones" " 
71 | curl -sL https://nodejs.org/dist/v${v}/node-v${v}-linux-x64.tar.gz | tar -zxv --strip-components=1 -C /usr/ && 72 | npm install npm@latest -g && 73 | node -v && npm -v 74 | " 75 | done 76 | 77 | # An image for each of the io.js versions we want to test with that version installed and the latest npm 78 | for v in $test_iojs_versions; do 79 | setup_container "node-ninja-test/${v}" "node-ninja-test/clones" " 80 | curl -sL https://iojs.org/dist/v${v}/iojs-v${v}-linux-x64.tar.gz | tar -zxv --strip-components=1 -C /usr/ && 81 | npm install npm@latest -g && 82 | node -v && npm -v 83 | " 84 | done 85 | 86 | # Run the tests for all of the test images we've created, 87 | # we should see node-ninja doing its download, configure and run thing 88 | # _NOTE: bignum doesn't compile on 0.8 currently so it'll fail for that version only_ 89 | for v in $test_node_versions $test_iojs_versions; do 90 | run_tests $v " 91 | cd node-buffertools && npm install --loglevel=info && npm test && cd 92 | " 93 | # removed for now, too noisy: cd node-bignum && npm install --loglevel=info && npm test 94 | done 95 | 96 | # Test use of --target=x.y.z to compile against alternate versions 97 | test_download_node_version() { 98 | local run_with_ver="$1" 99 | local expected_dir="$2" 100 | local expected_ver="$3" 101 | run_tests $run_with_ver "cd node-buffertools && npm install --loglevel=info --target=${expected_ver}" 102 | local node_ver=$(cat "${dot_node_gyp}${expected_dir}/node_version.h" | grep '#define NODE_\w*_VERSION [0-9]*$') 103 | node_ver=$(echo $node_ver | sed 's/#define NODE_[A-Z]*_VERSION //g' | sed 's/ /./g') 104 | if [ "X$(echo $node_ver)" != "X${expected_ver}" ]; then 105 | echo "Did not download v${expected_ver} using --target, instead got: $(echo $node_ver)" 106 | exit 1 107 | fi 108 | echo "Verified correct download of [v${node_ver}]" 109 | } 110 | 111 | test_download_node_version "0.12.7" "0.10.30/src" "0.10.30" 112 | test_download_node_version "3.3.0" "iojs-1.8.4/src" "1.8.4" 
113 | # should download the headers file 114 | test_download_node_version "3.3.0" "iojs-3.2.0/include/node" "3.2.0" 115 | 116 | # TODO: test --dist-url by starting up a localhost server and serving up tarballs 117 | 118 | # testing --dist-url, using simple-proxy.js to make localhost work as a distribution 119 | # point for tarballs 120 | # we can test whether it uses the proxy because after 2 connections the proxy will 121 | # die and therefore should not be running at the end of the test, `nc` can tell us this 122 | run_tests "3.3.0" " 123 | (node /node-ninja-src/test/simple-proxy.js 8080 /foobar/ https://iojs.org/dist/ &) && 124 | cd node-buffertools && 125 | /node-ninja-src/bin/node-ninja.js --loglevel=info --dist-url=http://localhost:8080/foobar/ rebuild && 126 | nc -z localhost 8080 && echo -e \"\\n\\n\\033[31mFAILED TO USE LOCAL PROXY\\033[39m\\n\\n\" 127 | " 128 | 129 | run_tests "3.3.0" " 130 | (node /node-ninja-src/test/simple-proxy.js 8080 /doobar/ https://iojs.org/dist/ &) && 131 | cd node-buffertools && 132 | NVM_IOJS_ORG_MIRROR=http://localhost:8080/doobar/ /node-ninja-src/bin/node-ninja.js --loglevel=info rebuild && 133 | nc -z localhost 8080 && echo -e \"\\n\\n\\033[31mFAILED TO USE LOCAL PROXY\\033[39m\\n\\n\" 134 | " 135 | 136 | run_tests "0.12.7" " 137 | (node /node-ninja-src/test/simple-proxy.js 8080 /boombar/ https://nodejs.org/dist/ &) && 138 | cd node-buffertools && 139 | NVM_NODEJS_ORG_MIRROR=http://localhost:8080/boombar/ /node-ninja-src/bin/node-ninja.js --loglevel=info rebuild && 140 | nc -z localhost 8080 && echo -e \"\\n\\n\\033[31mFAILED TO USE LOCAL PROXY\\033[39m\\n\\n\" 141 | " 142 | 143 | rm -rf $dot_node_gyp 144 | -------------------------------------------------------------------------------- /test/fixtures/ca-bundle.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDJjCCAg4CAhnOMA0GCSqGSIb3DQEBBQUAMH0xCzAJBgNVBAYTAlVTMQswCQYD 3 | 
VQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzEZMBcGA1UECgwQU3Ryb25n 4 | TG9vcCwgSW5jLjESMBAGA1UECwwJU3Ryb25nT3BzMRowGAYDVQQDDBFjYS5zdHJv 5 | bmdsb29wLmNvbTAeFw0xNTEyMDgyMzM1MzNaFw00MzA0MjQyMzM1MzNaMBkxFzAV 6 | BgNVBAMMDnN0cm9uZ2xvb3AuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB 7 | CgKCAQEAwOYI7OZ2FX/YjRgLZoDQlbPc5UZXU/j0e1wwiJNPtPEax9Y5Uoza0Pnt 8 | Ikzkc2SfvQ+IJrhXo385tI0W5juuqbHnE7UrjUuPjUX6NHevkxcs/flmjan5wnZM 9 | cPsGhH71WDuUEEflvZihf2Se2x+xgZtMhc5XGmVmRuZFYKvkgUhA2/w8/QrK+jPT 10 | n9QRJxZjWNh2RBdC1B7u4jffSmOSUljYFH1I2eTeY+Rdi6YUIYSU9gEoZxsv3Tia 11 | SomfMF5jt2Mouo6MzA+IhLvvFjcrcph1Qxgi9RkfdCMMd+Ipm9YWELkyG1bDRpQy 12 | 0iyHD4gvVsAqz1Y2KdRSdc3Kt+nTqwIDAQABoxkwFzAVBgNVHREEDjAMhwQAAAAA 13 | hwR/AAABMA0GCSqGSIb3DQEBBQUAA4IBAQAhy4J0hML3NgmDRHdL5/iTucBe22Mf 14 | jJjg2aifD1S187dHm+Il4qZNO2plWwAhN0h704f+8wpsaALxUvBIu6nvlvcMP5PH 15 | jGN5JLe2Km3UaPvYOQU2SgacLilu+uBcIo2JSHLV6O7ziqUj5Gior6YxDLCtEZie 16 | Ea8aX5/YjuACtEMJ1JjRqjgkM66XAoUe0E8onOK3FgTIO3tGoTJwRp0zS50pFuP0 17 | PsZtT04ck6mmXEXXknNoAyBCvPypfms9OHqcUIW9fiQnrGbS/Ri4QSQYj0DtFk/1 18 | na4fY1gf3zTHxH8259b/TOOaPfTnCEsOQtjUrWNR4xhmVZ+HJy4yytUW 19 | -----END CERTIFICATE----- 20 | -----BEGIN CERTIFICATE----- 21 | MIIDbzCCAlcCAmm6MA0GCSqGSIb3DQEBCwUAMH0xCzAJBgNVBAYTAlVTMQswCQYD 22 | VQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzEZMBcGA1UECgwQU3Ryb25n 23 | TG9vcCwgSW5jLjESMBAGA1UECwwJU3Ryb25nT3BzMRowGAYDVQQDDBFjYS5zdHJv 24 | bmdsb29wLmNvbTAeFw0xNTEyMDgyMzM1MzNaFw00MzA0MjQyMzM1MzNaMH0xCzAJ 25 | BgNVBAYTAlVTMQswCQYDVQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzEZ 26 | MBcGA1UECgwQU3Ryb25nTG9vcCwgSW5jLjESMBAGA1UECwwJU3Ryb25nT3BzMRow 27 | GAYDVQQDDBFjYS5zdHJvbmdsb29wLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEP 28 | ADCCAQoCggEBANfj86jkvvYDjHBgiqWhk9Cj+bqiMq3MqnV0CBO4iuK33Fo6XssE 29 | H+yVdXlIBFbFe6t655MdBVOR2Sfj7WqNh96vhu6PyDHiwcQlTaiLU6nhIed1J4Wv 30 | lvnJHFmp8Wbtx5AgLT4UYu03ftvXEl2DLi3vhSL2tRM1ebXHB/KPbRWkb25DPX0P 31 | foOHot3f2dgNe2x6kponf7E/QDmAu3s7Nlkfh+ryDhgGU7wocXEhXbprNqRqOGNo 32 | 
xbXgUI+/9XDxYT/7Gn5LF/fPjtN+aB0SKMnTsDhprVlZie83mlqJ46fOOrR+vrsQ 33 | mi/1m/TadrARtZoIExC/cQRdVM05EK4tUa8CAwEAATANBgkqhkiG9w0BAQsFAAOC 34 | AQEAQ7k5WhyhDTIGYCNzRnrMHWSzGqa1y4tJMW06wafJNRqTm1cthq1ibc6Hfq5a 35 | K10K0qMcgauRTfQ1MWrVCTW/KnJ1vkhiTOH+RvxapGn84gSaRmV6KZen0+gMsgae 36 | KEGe/3Hn+PmDVV+PTamHgPACfpTww38WHIe/7Ce9gHfG7MZ8cKHNZhDy0IAYPln+ 37 | YRwMLd7JNQffHAbWb2CE1mcea4H/12U8JZW5tHCF6y9V+7IuDzqwIrLKcW3lG17n 38 | VUG6ODF/Ryqn3V5X+TL91YyXi6c34y34IpC7MQDV/67U7+5Bp5CfeDPWW2wVSrW+ 39 | uGZtfEvhbNm6m2i4UNmpCXxUZQ== 40 | -----END CERTIFICATE----- 41 | -------------------------------------------------------------------------------- /test/fixtures/ca.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDbzCCAlcCAmm6MA0GCSqGSIb3DQEBCwUAMH0xCzAJBgNVBAYTAlVTMQswCQYD 3 | VQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzEZMBcGA1UECgwQU3Ryb25n 4 | TG9vcCwgSW5jLjESMBAGA1UECwwJU3Ryb25nT3BzMRowGAYDVQQDDBFjYS5zdHJv 5 | bmdsb29wLmNvbTAeFw0xNTEyMDgyMzM1MzNaFw00MzA0MjQyMzM1MzNaMH0xCzAJ 6 | BgNVBAYTAlVTMQswCQYDVQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzEZ 7 | MBcGA1UECgwQU3Ryb25nTG9vcCwgSW5jLjESMBAGA1UECwwJU3Ryb25nT3BzMRow 8 | GAYDVQQDDBFjYS5zdHJvbmdsb29wLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEP 9 | ADCCAQoCggEBANfj86jkvvYDjHBgiqWhk9Cj+bqiMq3MqnV0CBO4iuK33Fo6XssE 10 | H+yVdXlIBFbFe6t655MdBVOR2Sfj7WqNh96vhu6PyDHiwcQlTaiLU6nhIed1J4Wv 11 | lvnJHFmp8Wbtx5AgLT4UYu03ftvXEl2DLi3vhSL2tRM1ebXHB/KPbRWkb25DPX0P 12 | foOHot3f2dgNe2x6kponf7E/QDmAu3s7Nlkfh+ryDhgGU7wocXEhXbprNqRqOGNo 13 | xbXgUI+/9XDxYT/7Gn5LF/fPjtN+aB0SKMnTsDhprVlZie83mlqJ46fOOrR+vrsQ 14 | mi/1m/TadrARtZoIExC/cQRdVM05EK4tUa8CAwEAATANBgkqhkiG9w0BAQsFAAOC 15 | AQEAQ7k5WhyhDTIGYCNzRnrMHWSzGqa1y4tJMW06wafJNRqTm1cthq1ibc6Hfq5a 16 | K10K0qMcgauRTfQ1MWrVCTW/KnJ1vkhiTOH+RvxapGn84gSaRmV6KZen0+gMsgae 17 | KEGe/3Hn+PmDVV+PTamHgPACfpTww38WHIe/7Ce9gHfG7MZ8cKHNZhDy0IAYPln+ 18 | YRwMLd7JNQffHAbWb2CE1mcea4H/12U8JZW5tHCF6y9V+7IuDzqwIrLKcW3lG17n 19 | 
VUG6ODF/Ryqn3V5X+TL91YyXi6c34y34IpC7MQDV/67U7+5Bp5CfeDPWW2wVSrW+ 20 | uGZtfEvhbNm6m2i4UNmpCXxUZQ== 21 | -----END CERTIFICATE----- 22 | -------------------------------------------------------------------------------- /test/fixtures/server.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDJjCCAg4CAhnOMA0GCSqGSIb3DQEBBQUAMH0xCzAJBgNVBAYTAlVTMQswCQYD 3 | VQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzEZMBcGA1UECgwQU3Ryb25n 4 | TG9vcCwgSW5jLjESMBAGA1UECwwJU3Ryb25nT3BzMRowGAYDVQQDDBFjYS5zdHJv 5 | bmdsb29wLmNvbTAeFw0xNTEyMDgyMzM1MzNaFw00MzA0MjQyMzM1MzNaMBkxFzAV 6 | BgNVBAMMDnN0cm9uZ2xvb3AuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB 7 | CgKCAQEAwOYI7OZ2FX/YjRgLZoDQlbPc5UZXU/j0e1wwiJNPtPEax9Y5Uoza0Pnt 8 | Ikzkc2SfvQ+IJrhXo385tI0W5juuqbHnE7UrjUuPjUX6NHevkxcs/flmjan5wnZM 9 | cPsGhH71WDuUEEflvZihf2Se2x+xgZtMhc5XGmVmRuZFYKvkgUhA2/w8/QrK+jPT 10 | n9QRJxZjWNh2RBdC1B7u4jffSmOSUljYFH1I2eTeY+Rdi6YUIYSU9gEoZxsv3Tia 11 | SomfMF5jt2Mouo6MzA+IhLvvFjcrcph1Qxgi9RkfdCMMd+Ipm9YWELkyG1bDRpQy 12 | 0iyHD4gvVsAqz1Y2KdRSdc3Kt+nTqwIDAQABoxkwFzAVBgNVHREEDjAMhwQAAAAA 13 | hwR/AAABMA0GCSqGSIb3DQEBBQUAA4IBAQAhy4J0hML3NgmDRHdL5/iTucBe22Mf 14 | jJjg2aifD1S187dHm+Il4qZNO2plWwAhN0h704f+8wpsaALxUvBIu6nvlvcMP5PH 15 | jGN5JLe2Km3UaPvYOQU2SgacLilu+uBcIo2JSHLV6O7ziqUj5Gior6YxDLCtEZie 16 | Ea8aX5/YjuACtEMJ1JjRqjgkM66XAoUe0E8onOK3FgTIO3tGoTJwRp0zS50pFuP0 17 | PsZtT04ck6mmXEXXknNoAyBCvPypfms9OHqcUIW9fiQnrGbS/Ri4QSQYj0DtFk/1 18 | na4fY1gf3zTHxH8259b/TOOaPfTnCEsOQtjUrWNR4xhmVZ+HJy4yytUW 19 | -----END CERTIFICATE----- 20 | -------------------------------------------------------------------------------- /test/fixtures/server.key: -------------------------------------------------------------------------------- 1 | -----BEGIN PRIVATE KEY----- 2 | MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDA5gjs5nYVf9iN 3 | GAtmgNCVs9zlRldT+PR7XDCIk0+08RrH1jlSjNrQ+e0iTORzZJ+9D4gmuFejfzm0 4 | jRbmO66psecTtSuNS4+NRfo0d6+TFyz9+WaNqfnCdkxw+waEfvVYO5QQR+W9mKF/ 5 | 
ZJ7bH7GBm0yFzlcaZWZG5kVgq+SBSEDb/Dz9Csr6M9Of1BEnFmNY2HZEF0LUHu7i 6 | N99KY5JSWNgUfUjZ5N5j5F2LphQhhJT2AShnGy/dOJpKiZ8wXmO3Yyi6jozMD4iE 7 | u+8WNytymHVDGCL1GR90Iwx34imb1hYQuTIbVsNGlDLSLIcPiC9WwCrPVjYp1FJ1 8 | zcq36dOrAgMBAAECggEACg60Xm2xsHNG/ixHw+NpfLSxCr89JGKxlJD88tIDcOK1 9 | S8AOoxA3BHhTddteeenALmJV7fbkkuC6SICmtgBcnfppmuxyRd6vsGT6o6ut2tR1 10 | gxRy1WYMYKg8WhOshlH8RspscODeyKDhorvDUJd5cNGBDuTwQ68PwxiUe3La6iac 11 | EVQoKohg9EmRIhMF1i8I00zXE8p3XENrlTc491ipc+gLPIP5vtqHyQztEUkZHkWd 12 | dXbs+n1hGCr+4FxrphGYEW80HINzmume7dGChr8nvF4ZZcuWW13DJuNim6pQno1i 13 | hM8VdXm8XphLh0XEGI5OCfu/CetkBILZRXKltZk6AQKBgQDoBqJzRlp7regYNU4q 14 | usfS+43tPNaJ0o4DIzcLawqpmK/B/cZStzHl14Sm62BVkKV6cnWAJPeLkENPMFoV 15 | 7Q7wLZBJxpPzqXkpeiDkKN4Wovca891Rffne5Sz6IDB5mOxMjfKIEPd5RkmB5Lkp 16 | qQLwm3YJ2AJcLagG/Gi1DFDRAQKBgQDU1G9T43Mjke6TXG0u7gCSb+VwyDRsrvJA 17 | u2vy6+MANRc1EEF31YLmTKOU5XxUmhtIu7TUbgPoNi0HuRFXx4Zul3BPlAosLMJv 18 | kNQbA/9d0YQAfSgTsploN5CX65dLZ4ejIzVgDZREzpIBWTze6YZTA2DT5iOIet84 19 | DD5DujY4qwKBgG0PuUo/9oYOD3tZiv1wwD5+uY6auykbTF9TLStzzBY9y9d+hrsY 20 | mx6zOAoRtz1g+TdeF7b9KVJzo//T9XQ68nuYnyreaWrt7SK+4jj8sK+pOEd1+0Cz 21 | 20CXLpX/jWmKpP+y9R5aA0kA7cpdjV90rwoTuN8Vpr5XQ5TNDhaTzGUBAoGABYig 22 | fGXlkH8y3NICZL37ddNC+/O4qTrDQbudyusnM9ItkEuj6CG9DY/gkPaGjQyUuQdo 23 | ZD2YDGmcMh81vDqL3ERDv03yFcP0KkJxwWIRObdA32JhsGFsa7FGKS0O+f7vH+bC 24 | dITl3gQg97gCRSl9PJtR4TCSq/HF7Acld01YK5ECgYEAwLFB5JIuxrowJe74cCMP 25 | n5Rwuc8vWdOsg+ytvQTv0/hVCdzcaLet6YvagnWTWaU7PUwTFxZs/mLQ9CAWVutK 26 | IRzs/GWxGFjH5xotDaJdDDzSdQye4tUqvUVxv7zzzsVycCPBYFkyRQ8Tmr5FLtUJ 27 | Cl48TZ6J8Rx5avjdtOw3QC8= 28 | -----END PRIVATE KEY----- 29 | -------------------------------------------------------------------------------- /test/simple-proxy.js: -------------------------------------------------------------------------------- 1 | var http = require('http') 2 | , https = require('https') 3 | , server = http.createServer(handler) 4 | , port = +process.argv[2] 5 | , prefix = process.argv[3] 6 | , upstream = process.argv[4] 7 | , calls = 0 8 | 
9 | server.listen(port) 10 | 11 | function handler (req, res) { 12 | if (req.url.indexOf(prefix) != 0) 13 | throw new Error('request url [' + req.url + '] does not start with [' + prefix + ']') 14 | 15 | var upstreamUrl = upstream + req.url.substring(prefix.length) 16 | console.log(req.url + ' -> ' + upstreamUrl) 17 | https.get(upstreamUrl, function (ures) { 18 | ures.on('end', function () { 19 | if (++calls == 2) 20 | server.close() 21 | }) 22 | ures.pipe(res) 23 | }) 24 | } 25 | -------------------------------------------------------------------------------- /test/test-download.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | var fs = require('fs') 4 | var http = require('http') 5 | var https = require('https') 6 | var test = require('tape') 7 | var install = require('../lib/install') 8 | 9 | test('download over http', function (t) { 10 | t.plan(2) 11 | 12 | var server = http.createServer(function (req, res) { 13 | t.strictEqual(req.headers['user-agent'], 14 | 'node-ninja v42 (node ' + process.version + ')') 15 | res.end('ok') 16 | server.close() 17 | }) 18 | 19 | var host = '127.0.0.1' 20 | server.listen(0, host, function () { 21 | var port = this.address().port 22 | var gyp = { 23 | opts: {}, 24 | version: '42', 25 | } 26 | var url = 'http://' + host + ':' + port 27 | var req = install.test.download(gyp, {}, url) 28 | req.on('response', function (res) { 29 | var body = '' 30 | res.setEncoding('utf8') 31 | res.on('data', function(data) { 32 | body += data 33 | }) 34 | res.on('end', function() { 35 | t.strictEqual(body, 'ok') 36 | }) 37 | }) 38 | }) 39 | }) 40 | 41 | test('download over https with custom ca', function (t) { 42 | t.plan(3) 43 | 44 | var cert = fs.readFileSync(__dirname + '/fixtures/server.crt', 'utf8') 45 | var key = fs.readFileSync(__dirname + '/fixtures/server.key', 'utf8') 46 | 47 | var cafile = __dirname + '/fixtures/ca.crt' 48 | var ca = install.test.readCAFile(cafile) 49 | 
t.strictEqual(ca.length, 1) 50 | 51 | var options = { ca: ca, cert: cert, key: key } 52 | var server = https.createServer(options, function (req, res) { 53 | t.strictEqual(req.headers['user-agent'], 54 | 'node-ninja v42 (node ' + process.version + ')') 55 | res.end('ok') 56 | server.close() 57 | }) 58 | 59 | server.on('clientError', function (err) { 60 | throw err 61 | }) 62 | 63 | var host = '127.0.0.1' 64 | server.listen(8000, host, function () { 65 | var port = this.address().port 66 | var gyp = { 67 | opts: { cafile: cafile }, 68 | version: '42', 69 | } 70 | var url = 'https://' + host + ':' + port 71 | var req = install.test.download(gyp, {}, url) 72 | req.on('response', function (res) { 73 | var body = '' 74 | res.setEncoding('utf8') 75 | res.on('data', function(data) { 76 | body += data 77 | }) 78 | res.on('end', function() { 79 | t.strictEqual(body, 'ok') 80 | }) 81 | }) 82 | }) 83 | }) 84 | 85 | test('download with missing cafile', function (t) { 86 | t.plan(1) 87 | var gyp = { 88 | opts: { cafile: 'no.such.file' }, 89 | } 90 | try { 91 | install.test.download(gyp, {}, 'http://bad/') 92 | } catch (e) { 93 | t.ok(/no.such.file/.test(e.message)) 94 | } 95 | }) 96 | 97 | test('check certificate splitting', function (t) { 98 | var cas = install.test.readCAFile(__dirname + '/fixtures/ca-bundle.crt') 99 | t.plan(2) 100 | t.strictEqual(cas.length, 2) 101 | t.notStrictEqual(cas[0], cas[1]) 102 | }) 103 | -------------------------------------------------------------------------------- /test/test-find-node-directory.js: -------------------------------------------------------------------------------- 1 | var test = require('tape') 2 | var path = require('path') 3 | var findNodeDirectory = require('../lib/find-node-directory') 4 | 5 | var platforms = ['darwin', 'freebsd', 'linux', 'sunos', 'win32', 'aix'] 6 | 7 | // we should find the directory based on the directory 8 | // the script is running in and it should match the layout 9 | // in a build tree where npm is 
installed in 10 | // .... /deps/npm 11 | test('test find-node-directory - node install', function (t) { 12 | t.plan(platforms.length) 13 | for (var next = 0; next < platforms.length; next++) { 14 | var processObj = {execPath: '/x/y/bin/node', platform: platforms[next]} 15 | t.equal( 16 | findNodeDirectory('/x/deps/npm/node_modules/node-ninja/lib', processObj), 17 | path.join('/x')) 18 | } 19 | }) 20 | 21 | // we should find the directory based on the directory 22 | // the script is running in and it should match the layout 23 | // in an installed tree where npm is installed in 24 | // .... /lib/node_modules/npm or .../node_modules/npm 25 | // depending on the patform 26 | test('test find-node-directory - node build', function (t) { 27 | t.plan(platforms.length) 28 | for (var next = 0; next < platforms.length; next++) { 29 | var processObj = {execPath: '/x/y/bin/node', platform: platforms[next]} 30 | if (platforms[next] === 'win32') { 31 | t.equal( 32 | findNodeDirectory('/y/node_modules/npm/node_modules/node-ninja/lib', 33 | processObj), path.join('/y')) 34 | } else { 35 | t.equal( 36 | findNodeDirectory('/y/lib/node_modules/npm/node_modules/node-ninja/lib', 37 | processObj), path.join('/y')) 38 | } 39 | } 40 | }) 41 | 42 | // we should find the directory based on the execPath 43 | // for node and match because it was in the bin directory 44 | test('test find-node-directory - node in bin directory', function (t) { 45 | t.plan(platforms.length) 46 | for (var next = 0; next < platforms.length; next++) { 47 | var processObj = {execPath: '/x/y/bin/node', platform: platforms[next]} 48 | t.equal( 49 | findNodeDirectory('/nothere/npm/node_modules/node-ninja/lib', processObj), 50 | path.join('/x/y')) 51 | } 52 | }) 53 | 54 | // we should find the directory based on the execPath 55 | // for node and match because it was in the Release directory 56 | test('test find-node-directory - node in build release dir', function (t) { 57 | t.plan(platforms.length) 58 | for (var next = 
0; next < platforms.length; next++) { 59 | var processObj 60 | if (platforms[next] === 'win32') { 61 | processObj = {execPath: '/x/y/Release/node', platform: platforms[next]} 62 | } else { 63 | processObj = {execPath: '/x/y/out/Release/node', 64 | platform: platforms[next]} 65 | } 66 | 67 | t.equal( 68 | findNodeDirectory('/nothere/npm/node_modules/node-ninja/lib', processObj), 69 | path.join('/x/y')) 70 | } 71 | }) 72 | 73 | // we should find the directory based on the execPath 74 | // for node and match because it was in the Debug directory 75 | test('test find-node-directory - node in Debug release dir', function (t) { 76 | t.plan(platforms.length) 77 | for (var next = 0; next < platforms.length; next++) { 78 | var processObj 79 | if (platforms[next] === 'win32') { 80 | processObj = {execPath: '/a/b/Debug/node', platform: platforms[next]} 81 | } else { 82 | processObj = {execPath: '/a/b/out/Debug/node', platform: platforms[next]} 83 | } 84 | 85 | t.equal( 86 | findNodeDirectory('/nothere/npm/node_modules/node-ninja/lib', processObj), 87 | path.join('/a/b')) 88 | } 89 | }) 90 | 91 | // we should not find it as it will not match based on the execPath nor 92 | // the directory from which the script is running 93 | test('test find-node-directory - not found', function (t) { 94 | t.plan(platforms.length) 95 | for (var next = 0; next < platforms.length; next++) { 96 | var processObj = {execPath: '/x/y/z/y', platform:next} 97 | t.equal(findNodeDirectory('/a/b/c/d', processObj), '') 98 | } 99 | }) 100 | 101 | // we should find the directory based on the directory 102 | // the script is running in and it should match the layout 103 | // in a build tree where npm is installed in 104 | // .... 
/deps/npm 105 | // same test as above but make sure additional directory entries 106 | // don't cause an issue 107 | test('test find-node-directory - node install', function (t) { 108 | t.plan(platforms.length) 109 | for (var next = 0; next < platforms.length; next++) { 110 | var processObj = {execPath: '/x/y/bin/node', platform: platforms[next]} 111 | t.equal( 112 | findNodeDirectory('/x/y/z/a/b/c/deps/npm/node_modules/node-ninja/lib', 113 | processObj), path.join('/x/y/z/a/b/c')) 114 | } 115 | }) 116 | -------------------------------------------------------------------------------- /test/test-find-python.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | var test = require('tape') 4 | var configure = require('../lib/configure') 5 | var execFile = require('child_process').execFile 6 | 7 | test('find python executable', function (t) { 8 | t.plan(4) 9 | 10 | configure.test.findPython('python', function (err, found) { 11 | t.strictEqual(err, null) 12 | var proc = execFile(found, ['-V'], function (err, stdout, stderr) { 13 | t.strictEqual(err, null) 14 | t.strictEqual(stdout, '') 15 | t.ok(/Python 2/.test(stderr)) 16 | }) 17 | proc.stdout.setEncoding('utf-8') 18 | proc.stderr.setEncoding('utf-8') 19 | }) 20 | }) 21 | -------------------------------------------------------------------------------- /test/test-options.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var test = require('tape') 4 | var gyp = require('../lib/node-ninja') 5 | 6 | test('options in environment', function (t) { 7 | t.plan(1) 8 | 9 | // `npm test` dumps a ton of npm_config_* variables in the environment. 10 | Object.keys(process.env) 11 | .filter(function(key) { return /^npm_config_/.test(key) }) 12 | .forEach(function(key) { delete process.env[key] }) 13 | 14 | // Zero-length keys should get filtered out. 
15 | process.env.npm_config_ = '42' 16 | // Other keys should get added. 17 | process.env.npm_config_x = '42' 18 | // Except loglevel. 19 | process.env.npm_config_loglevel = 'debug' 20 | 21 | var g = gyp(); 22 | g.parseArgv(['rebuild']) // Also sets opts.argv. 23 | 24 | t.deepEqual(Object.keys(g.opts).sort(), ['argv', 'x']) 25 | }) 26 | --------------------------------------------------------------------------------