├── .gitignore ├── .hgignore ├── .npmignore ├── Gruntfile.coffee ├── LICENSE ├── README.md ├── default.nix ├── lib └── command.js ├── package.json └── src ├── npm2nix.coffee └── package-fetcher.coffee /.gitignore: -------------------------------------------------------------------------------- 1 | /node_modules/ 2 | /lib/npm2nix.js 3 | /lib/package-fetcher.js 4 | -------------------------------------------------------------------------------- /.hgignore: -------------------------------------------------------------------------------- 1 | (? 2 | 3 | # Project configuration. 4 | grunt.initConfig 5 | coffee: 6 | app: 7 | expand: true 8 | cwd: 'src' 9 | src: ['**/*.coffee'] 10 | dest: 'lib' 11 | ext: '.js' 12 | watch: 13 | app: 14 | files: '**/*.coffee' 15 | tasks: ['coffee'] 16 | release: 17 | options: 18 | npm: false 19 | github: 20 | repo: 'NixOS/npm2nix' 21 | usernameVar: 'GITHUB_USERNAME' 22 | passwordVar: 'GITHUB_PASSWORD' 23 | 24 | # These plugins provide necessary tasks. 25 | grunt.loadNpmTasks 'grunt-contrib-coffee' 26 | grunt.loadNpmTasks 'grunt-contrib-watch' 27 | grunt.loadNpmTasks 'grunt-release' 28 | 29 | # Default task. 
30 | grunt.registerTask 'default', ['coffee'] 31 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2012,2013 Shea Levy 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in 11 | all copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 19 | SOFTWARE. 20 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | DISCLAIMER: This project is no longer maintained. Instead use [node2nix](https://github.com/svanderburg/node2nix) 2 | ----------- 3 | 4 | npm2nix 5 | ======= 6 | 7 | Generate nix expressions from npmjs.org! 
Usage
-----

`npm2nix [--no-dev] node-packages.json node-packages.generated.nix`

`no-dev` ignores development dependencies

JSON structure
--------------

npm2nix expects the passed JSON file to be a list of strings and at most one
object. Strings are taken as the name of the package. The object must be
a valid dependencies object for an npm `package.json` file.
Alternatively, the passed JSON file can be an npm `package.json`, in which
case the expressions for its dependencies will be generated.

Development
-----------

- `nix-shell`
- `grunt watch`

Release
-------

- `export GITHUB_USERNAME=`
- `export GITHUB_PASSWORD=`
- `grunt release:patch/minor/major`
-------------------------------------------------------------------------------- /default.nix: --------------------------------------------------------------------------------
{ }:

let

pkgs = import {};

in pkgs.stdenv.mkDerivation rec {
  name = "npm2nix-env";
  src = ./.;
  buildInputs = with pkgs; [
    nodejs
  ];
  shellHook = ''
    npm install .
15 | ''; 16 | } 17 | -------------------------------------------------------------------------------- /lib/command.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | require(__dirname + '/npm2nix.js'); 4 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "npm2nix", 3 | "version": "5.12.0", 4 | "description": "Generate nix expressions to build npm packages", 5 | "homepage": "https://github.com/NixOS/npm2nix", 6 | "author": { 7 | "name": "Shea Levy", 8 | "email": "shea@shealevy.com" 9 | }, 10 | "bin": { 11 | "npm2nix": "lib/command.js" 12 | }, 13 | "main": "lib/package-fetcher.js", 14 | "dependencies": { 15 | "semver": "~4.2.0", 16 | "argparse": "0.1.15", 17 | "npm-registry-client": "7.1.0", 18 | "npmconf": "0.1.1", 19 | "tar": "0.1.17", 20 | "temp": "0.6.0", 21 | "fs.extra": ">=1.2.0 <2.0.0", 22 | "findit": ">=1.1.0 <2.0.0", 23 | "coffee-script": ">=1.6.2 <2.0.0" 24 | }, 25 | "devDependencies": { 26 | "grunt": "^0.4.5", 27 | "grunt-contrib-coffee": "^0.12.0", 28 | "grunt-contrib-watch": "^0.6.1", 29 | "grunt-release": "^0.10.0" 30 | }, 31 | "repository": { 32 | "type": "git", 33 | "url": "https://github.com/NixOS/npm2nix" 34 | }, 35 | "licenses": [ 36 | { 37 | "type": "MIT", 38 | "url": "https://github.com/NixOS/npm2nix/blob/master/LICENSE" 39 | } 40 | ], 41 | "scripts": { 42 | "prepublish": "coffee --bare --compile --output lib src/*.coffee" 43 | }, 44 | "engines": { 45 | "node": ">=0.9.4" 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /src/npm2nix.coffee: -------------------------------------------------------------------------------- 1 | fs = require 'fs' 2 | path = require 'path' 3 | 4 | argparse = require 'argparse' 5 | npmconf = require 'npmconf' 6 | RegistryClient = require 'npm-registry-client' 7 | 
8 | PackageFetcher = require './package-fetcher' 9 | 10 | version = require('../package.json').version 11 | 12 | parser = new argparse.ArgumentParser { 13 | version: version 14 | description: 'Generate nix expressions to build npm packages' 15 | epilog: """ 16 | The package list can be either an npm package.json, in which case npm2nix 17 | will generate expressions for its dependencies, or a list of strings and 18 | at most one object, where the strings are package names and the object is 19 | a valid dependencies object (see npm.json(5) for details) 20 | """ 21 | } 22 | 23 | parser.addArgument [ 'packageList' ], 24 | help: 'The file containing the packages to generate expressions for' 25 | type: path.resolve 26 | metavar: 'INPUT' 27 | 28 | parser.addArgument [ 'output' ], 29 | help: 'The output file to generate' 30 | type: path.resolve 31 | metavar: 'OUTPUT' 32 | 33 | parser.addArgument [ '--overwrite' ], 34 | help: 'Whether to overwrite the helper default.nix expression (when generating for a package.json)', 35 | action: 'storeTrue', 36 | 37 | parser.addArgument [ '--nodev' ], 38 | help: 'Do not generate development dependencies', 39 | action: 'storeTrue' 40 | 41 | args = parser.parseArgs() 42 | 43 | escapeNixString = (string) -> 44 | string.replace /(\\|\$\{|")/g, "\\$&" 45 | 46 | fullNames = {} 47 | packageSet = {} 48 | 49 | writePkg = finalizePkgs = undefined 50 | do -> 51 | 52 | known = {} 53 | 54 | stream = fs.createWriteStream args.output 55 | stream.write "{ self, fetchurl, fetchgit ? 
null, lib }:\n\n{" 56 | writePkg = (name, spec, pkg) -> 57 | stream.write """ 58 | \n by-spec.\"#{escapeNixString name}\".\"#{escapeNixString spec}\" = 59 | self.by-version.\"#{escapeNixString name}\".\"#{escapeNixString pkg.version}\"; 60 | """ 61 | unless name of known and pkg.version of known[name] 62 | known[name] ?= {} 63 | known[name][pkg.version] = true 64 | cycleDeps = {} 65 | cycleDeps[pkg.name] = true 66 | 67 | stream.write "\n by-version.\"#{escapeNixString pkg.name}\".\"#{escapeNixString pkg.version}\" = self.buildNodePackage {" 68 | stream.write "\n name = \"#{escapeNixString pkg.name}-#{escapeNixString pkg.version}\";" 69 | stream.write "\n version = \"#{escapeNixString pkg.version}\";" 70 | stream.write "\n bin = #{if "bin" of pkg then "true" else "false"};" 71 | 72 | stream.write "\n src = " 73 | 74 | if 'tarball' of pkg.dist 75 | stream.write """ 76 | fetchurl { 77 | url = "#{pkg.dist.tarball}"; 78 | name = "#{pkg.name}-#{pkg.version}.tgz"; 79 | #{if 'shasum' of pkg.dist then 'sha1' else 'sha256'} = "#{pkg.dist.shasum ? 
pkg.dist.sha256sum}"; 80 | } 81 | """ 82 | else 83 | stream.write """ 84 | fetchgit { 85 | url = "#{pkg.dist.git}"; 86 | rev = "#{pkg.dist.rev}"; 87 | sha256 = "#{pkg.dist.sha256sum}"; 88 | } 89 | """ 90 | 91 | seenDeps = {} 92 | 93 | stream.write ";\n deps = {" 94 | for nm, spc of pkg.dependencies or {} 95 | unless seenDeps[nm] or nm of (pkg.optionalDependencies or {}) 96 | spc = spc.version if spc instanceof Object 97 | if spc is 'latest' or spc is '' 98 | spc = '*' 99 | stream.write "\n \"#{escapeNixString nm}-#{packageSet[nm][spc].version}\" = self.by-version.\"#{escapeNixString nm}\".\"#{packageSet[nm][spc].version}\";" 100 | seenDeps[nm] = true 101 | 102 | stream.write "\n };\n optionalDependencies = {" 103 | for nm, spc of pkg.optionalDependencies or {} 104 | unless seenDeps[nm] 105 | spc = spc.version if spc instanceof Object 106 | if spc is 'latest' or spc is '' 107 | spc = '*' 108 | stream.write "\n \"#{escapeNixString nm}-#{packageSet[nm][spc].version}\" = self.by-version.\"#{escapeNixString nm}\".\"#{packageSet[nm][spc].version}\";" 109 | seenDeps[nm] = true 110 | 111 | stream.write "\n };\n peerDependencies = [" 112 | for nm, spc of pkg.peerDependencies or {} 113 | unless seenDeps[nm] or cycleDeps[nm] 114 | spc = spc.version if spc instanceof Object 115 | if spc is 'latest' or spc is '' 116 | spc = '*' 117 | stream.write "\n self.by-version.\"#{escapeNixString nm}\".\"#{packageSet[nm][spc].version}\"" 118 | seenDeps[nm] = true 119 | stream.write "];\n" 120 | 121 | stream.write " os = [" 122 | for os, i in pkg.os or [] 123 | stream.write " \"#{os}\"" 124 | stream.write " ];\n" 125 | 126 | stream.write " cpu = [" 127 | for cpu, i in pkg.cpu or [] 128 | stream.write " \"#{cpu}\"" 129 | stream.write " ];\n" 130 | 131 | stream.write " };" 132 | 133 | if fullNames[name] is spec 134 | stream.write """ 135 | \n "#{escapeNixString name}" = self.by-version."#{escapeNixString pkg.name}"."#{pkg.version}"; 136 | """ 137 | 138 | finalizePkgs = -> 139 | stream.end 
"\n}\n" 140 | 141 | npmconf.load (err, conf) -> 142 | if err? 143 | console.error "Error loading npm config: #{err}" 144 | process.exit 7 145 | registry = new RegistryClient conf 146 | fetcher = new PackageFetcher() 147 | fs.readFile args.packageList, (err, json) -> 148 | if err? 149 | console.error "Error reading file #{args.packageList}: #{err}" 150 | process.exit 1 151 | try 152 | packages = JSON.parse json 153 | catch error 154 | console.error "Error parsing JSON file #{args.packageList}: #{error}" 155 | process.exit 3 156 | 157 | packageByVersion = {} 158 | 159 | pendingPackages = [] 160 | 161 | checkPendingPackages = () -> 162 | console.log "Waiting for #{pendingPackages} to complete ..." 163 | checkInterval = setInterval checkPendingPackages, 10000 164 | 165 | fetcher.on 'fetching', (name, spec) -> 166 | pendingPackages.push(name) 167 | fetcher.on 'fetched', (name, spec, pkg) -> 168 | pendingPackageIndex = pendingPackages.indexOf(name) 169 | console.assert (pendingPackageIndex >= 0), "Package #{name} was fetched multiple times!" 
170 | pendingPackages.splice(pendingPackageIndex,1) 171 | packageByVersion[name] ?= {} 172 | unless pkg.version of packageByVersion[name] 173 | packageByVersion[name][pkg.version] = pkg 174 | packageSet[name] ?= {} 175 | packageSet[name][spec] = packageByVersion[name][pkg.version] 176 | if pendingPackages.length == 0 177 | clearInterval(checkInterval) 178 | names = (key for key, val of packageSet).sort() 179 | for name in names 180 | specs = (key for key, val of packageSet[name]).sort() 181 | for spec in specs 182 | writePkg name, spec, packageSet[name][spec] 183 | finalizePkgs() 184 | 185 | fetcher.on 'error', (err, name, spec) -> 186 | console.error "Error during fetch: #{err}" 187 | process.exit 8 188 | 189 | addPackage = (name, spec) -> 190 | spec = '*' if spec is 'latest' or spec is '' #ugh 191 | fullNames[name] = spec 192 | fetcher.fetch name, spec, registry 193 | if packages instanceof Array 194 | for pkg in packages 195 | if typeof pkg is "string" 196 | addPackage pkg, '*' 197 | else 198 | addPackage name, spec for name, spec of pkg 199 | else if packages instanceof Object 200 | unless 'dependencies' of packages or 'devDependencies' of packages 201 | console.error "#{file} specifies no dependencies" 202 | process.exit 6 203 | 204 | addPackage name, spec for name, spec of packages.dependencies ? {} 205 | addPackage name, spec for name, spec of packages.devDependencies ? {} if !args.nodev 206 | 207 | pkgName = escapeNixString packages.name 208 | fs.writeFile "default.nix", """ 209 | { #{pkgName} ? { outPath = ./.; name = "#{pkgName}"; } 210 | , pkgs ? 
import {} 211 | }: 212 | let 213 | nodePackages = import "${pkgs.path}/pkgs/top-level/node-packages.nix" { 214 | inherit pkgs; 215 | inherit (pkgs) stdenv nodejs fetchurl fetchgit; 216 | neededNatives = [ pkgs.python ] ++ pkgs.lib.optional pkgs.stdenv.isLinux pkgs.utillinux; 217 | self = nodePackages; 218 | generated = ./#{path.relative process.cwd(), args.output}; 219 | }; 220 | in rec { 221 | tarball = pkgs.runCommand "#{pkgName}-#{packages.version}.tgz" { buildInputs = [ pkgs.nodejs ]; } '' 222 | mv `HOME=$PWD npm pack ${#{pkgName}}` $out 223 | ''; 224 | build = nodePackages.buildNodePackage { 225 | name = "#{pkgName}-#{packages.version}"; 226 | src = [ tarball ]; 227 | buildInputs = nodePackages.nativeDeps."#{pkgName}" or []; 228 | deps = [ #{ 229 | ("nodePackages.by-spec.\"#{escapeNixString nm}\".\"#{escapeNixString spc}\"" for nm, spc of (packages.dependencies ? {})).join ' ' 230 | } ]; 231 | peerDependencies = []; 232 | }; 233 | } 234 | """, flag: "w#{if args.overwrite then '' else 'x'}", (err) -> 235 | if err? 
and err.code isnt 'EEXIST' 236 | console.error "Error writing helper default.nix: #{err}" 237 | else 238 | console.error "#{file} must represent an array of packages or be a valid npm package.json" 239 | process.exit 4 240 | -------------------------------------------------------------------------------- /src/package-fetcher.coffee: -------------------------------------------------------------------------------- 1 | http = require 'http' 2 | https = require 'https' 3 | crypto = require 'crypto' 4 | zlib = require 'zlib' 5 | events = require 'events' 6 | url = require 'url' 7 | util = require 'util' 8 | child_process = require 'child_process' 9 | path = require 'path' 10 | os = require 'os' 11 | 12 | temp = require 'temp' 13 | temp.track() 14 | 15 | fs = require 'fs.extra' 16 | findit = require 'findit' 17 | tar = require 'tar' 18 | semver = require 'semver' 19 | RegistryClient = require 'npm-registry-client' 20 | 21 | PackageFetcher = (cfg) -> 22 | unless this instanceof PackageFetcher 23 | new PackageFetcher cfg 24 | else 25 | events.EventEmitter.call this 26 | @_peerDependencies = {} 27 | this 28 | 29 | PackageFetcher.prototype = Object.create events.EventEmitter.prototype, 30 | constructor: value: PackageFetcher 31 | 32 | PackageFetcher.prototype.fetch = (name, spec, registry) -> 33 | unless name of @_peerDependencies and spec of @_peerDependencies[name] 34 | @_peerDependencies[name] ?= {} 35 | @_peerDependencies[name][spec] = [] 36 | @emit 'fetching', name, spec 37 | parsed = url.parse spec 38 | switch parsed.protocol 39 | when 'git:', 'git+ssh:', 'git+http:', 'git+https:' 40 | @_fetchFromGit name, spec, registry, parsed 41 | when 'http:', 'https:' 42 | @_fetchFromHTTP name, spec, registry, parsed 43 | when null 44 | if semver.validRange spec, true 45 | @_fetchFromRegistry name, spec, registry 46 | else 47 | if spec.indexOf "#" > -1 48 | @_fetchFromGithub name, spec, registry 49 | else 50 | @emit 'error', "Unknown spec #{spec}", name, spec 51 | else 52 | @emit 
'error', "Unknown protocol #{parsed.protocol}", name, spec 53 | 54 | 55 | PackageFetcher.prototype._fetchFromGithub = (name, spec, registry) -> 56 | new_spec = "git://github.com/" + spec.replace "#", ".git#" 57 | parsed = url.parse new_spec 58 | @_fetchFromGit name, spec, registry, parsed 59 | 60 | PackageFetcher.prototype._fetchFromRegistry = (name, spec, registry) -> 61 | handlePackage = (pkg) => 62 | unless pkg.dist.tarball? 63 | @emit 'error', "Could not find supported dist type for #{pkg.name}@#{pkg.version} in #{util.inspect pkg.dist}", name, spec 64 | else 65 | if pkg.dist.shasum? # Sometimes npm gives us shasums, how nice 66 | @_havePackage name, spec, pkg, registry 67 | @emit 'fetched', name, spec, pkg 68 | else 69 | @_fetchFromHTTP name, spec, registry, url.parse dist.tarball 70 | 71 | registry.get "https://registry.npmjs.org/#{name}/", {}, (err, info) => 72 | if err? 73 | @emit 'error', "Error getting registry info for #{name}: #{err}", name, spec 74 | else 75 | version = semver.maxSatisfying (key for key, value of info.versions), spec, true 76 | unless version? 77 | @emit 'error', "Could not find version matching #{spec} for #{name} in #{util.inspect info.versions}", name, spec 78 | else 79 | pkg = info.versions[version] 80 | if pkg instanceof Object 81 | handlePackage pkg 82 | else 83 | registry.get "https://registry.npmjs.org/#{name}/#{version}/", (err, info) => 84 | if err? 85 | @emit 'error', "Error getting package info for #{name}@#{version}: #{err}", name, spec 86 | else 87 | handlePackage info 88 | 89 | do -> 90 | cache = {} 91 | 92 | addFetchedCallback = (cached, cb) -> 93 | if 'err' of cached 94 | cb cached.err 95 | else if 'pkg' of cached 96 | cb undefined, cached.pkg 97 | else 98 | cached.callbacks.push cb 99 | 100 | PackageFetcher.prototype._fetchFromHTTP = (name, spec, registry, parsed) -> 101 | href = parsed.href 102 | callback = (err, pkg) => 103 | if err? 
104 | @emit 'error', "Error fetching #{href}: #{err}", name, spec 105 | else 106 | @emit 'fetched', name, spec, pkg 107 | if href of cache 108 | addFetchedCallback cache[href], callback 109 | else 110 | cached = callbacks: [ callback ] 111 | cache[href] = cached 112 | error = (err) -> 113 | unless 'err' of cached 114 | cached.err = err 115 | cb err for cb in cached.callbacks 116 | cached.callbacks = [] 117 | 118 | client = switch parsed.protocol 119 | when 'http:' 120 | http 121 | when 'https:' 122 | https 123 | else 124 | undefined 125 | unless client? 126 | error "Unsupported protocol #{parsed.protocol}" 127 | else 128 | unzip = zlib.createGunzip() 129 | computeHash = crypto.createHash 'sha256' 130 | tarParser = new tar.Parse() 131 | 132 | unzip.pipe tarParser 133 | 134 | unzip.on 'error', (err) -> error "Error while unzipping #{href}: #{err}" 135 | 136 | computeHash.on 'error', (err) -> error "Error while computing hash of #{href}: #{err}" 137 | 138 | tarParser.on 'error', (err) -> error "Error while parsing tarball unzipped from #{href}: #{err}" 139 | 140 | redirectCount = 0 141 | getCallback = (res) => 142 | unless res.statusCode is 200 143 | if res.statusCode in [ 300, 301, 302, 303, 307, 308 ] 144 | redirectCount += 1 145 | if redirectCount > 5 146 | error "Unable to GET #{href}: Too many redirects" 147 | else unless 'location' of res.headers 148 | error "Bad HTTP response while GETting #{href}: Redirect with no Location header" 149 | else 150 | resolved = url.resolve(href, res.headers.location) 151 | client = switch (url.parse resolved).protocol 152 | when 'http:' 153 | http 154 | when 'https:' 155 | https 156 | else 157 | undefined 158 | options = url.parse(resolved) 159 | options["rejectUnauthorized"] = false 160 | client.get resolved, getCallback 161 | else 162 | error "Unsuccessful status code while GETting #{href}: #{http.STATUS_CODES[res.statusCode]}" 163 | else 164 | res.on 'error', (err) -> error "Error while GETting #{href}: #{err}" 165 | 166 | tee 
= -> 167 | while (chunk = res.read()) isnt null 168 | unzip.write chunk 169 | computeHash.write chunk 170 | res.on 'readable', tee 171 | endComputeHash = -> computeHash.end() 172 | res.on 'end', endComputeHash 173 | 174 | earlyEnd = -> error "No package.json found in #{href}" 175 | tarParser.on 'end', earlyEnd 176 | 177 | pkg = null 178 | hashBuf = null 179 | finished = -> 180 | pkg.dist = { tarball: href, sha256sum: hashBuf.toString 'hex' } 181 | cached.pkg = pkg 182 | cb undefined, pkg for cb in cached.callbacks 183 | cached.callbacks = [] 184 | 185 | computeHash.on 'readable', -> 186 | hashBuf = (computeHash.read 32) || hashBuf 187 | finished() unless hashBuf is null or pkg is null 188 | 189 | tarParser.on 'entry', (entry) => 190 | if /^[^/]*\/package\.json$/.test entry.path 191 | chunks = [] 192 | length = 0 193 | 194 | entry.on 'data', (chunk) -> 195 | chunks.push chunk 196 | length += chunk.length 197 | 198 | entry.on 'end', => 199 | pkg = JSON.parse Buffer.concat(chunks, length).toString() 200 | @_havePackage name, spec, pkg, registry 201 | chunks = null 202 | unzip.unpipe tarParser 203 | unzip.end() 204 | # tarParser doesn't like this... 
205 | # tarParser.end() 206 | res.removeListener 'readable', tee 207 | res.removeListener 'end', endComputeHash 208 | tarParser.removeListener 'end', earlyEnd 209 | res.pipe computeHash 210 | finished() unless hashBuf is null 211 | 212 | options = url.parse href 213 | options["rejectUnauthorized"] = false 214 | client.get options, getCallback 215 | do -> 216 | cache = {} 217 | 218 | addFetchedCallback = (cached, cb) -> 219 | if 'err' of cached 220 | cb cached.err 221 | else if 'pkg' of cached 222 | cb undefined, cached.pkg 223 | else 224 | cached.callbacks.push cb 225 | 226 | PackageFetcher.prototype._fetchFromGit = (name, spec, registry, parsed) -> 227 | parsed.protocol = switch parsed.protocol 228 | when 'git:' 229 | 'git:' 230 | when 'git+ssh:' 231 | 'ssh:' 232 | when 'git+http:' 233 | 'http:' 234 | when 'git+https:' 235 | 'https:' 236 | href = parsed.format() 237 | callback = (err, pkg) => 238 | if err? 239 | @emit 'error', "Error fetching #{href} from git: #{err}", name, spec 240 | else 241 | @emit 'fetched', name, spec, pkg 242 | if href of cache 243 | addFetchedCallback cache[href], callback 244 | else 245 | cached = callbacks: [ callback ] 246 | cache[href] = cached 247 | error = (err) -> 248 | unless 'err' of cached 249 | cached.err = err 250 | cb err for cb in cached.callbacks 251 | cached.callbacks = [] 252 | commitIsh = if parsed.hash? 253 | parsed.hash.slice 1 254 | else 255 | 'master' 256 | parsed.hash = null 257 | temp.mkdir { dir: os.tmpDir(), prefix: "npm2nix-git-checkout-#{name}" }, (err, dirPath) => 258 | if err? 259 | error "Error creating temporary directory for git checkout: #{err}" 260 | else 261 | oldError = error 262 | error = (err) -> 263 | fs.rmrf dirPath, -> 264 | oldError err 265 | gitClone = child_process.spawn "git", [ "clone", "--recursive", parsed.format() ], cwd: dirPath, stdio: "inherit" 266 | gitClone.on 'error', (err) -> error "Error executing git clone: #{err}" 267 | gitClone.on 'exit', (code, signal) => 268 | unless code? 
269 | error "git clone died with signal #{signal}" 270 | else unless code is 0 271 | error "git clone exited with non-zero status code #{code}" 272 | else 273 | fs.readdir dirPath, (err, files) => 274 | if err? 275 | error "Error reading directory #{dirPath}: #{err}" 276 | else 277 | if files.length isnt 1 278 | error "git clone did not create exactly one directory" 279 | else 280 | pkg = null 281 | hash = null 282 | rev = null 283 | finished = -> 284 | fs.rmrf dirPath, -> 285 | pkg.dist = { git: parsed.format(), rev: rev, sha256sum: hash } 286 | cached.pkg = pkg 287 | cb undefined, pkg for cb in cached.callbacks 288 | cached.callbacks = [] 289 | 290 | gitDir = "#{dirPath}/#{files[0]}" 291 | gitCheckout = child_process.spawn "git", [ "checkout", commitIsh ], cwd: gitDir, stdio: "inherit" 292 | gitCheckout.on 'error', (err) -> error "Error executing git checkout: #{err}" 293 | gitCheckout.on 'exit', (code, signal) => 294 | unless code? 295 | error "git checkout died with signal #{signal}" 296 | else unless code is 0 297 | error "git checkout exited with non-zero status code #{code}" 298 | else 299 | 300 | gitRevParse = child_process.spawn "git", [ "rev-parse", commitIsh ], cwd: gitDir, stdio: [ 0, 'pipe', 2 ] 301 | gitRevParse.on 'error', (err) -> error "Error executing git rev-parse: #{err}" 302 | gitRevParse.on 'exit', (code, signal) => 303 | unless code? 304 | error "git rev-parse died with signal #{signal}" 305 | else unless code is 0 306 | error "git rev-parse exited with non-zero status code #{code}" 307 | gitRevParse.stdout.setEncoding "utf8" 308 | readRev = -> 309 | rev = gitRevParse.stdout.read 40 310 | if rev? 311 | gitRevParse.stdout.removeListener 'readable', readRev 312 | gitRevParse.stdout.removeListener 'end', earlyRevEnd 313 | gitRevParse.stdout.on 'data', -> 314 | finished() if pkg? and hash? 
315 | gitRevParse.stdout.on 'readable', readRev 316 | readRev() 317 | earlyRevEnd = -> 318 | error "git rev-parse's stdout ended before 64 characters were read" 319 | gitRevParse.stdout.on 'end', earlyRevEnd 320 | 321 | fs.readFile "#{gitDir}/package.json", encoding: "utf8", (err, data) => 322 | if err? 323 | error "Error reading package.json in #{parsed.format()}##{commitIsh} clone: #{err}" 324 | else 325 | pkg = JSON.parse data 326 | @_havePackage name, spec, pkg, registry 327 | finished() if hash? and rev? 328 | finder = findit gitDir 329 | dotGitRegexp = /^\.git.*$/ 330 | deletesLeft = 1 331 | deleteFinished = -> 332 | deletesLeft -= 1 333 | finder.emit 'noMoreDeletes' if deletesLeft is 0 334 | finder.on 'directory', (dir, stat, stop) -> 335 | if dotGitRegexp.test path.basename(dir) 336 | stop() 337 | deletesLeft += 1 338 | fs.rmrf dir, (err) -> 339 | if err? 340 | error "Error removing directory #{dir} in #{parsed.format()}##{commitIsh} clone: #{err}" 341 | else 342 | deleteFinished() 343 | finder.on 'file', (file) -> 344 | if dotGitRegexp.test path.basename(file) 345 | deletesLeft += 1 346 | fs.unlink file, (err) -> 347 | if err? 348 | error "Error unlinking file #{file} in #{parsed.format()}##{commitIsh} clone: #{err}" 349 | else 350 | deleteFinished() 351 | finder.on 'link', (link) -> 352 | if dotGitRegexp.test path.basename(link) 353 | deletesLeft += 1 354 | fs.unlink link, (err) -> 355 | if err? 
356 | error "Error unlinking symlink #{link} in #{parsed.format()}##{commitIsh} clone: #{err}" 357 | else 358 | deleteFinished() 359 | finder.on 'error', (err) -> 360 | error "Error walking git tree to remove .git* files: #{err}" 361 | finder.on 'end', deleteFinished 362 | finder.once 'noMoreDeletes', -> 363 | nixHash = child_process.spawn "nix-hash", [ "--type", "sha256", gitDir ], stdio: [ 0, 'pipe', 2 ] 364 | nixHash.on 'error', (err) -> error "Error executing nix-hash: #{err}" 365 | nixHash.stdout.setEncoding "utf8" 366 | nixHash.on 'exit', (code, signal) -> 367 | unless code? 368 | error "nix-hash died with signal #{signal}" 369 | else unless code is 0 370 | error "nix-hash exited with non-zero status code #{code}" 371 | readHash = -> 372 | hash = nixHash.stdout.read 64 373 | if hash? 374 | nixHash.stdout.removeListener 'readable', readHash 375 | nixHash.stdout.removeListener 'end', earlyHashEnd 376 | nixHash.stdout.on 'data', -> 377 | finished() if pkg? and rev? 378 | nixHash.stdout.on 'readable', readHash 379 | readHash() 380 | earlyHashEnd = -> 381 | error "nix-hash's stdout ended before 64 characters were read" 382 | nixHash.stdout.on 'end', earlyHashEnd 383 | 384 | do -> 385 | makeNewRegistry = (registry, newUrl) -> 386 | # uuugh 387 | cfg = do -> 388 | deleted = {} 389 | # !!! Shared cache dir, is that OK? 
Uses etags so probably 390 | local = registry: newUrl 391 | baseCfg = registry.conf 392 | get: (key) -> 393 | if key of local 394 | local[key] 395 | else if key of deleted 396 | undefined 397 | else 398 | baseCfg.get key 399 | set: (key, value) -> local[key] = value 400 | del: (k) -> deleted[key] = true; delete local[key] 401 | new RegistryClient cfg 402 | 403 | tryMergeDeps = (dep1, dep2) -> 404 | if dep1 instanceof Object 405 | dep1 = dep1.version 406 | if dep2 instanceof Object 407 | dep2 = dep2.version 408 | if dep1 is 'latest' or dep1 is '' 409 | dep1 = '*' 410 | if dep2 is 'latest' or dep2 is '' 411 | dep2 = '*' 412 | if semver.validRange(dep1, true) and semver.validRange(dep2, true) 413 | merged = new semver.Range dep1, true 414 | range2 = new semver.Range dep2, true 415 | mergedSet = [] 416 | for left in merged.set 417 | for right in range2.set 418 | subset = left.concat(right) 419 | subset.splice(left.length - 1, 1) 420 | mergedSet.push subset 421 | merged.set = mergedSet 422 | merged.format() 423 | else 424 | undefined 425 | 426 | PackageFetcher.prototype._handleDeps = (pkg, registry) -> 427 | # !!! TODO: Handle optionalDependencies, peerDependencies 428 | registry = makeNewRegistry registry, pkg.registry if 'registry' of pkg 429 | handleDep = (nm, dep) => 430 | # !!! Seeming conflict between CommonJS Registry spec and npm on the one 431 | # hand and CommonJS Package spec on the other. Package spec allows deps 432 | # to be an object of options (e.g. "ssl": { "gnutls": "1.2.3", "openssl": "2.3.4" }) 433 | # but npm only allows simple strings and Registry only allows version, registry 434 | # objects in addition to simple strings. 
Ignoring package spec until/unless a 435 | # registry entry in the wild shows up with that format 436 | thisRegistry = registry 437 | if dep instanceof Object 438 | thisRegistry = makeNewRegistry registry, dep.registry 439 | dep = dep.version 440 | if dep is 'latest' or dep is '' 441 | dep = '*' 442 | parsed = url.parse dep 443 | @fetch nm, dep, thisRegistry 444 | for nm, dep of pkg.dependencies or {} 445 | handleDep nm, dep 446 | for nm, dep of pkg.optionalDependencies or {} 447 | handleDep nm, dep 448 | for nm, dep of pkg.peerDependencies or {} 449 | handleDep nm, dep 450 | 451 | handlePeerDependencies = (peerDependencies) => 452 | peerDeps = {} 453 | for nm, dep of peerDependencies 454 | if dep instanceof Object 455 | dep = dep.version 456 | if dep is 'latest' or dep is '' 457 | dep = '*' 458 | if nm of pkg.dependencies 459 | merged = tryMergeDeps dep, pkg.dependencies[nm] 460 | if merged? 461 | dep = merged 462 | else 463 | @emit 'error', 464 | "Cannot merge top-level dependency #{nm}: #{pkg.dependencies[nm]} of #{name}@#{pkg.version} with peerDependency #{nm}: #{dep} since both are not valid semver ranges", 465 | name, 466 | pkg.version 467 | return 468 | handleDep nm, dep 469 | pkg.dependencies[nm] = dep 470 | peerDeps[nm] = dep 471 | for nm, dep of peerDeps 472 | @_getPeerDependencies nm, dep, handlePeerDependencies 473 | 474 | for nm, dep of pkg.dependencies or {} 475 | if dep instanceof Object 476 | dep = dep.version 477 | if dep is 'latest' or dep is '' 478 | dep = '*' 479 | @_getPeerDependencies nm, dep, handlePeerDependencies 480 | 481 | PackageFetcher.prototype._havePackage = (name, spec, pkg, registry) -> 482 | peerDependencies = pkg.peerDependencies ? 
{} 483 | cb peerDependencies for cb in @_peerDependencies[name][spec] 484 | @_peerDependencies[name][spec] = peerDependencies 485 | @_handleDeps pkg, registry 486 | 487 | PackageFetcher.prototype._getPeerDependencies = (name, spec, callback) -> 488 | if @_peerDependencies[name][spec] instanceof Array 489 | @_peerDependencies[name][spec].push callback 490 | else 491 | callback @_peerDependencies[name][spec] 492 | 493 | module.exports = PackageFetcher 494 | --------------------------------------------------------------------------------