├── .bumpversion.cfg ├── .circleci └── config.yml ├── .ghci ├── .gitignore ├── .gitmodules ├── LICENSE ├── README.md ├── Setup.hs ├── default.nix ├── docker └── Dockerfile ├── nix-libs └── nodeLib │ ├── .gitignore │ ├── buildNodePackage.nix │ ├── circular_dependencies.md │ ├── default.nix │ ├── fetch.py │ ├── fetchUrlNamespaced.nix │ └── tools │ ├── check-package-json │ ├── execute-install-scripts │ ├── install-binaries │ └── patch-dependencies ├── nix ├── fetchNixpkgs.nix ├── nixpkgs.nix ├── semver-range.nix └── text-render.nix ├── nixfromnpm.cabal ├── release.nix ├── runtests.py ├── shell.nix ├── src ├── .gitignore ├── Filesystem │ └── Path │ │ └── Wrappers.hs ├── Main.hs ├── NixFromNpm.hs └── NixFromNpm │ ├── Cli.hs │ ├── Common.hs │ ├── Conversion │ ├── ToDisk.hs │ └── ToNix.hs │ ├── Git │ └── Types.hs │ ├── HttpTools.hs │ ├── Merge.hs │ ├── Npm │ ├── PackageMap.hs │ ├── Resolve.hs │ ├── Types.hs │ └── Version.hs │ └── Options.hs ├── tests └── Unit.hs └── top_packages.txt /.bumpversion.cfg: -------------------------------------------------------------------------------- 1 | [bumpversion] 2 | current_version = 0.13.0 3 | commit = true 4 | tag = true 5 | message = "nixfromnpm: bump version from {current_version} to {new_version}" 6 | tag_name = {new_version} 7 | 8 | [bumpversion:file:nixfromnpm.cabal] 9 | 10 | [bumpversion:file:default.nix] 11 | 12 | -------------------------------------------------------------------------------- /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | jobs: 3 | build: 4 | docker: 5 | - image: adnelson/nixfromnpm-base 6 | steps: 7 | - checkout 8 | - run: 9 | name: Build 10 | command: nix-build release.nix -A nixfromnpm --max-jobs 2 11 | - run: 12 | name: Run help command 13 | command: result/bin/nixfromnpm --help >/dev/null 14 | 15 | - run: 16 | name: Build package with no dependencies, confirm rebuilding has no effect 17 | command: | 18 | PATH=$(readlink 
result)/bin:$PATH 19 | cd $(mktemp -d) 20 | nixfromnpm -o output -p lodash 21 | nix-build output -A nodePackages.lodash --max-jobs 2 22 | nixfromnpm -o output -p lodash 23 | if [[ $(ls output/nodePackages/lodash | wc -l) -ne 1 ]]; then 24 | echo "A new version of lodash shouldn't have been created" >&2 25 | exit 1 26 | fi 27 | 28 | - run: 29 | name: Build package with dependencies 30 | command: | 31 | PATH=$(readlink result)/bin:$PATH 32 | cd $(mktemp -d) 33 | nixfromnpm -o output -p optimist 34 | nix-build output -A nodePackages.optimist --max-jobs 2 35 | 36 | - run: 37 | name: Build package with dev dependencies 38 | command: | 39 | PATH=$(readlink result)/bin:$PATH 40 | cd $(mktemp -d) 41 | nixfromnpm -o output -p coffee-script --dev-depth 1 42 | nix-build output -A nodePackages.coffee-script --max-jobs 2 43 | 44 | - run: 45 | name: Build a scoped package with scoped circular dependencies 46 | command: | 47 | PATH=$(readlink result)/bin:$PATH 48 | cd $(mktemp -d) 49 | nixfromnpm -o output -p @webassemblyjs/wast-printer --dev-depth 1 50 | nix-build output -A nodePackages.namespaces.webassemblyjs.wast-parser --max-jobs 2 51 | 52 | - run: 53 | name: Build package with a namespace 54 | command: | 55 | PATH=$(readlink result)/bin:$PATH 56 | cd $(mktemp -d) 57 | nixfromnpm -o output -p '@types/node' 58 | nix-build output -A nodePackages.namespaces.types.node --max-jobs 2 59 | 60 | - run: 61 | name: Build package with a gyp binding 62 | command: | 63 | PATH=$(readlink result)/bin:$PATH 64 | git submodule update --init tests/node-addon-tutorial 65 | PACKAGE_DIR=$PWD/tests/node-addon-tutorial/VectorExample 66 | cd $(mktemp -d) 67 | nixfromnpm -o output -f "$PACKAGE_DIR" 68 | nix-build $PACKAGE_DIR --max-jobs 2 69 | 70 | - run: 71 | name: Build package with an optional dependency (that will fail on linux) 72 | command: | 73 | PATH=$(readlink result)/bin:$PATH 74 | cd $(mktemp -d) 75 | nixfromnpm -o output -p 'sane@2.5.0' 76 | nix-build output -A nodePackages.sane_2-5-0 
--max-jobs 2 77 | 78 | - run: 79 | name: Build package which uses an undocumented format for specifying dependencies 80 | command: | 81 | PATH=$(readlink result)/bin:$PATH 82 | cd $(mktemp -d) 83 | nixfromnpm -o output -p deep-diff 84 | nix-build output -A nodePackages.deep-diff --max-jobs 2 85 | -------------------------------------------------------------------------------- /.ghci: -------------------------------------------------------------------------------- 1 | :set -XFlexibleContexts 2 | :set -XFlexibleInstances 3 | :set -XLambdaCase 4 | :set -XNoImplicitPrelude 5 | :set -XNoMonomorphismRestriction 6 | :set -XOverloadedStrings 7 | :set -XQuasiQuotes 8 | :set -XRecordWildCards 9 | :set -XScopedTypeVariables 10 | :set -XTypeFamilies 11 | :set -XTypeSynonymInstances 12 | :set -XViewPatterns 13 | import ClassyPrelude 14 | import qualified Data.Text as T 15 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | dist 2 | .cabal-sandbox 3 | \#*# 4 | .\#* 5 | build-node-package 6 | result 7 | test_result 8 | test_result* 9 | *.zip 10 | *.tar.gz 11 | *.tgz 12 | gcli 13 | test_priv 14 | test_priv_out 15 | *_test 16 | *-test 17 | node_modules 18 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "tests/node-addon-tutorial"] 2 | path = tests/node-addon-tutorial 3 | url = https://github.com/justadudewhohacks/node-addon-tutorial 4 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2015 Allen Nelson 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the 
"Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # nixfromnpm 2 | 3 | ![Build status](https://circleci.com/gh/adnelson/nixfromnpm/tree/master.svg?style=shield&circle-token=5449a1733b88cfef7ce7ea5f171990d6001214bb) 4 | 5 | Translate NPM packages to Nix expressions. 6 | 7 | ### What it does 8 | 9 | Given the name of one or more packages and an output directory, 10 | queries NPM repositories for those packages' definitions and those of 11 | their dependencies, and generates a set of nix expressions in the 12 | given output directory which not only can be used to build the 13 | requested packages, but also to build any of their dependencies as 14 | desired. 15 | 16 | * The packages generated are easily human readable. 17 | * They can be modified as desired after they are built, and these 18 | modifications will remain in place. 
For example, if a package relies 19 | on non-NPM dependencies, or requires extra build steps such as 20 | patching, these changes can be added by hand and will be respected 21 | down the line. 22 | * Since builds use pre-existing packages, repeated expression 23 | generation is fast. For example, if you generate the expression for 24 | one package, and then generate the expression for another package 25 | which shares dependencies with the first (even if the second is 26 | built at a later time), the shared dependencies will not be 27 | regenerated. 28 | 29 | ### Advantages over `npm2nix` 30 | 31 | `npm2nix` is another tool which can generate nix expressions from an 32 | npm package. However, it has several drawbacks. It generates the 33 | entire dependency tree for an npm package, without availing itself of 34 | results of previous invocations. This is inefficient, since duplicated 35 | packages are built multiple times. The resulting expression is a 36 | single monolithic file which is hard to grok, and hard to 37 | modify. Furthermore, any modifications performed would have to be done 38 | each time the package was regenerated. It also discourages committing 39 | of the resulting package into source control, since it's large and has 40 | to be continually regenerated whenever changes are made. This means 41 | that packages built with it are unlikely to be cached in a nix store 42 | or repo. 43 | 44 | ### Installation 45 | 46 | Clone the `nixfromnpm` repo. 47 | 48 | ```bash 49 | $ git clone https://github.com/adnelson/nixfromnpm 50 | $ cd nixfromnpm 51 | ``` 52 | 53 | Make sure you have nix installed, and `nixpkgs` is in your `NIX_PATH` 54 | environment variable. 
Then run: 55 | 56 | ```bash 57 | $ nix-env --install --attr nixfromnpm --file ./release.nix 58 | ``` 59 | 60 | If you'd like to try out `nixfromnpm` without installing it, or just 61 | hack on it, you can use it in a `nix-shell`: 62 | 63 | ```bash 64 | $ cd /path/to/nixfromnpm 65 | $ nix-shell 66 | [nix-shell:nixfromnpm]$ cabal run -- 67 | ``` 68 | 69 | #### Customizing `nixpkgs` and GHC versions 70 | 71 | By default, we pin the version of `nixpkgs` in order to maintain a 72 | reliable build. However, if you'd like to build off of `nixpkgs` in 73 | your `NIX_PATH` or some other custom location: 74 | 75 | ```bash 76 | $ nix-env -f release.nix -iA nixfromnpm --arg nixpkgs '' 77 | ``` 78 | 79 | The GHC version can also be set explicitly, although of course, the 80 | package is not guaranteed to build with any arbitrary GHC version: 81 | 82 | ```bash 83 | $ nix-env -f release.nix -iA nixfromnpm --argstr compiler ghc802 84 | ``` 85 | 86 | Note that the above options also apply to `nix-build`, `nix-shell`, etc. 87 | 88 | ### Usage 89 | 90 | #### Using the `nix-node-packages` repo 91 | 92 | I recommend using the 93 | [`nix-node-packages`](https://github.com/adnelson/nix-node-packages) 94 | repo for most applications. This repo contains several thousand node 95 | package definitions already, which means whatever you're trying to 96 | build might already be defined. It also contains some packages which 97 | are hand-written or hand-modified from what had been auto-generated by 98 | `nixfromnpm` (fixing bugs or performing additional build steps). To use 99 | the repo, clone it and then specify it as an "output" when calling 100 | `nixfromnpm`. Of course, if the package already exists you can just build 101 | it immediately with `nix-build`. 
102 | 103 | ```bash 104 | $ git clone https://github.com/adnelson/nix-node-packages 105 | $ nixfromnpm -o nix-node-packages -p 'the-package-I-need@the-version-I-need' 106 | $ nix-build nix-node-packages -A nodePackages.the-package-I-need_the-version-I-need 107 | ``` 108 | 109 | See that repo's `README.md` for information. 110 | 111 | If you don't use the repo, the commands below will still work, but some of the packages which the repo provides fixes for might fail to build. 112 | 113 | #### Generating an expression for a package 114 | 115 | The most basic usage is providing an `-o` (`--output`) flag and one or 116 | more `-p` flags: 117 | 118 | ```bash 119 | $ nixfromnpm -o /some/path -p package_name -p other_package_name 120 | ``` 121 | 122 | This will build the packages called `package_name` and 123 | `other_package_name`, and put all of the generated expressions in 124 | `/some/path`. That path will be created if it doesn't exist. If the 125 | output path does exist, a package will only be fetched if a nix 126 | expression for it doesn't already exist. 127 | 128 | You can also specify a version bound on the packages you are fetching, 129 | using `@`: 130 | 131 | ```bash 132 | $ nixfromnpm -p package_name@version_bound -o /some/path 133 | ``` 134 | 135 | Any NPM version bound is valid; so for example: 136 | 137 | ```bash 138 | $ nixfromnpm -p foo@0.8.6 -o /some/path 139 | $ nixfromnpm -p 'foo@>=0.8 <0.9' -o /some/path 140 | $ nixfromnpm -p 'foo@~1.0.0' -o /some/path 141 | ``` 142 | 143 | #### Generating an expression from a package.json file 144 | 145 | You can also generate an expression for a project on the local disk by 146 | passing in the path to a `package.json` file, or a directory 147 | containing one. The generated expression will be placed in a file 148 | called `project.nix` in the same directory as the `package.json` file, 149 | and a `default.nix` will be created in the directory as well which 150 | calls into `project.nix`. 
As with normal usage, the `-o` flag is used 151 | to specify a path to where generated expressions will be placed; 152 | however, only the *downstream* dependencies will be put here, while the 153 | expression itself will be in the `project.nix` file. 154 | 155 | ```bash 156 | $ nixfromnpm -f /path/to/package.json -o /path/to/dependency/set 157 | ``` 158 | 159 | You can give multiple `-f` arguments to build multiple expressions on 160 | disk, and it can be used alongside `-p` arguments as well. 161 | 162 | #### Development Dependencies 163 | 164 | NPM packages differentiate between dependencies needed at runtime and 165 | dependencies required only when developing on a package (e.g. packages 166 | for testing, transpilers for compile-to-javascript languages, etc). A 167 | package might have a great number of development dependencies, and 168 | there might be circular dependencies with development packages because 169 | the packages are not required at runtime. For this reason it's 170 | generally better to avoid generating expressions for development 171 | dependencies unless they're needed, because they add a lot of extra 172 | work and generate a lot of files. 173 | 174 | `nixfromnpm` lets you generate expressions for development 175 | dependencies at a maximum depth. For example, depth `0` means don't 176 | make any development dependency expressions; depth `1` means create 177 | expressions for the package being built, but not any of their 178 | development dependencies, etc. 
179 | 180 | ```bash 181 | $ nixfromnpm -p package_name -o /some/path --dev-depth 1 182 | ``` 183 | 184 | #### Extra registries 185 | 186 | For a package in a private registry located at `https://my.registry:2345`: 187 | 188 | ```bash 189 | $ nixfromnpm -p private_package -o /some/path -r https://my.registry:2345 190 | ``` 191 | 192 | #### Github authorization 193 | 194 | For npm packages which fetch from git, if an authorization token is required: 195 | 196 | ```bash 197 | $ nixfromnpm -p package -o /some/path --github-token llnl23uinlaskjdno34nedhoaidjn5o48wugn 198 | ``` 199 | 200 | This can also be set by a `GITHUB_TOKEN` environment variable. 201 | 202 | #### Private NPM namespaces 203 | 204 | NPM offers private packaging, where you can specify a *namespace* or 205 | *scope* under which a package lives. For example, a package might be 206 | designated as `@foo/bar`, where the package is called `bar` and the 207 | namespace is called `foo`. `nixfromnpm` supports this, as long as you 208 | have set up your NPM repo to use authorization tokens (see 209 | documentation for details). To use this, set an environment variable 210 | `NPM_AUTH_TOKENS`, with the following format: 211 | 212 | ``` 213 | mynamespace=mytoken:myothernamespace=myothertoken:... 214 | ``` 215 | 216 | Where tokens are keyed on namespaces. Then when building the 217 | expression set, if a namespaced package is encountered, `nixfromnpm` 218 | will look up the namespace in this environment variable to determine 219 | what token to use for authentication. The same environment variable is 220 | read when the packages are built with `nix-build`. 
The path to the 221 | package in the generated expressions is slightly different: 222 | 223 | ```bash 224 | $ export NPM_AUTH_TOKENS="foo=a1a1a1a1a1a1a1a1a1a" 225 | $ nixfromnpm -o my_expressions -p '@foo/bar' 226 | $ nix-build my_expressions -A namespaces.foo.bar 227 | ``` 228 | 229 | #### Caching of packages 230 | 231 | By default, `nixfromnpm` will discover all existing packages in the 232 | specified output directory (provided via the `-o` flag). However, if 233 | you would like to generate all of these from scratch, you can disable 234 | caching with `--no-cache`. 235 | 236 | #### Troubleshooting a package that doesn't build 237 | 238 | There are any number of reasons why a package might not build. Some of 239 | the most common ones are: 240 | 241 | * The `nixfromnpm` tool wasn't able to generate the definition of one of the package's dependencies. It will insert in the `brokenPackage` function, which, as might be anticipated, never builds. Looking at the call to `brokenPackage` will tell you why it couldn't build it. In my experience, this is because `nixfromnpm`'s version range checker is not completely up to spec, and it's unable to find a version that satisfies the bounds given by a `package.json`. If this is the case, the easiest way to fix it is to: 242 | * See what version range `nixfromnpm` failed to resolve. E.g. `foo@>=1.2.3-bar <2.3.4-baz.qux`. 243 | * Use `npm` to manually build the package at the given version bounds. E.g. `npm install foo@>=1.2.3-bar <2.3.4-baz.qux`. 244 | * See what version it ends up building. E.g. `foo@1.2.3-xyz`. 245 | * Call `nixfromnpm` on that version. E.g. `nixfromnpm -o /path/to/nix-node-packages -p 'foo@1.2.3-xyz'`. 246 | * Replace the call to `brokenPackage` with `foo_1-2-3-xyz`. 247 | * The build fails with `npm` complaining about HTTP errors. This is usually caused by a dependency that wasn't satisfied, likely because `nixfromnpm` calculated the wrong dependency. 
In this case, use steps similar to the above to find out what the actual dependency should be, and modify the package definition to include the correct one. 248 | * A package build script is attempting to do some hacky bullshit like modifying its dependencies. This, of course, is not kosher in the `nix` view of things. In this case, you'll probably want to `nix-shell` into the package and see what it's trying to do. Figure out how to stop it from doing these things, and supply `prePatch` or `postPatch` steps to apply those changes. 249 | 250 | The good news is that if you patch or otherwise fix a broken package, it will not be overwritten by subsequent invocations of `nixfromnpm` (*although*, I highly recommend keeping your expressions in source control in case bad things happen!). 251 | 252 | ### Contributions 253 | 254 | This project has gone from what I thought would be a weekend project 255 | to a pretty significant undertaking. I think it has the potential to 256 | be pretty useful to JS developers who are interested in using nix, and 257 | it's already usable, with 258 | [thousands of packages](https://github.com/adnelson/nix-node-packages) 259 | defined (although there's no guarantee, of course, that these all 260 | work). However, there are still a number of 261 | [issues](https://github.com/adnelson/nixfromnpm/issues) which need to 262 | be addressed. I very heartily welcome contributions, whether error 263 | reporting or pull requests. 
264 | -------------------------------------------------------------------------------- /Setup.hs: -------------------------------------------------------------------------------- 1 | import Distribution.Simple 2 | main = defaultMain 3 | -------------------------------------------------------------------------------- /default.nix: -------------------------------------------------------------------------------- 1 | { mkDerivation, aeson, ansi-terminal, base, bytestring 2 | , classy-prelude, containers, curl, data-default, data-fix 3 | , directory, exceptions, hnix, hspec, lifted-base, megaparsec 4 | , MissingH, monad-control, mono-traversable, mtl 5 | , neat-interpolation, network-uri, optparse-applicative, parsec 6 | , pcre-heavy, QuickCheck, regex-tdfa, regex-tdfa-text, semver-range 7 | , SHA, shelly, stdenv, system-filepath, temporary, text 8 | , text-render, transformers, unix, unordered-containers 9 | }: 10 | mkDerivation { 11 | pname = "nixfromnpm"; 12 | version = "0.13.0"; 13 | src = ./.; 14 | isLibrary = true; 15 | isExecutable = true; 16 | enableSeparateDataOutput = true; 17 | libraryHaskellDepends = [ 18 | aeson ansi-terminal base bytestring classy-prelude containers curl 19 | data-default data-fix directory exceptions hnix lifted-base 20 | megaparsec MissingH monad-control mono-traversable mtl network-uri 21 | optparse-applicative parsec pcre-heavy regex-tdfa regex-tdfa-text 22 | semver-range SHA shelly system-filepath temporary text text-render 23 | transformers unix unordered-containers 24 | ]; 25 | executableHaskellDepends = [ base optparse-applicative ]; 26 | testHaskellDepends = [ 27 | aeson base bytestring classy-prelude hnix hspec mono-traversable 28 | neat-interpolation QuickCheck text 29 | ]; 30 | description = "Generate nix expressions from npm packages"; 31 | license = stdenv.lib.licenses.mit; 32 | } 33 | -------------------------------------------------------------------------------- /docker/Dockerfile: 
-------------------------------------------------------------------------------- 1 | # Start with the base nixos image 2 | FROM nixos/nix 3 | 4 | # Clone the nixfromnpm directory, then enter the shell. This will 5 | # cause all of the dependencies to be built. 6 | RUN nix-env -iA nixpkgs.git -iA nixpkgs.bashInteractive -iA nixpkgs.coreutils --max-jobs 8 7 | RUN cd /tmp && git clone https://github.com/adnelson/nixfromnpm && cd nixfromnpm && nix-shell --max-jobs $(nproc) --command true 8 | 9 | # Remove the nixfromnpm repo; no reason to keep it 10 | RUN rm -rf /tmp/nixfromnpm 11 | -------------------------------------------------------------------------------- /nix-libs/nodeLib/.gitignore: -------------------------------------------------------------------------------- 1 | npm-3.4.1 2 | -------------------------------------------------------------------------------- /nix-libs/nodeLib/buildNodePackage.nix: -------------------------------------------------------------------------------- 1 | { 2 | # System packages. 3 | pkgs, 4 | # Derivation for nodejs and npm. 5 | nodejs, 6 | # Which version of npm to use. 7 | npm ? nodejs, 8 | # Self-reference for overriding purposes. 9 | buildNodePackage, 10 | # Provides xcode binaries to OSX builds (for native packages). 11 | xcode-wrapper, 12 | # Scripts that we use during the npm builds. 13 | node-build-tools, 14 | # C header files for node libraries 15 | nodejsSources, 16 | }: 17 | 18 | let 19 | inherit (pkgs) stdenv python2 file darwin; 20 | inherit (pkgs.lib) showVal optional foldl; 21 | inherit (stdenv.lib) fold removePrefix hasPrefix subtractLists flip isList 22 | intersectLists isAttrs listToAttrs nameValuePair hasAttr 23 | mapAttrs filterAttrs attrNames elem concatMapStrings 24 | attrValues concatStringsSep optionalString filter 25 | optionalAttrs optionals; 26 | 27 | # Join a list of strings with newlines, filtering out empty lines. 
28 | joinLines = strings: concatStringsSep "\n" (filter (s: s != "") strings); 29 | 30 | # Map a function and concatenate with newlines. 31 | concatMapLines = list: func: joinLines (map func list); 32 | 33 | # Create a tar wrapper that filters all the 'Ignoring unknown 34 | # extended header keyword' noise 35 | # 36 | # Cribbed from nixpkgs/pkgs/development/node-packages/node-env.nix 37 | tarWrapper = pkgs.runCommand "tarWrapper" {} '' 38 | mkdir -p $out/bin 39 | cat > $out/bin/tar <--nodejs-" 95 | nameSuffix ? "-${nodejs.name}", 96 | 97 | # If there's a namespace, by default it will be prepended to the package 98 | # name. Otherwise, a prefix can be given explicitly. 99 | namePrefix ? (if namespace == null then "" else "=${namespace}=-"), 100 | 101 | # List of (runtime) dependencies. 102 | deps ? [], 103 | 104 | # List of runtime dependencies which are circular, meaning that the 105 | # package being defined here occurs somewhere in its own dependency 106 | # tree. 107 | circularDependencies ? [], 108 | 109 | # List of optional dependencies. 110 | optionalDependencies ? [], 111 | 112 | # List or set of development dependencies (or null). These will only be 113 | # installed when `includeDevDependencies` is true, which is provided by 114 | # the `.env` attribute. 115 | devDependencies ? null, 116 | 117 | # If true and devDependencies are defined, the package will only be 118 | # installed contingent on successfully running tests. 119 | doCheck ? false, 120 | 121 | # If true, devDependencies will be added to the packages to the 122 | # build environment. By default, this is true whenever doCheck is true. 123 | includeDevDependencies ? doCheck, 124 | 125 | # Bash command to run package tests. 126 | checkPhase ? defaultCheckPhase, 127 | 128 | # Build inputs to propagate in addition to nodejs and non-dev dependencies. 129 | propagatedBuildInputs ? [], 130 | 131 | # Build inputs in addition to npm and dev dependencies. 132 | buildInputs ? 
[], 133 | 134 | # Whether to strip debugging symbols from binaries. 135 | # This normally shouldn't be necessary but it can be enabled if desired. 136 | # Doc for details: https://nixos.org/wiki/NixPkgs_Standard_Environment. 137 | dontStrip ? true, 138 | 139 | # Optional attributes to pass through to downstream derivations. 140 | passthru ? {}, 141 | 142 | # A set of dependencies to patch, changing the version given in the 143 | # package.json. Keys are dependency names, values are new 144 | # versions. Alternatively, a value can be `null`, which will have 145 | # the effect of removing the dependency from the package.json. 146 | patchDependencies ? {}, 147 | 148 | # We attempt to automatically remove dev dependencies from the node_modules 149 | # folder prior to copying to the nix store. If this isn't desired (for 150 | # example, custom behavior is needed), then set this to true. 151 | skipDevDependencyCleanup ? false, 152 | 153 | # Indicates the package is broken. Not super user-friendly but 154 | # better than nothing. 155 | isBroken ? false, 156 | 157 | # Metadata about the package. 158 | meta ? {}, 159 | 160 | # Build step 161 | buildStep ? "execute-install-scripts", 162 | 163 | # Overrides to the arguments to mkDerivation. This can be used to 164 | # set custom values for the arguments that buildNodePackage would 165 | # set, so it's only necessary for a certain set of keys (everything 166 | # else can just be passed in directly). 167 | derivationOverrides ? {}, 168 | 169 | # Any remaining flags are passed through to mkDerivation. 170 | ... 171 | } @ args: 172 | 173 | let 174 | # The package name as it appears in the package.json. This contains a 175 | # namespace if there is one, so it will be a distinct identifier for 176 | # different packages. 177 | fullName = if namespace == null then name else "@${namespace}/${name}"; 178 | 179 | # The package name with a version appended. This should be unique amongst 180 | # all packages. 
181 | uniqueName = "${fullName}@${version}"; 182 | 183 | in 184 | 185 | # Dev dependencies are required to be installed to run unit tests for 186 | # nearly all packages. Therefore we require that they be installed in 187 | # order to enable tests. 188 | if doCheck && (devDependencies == null) 189 | then throw ("${uniqueName}: Can't run tests because devDependencies have " + 190 | "not been defined. You can pass in `devDependencies = [];` if " + 191 | "there are no dev dependencies.") 192 | else if includeDevDependencies && (devDependencies == null) 193 | then throw ("${uniqueName}: Can't include dev dependencies since they have " + 194 | "not been defined. You can pass in `devDependencies = [];` if " + 195 | "there are no dev dependencies.") 196 | else if isBroken 197 | then throw "${uniqueName}: listed as broken, see definition for details" 198 | else 199 | 200 | let 201 | # Types of npm dependencies as they appear as keys in a package.json file. 202 | dependencyTypes = ["dependencies" "devDependencies" "optionalDependencies"]; 203 | 204 | # These arguments are intended as directives to this function and not 205 | # to be passed through to mkDerivation. They are removed below. 206 | attrsToRemove = ["deps" "flags" "isBroken" 207 | "passthru" "doCheck" "includeDevDependencies" "version" 208 | "namespace" "skipDevDependencyCleanup" "patchDependencies" 209 | "circularDependencies" "derivationOverrides"] ++ dependencyTypes; 210 | 211 | # We create a `self` object for self-referential expressions. It 212 | # bottoms out in a call to `mkDerivation` at the end. 213 | self = let 214 | # Set of normal dependencies. 215 | _dependencies = toAttrSet deps; 216 | # Set of circular dependencies. 
217 | _circularDependencies = toAttrSet circularDependencies; 218 | 219 | # Since optional dependencies are optional, ignore the ones that fail 220 | tryOrNull = p: let r = builtins.tryEval "${p}"; in if r.success then p else null; 221 | 222 | _optionalDependencies = 223 | if isList optionalDependencies 224 | then toAttrSet (filter (x: x != null) (map tryOrNull optionalDependencies)) 225 | else if isAttrs optionalDependencies 226 | then toAttrSet (filterAttrs (_: x: x != null) (mapAttrs (_: tryOrNull) optionalDependencies)) 227 | else toAttrSet optionalDependencies; 228 | 229 | # Dev dependencies will only be included if requested. 230 | _devDependencies = if !includeDevDependencies then {} 231 | else toAttrSet devDependencies; 232 | 233 | # Dependencies we need to propagate, meaning they need to be 234 | # available to the package at runtime. We don't include the 235 | # circular dependencies here, even though they might be needed at 236 | # runtime, because we have a "special way" of building them. 237 | runtimeDependencies = _dependencies // _optionalDependencies; 238 | 239 | # Names of packages to keep when cleaning up dev dependencies. We 240 | # put them in a dictionary for fast lookup, but the values are 241 | # just null. 242 | packagesToRetain = mapAttrs (_: _: null) ( 243 | runtimeDependencies // _circularDependencies); 244 | 245 | # Required dependencies are those that we haven't filtered yet. 246 | requiredDependencies = _devDependencies // runtimeDependencies; 247 | 248 | patchPhase = joinLines [ 249 | "runHook prePatch" 250 | "patchShebangs $PWD >/dev/null" 251 | # Ensure that the package name matches what is in the package.json. 252 | "check-package-json checkPackageName ${fullName}" 253 | # Remove any impure dependencies from the package.json (see script 254 | # for details). Apply patches in patchDependencies arguments. 
255 | "patch-dependencies" 256 | # We do not handle shrinkwraps yet 257 | "rm -fv npm-shrinkwrap.json" 258 | (args.patchPhase or "") 259 | "runHook postPatch" 260 | ]; 261 | 262 | # Computes the "circular closure" of a package. 263 | # See ./circular_dependencies.md for details. 264 | circularClosure = 265 | # Packages we've already seen. 266 | seenPackages: 267 | # Name of package we're inspecting. 268 | package: 269 | if hasAttr package.name seenPackages 270 | # We've completed the cycle; stop here. 271 | then seenPackages 272 | else let 273 | # Add package to seen. 274 | seen = seenPackages // {"${package.name}" = package;}; 275 | # Recur on circular dependencies. 276 | closure = map (circularClosure seen) 277 | (attrValues package.circularDependencies); 278 | in 279 | # Combine the results into a single set. 280 | foldl (a: b: a // b) self.circularDependencies closure; 281 | 282 | # Compute any cycles. Remove 'self' from the dependency closure. 283 | circularDepClosure = removeAttrs (circularClosure {} self) [self.name]; 284 | 285 | # Turn the closure into a list of all circular dependencies. 286 | circulars = attrValues circularDepClosure; 287 | 288 | # All of the transitive dependencies (non-circular) of the 289 | # circular packages. 290 | transCircularDeps = 291 | foldl (a: b: a // b) {} (map (p: p.runtimeDependencies) circulars); 292 | 293 | configurePhase = 294 | let 295 | # Symlink dependencies for node modules. 296 | link = dep: '' 297 | mkdir -p ${dep.modulePath} 298 | if ! 
[[ -e node_modules/${dep.fullName} ]]; then 299 | ln -sv ${dep.fullPath} ${dep.modulePath} 300 | if [[ -d ${dep}/bin ]]; then 301 | find -L ${dep}/bin -maxdepth 1 -type f -executable \ 302 | | while read exec_file; do 303 | echo "Symlinking $exec_file binary to node_modules/.bin" 304 | mkdir -p node_modules/.bin 305 | ln -s $exec_file node_modules/.bin/$(basename $exec_file) 306 | done 307 | fi 308 | fi 309 | ''; 310 | in concatStringsSep "\n" ( 311 | ["runHook preConfigure"] ++ 312 | (flip map (attrValues requiredDependencies) link) ++ 313 | ["runHook postConfigure"] ++ 314 | (optional (circulars != []) (let 315 | in concatStringsSep "\n" [ 316 | # Extract all of the circular dependencies' tarballs. 317 | (concatMapLines circulars (dep: '' 318 | echo Satisfying ${dep.fullName}, circular dependency \ 319 | of ${self.fullName} 320 | mkdir -p node_modules 321 | if [[ ! -d node_modules/${dep.fullName} ]]; then 322 | tar xf ${dep.src} 323 | if [[ ! -d package ]]; then 324 | echo "Expected ${dep.src} to be a tarball containing a" \ 325 | "'package' directory. Don't know how to handle this :(" 326 | exit 1 327 | fi 328 | 329 | mkdir -p node_modules/${dep.fullName} 330 | cp -rP package/. node_modules/${dep.fullName} 331 | rm -rf ./package 332 | fi 333 | '')) 334 | # Symlink all of the transitive dependencies of the circular packages. 335 | (concatMapLines (attrValues transCircularDeps) link) 336 | # Create a temporary symlink to the current package directory, 337 | # so that node knows that the dependency is satisfied when 338 | # checking the recursive dependencies (grumble grumble). 339 | "ln -s $PWD node_modules/${self.fullName}" 340 | ] 341 | ))); 342 | 343 | buildPhase = concatStringsSep "\n" [ 344 | "runHook preBuild" 345 | # Previous NODE_PATH should be empty, but it might have been set 346 | # in the custom derivation steps. 
347 | "export NODE_PATH=$PWD/node_modules:$NODE_PATH" 348 | "check-package-json checkDependencies" 349 | buildStep 350 | # If we have any circular dependencies, they will need to reference 351 | # the current package at runtime. Make a symlink into the node modules 352 | # folder which points at where the package will live in $out. 353 | (optionalString (circulars != []) '' 354 | rm node_modules/${self.fullName} 355 | ln -s $out/lib/node_modules/${self.fullName} \ 356 | node_modules/${self.fullName} 357 | '') 358 | "runHook postBuild" 359 | ]; 360 | 361 | installPhase = '' 362 | runHook preInstall 363 | 364 | # Ensure that the main entry point appears post-build. 365 | check-package-json checkMainEntryPoint 366 | 367 | # Install the package that we just built. 368 | mkdir -p $out/lib/${self.modulePath} 369 | 370 | # Remove all of the dev dependencies which do not appear in other 371 | # dependency sets. 372 | ${if skipDevDependencyCleanup then "" else 373 | flip concatMapStrings (attrValues _devDependencies) (dep: 374 | let 375 | rm = dep: 376 | if !hasAttr dep.name packagesToRetain 377 | then '' 378 | # Remove the dependency from node modules 379 | rm -rfv node_modules/${dep.fullName} 380 | # Remove any binaries it generated from node_modules/.bin 381 | if [[ -d ${dep}/bin ]]; then 382 | find -L ${dep}/bin -maxdepth 1 -type f -executable \ 383 | | while read exec_file; do 384 | rm -fv node_modules/.bin/$(basename $exec_file) 385 | done 386 | fi 387 | '' 388 | else '' 389 | echo "Retaining ${dep.basicName} since it " \ 390 | "appears in the set of dependencies to propagate" 391 | ''; 392 | in 393 | rm dep)} 394 | 395 | # Copy the folder that was created for this path to $out/lib. 396 | cp -r $PWD $out/lib/node_modules/${self.fullName} 397 | 398 | # Remove the node_modules subfolder from there, and instead put things 399 | # in $PWD/node_modules into that folder. 400 | if [ -e "$out/lib/node_modules/${self.fullName}/man" ]; then 401 | echo -n "Linking manpages... 
" 402 | NUM_MAN_PAGES=0 403 | mkdir -p $out/share 404 | for dir in $out/lib/node_modules/${self.fullName}/man/*; do #*/ 405 | mkdir -p $out/share/man/$(basename "$dir") 406 | for page in $dir/*; do #*/ 407 | ln -s $page $out/share/man/$(basename "$dir") 408 | NUM_MAN_PAGES=$(($NUM_MAN_PAGES + 1)) 409 | done 410 | done 411 | echo "linked $NUM_MAN_PAGES man pages." 412 | fi 413 | 414 | # Install binaries using the `bin` object in the package.json 415 | install-binaries 416 | 417 | runHook postInstall 418 | ''; 419 | 420 | # These are the arguments that we will pass to `stdenv.mkDerivation`. 421 | mkDerivationArgs = removeAttrs args attrsToRemove // { 422 | inherit 423 | buildPhase 424 | checkPhase 425 | configurePhase 426 | doCheck 427 | dontStrip 428 | fullName 429 | installPhase 430 | meta 431 | patchPhase 432 | nodejsSources 433 | src; 434 | 435 | patchDependencies = builtins.toJSON patchDependencies; 436 | 437 | NO_DEV_DEPENDENCIES = !includeDevDependencies; 438 | 439 | # Tell mkDerivation to run `setVariables` prior to other phases. 440 | prePhases = ["setVariables"]; 441 | 442 | # Define some environment variables that we will use in the build. 443 | setVariables = '' 444 | # In case this was set by an upstream derivation. 445 | unset NODE_PATH 446 | 447 | # This creates a string for this package which is unique but 448 | # deterministic. We can use it to create temporary directories 449 | # and URLs and be confident there will be no collisions. 450 | HASHEDNAME=$(echo "$propagatedNativeBuildInputs $name" \ 451 | | md5sum | awk '{print $1}') 452 | export HASHEDNAME 453 | 454 | # This appends the package name and version to the hash string 455 | # we defined above, so that it is more human-readable. 
456 | export UNIQNAME="''${HASHEDNAME:0:10}-${name}-${version}" 457 | 458 | # Add gyp to the path in case it's needed 459 | export PATH=${nodejs}/lib/node_modules/npm/bin/node-gyp-bin:$PATH 460 | ''; 461 | 462 | shellHook = '' 463 | runHook preShellHook 464 | runHook setVariables 465 | export PATH=${npm}/bin:${nodejs}/bin:$(pwd)/node_modules/.bin:$PATH 466 | rm -rf $TMPDIR/$UNIQNAME 467 | mkdir -p $TMPDIR/$UNIQNAME 468 | ( 469 | cd $TMPDIR/$UNIQNAME 470 | eval "$configurePhase" 471 | ) 472 | echo "Installed $fullName dependencies in temporary directory" \ 473 | "$TMPDIR/$UNIQNAME" 474 | export PATH=$TMPDIR/$UNIQNAME/node_modules/.bin:$PATH 475 | NODE_MODULES=$TMPDIR/$UNIQNAME/node_modules 476 | export NODE_PATH=$NODE_MODULES:$NODE_PATH 477 | # Check if the current directory contains the package.json for 478 | # this package. 479 | py_cmd='import json; print(json.load(open("package.json"))["name"])' 480 | if [[ -e package.json ]] && \ 481 | [[ $(python -c "$py_cmd" 2>/dev/null) == "$fullName" ]]; then 482 | IN_PACKAGE_DIR=true 483 | # If we're in the package directory, symlink it into the 484 | # temporary node modules folder we're building and then 485 | # attempt to import it. Issue a warning if we're not 486 | # successful. 487 | echo "Symlinking current directory into node modules folder..." 488 | mkdir -pv $(dirname $NODE_MODULES/$fullName) 489 | ln -sv $(pwd) $NODE_MODULES/$fullName 490 | # Symlink the node modules folder to whatever has been built. 491 | # Don't do this if there is a node_modules directory because this 492 | # could break current directory state. However, issue a warning in 493 | # this case. 494 | if [[ -e node_modules ]] && [[ ! -L node_modules ]]; then 495 | echo "Warning: node_modules exists but is not a symlink." 
>&2 496 | echo "You can remove it (rm -r node_modules) and re-enter the" >&2 497 | echo 'shell, or run `ln -sf $NODE_MODULES node_modules`' >&2 498 | else 499 | rm -fv node_modules 500 | ln -sfv $NODE_MODULES node_modules 501 | fi 502 | else 503 | echo >&2 504 | echo "WARNING:" >&2 505 | echo "You are not in the directory for $fullName, so the shell"\ 506 | "hook can't symlink the local source code into the temporary"\ 507 | "node_modules directory. This will probably prevent you from"\ 508 | "using $fullName in a node REPL or running its code." >&2 509 | echo "You might be able to do something manually to"\ 510 | "set this up. For example if this package's source is a "\ 511 | "tarball, running these commands might work:" >&2 512 | echo >&2 513 | echo ' $ tar -xf $src' >&2 514 | echo ' $ ln -s $PWD/package $NODE_MODULES/$fullName' >&2 515 | echo >&2 516 | fi 517 | runHook postShellHook 518 | ''; 519 | 520 | # Propagate pieces of information about the package so that downstream 521 | # packages can reflect on them. 522 | passthru = (passthru // { 523 | inherit uniqueName fullName namespace version runtimeDependencies; 524 | circularDependencies = _circularDependencies; 525 | # The basic name is the name without namespace or version, in contrast 526 | # to the fullName which might have a namespace attached, or the 527 | # uniqueName which has a version attached. 528 | basicName = name; 529 | 530 | # The path within $out/lib to find the package. If the package does not 531 | # have a namespace, it will simply be in `node_modules`, and otherwise 532 | # it will appear in `node_modules/@namespace`. 533 | modulePath = if namespace == null then "node_modules" 534 | else "node_modules/@${namespace}"; 535 | 536 | # The full path to the package's code (i.e. folder containing 537 | # package.json) within the nix store. 
538 | fullPath = "${self}/lib/node_modules/${self.fullName}"; 539 | 540 | # The `env` attribute is meant to be used with `nix-shell` (although 541 | # that's not required). It will build the package with its dev 542 | # dependencies. This means that the package must have dev dependencies 543 | # defined, or it will error. 544 | env = buildNodePackage (args // {includeDevDependencies = true;}); 545 | 546 | # An 'overrideNodePackage' attribute, which will call 547 | # `buildNodePackage` with new arguments produced by the given 548 | # arg-override function. The function consumes the original 549 | # argument set. 550 | # 551 | # N.B: the legacy behavior of accepting a set is preserved but 552 | # the preferred usage-pattern is to supply a function that 553 | # discards its argument; e.g: 554 | # 555 | # overrideNodePackage (_: { ... }) 556 | # 557 | # We don't use the name `override` because this will get stomped on 558 | # if the derivation is the result of a `callPackage` application. 559 | overrideNodePackage = newArgs: 560 | if builtins.isFunction newArgs 561 | then buildNodePackage (args // (newArgs args)) 562 | else buildNodePackage (args // newArgs); 563 | 564 | }); 565 | } // { 566 | name = if namePrefix == null then throw "Name prefix is null" 567 | else if name == null then throw "Name is null" 568 | else if version == null then throw "Version of ${name} is null" 569 | else if nameSuffix == null then throw "Name suffix is null" 570 | else "${namePrefix}${name}-${version}${nameSuffix}"; 571 | 572 | # Propagate the runtime dependencies, any non-nodejs dependencies, 573 | # and nodejs itself. 574 | propagatedBuildInputs = propagatedBuildInputs ++ 575 | attrValues runtimeDependencies ++ 576 | [nodejs]; 577 | 578 | 579 | # Give as buildInputs npm, python, dev dependencies (if any) and 580 | # additional specified build inputs. In addition, on darwin we 581 | # provide XCode, since node-gyp will use it, and on linux we add 582 | # utillinux. 
583 | buildInputs = [ tarWrapper npm python2 file node-build-tools ] ++ 584 | attrValues _devDependencies ++ 585 | buildInputs ++ 586 | (optional stdenv.isLinux pkgs.utillinux) ++ 587 | (optionals stdenv.isDarwin [darwin.cctools xcode-wrapper]); 588 | } // optionalAttrs stdenv.isLinux { 589 | LOCALE_ARCHIVE = "${pkgs.glibcLocales}/lib/locale/locale-archive"; 590 | } // derivationOverrides; 591 | 592 | in stdenv.mkDerivation mkDerivationArgs; 593 | in self 594 | -------------------------------------------------------------------------------- /nix-libs/nodeLib/circular_dependencies.md: -------------------------------------------------------------------------------- 1 | # A note on how we solve circular dependencies 2 | 3 | Npm supports circular dependencies, while nix does not. A package *P* 4 | has a circular dependency *D* if and only if *P* appears in the dependency 5 | closure of *D*. (By *dependency closure* we mean "a package's 6 | dependencies, and all of its dependencies' dependencies, etc") Imagine 7 | the following situation ("->" means "depends on"): 8 | 9 | ``` 10 | A -> {B}, B -> {A, C}, C -> {D, E, F} 11 | ``` 12 | 13 | In this case we have circularity between *A* and *B*. When building *A*, 14 | we can't build *B* the normal way, because we'd have to first build 15 | *A*, and we'd get infinite recursion. However, we can put the 16 | package *B*, and all of its dependencies, in the `node_modules` 17 | folder for *A*, and then *A* will be able to use *B* and vice versa when 18 | *A* is a dependency of some library. This means to build *A* we need to: 19 | 20 | 1. Build *C*, *D*, *E*, and *F* the normal way. 21 | 2. Symlink *C*, *D*, *E* and *F* into *A*'s `node_modules` folder. 22 | 3. Extract the source of *B* into the `node_modules` folder. 23 | 4. Make a self-referential symlink of *A* into its own `node_modules` folder. 
24 | 25 | The last step is a bit strange but since *B* needs to be able to import 26 | *A*, *A* needs to be in the same `node_modules` folder as *B*. We can 27 | accomplish this with a symlink. 28 | 29 | Now, how about building *B*? *A* is a circular dependency of *B*, but *A* 30 | doesn't have any other dependencies. This means that the process 31 | is simply 32 | 33 | 1. Extract the source of *A* into the `node_modules` folder. 34 | 2. Symlink *B* into the `node_modules` folder. 35 | 36 | Now let's imagine a slightly more complicated situation involving 37 | a three-way circularity. 38 | 39 | ``` 40 | A -> {B, C}, B -> {C, D, E}, C -> {A, D} 41 | ``` 42 | 43 | In this case the only packages we can build normally are *D* and 44 | *E*. Then to build *A*, we build *E* and *D* as normal and symlink them 45 | into `node_modules`, extract *B* and *C*'s source into `node_modules`, and 46 | make a reflexive symlink. 47 | 48 | Finally, a double-circular dependency: 49 | 50 | ``` 51 | A -> {B}, B -> {C, A}, C -> {B} 52 | ``` 53 | 54 | So the more general algorithm to build a package *A* which has one 55 | or more circular dependencies is: 56 | 57 | 1. Compute the full set of circular dependencies of *A*. 58 | 2. Compute the full set of packages that those dependencies depend 59 | on at runtime, and symlink all of these into `node_modules`. 60 | 3. Extract the source of each circular dependency into `node_modules`. 61 | 4. Make a self-referential symlink of *A*. 62 | -------------------------------------------------------------------------------- /nix-libs/nodeLib/default.nix: -------------------------------------------------------------------------------- 1 | /* 2 | A set of tools for generating node packages, such as to be imported by 3 | default.nix files generated by nixfromnpm.
4 | */ 5 | 6 | # NOTE: this should be removed when backwards compatibility breaking 7 | # changes are allowed to be made to the top-level generated 8 | # default.nix 9 | 10 | { 11 | # Self-reference so that we can pass through to downstream libraries 12 | self 13 | }: 14 | 15 | { 16 | # Base set of packages, i.e. nixpkgs. 17 | pkgs, 18 | # nodejs derivation. 19 | nodejs ? pkgs.nodejs-8_x, 20 | } @ args: 21 | 22 | let 23 | 24 | inherit (pkgs.lib) extends makeExtensible makeOverridable; 25 | 26 | # Function to replace dots with something 27 | replaceDots = c: replaceChars ["."] [c]; 28 | inherit (builtins) readDir removeAttrs length getEnv elemAt hasAttr; 29 | inherit (pkgs.lib) attrNames attrValues filterAttrs flip foldl 30 | hasSuffix hasPrefix removeSuffix replaceChars 31 | optional optionals stringToCharacters 32 | concatStrings tail splitString; 33 | inherit (pkgs.stdenv) isLinux; 34 | 35 | # Function to remove the first character of a string. 36 | dropFirstChar = str: concatStrings (tail (stringToCharacters str)); 37 | 38 | # Concatenate a list of sets. 39 | joinSets = foldl (a: b: a // b) {}; 40 | 41 | # Parse the `NPM_AUTH_TOKENS` environment variable to discover 42 | # namespace-token associations and turn them into an attribute set 43 | # which we can use as an input to the fetchPrivateNpm function. 44 | # Split the variable on ':', then turn each k=v element in 45 | # the list into an attribute set and join all of those sets. 46 | namespaceTokens = joinSets ( 47 | flip map (splitString ":" (getEnv "NPM_AUTH_TOKENS")) (kvPair: 48 | let kv = splitString "=" kvPair; in 49 | if length kv != 2 then {} 50 | else {"${elemAt kv 0}" = elemAt kv 1;})); 51 | 52 | # A function similar to fetchUrl but allows setting of custom headers. 
53 | fetchUrlNamespaced = pkgs.callPackage ./fetchUrlNamespaced.nix { 54 | inherit namespaceTokens; 55 | }; 56 | 57 | fetchUrlWithHeaders = fetchUrlNamespaced; 58 | 59 | xcode-wrapper = pkgs.xcbuild; 60 | 61 | # Directory containing build tools for buildNodePackage 62 | node-build-tools = pkgs.stdenv.mkDerivation { 63 | name = "node-build-tools"; 64 | buildInputs = [pkgs.makeWrapper nodejs pkgs.python2]; 65 | buildCommand = '' 66 | mkdir -p $out 67 | cp -r ${./tools} $out/bin 68 | chmod -R +w $out/bin 69 | wrapProgram $out/bin/check-package-json \ 70 | --set SEMVER_PATH ${nodejs}/lib/node_modules/npm/node_modules/semver 71 | wrapProgram $out/bin/execute-install-scripts \ 72 | --prefix PATH : ${dirOf pkgs.python2.interpreter} \ 73 | --prefix PATH : ${dirOf pkgs.stdenv.shell} 74 | patchShebangs $out/bin 75 | ''; 76 | }; 77 | 78 | # A script performing various sanity/correctness checks on the package.json 79 | checkPackageJson = pkgs.writeScript "checkPackageJson" '' 80 | #!${pkgs.stdenv.shell} 81 | export SEMVER_PATH=${nodejs}/lib/node_modules/npm/node_modules/semver 82 | exec ${nodejs}/bin/node ${./tools/check-package-json} "$@" 83 | ''; 84 | 85 | # A script which will install all of the binaries a package.json 86 | # declares into the output folder. 87 | installPackageJsonBinaries = pkgs.writeScript "installPackageJsonBinaries" '' 88 | #!${pkgs.stdenv.shell} 89 | exec ${pkgs.python2.interpreter} ${./tools/install-binaries} "$@" 90 | ''; 91 | 92 | # This expression builds the raw C headers and source files for the base 93 | # node.js installation. Node packages which use the C API for node need to 94 | # link against these files and use the headers. 95 | nodejsSources = pkgs.runCommand "node-sources" {} '' 96 | tar --no-same-owner --no-same-permissions -xf ${nodejs.src} 97 | mv $(find . 
-type d -mindepth 1 -maxdepth 1) $out 98 | ''; 99 | in 100 | 101 | rec { 102 | inherit nodejs; 103 | 104 | buildNodePackage = import ./buildNodePackage.nix { 105 | inherit pkgs nodejs buildNodePackage xcode-wrapper node-build-tools nodejsSources; 106 | }; 107 | # A generic package that will fail to build. This is used to indicate 108 | # packages that are broken, without failing the entire generation of 109 | # a package expression. 110 | brokenPackage = {name, reason}: 111 | let 112 | deriv = pkgs.stdenv.mkDerivation { 113 | name = "BROKEN-${name}"; 114 | buildCommand = '' 115 | echo "Package ${name} is broken: ${reason}" 116 | exit 1 117 | ''; 118 | passthru.withoutTests = deriv; 119 | passthru.pkgName = name; 120 | passthru.basicName = "BROKEN"; 121 | passthru.uniqueName = "BROKEN"; 122 | passthru.overrideNodePackage = (_: (_: deriv)); 123 | passthru.namespace = null; 124 | passthru.version = "BROKEN"; 125 | passthru.override = _: deriv; 126 | passthru.recursiveDeps = []; 127 | }; 128 | in 129 | deriv; 130 | 131 | # The function that a default.nix can call into which will scan its 132 | # directory for all of the package files and generate a big attribute set 133 | # for all of them. Re-exports the `callPackage` function and all of the 134 | # attribute sets, as well as the nodeLib. 135 | # 136 | # We use `lib.makeOverridable` so that this function's result can be 137 | # overridden in the same way that we can override the result from a 138 | # `callPackage` invocation. This is intended to pave the way for a 139 | # non-backwards compatible refactor to provide a simpler interface 140 | # that resembles other overridable language package sets 141 | # (e.g. haskellPackages). 
142 | # 143 | # In truth, this whole file should be refactored so that it only 144 | # provides "libs" and then have a make-packages.nix file that can be 145 | # `callPackage`'ed from the `default.nix` one level up from the 146 | # `nodeLibs`; the intake arguments would be a union of the arguments to 147 | # this file and to this generatePackages function. 148 | generatePackages = makeOverridable ({ 149 | 150 | # Path to find node packages in. 151 | nodePackagesPath, 152 | 153 | # Extensions are other node libraries which will be folded into the 154 | # generated one. 155 | # 156 | # This is deprecated, the overrides argument should be used 157 | # instead and if there are multiple package sets to give they can 158 | # be composed first with `composeExtensions`. 159 | # 160 | # Note that any overrides provided via the `overrides` argument 161 | # will override any package of the same name from the union of all 162 | # package sets given in `extensions`. 163 | extensions ? [], 164 | 165 | overrides ? (self: super: {}), 166 | 167 | # If any additional arguments should be made available to callPackage 168 | # (for example for packages which require additional arguments), they 169 | # can be passed in here. Those packages can declare an `extras` argument 170 | # which will contain whatever is passed in here. 171 | extras ?
{} 172 | }: 173 | let 174 | 175 | mkScope = scope: ({ 176 | inherit fetchUrlNamespaced fetchUrlWithHeaders namespaceTokens; 177 | inherit pkgs buildNodePackage brokenPackage extras nodejsSources; 178 | } // scope); 179 | 180 | callPackage = pkgs.newScope (mkScope { 181 | inherit nodePackages; 182 | inherit (nodePackages) namespaces; 183 | }); 184 | 185 | initialNodePackages = self: 186 | let 187 | oldExtensions = joinSets (map (e: e.nodePackages) extensions); 188 | packageSet = pkgs.callPackage nodePackagesPath { 189 | callPackage = pkgs.newScope (mkScope { 190 | nodePackages = self; 191 | inherit (self) namespaces; 192 | }); 193 | }; 194 | in 195 | oldExtensions // packageSet; 196 | 197 | nodePackages = makeExtensible (extends overrides initialNodePackages); 198 | 199 | in 200 | { inherit callPackage namespaceTokens pkgs node-build-tools nodejsSources; 201 | nodePackages = nodePackages // {inherit nodejs;}; 202 | nodeLib = self args; 203 | }); 204 | } 205 | -------------------------------------------------------------------------------- /nix-libs/nodeLib/fetch.py: -------------------------------------------------------------------------------- 1 | import os 2 | import requests 3 | out = os.environ['out'] 4 | url = os.environ['url'] 5 | headers = {"User-Agent": "nix-fetchurl"} 6 | if os.getenv("auth"): 7 | headers["Authorization"] = "Bearer {}".format(os.environ["auth"]) 8 | print('GET {} with headers {}'.format(url, headers)) 9 | response = requests.get(url, headers=headers) 10 | if response.status_code != 200: 11 | exit("Received a {} response. 
:(\nContent: {}" 12 | .format(response.status_code, response.content)) 13 | else: 14 | print('Response: {} ({} bytes)' 15 | .format(response.status_code, len(response.content))) 16 | with open(out, 'wb') as f: 17 | f.write(response.content) 18 | -------------------------------------------------------------------------------- /nix-libs/nodeLib/fetchUrlNamespaced.nix: -------------------------------------------------------------------------------- 1 | # A python-based fetchurl function, allowing the passage of an auth 2 | # header via namespaceTokens. Just calls into `requests` under the hood. 3 | { 4 | pythonPackages, stdenv, namespaceTokens 5 | }: 6 | 7 | 8 | { # URL to fetch. 9 | url ? "" 10 | 11 | , # Additional curl options needed for the download to succeed. 12 | curlOpts ? "" 13 | 14 | , # Name of the file. If empty, use the basename of `url' (or of the 15 | # first element of `urls'). 16 | name ? "" 17 | 18 | # Different ways of specifying the hash. 19 | , outputHash ? "" 20 | , outputHashAlgo ? "" 21 | , md5 ? "" 22 | , sha1 ? "" 23 | , sha256 ? "" 24 | 25 | , # Meta information, if any. 26 | meta ? {} 27 | 28 | # Namespace to use (checked for in namespaceTokens) 29 | , namespace ? 
null 30 | }: 31 | 32 | let 33 | auth = if namespace != null && builtins.hasAttr namespace namespaceTokens 34 | then namespaceTokens.${namespace} 35 | else null; 36 | inherit (stdenv.lib) flip mapAttrs' nameValuePair; 37 | hasHash = (outputHash != "" && outputHashAlgo != "") 38 | || md5 != "" || sha1 != "" || sha256 != ""; 39 | in 40 | 41 | if !hasHash 42 | then throw "You must specify the output hash for ${url}" 43 | else 44 | 45 | stdenv.mkDerivation { 46 | inherit url auth; 47 | name = if name != "" then name else baseNameOf (toString url); 48 | 49 | outputHashAlgo = if outputHashAlgo != "" then outputHashAlgo else 50 | if sha256 != "" then "sha256" else if sha1 != "" then "sha1" else "md5"; 51 | outputHash = if outputHash != "" then outputHash else 52 | if sha256 != "" then sha256 else if sha1 != "" then sha1 else md5; 53 | 54 | # Only flat hashing, which is the normal mode if you're fetching a file. 55 | outputHashMode = "flat"; 56 | 57 | # Doing the download on a remote machine just duplicates network 58 | # traffic, so don't do that. 59 | preferLocalBuild = true; 60 | 61 | buildInputs = with pythonPackages; [python requests]; 62 | buildCommand = '' 63 | python ${./fetch.py} 64 | ''; 65 | } 66 | -------------------------------------------------------------------------------- /nix-libs/nodeLib/tools/check-package-json: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 'use strict' 3 | var fs = require('fs'); 4 | 5 | if (!fs.existsSync('./package.json')) { 6 | throw new Error('This must be run in a directory with a package.json'); 7 | } 8 | 9 | // Load up the package object. 10 | var packageObj = JSON.parse(fs.readFileSync('./package.json')); 11 | 12 | // Exit with an error message. 13 | function fail(msg) { 14 | console.error(msg); 15 | process.exit(1); 16 | } 17 | 18 | // Ensure that the declared package name matches the expected. 
19 | function checkPackageName(expectedName) { 20 | if (expectedName !== packageObj.name) { 21 | fail("Package name declared in package.json (" + packageObj.name + 22 | ") does not match expected name (" + expectedName + ")"); 23 | } 24 | } 25 | 26 | // Returns true if a file path exists. 27 | function exists(path) { 28 | try { 29 | fs.lstatSync(path); 30 | return true; 31 | } catch (e) { 32 | return false; 33 | } 34 | } 35 | 36 | // Ensure that the main entry point exists. 37 | function checkMainEntryPoint() { 38 | if (packageObj.main) { 39 | var mainEntryPoint = packageObj.main; 40 | if (!(exists(mainEntryPoint) || 41 | exists(mainEntryPoint + ".js") || 42 | exists(mainEntryPoint + ".node"))) { 43 | fail("Main entry point " + mainEntryPoint + " does not exist"); 44 | } 45 | } 46 | } 47 | 48 | // Check that all dependencies of a package have been satisfied. This 49 | // should only be called when npm fails, assuming it failed because of 50 | // a missing dependency. 51 | function checkDependencies() { 52 | if (!process.env.SEMVER_PATH) { 53 | throw new Error("No path to the semver module is set."); 54 | }; 55 | var semver = require(process.env.SEMVER_PATH); 56 | // This will be keyed on the dependency name and version, and valued with 57 | // the error. 58 | var errorsFound = {}; 59 | var warningsFound = {}; 60 | 61 | // Given the name and version range of a package, check: 62 | // * That a package with the given name exists in the node_modules folder. 63 | // * That its version satisfies the given version bounds. 64 | function checkDependency(name, versionRange, errorIfMissing, description) { 65 | process.stderr.write(" " + name + "@" + versionRange + " -> "); 66 | var dependencyPackageObj; 67 | var pkgJsonPath = process.cwd() + "/node_modules/" + name + "/package.json"; 68 | var errorKey = name + "@" + versionRange; 69 | function err(message) { 70 | var prefix = errorIfMissing ? 
"ERROR" : "WARNING"; 71 | message = "(" + description + ") " + message; 72 | console.error(prefix + ": " + message); 73 | (errorIfMissing ? errorsFound : warningsFound)[errorKey] = message; 74 | } 75 | try { 76 | dependencyPackageObj = JSON.parse(fs.readFileSync(pkgJsonPath)); 77 | } catch (e) { 78 | return err("Not found in node_modules"); 79 | } 80 | // Check that the version matches 81 | var version = dependencyPackageObj.version; 82 | if (!semver.satisfies(version, versionRange)) { 83 | return err("version " + version + " doesn't match range " + versionRange); 84 | } 85 | console.error("OK (found version " + dependencyPackageObj.version + ")"); 86 | 87 | // If the package has any peer dependencies, check that they are satisfied 88 | if (dependencyPackageObj.peerDependencies) { 89 | console.error("Checking peer dependencies of " + name); 90 | for (var depName in dependencyPackageObj.peerDependencies) { 91 | var range = dependencyPackageObj.peerDependencies[depName]; 92 | checkDependency(depName, range, false, "peer dependency of " + name); 93 | } 94 | }; 95 | } 96 | 97 | // Verify that all of the declared dependencies in a package.json file 98 | // are satisfied by the environment. 
99 | var depTypes = ["dependencies", "devDependencies"]; 100 | for (var depTypeIdx in depTypes) { 101 | var depType = depTypes[depTypeIdx]; 102 | if (depType === "devDependencies" && process.env.NO_DEV_DEPENDENCIES) { 103 | continue; 104 | } 105 | if (packageObj[depType]) { 106 | console.log("Checking " + depType + " of " + packageObj.name); 107 | for (var depName in packageObj[depType]) { 108 | checkDependency(depName, packageObj[depType][depName], true, 109 | "Appears in " + packageObj.name + "'s " + depType); 110 | } 111 | } 112 | } 113 | 114 | if (JSON.stringify(warningsFound) !== "{}") { 115 | console.error("Found the following warnings:"); 116 | for (var depName in warningsFound) { 117 | console.error(" " + depName + ": " + warningsFound[depName]); 118 | } 119 | } 120 | if (JSON.stringify(errorsFound) !== "{}") { 121 | console.error("Found the following errors:"); 122 | for (var depName in errorsFound) { 123 | console.error(" " + depName + ": " + errorsFound[depName]); 124 | } 125 | fail("One or more dependencies were unsatisfied. :("); 126 | } 127 | } 128 | 129 | var commands = ['checkPackageName', 'checkMainEntryPoint', 'checkDependencies']; 130 | switch (process.argv[2]) { 131 | case 'checkPackageName': { 132 | if (!process.argv[3]) { 133 | fail("Need an argument for the package name."); 134 | } 135 | checkPackageName(process.argv[3]); break; 136 | } 137 | case 'checkMainEntryPoint': { 138 | checkMainEntryPoint(); break; 139 | } 140 | case 'checkDependencies': { 141 | checkDependencies(); break; 142 | } 143 | default: { 144 | fail('No command, or unrecognized command given. 
Commands are: ' 145 | + commands.join(", ")); 146 | } 147 | } 148 | -------------------------------------------------------------------------------- /nix-libs/nodeLib/tools/execute-install-scripts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import json, subprocess, os, sys 3 | 4 | # In case a build step references the home directory, set it here. 5 | if not os.getenv("HOME"): 6 | os.environ["HOME"] = os.getcwd() 7 | 8 | scripts = json.load(open("package.json")).setdefault("scripts", {}) 9 | 10 | def runscript(name): 11 | if scripts.get(name): 12 | print("Running {} defined in package.json".format(repr(name))) 13 | sys.stdout.flush() 14 | proc = subprocess.Popen(["bash", "-x"], stdin=subprocess.PIPE) 15 | proc.communicate(input=scripts[name]) 16 | if proc.wait() != 0: 17 | exit("{} script failed".format(name)) 18 | 19 | runscript("preinstall") 20 | runscript("install") 21 | runscript("postinstall") 22 | -------------------------------------------------------------------------------- /nix-libs/nodeLib/tools/install-binaries: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # This script will read the package.json file and create a symlink for each 3 | # item under the `bin` key. The bin can be a string or a dictionary. 4 | # See: https://docs.npmjs.com/files/package.json#bin 5 | import json 6 | import os 7 | from os.path import exists, join, isdir, normpath 8 | import stat 9 | import sys 10 | 11 | with open("package.json", "r") as f: 12 | package = json.load(f) 13 | 14 | if "bin" not in package: 15 | sys.exit(0) 16 | 17 | _bin = package["bin"] 18 | package_name = package["name"] 19 | 20 | if isinstance(_bin, basestring): 21 | # This is equivalent to a singleton dictionary where the key is the 22 | # name of the package. 23 | name = package_name 24 | # If the name is namespaced, just grab the basic name. 
25 | if name.startswith("@"): 26 | if "/" not in name: 27 | sys.exit("Can't derive a valid package name from {}".format(name)) 28 | name = name.split("/")[1] 29 | _bin = {name: _bin} 30 | elif not isinstance(_bin, dict): 31 | # Otherwise it must be a dictionary. 32 | sys.exit("Expected `bin` key in package.json to point to a string " 33 | "or a dict, but it is '{}', of type '{}'" 34 | .format(_bin, type(_bin).__name__)) 35 | 36 | out_dir = os.environ["out"] 37 | 38 | # Create the .bin folder 39 | bin_folder = join(out_dir, "bin") 40 | if not isdir(bin_folder): 41 | os.makedirs(bin_folder) 42 | 43 | print("Creating binaries in {}".format(bin_folder)) 44 | 45 | for bin_name, bin_path in _bin.items(): 46 | bin_name = (bin_name or "").strip() 47 | if bin_name == "": 48 | sys.exit("Blank binary name for package {}".format(pname)) 49 | # Get the absolute path of the script being pointed to. 50 | bin_abs_path = normpath(join(out_dir, "lib", "node_modules", 51 | package["name"], bin_path)) 52 | if not exists(bin_abs_path): 53 | sys.exit("Package {} version {} declares a binary {} at path {}, " 54 | "but there is no such file at that path.".format( 55 | package["name"], package["version"], bin_name, bin_path)) 56 | # If a shebang isn't present, add a node shebang. 57 | with open(bin_abs_path) as f: 58 | bin_contents = f.read() 59 | lines = bin_contents.strip().splitlines() 60 | if len(lines) == 0: 61 | sys.exit("Bin file {} is empty.".format(bin_name)) 62 | elif not lines[0].strip().startswith("#!"): 63 | print("Adding node shebang to bin file {}".format(bin_name)) 64 | with open(bin_abs_path, "w") as f: 65 | f.write("#!/usr/bin/env node\n") 66 | f.write(bin_contents) 67 | 68 | print("Linking binary {} to {}".format(bin_name, bin_abs_path)) 69 | # Add executable permissions to the binary. 
70 | bin_stats = os.stat(bin_abs_path) 71 | os.chmod(bin_abs_path, bin_stats.st_mode | stat.S_IEXEC) 72 | os.symlink(bin_abs_path, join(bin_folder, bin_name)) 73 | -------------------------------------------------------------------------------- /nix-libs/nodeLib/tools/patch-dependencies: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | // These packages come packaged with nodejs. 3 | var fs = require('fs'); 4 | var url = require('url'); 5 | 6 | function versionSpecIsImpure(versionSpec) { 7 | // Returns true if a version spec is impure. 8 | return (versionSpec == "latest" || versionSpec == "unstable" || 9 | // file path references 10 | versionSpec.substr(0, 2) == ".." || 11 | versionSpec.substr(0, 2) == "./" || 12 | versionSpec.substr(0, 2) == "~/" || 13 | versionSpec.substr(0, 1) == '/' || 14 | // github owner/repo references 15 | /^[^/]+\/[^/]+(#.*)?$/.test(versionSpec) || 16 | // is a URL 17 | url.parse(versionSpec).protocol); 18 | } 19 | 20 | // Load up the package object. 21 | var packageObj = JSON.parse(fs.readFileSync('./package.json')); 22 | 23 | // Purify dependencies. 24 | var depTypes = ['dependencies', 'devDependencies', 'optionalDependencies', 25 | 'peerDependencies']; 26 | for (var i in depTypes) { 27 | var depType = depTypes[i]; 28 | var depSet = packageObj[depType]; 29 | if (depSet !== undefined) { 30 | for (var depName in depSet) { 31 | var versionSpec = depSet[depName]; 32 | if (versionSpecIsImpure(versionSpec)) { 33 | console.log("Replacing impure version spec " + versionSpec + 34 | " for dependency " + depName + " with '*'"); 35 | depSet[depName] = '*'; 36 | } 37 | } 38 | } 39 | } 40 | 41 | // Remove any recursive dependencies if they exist. 
42 | if (process.env.circularDependencies) { 43 | var circularDependencies = process.env.circularDependencies.split(" "); 44 | for (var i in circularDependencies) { 45 | var dep = circularDependencies[i]; 46 | if (packageObj.dependencies[dep] != null) { 47 | delete packageObj.dependencies[dep]; 48 | } 49 | if (packageObj.devDependencies[dep] != null) { 50 | delete packageObj.devDependencies[dep]; 51 | } 52 | if (packageObj.peerDependencies[dep] != null) { 53 | delete packageObj.peerDependencies[dep]; 54 | } 55 | if (packageObj.optionalDependencies[dep] != null) { 56 | delete packageObj.optionalDependencies[dep]; 57 | } 58 | } 59 | } 60 | 61 | 62 | /* Patch dependencies as set by the patchDependencies variable */ 63 | var patches = JSON.parse(process.env.patchDependencies || "{}"); 64 | if (Object.keys(patches).length > 0) { 65 | console.log("Applying patches from patchDependencies argument..."); 66 | var errors = false; 67 | // Iterate through all of the dependencies we're patching, and for 68 | // each one either remove it or set it to something else. 69 | for (var depName in patches) { 70 | var version = patches[depName]; 71 | var found = false; 72 | for (var i in depTypes) { 73 | var depType = depTypes[i]; 74 | if (packageObj[depType] && packageObj[depType][depName] != null) { 75 | found = true; 76 | var bound = packageObj[depType][depName]; 77 | console.log("Found " + depName + "@" + bound + " in " + depType + ". " + 78 | (version === null ? 
"Removing" 79 | : ("Setting to version " + version))); 80 | if (version === null) delete packageObj[depType][depName]; 81 | else packageObj[depType][depName] = version; 82 | } 83 | } 84 | if (!found) { 85 | console.log("Can't patch dependency " + depName + " for package " + 86 | packageObj.name + "@" + packageObj.version + ", because " + 87 | "it's not listed as a dependency."); 88 | errors = true; 89 | } 90 | } 91 | if (errors) process.exit(1); 92 | } 93 | 94 | /* Write the fixed JSON file */ 95 | fs.writeFileSync("package.json", JSON.stringify(packageObj)); 96 | -------------------------------------------------------------------------------- /nix/fetchNixpkgs.nix: -------------------------------------------------------------------------------- 1 | { rev # The Git revision of nixpkgs to fetch 2 | , sha256 # The SHA256 of the downloaded data 3 | , outputSha256 ? null # The SHA256 fixed-output hash 4 | , system ? builtins.currentSystem # This is overridable if necessary 5 | }: 6 | 7 | if (0 <= builtins.compareVersions builtins.nixVersion "1.12") 8 | 9 | # In Nix 1.12, we can just give a `sha256` to `builtins.fetchTarball`. 
10 | then ( 11 | builtins.fetchTarball { 12 | url = "https://github.com/NixOS/nixpkgs/archive/${rev}.tar.gz"; 13 | sha256 = outputSha256; 14 | }) 15 | 16 | # This hack should at least work for Nix 1.11 17 | else ( 18 | (rec { 19 | tarball = import <nix/fetchurl.nix> { 20 | url = "https://github.com/NixOS/nixpkgs/archive/${rev}.tar.gz"; 21 | inherit sha256; 22 | }; 23 | 24 | builtin-paths = import <nix/config.nix>; 25 | 26 | script = builtins.toFile "nixpkgs-unpacker" '' 27 | "$coreutils/mkdir" "$out" 28 | cd "$out" 29 | "$gzip" --decompress < "$tarball" | "$tar" -x --strip-components=1 30 | ''; 31 | 32 | nixpkgs = builtins.derivation ({ 33 | name = "nixpkgs-${builtins.substring 0 6 rev}"; 34 | 35 | builder = builtins.storePath builtin-paths.shell; 36 | 37 | args = [ script ]; 38 | 39 | inherit tarball system; 40 | 41 | tar = builtins.storePath builtin-paths.tar; 42 | gzip = builtins.storePath builtin-paths.gzip; 43 | coreutils = builtins.storePath builtin-paths.coreutils; 44 | } // (if null == outputSha256 then { } else { 45 | outputHashMode = "recursive"; 46 | outputHashAlgo = "sha256"; 47 | outputHash = outputSha256; 48 | })); 49 | }).nixpkgs) 50 | -------------------------------------------------------------------------------- /nix/nixpkgs.nix: -------------------------------------------------------------------------------- 1 | let 2 | fetchNixpkgs = import ./fetchNixpkgs.nix; 3 | in 4 | 5 | # version - 18.03 6 | 7 | # to update use 8 | # nix-prefetch-url --unpack https://github.com/NixOS/nixpkgs/archive/$rev.tar.gz 9 | 10 | fetchNixpkgs { 11 | rev = "949bddfae38a613a0e8b0931e48ea5d843c1cf71"; 12 | sha256 = "1xlpl4hnw1hybd9q36av7xjjdp5igam498w56hnwvfi603aih13r"; 13 | outputSha256 = "14lbj6qdgga548k7x610an34c91204dmhcz0c5lc9viry184x0l7"; 14 | } 15 | -------------------------------------------------------------------------------- /nix/semver-range.nix: -------------------------------------------------------------------------------- 1 | { mkDerivation, base, classy-prelude, fetchgit, hspec, parsec 2
| , QuickCheck, stdenv, text, unordered-containers 3 | }: 4 | mkDerivation { 5 | pname = "semver-range"; 6 | version = "0.2.7"; 7 | src = fetchgit { 8 | url = "https://github.com/adnelson/semver-range"; 9 | sha256 = "02gyxd23689hs8ji6708ify0739dn6wiwqry1j3ajbk7wb3v5zr8"; 10 | rev = "6c7073c31185ea974869dcc6d0d1f3b0335bb2d7"; 11 | }; 12 | libraryHaskellDepends = [ 13 | base classy-prelude parsec text unordered-containers 14 | ]; 15 | testHaskellDepends = [ 16 | base classy-prelude hspec parsec QuickCheck text 17 | unordered-containers 18 | ]; 19 | description = "An implementation of semver and semantic version ranges"; 20 | license = stdenv.lib.licenses.mit; 21 | } 22 | -------------------------------------------------------------------------------- /nix/text-render.nix: -------------------------------------------------------------------------------- 1 | { mkDerivation, base, classy-prelude, mtl, parsec, stdenv, text }: 2 | mkDerivation { 3 | pname = "text-render"; 4 | version = "0.1.0.3"; 5 | sha256 = "1p78xsr25qxmfgsl73lzfn7j32ni897667k48448fkihdsg0a15g"; 6 | libraryHaskellDepends = [ base classy-prelude mtl parsec text ]; 7 | homepage = "http://github.com/thinkpad20/text-render"; 8 | description = "A type class for rendering objects as text, pretty-printing, etc"; 9 | license = stdenv.lib.licenses.mit; 10 | } 11 | -------------------------------------------------------------------------------- /nixfromnpm.cabal: -------------------------------------------------------------------------------- 1 | name: nixfromnpm 2 | version: 0.13.0 3 | synopsis: Generate nix expressions from npm packages. 4 | description: 5 | Given an npm package name and one or more npm repositories, will dump out a 6 | collection of nix files, one each for the initial package and all of its 7 | dependencies. Will generate a top-level 'default.nix' which returns a set 8 | containing all of these expressions. 
Subsequent invocations of the program 9 | using the same target directory will result in re-use of the existing files, 10 | to avoid unnecessary duplication. 11 | license: MIT 12 | license-file: LICENSE 13 | author: Allen Nelson 14 | maintainer: anelson@narrativescience.com 15 | build-type: Simple 16 | cabal-version: >=1.10 17 | bug-reports: https://github.com/adnelson/nixfromnpm/issues 18 | Category: Tools, Nix 19 | 20 | data-files: nix-libs/nodeLib/buildNodePackage.nix 21 | , nix-libs/nodeLib/circular_dependencies.md 22 | , nix-libs/nodeLib/default.nix 23 | , nix-libs/nodeLib/fetch.py 24 | , nix-libs/nodeLib/fetchUrlNamespaced.nix 25 | , nix-libs/nodeLib/tools/check-package-json 26 | , nix-libs/nodeLib/tools/install-binaries 27 | , nix-libs/nodeLib/tools/patch-dependencies 28 | , nix-libs/nodeLib/tools/execute-install-scripts 29 | 30 | 31 | source-repository head 32 | type: git 33 | location: git://github.com/adnelson/nixfromnpm.git 34 | 35 | library 36 | default-language: Haskell2010 37 | hs-source-dirs: src 38 | exposed-modules: NixFromNpm 39 | , NixFromNpm.Cli 40 | , NixFromNpm.Common 41 | , NixFromNpm.Conversion.ToDisk 42 | , NixFromNpm.Conversion.ToNix 43 | , NixFromNpm.Git.Types 44 | , NixFromNpm.Npm.PackageMap 45 | , NixFromNpm.Npm.Types 46 | , NixFromNpm.Npm.Version 47 | , NixFromNpm.Options 48 | other-modules: Filesystem.Path.Wrappers 49 | , NixFromNpm.HttpTools 50 | , NixFromNpm.Merge 51 | , NixFromNpm.Npm.Resolve 52 | , Paths_nixfromnpm 53 | other-extensions: NoImplicitPrelude 54 | build-depends: base >=4.8 && < 5.0 55 | , classy-prelude 56 | , mono-traversable 57 | , text 58 | , bytestring 59 | , mtl 60 | , unordered-containers 61 | , containers 62 | , parsec 63 | , aeson 64 | , data-default 65 | , shelly 66 | , MissingH 67 | , text-render 68 | , system-filepath 69 | , network-uri 70 | , directory 71 | , hnix >=0.4.0 72 | , optparse-applicative 73 | , curl 74 | , temporary 75 | , SHA 76 | , monad-control 77 | , lifted-base 78 | , transformers 79 | , 
unix 80 | , ansi-terminal 81 | , semver-range >=0.2.7 82 | , data-fix 83 | , pcre-heavy 84 | , exceptions 85 | , megaparsec >= 6.0.0 86 | , regex-tdfa 87 | , regex-tdfa-text 88 | default-language: Haskell2010 89 | 90 | executable nixfromnpm 91 | default-language: Haskell2010 92 | other-extensions: NoImplicitPrelude 93 | main-is: src/Main.hs 94 | build-depends: base >=4.8 && < 5.0 95 | , optparse-applicative 96 | , nixfromnpm 97 | 98 | test-suite unit-tests 99 | default-language: Haskell2010 100 | type: exitcode-stdio-1.0 101 | hs-source-dirs: tests 102 | main-is: Unit.hs 103 | other-extensions: NoImplicitPrelude 104 | , OverloadedStrings 105 | build-depends: base >=4.8 && < 5.0 106 | , classy-prelude 107 | , mono-traversable 108 | , aeson 109 | , bytestring 110 | , hnix 111 | , text 112 | , hspec 113 | , QuickCheck 114 | , neat-interpolation 115 | , nixfromnpm 116 | ghc-options: -threaded -rtsopts -with-rtsopts=-N 117 | default-language: Haskell2010 118 | -------------------------------------------------------------------------------- /release.nix: -------------------------------------------------------------------------------- 1 | { 2 | nixpkgs ? import ./nix/nixpkgs.nix, 3 | compiler ? 
null, 4 | }: 5 | let 6 | config = { allowUnfree = true; }; 7 | 8 | overlays = [ 9 | (newPkgs: oldPkgs: rec { 10 | origHaskellPackages = if compiler == null then oldPkgs.haskellPackages 11 | else oldPkgs.haskell.packages."${compiler}"; 12 | 13 | haskellPackages = origHaskellPackages.override { 14 | overrides = haskellPackagesNew: haskellPackagesOld: 15 | { semver-range = 16 | haskellPackagesNew.callPackage ./nix/semver-range.nix { }; 17 | 18 | text-render = 19 | haskellPackagesNew.callPackage ./nix/text-render.nix { }; 20 | 21 | hnix = haskellPackagesOld.hnix_0_4_0; 22 | 23 | nixfromnpm = 24 | let 25 | inherit (newPkgs.lib) any flip elem hasSuffix hasPrefix elemAt splitString; 26 | # We'll typically have a lot of files in this directory; 27 | # we only want to take a few of them though. Make a filtering 28 | # function which will choose them. 29 | dirsToInclude = ["src" "tests" "nix-libs"]; 30 | filesToInclude = ["LICENSE" "nixfromnpm.cabal"]; 31 | _filter = path: type: let 32 | subpath = elemAt (splitString "${toString ./.}/" path) 1; 33 | spdir = elemAt (splitString "/" subpath) 0; 34 | in 35 | elem spdir dirsToInclude || 36 | (type == "regular" && elem subpath filesToInclude); 37 | in 38 | newPkgs.haskell.lib.overrideCabal 39 | (haskellPackagesNew.callPackage ./default.nix { }) 40 | (oldDerivation: rec { 41 | src = builtins.filterSource _filter oldDerivation.src; 42 | shellHook = builtins.trace src ((oldDerivation.shellHook or "") + '' 43 | export SRC=${src} 44 | export CURL_CA_BUNDLE=${newPkgs.cacert}/etc/ssl/certs/ca-bundle.crt 45 | export NIX_LIBS_DIR=$PWD/nix-libs 46 | ''); 47 | }); 48 | }; 49 | }; 50 | 51 | }) 52 | ]; 53 | 54 | pkgs = import nixpkgs { inherit config overlays; }; 55 | 56 | in 57 | 58 | { inherit (pkgs.haskellPackages) nixfromnpm; inherit pkgs; } 59 | -------------------------------------------------------------------------------- /runtests.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env 
nix-shell 2 | #! nix-shell -i python3 -p python3Packages.pyyaml 3 | """Run the tests in the circleci config, for local dev.""" 4 | from os.path import dirname, join 5 | from subprocess import call 6 | import yaml 7 | 8 | with open(join(dirname(__file__), ".circleci", "config.yml")) as f: 9 | circle_cfg = yaml.load(f) 10 | 11 | results = {} 12 | failures = 0 13 | 14 | for step in circle_cfg["jobs"]["build"]["steps"]: 15 | try: 16 | name, command = step["run"]["name"], step["run"]["command"] 17 | except (TypeError, KeyError): 18 | continue 19 | print("Running", name) 20 | if call(command, shell=True) == 0: 21 | results[name] = "PASSED" 22 | else: 23 | results[name] = "FAILED" 24 | failures += 1 25 | 26 | print("\nRESULTS:\n") 27 | for name, passed in results.items(): 28 | print("*", name, "PASSED" if passed else "FAILED") 29 | 30 | exit(failures) 31 | -------------------------------------------------------------------------------- /shell.nix: -------------------------------------------------------------------------------- 1 | { 2 | nixpkgs ? import ./nix/nixpkgs.nix, 3 | compiler ? 
null 4 | }: 5 | (import ./release.nix { inherit nixpkgs compiler; }).nixfromnpm.env 6 | -------------------------------------------------------------------------------- /src/.gitignore: -------------------------------------------------------------------------------- 1 | test*.nix 2 | -------------------------------------------------------------------------------- /src/Filesystem/Path/Wrappers.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE NoImplicitPrelude #-} 2 | {-# LANGUAGE FlexibleContexts #-} 3 | {-# LANGUAGE LambdaCase #-} 4 | {-# LANGUAGE OverloadedStrings #-} 5 | module Filesystem.Path.Wrappers where 6 | 7 | import ClassyPrelude hiding (FilePath, unpack, (), readFile, readFileUtf8, 8 | writeFile, writeFileUtf8) 9 | import qualified ClassyPrelude as CP 10 | import Data.Text hiding (map) 11 | import qualified Data.Text as T 12 | import System.Directory (Permissions(..)) 13 | import qualified System.Directory as Dir 14 | import qualified System.Posix.Files as Posix 15 | import Filesystem.Path.CurrentOS 16 | import Control.Monad.Trans.Control 17 | import Control.Exception.Lifted 18 | import qualified Paths_nixfromnpm as Paths 19 | 20 | import qualified Nix.Expr as Nix 21 | 22 | -- | Take a function that takes a string path and returns something, and 23 | -- turn it into a function that operates in any MonadIO and takes a FilePath. 24 | generalize :: MonadIO io => (CP.FilePath -> IO a) -> FilePath -> io a 25 | generalize action = liftIO . action . pathToString 26 | 27 | -- | Makes a nix regular path expression from a filepath. 28 | mkPath :: FilePath -> Nix.NExpr 29 | mkPath = Nix.mkPath False . pathToString 30 | 31 | -- | Makes a nix regular path expression from a filepath. 32 | mkEnvPath :: FilePath -> Nix.NExpr 33 | mkEnvPath = Nix.mkPath True . pathToString 34 | 35 | -- | Wraps a function generated by cabal. Returns path to a data file. 
36 | getDataFileName :: MonadIO io => FilePath -> io FilePath 37 | getDataFileName = map decodeString . generalize Paths.getDataFileName 38 | 39 | -- | Write some stuff to disk. 40 | writeFile :: MonadIO io => FilePath -> ByteString -> io () 41 | writeFile path = CP.writeFile (pathToString path) 42 | 43 | -- | Write some stuff to disk. 44 | writeFileUtf8 :: MonadIO io => FilePath -> Text -> io () 45 | writeFileUtf8 path = CP.writeFileUtf8 (pathToString path) 46 | 47 | -- | Read a file from disk. 48 | readFile :: MonadIO io => FilePath -> io ByteString 49 | readFile = generalize CP.readFile 50 | 51 | -- | Read a file from disk. 52 | readFileUtf8 :: MonadIO io => FilePath -> io Text 53 | readFileUtf8 = generalize CP.readFileUtf8 54 | 55 | -- | Read a data file, as included by cabal. 56 | readDataFile :: MonadIO io => FilePath -> io Text 57 | readDataFile = getDataFileName >=> readFileUtf8 58 | 59 | -- | Create a symbolic link at `path2` pointing to `path1`. 60 | createSymbolicLink :: (MonadIO io) => FilePath -> FilePath -> io () 61 | createSymbolicLink path1 path2 = liftIO $ do 62 | Posix.createSymbolicLink (pathToString path1) (pathToString path2) 63 | 64 | -- | Convert a FilePath into Text. 65 | pathToText :: FilePath -> Text 66 | pathToText pth = case toText pth of 67 | Left p -> p 68 | Right p -> p 69 | 70 | -- | Convert a FilePath into a string. 71 | pathToString :: FilePath -> String 72 | pathToString = unpack . pathToText 73 | 74 | -- | Get the contents of a directory, with the directory prepended. 75 | listDirFullPaths :: MonadIO io => FilePath -> io [FilePath] 76 | listDirFullPaths dir = map (dir ) <$> getDirectoryContents dir 77 | 78 | -- | Map an action over each item in the directory. The action will be 79 | -- called with the path to the directory prepended to the item. 
80 | forItemsInDir :: MonadIO io => FilePath -> (FilePath -> io a) -> io [a] 81 | forItemsInDir dir action = do 82 | paths <- listDirFullPaths dir 83 | forM paths action 84 | 85 | -- | Map an action over each item in the directory, and ignore the results. 86 | forItemsInDir_ :: MonadIO io => FilePath -> (FilePath -> io ()) -> io () 87 | forItemsInDir_ dir action = do 88 | paths <- listDirFullPaths dir 89 | forM_ paths action 90 | 91 | -- | Check if the path is a file (not directory). 92 | isFile :: MonadIO io => FilePath -> io Bool 93 | isFile = doesFileExist 94 | 95 | -- | Check if the path is a file (not directory). 96 | isDirectory :: MonadIO io => FilePath -> io Bool 97 | isDirectory = doesDirectoryExist 98 | 99 | -- | Get the base name (filename) of a path, as text. 100 | getFilename :: FilePath -> Text 101 | getFilename = pathToText . filename 102 | 103 | -- | Get the base name of a path without extension, as text. 104 | getBaseName :: FilePath -> Text 105 | getBaseName = pathToText . fst . splitExtension . filename 106 | 107 | createDirectory :: MonadIO io => FilePath -> io () 108 | createDirectory = generalize Dir.createDirectory 109 | 110 | copyFile :: MonadIO io => FilePath -> FilePath -> io () 111 | copyFile source target = liftIO $ Dir.copyFile (pathToString source) 112 | (pathToString target) 113 | 114 | createDirectoryIfMissing :: MonadIO m => FilePath -> m () 115 | createDirectoryIfMissing = liftIO . Dir.createDirectoryIfMissing True . 116 | pathToString 117 | 118 | doesDirectoryExist :: MonadIO m => FilePath -> m Bool 119 | doesDirectoryExist = liftIO . Dir.doesDirectoryExist . pathToString 120 | 121 | doesFileExist :: MonadIO m => FilePath -> m Bool 122 | doesFileExist = liftIO . Dir.doesFileExist . 
pathToString 123 | 124 | doesPathExist :: MonadIO m => FilePath -> m Bool 125 | doesPathExist path = doesFileExist path >>= \case 126 | True -> return True 127 | False -> doesDirectoryExist path 128 | 129 | getCurrentDirectory :: MonadIO m => m FilePath 130 | getCurrentDirectory = decodeString <$> liftIO Dir.getCurrentDirectory 131 | 132 | removeDirectoryRecursive :: MonadIO m => FilePath -> m () 133 | removeDirectoryRecursive = liftIO . Dir.removeDirectoryRecursive . pathToString 134 | 135 | removeFile :: MonadIO m => FilePath -> m () 136 | removeFile = liftIO . Dir.removeFile . pathToString 137 | 138 | getDirectoryContents :: MonadIO m => FilePath -> m [FilePath] 139 | getDirectoryContents dir = do 140 | contents <- liftIO $ Dir.getDirectoryContents $ pathToString dir 141 | -- Filter out the '.' and '..' folders. 142 | let noDots p = let fn = getFilename p in fn /= "" && T.head fn /= '.' 143 | return $ CP.filter noDots $ map decodeString contents 144 | 145 | hasExt :: Text -> FilePath -> Bool 146 | hasExt ext path = case extension path of 147 | Just ext' | ext == ext' -> True 148 | otherwise -> False 149 | 150 | setCurrentDirectory :: MonadIO io => FilePath -> io () 151 | setCurrentDirectory = liftIO . Dir.setCurrentDirectory . pathToString 152 | 153 | getPermissions :: MonadIO io => FilePath -> io Permissions 154 | getPermissions = generalize Dir.getPermissions 155 | 156 | isWritable :: MonadIO io => FilePath -> io Bool 157 | isWritable = map writable . getPermissions 158 | 159 | absPath :: MonadIO io => FilePath -> io FilePath 160 | absPath path = ( path) <$> getCurrentDirectory 161 | 162 | isDirectoryEmpty :: MonadIO io => FilePath -> io Bool 163 | isDirectoryEmpty = map CP.null . 
getDirectoryContents 164 | -------------------------------------------------------------------------------- /src/Main.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import System.Environment (getArgs) 4 | import System.Exit (exitWith) 5 | 6 | import NixFromNpm.Cli (runWithArgs) 7 | 8 | main :: IO () 9 | main = exitWith =<< runWithArgs =<< getArgs 10 | -------------------------------------------------------------------------------- /src/NixFromNpm.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE NoImplicitPrelude #-} 2 | {-# LANGUAGE LambdaCase #-} 3 | module NixFromNpm (module NixFromNpm.Options, 4 | module Data.SemVer, 5 | module NixFromNpm.Npm.Version, 6 | module NixFromNpm.Npm.Types, 7 | module NixFromNpm.Npm.Resolve, 8 | module NixFromNpm.Conversion.ToDisk, 9 | module NixFromNpm.Conversion.ToNix, 10 | ) where 11 | 12 | import Data.SemVer 13 | 14 | import NixFromNpm.Options 15 | import NixFromNpm.Npm.Version 16 | import NixFromNpm.Npm.Types 17 | import NixFromNpm.Npm.Resolve 18 | import NixFromNpm.Conversion.ToDisk 19 | import NixFromNpm.Conversion.ToNix 20 | -------------------------------------------------------------------------------- /src/NixFromNpm/Cli.hs: -------------------------------------------------------------------------------- 1 | -- | The nixfromnpm command-line interface 2 | {-# LANGUAGE NoImplicitPrelude #-} 3 | module NixFromNpm.Cli (runWithArgs) where 4 | 5 | import qualified Options.Applicative as O 6 | import System.Environment (getArgs) 7 | import System.Exit (ExitCode) 8 | 9 | import NixFromNpm.Common hiding (getArgs) 10 | import NixFromNpm.Options (NixFromNpmOptions, parseOptions, 11 | validateOptions) 12 | import NixFromNpm.Conversion.ToDisk (dumpPkgFromOptions) 13 | import NixFromNpm.Merge (mergeInto, MergeType(..), Source(..), Dest(..)) 14 | 15 | -- | Execute an argument parser with a list of arguments. 
16 | customExecParser_ :: O.ParserInfo a -> [String] -> IO a 17 | customExecParser_ pinfo args = do 18 | let result = O.execParserPure O.defaultPrefs pinfo args 19 | O.handleParseResult result 20 | 21 | -- | Execute the CLI with an argument list, returning an exit code. 22 | runWithArgs :: [String] -> IO ExitCode 23 | runWithArgs args = do 24 | let pInfo = O.info (O.helper <*> parseOptions) 25 | (O.fullDesc <> O.progDesc description <> O.header headerText) 26 | 27 | parsedOpts <- customExecParser_ pInfo args 28 | validatedOpts <- validateOptions parsedOpts 29 | dumpPkgFromOptions validatedOpts 30 | where 31 | description = concat ["nixfromnpm allows you to generate nix expressions ", 32 | "automatically from npm packages. It provides ", 33 | "features such as de-duplication of shared ", 34 | "dependencies and advanced customization."] 35 | headerText = "nixfromnpm - Create nix expressions from NPM" 36 | -------------------------------------------------------------------------------- /src/NixFromNpm/Common.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE LambdaCase #-} 2 | {-# LANGUAGE CPP #-} 3 | {-# LANGUAGE OverloadedStrings #-} 4 | {-# LANGUAGE NoMonomorphismRestriction #-} 5 | {-# LANGUAGE FlexibleContexts #-} 6 | {-# LANGUAGE TypeSynonymInstances #-} 7 | {-# LANGUAGE FlexibleInstances #-} 8 | {-# LANGUAGE ViewPatterns #-} 9 | {-# LANGUAGE NoImplicitPrelude #-} 10 | {-# LANGUAGE TypeFamilies #-} 11 | module NixFromNpm.Common ( 12 | module ClassyPrelude, 13 | module Control.Applicative, 14 | module Control.Exception.Lifted, 15 | module Control.Monad, 16 | module Control.Monad.Catch, 17 | module Control.Monad.Except, 18 | module Control.Monad.Identity, 19 | module Control.Monad.Reader, 20 | module Control.Monad.State.Strict, 21 | module Control.Monad.Trans, 22 | module Control.Monad.RWS.Strict, 23 | module Data.Char, 24 | module Data.Default, 25 | module Data.Either, 26 | module Data.HashMap.Strict, 27 | module 
Data.List, 28 | module Data.Map.Strict, 29 | module Data.Maybe, 30 | module Data.String.Utils, 31 | module Filesystem.Path.CurrentOS, 32 | module GHC.Exts, 33 | module Network.URI, 34 | module Filesystem.Path.Wrappers, 35 | module Text.Render, 36 | module Text.Printf, 37 | module Control.Monad.Trans.Control, 38 | module System.Console.ANSI, 39 | Name, AuthToken, Record, (//), (<>), 40 | uriToText, uriToString, putStrsLn, putStrs, maybeIf, failC, 41 | errorC, joinBy, mapJoinBy, getEnv, modifyMap, unsafeParseURI, 42 | parseURIText, withColor, withUL, warn, warns, assert, fatal, fatalC, 43 | partitionEither, throw, eitherToMaybe 44 | #if !MIN_VERSION_mono_traversable(1,0,7) 45 | , dropSuffix 46 | #endif 47 | ) where 48 | 49 | import ClassyPrelude hiding (assert, asList, find, FilePath, bracket, 50 | maximum, maximumBy, (), (<>), 51 | minimum, try, stripPrefix, ioError, 52 | mapM_, sequence_, foldM, forM_, throw, throwIO, 53 | filterM, replicateM, writeFile, readFile, 54 | writeFileUtf8, readFileUtf8, catch, catches, 55 | Handler) 56 | import Control.Exception (throw) 57 | import Control.Monad.Catch (catch, catches, Handler(..)) 58 | import qualified Prelude as P 59 | import Control.Monad.RWS.Strict hiding (Any, (<>)) 60 | import Control.Monad (when) 61 | import Control.Monad.Trans (MonadIO(..), lift) 62 | import Control.Monad.Reader (ReaderT(..), MonadReader(..), (<=<), (>=>), ask, 63 | asks) 64 | import Control.Monad.State.Strict (MonadState, StateT, State, get, gets, 65 | modify, put, liftM, liftIO, runState, 66 | runStateT, execState, execStateT, 67 | evalState, evalStateT) 68 | import Control.Monad.Except (ExceptT, MonadError(..), throwError, runExceptT) 69 | import Control.Exception.Lifted () -- hiding (assert, ) 70 | import Control.Monad.Identity (Identity(..)) 71 | import Control.Monad.Trans.Control 72 | import Control.Applicative hiding (empty, optional) 73 | import Data.Char (isDigit, isAlpha) 74 | import Data.Default 75 | import Data.List (maximum, 
maximumBy) 76 | import Data.HashMap.Strict (HashMap, (!)) 77 | import qualified Data.HashMap.Strict as H 78 | import Data.Map.Strict (Map) 79 | import qualified Data.Map.Strict as M 80 | import Data.Maybe (fromJust, isJust, isNothing) 81 | import Data.Monoid ((<>)) 82 | import Data.Either (isRight, isLeft) 83 | import Data.String.Utils hiding (join) 84 | import qualified Data.Text as T 85 | import Filesystem.Path.CurrentOS hiding (concat, null, (<.>), empty) 86 | import GHC.Exts (IsList) 87 | import Text.Render hiding (renderParens) 88 | import Text.Printf (printf) 89 | import Network.URI (URI(..), URIAuth(..), parseURI, parseAbsoluteURI, 90 | parseRelativeReference, relativeTo) 91 | import qualified Network.URI as NU 92 | import Shelly hiding (get, relativeTo) 93 | import System.Console.ANSI 94 | import Filesystem.Path.Wrappers 95 | 96 | -- | Indicates that the text is some identifier. 97 | type Name = Text 98 | 99 | -- | Used to indicate something is meant for authentication. 100 | type AuthToken = ByteString 101 | 102 | -- | A record is a lookup table with string keys. 103 | type Record = HashMap Name 104 | 105 | newtype FatalError = Fatal Text deriving (Show, Eq, Typeable) 106 | instance Exception FatalError 107 | 108 | -- | Creates a new hashmap by applying a function to every key in it. 109 | alterKeys :: (Eq k, Hashable k, Eq k', Hashable k') => 110 | (k -> k') -> HashMap k v -> HashMap k' v 111 | alterKeys f mp = do 112 | let pairs = H.toList mp 113 | newPairs = P.map (\(k, v) -> (f k, v)) pairs 114 | newMap = H.fromList newPairs 115 | newMap 116 | 117 | -- | Create a hashmap by applying a test to everything in the existing 118 | -- map. If the test returns Just, put it in the result, and otherwise leave 119 | -- it out. 
-- | Keep only the entries for which the test returns @Just@, transforming
-- the values in the process (a 'Data.Maybe.mapMaybe' for hashmaps).
modifyHashMap :: (Eq k, Hashable k)
              => (a -> Maybe b) -> HashMap k a -> HashMap k b
modifyHashMap test inputMap = foldl' step mempty $ H.toList inputMap where
  step result (k, elem) = case test elem of
    Nothing -> result
    Just newElem -> H.insert k newElem result

-- | Same as modifyHashMap, but for Data.Maps.
modifyMap :: Ord k => (a -> Maybe b) -> Map k a -> Map k b
modifyMap test inputMap = foldl' step mempty $ M.toList inputMap where
  step result (k, elem) = case test elem of
    Nothing -> result
    Just newElem -> M.insert k newElem result


-- | Convert a URI into Text.
uriToText :: URI -> Text
uriToText = pack . uriToString

-- | Convert a URI into String.
uriToString :: URI -> String
uriToString uri = NU.uriToString id uri ""

-- | Concatenate text and print it to stdout with a newline.
putStrsLn :: MonadIO m => [Text] -> m ()
putStrsLn = putStrLn . concat

-- | Concatenate text and print it to stdout.
putStrs :: MonadIO m => [Text] -> m ()
putStrs = putStr . concat

#if !MIN_VERSION_mono_traversable(1,0,7)
-- | Strip the given suffix from the given string.
-- (Provided by mono-traversable >= 1.0.7, hence the CPP guard.)
dropSuffix :: Text -> Text -> Text
dropSuffix suffix input = case T.stripSuffix suffix input of
  Nothing -> input
  Just stripped -> stripped
#endif

-- | Return a Just value if the argument is True, else Nothing.
maybeIf :: Bool -> a -> Maybe a
maybeIf True x = Just x
maybeIf False _ = Nothing

-- | Synonym for intercalate.
joinBy :: Text -> [Text] -> Text
joinBy = T.intercalate

-- | Map a function and intercalate the results.
mapJoinBy :: Text -> (a -> Text) -> [a] -> Text
mapJoinBy sep func = joinBy sep . map func

-- | Reads an environment variable (via shelly, suppressing output).
getEnv :: MonadIO m => Text -> m (Maybe Text)
getEnv = shelly . silently . get_env

-- | Call the monadic fail function, concatenating a list of Text.
failC :: Monad m => [Text] -> m a
failC = fail . unpack . concat

-- | Throw an error after concatenating a list of Text.
errorC :: [Text] -> a
errorC = error . unpack . concat

-- | Appends text to URI with a slash. Ex: foo.com // bar ==
-- foo.com/bar.
(//) :: URI -> Text -> URI
uri // txt = do
  -- Ensure the base URI ends with a slash before resolving the relative
  -- reference. 'T.isSuffixOf' is total, unlike the previous 'T.last',
  -- which crashed if the rendered URI were ever empty.
  let base = uriToText uri
      fixedUri = unsafeParseURI $
        if "/" `T.isSuffixOf` base then base else base <> "/"
  case parseRelativeReference (unpack txt) of
    Nothing -> errorC ["Invalid appending URI: ", tshow txt]
    Just uri' -> uri' `relativeTo` fixedUri

-- | Parse a URI from text, calling 'error' on malformed input.
unsafeParseURI :: Text -> URI
unsafeParseURI txt = case parseURIText txt of
  Nothing -> errorC ["Invalid URI text: ", tshow txt]
  Just uri -> uri

-- | Parse a URI from text.
parseURIText :: Text -> Maybe URI
parseURIText = parseURI . unpack

-- | Run an action with the terminal foreground set to the given color,
-- resetting the terminal afterwards.
withColor :: MonadIO io => Color -> io a -> io a
withColor color action = do
  liftIO $ setSGR [SetColor Foreground Vivid color]
  result <- action
  liftIO $ setSGR [Reset]
  return result

-- | Run an action with terminal underlining on, resetting it afterwards.
withUL :: MonadIO io => io a -> io a
withUL action = do
  liftIO $ setSGR [SetUnderlining SingleUnderline]
  result <- action
  liftIO $ setSGR [SetUnderlining NoUnderline]
  return result

-- | Print a warning string in red.
warn :: MonadIO io => Text -> io ()
warn msg = withColor Red $ putStrsLn ["WARNING: ", msg]

-- | Print a warning string by concatenating strings.
warns :: MonadIO io => [Text] -> io ()
warns = warn . concat

-- | Throws the given exception if the test fails.
-- | Run the monadic test; throw the given exception if it returns False.
assert :: (Monad m, Exception e) => m Bool -> e -> m ()
assert test err = test >>= \case
  True -> return ()
  False -> throw err

-- | Throw a fatal error.
fatal :: Text -> a
fatal = throw . Fatal

-- | Like `fatal` but takes a list which it concatenates.
fatalC :: [Text] -> a
fatalC = fatal . concat

-- | Split up a list based on a predicate.
partitionEither :: (a -> Either b c) -> [a] -> ([b], [c])
partitionEither f = partitionEithers . map f

-- | Convert an `Either` to a `Maybe`.
eitherToMaybe :: Either a b -> Maybe b
eitherToMaybe (Left _) = Nothing
eitherToMaybe (Right x) = Just x
-------------------------------------------------------------------------------- /src/NixFromNpm/Conversion/ToDisk.hs: --------------------------------------------------------------------------------
{-# LANGUAGE CPP #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ViewPatterns #-}
module NixFromNpm.Conversion.ToDisk where

import Data.Either (either)
import Data.HashMap.Strict (HashMap)
import Data.Map (Map)
import Data.SemVer (semver, anyVersion, parseSemVer, fromHaskellVersion)
import Data.Text (Text)
import Shelly (shelly, cp_r, rm_rf, run_)
import System.Environment (lookupEnv)
import System.Exit (ExitCode(..))
import Text.Printf (printf)
import qualified Data.HashMap.Strict as H
import qualified Data.HashSet as HS
import qualified Data.Map as M
import qualified Data.Text as T
import qualified Prelude as P

import qualified Paths_nixfromnpm
import NixFromNpm.Common
import Nix.Expr (NExpr)
import Nix.Parser (Result(..), parseNixFile)
import Nix.Pretty (prettyNix)
#if MIN_VERSION_hnix(0,6,0)
import Nix.Render (MonadFile)
#endif
import NixFromNpm.Conversion.ToNix (ResolvedPkg(..),
                                    toDotNix,
                                    writeNix,
                                    rootDefaultNix,
                                    packageJsonDefaultNix,
                                    packageMapToNix,
                                    resolvedPkgToNix,
                                    nodePackagesDir)
import NixFromNpm.Options (NixFromNpmOptions(..))
import NixFromNpm.Npm.Types (BrokenPackageReason(..))
import NixFromNpm.Npm.Types (ResolvedDependency(..), unpackPSC, VersionInfo(..))
import NixFromNpm.Npm.Version (NpmVersionRange(..), showPair, showRangePair)
import NixFromNpm.Npm.PackageMap (simpleName, pmNumVersions, pmConcat)
import NixFromNpm.Npm.PackageMap (PackageMap, PackageName(..),
                                  pmLookup, pmDelete, pmMap,
                                  psToList)
import NixFromNpm.Npm.Resolve

-- | The npm lookup utilities will produce a bunch of fully defined packages.
-- However, the only packages that we want to write are the new ones; that
-- is, the ones that we've discovered and the ones that already exist. This
-- will perform the appropriate filter. It will also convert them to nix.
takeNewPackages :: PackageMap FullyDefinedPackage
                -> PackageMap NExpr
takeNewPackages startingRec = do
  -- Keep only the NewPackage entries, rendering each to a nix expression;
  -- then drop any package left with no versions at all.
  let isNew (NewPackage rpkg) = Just $ resolvedPkgToNix rpkg
      isNew _ = Nothing
  H.filter (not . M.null) $ H.map (modifyMap isNew) startingRec

-- | Given the path to a package, finds all of the .nix files which parse
-- correctly.
parseVersionFiles ::
#if MIN_VERSION_hnix(0,6,0)
  (MonadIO io, MonadFile io)
#else
  MonadIO io
#endif
  => Bool -- ^ Verbose output.
  -> PackageName -- ^ Name of the package this is a version of.
  -> FilePath -- ^ Folder with .nix files for this package.
  -> io (PackageMap NExpr) -- ^ Version and expression.
parseVersionFiles verbose pkgName folder = do
  maybeExprs <- forItemsInDir folder $ \path -> do
    let (versionTxt, ext) = splitExtension $ filename path
    case parseSemVer (pathToText versionTxt) of
      -- The guard on the wildcard pattern fires first, so non-.nix files
      -- are skipped regardless of whether the basename parses as a semver.
      _ | ext /= Just "nix" -> return Nothing -- not a nix file
      Left _ -> return Nothing -- not a version file
      Right version -> parseNixFile (pathToString path) >>= \case
        Failure err -> do
          putStrsLn ["Warning: expression at path ", tshow path,
                     " failed to parse:\n", tshow err]
          return Nothing -- invalid nix, should overwrite
        Success expr -> do
          when verbose $
            putStrsLn ["Discovered ", tshow pkgName, " at version ",
                       tshow version]
          return $ Just (version, expr)
  return $ H.singleton pkgName (M.fromList $ catMaybes maybeExprs)

-- | Given a directory containing npm nix expressions, parse it into a
-- packagemap of parsed nix expressions.
scanNodePackagesDir ::
#if MIN_VERSION_hnix(0,6,0)
  (MonadIO io, MonadFile io)
#else
  MonadIO io
#endif
  => Bool -> FilePath -> io (PackageMap NExpr)
-- NOTE: the parameter shadows the top-level 'nodePackagesDir' constant.
scanNodePackagesDir verbose nodePackagesDir = pmConcat <$> do
  forItemsInDir nodePackagesDir $ \dir -> do
    doesDirectoryExist dir >>= \case
      False -> return mempty -- not a directory
      True -> case T.split (=='@') $ getFilename dir of
        -- Check if the directory starts with "@", in which case it's
        -- a namespace.
        ["", namespace] -> map pmConcat $ forItemsInDir dir $ \dir' -> do
          let pkgName = PackageName (getFilename dir') (Just namespace)
          parseVersionFiles verbose pkgName dir'
        [name] -> do
          parseVersionFiles verbose (simpleName $ getFilename dir) dir
        -- Any other shape (e.g. '@' in the middle of a name) is skipped.
        _ -> return mempty

-- | Given a nodePackages folder, create a default.nix which contains all
-- of the packages in that folder.
writeNodePackagesNix ::
#if MIN_VERSION_hnix(0,6,0)
  (MonadIO io, MonadFile io)
#else
  MonadIO io
#endif
  => Bool -> FilePath -> io ()
-- NOTE(review): the '</>' path-append operators in this function were
-- missing from this copy of the file (likely stripped in transit) and
-- have been restored.
writeNodePackagesNix verbose path' = do
  path <- absPath path'
  whenM (not <$> doesDirectoryExist (path </> nodePackagesDir)) $ do
    failC ["No node packages folder in ", pathToText path]
  let defaultNix = path </> nodePackagesDir </> "default.nix"
  putStrsLn ["Generating package definition object in ", pathToText defaultNix]
  packages <- scanNodePackagesDir verbose (path </> nodePackagesDir)
  writeNix defaultNix $ packageMapToNix packages

-- | Given the path to a file possibly containing nix expressions, finds all
-- expressions findable at that path and returns a map of them.
findExisting ::
#if MIN_VERSION_hnix(0,6,0)
  (MonadBaseControl IO io, MonadIO io, MonadFile io)
#else
  (MonadBaseControl IO io, MonadIO io)
#endif
  => Bool -- ^ Verbose
  -> FilePath -- ^ The path to search.
  -> io (PackageMap PreExistingPackage)
  -- ^ Mapping of package names to maps of versions to nix
  -- expressions.
findExisting verbose path = do
  doesDirectoryExist (path </> nodePackagesDir) >>= \case
    False -> pure mempty
    True -> do
      putStrsLn ["Searching for existing expressions in ", pathToText path]
      verMaps <- scanNodePackagesDir verbose (path </> nodePackagesDir)
      let total = pmNumVersions verMaps
      putStrsLn ["Found ", render total, " expressions in ", pathToText path]
      return $ pmMap FromOutput verMaps

-- | Given the output directory, finds any existing packages.
preloadPackages :: NpmFetcher ()
preloadPackages = do
  verbose <- asks nfsVerbose
  -- A negative cache depth disables loading of existing expressions.
  existing <- asks nfsCacheDepth >>= \case
    n | n < 0 -> return mempty
      | otherwise -> findExisting verbose =<< asks nfsOutputPath
  modify $ \s -> s {
    resolved = pmMap toFullyDefined existing <> resolved s
    }

-- | Initialize an output directory from scratch. This means:
-- * Creating a default.nix file.
-- * Creating a nodePackages folder.
-- NOTE(review): the '</>' path-append operators in this function were
-- missing from this copy of the file (likely stripped in transit) and
-- have been restored.
initializeOutput :: NpmFetcher ()
initializeOutput = do
  outputPath <- asks nfsOutputPath
  -- The binding is not used below, but the case aborts (via 'fatal') if
  -- the cabal-reported version cannot be converted.
  version <- case fromHaskellVersion Paths_nixfromnpm.version of
    Left err -> fatal err
    Right v -> return v
  let defaultNixPath = outputPath </> "default.nix"
      -- skip the action if the path exists and overwrite is disabled.
      unlessExists path action = asks nfsOverwriteNixLibs >>= \case
        True -> action
        False -> doesFileExist path >>= \case
          True -> return ()
          False -> action
  putStrsLn ["Initializing ", pathToText outputPath]
  createDirectoryIfMissing outputPath
  createDirectoryIfMissing (outputPath </> nodePackagesDir)

  unlessExists defaultNixPath $ do
    writeNix defaultNixPath rootDefaultNix

  -- Get the path to the files bundled with nixfromnpm which
  -- contain nix libraries.
  nixlibs <- liftIO (lookupEnv "NIX_LIBS_DIR") >>= \case
    Nothing -> getDataFileName "nix-libs"
    Just libPath -> pure (fromString libPath)

  let inputNodeLib = nixlibs </> "nodeLib"
  let outputNodeLib = outputPath </> "nodeLib"

  putStrsLn ["Generating node libraries in ", pathToText outputPath]

  -- Replace any previous nodeLib wholesale with the bundled copy.
  shelly $ do
    rm_rf outputNodeLib
    cp_r inputNodeLib outputNodeLib

  let tools = outputNodeLib </> "tools"
  run_ "chmod" [ "-R", "+x", (pathToText tools) ]

-- | Actually writes the packages to disk.
writeNewPackages :: NpmFetcher ()
writeNewPackages = takeNewPackages <$> gets resolved >>= \case
  newPackages
    | H.null newPackages -> putStrLn "No new packages created."
    | otherwise -> forM_ (H.toList newPackages) $ \(pkgName, pkgVers) -> do
        forM_ (M.toList pkgVers) $ \(ver, expr) -> do
          writePackage pkgName ver expr

-- | Generate project.nix and default.nix next to a package.json.
-- NOTE(review): the '</>' path-append operators in this function were
-- missing from this copy of the file (likely stripped in transit) and
-- have been restored.
dumpFromPkgJson :: FilePath -- ^ Path to folder containing package.json.
                -> NpmFetcher ()
dumpFromPkgJson path = do
  doesDirectoryExist path >>= \case
    False -> errorC ["No such directory ", pathToText path]
    True -> doesFileExist (path </> "package.json") >>= \case
      False -> errorC ["No package.json found in ", pathToText path]
      True -> do
        -- Parse a VersionInfo object from the package.json file.
        verinfo <- extractPkgJson (path </> "package.json")
        let (name, version) = (viName verinfo, viVersion verinfo)
        putStrsLn ["Generating expression for package ", tshow name,
                   ", version ", tshow version]
        -- Convert this to a ResolvedPkg by resolving its dependencies.
        rPkg <- withoutPackage name version $ versionInfoToResolved verinfo
        writeNix (path </> "project.nix") $ resolvedPkgToNix rPkg
        outputPath <- asks nfsOutputPath
        writeNix (path </> "default.nix") $
          packageJsonDefaultNix outputPath

-- | Show all of the broken packages.
showBrokens :: NpmFetcher ()
showBrokens = H.toList <$> gets brokenPackages >>= \case
  [] -> return ()
  brokens -> do
    putStrsLn ["Failed to generate expressions for ", tshow (length brokens),
               " downstream dependencies."]
    -- For each broken package, show every failing range and, when known,
    -- the dependency chains that led to it.
    forM_ brokens $ \(name, rangeMap) -> do
      forM_ (M.toList rangeMap) $ \(range, report) -> do
        putStrsLn [" ", showRangePair name range]
        let chains = bprDependencyChains report
        when (HS.size chains > 0) $ do
          putStrLn " Dependency of:"
          forM_ (HS.toList chains) $ \chain ->
            putStrsLn [" ",
                       mapJoinBy " -> " (uncurry showPair) (reverse chain)]
        putStrsLn ["Failed to build because: ", tshow (bprReason report)]

-- | See if any of the top-level packages failed to build, and return a
-- non-zero status if they did.
checkForBroken :: [(PackageName, NpmVersionRange)] -> NpmFetcher ExitCode
checkForBroken inputs = do
  -- findBrokens will look for any of the packages in
  let findBrokens [] = return []
      findBrokens ((name, range):others) = getBroken name range >>= \case
        Nothing -> findBrokens others
        Just report -> ((name, range, report) :) <$> findBrokens others
  findBrokens inputs >>= \case
    [] -> do
      putStrsLn $ case inputs of
        [] -> ["No packages to build."]
        [(p,v)] -> ["Package ", showRangePair p v, " built successfully."]
        -- Use the bound 'pkgs' (the same list as 'inputs' here) rather
        -- than leaving the binding unused.
        pkgs -> ["Packages ", mapJoinBy ", " (uncurry showRangePair) pkgs,
                 " built successfully."]
      return ExitSuccess
    pkgs -> do
      putStrLn "The following packages failed to build:"
      forM_ pkgs $ \(name, range, report) -> do
        putStrLn $ showRangePair name range
        putStrsLn ["Failed because: ", tshow $ bprReason report]
      return $ ExitFailure 1

-- | Traverses down a dependency tree, seeing if the dependencies of a package
-- are broken.
checkPackage :: ResolvedPkg -> NpmFetcher (Maybe BrokenPackageReason)
checkPackage ResolvedPkg{..} = go (H.toList rpDependencies) where
  go [] = return Nothing -- all done!
  go ((name, Broken reason):_) = return $ Just (BrokenDependency name reason)
  go ((name, Resolved (unpackPSC -> ver)):rest) = do
    -- Recurse into resolved dependencies to surface transitive breakage.
    pmLookup name ver <$> gets resolved >>= \case
      Nothing -> return $ Just (UnsatisfiedDependency name)
      Just (NewPackage rpkg) -> checkPackage rpkg >>= \case
        Nothing -> go rest
        Just err -> return $ Just err
      Just _ -> go rest

-- | Entry point: resolve and write out every requested package/path,
-- returning a non-zero exit code if any top-level package broke.
dumpPkgFromOptions :: NixFromNpmOptions -> IO ExitCode
dumpPkgFromOptions opts
  | nfnoPkgNames opts == [] && nfnoPkgPaths opts == [] = do
      putStrLn "No packages given, nothing to do..."
      return $ ExitFailure 1
dumpPkgFromOptions (opts@NixFromNpmOptions{..}) = do
  let settings = defaultSettings {
    nfsGithubAuthToken = nfnoGithubToken,
    nfsNpmAuthTokens = nfnoNpmTokens,
    nfsRegistries = nfnoRegistries,
    nfsRequestTimeout = fromIntegral nfnoTimeout,
    nfsOutputPath = nfnoOutputPath,
    nfsMaxDevDepth = nfnoDevDepth,
    nfsCacheDepth = nfnoCacheDepth,
    nfsRealTimeWrite = nfnoRealTime,
    nfsOverwriteNixLibs = nfnoOverwriteNixLibs
    }
  (status, _) <- runNpmFetchWith settings startState $ do
    preloadPackages
    initializeOutput
    -- forM_ rather than forM: the per-package results are discarded.
    forM_ nfnoPkgNames $ \(name, range) -> do
      _ <- resolveNpmVersionRange name range
        `catch` \(e :: SomeException) -> do
          warns ["Failed to build ", tshow name, "@", tshow range,
                 ": ", tshow e]
          addBroken name range (Reason $ show e)
          return $ semver 0 0 0
      whenM (not <$> asks nfsRealTimeWrite) writeNewPackages
    forM_ nfnoPkgPaths $ \path -> do
      dumpFromPkgJson path
      whenM (not <$> asks nfsRealTimeWrite) writeNewPackages
    writeNodePackagesNix False =<< asks nfsOutputPath
    showBrokens
    checkForBroken nfnoPkgNames
  return status
-------------------------------------------------------------------------------- /src/NixFromNpm/Conversion/ToNix.hs: --------------------------------------------------------------------------------
{-# LANGUAGE CPP #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ViewPatterns #-}
module NixFromNpm.Conversion.ToNix where

import qualified Prelude as P
import Data.Fix (Fix(..))
import qualified Data.HashMap.Strict as H
import qualified Data.Map.Strict as M
import qualified Data.ByteString.Char8 as C8
import Data.Char (isDigit)
import Data.Text (Text, replace)
import qualified Data.Text as T

import Data.SemVer

import NixFromNpm.Common hiding (replace)
import Nix.Expr hiding (mkPath)
import Nix.Pretty (prettyNix)
import qualified Nix.Expr as Nix
import Nix.Parser

import NixFromNpm.Npm.Types
import NixFromNpm.Npm.PackageMap

#if MIN_VERSION_hnix(0,5,0)
import Data.List.NonEmpty (NonEmpty(..), nonEmpty)
import Data.Maybe (fromJust)
import Text.Megaparsec (mkPos)
import Text.Regex.TDFA ((=~))
import Text.Regex.TDFA.Text ()

-- | Dummy source position attached to generated AST nodes.
genSourcePos :: SourcePos
genSourcePos = SourcePos "generated by nixfromnpm" (mkPos 1) (mkPos 1)

-- | Compatibility shim: hnix >= 0.5 requires a SourcePos on NamedVar.
mkNamedVar :: NAttrPath r -> r -> Binding r
mkNamedVar p r = NamedVar p r genSourcePos

-- | Build an attribute path; the input list must be non-empty.
mkNAttrPath :: [NKeyName a] -> NAttrPath a
mkNAttrPath = fromJust . nonEmpty

(!.) :: NExpr -> Text -> NExpr
(!.) = mkDot
infixl 8 !.
-- | Select a single attribute from an expression.
mkDot :: NExpr -> Text -> NExpr
mkDot e key = mkDots e [key]

-- | Select a chain of attributes (e.g. @a.b.c@) from an expression.
mkDots :: NExpr -> [Text] -> NExpr
mkDots e keys = Fix $ NSelect e (mkNAttrPath (toKey <$> keys)) Nothing
  where
    toKey :: Text -> NKeyName NExpr
    toKey k = (if isPlainSymbol k then StaticKey else dynamicKey) k
    -- | Make a dynamic key name that is only enclosed in double quotes
    -- (no antiquotes).
    dynamicKey :: Text -> NKeyName NExpr
    dynamicKey k = DynamicKey $ Plain $ DoubleQuoted [Plain k]
    -- | Check if it’s a valid nix symbol
    -- the nix lexer regex for IDs (symbols) is [a-zA-Z\_][a-zA-Z0-9\_\'\-]*
    isPlainSymbol :: Text -> Bool
    isPlainSymbol s = s =~ ("^[a-zA-Z_][a-zA-Z0-9_'-]*$" :: Text)

-- | Parameter set with no variadic ("...") tail.
mkParamset' :: [(Text, Maybe NExpr)] -> Params NExpr
mkParamset' ps = mkParamset ps False
#else
-- Pre-0.5 hnix: NamedVar takes no SourcePos and NAttrPath is a plain list.
mkNamedVar :: NAttrPath r -> r -> Binding r
mkNamedVar p r = NamedVar p r

mkNAttrPath :: [NKeyName a] -> NAttrPath a
mkNAttrPath = id

mkParamset' :: [(Text, Maybe NExpr)] -> Params NExpr
mkParamset' ps = mkParamset ps
#endif

-- | This contains the same information as the .nix file that corresponds
-- to the package. More or less it tells us everything that we need to build
-- the package.
data ResolvedPkg = ResolvedPkg {
  rpName :: PackageName,
  rpVersion :: SemVer,
  rpDistInfo :: Maybe DistInfo,
  -- ^ If a token was necessary to fetch the package, include it here.
  rpMeta :: PackageMeta,
  rpDependencies :: PRecord ResolvedDependency,
  rpOptionalDependencies :: PRecord ResolvedDependency,
  rpDevDependencies :: Maybe (PRecord ResolvedDependency)
  } deriving (Show, Eq)

-- | True if any of the package's dependencies have namespaces.
hasNamespacedDependency :: ResolvedPkg -> Bool
hasNamespacedDependency rPkg = any hasNs (allDeps rPkg) where
  -- Get all of the dependency sets of the package.
  allDeps ResolvedPkg{..} = [rpDependencies,
                             rpOptionalDependencies,
                             maybe mempty id rpDevDependencies]
  -- Look at all of the package names (keys) to see if any are namespaced.
  hasNs = any isNamespaced . H.keys

-- | Turns a string into one that can be used as an identifier.
-- NPM package names can contain dots, so we translate these into dashes.
-- Names can also start with a number; in this case prefix with an underscore.
fixName :: Name -> Name
fixName name = do
  -- Replace dots with dashes
  let name' = replace "." "-" name
  case T.findIndex isDigit name' of
    -- First character is a digit; prefix with underscore
    Just 0 -> "_" <> name'
    _ -> name'

-- | Converts a package name and semver into an Nix expression.
-- Example: "foo" and 1.2.3 turns into "foo_1-2-3".
-- Example: "foo.bar" and 1.2.3-baz turns into "foo-bar_1-2-3-baz"
-- Example: "@foo/bar" and 1.2.3 turns into "namespaces.foo.bar_1-2-3"
toDepExpr :: PackageName -> SemVer -> NExpr
toDepExpr (PackageName name mNamespace) (SemVer a b c (PrereleaseTags tags) _) = do
  -- e.g. "1-2-3" or "1-2-3-beta1" when prerelease tags are present.
  let suffix = pack $ intercalate "-" $ (map show [a, b, c]) <> map show tags
      ident = fixName name <> "_" <> suffix
  case mNamespace of
    Nothing -> mkSym ident
    -- If there's a namespace, call "namespaces.namespace.pkgname"
    Just namespace -> mkDots "namespaces" [namespace, ident]

-- | Converts a package name and semver into an Nix selector, which can
-- be used in a binding. This is very similar to @toDepExpr@, but it returns
-- something to be used in a binding rather than an expression.
toSelector :: PackageName -> SemVer -> NAttrPath NExpr
toSelector (PackageName name mNamespace) (SemVer a b c (PrereleaseTags tags) _) = mkNAttrPath $ do
  let suffix = pack $ intercalate "-" $ (map show [a, b, c]) <> map show tags
      ident = fixName name <> "_" <> suffix
  StaticKey <$> case mNamespace of
    Nothing -> [ident]
    Just namespace -> ["namespaces", namespace, ident]

-- | Same as toSelector, but doesn't append a version.
toSelectorNoVersion :: PackageName -> NAttrPath NExpr
toSelectorNoVersion (PackageName name mNamespace) = mkNAttrPath $ do
  StaticKey <$> case mNamespace of
    Nothing -> [fixName name]
    Just namespace -> ["namespaces", namespace, fixName name]

-- | Converts a ResolvedDependency to a nix expression.
toNixExpr :: PackageName -> ResolvedDependency -> NExpr
toNixExpr name (Resolved (unpackPSC -> semver)) = toDepExpr name semver
toNixExpr name (Broken reason) = "brokenPackage" @@ mkNonRecSet
  ["name" $= mkStr (tshow name), "reason" $= mkStr (tshow reason)]

-- | Write a nix expression pretty-printed to a file.
writeNix :: MonadIO io => FilePath -> NExpr -> io ()
writeNix path = writeFileUtf8 path . (<> "\n") . tshow . prettyNix

-- | Gets the .nix filename of a semver. E.g. (0, 1, 2) -> 0.1.2.nix
toDotNix :: SemVer -> FilePath
toDotNix v = fromText $ tshow v <> ".nix"

-- | Get the .nix filename relative to the nodePackages folder of a package.
-- NOTE(review): the '</>' operators here were missing from this copy of the
-- file (likely stripped in transit) and have been restored.
toRelPath :: PackageName -> SemVer -> FilePath
toRelPath (PackageName name mNamespace) version = do
  let subPath = fromText name </> toDotNix version -- E.g. "foo/1.2.3.nix"
  case mNamespace of
    -- Simple case: package 'foo@1.2.3' -> './foo/1.2.3.nix'
    Nothing -> subPath
    -- Namespaced: package '@foo/bar@1.2.3' -> './@foo/bar/1.2.3.nix'
    Just nspace -> fromText ("/@" <> nspace) </> subPath

-- | Converts distinfo into a nix fetchurl call.
distInfoToNix :: Maybe Name -- `Just` if we are fetching from a namespace.
              -> Maybe DistInfo -> NExpr
distInfoToNix _ Nothing = Nix.mkPath False "./."
distInfoToNix maybeNamespace (Just DistInfo{..}) = do
  -- Namespaced packages fetch through a wrapper that injects auth headers.
  let fetchurl = case maybeNamespace of
        Nothing -> "pkgs" !. "fetchurl"
        Just _ -> "fetchUrlNamespaced"
      (algo, hash) = case diShasum of
        SHA1 hash' -> ("sha1", hash')
        SHA256 hash' -> ("sha256", hash')
      authBinding = case maybeNamespace of
        Nothing -> []
        Just namespace -> [bindTo "namespace" (mkStr namespace)]
      bindings = ["url" $= mkStr diUrl, algo $= mkStr hash] <> authBinding
  fetchurl @@ mkNonRecSet bindings

-- | Converts package meta to a nix expression, if it exists.
metaToNix :: PackageMeta -> Maybe NExpr
metaToNix PackageMeta{..} = do
  let
    -- Produce a binding only when the (Maybe) field is present.
    grab name = maybe [] (\s -> [name $= mkStr s])
    homepage = grab "homepage" (map uriToText pmHomepage)
    description = grab "description" pmDescription
    author = grab "author" pmAuthor
    keywords = case pmKeywords of
      ks | null ks -> []
         | otherwise -> ["keywords" $= mkList (toList (map mkStr ks))]
    stdenvPlatforms = mkDots "pkgs" ["stdenv", "lib", "platforms"]
    platforms = case map nodePlatformToText $ toList pmPlatforms of
      [] -> []
      ps -> singleton $ "platforms" $= case ps of
        -- For a single one, just do pkgs.stdenv.lib.platforms.
        [p] -> stdenvPlatforms !. p
        -- For multiples, use the `with` syntax, and since each is a
        -- list, join with the concatenation operator.
        (p:ps) -> mkWith stdenvPlatforms $ foldl' ($++) (mkSym p) (mkSym <$> ps)
  case homepage <> description <> keywords <> author <> platforms of
    [] -> Nothing
    bindings -> Just $ mkNonRecSet bindings

-- | Returns true if any of the resolved package's dependencies were broken.
hasBroken :: ResolvedPkg -> Bool
hasBroken ResolvedPkg{..} = case rpDevDependencies of
  Nothing -> any isBroken rpDependencies
  Just devDeps -> any isBroken rpDependencies || any isBroken devDeps
  where isBroken = \case {Broken _ -> True; _ -> False}

-- | Given all of the versions defined in a node packages folder, create a
-- default.nix which defines an object that calls out to all of those files.
--
-- Essentially, given a directory structure like this:
-- > foo/
-- >   0.1.2.nix
-- >   0.2.3.nix
-- > bar/
-- >   1.2.3.nix
-- > @mynamespace/
-- >   qux/
-- >     3.4.5.nix
-- >     default.nix
--
-- We would generate a nix file that looks like this:
--
-- > {callPackage}:
-- >
-- > {
-- >   foo_0-1-2 = callPackage ./foo/0.1.2.nix {};
-- >   foo_0-2-3 = callPackage ./foo/0.2.3.nix {};
-- >   foo = callPackage ./foo/0.2.3.nix {};
-- >   bar_1-2-3 = callPackage ./bar/1.2.3.nix {};
-- >   bar = callPackage ./bar/1.2.3.nix {};
-- >   "@mynamespace-qux_3-4-5" =
-- >        callPackage (./. + "/@mynamespace/qux/3.4.5.nix") {};
-- >   "@mynamespace-qux" =
-- >        callPackage (./. + "/@mynamespace/qux/3.4.5.nix") {};
-- > }
--
-- Interestingly, it doesn't matter what the packagemap actually contains. We
-- can derive all of the information we need (names and versions) from the keys
-- of the map.
packageMapToNix :: PackageMap a -> NExpr
packageMapToNix pMap = do
  let
    -- Create a parameter set with no defaults given.
    params = mkParamset' $ [("callPackage", Nothing)]
    -- Create the function body as a single set which contains all of the
    -- packages in the set as keys, and a callPackage to their paths as values.
    toBindings :: (PackageName, [SemVer]) -> [Binding NExpr]
    toBindings (_, []) = []
    -- Grab the latest version and store that under a selector without a
    -- version.
    toBindings (pkgName, (latest:vs)) = binding : versionBindings where
      binding = toSelectorNoVersion pkgName `mkNamedVar` call latest
      -- Convert render the name and version to a nix path. It might
      -- contain an '@' sign, in which case we'll need to use a trick
      -- to get it into a valid path.
      path version = case T.find (== '@') textPath of
        -- There is no '@' in the path. Just make a path nix expression.
        Nothing -> mkPath renderedPath
        -- There is an '@'. Then use a path addition; i.e. use the syntax
        -- ./. + "/this/@path"
        Just _ -> mkPath "." $+ mkStr textPath
        where renderedPath = toRelPath pkgName version
              textPath = pathToText renderedPath
      -- Equiv. to `callPackage path {}`
      call v = "callPackage" @@ path v @@ mkNonRecSet []
      toBinding :: SemVer -> Binding NExpr
      toBinding version = toSelector pkgName version `mkNamedVar` call version
      -- Renamed from 'bindings' to avoid shadowing the outer binding list.
      versionBindings :: [Binding NExpr]
      versionBindings = map toBinding (latest:vs)
    -- Versions sorted descending so the head of each list is the latest.
    sortedPackages :: [(PackageName, [SemVer])]
    sortedPackages = do
      M.toList $ M.map (map fst . M.toDescList) $ psToMap pMap
    bindings :: [Binding NExpr]
    bindings = concatMap toBindings sortedPackages
  mkFunction params $ mkNonRecSet bindings

-- | Converts a resolved package object into a nix expression. The expresion
-- will be a function where the arguments are its dependencies, and its result
-- is a call to `buildNodePackage`.
resolvedPkgToNix :: ResolvedPkg -> NExpr
resolvedPkgToNix rPkg@ResolvedPkg{..} = mkFunction funcParams body
  where
    ---------------------------------------------------------------------------
    -- Circular dependency resolution
    --
    -- This is pretty gnarly but for now just deal with it...
    -- Step 1: throw out the "broken" packages from the dependencies.
    withoutBrokens = H.fromList $ go (H.toList rpDependencies)
      where go [] = []
            go ((_, Broken _):rest) = go rest
            go ((k, Resolved v):rest) = (k, v):go rest
    -- Step 2: separate the dependencies into circular and non-circular.
    (noncircDepMap, circDepMap) = sepCircularMap withoutBrokens
    -- Step 3: create lists of nix expressions for the circular and
    -- non-circular dependencies.
    deps = map (uncurry toDepExpr) $ H.toList noncircDepMap
    circDeps = flip map (H.toList circDepMap) $ \(name, CircularSemVer ver) ->
      toDepExpr name ver
    ---------------------------------------------------------------------------
    optDeps = map (uncurry toNixExpr) $ H.toList rpOptionalDependencies
    -- Same for dev dependencies.
    devDeps = map (uncurry toNixExpr) . H.toList <$> rpDevDependencies
    -- List of arguments that these functions will take.
    funcParams' = catMaybes [
      Just "pkgs",
      Just "buildNodePackage",
      Just "nodePackages",
      -- If the package has any broken dependencies, we will need to include
      -- this function.
      maybeIf (hasBroken rPkg) "brokenPackage",
      -- If the package has a namespace then it will need to set headers
      -- when fetching. So add that function as a dependency.
      maybeIf (isNamespaced rpName) "fetchUrlNamespaced",
      maybeIf (isNamespaced rpName) "namespaceTokens",
      -- If any of the package's dependencies have namespaces, they will appear
      -- in the `namespaces` set, so we'll need that as a dependency.
      maybeIf (hasNamespacedDependency rPkg) "namespaces"
      ]
    -- None of these have defaults, so put them into pairs with Nothing.
    funcParams = mkParamset' $ map (\x -> (x, Nothing)) funcParams'
    -- Wrap an list expression in a `with nodePackages;` syntax if non-empty.
    withNodePackages noneIfEmpty list = case list of
      [] -> if noneIfEmpty then Nothing else Just $ mkList []
      _ -> Just $ mkWith "nodePackages" $ mkList list
    devDepBinding = case devDeps of
      Nothing -> Nothing
      Just ddeps -> bindTo "devDependencies" <$> withNodePackages False ddeps
    PackageName name namespace = rpName
    -- The argument set handed to buildNodePackage; optional bindings are
    -- dropped via catMaybes.
    args = mkNonRecSet $ catMaybes [
      Just $ "name" $= mkStr name,
      Just $ "version" $= (mkStr $ tshow rpVersion),
      Just $ "src" $= distInfoToNix (pnNamespace rpName) rpDistInfo,
      bindTo "namespace" <$> map mkStr namespace,
      bindTo "deps" <$> withNodePackages False deps,
      bindTo "circularDependencies" <$> withNodePackages True circDeps,
      bindTo "optionalDependencies" <$> withNodePackages True optDeps,
      devDepBinding,
      maybeIf (hasBroken rPkg) ("isBroken" $= mkBool True),
      bindTo "meta" <$> metaToNix rpMeta
      ]
    body = "buildNodePackage" @@ args

-- | Convenience function to generate an `import /path {args}` expression.
importWith :: Bool -- ^ True if the path is from the env, e.g. <nixpkgs>
           -> FilePath -- ^ Path to import
           -> [Binding NExpr] -- ^ Arguments to pass
           -> NExpr -- ^ The resulting nix expression
importWith isEnv path args = do
  "import" @@ Nix.mkPath isEnv (pathToString path) @@ mkNonRecSet args

-- | We use this a few times: `import <nixpkgs> {}`
importNixpkgs :: NExpr
importNixpkgs = importWith True "nixpkgs" []

-- | The default version of nodejs we are using; this should correspond
-- to a key in the nixpkgs set we're importing.
370 | defaultNodeJS :: Text 371 | defaultNodeJS = "nodejs-8_x" 372 | 373 | -- | Also used a few times, these are the top-level params to the generated 374 | -- default.nix files. 375 | defaultParams :: Params NExpr 376 | defaultParams = do 377 | mkParamset' [("pkgs", Just importNixpkgs), 378 | ("nodejs", Just $ "pkgs" !. defaultNodeJS)] 379 | 380 | -- | When passing through arguments, we inherit these things. 381 | defaultInherits :: [Binding NExpr] 382 | defaultInherits = [ 383 | inherit ["pkgs", "nodejs"] 384 | #if MIN_VERSION_hnix(0,5,0) 385 | genSourcePos 386 | #endif 387 | ] 388 | 389 | -- | The name of the subfolder within the output directory that 390 | -- contains node packages. 391 | nodePackagesDir :: FilePath 392 | nodePackagesDir = "nodePackages" 393 | 394 | bindRootPath :: Binding NExpr 395 | bindRootPath = "nodePackagesPath" $= mkPath ("./" nodePackagesDir) 396 | 397 | -- | The root-level default.nix file. 398 | rootDefaultNix :: NExpr 399 | rootDefaultNix = mkFunction defaultParams body where 400 | lets = [ 401 | "mkNodeLib" $= importWith False "./nodeLib" ["self" $= "mkNodeLib"] 402 | , "nodeLib" $= ("mkNodeLib" @@ mkNonRecSet defaultInherits) 403 | ] 404 | genPackages = "nodeLib" !. "generatePackages" 405 | body = mkLets lets $ genPackages @@ (mkNonRecSet [bindRootPath]) 406 | 407 | -- | Create a `default.nix` file for a particular package.json; this simply 408 | -- imports the package as defined in the given path, and calls into it. 409 | packageJsonDefaultNix :: FilePath -- ^ Path to the output directory. 410 | -> NExpr 411 | packageJsonDefaultNix outputPath = do 412 | let 413 | libBind = "lib" $= importWith False outputPath defaultInherits 414 | callPkg = "lib" !. 
"callPackage" 415 | call = callPkg @@ mkPath "project.nix" @@ mkNonRecSet [] 416 | mkFunction defaultParams $ mkLets [libBind] call 417 | 418 | bindingsToMap :: [Binding t] -> Record t 419 | bindingsToMap = foldl' step mempty where 420 | step record binding = case binding of 421 | #if MIN_VERSION_hnix(0,5,0) 422 | NamedVar (StaticKey key :| []) obj _SourcePos 423 | #else 424 | NamedVar [StaticKey key] obj 425 | #endif 426 | -> H.insert key obj record 427 | _ -> record 428 | -------------------------------------------------------------------------------- /src/NixFromNpm/Git/Types.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | {-# LANGUAGE NoImplicitPrelude #-} 3 | {-# LANGUAGE TypeSynonymInstances #-} 4 | {-# LANGUAGE FlexibleInstances #-} 5 | {-# LANGUAGE QuasiQuotes #-} 6 | module NixFromNpm.Git.Types where 7 | 8 | import Data.Aeson 9 | import Data.Aeson.Types (Parser, typeMismatch) 10 | import Data.HashMap.Strict (HashMap) 11 | import qualified Data.HashMap.Strict as H 12 | import qualified Data.Text as T 13 | import Text.Regex.PCRE.Heavy (Regex, scan, re) 14 | 15 | import NixFromNpm.Common 16 | 17 | data Commit = Commit { 18 | cSha :: Text, 19 | cUrl :: Text 20 | } deriving (Show, Eq) 21 | 22 | data Tag = Tag { 23 | tName :: Name, 24 | tCommit :: Commit 25 | } deriving (Show, Eq) 26 | 27 | data Branch = Branch { 28 | bName :: Name, 29 | bCommit :: Commit 30 | } deriving (Show, Eq) 31 | 32 | data Repo = Repo { 33 | rName :: Name, 34 | rDefaultBranch :: Name 35 | } deriving (Show, Eq) 36 | 37 | -- | A git ref might be a commit hash, or something that could be anything. 38 | data GitRef 39 | = SomeRef Text -- ^ Case where it's not yet known 40 | | BranchName Name 41 | | TagName Name 42 | | CommitHash Text 43 | deriving (Show, Eq, Ord) 44 | 45 | instance IsString GitRef where 46 | fromString = SomeRef . 
fromString 47 | 48 | refText :: GitRef -> Text 49 | refText (SomeRef r) = r 50 | refText (CommitHash h) = h 51 | 52 | -- | Various public git hosting services that nixfromnpm is aware of 53 | data GitSource 54 | = Github 55 | | Bitbucket 56 | | Gist 57 | | GitLab 58 | deriving (Show, Eq, Ord, Enum) 59 | 60 | -- | Represents a git repo at a particular commit. 61 | data GitIdentifier = GitId { 62 | giSource::GitSource, 63 | giOwner::Name, 64 | giRepo::Name, 65 | giRef:: Maybe GitRef 66 | } deriving (Eq, Ord) 67 | 68 | instance Show GitIdentifier where 69 | show (GitId source account repo ref) = do 70 | let ref' = case ref of 71 | Nothing -> "" 72 | Just r -> "#" <> refText r 73 | proto = toLower (show source) <> "://" 74 | proto <> unpack (account <> "/" <> repo <> ref') 75 | 76 | data GithubError 77 | = GithubUnreachable 78 | | InvalidJsonFromGithub Text 79 | | InvalidGitRef GitRef 80 | | InvalidGithubUri URI 81 | | NoSuchRepo Name Name 82 | deriving (Show, Eq, Typeable) 83 | 84 | instance Exception GithubError 85 | 86 | instance FromJSON Commit where 87 | parseJSON = getObject "commit info" >=> \o -> do 88 | sha <- o .: "sha" 89 | url <- o .: "url" 90 | return $ Commit sha url 91 | 92 | instance FromJSON Tag where 93 | parseJSON = getObject "tag info" >=> \o -> do 94 | name <- o .: "name" 95 | commit <- o .: "commit" 96 | return $ Tag name commit 97 | 98 | instance FromJSON Branch where 99 | parseJSON = getObject "branch info" >=> \o -> do 100 | name <- o .: "name" 101 | commit <- o .: "commit" 102 | return $ Branch name commit 103 | 104 | instance FromJSON Repo where 105 | parseJSON = getObject "repo info" >=> \o -> do 106 | name <- o .: "name" 107 | defaultBranch <- o .: "default_branch" 108 | return $ Repo name defaultBranch 109 | 110 | getObject :: String -> Value -> Parser (HashMap Text Value) 111 | getObject _ (Object o) = return o 112 | getObject msg v = 113 | typeMismatch ("object (got " <> show v <> ", message " <> msg <> ")") v 114 | 115 | -- | Convert a 
vector of tags to a hashmap mapping tag names to SHAs. 116 | tagListToMap :: Vector Tag -> Record Text 117 | tagListToMap tags = foldl' step mempty tags where 118 | step result tag = H.insert (tName tag) (cSha $ tCommit tag) result 119 | 120 | -- | Read a git source from a URI server name. 121 | sourceFromServer :: String -> Maybe GitSource 122 | sourceFromServer regname 123 | | "github.com" `isInfixOf` regname = Just Github 124 | | "bitbucket.com" `isInfixOf` regname = Just Bitbucket 125 | | "gist" `isInfixOf` regname = Just Gist 126 | | "gitlab.com" `isInfixOf` regname = Just GitLab 127 | | otherwise = Nothing 128 | 129 | -- | Get the repo owner and repo name from a URI path. 130 | ownerRepoFromPath :: String -> Maybe (Name, Name) 131 | ownerRepoFromPath path = case scan [re|^/([\w_-]+)/([\w_-]+)$|] $ pack path of 132 | [(_, [owner, repo])] -> Just (owner, repo) 133 | _ -> Nothing 134 | 135 | -- | Parse a git ref from a URI fragment. 136 | refFromFragment :: String -> Maybe GitRef 137 | refFromFragment ('#':frag) = Just $ SomeRef $ pack frag 138 | refFromFragment _ = Nothing 139 | 140 | class IsGitId t where 141 | parseGitId :: t -> Maybe GitIdentifier 142 | 143 | instance IsGitId GitIdentifier where 144 | parseGitId = Just 145 | 146 | instance IsGitId URI where 147 | parseGitId uri = case uri of 148 | -- In order to determine which git service this is, we need to 149 | -- examine the uri authority (which contains the server info). 150 | -- If that's not there this isn't a git identifier. 
151 | URI _ Nothing _ _ _ -> Nothing 152 | URI scheme (Just (URIAuth _ regname _)) path _ fragment -> do 153 | source <- sourceFromServer regname 154 | (owner, repo) <- ownerRepoFromPath path 155 | return $ GitId source owner repo (refFromFragment fragment) 156 | 157 | instance IsGitId String where 158 | -- | Github URLs are a special case of git URLs; they can be specified 159 | -- simply by owner/repo or owner/repo#ref 160 | parseGitId str = case scan githubIdRegex str of 161 | [(_, [owner, repo])] -> do 162 | return $ GitId Github (pack owner) (pack repo) Nothing 163 | [(_, [owner, repo, '#':ref])] -> do 164 | return $ GitId Github (pack owner) (pack repo) $ Just $ fromString ref 165 | _ -> do 166 | parseGitId =<< parseURI str 167 | where 168 | githubIdRegex = [re|^([\w._-]+)/([\w._-]+)(\#[^\s]+)?$|] 169 | -------------------------------------------------------------------------------- /src/NixFromNpm/HttpTools.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | {-# LANGUAGE LambdaCase #-} 3 | {-# LANGUAGE NoImplicitPrelude #-} 4 | {-# LANGUAGE ScopedTypeVariables #-} 5 | {-# LANGUAGE RecordWildCards #-} 6 | {-# LANGUAGE FlexibleContexts #-} 7 | module NixFromNpm.HttpTools ( 8 | module Network.Curl, 9 | HttpResult(..), HttpError(..), getHttpWith, makeHeaders, curlGetBS 10 | ) where 11 | 12 | import qualified Data.ByteString.Char8 as B 13 | import qualified Data.ByteString.Lazy.Char8 as BL8 14 | import qualified Data.Text as T 15 | import qualified Data.Text.Encoding as T 16 | import Network.Curl 17 | 18 | import NixFromNpm.Common 19 | 20 | data HttpResult a 21 | = HttpSuccess a 22 | | HttpError HttpError 23 | deriving (Show, Eq, Typeable) 24 | 25 | data HttpError 26 | = HttpErrorWithCode Int 27 | | HttpTimedOut Long 28 | | CurlError CurlCode 29 | deriving (Show, Eq, Typeable) 30 | 31 | instance Exception HttpError 32 | 33 | -- | Given a URL and some options, perform a curl request and 
return the 34 | -- resulting code, HTTP status, and response body. 35 | curlGetBS :: (MonadBaseControl IO io, MonadIO io) 36 | => URLString 37 | -> [CurlOption] 38 | -> io (CurlCode, Int, BL8.ByteString) 39 | curlGetBS url opts = liftIO $ initialize >>= \ h -> do 40 | (finalBody, gatherBody) <- newIncoming 41 | setopt h (CurlFailOnError True) 42 | setDefaultSSLOpts h url 43 | setopt h (CurlURL url) 44 | setopt h (CurlWriteFunction (gatherOutput_ gatherBody)) 45 | mapM (setopt h) opts 46 | rc <- perform h 47 | bs <- finalBody 48 | status <- getResponseCode h 49 | return (rc, status, bs) 50 | 51 | -- | Convert (key, value) pairs into a curl Headers option. 52 | makeHeaders :: [(Text, ByteString)] -> CurlOption 53 | makeHeaders headers = CurlHttpHeaders $ map mk headers where 54 | mk (key, val) = T.unpack key <> ": " <> B.unpack val 55 | 56 | getHttpWith :: (MonadBaseControl IO io, MonadIO io) 57 | => Long -- ^ Timeout in seconds 58 | -> Int -- ^ Number of retries 59 | -> [(Text, ByteString)] -- ^ Headers 60 | -> URI -- ^ URI to hit 61 | -> io BL8.ByteString -- ^ Response content 62 | getHttpWith timeout retries headers uri = loop retries where 63 | opts = [makeHeaders headers, CurlFollowLocation True, 64 | CurlTimeout timeout] 65 | toErr status CurlHttpReturnedError = HttpErrorWithCode status 66 | toErr _ CurlOperationTimeout = HttpTimedOut timeout 67 | toErr _ err = CurlError err 68 | loop retries = do 69 | (code, status, content) <- curlGetBS (uriToString uri) opts 70 | case code of 71 | CurlOK -> return content 72 | code | retries <= 0 -> throw $ toErr status code 73 | | otherwise -> do 74 | putStrsLn ["Request failed. 
-- | Merges one folder containing expressions into another. After the merge,
-- generates a new nodePackages/default.nix in the target directory.
--
-- Each package directory (and any version .nix file inside it) found in
-- the source is copied only if it is not already present in the target;
-- existing target files are never overwritten. In DryRun mode every
-- filesystem action is reported but skipped.
--
-- NOTE(review): the path expressions below (e.g. @source nodePackagesDir@)
-- look like a path-join operator such as @</>@ was lost in extraction --
-- confirm against the original file before relying on this text.
mergeInto ::
#if MIN_VERSION_hnix(0,6,0)
  (MonadIO io, MonadBaseControl IO io, MonadFile io)
#else
  (MonadIO io, MonadBaseControl IO io)
#endif
  => MergeType -- ^ If DryRun, it will just report what it would have
               -- otherwise done.
  -> Source -- ^ Source path, containing store objects
  -> Dest -- ^ Target path, also containing store objects
  -> io ()
mergeInto mergeType (Source source) (Dest target) = do
  let dryRun = mergeType == DryRun
  -- Both sides must already contain a nodePackages folder; bail out
  -- early otherwise.
  whenM (not <$> doesDirectoryExist (source nodePackagesDir)) $ do
    failC ["No node packages folder in source ", pathToText source]
  whenM (not <$> doesDirectoryExist (target nodePackagesDir)) $ do
    failC ["No node packages folder in target ", pathToText target]
  -- Go through all of the packages in the source directory.
  forItemsInDir_ (source nodePackagesDir) $ \srcDir -> do
    let targetDir = target nodePackagesDir filename srcDir
    -- Create a directory for that package, if it doesn't exist.
    whenM (not <$> doesDirectoryExist targetDir) $ do
      putStrsLn ["Creating directory ", pathToText targetDir]
      if dryRun then putStrLn " (Skipped due to dry run)" else
        createDirectory targetDir
    -- Copy every version file found in that directory as well.
    dotNixFiles <- filter (hasExt "nix") <$> listDirFullPaths srcDir
    forM_ dotNixFiles $ \versionFile -> do
      let targetVersionFile = targetDir filename versionFile
      -- Never clobber a version file that already exists in the target.
      whenM (not <$> doesFileExist targetVersionFile) $ do
        putStrsLn ["Copying ", pathToText versionFile, " to ",
                   pathToText targetVersionFile]
        if dryRun then putStrLn " (Skipped due to dry run)" else
          copyFile versionFile targetVersionFile
  -- Regenerate the top-level listing so the new packages are picked up.
  writeNodePackagesNix True target
17 | data PackageName = PackageName { 18 | pnBasicName :: !Name, 19 | pnNamespace :: !(Maybe Name) 20 | } deriving (Eq, Ord) 21 | 22 | instance IsString PackageName where 23 | fromString s = PackageName (pack s) Nothing 24 | 25 | instance Show PackageName where 26 | show (PackageName name Nothing) = unpack name 27 | show (PackageName name (Just namespace)) = 28 | concat ["@", unpack namespace, "/", unpack name] 29 | 30 | instance Hashable PackageName where 31 | hashWithSalt salt (PackageName name namespace) = 32 | hashWithSalt salt (name, namespace) 33 | 34 | -- | True if the package name has a namespace. 35 | isNamespaced :: PackageName -> Bool 36 | isNamespaced = isJust . pnNamespace 37 | 38 | -- | Create a package name without a namespace. 39 | simpleName :: Name -> PackageName 40 | simpleName = flip PackageName Nothing 41 | 42 | -- | Parse a PackageName from raw text. 43 | -- 44 | -- This is more loose than the actual set of requirements for package 45 | -- naming. All we require is that namespaces (if specified) and 46 | -- package names have at least one character which is not whitespace 47 | -- or a '@', '%' or '/' character. 48 | -- 49 | -- See the complete rules at: 50 | -- https://github.com/npm/validate-npm-package-name#naming-rules 51 | -- 52 | parsePackageName :: Text -> Either Text PackageName 53 | parsePackageName name = case scan [re|^(?:@([^\s@/%]+)/)?([^\s@/%]+)$|] name of 54 | [(_, ["", package])] -> pure $ PackageName package Nothing 55 | [(_, [namespace, package])] -> pure $ PackageName package (Just namespace) 56 | _ -> Left $ "Invalid package name " <> tshow name 57 | 58 | -- | A record keyed on PackageNames. 59 | type PRecord = HashMap PackageName 60 | 61 | -- | We use this data structure a lot: a mapping of package names to 62 | -- a mapping of versions to fully defined packages. We use a map for 63 | -- the versions so that we can quickly get the latest or oldest version. 
64 | type PackageMap pkg = PRecord (M.Map SemVer pkg) 65 | 66 | -- | Same thing, but the keys don't map to anything. 67 | type PackageSet = PackageMap () 68 | 69 | -- | Map a function across a PackageMap. 70 | pmMap :: (a -> b) -> PackageMap a -> PackageMap b 71 | pmMap f = H.map (M.map f) 72 | 73 | -- | Insert a value into a package map under a given name and version. 74 | pmInsert :: PackageName -> SemVer -> a -> PackageMap a -> PackageMap a 75 | pmInsert name version val pmap = do 76 | let existing = H.lookupDefault mempty name pmap 77 | H.insert name (M.insert version val existing) pmap 78 | 79 | -- | Insert into a PackageSet, same as inserting () into a map. 80 | psInsert :: PackageName -> SemVer -> PackageSet -> PackageSet 81 | psInsert name version = pmInsert name version () 82 | 83 | -- | Create a singleton package map. 84 | pmSingleton :: PackageName -> SemVer -> a -> PackageMap a 85 | pmSingleton name version x = H.singleton name (M.singleton version x) 86 | 87 | -- | Create a singleton package set. 88 | psSingleton :: PackageName -> SemVer -> PackageSet 89 | psSingleton name version = pmSingleton name version () 90 | 91 | -- | Make a list of package pairs. 92 | psToList :: PackageMap a -> [(PackageName, SemVer)] 93 | psToList packageSet = do 94 | let keysOnly = H.map M.keys packageSet 95 | toPairs (name, versions) = zip (repeat name) versions 96 | concatMap toPairs $ H.toList keysOnly 97 | 98 | -- | Given a PackageSet, create a map of the packages to their versions. 99 | psToMap :: PackageMap a -> Map PackageName (Map SemVer a) 100 | psToMap pMap = foldl' step mempty $ H.toList pMap where 101 | step result (pkgName, versionSet) = M.insert pkgName versionSet result 102 | 103 | -- | Remove a value from a package map under the given name and version. 
104 | pmDelete :: PackageName -> SemVer -> PackageMap a -> PackageMap a 105 | pmDelete name version pmap = case H.lookup name pmap of 106 | -- If it's not in the map, we don't have to do anything 107 | Nothing -> pmap 108 | Just vmap -> case M.delete version vmap of 109 | -- If the map is empty after we remove the key, we can remove the map 110 | -- entirely. Otherwise, just remove the key. 111 | vmap' | M.null vmap' -> H.delete name pmap 112 | | otherwise -> H.insert name vmap' pmap 113 | 114 | -- | Check for membership of a package name and version 115 | pmMember :: PackageName -> SemVer -> PackageMap a -> Bool 116 | pmMember name version pmap = case H.lookup name pmap of 117 | Nothing -> False 118 | Just vmap -> M.member version vmap 119 | 120 | pmFromList :: [(PackageName, SemVer, a)] -> PackageMap a 121 | pmFromList = foldl' step mempty where 122 | step pmap (name, ver, x) = pmInsert name ver x pmap 123 | 124 | pmLookup :: PackageName -> SemVer -> PackageMap a -> Maybe a 125 | pmLookup name version pmap = case H.lookup name pmap of 126 | Nothing -> Nothing 127 | Just vmap -> M.lookup version vmap 128 | 129 | pmLookupDefault :: a -> PackageName -> SemVer -> PackageMap a -> a 130 | pmLookupDefault def n v pmap = case pmLookup n v pmap of 131 | Nothing -> def 132 | Just x -> x 133 | 134 | pmDiff :: PackageMap a -> PackageMap b -> PackageMap a 135 | pmDiff pmap1 pmap2 = foldl' step pmap1 $ H.toList pmap2 where 136 | step result (pName, verMap) = case H.lookup pName result of 137 | Nothing -> result 138 | Just verMap' -> case M.difference verMap' verMap of 139 | m | M.null m -> H.delete pName result 140 | | otherwise -> H.insert pName m result 141 | 142 | -- | Given a package map and a package name, find the latest version 143 | -- of that package defined in the map, if it exists. 
144 | pmLatestVersion :: PackageName -> PackageMap a -> Maybe SemVer 145 | pmLatestVersion pName pMap = case H.lookup pName pMap of 146 | Nothing -> Nothing 147 | Just versions -> case M.toDescList versions of 148 | [] -> Nothing -- empty map 149 | (v, _):vs -> Just v 150 | 151 | -- | Join two packagemaps. 152 | pmJoin :: PackageMap a -> PackageMap a -> PackageMap a 153 | pmJoin pm1 pm2 = foldl' addVersions pm1 $ H.toList pm2 where 154 | addVersions result (pkgName, pkgVersions) = case H.lookup pkgName result of 155 | -- If there's no package with this name in the result we can just add it. 156 | Nothing -> H.insert pkgName pkgVersions result 157 | -- If there already is, we join the versions. 158 | Just versions -> H.insert pkgName (versions <> pkgVersions) result 159 | 160 | -- | Join a list of packagemaps. 161 | pmConcat :: [PackageMap a] -> PackageMap a 162 | pmConcat = foldl' pmJoin mempty 163 | 164 | -- | The number of packages defined. 165 | pmNumPackages :: PackageMap a -> Int 166 | pmNumPackages = H.size 167 | 168 | -- | The total number of versions of all packages defined. 169 | pmNumVersions :: PackageMap a -> Int 170 | pmNumVersions = sum . map M.size . map snd . 
H.toList 171 | -------------------------------------------------------------------------------- /src/NixFromNpm/Npm/Types.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE NoImplicitPrelude #-} 2 | {-# LANGUAGE BangPatterns #-} 3 | {-# LANGUAGE LambdaCase #-} 4 | {-# LANGUAGE OverloadedStrings #-} 5 | {-# LANGUAGE ScopedTypeVariables #-} 6 | {-# LANGUAGE DeriveGeneric #-} 7 | module NixFromNpm.Npm.Types where 8 | 9 | import qualified ClassyPrelude as CP 10 | import Data.Aeson 11 | import Data.Aeson.Types as Aeson (Parser, typeMismatch, withObject) 12 | import qualified Data.HashMap.Strict as H 13 | import qualified Data.Text as T 14 | import Data.SemVer (SemVer, SemVerRange) 15 | import Data.SemVer (parseSemVer, parseSemVerRange, anyVersion) 16 | 17 | import NixFromNpm.Common 18 | import NixFromNpm.Git.Types (getObject, GithubError) 19 | import NixFromNpm.Npm.Version 20 | import NixFromNpm.Npm.PackageMap 21 | 22 | -- | Package information; specifically all of the different versions. 23 | data PackageInfo = PackageInfo { 24 | piVersions :: HashMap SemVer VersionInfo, 25 | piTags :: Record SemVer 26 | } deriving (Show, Eq) 27 | 28 | -- | Taken from https://nodejs.org/api/process.html#process_process_platform, 29 | -- and filtered to those that correspond to nixpkgs platforms. 30 | data NodePlatform 31 | = Darwin 32 | | FreeBSD 33 | | OpenBSD 34 | | Linux 35 | | SunOS 36 | deriving (Show, Eq) 37 | 38 | -- | Convert a NodePlatform into text 39 | nodePlatformToText :: IsString t => NodePlatform -> t 40 | nodePlatformToText = \case 41 | Darwin -> "darwin" 42 | FreeBSD -> "freebsd" 43 | OpenBSD -> "openbsd" 44 | Linux -> "linux" 45 | SunOS -> "solaris" 46 | 47 | -- | Parse a node platform from a string. 
48 | parseNodePlatform :: Alternative f => Text -> f NodePlatform 49 | parseNodePlatform = \case 50 | "linux" -> pure Linux 51 | "darwin" -> pure Darwin 52 | "freebsd" -> pure FreeBSD 53 | "openbsd" -> pure OpenBSD 54 | "sunos" -> pure SunOS 55 | _ -> empty 56 | 57 | -- | Metadata about a package. 58 | data PackageMeta = PackageMeta { 59 | pmDescription :: Maybe Text, 60 | pmAuthor :: Maybe Text, 61 | pmHomepage :: Maybe URI, 62 | pmKeywords :: Vector Text, 63 | pmPlatforms :: Vector NodePlatform 64 | } deriving (Show, Eq) 65 | 66 | -- | Default (empty) package metadata. 67 | emptyPackageMeta :: PackageMeta 68 | emptyPackageMeta = PackageMeta Nothing Nothing Nothing mempty mempty 69 | 70 | instance FromJSON PackageMeta where 71 | parseJSON = withObject "PackageMeta" $ \o -> do 72 | let getString = \case {String s -> Just s; _ -> Nothing} 73 | description <- o .:? "description" 74 | author <- o .:? "author" <|> pure Nothing 75 | maybePlatforms :: Maybe (Vector Text) <- o .:? "os" <|> pure Nothing 76 | let platforms = maybe mempty (catMaybes . map parseNodePlatform) maybePlatforms 77 | homepage <- o .:? "homepage" >>= \case 78 | Nothing -> return Nothing 79 | Just (String txt) -> return $ parseURIText txt 80 | Just (Array stuff) -> case toList $ catMaybes (getString <$> stuff) of 81 | [] -> return Nothing 82 | (uri:_) -> return $ parseURIText uri 83 | let 84 | -- If keywords are a string, split on commas and strip whitespace. 85 | getKeywords (String s) = fromList $ T.strip <$> T.split (==',') s 86 | -- If an array, just take the array. 87 | getKeywords (Array a) = catMaybes $ map getString a 88 | -- Otherwise, this is an error, but just return an empty array. 89 | getKeywords _ = mempty 90 | keywords <- map getKeywords $ o .:? "keywords" .!= Null 91 | return $ PackageMeta description author homepage keywords platforms 92 | 93 | 94 | -- | Expresses all of the information that a version of a package needs, in 95 | -- the abstract (e.g. 
using version ranges instead of explicit versions). 96 | -- This type can be used as an input to the Npm.Resolve stuff to produce a 97 | -- `ResolvedPkg`. 98 | data VersionInfo = VersionInfo { 99 | viName :: PackageName, 100 | viDependencies :: PRecord NpmVersionRange, 101 | viOptionalDependencies :: PRecord NpmVersionRange, 102 | viDevDependencies :: PRecord NpmVersionRange, 103 | viBundledDependencies :: [PackageName], 104 | viDist :: Maybe DistInfo, -- not present if in a package.json file. 105 | viMeta :: PackageMeta, 106 | viVersion :: SemVer 107 | } deriving (Show, Eq) 108 | 109 | -- | SHA digest, combining an algorithm type with a digest. 110 | data Shasum = SHA1 Text | SHA256 Text deriving (Show, Eq) 111 | 112 | -- | Distribution info from NPM. Tells us the URL and hash of a tarball. 113 | data DistInfo = DistInfo { 114 | diUrl :: Text, 115 | diShasum :: Shasum 116 | } deriving (Show, Eq) 117 | 118 | -- | Flag for different types of dependencies. 119 | data DependencyType 120 | = Dependency -- ^ Required at runtime. 121 | | OptionalDependency 122 | | DevDependency -- ^ Only required for development. 123 | deriving (Show, Eq) 124 | 125 | -- | Dependencies might be circular; this type lets us indicate if so. 126 | data PossiblyCircularSemVer 127 | = NotCircular SemVer 128 | | Circular CircularSemVer 129 | deriving (Show, Eq, Ord, Generic) 130 | 131 | instance Hashable PossiblyCircularSemVer 132 | 133 | -- | Wrapper for SemVers so we can tell when they're circular. 134 | newtype CircularSemVer = CircularSemVer {unCirc :: SemVer} 135 | deriving (Show, Eq, Ord, Generic) 136 | 137 | instance Hashable CircularSemVer 138 | 139 | -- | Convert a PossiblyCircularSemVer to a SemVer. 140 | unpackPSC :: PossiblyCircularSemVer -> SemVer 141 | unpackPSC (Circular (CircularSemVer sv)) = sv 142 | unpackPSC (NotCircular sv) = sv 143 | 144 | -- | Separate a list of PossiblyCircularSemVers into two lists. 
145 | -- The first element contains non-circular dependencies, and the 146 | -- second contains circular ones. 147 | sepCirculars :: [PossiblyCircularSemVer] -> ([SemVer], [CircularSemVer]) 148 | sepCirculars [] = ([], []) 149 | sepCirculars (psc:rest) = do 150 | let (noncirculars, circulars) = sepCirculars rest 151 | case psc of 152 | NotCircular nc -> (nc:noncirculars, circulars) 153 | Circular c -> (noncirculars, c:circulars) 154 | 155 | -- | Similar to @sepCirculars@ but takes in a HashMap and returns a 156 | -- pair of HashMaps. 157 | sepCircularMap :: (Hashable a, Eq a) 158 | => HashMap a PossiblyCircularSemVer 159 | -> (HashMap a SemVer, HashMap a CircularSemVer) 160 | sepCircularMap m = go $ H.toList m where 161 | go [] = (mempty, mempty) 162 | go ((key, psc):rest) = do 163 | let (noncirculars, circulars) = go rest 164 | case psc of 165 | NotCircular nc -> (H.insert key nc noncirculars, circulars) 166 | Circular c -> (noncirculars, H.insert key c circulars) 167 | 168 | -- | Reasons why an expression might not have been able to be built. 169 | data BrokenPackageReason 170 | = NoMatchingPackage PackageName 171 | | NoMatchingVersion NpmVersionRange 172 | | InvalidNpmVersionRange Text 173 | | NoSuchTag Name 174 | | TagPointsToInvalidVersion Name SemVer 175 | | InvalidSemVerSyntax Text String 176 | | InvalidPackageJson Text String 177 | | NoDistributionInfo 178 | | Reason String 179 | | GithubError GithubError 180 | | NotYetImplemented String 181 | | UnsatisfiedDependency PackageName -- This should never happen, but in case 182 | | BrokenDependency PackageName BrokenPackageReason 183 | deriving (Show, Eq, Typeable) 184 | 185 | instance Exception BrokenPackageReason 186 | 187 | -- | We might not be able to resolve a dependency, in which case we record 188 | -- it as a broken package. 189 | data ResolvedDependency 190 | = Resolved PossiblyCircularSemVer -- ^ Package resolved at this version. 191 | | Broken BrokenPackageReason -- ^ Could not build the dependency. 
192 | deriving (Show, Eq) 193 | 194 | instance Semigroup PackageInfo where 195 | PackageInfo vs ts <> PackageInfo vs' ts' = 196 | PackageInfo (vs CP.<> vs') (ts CP.<> ts') 197 | 198 | instance Monoid PackageInfo where 199 | mempty = PackageInfo mempty mempty 200 | mappend = (CP.<>) 201 | 202 | instance FromJSON PackageName where 203 | parseJSON (String name) = case parsePackageName name of 204 | Left err -> fail $ unpack err 205 | Right pname -> return pname 206 | parseJSON v = typeMismatch "Expected a string for a package name" v 207 | 208 | -- | Gets a hashmap from an object, or otherwise returns an empty hashmap. 209 | getDict :: (FromJSON val, FromJSON key, Hashable key, Eq key) 210 | => Text -> Object -> Aeson.Parser (HashMap key val) 211 | getDict key obj = case H.lookup key obj of 212 | Just (Object obj') -> map H.fromList $ 213 | forM (H.toList obj') $ \(k, v) -> do 214 | key <- parseJSON (String k) 215 | val <- parseJSON v 216 | return (key, val) 217 | -- sometimes it's malformed, like humanize-number 218 | _ -> return mempty 219 | 220 | instance FromJSON VersionInfo where 221 | parseJSON = withObject "version info" $ \o -> do 222 | listedDependencies <- getDict "dependencies" o 223 | devDependencies <- getDict "devDependencies" o 224 | optionalDependencies <- getDict "optionalDependencies" o 225 | bundledDependencies <- o .:? "bundledDependencies" .!= [] 226 | -- Loop through the bundled dependencies. If any of them are missing 227 | -- from the dependencies record, add it here. 228 | let isMissingDep name = not $ H.member name listedDependencies 229 | missing = filter isMissingDep bundledDependencies 230 | missingDependencies = zip missing (repeat $ SemVerRange anyVersion) 231 | dependencies = listedDependencies <> H.fromList missingDependencies 232 | dist <- o .:? 
"dist" 233 | pkgName <- o .: "name" 234 | version <- o .: "version" 235 | packageMeta <- parseJSON (Object o) 236 | scripts :: Record Value <- getDict "scripts" o <|> fail "couldn't get scripts" 237 | -- Remove any keys which appear in `optionalDependencies` from 238 | -- the dependencies and devdependencies sets. 239 | let rmOptionals = flip H.difference optionalDependencies 240 | case parseSemVer version of 241 | Left _ -> throw $ VersionSyntaxError version 242 | Right semver -> return $ VersionInfo { 243 | viDependencies = rmOptionals dependencies, 244 | viDevDependencies = rmOptionals devDependencies, 245 | viOptionalDependencies = optionalDependencies, 246 | viBundledDependencies = bundledDependencies, 247 | viDist = dist, 248 | viName = pkgName, 249 | viMeta = packageMeta, 250 | viVersion = semver 251 | } 252 | 253 | instance FromJSON SemVerRange where 254 | parseJSON v = case v of 255 | String s -> case parseSemVerRange s of 256 | Left err -> do 257 | let errorMessage = "valid semantic version (got " <> show v <> ")" 258 | typeMismatch errorMessage v 259 | Right range -> return range 260 | _ -> typeMismatch "string" v 261 | 262 | instance FromJSON PackageInfo where 263 | parseJSON = getObject "package info" >=> \o -> do 264 | vs' :: Record VersionInfo <- getDict "versions" o 265 | tags' <- getDict "dist-tags" o 266 | let vs = H.fromList $ map (\vi -> (viVersion vi, vi)) $ H.elems vs' 267 | convert tags [] = return $ PackageInfo vs (H.fromList tags) 268 | convert tags ((tName, tVer):ts) = case parseSemVer tVer of 269 | Left err -> failC ["Tag ", tName, " refers to an invalid ", 270 | "semver string ", tVer, ": ", tshow err] 271 | Right ver -> convert ((tName, ver):tags) ts 272 | convert [] $ H.toList tags' 273 | 274 | instance FromJSON DistInfo where 275 | parseJSON = getObject "dist info" >=> \o -> do 276 | tarball <- o .: "tarball" 277 | shasum <- SHA1 <$> o .: "shasum" 278 | return $ DistInfo tarball shasum 279 | 
--------------------------------------------------------------------------------
/src/NixFromNpm/Npm/Version.hs:
--------------------------------------------------------------------------------
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
module NixFromNpm.Npm.Version where

import qualified Data.Text as T
import qualified Data.HashMap.Strict as H

import Data.SemVer
import Data.Aeson
import qualified Data.Aeson.Types as Aeson

import NixFromNpm.Common
import NixFromNpm.Npm.PackageMap
import NixFromNpm.Git.Types hiding (Tag)
import Text.Parsec (ParseError)

-- | The different ways an npm dependency's version can be specified.
data NpmVersionRange
  = SemVerRange SemVerRange
  -- ^ The most common: parsing from a semver range.
  | Tag Name
  -- ^ From an npm "tag"; this mapping lives in the package metadata.
  | NpmUri URI
  -- ^ From a URL (e.g. a tarball or zip file)
  | GitIdentifier GitIdentifier
  -- ^ From one of the "known" git services (e.g. github, bitbucket).
  | LocalPath FilePath
  -- ^ From a local file path.
  | InvalidVersion Text
  -- ^ An invalid version string: this results in a downstream failure
  -- but allows us to ignore an invalid version unless we actually
  -- need to resolve it. While proper Haskell idioms might dictate we
  -- use an Either or similar here, this is somewhat of a path of
  -- least resistance.
  deriving (Eq, Ord)

-- | Errors that can occur while interpreting an npm version specifier.
data NpmVersionError
  = UnsupportedVersionType NpmVersionRange
  | UnsupportedUriScheme String
  | UnsupportedGitSource GitSource
  | VersionSyntaxError Text
  | UnrecognizedVersionFormat Text
  deriving (Show, Eq, Typeable)

instance Exception NpmVersionError

-- | Render each range form roughly the way npm would spell it.
instance Show NpmVersionRange where
  show (SemVerRange rng) = show rng
  show (Tag name) = unpack name
  show (NpmUri uri) = uriToString uri
  show (GitIdentifier ident) = show ident
  show (LocalPath pth) = show pth
  show (InvalidVersion v) = "Version string unable to be parsed: " <> show v

-- | Render a resolved @name\@version@ pair.
showPair :: PackageName -> SemVer -> Text
showPair name version = tshow name <> "@" <> tshow version

-- | Render a comma-separated list of resolved pairs.
showPairs :: [(PackageName, SemVer)] -> Text
showPairs = mapJoinBy ", " (uncurry showPair)

-- | Render an unresolved @name\@range@ pair.
showRangePair :: PackageName -> NpmVersionRange -> Text
showRangePair name range = tshow name <> "@" <> tshow range

-- | Render a comma-separated list of unresolved dependency pairs.
showDeps :: [(PackageName, NpmVersionRange)] -> Text
showDeps ranges = mapJoinBy ", " (uncurry showRangePair) ranges

-- | Try each interpretation of a version string in turn: semver range,
-- git identifier, URI, local path, and finally an npm tag.
parseNpmVersionRange :: Text -> Maybe NpmVersionRange
parseNpmVersionRange t = do
  SemVerRange <$> eitherToMaybe (parseSemVerRange t)
    <|> GitIdentifier <$> parseGitId (unpack t)
    <|> NpmUri <$> parseURI (unpack t)
    <|> LocalPath <$> asPath
    <|> Tag <$> asTag
  where
    -- Only strings that look like filesystem paths count as local paths.
    asPath = case unpack t of
      '/':_ -> Just $ fromText t
      '.':'/':_ -> Just $ fromText t
      '.':'.':'/':_ -> Just $ fromText t
      '~':'/':_ -> Just $ fromText t
      _ -> Nothing
    -- A tag is any nonempty string without spaces.
    asTag = if t == "" || " " `isInfixOf` t then Nothing else Just t

instance FromJSON NpmVersionRange where
  parseJSON v = case v of
    String s -> case parseNpmVersionRange s of
      Nothing -> return $ InvalidVersion s
      Just range -> return range
    -- NOTE: I couldn't find a mention of this format for specifying
    -- versions in the official package.json documentation. However,
    -- this was encountered in the wild:
    --
    -- https://github.com/adnelson/nixfromnpm/issues/138
    --
    -- Making a special case for it here seems pretty harmless,
    -- especially if it was at one point supported. Alternatively, we
    -- could allow parsing dev dependencies and such-like to fail.
    Object m -> case H.lookup "version" m of
      Just v -> parseJSON v
      Nothing -> fail "no 'version' key found in dependency object"
    _ -> Aeson.typeMismatch "string" v

-- | A package name can be passed in directly, or a version range can be
-- specified with a @. Throws 'UnrecognizedVersionFormat' or
-- 'VersionSyntaxError' on malformed input.
parseNameAndRange :: MonadIO m => Text -> m (PackageName, NpmVersionRange)
parseNameAndRange name = do
  let badFormat err =
        UnrecognizedVersionFormat (name <> " (" <> err <> help <> ")")
      help = case T.find (== '%') name of
        Nothing -> ""
        Just _ -> " (use '@' instead of '%' to indicate a version range)"

  -- TODO use ExceptT for these, or switch to parsec
  let parseName n = case parsePackageName n of
        Left err -> throw $ badFormat err
        Right pkgName -> pure pkgName

  let parseRange r = case parseNpmVersionRange r of
        Nothing -> throw $ VersionSyntaxError r
        Just range -> pure range

  case T.split (== '@') name of
    -- No namespace, no version range
    [_] -> (, SemVerRange anyVersion) <$> parseName name

    -- Namespace but no version range
    ["", _] -> (, SemVerRange anyVersion) <$> parseName name

    -- Namespace and range
    "" : name' : ranges ->
      -- In case a '@' appears in the range, treat the range as a list
      -- and join on '@'
      (,) <$> parseName ("@" <> name') <*> parseRange (joinBy "@" ranges)

    -- No namespace, but with range
    name' : ranges ->
      (,) <$> parseName name' <*> parseRange (joinBy "@" ranges)

    -- Anything else is invalid (only the impossible empty-split case
    -- reaches here; kept for pattern totality).
    -- FIX: previously this was 'badFormat $ "Not in format or @" <> help',
    -- which (a) lost the "<name>"/"<name>@<range>" placeholders and
    -- (b) appended 'help' twice, since 'badFormat' already includes it.
    _ -> throw $ badFormat "Not in format <name> or <name>@<range>"
--------------------------------------------------------------------------------
/src/NixFromNpm/Options.hs:
--------------------------------------------------------------------------------
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE LambdaCase #-}
module NixFromNpm.Options where

import qualified Prelude as P
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.HashMap.Strict as H

import Data.SemVer (anyVersion)
import Options.Applicative (Mod, OptionFields, Parser, value, switch)
import Options.Applicative (help, long, metavar, short, auto, option, strOption)

import NixFromNpm.Common
import NixFromNpm.Conversion.ToNix (nodePackagesDir)
import NixFromNpm.Npm.PackageMap (PackageName(..))
import NixFromNpm.Npm.Resolve (getNpmTokens, parseNpmTokens)
import NixFromNpm.Npm.Version (NpmVersionError, NpmVersionRange)
import NixFromNpm.Npm.Version (parseNameAndRange)

-- | Errors about node libraries
data InvalidNodeLib
  = OutputNotWritable
  | OutputParentPathDoesn'tExist
  | OutputParentNotWritable
  | IsFileNotDirectory
  | NoPackageDir
  | NoDefaultNix
  deriving (Show, Eq, Typeable)

instance Exception InvalidNodeLib

-- | Errors raised while validating command-line options.
data InvalidOption
  = NpmVersionError NpmVersionError
  | InvalidNodeLib FilePath InvalidNodeLib
  | InvalidURI Text
  | NoPackageJsonFoundAt FilePath
  | NotPathToPackageJson FilePath
  deriving (Show, Eq, Typeable)

instance Exception InvalidOption

-- | Various options we have available for nixfromnpm, as parsed from the
-- command-line options.
data RawOptions = RawOptions {
  roPkgNames :: [Name], -- ^ Names of packages to build.
49 | roPkgPaths :: [Text], -- ^ Paths of package.jsons to build. 50 | roOutputPath :: Text, -- ^ Path to output built expressions to. 51 | roNoDefaultNix :: Bool, -- ^ Disable creation of default.nix file. 52 | roNoCache :: Bool, -- ^ Build all expressions from scratch. 53 | roCacheDepth :: Int, -- ^ Depth at which to use cache. 54 | roDevDepth :: Int, -- ^ Dev dependency depth. 55 | roTest :: Bool, -- ^ Fetch only; don't write expressions. 56 | roRegistries :: [Text], -- ^ List of registries to query. 57 | roTimeout :: Int, -- ^ Number of seconds after which to timeout. 58 | roGithubToken :: Maybe AuthToken, 59 | -- ^ Github authentication token. 60 | roNpmTokens :: [Text], 61 | -- ^ NPM authentication tokens. 62 | roNoDefaultRegistry :: Bool, 63 | -- ^ Disable fetching from npmjs.org. 64 | roNoRealTime :: Bool, 65 | -- ^ Don't write packages to disk as they are written. 66 | roTopNPackages :: Maybe Int, 67 | -- ^ Fetch the top `n` npm packages by popularity. 68 | roAllTop :: Bool, 69 | -- ^ If true, fetch all the top packages we have defined. 70 | roOverwriteNixLibs :: Bool 71 | -- ^ If true, allow existing nix libraries in output to be overridden. 72 | } deriving (Show, Eq) 73 | 74 | -- | Various options we have available for nixfromnpm. Validated 75 | -- versions of what's parsed from the command-line. 76 | data NixFromNpmOptions = NixFromNpmOptions { 77 | nfnoPkgNames :: [(PackageName, NpmVersionRange)], 78 | -- ^ Names/versions of packages to build. 79 | nfnoPkgPaths :: [FilePath], -- ^ Path of package.json to build. 80 | nfnoOutputPath :: FilePath, -- ^ Path to output built expressions to. 81 | nfnoNoDefaultNix :: Bool, -- ^ Disable creation of default.nix file. 82 | nfnoCacheDepth :: Int, -- ^ Dependency depth at which to use cache. 83 | nfnoDevDepth :: Int, -- ^ Dev dependency depth. 84 | nfnoTest :: Bool, -- ^ Fetch only; don't write expressions. 85 | nfnoRegistries :: [URI], -- ^ List of registries to query. 
86 | nfnoTimeout :: Int, -- ^ Number of seconds after which to timeout. 87 | nfnoGithubToken :: Maybe AuthToken, -- ^ Github authentication token. 88 | nfnoNpmTokens :: Record AuthToken, -- ^ NPM authentication token. 89 | nfnoRealTime :: Bool, -- ^ Write packages to disk as they are written. 90 | nfnoOverwriteNixLibs :: Bool -- ^ Overwrite existing nix libraries 91 | } deriving (Show, Eq) 92 | 93 | -- | Same as `strOption` but for Text. 94 | textOption :: Mod OptionFields String -> Parser Text 95 | textOption opts = pack <$> strOption opts 96 | 97 | -- | Validate an output folder. An output folder EITHER must not exist, but 98 | -- its parent directory does and is writable, OR it does exist, is writable, 99 | -- and follows the correct format. 100 | validateOutput :: FilePath -> IO FilePath 101 | validateOutput = absPath >=> \path -> do 102 | -- Small wrapper function arround the assertion, taking a monadic 103 | -- test function and returning the given error if the test fails. 104 | let assert' test err = assert test (InvalidNodeLib path err) 105 | doesDirectoryExist path >>= \case 106 | -- If the directory exists, it must be writable and follow the 107 | -- correct format. 108 | True -> do 109 | assert' (isWritable path) OutputNotWritable 110 | whenM (not <$> isDirectoryEmpty path) $ do 111 | assert' (doesFileExist (path "default.nix")) NoDefaultNix 112 | assert' (doesDirectoryExist (path nodePackagesDir)) NoPackageDir 113 | return path 114 | False -> do 115 | -- If it doesn't exist, look at the parent path, which must 116 | -- exist and be writable so that we can create the output 117 | -- directory. Then return the path at the end. 118 | let parentPath = parent path 119 | assert' (doesDirectoryExist parentPath) 120 | OutputParentPathDoesn'tExist 121 | assert' (isWritable $ parentPath) 122 | OutputParentNotWritable 123 | return path 124 | 125 | -- | Check that a path to a package.json file or its parent directory is valid. 
validateJsPkg :: FilePath -> IO FilePath
validateJsPkg = absPath >=> \path -> doesDirectoryExist path >>= \case
  -- If it is a directory, it must be writable and contain a package.json file.
  -- FIX: restored the '</>' path-append operators, which had been stripped.
  True -> do assert (isWritable path) OutputNotWritable
             assert (doesFileExist (path </> "package.json"))
                    (NoPackageJsonFoundAt (path </> "package.json"))
             return path
  -- If the path isn't a directory, it must be a "package.json" file, and
  -- must exist, and the folder must be writable.
  False -> do
    assert (return $ getFilename path == "package.json")
           (NotPathToPackageJson path)
    assert (isWritable (parent path)) OutputNotWritable
    assert (doesFileExist path) (NoPackageJsonFoundAt path)
    return (parent path)

-- | Get a list of the top n packages. With 'Nothing', or an @n@ larger than
-- the number of packages we know about, all known packages are returned;
-- a non-positive @n@ yields an empty list (this is 'take' semantics).
getTopN :: MonadIO io => Maybe Int -> io [(PackageName, NpmVersionRange)]
getTopN numPackages = do
  topPackages <- map T.strip <$> T.lines <$> readDataFile "top_packages.txt"
  mapM parseNameAndRange $ case numPackages of
    Nothing -> topPackages
    Just n -> take n topPackages

-- | Validates the raw options passed in from the command line, and also
-- translates them into their "full" counterpart, NixFromNpmOptions.
-- Throws 'InvalidOption' (or the errors of the validators it calls) on
-- bad input.
validateOptions :: RawOptions -> IO NixFromNpmOptions
validateOptions (RawOptions{..}) = do
  -- FIX: removed the unused 'pwd <- getCurrentDirectory' binding and the
  -- unused 'opts@' as-pattern.
  topPackagesToFetch <- case roAllTop of
    True -> getTopN Nothing
    False -> case roTopNPackages of
      Nothing -> return []
      Just n -> getTopN (Just n)
  packageNames <- mapM parseNameAndRange roPkgNames
  packagePaths <- mapM (validateJsPkg . fromText) roPkgPaths
  outputPath <- validateOutput . fromText . stripTrailingSlash $ roOutputPath
  -- The default registry is appended unless explicitly disabled.
  registries <- mapM validateUrl $ (roRegistries <>
                                    if roNoDefaultRegistry
                                    then []
                                    else ["https://registry.npmjs.org"])
  githubTokenEnv <- map encodeUtf8 <$> getEnv "GITHUB_TOKEN"
  tokensCommandLine <- parseNpmTokens roNpmTokens
  npmTokensEnv <- getNpmTokens
  return $ NixFromNpmOptions {
    nfnoOutputPath = collapse outputPath,
    -- Command-line token takes precedence over the environment variable.
    nfnoGithubToken = roGithubToken <|> githubTokenEnv,
    nfnoNpmTokens = tokensCommandLine <> npmTokensEnv,
    nfnoCacheDepth = if roNoCache then -1 else roCacheDepth,
    nfnoDevDepth = roDevDepth,
    nfnoTest = roTest,
    nfnoTimeout = roTimeout,
    nfnoPkgNames = packageNames <> topPackagesToFetch,
    nfnoRegistries = registries,
    nfnoPkgPaths = packagePaths,
    nfnoNoDefaultNix = roNoDefaultNix,
    nfnoRealTime = not roNoRealTime,
    nfnoOverwriteNixLibs = roOverwriteNixLibs
    }
  where
    -- Remove a trailing slash, if it exists.
    stripTrailingSlash path = case T.stripSuffix "/" path of
      Nothing -> path
      Just path' -> path'
    -- Parse a registry URL, throwing 'InvalidURI' if it is malformed.
    validateUrl rawUrl = case parseURI (unpack rawUrl) of
      Nothing -> throw $ InvalidURI rawUrl
      Just uri -> return uri

-- | Parses the raw command-line options into the intermediate form that is
-- used to construct a NixFromNpmOptions object.
parseOptions :: Parser RawOptions
parseOptions = RawOptions
  <$> packageNames
  <*> packageFiles
  <*> textOption outputDir
  <*> noDefaultNix
  <*> noCache
  <*> cacheDepth
  <*> devDepth
  <*> isTest
  <*> registries
  <*> timeout
  <*> githubToken
  <*> npmTokens
  <*> noDefaultRegistry
  <*> noRealTime
  <*> topN
  <*> allTop
  <*> overwriteNixLibs
  where
    packageNames = many $ textOption $ short 'p'
                   <> long "package"
                   <> metavar "NAME"
                   <> help ("Package to generate expression for (supports "
                            <> "multiples)")
    packageFileHelp = "Path to package.json to generate expression for "
                      ++ " (NOT YET SUPPORTED)"
    packageFiles = many $ textOption (long "file"
                                      <> short 'f'
                                      <> metavar "FILE"
                                      <> help packageFileHelp)
    outputDir = short 'o'
                <> long "output"
                <> metavar "OUTPUT"
                <> help "Directory to output expressions to"
    noDefaultNix = switch (long "no-default-nix"
                           <> help ("When building from a package.json, do not"
                                    <> " create a default.nix"))
    noCache = switch (long "no-cache"
                      <> help "Build all expressions in OUTPUT from scratch")
    devDepth = option auto (long "dev-depth"
                            <> metavar "DEPTH"
                            <> help "Depth to which to fetch dev dependencies"
                            <> value 0)
    cacheHelp = "Depth at which to use cache. Packages at dependency depth \
                \DEPTH and lower will be pulled from the cache. If DEPTH \
                \is negative, the cache will be ignored entirely (same as \
                \using --no-cache)"
    cacheDepth = option auto (long "cache-depth"
                              <> metavar "DEPTH"
                              <> help cacheHelp
                              <> value 0)
    isTest = switch (long "test"
                     <> help "Don't write expressions; just test")
    timeout = option auto (long "timeout"
                           <> metavar "SECONDS"
                           <> help "Time requests out after SECONDS seconds"
                           <> value 10)
    registries :: Parser [Text]
    registries = many $ textOption (long "registry"
                                    <> short 'r'
                                    <> metavar "REGISTRY"
                                    <> help ("NPM registry to query (supports "
                                             <> "multiples)"))
    -- Shared help-text builder for the token options below.
    tokenHelp s _type envVar = concat
      ["Token", s, " to use for ", _type, " access (also can be set with ",
       envVar, " environment variable)"]
    githubToken = (Just . T.encodeUtf8 <$> textOption (long "github-token"
                    <> metavar "TOKEN"
                    <> help (tokenHelp "" "github" "GITHUB_TOKEN")))
                  <|> pure Nothing
    npmTokens = many $ textOption $
      long "npm-token"
      <> metavar "NAMESPACE=TOKEN"
      <> help (tokenHelp "s" "npm" "NPM_AUTH_TOKENS")
    noDefaultRegistry = switch (long "no-default-registry"
                        <> help "Do not include default npmjs.org registry")
    noRealTime = switch (long "no-real-time"
                         <> help "Write packages to disk at the end rather than \
                                 \ as they are generated.")
    allTop = switch (long "all-top"
                     <> help "Fetch all of the most popular packages that \
                             \nixfromnpm knows about.")
    -- FIX: previously this used 'Just . P.read . unpack <$> textOption ...',
    -- where 'P.read' is partial: a non-numeric argument to --top-n crashed
    -- the program with an exception instead of producing a parse error.
    -- 'option auto' reports a proper option-parsing error instead.
    topN = (Just <$> option auto (long "top-n"
                                  <> metavar "N"
                                  <> help "Fetch the top N packages by popularity."))
           <|> pure Nothing
    overwriteNixLibs = switch (long "overwrite-nix-libs"
                       <> help "Overwrite existing nix libraries in output.")
--------------------------------------------------------------------------------
/tests/Unit.hs:
--------------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE QuasiQuotes #-}
module Main (main) where

import ClassyPrelude hiding ((<>))
import Data.Either (isRight, isLeft)
import Data.Aeson (Value(..), decode)
import Nix.Expr
import Test.Hspec
import Test.QuickCheck (property, Arbitrary(..), oneof)
import NeatInterpolation (text)
import qualified Data.ByteString.Lazy as BL
import qualified Data.Text as T
import qualified Data.Text.Encoding as T

import NixFromNpm
import NixFromNpm.Common hiding (decode)
import NixFromNpm.Git.Types as Git
import NixFromNpm.Npm.PackageMap (PackageName(..), parsePackageName)
import NixFromNpm.Npm.Version as Npm

-- | Assert that an 'Either' is a 'Right' containing the given value.
shouldBeR :: (Eq a, Eq b, Show a, Show b) => Either a b -> b -> Expectation
shouldBeR a b = a `shouldBe` Right b

-- | Assert that a 'Maybe' is a 'Just' containing the given value.
shouldBeJ :: (Eq a, Show a) => Maybe a -> a -> Expectation
shouldBeJ a b = a `shouldBe` Just b

-- | Ability to parse git identifiers from URIs.
gitIdParsingSpec :: Spec
gitIdParsingSpec = describe "parse git identifiers" $ do
  describe "parse from URIs" $ do
    it "should parse a github repo" $ do
      let uri = unsafeParseURI "https://github.com/foo/bar"
      parseGitId uri `shouldBeJ` GitId Github "foo" "bar" Nothing
    it "should parse a github repo with a ref" $ do
      let uri = unsafeParseURI "https://github.com/foo/bar#baz"
      parseGitId uri `shouldBeJ` GitId Github "foo" "bar" (Just "baz")
    it "should parse a bitbucket repo" $ do
      let uri = unsafeParseURI "https://bitbucket.com/foo/bar"
      parseGitId uri `shouldBeJ` GitId Bitbucket "foo" "bar" Nothing

  describe "parse from strings" $ do
    let owner = "holidaycheck" :: String
        repo = "react-autosuggest"
        ref = "43074a439d26243ea07110f0c5752a6fc8aebe4d"
    it "should parse owner/repo as a github id" $ do
      parseGitId (owner <> "/" <> repo) `shouldBeJ`
        GitId Github (pack owner) (pack repo) Nothing
    it "should parse owner/repo with a tag as a github id" $ do
      parseGitId (owner <> "/" <> repo <> "#" <> ref) `shouldBeJ`
        GitId Github (pack owner) (pack repo) (Just $ fromString ref)
    it "should allow anything except whitespace in a ref" $ do
      let ref' = "haha_!_wow%$*%_:D"
      parseGitId (owner <> "/" <> repo <> "#" <> ref') `shouldBeJ`
        GitId Github (pack owner) (pack repo) (Just $ fromString ref')
    it "should parse anything else as a URI" $ do
      parseGitId ("http://github.com/foo/bar#baz"::String) `shouldBeJ`
        GitId Github "foo" "bar" (Just "baz")

-- | Behavior of 'parsePackageName': plain and @\@namespace/name@ forms.
npmNameParserSpec :: Spec
npmNameParserSpec = describe "npm package name parser" $ do
  it "should parse a name without a namespace" $ do
    parsePackageName "foo" `shouldBeR` PackageName "foo" Nothing

  it "should parse a name with a namespace" $ do
    parsePackageName "@foo/bar" `shouldBeR` PackageName "bar" (Just "foo")

  it "should not parse a name with an empty namespace" $ do
    parsePackageName "@/bar" `shouldSatisfy` isLeft

  it "should not parse a name with a multiple namespaces" $ do
    parsePackageName "@foo/@bar" `shouldSatisfy` isLeft
    parsePackageName "@foo/@bar/baz" `shouldSatisfy` isLeft

  describe "allow numbers" $ do
    it "in package name without a namespace" $ do
      parsePackageName "bar123" `shouldBeR` "bar123"
    it "in package name with a namespace" $ do
      parsePackageName "@foo/bar12" `shouldBeR` PackageName "bar12" (Just "foo")
    it "in a namespace" $ do
      parsePackageName "@foo12/bar" `shouldBeR` PackageName "bar" (Just "foo12")

  it "should allow certain special characters" $ do
    parsePackageName "@foo/bar!" `shouldBeR` PackageName "bar!" (Just "foo")

  it "should not allow other special characters" $ do
    parsePackageName "@foo/bar@/" `shouldSatisfy` isLeft

-- | Behavior of 'parseNpmVersionRange' for each range flavor: semver,
-- tag, git URI (full and partial), and local path.
npmVersionParserSpec :: Spec
npmVersionParserSpec = describe "npm version parser" $ do
  it "should parse a semver" $ do
    parseNpmVersionRange "0.0.0" `shouldBeJ` SemVerRange (Eq (semver 0 0 0))

  it "should parse a tag" $ do
    parseNpmVersionRange "xyz" `shouldBeJ` Npm.Tag "xyz"

  it "should parse a git uri" $ do
    let owner = "holidaycheck"
        repo = "react-autosuggest"
        ref = "43074a439d26243ea07110f0c5752a6fc8aebe4d"
    let uri = joinBy "/" ["https://github.com", owner, repo]
              <> "#" <> ref
    parseNpmVersionRange uri `shouldBeJ`
      GitIdentifier (GitId Github owner repo (Just $ SomeRef ref))

  it "should parse a partial git uri" $ do
    let uri = "holidaycheck/react-autosuggest#" <>
              "43074a439d26243ea07110f0c5752a6fc8aebe4d"
    parseNpmVersionRange uri `shouldBeJ`
      GitIdentifier (GitId Github "holidaycheck" "react-autosuggest"
                     (Just "43074a439d26243ea07110f0c5752a6fc8aebe4d"))


  it "should parse a local file path" $ do
    parseNpmVersionRange "/foo/bar" `shouldBeJ` LocalPath "/foo/bar"
    parseNpmVersionRange "./foo/bar" `shouldBeJ` LocalPath "./foo/bar"
    parseNpmVersionRange "../foo/bar" `shouldBeJ` LocalPath "../foo/bar"
    parseNpmVersionRange "~/foo/bar" `shouldBeJ` LocalPath "~/foo/bar"

-- | Behavior of 'parseNameAndRange' on @name\@range@ strings, including
-- the '%'-separator error hint.
npmNameAndVersionParserSpec :: Spec
npmNameAndVersionParserSpec = describe "npm name@version parser" $ do
  it "should parse a name with no version range" $ do
    (name, range) <- parseNameAndRange "foo"
    name `shouldBe` "foo"
    range `shouldBe` SemVerRange anyVersion

  it "should parse a namespaced name with no version range" $ do
    (name, range) <- parseNameAndRange "@foo/bar"
    name `shouldBe` PackageName "bar" (Just "foo")
    range `shouldBe` SemVerRange anyVersion

  it "should parse a name and a version range" $ do
    (name, range) <- parseNameAndRange "foo@1.2.3"
    name `shouldBe` "foo"
    range `shouldBe` SemVerRange (Eq $ semver 1 2 3)

  it "should parse a namespaced name and a version range" $ do
    (name, range) <- parseNameAndRange "@foo/bar@1.2.3"
    name `shouldBe` PackageName "bar" (Just "foo")
    range `shouldBe` SemVerRange (Eq $ semver 1 2 3)

  it "should warn if the %-format is used" $ do
    parseNameAndRange "foo%1.2.3" `shouldThrow` \(UnrecognizedVersionFormat msg) -> do
      "use '@' instead" `isInfixOf` msg

-- | Parsing of PackageMeta fields (description, keywords, platforms) from
-- package.json-style JSON.
parsePackageMetadataSpec :: Spec
parsePackageMetadataSpec = describe "parse package metadata JSON" $ do
  let unsafeDecode = fromJust . decode . BL.fromStrict . T.encodeUtf8
  let meta = emptyPackageMeta
  it "should parse a description" $ do
    let pkgJSON = unsafeDecode [text|{"description": "hey there"}|]
    pkgJSON `shouldBe` meta {pmDescription = Just "hey there"}

  it "should parse keywords" $ do
    let pkgJSON = unsafeDecode [text|{"keywords": ["awesome", "amazing"]}|]
    pkgJSON `shouldBe` meta {pmKeywords = fromList ["awesome", "amazing"]}

  it "should parse keywords separated by commas" $ do
    let pkgJSON = unsafeDecode [text|{"keywords": "awesome, amazing"}|]
    pkgJSON `shouldBe` meta {pmKeywords = fromList ["awesome", "amazing"]}

  it "should parse platforms" $ do
    let pkgJSON = unsafeDecode [text|{"os": ["darwin", "linux"]}|]
    pkgJSON `shouldBe` meta {pmPlatforms = fromList [Darwin, Linux]}

  it "should ignore platforms it doesn't recognize" $ do
    let pkgJSON = unsafeDecode [text|{"os": ["darwin", "weird"]}|]
    pkgJSON `shouldBe` meta {pmPlatforms = fromList [Darwin]}

  it "should parse multiple keys" $ do
    let pkgJSON = unsafeDecode [text|{
          "description": "hey there",
          "keywords": ["awesome", "amazing"],
          "os": ["darwin", "weird"]
        }|]
    pkgJSON `shouldBe` meta {
      pmDescription = Just "hey there",
      pmKeywords = fromList ["awesome", "amazing"],
      pmPlatforms = fromList [Darwin]
      }


-- | Conversion of PackageMeta into a nix attribute set via 'metaToNix';
-- an empty meta converts to Nothing.
metaToNixSpec :: Spec
metaToNixSpec = describe "converting package meta to nix" $ do
  let meta = emptyPackageMeta
  it "should return nothing for an empty metadata" $ do
    metaToNix meta `shouldBe` Nothing

  it "should grab the description" $ do
    let description = "Some description"
    let converted = metaToNix (meta {pmDescription = Just description})
    fromJust converted `shouldBe` mkNonRecSet ["description" $= mkStr description]

  it "should grab the author" $ do
    let author = "Some author"
    let converted = metaToNix (meta {pmAuthor = Just author})
    fromJust converted `shouldBe` mkNonRecSet ["author" $= mkStr author]

  it "should grab the homepage" $ do
    let homepageStr = "http://example.com"
        mHomepage = parseURI (T.unpack homepageStr)
    let converted = metaToNix (meta {pmHomepage = mHomepage})
    fromJust converted `shouldBe` mkNonRecSet ["homepage" $= mkStr homepageStr]

  it "should grab keywords" $ do
    let keywords = ["keyword1", "keyword2"]
    let converted = metaToNix (meta {pmKeywords = fromList keywords})
    fromJust converted `shouldBe` mkNonRecSet ["keywords" $= mkList (mkStr <$> keywords)]

  describe "platforms" $ do
    -- Helper: convert the given platforms and compare the generated
    -- "platforms" attribute against the expected nix expression.
    let check ps expr = do
          let converted = metaToNix (meta {pmPlatforms = fromList ps})
          fromJust converted `shouldBe` mkNonRecSet ["platforms" $= expr]

    it "should convert a single platform" $ do
      let platforms = [Linux]
      check platforms (mkDots "pkgs" ["stdenv", "lib", "platforms"] !. "linux")

    it "should convert platforms" $ do
      let platforms = [Linux, OpenBSD]
      let withPlatforms = mkWith (mkDots "pkgs" ["stdenv", "lib", "platforms"])
      check platforms (withPlatforms ("linux" $++ "openbsd"))

main :: IO ()
main = hspec $ do
  npmVersionParserSpec
  npmNameParserSpec
  npmNameAndVersionParserSpec
  gitIdParsingSpec
  metaToNixSpec
  parsePackageMetadataSpec
--------------------------------------------------------------------------------
/top_packages.txt:
--------------------------------------------------------------------------------
lodash
async
request
underscore
express
commander
debug
chalk
q
bluebird
mkdirp
colors
through2
coffee-script
moment
yeoman-generator
glob
gulp-util
optimist
minimist
cheerio
node-uuid
fs-extra
body-parser
react
jade
jquery
socket.io
redis
winston
yosay
uglify-js
handlebars
rimraf
semver
gulp
yargs
extend
through
mongodb
mime
underscore.string
mongoose
xml2js
grunt
ejs
superagent
mocha
marked
js-yaml
connect
shelljs
object-assign
xtend
browserify
inquirer
ember-cli-babel
aws-sdk
promise
minimatch
ws
prompt
mysql
less
morgan
cookie-parser
uuid
chai
inherits
event-stream
babel-runtime
when
readable-stream
babel
concat-stream
esprima
es6-promise
jsdom
nan
co
qs
stylus
joi
open
gulp-rename
nodemailer
backbone
path
validator
chokidar
npm
wrench
cli-color
bunyan
nconf
socket.io-client
nopt
mustache
classnames
pkginfo
clone
postcss
express-session
| hoek 105 | ncp 106 | pg 107 | cli-table 108 | iconv-lite 109 | --------------------------------------------------------------------------------