├── .github
│   └── workflows
│       ├── auto-update.yml
│       └── flakehub-publish-tagged.yml
├── .gitignore
├── README.md
├── flake.lock
├── flake.nix
├── playwright-driver
│   └── default.nix
├── playwright-test
│   ├── default.nix
│   ├── node-env.nix
│   ├── node-packages.json
│   ├── node-packages.nix
│   └── wrapped.nix
└── update.sh

/.github/workflows/auto-update.yml:
--------------------------------------------------------------------------------
 1 | name: "Auto-update"
 2 | on:
 3 |   workflow_dispatch:
 4 |   schedule:
 5 |     - cron: "0 0 * * *"
 6 | jobs:
 7 |   check-for-updates:
 8 |     runs-on: ubuntu-latest
 9 |     outputs:
10 |       updated: ${{ steps.updated.outputs.updated }}
11 |     steps:
12 |       - uses: actions/checkout@v4
13 |       - uses: cachix/install-nix-action@v27
14 |         with:
15 |           nix_path: nixpkgs=channel:nixos-unstable
16 |           github_access_token: ${{ secrets.GITHUB_TOKEN }}
17 |       - run: ./update.sh
18 |       - id: updated
19 |         run: cat updated.txt >> "$GITHUB_OUTPUT"
20 | 
21 |   check-linux:
22 |     if: needs.check-for-updates.outputs.updated == 'true'
23 |     needs: check-for-updates
24 |     runs-on: ubuntu-latest
25 |     steps:
26 |       - uses: actions/checkout@v4
27 |       - uses: cachix/install-nix-action@v27
28 |         with:
29 |           nix_path: nixpkgs=channel:nixos-unstable
30 |           github_access_token: ${{ secrets.GITHUB_TOKEN }}
31 |       - run: ./update.sh
32 |       - run: nix build .#playwright-driver
33 |       - run: nix flake check
34 | 
35 |   check-darwin:
36 |     if: needs.check-for-updates.outputs.updated == 'true'
37 |     needs: check-for-updates
38 |     runs-on: macos-latest
39 |     steps:
40 |       - uses: actions/checkout@v4
41 |       - uses: cachix/install-nix-action@v27
42 |         with:
43 |           nix_path: nixpkgs=channel:nixos-unstable
44 |           github_access_token: ${{ secrets.GITHUB_TOKEN }}
45 |       - run: ./update.sh
46 |       - run: nix build .#playwright-driver
47 |       - run: nix flake check
48 | 
49 |   push-updates:
50 |     needs: [check-for-updates, check-linux, check-darwin]
51 |     runs-on: ubuntu-latest
52 | 
53 |     permissions:
54 |       # Give the default GITHUB_TOKEN write permission to commit and push the
55 |       # added or changed files to the repository.
56 |       contents: write
57 | 
58 |     steps:
59 |       - uses: actions/checkout@v4
60 |       - uses: cachix/install-nix-action@v27
61 |         with:
62 |           nix_path: nixpkgs=channel:nixos-unstable
63 |       - run: ./update.sh
64 |       - name: Set commit and tagging message
65 |         id: commit_message_step
66 |         run: |
67 |           echo 'commit_message<<EOF' >> $GITHUB_OUTPUT
68 |           echo "Update to version $(cat version.txt)" >> $GITHUB_OUTPUT
69 |           echo 'EOF' >> $GITHUB_OUTPUT
70 |           echo 'tagging_message<<EOF' >> $GITHUB_OUTPUT
71 |           cat version.txt >> $GITHUB_OUTPUT
72 |           echo 'EOF' >> $GITHUB_OUTPUT
73 |       - uses: stefanzweifel/git-auto-commit-action@v5
74 |         with:
75 |           commit_message: ${{ steps.commit_message_step.outputs.commit_message }}
76 |           tagging_message: ${{ steps.commit_message_step.outputs.tagging_message }}
77 | 
--------------------------------------------------------------------------------
/.github/workflows/flakehub-publish-tagged.yml:
--------------------------------------------------------------------------------
 1 | name: "Publish tags to FlakeHub"
 2 | on:
 3 |   push:
 4 |     tags:
 5 |       - "v?[0-9]+.[0-9]+.[0-9]+*"
 6 |   workflow_dispatch:
 7 |     inputs:
 8 |       tag:
 9 |         description: "The existing tag to publish to FlakeHub"
10 |         type: "string"
11 |         required: true
12 | jobs:
13 |   flakehub-publish:
14 |     runs-on: "ubuntu-latest"
15 |     permissions:
16 |       id-token: "write"
17 |       contents: "read"
18 |     steps:
19 |       - uses: "actions/checkout@v4"
20 |         with:
21 |           ref: "${{ (inputs.tag != null) && format('refs/tags/{0}', inputs.tag) || '' }}"
22 |       - uses: "DeterminateSystems/nix-installer-action@main"
23 |       - uses: "DeterminateSystems/flakehub-push@main"
24 |         with:
25 |           visibility: "public"
26 |           name: "pietdevries94/playwright-web-flake"
27 |           tag: "${{ inputs.tag }}"
28 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
 1 | result
 2 | version.txt
 3 | updated.txt
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
 1 | # Playwright Web Flake
 2 | 
 3 | This Nix flake provides a way to install [Playwright](https://playwright.dev/) and its browsers on a NixOS system.
 4 | It does not include playwright-python, because I don't need it for my personal use and it sometimes lags behind the latest version of Playwright.
 5 | 
 6 | ## Usage
 7 | 
 8 | See the [`nix shell`](#with-nix-shell) example if all you need is access to the `playwright` binary in the current shell.
 9 | 
10 | If you intend to run a test suite:
11 | 
12 | - See the [`nix develop`](#with-nix-develop) example if the codebase you're working in does not already have a `flake.nix` and you don't want to add one.
13 | - If the codebase already has a `flake.nix`, adapt it as shown [below](#in-a-flake).
14 | 
15 | ### With `nix shell`
16 | 
17 | Get access to the `playwright` binary in the current shell.
18 | 
19 | ```sh
20 | nix shell github:pietdevries94/playwright-web-flake#playwright-test
21 | 
22 | which playwright && playwright --version && playwright open nixos.org
23 | ```
24 | 
25 | ### With `nix develop`
26 | 
27 | Get access to the `playwright` binary in the current shell, with the Playwright environment variables set for you.
28 | 
29 | ```sh
30 | nix develop github:pietdevries94/playwright-web-flake
31 | 
32 | which playwright && playwright --version && playwright open nixos.org
33 | ```
34 | 
35 | ### In a flake
36 | 
37 | 1. Create a `flake.nix` with the content shown below.
38 | 1. Enter the devshell with `nix develop`. 39 | 40 | ```nix 41 | { 42 | description = "Playwright development environment"; 43 | inputs.flake-utils.url = "github:numtide/flake-utils"; 44 | inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; 45 | inputs.playwright.url = "github:pietdevries94/playwright-web-flake"; 46 | 47 | outputs = { self, flake-utils, nixpkgs, playwright }: 48 | flake-utils.lib.eachDefaultSystem (system: 49 | let 50 | overlay = final: prev: { 51 | inherit (playwright.packages.${system}) playwright-test playwright-driver; 52 | }; 53 | pkgs = import nixpkgs { 54 | inherit system; 55 | overlays = [ overlay ]; 56 | }; 57 | in 58 | { 59 | devShells = { 60 | default = pkgs.mkShell { 61 | packages = [ 62 | pkgs.playwright-test 63 | ]; 64 | shellHook = '' 65 | export PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD=1 66 | export PLAYWRIGHT_BROWSERS_PATH="${pkgs.playwright-driver.browsers}" 67 | ''; 68 | }; 69 | }; 70 | }); 71 | } 72 | ``` 73 | 74 | ## Versioning 75 | 76 | The update workflow tags the commit with the version of playwright that is installed. This version can be used to checkout the commit that installed that version of playwright, to match your environment. 77 | 78 | The list of available versions can be found [here](https://github.com/pietdevries94/playwright-web-flake/tags). 79 | 80 | The [flake reference](https://nix.dev/manual/nix/2.24/command-ref/new-cli/nix3-flake.html#examples) can be modified to specify a custom version. 81 | 82 | ### Example: specify a custom version in a flake 83 | 84 | ```diff 85 | -inputs.playwright.url = "github:pietdevries94/playwright-web-flake"; 86 | +inputs.playwright.url = "github:pietdevries94/playwright-web-flake/1.37.1"; 87 | ``` 88 | 89 | ### Example: specify a custom version on the command line 90 | 91 | ```diff 92 | -nix develop github:pietdevries94/playwright-web-flake 93 | +nix develop github:pietdevries94/playwright-web-flake/1.37.1 94 | ``` 95 | 96 | ## Also see 97 | 98 | - https://primamateria.github.io/blog/playwright-nixos-webdev/ 99 | -------------------------------------------------------------------------------- /flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "flake-utils": { 4 | "inputs": { 5 | "systems": "systems" 6 | }, 7 | "locked": { 8 | "lastModified": 1726560853, 9 | "narHash": "sha256-X6rJYSESBVr3hBoH0WbKE5KvhPU5bloyZ2L4K60/fPQ=", 10 | "owner": "numtide", 11 | "repo": "flake-utils", 12 | "rev": "c1dfcf08411b08f6b8615f7d8971a2bfa81d5e8a", 13 | "type": "github" 14 | }, 15 | "original": { 16 | "owner": "numtide", 17 | "repo": "flake-utils", 18 | "type": "github" 19 | } 20 | }, 21 | "nixpkgs": { 22 | "locked": { 23 | "lastModified": 0, 24 | "narHash": "sha256-u+rxA79a0lyhG+u+oPBRtTDtzz8kvkc9a6SWSt9ekVc=", 25 | "path": "/nix/store/0283cbhm47kd3lr9zmc5fvdrx9qkav8s-source", 26 | "type": "path" 27 | }, 28 | "original": { 29 | "id": "nixpkgs", 30 | "type": "indirect" 31 | } 32 | }, 33 | "root": { 34 | "inputs": { 35 | "flake-utils": "flake-utils", 36 | "nixpkgs": "nixpkgs" 37 | } 38 | }, 39 | "systems": { 40 | "locked": { 41 | "lastModified": 1681028828, 42 | "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", 43 | "owner": "nix-systems", 44 | "repo": "default", 45 | "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", 46 | "type": "github" 47 | }, 48 | "original": { 49 | "owner": "nix-systems", 50 | "repo": "default", 51 | "type": "github" 52 | } 53 | } 54 | }, 55 | "root": "root", 56 | "version": 7 57 | } 58 | 
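Note that `flake.lock` above pins `flake-utils` to an exact revision, while `nixpkgs` is recorded as an indirect reference that was resolved through the local flake registry to a store path. The following is a minimal sketch (not a file in this repository) of how to inspect and refresh those locked inputs, assuming Nix ≥ 2.4 with the `nix-command` and `flakes` features enabled:

```sh
# Show the inputs this flake resolves to, as recorded in flake.lock
nix flake metadata github:pietdevries94/playwright-web-flake

# In a local checkout of the repository: re-resolve all inputs
# and rewrite flake.lock
nix flake update
```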
-------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | description = "A flake for playwright"; 3 | 4 | inputs.flake-utils.url = "github:numtide/flake-utils"; 5 | 6 | outputs = { self, nixpkgs, flake-utils }: 7 | flake-utils.lib.eachDefaultSystem (system: 8 | let 9 | pkgs = import nixpkgs { 10 | inherit system; 11 | }; 12 | in 13 | { 14 | packages = { 15 | playwright-test = pkgs.callPackage ./playwright-test/wrapped.nix { }; 16 | playwright-driver = pkgs.callPackage ./playwright-driver { }; 17 | }; 18 | 19 | devShells.default = pkgs.mkShell { 20 | packages = [ 21 | self.packages.${system}.playwright-test 22 | ]; 23 | shellHook = '' 24 | export PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD=1 25 | export PLAYWRIGHT_BROWSERS_PATH="${self.packages.${system}.playwright-driver.browsers}" 26 | ''; 27 | }; 28 | } 29 | ); 30 | } 31 | -------------------------------------------------------------------------------- /playwright-driver/default.nix: -------------------------------------------------------------------------------- 1 | { lib 2 | , stdenv 3 | , chromium 4 | , ffmpeg 5 | , git 6 | , jq 7 | , nodejs 8 | , fetchFromGitHub 9 | , fetchurl 10 | , makeFontsConf 11 | , makeWrapper 12 | , runCommand 13 | , unzip 14 | , cacert 15 | }: 16 | let 17 | inherit (stdenv.hostPlatform) system; 18 | 19 | throwSystem = throw "Unsupported system: ${system}"; 20 | 21 | driver = stdenv.mkDerivation (finalAttrs: 22 | let 23 | suffix = { 24 | x86_64-linux = "linux"; 25 | aarch64-linux = "linux-arm64"; 26 | x86_64-darwin = "mac"; 27 | aarch64-darwin = "mac-arm64"; 28 | }.${system} or throwSystem; 29 | filename = "playwright-${finalAttrs.version}-${suffix}.zip"; 30 | in 31 | { 32 | pname = "playwright-driver"; 33 | # run ./pkgs/development/python-modules/playwright/update.sh to update 34 | version = "1.52.0"; 35 | 36 | src = fetchurl { 37 | url = "https://playwright.azureedge.net/builds/driver/${filename}"; 38 | sha256 = { 39 | x86_64-linux = "0zcfm92in6awhjnn50c208k0ij5hyq7gvvbbp6gsmlj9gjk9fijc"; 40 | aarch64-linux = "1klw8hgw9h59l563vvj44ipc53agd7rcny8rw3y2mhgj8xdwx549"; 41 | x86_64-darwin = "0d6ib7nyi12fm0081spszaggjv660hbv4f5cv4qrcy4xn9rrw0b1"; 42 | aarch64-darwin = "1p8ml1lba359d3as1z1cn40zac39yn84xilpgg1vlmk278v959cp"; 43 | }.${system} or throwSystem; 44 | }; 45 | 46 | sourceRoot = "."; 47 | 48 | nativeBuildInputs = [ unzip ]; 49 | 50 | postPatch = '' 51 | # Use Nix's NodeJS instead of the bundled one. 52 | rm node 53 | 54 | patchShebangs package/bin/*.sh 55 | ''; 56 | 57 | installPhase = '' 58 | runHook preInstall 59 | 60 | mkdir -p $out/bin 61 | # playwright.sh doesn't exist anymore, so we write a new one 62 | cat > $out/bin/playwright < { 4 | inherit system; 5 | }, system ? builtins.currentSystem, nodejs ? 
pkgs."nodejs_14"}: 6 | 7 | let 8 | nodeEnv = import ./node-env.nix { 9 | inherit (pkgs) stdenv lib python2 runCommand writeTextFile writeShellScript; 10 | inherit pkgs nodejs; 11 | libtool = if pkgs.stdenv.isDarwin then pkgs.cctools or pkgs.darwin.cctools else null; 12 | }; 13 | in 14 | import ./node-packages.nix { 15 | inherit (pkgs) fetchurl nix-gitignore stdenv lib fetchgit; 16 | inherit nodeEnv; 17 | } 18 | -------------------------------------------------------------------------------- /playwright-test/node-env.nix: -------------------------------------------------------------------------------- 1 | # This file originates from node2nix 2 | 3 | {lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile, writeShellScript}: 4 | 5 | let 6 | # Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master 7 | utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux; 8 | 9 | python = if nodejs ? python then nodejs.python else python2; 10 | 11 | # Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise 12 | tarWrapper = runCommand "tarWrapper" {} '' 13 | mkdir -p $out/bin 14 | 15 | cat > $out/bin/tar <> $out/nix-support/hydra-build-products 40 | ''; 41 | }; 42 | 43 | # Common shell logic 44 | installPackage = writeShellScript "install-package" '' 45 | installPackage() { 46 | local packageName=$1 src=$2 47 | 48 | local strippedName 49 | 50 | local DIR=$PWD 51 | cd $TMPDIR 52 | 53 | unpackFile $src 54 | 55 | # Make the base dir in which the target dependency resides first 56 | mkdir -p "$(dirname "$DIR/$packageName")" 57 | 58 | if [ -f "$src" ] 59 | then 60 | # Figure out what directory has been unpacked 61 | packageDir="$(find . -maxdepth 1 -type d | tail -1)" 62 | 63 | # Restore write permissions to make building work 64 | find "$packageDir" -type d -exec chmod u+x {} \; 65 | chmod -R u+w "$packageDir" 66 | 67 | # Move the extracted tarball into the output folder 68 | mv "$packageDir" "$DIR/$packageName" 69 | elif [ -d "$src" ] 70 | then 71 | # Get a stripped name (without hash) of the source directory. 72 | # On old nixpkgs it's already set internally. 73 | if [ -z "$strippedName" ] 74 | then 75 | strippedName="$(stripHash $src)" 76 | fi 77 | 78 | # Restore write permissions to make building work 79 | chmod -R u+w "$strippedName" 80 | 81 | # Move the extracted directory into the output folder 82 | mv "$strippedName" "$DIR/$packageName" 83 | fi 84 | 85 | # Change to the package directory to install dependencies 86 | cd "$DIR/$packageName" 87 | } 88 | ''; 89 | 90 | # Bundle the dependencies of the package 91 | # 92 | # Only include dependencies if they don't exist. They may also be bundled in the package. 93 | includeDependencies = {dependencies}: 94 | lib.optionalString (dependencies != []) ( 95 | '' 96 | mkdir -p node_modules 97 | cd node_modules 98 | '' 99 | + (lib.concatMapStrings (dependency: 100 | '' 101 | if [ ! -e "${dependency.packageName}" ]; then 102 | ${composePackage dependency} 103 | fi 104 | '' 105 | ) dependencies) 106 | + '' 107 | cd .. 108 | '' 109 | ); 110 | 111 | # Recursively composes the dependencies of a package 112 | composePackage = { name, packageName, src, dependencies ? [], ... }@args: 113 | builtins.addErrorContext "while evaluating node package '${packageName}'" '' 114 | installPackage "${packageName}" "${src}" 115 | ${includeDependencies { inherit dependencies; }} 116 | cd .. 
117 | ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} 118 | ''; 119 | 120 | pinpointDependencies = {dependencies, production}: 121 | let 122 | pinpointDependenciesFromPackageJSON = writeTextFile { 123 | name = "pinpointDependencies.js"; 124 | text = '' 125 | var fs = require('fs'); 126 | var path = require('path'); 127 | 128 | function resolveDependencyVersion(location, name) { 129 | if(location == process.env['NIX_STORE']) { 130 | return null; 131 | } else { 132 | var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json"); 133 | 134 | if(fs.existsSync(dependencyPackageJSON)) { 135 | var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON)); 136 | 137 | if(dependencyPackageObj.name == name) { 138 | return dependencyPackageObj.version; 139 | } 140 | } else { 141 | return resolveDependencyVersion(path.resolve(location, ".."), name); 142 | } 143 | } 144 | } 145 | 146 | function replaceDependencies(dependencies) { 147 | if(typeof dependencies == "object" && dependencies !== null) { 148 | for(var dependency in dependencies) { 149 | var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency); 150 | 151 | if(resolvedVersion === null) { 152 | process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n"); 153 | } else { 154 | dependencies[dependency] = resolvedVersion; 155 | } 156 | } 157 | } 158 | } 159 | 160 | /* Read the package.json configuration */ 161 | var packageObj = JSON.parse(fs.readFileSync('./package.json')); 162 | 163 | /* Pinpoint all dependencies */ 164 | replaceDependencies(packageObj.dependencies); 165 | if(process.argv[2] == "development") { 166 | replaceDependencies(packageObj.devDependencies); 167 | } 168 | else { 169 | packageObj.devDependencies = {}; 170 | } 171 | replaceDependencies(packageObj.optionalDependencies); 172 | replaceDependencies(packageObj.peerDependencies); 173 | 174 | /* Write the fixed package.json file */ 175 | fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2)); 176 | ''; 177 | }; 178 | in 179 | '' 180 | node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"} 181 | 182 | ${lib.optionalString (dependencies != []) 183 | '' 184 | if [ -d node_modules ] 185 | then 186 | cd node_modules 187 | ${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies} 188 | cd .. 189 | fi 190 | ''} 191 | ''; 192 | 193 | # Recursively traverses all dependencies of a package and pinpoints all 194 | # dependencies in the package.json file to the versions that are actually 195 | # being used. 196 | 197 | pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args: 198 | '' 199 | if [ -d "${packageName}" ] 200 | then 201 | cd "${packageName}" 202 | ${pinpointDependencies { inherit dependencies production; }} 203 | cd .. 
204 | ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} 205 | fi 206 | ''; 207 | 208 | # Extract the Node.js source code which is used to compile packages with 209 | # native bindings 210 | nodeSources = runCommand "node-sources" {} '' 211 | tar --no-same-owner --no-same-permissions -xf ${nodejs.src} 212 | mv node-* $out 213 | ''; 214 | 215 | # Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty) 216 | addIntegrityFieldsScript = writeTextFile { 217 | name = "addintegrityfields.js"; 218 | text = '' 219 | var fs = require('fs'); 220 | var path = require('path'); 221 | 222 | function augmentDependencies(baseDir, dependencies) { 223 | for(var dependencyName in dependencies) { 224 | var dependency = dependencies[dependencyName]; 225 | 226 | // Open package.json and augment metadata fields 227 | var packageJSONDir = path.join(baseDir, "node_modules", dependencyName); 228 | var packageJSONPath = path.join(packageJSONDir, "package.json"); 229 | 230 | if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored 231 | console.log("Adding metadata fields to: "+packageJSONPath); 232 | var packageObj = JSON.parse(fs.readFileSync(packageJSONPath)); 233 | 234 | if(dependency.integrity) { 235 | packageObj["_integrity"] = dependency.integrity; 236 | } else { 237 | packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads. 238 | } 239 | 240 | if(dependency.resolved) { 241 | packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided 242 | } else { 243 | packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories. 
244 | } 245 | 246 | if(dependency.from !== undefined) { // Adopt from property if one has been provided 247 | packageObj["_from"] = dependency.from; 248 | } 249 | 250 | fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2)); 251 | } 252 | 253 | // Augment transitive dependencies 254 | if(dependency.dependencies !== undefined) { 255 | augmentDependencies(packageJSONDir, dependency.dependencies); 256 | } 257 | } 258 | } 259 | 260 | if(fs.existsSync("./package-lock.json")) { 261 | var packageLock = JSON.parse(fs.readFileSync("./package-lock.json")); 262 | 263 | if(![1, 2].includes(packageLock.lockfileVersion)) { 264 | process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n"); 265 | process.exit(1); 266 | } 267 | 268 | if(packageLock.dependencies !== undefined) { 269 | augmentDependencies(".", packageLock.dependencies); 270 | } 271 | } 272 | ''; 273 | }; 274 | 275 | # Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes 276 | reconstructPackageLock = writeTextFile { 277 | name = "reconstructpackagelock.js"; 278 | text = '' 279 | var fs = require('fs'); 280 | var path = require('path'); 281 | 282 | var packageObj = JSON.parse(fs.readFileSync("package.json")); 283 | 284 | var lockObj = { 285 | name: packageObj.name, 286 | version: packageObj.version, 287 | lockfileVersion: 2, 288 | requires: true, 289 | packages: { 290 | "": { 291 | name: packageObj.name, 292 | version: packageObj.version, 293 | license: packageObj.license, 294 | bin: packageObj.bin, 295 | dependencies: packageObj.dependencies, 296 | engines: packageObj.engines, 297 | optionalDependencies: packageObj.optionalDependencies 298 | } 299 | }, 300 | dependencies: {} 301 | }; 302 | 303 | function augmentPackageJSON(filePath, packages, dependencies) { 304 | var packageJSON = path.join(filePath, "package.json"); 305 | if(fs.existsSync(packageJSON)) { 306 | var packageObj = JSON.parse(fs.readFileSync(packageJSON)); 307 | packages[filePath] = { 308 | version: packageObj.version, 309 | integrity: "sha1-000000000000000000000000000=", 310 | dependencies: packageObj.dependencies, 311 | engines: packageObj.engines, 312 | optionalDependencies: packageObj.optionalDependencies 313 | }; 314 | dependencies[packageObj.name] = { 315 | version: packageObj.version, 316 | integrity: "sha1-000000000000000000000000000=", 317 | dependencies: {} 318 | }; 319 | processDependencies(path.join(filePath, "node_modules"), packages, dependencies[packageObj.name].dependencies); 320 | } 321 | } 322 | 323 | function processDependencies(dir, packages, dependencies) { 324 | if(fs.existsSync(dir)) { 325 | var files = fs.readdirSync(dir); 326 | 327 | files.forEach(function(entry) { 328 | var filePath = path.join(dir, entry); 329 | var stats = fs.statSync(filePath); 330 | 331 | if(stats.isDirectory()) { 332 | if(entry.substr(0, 1) == "@") { 333 | // When we encounter a namespace folder, augment all packages belonging to the scope 334 | var pkgFiles = fs.readdirSync(filePath); 335 | 336 | pkgFiles.forEach(function(entry) { 337 | if(stats.isDirectory()) { 338 | var pkgFilePath = path.join(filePath, entry); 339 | augmentPackageJSON(pkgFilePath, packages, dependencies); 340 | } 341 | }); 342 | } else { 343 | augmentPackageJSON(filePath, packages, dependencies); 344 | } 345 | } 346 | }); 347 | } 348 | } 349 | 350 | processDependencies("node_modules", lockObj.packages, lockObj.dependencies); 351 | 352 | fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2)); 
353 | ''; 354 | }; 355 | 356 | # Script that links bins defined in package.json to the node_modules bin directory 357 | # NPM does not do this for top-level packages itself anymore as of v7 358 | linkBinsScript = writeTextFile { 359 | name = "linkbins.js"; 360 | text = '' 361 | var fs = require('fs'); 362 | var path = require('path'); 363 | 364 | var packageObj = JSON.parse(fs.readFileSync("package.json")); 365 | 366 | var nodeModules = Array(packageObj.name.split("/").length).fill("..").join(path.sep); 367 | 368 | if(packageObj.bin !== undefined) { 369 | fs.mkdirSync(path.join(nodeModules, ".bin")) 370 | 371 | if(typeof packageObj.bin == "object") { 372 | Object.keys(packageObj.bin).forEach(function(exe) { 373 | if(fs.existsSync(packageObj.bin[exe])) { 374 | console.log("linking bin '" + exe + "'"); 375 | fs.symlinkSync( 376 | path.join("..", packageObj.name, packageObj.bin[exe]), 377 | path.join(nodeModules, ".bin", exe) 378 | ); 379 | } 380 | else { 381 | console.log("skipping non-existent bin '" + exe + "'"); 382 | } 383 | }) 384 | } 385 | else { 386 | if(fs.existsSync(packageObj.bin)) { 387 | console.log("linking bin '" + packageObj.bin + "'"); 388 | fs.symlinkSync( 389 | path.join("..", packageObj.name, packageObj.bin), 390 | path.join(nodeModules, ".bin", packageObj.name.split("/").pop()) 391 | ); 392 | } 393 | else { 394 | console.log("skipping non-existent bin '" + packageObj.bin + "'"); 395 | } 396 | } 397 | } 398 | else if(packageObj.directories !== undefined && packageObj.directories.bin !== undefined) { 399 | fs.mkdirSync(path.join(nodeModules, ".bin")) 400 | 401 | fs.readdirSync(packageObj.directories.bin).forEach(function(exe) { 402 | if(fs.existsSync(path.join(packageObj.directories.bin, exe))) { 403 | console.log("linking bin '" + exe + "'"); 404 | fs.symlinkSync( 405 | path.join("..", packageObj.name, packageObj.directories.bin, exe), 406 | path.join(nodeModules, ".bin", exe) 407 | ); 408 | } 409 | else { 410 | console.log("skipping non-existent bin '" + exe + "'"); 411 | } 412 | }) 413 | } 414 | ''; 415 | }; 416 | 417 | prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}: 418 | let 419 | forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com"; 420 | in 421 | '' 422 | # Pinpoint the versions of all dependencies to the ones that are actually being used 423 | echo "pinpointing versions of dependencies..." 424 | source $pinpointDependenciesScriptPath 425 | 426 | # Patch the shebangs of the bundled modules to prevent them from 427 | # calling executables outside the Nix store as much as possible 428 | patchShebangs . 429 | 430 | # Deploy the Node.js package by running npm install. Since the 431 | # dependencies have been provided already by ourselves, it should not 432 | # attempt to install them again, which is good, because we want to make 433 | # it Nix's responsibility. If it needs to install any dependencies 434 | # anyway (e.g. because the dependency parameters are 435 | # incomplete/incorrect), it fails. 436 | # 437 | # The other responsibilities of NPM are kept -- version checks, build 438 | # steps, postprocessing etc. 439 | 440 | export HOME=$TMPDIR 441 | cd "${packageName}" 442 | runHook preRebuild 443 | 444 | ${lib.optionalString bypassCache '' 445 | ${lib.optionalString reconstructLock '' 446 | if [ -f package-lock.json ] 447 | then 448 | echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!" 449 | echo "This will most likely result in version mismatches! 
We will remove the lock file and regenerate it!" 450 | rm package-lock.json 451 | else 452 | echo "No package-lock.json file found, reconstructing..." 453 | fi 454 | 455 | node ${reconstructPackageLock} 456 | ''} 457 | 458 | node ${addIntegrityFieldsScript} 459 | ''} 460 | 461 | npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild 462 | 463 | runHook postRebuild 464 | 465 | if [ "''${dontNpmInstall-}" != "1" ] 466 | then 467 | # NPM tries to download packages even when they already exist if npm-shrinkwrap is used. 468 | rm -f npm-shrinkwrap.json 469 | 470 | npm ${forceOfflineFlag} --nodedir=${nodeSources} --no-bin-links --ignore-scripts ${npmFlags} ${lib.optionalString production "--production"} install 471 | fi 472 | 473 | # Link executables defined in package.json 474 | node ${linkBinsScript} 475 | ''; 476 | 477 | # Builds and composes an NPM package including all its dependencies 478 | buildNodePackage = 479 | { name 480 | , packageName 481 | , version ? null 482 | , dependencies ? [] 483 | , buildInputs ? [] 484 | , production ? true 485 | , npmFlags ? "" 486 | , dontNpmInstall ? false 487 | , bypassCache ? false 488 | , reconstructLock ? false 489 | , preRebuild ? "" 490 | , dontStrip ? true 491 | , unpackPhase ? "true" 492 | , buildPhase ? "true" 493 | , meta ? {} 494 | , ... }@args: 495 | 496 | let 497 | extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" "meta" ]; 498 | in 499 | stdenv.mkDerivation ({ 500 | name = "${name}${if version == null then "" else "-${version}"}"; 501 | buildInputs = [ tarWrapper python nodejs ] 502 | ++ lib.optional (stdenv.isLinux) utillinux 503 | ++ lib.optional (stdenv.isDarwin) libtool 504 | ++ buildInputs; 505 | 506 | inherit nodejs; 507 | 508 | inherit dontStrip; # Stripping may fail a build for some package deployments 509 | inherit dontNpmInstall preRebuild unpackPhase buildPhase; 510 | 511 | compositionScript = composePackage args; 512 | pinpointDependenciesScript = pinpointDependenciesOfPackage args; 513 | 514 | passAsFile = [ "compositionScript" "pinpointDependenciesScript" ]; 515 | 516 | installPhase = '' 517 | source ${installPackage} 518 | 519 | # Create and enter a root node_modules/ folder 520 | mkdir -p $out/lib/node_modules 521 | cd $out/lib/node_modules 522 | 523 | # Compose the package and all its dependencies 524 | source $compositionScriptPath 525 | 526 | ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }} 527 | 528 | # Create symlink to the deployed executable folder, if applicable 529 | if [ -d "$out/lib/node_modules/.bin" ] 530 | then 531 | ln -s $out/lib/node_modules/.bin $out/bin 532 | 533 | # Fixup all executables 534 | ls $out/bin/* | while read i 535 | do 536 | file="$(readlink -f "$i")" 537 | chmod u+rwx "$file" 538 | if isScript "$file" 539 | then 540 | sed -i 's/\r$//' "$file" # convert crlf to lf 541 | fi 542 | done 543 | fi 544 | 545 | # Create symlinks to the deployed manual page folders, if applicable 546 | if [ -d "$out/lib/node_modules/${packageName}/man" ] 547 | then 548 | mkdir -p $out/share 549 | for dir in "$out/lib/node_modules/${packageName}/man/"* 550 | do 551 | mkdir -p $out/share/man/$(basename "$dir") 552 | for page in "$dir"/* 553 | do 554 | ln -s $page $out/share/man/$(basename "$dir") 555 | done 556 | done 557 | fi 558 | 559 | # Run post install hook, if provided 560 | runHook postInstall 561 | ''; 562 | 563 | meta = 
{ 564 | # default to Node.js' platforms 565 | platforms = nodejs.meta.platforms; 566 | } // meta; 567 | } // extraArgs); 568 | 569 | # Builds a node environment (a node_modules folder and a set of binaries) 570 | buildNodeDependencies = 571 | { name 572 | , packageName 573 | , version ? null 574 | , src 575 | , dependencies ? [] 576 | , buildInputs ? [] 577 | , production ? true 578 | , npmFlags ? "" 579 | , dontNpmInstall ? false 580 | , bypassCache ? false 581 | , reconstructLock ? false 582 | , dontStrip ? true 583 | , unpackPhase ? "true" 584 | , buildPhase ? "true" 585 | , ... }@args: 586 | 587 | let 588 | extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ]; 589 | in 590 | stdenv.mkDerivation ({ 591 | name = "node-dependencies-${name}${if version == null then "" else "-${version}"}"; 592 | 593 | buildInputs = [ tarWrapper python nodejs ] 594 | ++ lib.optional (stdenv.isLinux) utillinux 595 | ++ lib.optional (stdenv.isDarwin) libtool 596 | ++ buildInputs; 597 | 598 | inherit dontStrip; # Stripping may fail a build for some package deployments 599 | inherit dontNpmInstall unpackPhase buildPhase; 600 | 601 | includeScript = includeDependencies { inherit dependencies; }; 602 | pinpointDependenciesScript = pinpointDependenciesOfPackage args; 603 | 604 | passAsFile = [ "includeScript" "pinpointDependenciesScript" ]; 605 | 606 | installPhase = '' 607 | source ${installPackage} 608 | 609 | mkdir -p $out/${packageName} 610 | cd $out/${packageName} 611 | 612 | source $includeScriptPath 613 | 614 | # Create fake package.json to make the npm commands work properly 615 | cp ${src}/package.json . 616 | chmod 644 package.json 617 | ${lib.optionalString bypassCache '' 618 | if [ -f ${src}/package-lock.json ] 619 | then 620 | cp ${src}/package-lock.json . 621 | chmod 644 package-lock.json 622 | fi 623 | ''} 624 | 625 | # Go to the parent folder to make sure that all packages are pinpointed 626 | cd .. 627 | ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} 628 | 629 | ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }} 630 | 631 | # Expose the executables that were installed 632 | cd .. 633 | ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} 634 | 635 | mv ${packageName} lib 636 | ln -s $out/lib/node_modules/.bin $out/bin 637 | ''; 638 | } // extraArgs); 639 | 640 | # Builds a development shell 641 | buildNodeShell = 642 | { name 643 | , packageName 644 | , version ? null 645 | , src 646 | , dependencies ? [] 647 | , buildInputs ? [] 648 | , production ? true 649 | , npmFlags ? "" 650 | , dontNpmInstall ? false 651 | , bypassCache ? false 652 | , reconstructLock ? false 653 | , dontStrip ? true 654 | , unpackPhase ? "true" 655 | , buildPhase ? "true" 656 | , ... 
}@args: 657 | 658 | let 659 | nodeDependencies = buildNodeDependencies args; 660 | extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "unpackPhase" "buildPhase" ]; 661 | in 662 | stdenv.mkDerivation ({ 663 | name = "node-shell-${name}${if version == null then "" else "-${version}"}"; 664 | 665 | buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs; 666 | buildCommand = '' 667 | mkdir -p $out/bin 668 | cat > $out/bin/shell < "$root/version.txt" 29 | 30 | # Check if files have changed 31 | if git diff --exit-code; then 32 | echo "No changes" 33 | echo "updated=false" > updated.txt 34 | exit 0 35 | fi 36 | 37 | # Update the node-packages.json 38 | (cd "$playwright_test"; node2nix -i node-packages.json) 39 | echo "updated=true" > updated.txt 40 | --------------------------------------------------------------------------------