├── .prettierignore ├── .eslintignore ├── .dockerignore ├── lib ├── expected.ts ├── tsconfig.json ├── print-hashes.ts ├── apply-patches.ts ├── generate-expected-shas.ts ├── places.ts ├── bin.ts ├── log.ts ├── utils.ts ├── verify.ts ├── system.ts ├── index.ts ├── expected-shas.json └── build.ts ├── .gitignore ├── patches ├── patches.json ├── node.v10.24.1.cpp.patch ├── node.v8.17.0.cpp.patch ├── node.v14.21.3.cpp.patch ├── node.v19.8.1.cpp.patch ├── node.v18.15.0.cpp.patch └── node.v16.19.1.cpp.patch ├── tsconfig.json ├── Dockerfile.linux ├── .eslintrc ├── Dockerfile.alpine ├── .github └── workflows │ ├── ci.yml │ ├── build-macos.yml │ ├── build-windows.yml │ ├── build-alpine.yml │ ├── build-linuxstatic.yml │ ├── build-linux.yml │ └── build-all.yml ├── license.md ├── Dockerfile.linuxcross ├── scripts └── test_patch.sh ├── package.json └── README.md /.prettierignore: -------------------------------------------------------------------------------- 1 | lib-es5 -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | /lib-es5 2 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | .* 2 | dist 3 | Dockerfile.* 4 | lib-es5 5 | node_modules 6 | -------------------------------------------------------------------------------- /lib/expected.ts: -------------------------------------------------------------------------------- 1 | import expectedShas from './expected-shas.json'; 2 | 3 | export const EXPECTED_HASHES: Record = expectedShas; 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # build output 2 | /lib-es5 3 | 4 | # dependencies 5 | /node_modules 6 | 7 | # logs 8 | npm-debug.log 9 | 
yarn-error.log 10 | 11 | # TypeScript incremental compilation cache 12 | *.tsbuildinfo 13 | -------------------------------------------------------------------------------- /lib/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../tsconfig.json", 3 | "compilerOptions": { 4 | "rootDir": ".", 5 | "outDir": "../lib-es5", 6 | "resolveJsonModule": true 7 | }, 8 | "include": ["**/*", "../typings/**/*", "**/*.json"], 9 | "references": [{ "path": "../" }] 10 | } 11 | -------------------------------------------------------------------------------- /patches/patches.json: -------------------------------------------------------------------------------- 1 | { 2 | "v19.8.1": ["node.v19.8.1.cpp.patch"], 3 | "v18.15.0": ["node.v18.15.0.cpp.patch"], 4 | "v16.19.1": ["node.v16.19.1.cpp.patch"], 5 | "v14.21.3": ["node.v14.21.3.cpp.patch"], 6 | "v12.22.11": ["node.v12.22.11.cpp.patch"], 7 | "v10.24.1": ["node.v10.24.1.cpp.patch"], 8 | "v8.17.0": ["node.v8.17.0.cpp.patch"] 9 | } 10 | -------------------------------------------------------------------------------- /lib/print-hashes.ts: -------------------------------------------------------------------------------- 1 | import { EXPECTED_HASHES } from './expected'; 2 | import { nodeBinarySortFunction } from './utils'; 3 | 4 | /* eslint-disable no-console */ 5 | 6 | function main() { 7 | for (const nodeVersion of Object.keys(EXPECTED_HASHES).sort(nodeBinarySortFunction)) { 8 | console.log(`${EXPECTED_HASHES[nodeVersion]} ${nodeVersion}`); 9 | } 10 | } 11 | 12 | main(); 13 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es5", 4 | "module": "commonjs", 5 | "allowJs": true, 6 | "declaration": true, 7 | "declarationMap": true, 8 | "sourceMap": true, 9 | "strict": true, 10 | "esModuleInterop": true, 11 
| "incremental": true, 12 | "resolveJsonModule": true, 13 | "rootDir": ".", 14 | "outDir": ".", 15 | "composite": true 16 | }, 17 | "files": ["package.json", "patches/patches.json"] 18 | } 19 | -------------------------------------------------------------------------------- /Dockerfile.linux: -------------------------------------------------------------------------------- 1 | FROM oraclelinux:7 2 | 3 | USER root:root 4 | WORKDIR /root/pkg-fetch/ 5 | 6 | RUN yum install -y oracle-softwarecollection-release-el7 7 | 8 | RUN yum-config-manager --enable ol7_latest ol7_optional_latest software_collections 9 | RUN yum upgrade -y 10 | 11 | RUN yum install -y \ 12 | devtoolset-10 glibc-headers kernel-headers \ 13 | make patch python2 \ 14 | rh-python36-python 15 | 16 | RUN curl -fsSL https://rpm.nodesource.com/setup_16.x | bash - 17 | RUN yum install -y nodejs 18 | RUN npm install -g yarn 19 | 20 | COPY . ./ 21 | 22 | ARG PKG_FETCH_OPTION_n 23 | 24 | RUN yarn install 25 | 26 | RUN scl enable devtoolset-10 rh-python36 \ 27 | " \ 28 | yarn start --node-range $PKG_FETCH_OPTION_n --output dist \ 29 | " 30 | -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "extends": [ 3 | "airbnb-base", 4 | "plugin:@typescript-eslint/recommended", 5 | "prettier" 6 | ], 7 | "parser": "@typescript-eslint/parser", 8 | "settings": { 9 | "import/resolver": { 10 | "typescript": {} 11 | } 12 | }, 13 | "rules": { 14 | "max-classes-per-file": "off", 15 | "no-bitwise": "off", 16 | "no-continue": "off", 17 | "no-nested-ternary": "off", 18 | "class-methods-use-this": "off", 19 | "no-await-in-loop": "off", 20 | "no-constant-condition": "off", 21 | "no-param-reassign": "off", 22 | "consistent-return": "off", 23 | "no-restricted-syntax": "off", 24 | "import/prefer-default-export": "off", 25 | "camelcase": "off", 26 | "@typescript-eslint/explicit-module-boundary-types": "off", 
27 | "import/extensions": [ 28 | "error", 29 | "ignorePackages", 30 | { 31 | "js": "never", 32 | "jsx": "never", 33 | "ts": "never", 34 | "tsx": "never" 35 | } 36 | ] 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /Dockerfile.alpine: -------------------------------------------------------------------------------- 1 | ARG HOST_ARCH=x86_64 2 | ARG TARGET_TRIPLE=aarch64-linux-musl 3 | 4 | FROM muslcc/$HOST_ARCH:$TARGET_TRIPLE 5 | 6 | ARG PKG_FETCH_OPTION_a 7 | ARG PKG_FETCH_OPTION_n 8 | ARG PKG_FETCH_OPTION_p 9 | 10 | USER root:root 11 | 12 | WORKDIR /root/pkg-fetch/ 13 | 14 | RUN apk add --no-cache build-base git linux-headers npm python3 yarn 15 | 16 | # https://gitlab.alpinelinux.org/alpine/aports/-/issues/8626 17 | ENV CFLAGS=-U_FORTIFY_SOURCE 18 | ENV CFLAGS_host=-U_FORTIFY_SOURCE 19 | ENV CXXFLAGS=-U_FORTIFY_SOURCE 20 | ENV CXXFLAGS_host=-U_FORTIFY_SOURCE 21 | 22 | ENV CC=/bin/gcc 23 | ENV CXX=/bin/g++ 24 | ENV AR=/bin/ar 25 | ENV NM=/bin/nm 26 | ENV READELF=/bin/readelf 27 | ENV STRIP=/bin/strip 28 | 29 | ENV CC_host=/usr/bin/gcc 30 | ENV CXX_host=/usr/bin/g++ 31 | ENV AR_host=/usr/bin/ar 32 | ENV NM_host=/usr/bin/nm 33 | ENV READELF_host=/usr/bin/readelf 34 | 35 | COPY . 
./ 36 | 37 | RUN yarn install 38 | 39 | RUN yarn start --arch $PKG_FETCH_OPTION_a --node-range $PKG_FETCH_OPTION_n --platform $PKG_FETCH_OPTION_p --output dist 40 | -------------------------------------------------------------------------------- /lib/apply-patches.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | import yargs from 'yargs'; 4 | 5 | import { log } from './log'; 6 | import { getNodeVersion } from './index'; 7 | import { version } from '../package.json'; 8 | import { fetchExtractApply, prepBuildPath } from './build'; 9 | 10 | async function applyPatchesOnVersion(nodeRange: string, quietExtraction = false) { 11 | await prepBuildPath(); 12 | await fetchExtractApply(getNodeVersion(nodeRange), quietExtraction); 13 | } 14 | 15 | async function main() { 16 | const { argv } = yargs 17 | .option('node-range', { alias: 'n', default: 'latest', type: 'string' }) 18 | .option('quiet-extraction', { alias: 'q', type: 'boolean' }) 19 | .version(version) 20 | .alias('v', 'version') 21 | .help() 22 | .alias('h', 'help'); 23 | 24 | const { 25 | 'node-range': nodeRange, 26 | 'quiet-extraction': quietExtraction, 27 | } = argv; 28 | 29 | await applyPatchesOnVersion(nodeRange, quietExtraction); 30 | } 31 | 32 | main().catch((error) => { 33 | if (!error.wasReported) log.error(error); 34 | process.exit(2); 35 | }); 36 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | 9 | jobs: 10 | test: 11 | strategy: 12 | fail-fast: false # prevent test to stop if one fails 13 | matrix: 14 | node-version: [10, 12, 14, 16, 18, 19] 15 | os: [ubuntu-latest] # Skip macos-latest, windows-latest for now 16 | 17 | runs-on: ${{ matrix.os }} 18 | steps: 19 | - name: Checkout 20 | uses: actions/checkout@v2 21 | 22 | 
- name: Use Node.js ${{ matrix.node-version }} 23 | uses: actions/setup-node@v2.1.5 24 | with: 25 | node-version: ${{ matrix.node-version }} 26 | - uses: actions/cache@v2 27 | with: 28 | path: ~/.pkg-cache/ 29 | key: ${{ matrix.os }}-${{ matrix.node-version }} 30 | 31 | - name: Install deps 32 | run: yarn install 33 | 34 | - name: Lint 35 | run: yarn lint 36 | 37 | - name: Build 38 | run: yarn build 39 | 40 | - name: Check Patches 41 | run: ./scripts/test_patch.sh node${{ matrix.node-version }} 42 | -------------------------------------------------------------------------------- /license.md: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Zeit, Inc. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /.github/workflows/build-macos.yml: -------------------------------------------------------------------------------- 1 | name: Build Node binaries for macOS 2 | 3 | on: 4 | workflow_dispatch: 5 | workflow_call: 6 | 7 | jobs: 8 | macos-x64: 9 | runs-on: macos-11.0 10 | 11 | strategy: 12 | fail-fast: false 13 | matrix: 14 | target-node: [14, 16, 18, 19] 15 | 16 | steps: 17 | - uses: actions/checkout@v2 18 | 19 | - uses: maxim-lobanov/setup-xcode@v1 20 | with: 21 | xcode-version: latest 22 | 23 | - name: Use Node.js 14 24 | uses: actions/setup-node@v1 25 | with: 26 | node-version: 14 27 | 28 | - run: yarn install 29 | 30 | - run: yarn start --node-range node${{ matrix.target-node }} --output dist 31 | 32 | - name: Check if binary is compiled 33 | id: check_file 34 | run: | 35 | (test -f dist/*.sha256sum && echo ::set-output name=EXISTS::true) || echo ::set-output name=EXISTS::false 36 | 37 | - uses: actions/upload-artifact@v2 38 | if: steps.check_file.outputs.EXISTS == 'true' 39 | with: 40 | name: node${{ matrix.target-node }}-macos-x64 41 | path: dist/* 42 | -------------------------------------------------------------------------------- /.github/workflows/build-windows.yml: -------------------------------------------------------------------------------- 1 | name: Build Node binaries for Windows 2 | 3 | on: 4 | workflow_dispatch: 5 | workflow_call: 6 | 7 | jobs: 8 | windows-vs2019: 9 | runs-on: windows-2019 10 | 11 | strategy: 12 | fail-fast: false 13 | matrix: 14 | target-node: [14, 16, 18, 19] 15 | target-arch: [x64, arm64] 16 | 17 | steps: 18 | - uses: actions/checkout@v2 19 | 20 | - name: Use Node.js 14 21 | uses: actions/setup-node@v1 22 | with: 23 | node-version: 14 24 | 25 | - run: yarn install 26 | 27 | - run: choco install nasm 28 | 29 | - run: yarn start --node-range node${{ matrix.target-node }} --arch ${{ matrix.target-arch }} --output dist 30 | 31 | - name: Check if 
binary is compiled 32 | id: check_file 33 | run: | 34 | if (Test-Path -Path dist\\*.sha256sum -PathType Leaf) { echo "::set-output name=EXISTS::true" } else { echo "::set-output name=EXISTS::false" } 35 | 36 | - uses: actions/upload-artifact@v2 37 | if: steps.check_file.outputs.EXISTS == 'true' 38 | with: 39 | name: node${{ matrix.target-node }}-windows-${{ matrix.target-arch }} 40 | path: dist\\* 41 | -------------------------------------------------------------------------------- /Dockerfile.linuxcross: -------------------------------------------------------------------------------- 1 | FROM ubuntu:bionic 2 | 3 | USER root:root 4 | WORKDIR /root/pkg-fetch/ 5 | ENV DEBIAN_FRONTEND=noninteractive 6 | 7 | RUN apt-get update 8 | RUN apt-get install -y curl software-properties-common 9 | RUN curl -fsSL https://deb.nodesource.com/setup_16.x | bash - 10 | RUN apt-get install -y nodejs 11 | 12 | RUN apt-get install -y binutils g++-8 git make patch python3 python3-distutils 13 | 14 | ARG TARGET_TOOLCHAIN_ARCH 15 | 16 | RUN [ `uname -m` = ${TARGET_TOOLCHAIN_ARCH} ] || \ 17 | apt-get install -y binutils-${TARGET_TOOLCHAIN_ARCH}-linux-gnu g++-8-${TARGET_TOOLCHAIN_ARCH}-linux-gnu 18 | 19 | ENV CC=${TARGET_TOOLCHAIN_ARCH}-linux-gnu-gcc-8 20 | ENV CXX=${TARGET_TOOLCHAIN_ARCH}-linux-gnu-g++-8 21 | ENV AR=${TARGET_TOOLCHAIN_ARCH}-linux-gnu-ar 22 | ENV NM=${TARGET_TOOLCHAIN_ARCH}-linux-gnu-nm 23 | ENV RANLIB=${TARGET_TOOLCHAIN_ARCH}-linux-gnu-ranlib 24 | ENV READELF=${TARGET_TOOLCHAIN_ARCH}-linux-gnu-readelf 25 | ENV STRIP=${TARGET_TOOLCHAIN_ARCH}-linux-gnu-strip 26 | ENV CC_host=gcc-8 27 | ENV CXX_host=g++-8 28 | ENV AR_host=ar 29 | ENV NM_host=nm 30 | ENV RANLIB_host=ranlib 31 | ENV READELF_host=readelf 32 | 33 | RUN npm install -g yarn 34 | 35 | COPY . 
./ 36 | 37 | RUN yarn install 38 | 39 | ARG PKG_FETCH_OPTION_a 40 | ARG PKG_FETCH_OPTION_n 41 | 42 | RUN yarn start --arch $PKG_FETCH_OPTION_a --node-range $PKG_FETCH_OPTION_n --output dist 43 | -------------------------------------------------------------------------------- /lib/generate-expected-shas.ts: -------------------------------------------------------------------------------- 1 | import yargs from 'yargs'; 2 | import { readFileSync, writeFileSync } from 'fs'; 3 | import { join } from 'path'; 4 | import { log } from './log'; 5 | import { version } from '../package.json'; 6 | import { nodeBinarySortFunction } from './utils'; 7 | 8 | async function main() { 9 | const { argv } = yargs 10 | .option('input', { alias: 'i', default: 'shas.txt', type: 'string' }) 11 | .version(version) 12 | .alias('v', 'version') 13 | .help() 14 | .alias('h', 'help'); 15 | 16 | const shaFileContent = readFileSync(argv.input).toString(); 17 | const shaMap: { [id: string]: string } = {}; 18 | for (const line of shaFileContent.split('\n')) { 19 | // Expect line to be of the form 20 | // node-v-host-arch 21 | const lineParts = line.split(/\s+/); 22 | if (lineParts.length === 2) { 23 | // eslint-disable-next-line prefer-destructuring 24 | shaMap[lineParts[1]] = lineParts[0]; 25 | } 26 | } 27 | 28 | // Sort map 29 | const sortedShaMap: { [id: string]: string } = {}; 30 | for (const nodeVersion of Object.keys(shaMap).sort(nodeBinarySortFunction)) { 31 | sortedShaMap[nodeVersion] = shaMap[nodeVersion]; 32 | } 33 | 34 | writeFileSync(join(__dirname, '../lib/expected-shas.json'), 35 | `${JSON.stringify(sortedShaMap, null, 2)}\n`); 36 | } 37 | 38 | main().catch((error) => { 39 | if (!error.wasReported) log.error(error); 40 | process.exit(2); 41 | }); 42 | -------------------------------------------------------------------------------- /lib/places.ts: -------------------------------------------------------------------------------- 1 | import { major, minor } from 'semver'; 2 | import os from 
'os'; 3 | import path from 'path'; 4 | 5 | const { PKG_CACHE_PATH } = process.env; 6 | const IGNORE_TAG = Boolean(process.env.PKG_IGNORE_TAG); 7 | 8 | export const cachePath = 9 | PKG_CACHE_PATH || path.join(os.homedir(), '.pkg-cache'); 10 | 11 | function tagFromVersion(version: string) { 12 | const mj = major(version); 13 | const mn = minor(version); 14 | 15 | return `v${mj}.${mn}`; 16 | } 17 | 18 | interface PlaceOptions { 19 | version: string; 20 | nodeVersion: string; 21 | platform: string; 22 | arch: string; 23 | } 24 | 25 | interface LocalPlaceOptions extends PlaceOptions { 26 | from: string; 27 | output?: string; 28 | } 29 | 30 | export function localPlace({ 31 | from, 32 | output, 33 | version, 34 | nodeVersion, 35 | platform, 36 | arch, 37 | }: LocalPlaceOptions) { 38 | let binDir: string; 39 | 40 | if (output) { 41 | binDir = path.resolve(output); 42 | } else { 43 | binDir = IGNORE_TAG 44 | ? path.join(cachePath) 45 | : path.join(cachePath, tagFromVersion(version)); 46 | } 47 | 48 | return path.resolve( 49 | binDir, 50 | `${output ? 'node' : from}-${nodeVersion}-${platform}-${arch}` 51 | ); 52 | } 53 | 54 | export interface Remote { 55 | tag: string; 56 | name: string; 57 | } 58 | 59 | export function remotePlace({ 60 | version, 61 | nodeVersion, 62 | platform, 63 | arch, 64 | }: PlaceOptions): Remote { 65 | return { 66 | tag: tagFromVersion(version), 67 | name: `node-${nodeVersion}-${platform}-${arch}`, 68 | }; 69 | } 70 | -------------------------------------------------------------------------------- /scripts/test_patch.sh: -------------------------------------------------------------------------------- 1 | node_range=$1 2 | 3 | if [ -z "$node_range" ]; then 4 | echo "usage: ./test_patch.sh " 5 | echo " where nodeVersion is of the form 'node18' or 'node16'" 6 | exit 1 7 | fi 8 | 9 | echo "Applying patches for $node_range" 10 | 11 | command="npm run applyPatches -- --node-range $node_range --quiet-extraction" 12 | output=$($command) 13 | status=$? 
14 | 15 | if [ $status -ne 0 ]; then 16 | echo -e "Command failed:\n$command\n$output" 17 | exit 1 18 | fi 19 | 20 | echo "Checking output" 21 | 22 | expected_include_strings=("fetching" "extracting" "applying patches" "patching file") 23 | failing_strings=("failed" "offset" "rejects") 24 | 25 | found_all_expected_strings=true 26 | for s in "${expected_include_strings[@]}"; do 27 | if [[ "${output,,}" != *"${s,,}"* ]]; then 28 | found_all_expected_strings=false 29 | echo -e "ERROR: Did not find \"$s\" in output" 30 | fi 31 | done 32 | 33 | if [ "$found_all_expected_strings" = false ]; then 34 | echo -e "\nDid not find the expected text when applying patches.\n\nOutput:\n$output" 35 | exit 1 36 | fi 37 | 38 | line_errors=0 39 | while IFS= read -r line; do 40 | for fString in "${failing_strings[@]}"; do 41 | if [[ "${line,,}" == *"${fString,,}"* ]]; then 42 | echo "ERROR: Found \"$fString\" in line: \"$line\""; 43 | line_errors=$((line_errors + 1)) 44 | fi 45 | done 46 | done <<< "$output" 47 | 48 | if [ $line_errors -gt 0 ]; then 49 | echo "ERROR: errors found while attempting to apply patches" 50 | exit $line_errors 51 | fi 52 | 53 | echo "All checks complete" 54 | -------------------------------------------------------------------------------- /lib/bin.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | import yargs from 'yargs'; 4 | 5 | import { hostPlatform, hostArch } from './system'; 6 | import { log } from './log'; 7 | import { need } from './index'; 8 | import { verify } from './verify'; 9 | import { version } from '../package.json'; 10 | 11 | async function main() { 12 | const { argv } = yargs 13 | .env('PKG_FETCH_OPTION_') 14 | .option('node-range', { alias: 'n', default: 'latest', type: 'string' }) 15 | .option('platform', { alias: 'p', default: hostPlatform, type: 'string' }) 16 | .option('arch', { alias: 'a', default: hostArch, type: 'string' }) 17 | .option('test', { alias: 't', type: 
'boolean' }) 18 | .option('force-fetch', { 19 | alias: 'f', 20 | type: 'boolean', 21 | }) 22 | .option('force-build', { 23 | alias: 'b', 24 | type: 'boolean', 25 | }) 26 | .conflicts('force-fetch', 'force-build') 27 | .option('output', { alias: 'o', type: 'string' }) 28 | .version(version) 29 | .alias('v', 'version') 30 | .help() 31 | .alias('h', 'help'); 32 | 33 | const { 34 | 'node-range': nodeRange, 35 | platform, 36 | arch, 37 | test, 38 | 'force-fetch': forceFetch, 39 | 'force-build': forceBuild, 40 | output, 41 | } = argv; 42 | 43 | const local = await need({ 44 | nodeRange, 45 | platform, 46 | arch, 47 | forceFetch, 48 | forceBuild, 49 | output, 50 | }); 51 | 52 | log.info(local); 53 | 54 | if (test) { 55 | await verify(local); 56 | } 57 | } 58 | 59 | main().catch((error) => { 60 | if (!error.wasReported) log.error(error); 61 | process.exit(2); 62 | }); 63 | -------------------------------------------------------------------------------- /.github/workflows/build-alpine.yml: -------------------------------------------------------------------------------- 1 | name: Build Node binaries for Alpine 2 | 3 | on: 4 | workflow_dispatch: 5 | workflow_call: 6 | 7 | jobs: 8 | alpine: 9 | runs-on: ubuntu-20.04 10 | 11 | strategy: 12 | fail-fast: false 13 | matrix: 14 | target-node: [14, 16, 18, 19] 15 | target-arch: [x64, arm64] 16 | include: 17 | - target-arch: x64 18 | target-triple: x86_64-linux-musl 19 | host-arch: x86_64 20 | - target-arch: arm64 21 | target-triple: aarch64-linux-musl 22 | host-arch: x86_64 23 | 24 | steps: 25 | - uses: actions/checkout@v2 26 | 27 | - name: Set up Docker Buildx 28 | uses: docker/setup-buildx-action@v1 29 | 30 | - name: Build 31 | uses: docker/build-push-action@v2 32 | with: 33 | build-args: | 34 | HOST_ARCH=${{ matrix.host-arch }} 35 | TARGET_TRIPLE=${{ matrix.target-triple }} 36 | PKG_FETCH_OPTION_a=${{ matrix.target-arch }} 37 | PKG_FETCH_OPTION_n=node${{ matrix.target-node }} 38 | PKG_FETCH_OPTION_p=alpine 39 | context: . 
40 | file: ./Dockerfile.alpine 41 | platforms: linux/amd64 42 | outputs: type=tar,dest=../out.tar 43 | 44 | - name: Extract binaries from Docker image 45 | run: | 46 | tar xvf ../out.tar root/pkg-fetch/dist 47 | 48 | - name: Check if binary is compiled 49 | id: check_file 50 | run: | 51 | (test -f root/pkg-fetch/dist/*.sha256sum && echo ::set-output name=EXISTS::true) || echo ::set-output name=EXISTS::false 52 | 53 | - uses: actions/upload-artifact@v2 54 | if: steps.check_file.outputs.EXISTS == 'true' 55 | with: 56 | name: node${{ matrix.target-node }}-alpine-${{ matrix.target-arch }} 57 | path: root/pkg-fetch/dist/* 58 | -------------------------------------------------------------------------------- /.github/workflows/build-linuxstatic.yml: -------------------------------------------------------------------------------- 1 | name: Build Node binaries for Linux static 2 | 3 | on: 4 | workflow_dispatch: 5 | workflow_call: 6 | 7 | jobs: 8 | linuxstatic: 9 | runs-on: ubuntu-20.04 10 | 11 | strategy: 12 | fail-fast: false 13 | matrix: 14 | target-node: [14, 16, 18, 19] 15 | target-arch: [x64, arm64, armv7] 16 | include: 17 | - target-arch: x64 18 | target-triple: x86_64-linux-musl 19 | host-arch: x86_64 20 | - target-arch: arm64 21 | target-triple: aarch64-linux-musl 22 | host-arch: x86_64 23 | - target-arch: armv7 24 | target-triple: armv7l-linux-musleabihf 25 | host-arch: i686 26 | 27 | steps: 28 | - uses: actions/checkout@v2 29 | 30 | - name: Set up Docker Buildx 31 | uses: docker/setup-buildx-action@v1 32 | 33 | - name: Build 34 | uses: docker/build-push-action@v2 35 | with: 36 | build-args: | 37 | HOST_ARCH=${{ matrix.host-arch }} 38 | TARGET_TRIPLE=${{ matrix.target-triple }} 39 | PKG_FETCH_OPTION_a=${{ matrix.target-arch }} 40 | PKG_FETCH_OPTION_n=node${{ matrix.target-node }} 41 | PKG_FETCH_OPTION_p=linuxstatic 42 | context: . 
43 | file: ./Dockerfile.alpine 44 | platforms: linux/amd64 45 | outputs: type=tar,dest=../out.tar 46 | 47 | - name: Extract binaries from Docker image 48 | run: | 49 | tar xvf ../out.tar root/pkg-fetch/dist 50 | 51 | - name: Check if binary is compiled 52 | id: check_file 53 | run: | 54 | (test -f root/pkg-fetch/dist/*.sha256sum && echo ::set-output name=EXISTS::true) || echo ::set-output name=EXISTS::false 55 | 56 | - uses: actions/upload-artifact@v2 57 | if: steps.check_file.outputs.EXISTS == 'true' 58 | with: 59 | name: node${{ matrix.target-node }}-linuxstatic-${{ matrix.target-arch }} 60 | path: root/pkg-fetch/dist/* 61 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pkg-fetch", 3 | "version": "3.5.2", 4 | "description": "Compiles and stores base binaries for pkg", 5 | "main": "lib-es5/index.js", 6 | "license": "MIT", 7 | "repository": "vercel/pkg-fetch", 8 | "bin": { 9 | "pkg-fetch": "lib-es5/bin.js" 10 | }, 11 | "files": [ 12 | "lib-es5/*.js", 13 | "lib-es5/*.ts", 14 | "lib-es5/*.json", 15 | "patches/*" 16 | ], 17 | "dependencies": { 18 | "chalk": "^4.1.2", 19 | "fs-extra": "^9.1.0", 20 | "https-proxy-agent": "^5.0.0", 21 | "node-fetch": "^2.6.6", 22 | "progress": "^2.0.3", 23 | "semver": "^7.3.5", 24 | "tar-fs": "^2.1.1", 25 | "yargs": "^16.2.0" 26 | }, 27 | "devDependencies": { 28 | "@types/fs-extra": "^9.0.13", 29 | "@types/node": "^14.17.32", 30 | "@types/node-fetch": "^2.5.12", 31 | "@types/progress": "^2.0.5", 32 | "@types/semver": "^7.3.9", 33 | "@types/tar-fs": "^2.0.1", 34 | "@types/yargs": "^16.0.4", 35 | "@typescript-eslint/eslint-plugin": "^4.33.0", 36 | "@typescript-eslint/parser": "^4.33.0", 37 | "eslint": "^7.32.0", 38 | "eslint-config-airbnb-base": "^14.2.1", 39 | "eslint-config-prettier": "^8.3.0", 40 | "eslint-import-resolver-typescript": "^2.5.0", 41 | "eslint-plugin-import": "^2.25.2", 42 | 
"lint-staged": "^10.5.4", 43 | "prettier": "^2.4.1", 44 | "rimraf": "^3.0.2", 45 | "simple-git-hooks": ">=2.7.0", 46 | "typescript": "^4.4.4" 47 | }, 48 | "scripts": { 49 | "build": "rimraf lib-es5 && tsc --build lib", 50 | "bin": "node lib-es5/bin.js", 51 | "lint": "eslint lib", 52 | "prepare": "npm run build", 53 | "prepublishOnly": "npm run lint", 54 | "start": "node lib-es5/bin.js", 55 | "applyPatches": "node lib-es5/apply-patches.js", 56 | "printHashes": "node lib-es5/print-hashes.js", 57 | "updateExpected": "node lib-es5/generate-expected-shas.js" 58 | }, 59 | "prettier": { 60 | "singleQuote": true 61 | }, 62 | "simple-git-hooks": { 63 | "pre-commit": "npx lint-staged" 64 | }, 65 | "lint-staged": { 66 | "*.{js,css,md}": "prettier --write" 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /lib/log.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable no-underscore-dangle, no-console */ 2 | 3 | import Progress from 'progress'; 4 | import assert from 'assert'; 5 | import chalk from 'chalk'; 6 | 7 | class Log { 8 | debugMode = false; 9 | 10 | private bar?: Progress; 11 | 12 | private lines(lines?: string[] | string) { 13 | if (lines === undefined) { 14 | return; 15 | } 16 | 17 | if (!Array.isArray(lines)) { 18 | console.log(` ${lines}`); 19 | return; 20 | } 21 | 22 | for (const line of lines) { 23 | console.log(` ${line}`); 24 | } 25 | } 26 | 27 | debug(text: string, lines?: string[] | string) { 28 | if (!this.debugMode) { 29 | return; 30 | } 31 | 32 | console.log(`> ${chalk.green('[debug]')} ${text}`); 33 | this.lines(lines); 34 | } 35 | 36 | info(text: string, lines?: string[] | string) { 37 | console.log(`> ${text}`); 38 | this.lines(lines); 39 | } 40 | 41 | warn(text: string, lines?: string[] | string) { 42 | console.log(`> ${chalk.blue('Warning')} ${text}`); 43 | this.lines(lines); 44 | } 45 | 46 | error(text: Error | string, lines?: string[] | string) { 47 
| const message = text instanceof Error ? text.stack : text; 48 | console.log(`> ${chalk.red('Error!')} ${message}`); 49 | this.lines(lines); 50 | } 51 | 52 | enableProgress(text: string) { 53 | assert(!this.bar); 54 | 55 | text += ' '.repeat(35 - text.length); 56 | this.bar = new Progress(` ${text} [:bar] :percent`, { 57 | stream: process.stdout, 58 | width: 20, 59 | complete: '=', 60 | incomplete: ' ', 61 | total: 100, 62 | }); 63 | } 64 | 65 | showProgress(percentage: number) { 66 | if (!this.bar) { 67 | return; 68 | } 69 | 70 | this.bar.update(percentage / 100); 71 | } 72 | 73 | disableProgress() { 74 | if (!this.bar) { 75 | return; 76 | } 77 | 78 | // avoid empty line 79 | if (!this.bar.complete) { 80 | this.bar.terminate(); 81 | } 82 | 83 | delete this.bar; 84 | } 85 | } 86 | 87 | export const log = new Log(); 88 | 89 | class ReportedError extends Error { 90 | name = 'ReportedError'; 91 | 92 | wasReported = true; 93 | } 94 | 95 | export function wasReported(error?: string, lines?: string[] | string | string) { 96 | let reportedError = new ReportedError('No message'); 97 | 98 | if (typeof error === 'string') { 99 | log.error(error, lines); 100 | reportedError = new ReportedError(error); 101 | } 102 | 103 | return reportedError; 104 | } 105 | -------------------------------------------------------------------------------- /.github/workflows/build-linux.yml: -------------------------------------------------------------------------------- 1 | name: Build Node binaries for Linux 2 | 3 | on: 4 | workflow_dispatch: 5 | workflow_call: 6 | 7 | jobs: 8 | linux-x64: 9 | runs-on: ubuntu-20.04 10 | 11 | strategy: 12 | fail-fast: false 13 | matrix: 14 | target-node: [14, 16, 18, 19] 15 | 16 | steps: 17 | - uses: actions/checkout@v2 18 | 19 | - name: Set up Docker Buildx 20 | uses: docker/setup-buildx-action@v1 21 | 22 | - name: Build 23 | uses: docker/build-push-action@v2 24 | with: 25 | build-args: | 26 | PKG_FETCH_OPTION_n=node${{ matrix.target-node }} 27 | context: . 
28 | file: ./Dockerfile.linux 29 | platforms: linux/amd64 30 | outputs: type=tar,dest=../out.tar 31 | 32 | - name: Extract binaries from Docker image 33 | run: | 34 | tar xvf ../out.tar root/pkg-fetch/dist 35 | 36 | - name: Check if binary is compiled 37 | id: check_file 38 | run: | 39 | (test -f root/pkg-fetch/dist/*.sha256sum && echo ::set-output name=EXISTS::true) || echo ::set-output name=EXISTS::false 40 | 41 | - uses: actions/upload-artifact@v2 42 | if: steps.check_file.outputs.EXISTS == 'true' 43 | with: 44 | name: node${{ matrix.target-node }}-linux-x64 45 | path: root/pkg-fetch/dist/* 46 | 47 | linux-arm64: 48 | runs-on: ubuntu-20.04 49 | 50 | strategy: 51 | fail-fast: false 52 | matrix: 53 | target-node: [14, 16, 18, 19] 54 | 55 | steps: 56 | - uses: actions/checkout@v2 57 | 58 | - name: Set up Docker Buildx 59 | uses: docker/setup-buildx-action@v1 60 | 61 | - name: Build 62 | uses: docker/build-push-action@v2 63 | with: 64 | build-args: | 65 | TARGET_TOOLCHAIN_ARCH=aarch64 66 | PKG_FETCH_OPTION_a=arm64 67 | PKG_FETCH_OPTION_n=node${{ matrix.target-node }} 68 | context: . 
69 | file: ./Dockerfile.linuxcross 70 | platforms: linux/amd64 71 | outputs: type=tar,dest=../out.tar 72 | 73 | - name: Extract binaries from Docker image 74 | run: | 75 | tar xvf ../out.tar root/pkg-fetch/dist 76 | 77 | - name: Check if binary is compiled 78 | id: check_file 79 | run: | 80 | (test -f root/pkg-fetch/dist/*.sha256sum && echo ::set-output name=EXISTS::true) || echo ::set-output name=EXISTS::false 81 | 82 | - uses: actions/upload-artifact@v2 83 | if: steps.check_file.outputs.EXISTS == 'true' 84 | with: 85 | name: node${{ matrix.target-node }}-linux-arm64 86 | path: root/pkg-fetch/dist/* 87 | -------------------------------------------------------------------------------- /lib/utils.ts: -------------------------------------------------------------------------------- 1 | import fetch from 'node-fetch'; 2 | import crypto from 'crypto'; 3 | import fs from 'fs-extra'; 4 | import httpsProxyAgent from 'https-proxy-agent'; 5 | import path from 'path'; 6 | import { spawnSync, SpawnSyncOptions } from 'child_process'; 7 | import stream from 'stream'; 8 | 9 | import { coerce } from 'semver'; 10 | import { log, wasReported } from './log'; 11 | 12 | export async function downloadUrl(url: string, file: string): Promise { 13 | log.enableProgress(path.basename(file)); 14 | log.showProgress(0); 15 | 16 | const proxy = 17 | process.env.HTTPS_PROXY ?? 18 | process.env.https_proxy ?? 19 | process.env.HTTP_PROXY ?? 20 | process.env.http_proxy; 21 | 22 | const res = await fetch( 23 | url, 24 | proxy ? 
{ agent: httpsProxyAgent(proxy) } : undefined 25 | ); 26 | 27 | if (!res.ok) { 28 | log.disableProgress(); 29 | throw wasReported(`${res.status}: ${res.statusText}`); 30 | } 31 | 32 | const tempFile = `${file}.downloading`; 33 | fs.mkdirpSync(path.dirname(tempFile)); 34 | const ws = fs.createWriteStream(tempFile); 35 | 36 | const totalSize = Number(res.headers.get('content-length')); 37 | let currentSize = 0; 38 | 39 | res.body.on('data', (chunk: Buffer) => { 40 | if (totalSize != null && totalSize !== 0) { 41 | currentSize += chunk.length; 42 | log.showProgress((currentSize / totalSize) * 100); 43 | } 44 | }); 45 | res.body.pipe(ws); 46 | 47 | return new Promise((resolve, reject) => { 48 | stream.finished(ws, (err) => { 49 | if (err) { 50 | log.disableProgress(); 51 | fs.rmSync(tempFile); 52 | reject(wasReported(`${err.name}: ${err.message}`)); 53 | } else { 54 | log.showProgress(100); 55 | log.disableProgress(); 56 | fs.moveSync(tempFile, file); 57 | resolve(); 58 | } 59 | }); 60 | }); 61 | } 62 | 63 | export async function hash(filePath: string): Promise { 64 | return new Promise((resolve, reject) => { 65 | const resultHash = crypto.createHash('sha256'); 66 | const input = fs.createReadStream(filePath); 67 | 68 | input.on('error', (e) => { 69 | reject(e); 70 | }); 71 | 72 | input.on('readable', () => { 73 | const data = input.read(); 74 | if (data) { 75 | resultHash.update(data); 76 | } else { 77 | resolve(resultHash.digest('hex')); 78 | } 79 | }); 80 | }); 81 | } 82 | 83 | export async function plusx(file: string) { 84 | const s = await fs.stat(file); 85 | const newMode = s.mode | 64 | 8 | 1; 86 | if (s.mode === newMode) return; 87 | const base8 = newMode.toString(8).slice(-3); 88 | await fs.chmod(file, base8); 89 | } 90 | 91 | export async function spawn( 92 | command: string, 93 | args?: ReadonlyArray, 94 | options?: SpawnSyncOptions 95 | ): Promise { 96 | const { error } = spawnSync(command, args, options); 97 | if (error) { 98 | throw error; 99 | } 100 | } 
101 | 102 | export function nodeBinarySortFunction(a: string, b: string): number { 103 | const coercedVersionA = coerce(a); 104 | const coercedVersionB = coerce(b); 105 | if (coercedVersionA && coercedVersionB) { 106 | return coercedVersionA.compare(coercedVersionB); 107 | } 108 | return 0; 109 | } 110 | -------------------------------------------------------------------------------- /lib/verify.ts: -------------------------------------------------------------------------------- 1 | import { spawnSync } from 'child_process'; 2 | 3 | import { plusx } from './utils'; 4 | 5 | const script = ` 6 | var vm = require('vm'); 7 | var assert = require('assert'); 8 | var text = '(function () { return 42; })'; 9 | var cd, fn, result; 10 | var modules = process.versions.modules | 0; 11 | var v8 = process.versions.v8.split('.').slice(0, 2).join('.'); 12 | 13 | var s1 = new vm.Script(text, { filename: 's1', produceCachedData: true, sourceless: true }); 14 | assert(s1.cachedDataProduced); 15 | cd = s1.cachedData; 16 | 17 | var kCpuFeaturesOffset, cpuFeatures; 18 | 19 | if (modules === 14) { 20 | } else 21 | if (modules === 46 || modules === 48 || modules === 51) { 22 | kCpuFeaturesOffset = 0x0c; 23 | } else 24 | if (modules === 57) { 25 | if (v8 === '6.2') { 26 | kCpuFeaturesOffset = 0x0c; 27 | } else 28 | if (v8 === '5.8') { 29 | kCpuFeaturesOffset = 0x0c; 30 | } else { 31 | kCpuFeaturesOffset = 0x10; 32 | } 33 | } else 34 | if (modules === 59) { 35 | kCpuFeaturesOffset = 0x0c; 36 | } else 37 | if (modules === 64) { 38 | kCpuFeaturesOffset = 0x0c; 39 | } else 40 | if (modules === 72) { 41 | // no cpu features anymore 42 | } else 43 | if (modules === 79) { 44 | // no cpu features anymore 45 | } else 46 | if (modules === 83) { 47 | // no cpu features anymore 48 | } else { 49 | assert(false, modules); 50 | } 51 | 52 | if (modules >= 46 && // no cpu_features field in 0.12 53 | process.arch !== 'arm' && // non-zero features even in sourceless mode in arm 54 | modules < 72) { // no 
cpu_features field in 12+ 55 | cpuFeatures = cd.readUInt32LE(kCpuFeaturesOffset); 56 | assert(cpuFeatures === 0, 'CPU_FEATURES must be zero'); 57 | } 58 | 59 | var s2 = new vm.Script(undefined, { filename: 's2', cachedData: cd, sourceless: true }); 60 | fn = s2.runInThisContext(); 61 | result = fn(); 62 | assert.equal(result, 42); 63 | 64 | if (modules === 14) { 65 | } else 66 | if (modules === 46 || modules === 48 || 67 | modules === 51 || modules === 57 || modules === 59 || modules === 64) { 68 | var paddedPayloadOffset = 0x48; // see SerializedCodeData::Payload() 69 | var index = paddedPayloadOffset + 10; 70 | cd[index] ^= 0xf0; 71 | var s3 = new vm.Script(undefined, { filename: 's3', cachedData: cd, sourceless: true }); 72 | assert(s3.cachedDataRejected, 's3.cachedDataRejected must be true'); 73 | } else 74 | if (modules === 72) { 75 | } else 76 | if (modules === 79) { 77 | } else 78 | if (modules === 83) { 79 | } else { 80 | assert(false, modules); 81 | } 82 | 83 | var s4 = new vm.Script(text, { filename: 's4', produceCachedData: true }); 84 | assert(s4.cachedDataProduced, 's4.cachedDataProduced must be true'); 85 | cd = s4.cachedData; 86 | 87 | if (modules >= 46 && // no cpu_features field in 0.12 88 | process.arch !== 'arm' && // zero features even in non-sourceless mode in arm 89 | modules < 72) { // no cpu_features field in 12+ 90 | cpuFeatures = cd.readUInt32LE(kCpuFeaturesOffset); 91 | assert(cpuFeatures !== 0, 'CPU_FEATURES must be non-zero'); 92 | } 93 | 94 | console.log('ok'); 95 | `; 96 | 97 | export async function verify(local: string) { 98 | await plusx(local); 99 | spawnSync(local, ['-e', script], { 100 | env: { PKG_EXECPATH: 'PKG_INVOKE_NODEJS' }, 101 | stdio: 'inherit', 102 | }); 103 | } 104 | -------------------------------------------------------------------------------- /lib/system.ts: -------------------------------------------------------------------------------- 1 | import fs from 'fs'; 2 | import { spawnSync } from 'child_process'; 3 | 4 
| function getHostAbi() { 5 | return `m${process.versions.modules}`; 6 | } 7 | 8 | export function abiToNodeRange(abi: string) { 9 | if (/^m?14/.test(abi)) return 'node0.12'; 10 | if (/^m?46/.test(abi)) return 'node4'; 11 | if (/^m?47/.test(abi)) return 'node5'; 12 | if (/^m?48/.test(abi)) return 'node6'; 13 | if (/^m?51/.test(abi)) return 'node7'; 14 | if (/^m?57/.test(abi)) return 'node8'; 15 | if (/^m?59/.test(abi)) return 'node9'; 16 | if (/^m?64/.test(abi)) return 'node10'; 17 | if (/^m?67/.test(abi)) return 'node11'; 18 | if (/^m?72/.test(abi)) return 'node12'; 19 | if (/^m?79/.test(abi)) return 'node13'; 20 | if (/^m?83/.test(abi)) return 'node14'; 21 | return abi; 22 | } 23 | 24 | export function isValidNodeRange(nodeRange: string) { 25 | if (nodeRange === 'latest') return true; 26 | if (!/^node/.test(nodeRange)) return false; 27 | return true; 28 | } 29 | 30 | export function toFancyPlatform(platform: string) { 31 | if (platform === 'darwin') return 'macos'; 32 | if (platform === 'lin') return 'linux'; 33 | if (platform === 'mac') return 'macos'; 34 | if (platform === 'osx') return 'macos'; 35 | if (platform === 'win32') return 'win'; 36 | if (platform === 'windows') return 'win'; 37 | return platform; 38 | } 39 | 40 | function detectAlpine() { 41 | const { platform } = process; 42 | 43 | if (platform !== 'linux') { 44 | return false; 45 | } 46 | 47 | // https://github.com/sass/node-sass/issues/1589#issuecomment-265292579 48 | const ldd = spawnSync('ldd').stderr?.toString(); 49 | 50 | if (ldd == null) { 51 | return fs.readdirSync('/lib').some((file) => file.startsWith('libc.musl')); 52 | } 53 | 54 | if (/\bmusl\b/.test(ldd)) { 55 | return true; 56 | } 57 | 58 | const lddNode = spawnSync('ldd', [process.execPath]).stdout.toString(); 59 | return /\bmusl\b/.test(lddNode); 60 | } 61 | 62 | const isAlpine = detectAlpine(); 63 | 64 | function getHostPlatform() { 65 | const { platform } = process; 66 | 67 | if (isAlpine) { 68 | return 'alpine'; 69 | } 70 | 71 | 
return toFancyPlatform(platform); 72 | } 73 | 74 | function getKnownPlatforms() { 75 | return ['alpine', 'freebsd', 'linux', 'linuxstatic', 'macos', 'win']; 76 | } 77 | 78 | export function toFancyArch(arch: string) { 79 | if (arch === 'ia32') return 'x86'; 80 | if (arch === 'x86_64') return 'x64'; 81 | return arch; 82 | } 83 | 84 | function getArmUnameArch() { 85 | const uname = spawnSync('uname', ['-a']); 86 | 87 | if (uname.error) { 88 | return ''; 89 | } 90 | 91 | let unameOut = uname.stdout && uname.stdout.toString(); 92 | unameOut = (unameOut || '').toLowerCase(); 93 | 94 | if (unameOut.includes('aarch64')) return 'arm64'; 95 | if (unameOut.includes('arm64')) return 'arm64'; 96 | if (unameOut.includes('armv7')) return 'armv7'; 97 | 98 | return ''; 99 | } 100 | 101 | function getArmHostArch() { 102 | const cpu = fs.readFileSync('/proc/cpuinfo', 'utf8'); 103 | 104 | if (cpu.indexOf('vfpv3') >= 0) { 105 | return 'armv7'; 106 | } 107 | 108 | let name = cpu.split('model name')[1]; 109 | 110 | if (name) [, name] = name.split(':'); 111 | if (name) [name] = name.split('\n'); 112 | if (name && name.indexOf('ARMv7') >= 0) return 'armv7'; 113 | 114 | return 'armv6'; 115 | } 116 | 117 | function getHostArch() { 118 | const { arch } = process; 119 | 120 | if (arch === 'arm') { 121 | return getArmUnameArch() || getArmHostArch(); 122 | } 123 | 124 | return toFancyArch(arch); 125 | } 126 | 127 | function getTargetArchs() { 128 | const arch = getHostArch(); 129 | 130 | if (arch === 'x64') { 131 | return ['x64', 'x86']; 132 | } 133 | 134 | return [arch]; 135 | } 136 | 137 | function getKnownArchs() { 138 | return ['x64', 'x86', 'armv7', 'arm64', 'ppc64', 's390x']; 139 | } 140 | 141 | export const hostAbi = getHostAbi(); 142 | export const hostPlatform = getHostPlatform(); 143 | export const knownPlatforms = getKnownPlatforms(); 144 | export const hostArch = getHostArch(); 145 | export const targetArchs = getTargetArchs(); 146 | export const knownArchs = getKnownArchs(); 147 | 
-------------------------------------------------------------------------------- /.github/workflows/build-all.yml: -------------------------------------------------------------------------------- 1 | name: Build All Latest Assets 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | newRelease: 7 | description: 'Upload assets to new draft release?' 8 | default: true 9 | type: boolean 10 | 11 | jobs: 12 | build-alpine: 13 | uses: ./.github/workflows/build-alpine.yml 14 | build-linux: 15 | uses: ./.github/workflows/build-linux.yml 16 | build-linuxstatic: 17 | uses: ./.github/workflows/build-linuxstatic.yml 18 | build-macos: 19 | uses: ./.github/workflows/build-macos.yml 20 | build-windows: 21 | uses: ./.github/workflows/build-windows.yml 22 | 23 | collect-artifacts: 24 | permissions: 25 | contents: write 26 | runs-on: ubuntu-latest 27 | needs: [build-alpine, build-linux, build-linuxstatic, build-macos, build-windows] 28 | steps: 29 | - run: echo Is making new release? '${{ inputs.newRelease }}' 30 | - name: Checkout 31 | uses: actions/checkout@v3 32 | 33 | - uses: actions/download-artifact@v3 34 | with: 35 | path: downloaded-artifacts 36 | 37 | - name: Get previous release tag 38 | env: 39 | GITHUB_TOKEN: ${{ github.token }} 40 | id: get_previous_release 41 | run: | 42 | PREV_RELEASE=$(gh release list --limit 1 --exclude-drafts --exclude-pre-releases | cut -f3) 43 | echo "Using release ${PREV_RELEASE}" 44 | echo "prev_release=${PREV_RELEASE}" >> $GITHUB_OUTPUT 45 | 46 | - name: Get previously released artifacts 47 | if: ${{ inputs.newRelease }} 48 | env: 49 | GITHUB_TOKEN: ${{ github.token }} 50 | run: | 51 | gh release download ${{ steps.get_previous_release.outputs.prev_release }} --dir release-artifacts 52 | 53 | - name: Create artifact folders 54 | run: mkdir artifact-binaries && mkdir artifact-shas 55 | 56 | - name: Copy previous release artifacts to artifact folders 57 | if: ${{ inputs.newRelease }} 58 | run: | 59 | pushd release-artifacts 60 | pwd 61 | ls -la 62 
| mv *.sha256sum ../artifact-shas/. 63 | mv * ../artifact-binaries/. 64 | popd 65 | 66 | - name: Copy current workflow artifacts to artifact folders 67 | run: | 68 | pushd downloaded-artifacts 69 | pwd 70 | ls -la 71 | for f in $(ls); do 72 | mv $f/*.sha256sum ../artifact-shas/. 73 | mv $f/* ../artifact-binaries/. 74 | done 75 | popd 76 | 77 | - name: Check SHAs 78 | run: | 79 | cd artifact-binaries 80 | ls ../artifact-shas/*.sha256sum | xargs sha256sum --check 81 | 82 | - name: Generate SHA summary 83 | id: generate_sha_file 84 | run: | 85 | echo "---" >> $GITHUB_STEP_SUMMARY 86 | echo "Update $(date -u +%F) $(date -u +%T) GMT/UTC" >> $GITHUB_STEP_SUMMARY 87 | echo "### SHAs of produced and carried forward binaries by this workflow" >> $GITHUB_STEP_SUMMARY 88 | echo " - $GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" >> $GITHUB_STEP_SUMMARY 89 | echo '```' >> $GITHUB_STEP_SUMMARY 90 | sha_output_file=${GITHUB_SHA}_${RANDOM}_shas.txt 91 | echo "sha_output_file=${sha_output_file}" >> $GITHUB_OUTPUT 92 | cat artifact-shas/*.sha256sum > ${sha_output_file} 93 | cat artifact-shas/*.sha256sum >> $GITHUB_STEP_SUMMARY 94 | echo '```' >> $GITHUB_STEP_SUMMARY 95 | # Get a random string of characters to represent the EOF delimiter 96 | EOF=$(dd if=/dev/urandom bs=15 count=1 status=none | base64) 97 | echo "sha_summary<<$EOF" >> $GITHUB_ENV 98 | cat $GITHUB_STEP_SUMMARY >> $GITHUB_ENV 99 | echo "$EOF" >> $GITHUB_ENV 100 | 101 | - name: Determine release tag to upload assets to and draft type 102 | run: | 103 | if [[ "${{ inputs.newRelease }}" == "false" ]]; then 104 | echo "use_release_tag=${{ steps.get_previous_release.outputs.prev_release }}" >> $GITHUB_ENV 105 | echo "create_draft=false" >> $GITHUB_ENV 106 | else 107 | echo "use_release_tag=draft_release_${{ github.sha }}" >> $GITHUB_ENV 108 | echo "create_draft=true" >> $GITHUB_ENV 109 | fi 110 | 111 | - name: Add binaries to release 112 | id: create_release 113 | uses: 
softprops/action-gh-release@d4e8205d7e959a9107da6396278b2f1f07af0f9b 114 | with: 115 | token: ${{ github.token }} 116 | draft: ${{ env.create_draft }} 117 | tag_name: ${{ env.use_release_tag }} 118 | files: | 119 | artifact-shas/* 120 | artifact-binaries/* 121 | body: "${{ env.sha_summary }}" 122 | generate_release_notes: true 123 | append_body: true 124 | 125 | - name: Add release url to summary 126 | run: echo "Release created/updated at ${{ steps.create_release.outputs.url }}" >> $GITHUB_STEP_SUMMARY 127 | -------------------------------------------------------------------------------- /lib/index.ts: -------------------------------------------------------------------------------- 1 | import fs from 'fs-extra'; 2 | import path from 'path'; 3 | import semver from 'semver'; 4 | 5 | import { EXPECTED_HASHES } from './expected'; 6 | import { 7 | abiToNodeRange, 8 | hostPlatform, // eslint-disable-line no-duplicate-imports 9 | isValidNodeRange, 10 | knownArchs, 11 | toFancyArch, 12 | toFancyPlatform, 13 | } from './system'; 14 | import * as system from './system'; 15 | import { localPlace, remotePlace, Remote } from './places'; 16 | import { log, wasReported } from './log'; 17 | import build from './build'; 18 | import { downloadUrl, hash, plusx } from './utils'; 19 | import patchesJson from '../patches/patches.json'; 20 | import { version } from '../package.json'; 21 | 22 | async function download( 23 | { tag, name }: Remote, 24 | local: string 25 | ): Promise { 26 | const url = `https://github.com/vercel/pkg-fetch/releases/download/${tag}/${name}`; 27 | 28 | try { 29 | await downloadUrl(url, local); 30 | await plusx(local); 31 | } catch { 32 | return false; 33 | } 34 | 35 | return true; 36 | } 37 | 38 | async function exists(file: string) { 39 | try { 40 | await fs.stat(file); 41 | return true; 42 | } catch (error) { 43 | return false; 44 | } 45 | } 46 | 47 | interface NeedOptions { 48 | forceFetch?: boolean; 49 | forceBuild?: boolean; 50 | dryRun?: boolean; 51 | 
output?: string; 52 | nodeRange: string; 53 | platform: string; 54 | arch: string; 55 | } 56 | 57 | export function satisfyingNodeVersion(nodeRange: string) { 58 | const versions = Object.keys(patchesJson) 59 | .filter((nv) => semver.satisfies(nv, nodeRange) || nodeRange === 'latest') 60 | .sort((nv1, nv2) => (semver.gt(nv1, nv2) ? 1 : -1)); 61 | 62 | const nodeVersion = versions.pop(); 63 | 64 | if (!nodeVersion) { 65 | throw wasReported( 66 | `No available node version satisfies '${nodeRange}'` 67 | ); 68 | } 69 | 70 | return nodeVersion; 71 | } 72 | 73 | export function getNodeVersion(nodeRange: string) { 74 | nodeRange = abiToNodeRange(nodeRange); // 'm48' -> 'node6' 75 | 76 | if (!isValidNodeRange(nodeRange)) { 77 | throw wasReported("nodeRange must start with 'node'"); 78 | } 79 | 80 | if (nodeRange !== 'latest') { 81 | nodeRange = `v${nodeRange.slice(4)}`; // 'node6' -> 'v6' for semver 82 | } 83 | 84 | const nodeVersion = satisfyingNodeVersion(nodeRange); 85 | return nodeVersion; 86 | } 87 | 88 | 89 | export async function need(opts: NeedOptions) { 90 | // eslint-disable-line complexity 91 | const { forceFetch, forceBuild, dryRun, output, nodeRange } = opts || {}; 92 | let { platform, arch } = opts || {}; 93 | 94 | if (!nodeRange) throw wasReported('nodeRange not specified'); 95 | if (!platform) throw wasReported('platform not specified'); 96 | if (!arch) throw wasReported('arch not specified'); 97 | 98 | platform = toFancyPlatform(platform); // win32 -> win 99 | arch = toFancyArch(arch); // ia32 -> x86 100 | 101 | const nodeVersion = getNodeVersion(nodeRange); 102 | 103 | const fetched = localPlace({ 104 | from: 'fetched', 105 | arch, 106 | nodeVersion, 107 | platform, 108 | version, 109 | output, 110 | }); 111 | const built = localPlace({ 112 | from: 'built', 113 | arch, 114 | nodeVersion, 115 | platform, 116 | version, 117 | output, 118 | }); 119 | const remote = remotePlace({ arch, nodeVersion, platform, version }); 120 | 121 | let fetchFailed; 122 | 123 
| if (!forceBuild) { 124 | if (await exists(fetched)) { 125 | if (dryRun) { 126 | return 'exists'; 127 | } 128 | 129 | if ((await hash(fetched)) === EXPECTED_HASHES[remote.name]) { 130 | return fetched; 131 | } 132 | 133 | log.info('Binary hash does NOT match. Re-fetching...'); 134 | fs.unlinkSync(fetched); 135 | } 136 | } 137 | 138 | if (!forceFetch) { 139 | if (await exists(built)) { 140 | if (dryRun) return 'exists'; 141 | if (forceBuild) log.info('Reusing base binaries built locally:', built); 142 | 143 | return built; 144 | } 145 | } 146 | 147 | if (!forceBuild) { 148 | if (dryRun) return 'fetched'; 149 | 150 | if (await download(remote, fetched)) { 151 | if ((await hash(fetched)) === EXPECTED_HASHES[remote.name]) { 152 | return fetched; 153 | } 154 | 155 | fs.unlinkSync(fetched); 156 | throw wasReported('Binary hash does NOT match.'); 157 | } 158 | 159 | fetchFailed = true; 160 | } 161 | 162 | if (!dryRun && fetchFailed) { 163 | log.info('Not found in remote cache:', JSON.stringify(remote)); 164 | if (forceFetch) { 165 | throw wasReported(`Failed to fetch.`); 166 | } 167 | } 168 | 169 | if (!dryRun) { 170 | log.info('Building base binary from source:', path.basename(built)); 171 | } 172 | 173 | if (hostPlatform !== platform) { 174 | if (hostPlatform !== 'alpine' || platform !== 'linuxstatic') { 175 | throw wasReported( 176 | `Not able to build for '${opts.platform}' here, only for '${hostPlatform}'` 177 | ); 178 | } 179 | } 180 | 181 | if (knownArchs.indexOf(arch) < 0) { 182 | throw wasReported( 183 | `Unknown arch '${opts.arch}'. 
Specify ${knownArchs.join(', ')}` 184 | ); 185 | } 186 | 187 | if (dryRun) { 188 | return 'built'; 189 | } 190 | 191 | await build(nodeVersion, arch, platform, built); 192 | return built; 193 | } 194 | 195 | export { system }; 196 | -------------------------------------------------------------------------------- /lib/expected-shas.json: -------------------------------------------------------------------------------- 1 | { 2 | "node-v8.17.0-alpine-arm64": "807df81524ec8e1e266ac7fbed434c6b2281ae20b9fa7eaa524de90f3330c4d7", 3 | "node-v8.17.0-alpine-x64": "83a3914de57ee6be7d68ebaac8a10c1a2972d554800f1bee218cc4a23650e0fb", 4 | "node-v8.17.0-linux-arm64": "f06855896bfa10bead1f08fac080305fb6fbfb2cc691168a3f0f0e834e12bfba", 5 | "node-v8.17.0-linux-x64": "14d75d43de1ff86469d354bf42a83b9494e09502fa7bc23a975e2cb82b1608b0", 6 | "node-v8.17.0-linuxstatic-arm64": "84de8fe30b2bd1dcb3615cf1d1b538aa48e1fcf66620ef97dce6b7ae85b45025", 7 | "node-v8.17.0-linuxstatic-x64": "5206878079f160e75a02ad33b7559b4a869e8181ee03d51d7211b52995f9ca7b", 8 | "node-v8.17.0-macos-x64": "dffa71e39100f4daa57de73fda7b4debecd09f552b15cf11854c8475380d3817", 9 | "node-v8.17.0-win-x64": "4556a06dc59a0196453ba5962ea077ea71fe566e4de1c92f73f057446d422251", 10 | "node-v10.24.1-alpine-arm64": "f6a59f1ace2ef1f4bf976ff85d9a74bdc71bda098e8aa018e2a089c590aeedea", 11 | "node-v10.24.1-alpine-x64": "24f862b22a59ac0adb746d5bd3f2226c8eb2f6e1565a1cb4d2418c9cb0f3769e", 12 | "node-v10.24.1-linux-arm64": "e3a3e5197e3f3d1063e3178786890b29493b8dfc31362f3417cce90ca7eb3e98", 13 | "node-v10.24.1-linux-x64": "c59574f4ea7b1423bd7ef586887ea41c43cfb2a63431126af0de20291a3a94db", 14 | "node-v10.24.1-linuxstatic-arm64": "01bc2cfbf7a7dd1a74201ae34a6cfafc1ad1c6d77039f587111738a81637bc5b", 15 | "node-v10.24.1-linuxstatic-armv7": "4933be03e394a82f5aa5fc4600b29392b2ce7eac24bd2ef8759f9a8e8c841990", 16 | "node-v10.24.1-linuxstatic-x64": "a7bbd62b712b3a7ac54953b646f0802e84bc7ecadb0b8a0756323fcffe3310a5", 17 | "node-v10.24.1-macos-x64": 
"f2e4679262a1cc6f3213cc4f0453d662e48c021975b651534fcbf26d6fdab474", 18 | "node-v10.24.1-win-x64": "958647af177a9089bb4f3495e352d5348a1b42858d0111004ca26c3a2ece3f73", 19 | "node-v12.22.11-alpine-arm64": "0933ab559bb34c720f0a7e0066f32608960a4d6290977c3af15529f7abfe7265", 20 | "node-v12.22.11-alpine-x64": "2ec8d7b761f03b2172bcf3b1b56c648e844116ab08a5484d7932138c374cf18c", 21 | "node-v12.22.11-linux-arm64": "3a50d85ebd5ba7e1e62165b9df237925789ef9ed0ed92fd9d0f3a9df7503f751", 22 | "node-v12.22.11-linux-x64": "617d58e81711d3a1d34a737db39e751caa05040a1a586e6dd688241cfb3f3eed", 23 | "node-v12.22.11-linuxstatic-arm64": "0c5b03cbe32ce50f16dbb35769a2a897b30e8fdb2137c4799edb55898b475622", 24 | "node-v12.22.11-linuxstatic-x64": "37714fc3ae8b1d0c92b124f8ab353c77e40494075646e43ce8e20bd4038b5b83", 25 | "node-v12.22.11-macos-x64": "5394093f0fd2bb5ea38ee2a5eaec9e00d3d1da9e3f7c3c99c68eecfe17354286", 26 | "node-v12.22.11-win-x64": "24bedd07eb0cad64d505ec731c438765370bbed32d8e1f47129fe3612fadfcdb", 27 | "node-v14.21.3-alpine-arm64": "801f71544a48e92554d61291b63ed8addd19921075826d0ef299819a9300f996", 28 | "node-v14.21.3-alpine-x64": "62882ceabf13342eb51c5323a11e4be736bec651646071adf93e40fd02125919", 29 | "node-v14.21.3-linux-arm64": "a4c46261e179775268528f0693be44922768cff96afe5f194748c519405fcec4", 30 | "node-v14.21.3-linux-x64": "b0b1fbf8e0caa5483f5538fccc90d9c681824c26f6b3008dacc5b6928881bf39", 31 | "node-v14.21.3-linuxstatic-arm64": "d14f9dccf9c861a14a4491a8f5796c3a8cce914d36a8b802b7b72bd4dd1acb1a", 32 | "node-v14.21.3-linuxstatic-armv7": "183dd014d294ad418fb0902d3fa91dc5d780b4ae5e2e58bd77c52c60fc79be3f", 33 | "node-v14.21.3-linuxstatic-x64": "b941d79858ac45ce1400439d32fd5eba6d339417ff341491a78b5ad6fe799450", 34 | "node-v14.21.3-macos-x64": "0cff926be3b4d4ef31b2daac7c8b191c2bcc961ea90692c2c245d9a694c98832", 35 | "node-v14.21.3-win-arm64": "dfb0f0b94d50f0530aefe93b6333c4caeec34c683c29f9631555c1501d80cc98", 36 | "node-v14.21.3-win-x64": 
"223dc070ebf5d4528ac2f276ddf69bc83f58e68e2cc9067020c62c07d5ad4e90", 37 | "node-v16.19.1-alpine-arm64": "3020ec066271c71d72479181ac7ec24b5186df61e16b682fb7b060ad2a9a363a", 38 | "node-v16.19.1-alpine-x64": "38e6561457ee35634a071417b1234ce0e7694e45088763a4da5de915fbb02f68", 39 | "node-v16.19.1-linux-arm64": "98f0dda14f5037dafb35617de34c1f772f2b3c2e04955be62cdf125100d0962c", 40 | "node-v16.19.1-linux-x64": "07e151674f4f4a2c6641b8c818f258067f1dbd8d68f3648663c606afea34e576", 41 | "node-v16.19.1-linuxstatic-arm64": "2c40b15e6a3d40ae822bbefe5b269e15a30952be793a30a418b87eae7c1ebdff", 42 | "node-v16.19.1-linuxstatic-armv7": "8dd5d18557fb602070d1eca72a5b73d3b8167e6f3c951e0bd6aed5376597c31d", 43 | "node-v16.19.1-linuxstatic-x64": "f4ac69917aca69694101cda07902dff6d5f99bd2e62e6f72dedd045e67f11066", 44 | "node-v16.19.1-macos-x64": "9396df4caf2fdbc40bf878ef44a73e09aac6e13411dccbd8bb9b63b5a4517ca7", 45 | "node-v16.19.1-win-arm64": "a2901e03d4e3f2cfded37598160075a8867fc4147a71984477cf72250ae292db", 46 | "node-v16.19.1-win-x64": "549e8bd07950d58a325938d004936f43a99a6bcc12139ced6a2d67f6b09470c1", 47 | "node-v18.15.0-alpine-arm64": "cc758d91840c5a8893fcd7f696be38aa227e5e6036b28aeb29ebbe9efe4c55ef", 48 | "node-v18.15.0-alpine-x64": "be0b5f11290adb0eae8ab32907dae1334ee43d3023bb9870eb98515a47aa7031", 49 | "node-v18.15.0-linux-arm64": "d4420e35dd1075a23db48874d14a596503e4c7223e515b620b5036b2916ab128", 50 | "node-v18.15.0-linux-x64": "a4735a8e58c7269985c15cb9b36a700cf778e720828ee79ce6ed45982e42993e", 51 | "node-v18.15.0-linuxstatic-arm64": "60d4ac648a0782f6d8e83b79262ca7ead87f04cb2f9dce187c1e90827c4fadbe", 52 | "node-v18.15.0-linuxstatic-armv7": "3cc2753714271de3eb02e584ffa199ba33a3f72ed22647900a4c85d743c39272", 53 | "node-v18.15.0-linuxstatic-x64": "7a2dde6a5e1e9f711d100d0ae019ba785de5968bb3daa4ae658135f766f667c1", 54 | "node-v18.15.0-macos-x64": "13cc043442af8f110836e7a4abcfc4ba5cf1d9568564485f018fd93d688291e1", 55 | "node-v18.15.0-win-arm64": 
"d66ac1fc6bfeb25dc538f13e284d71b12684d02913eb5e666d21b2b245ffac74", 56 | "node-v18.15.0-win-x64": "1729a85e503f54ba5345d8ff10552e8706fb231f7be635e67367cb5205e0a1b4", 57 | "node-v19.8.1-alpine-arm64": "2c93c7b842be25bc6110b31278da9fd5699a42323d9e869b3d5019e15ba6724c", 58 | "node-v19.8.1-alpine-x64": "5d7944fe93d3221ca4c4a3651af2af79349a2a5b9c424c12867f44d14feefe6d", 59 | "node-v19.8.1-linux-arm64": "2ca0a267ca5fd44e29fef5a4c205e49af98a623d959958c3e66f1cf5d291bb1b", 60 | "node-v19.8.1-linux-x64": "0060edcd98f924dcac284841d941ce7aabc39ef1fd08c068ec4e24acaaa16220", 61 | "node-v19.8.1-linuxstatic-arm64": "7f4d7a8349ca0f6f8e329a86e590b8c2c7936fcfeb33c65c54315c6e534eb6c0", 62 | "node-v19.8.1-linuxstatic-armv7": "0a4b44593c11a7c5161556a35c63a83581b6fd93ab1360d4f37cdb1d642d57ed", 63 | "node-v19.8.1-linuxstatic-x64": "f995f279381bb4ff5f74f3a14086c5a3cfaa9caab60bcdf6d5278479140e3760", 64 | "node-v19.8.1-macos-x64": "df526135f58d3728f603daefc75e5173e0eb03b2cdf4341bf3bcdbd236705ae0", 65 | "node-v19.8.1-win-arm64": "69dc4c5ae7e590282c57f85db5ef9109437b600fdebdd64927a44567a8b926af", 66 | "node-v19.8.1-win-x64": "bc60b74070057e830e9d5f2d16dfae78e2a1faed9bf29fd8e55c81da21cd0309" 67 | } 68 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # `pkg-fetch` 2 | 3 | > [!IMPORTANT] 4 | > `pkg-fetch` has been deprecated with `3.5` as the last release. There are a number of successful forked versions of `pkg-fetch` already with various feature additions. Further, we’re excited about Node.js 21’s support for [single executable applications](https://nodejs.org/api/single-executable-applications.html). Thank you for the support and contributions over the years. The repository will remain open and archived. 5 | 6 | A utility to fetch or build patched Node binaries used by [pkg](https://github.com/vercel/pkg) to generate executables. 
This repo hosts prebuilt binaries in [Releases](https://github.com/vercel/pkg-fetch/releases). 7 | 8 | ## Binary Compatibility 9 | 10 | | Node | Platform | Architectures | Minimum OS version | 11 | | --------------------------------------------------------------------------------- | ----------- | ------------------------- | --------------------------------------------------------------------------------- | 12 | | 8[1](#fn1), 10[1](#fn1), 12[1](#fn1), 14, 16, 18 | alpine | x64, arm64 | 3.7.3, other distros with musl libc >= 1.1.18 | 13 | | 8[1](#fn1), 10[1](#fn1), 12[1](#fn1), 14, 16, 18 | linux | x64 | Enterprise Linux 7, Ubuntu 14.04, Debian jessie, other distros with glibc >= 2.17 | 14 | | 8[1](#fn1), 10[1](#fn1), 12[1](#fn1), 14, 16, 18 | linux | arm64 | Enterprise Linux 8, Ubuntu 18.04, Debian buster, other distros with glibc >= 2.27 | 15 | | 8[1](#fn1), 10[1](#fn1), 12[1](#fn1), 14, 16, 18 | linuxstatic | x64, arm64 | Any distro with Linux Kernel >= 2.6.32 (>= 3.10 strongly recommended) | 16 | | 16, 18 | linuxstatic | armv7[2](#fn2) | Any distro with Linux Kernel >= 2.6.32 (>= 3.10 strongly recommended) | 17 | | 8[1](#fn1), 10[1](#fn1), 12[1](#fn1), 14, 16, 18 | macos | x64 | 10.13 | 18 | | 14, 16, 18 | macos | arm64[3](#fn3) | 11.0 | 19 | | 8[1](#fn1), 10[1](#fn1), 12[1](#fn1), 14, 16, 18 | win | x64 | 8.1 | 20 | | 14, 16, 18 | win | arm64 | 10 | 21 | 22 | [1]: end-of-life, may be removed in the next major release. 23 | 24 | [2]: best-effort basis, not semver-protected. 25 | 26 | [3]: [mandatory code signing](https://developer.apple.com/documentation/macos-release-notes/macos-big-sur-11_0_1-universal-apps-release-notes) is enforced by Apple. 27 | 28 | ## Security 29 | 30 | We do not expect this project to have vulnerabilities of its own. 
Nonetheless, as this project distributes prebuilt Node.js binaries, 31 | 32 | **Node.js security vulnerabilities affect binaries distributed by this project, as well.** 33 | 34 | Like most of you, this project does not have access to advance/private disclosures of Node.js security vulnerabilities. We can only closely monitor the **public** security advisories from the Node.js team. It takes time to build and release a new set of binaries, once a new Node.js version has been released. 35 | 36 | **It is possible for this project to fall victim to a supply chain attack.** 37 | 38 | This project deploys multiple defense measures to ensure that the safe binaries are delivered to users: 39 | 40 | - Binaries are compiled by [Github Actions](https://github.com/vercel/pkg-fetch/actions) 41 | - Workflows and build logs are transparent and auditable. 42 | - Artifacts are the source of truth. Even repository/organization administrators can't tamper with them. 43 | - Hashes of binaries are hardcoded in [source](https://github.com/vercel/pkg-fetch/blob/HEAD/lib/expected.ts) 44 | - Origins of the binaries are documented. 45 | - Changes to the binaries are logged by VCS (Git) and are publicly visible. 46 | - `pkg-fetch` rejects the binary if it does not match the hardcoded hash. 47 | - GPG-signed hashes are available in [Releases](https://github.com/vercel/pkg-fetch/releases) 48 | - Easy to spot a compromise. 49 | - `pkg-fetch` package on npm is strictly permission-controlled 50 | - Only authorized Vercel employees can push new revisions to npm. 51 | 52 | ## Contributing Updates to Patches 53 | 54 | ### Example workflow for applying patches to a new version of Node.js (18.13.0) 55 | 56 | 1. Clone Node.js as a sibling to your current `pkg-fetch` clone 57 | 58 | - `git clone https://github.com/nodejs/node.git` 59 | - `cd node` 60 | 61 | 2. Checkout the tag you wish to generate a patch for 62 | 63 | - `git checkout v18.13.0` 64 | 65 | 3. Attempt to apply the closest patch (e.g.
applying the existing patch for 66 | 18.12.1 when trying to generate a new patch for 18.13.0) 67 | 68 | - `git apply ..\pkg-fetch\patches\node.v18.12.1.cpp.patch --reject` 69 | 70 | 4. If no rejects, great! you are ready to make your new patch file. 71 | 72 | - `git add -A` 73 | - `git diff --staged --src-prefix=node/ --dst-prefix=node/ > ..\pkg-fetch\patches\node.v18.13.0.cpp.patch` 74 | 75 | 5. If rejects exist, resolve them yourself, and ensure all changes are saved, 76 | and repeat step 4 to export the patch file 77 | 78 | #### Resolving Rejects 79 | 80 | Usually when a patch is rejected, it's because the context around the changes 81 | was refactored slightly since the last patched version. This is not usually 82 | complicated to resolve, but requires a human to interpret the changes since the 83 | last version `pkg` was patched against, compared with the version you wish to 84 | create a patch for. 85 | 86 | One method is to pull up the diff for the file where the rejects apply for the 87 | changes between the last tag (e.g. v18.12.1 to use the previous example) and the 88 | tag you want a patch for (e.g. v18.13.0 to use the previous example). Alongside 89 | this, have the `.rej` file and go through each rejected hunk and use 90 | your best judgement to determine how it should apply against the new tag. 91 | 92 | Save your results, and export the overall git diff with the commands from the 93 | example above. 94 | 95 | ### Checking that patches apply cleanly 96 | 97 | The expectation is that a patch applies cleanly, with no delta or offsets from 98 | the source repo. 99 | 100 | When making a change to a patch file, it is possible to apply that patch without 101 | building by running 102 | 103 | `yarn applyPatches --node-range node18` 104 | 105 | where the `--node-range` can be specified to apply patches for the version of 106 | node for which you are updating patches.
If unspecified, the latest node version 107 | in [patches.json](./patches/patches.json) will be used. 108 | 109 | Ultimately, the patch should result in a fully functional node binary, but the 110 | `applyPatches` script can be used to quickly iterate just the application of 111 | the patches you are updating without needing to wait for the full build to 112 | complete. 113 | 114 | ## Building a Binary Locally 115 | 116 | You can use the `yarn start` script to build the binary locally, which is helpful 117 | when updating patches to ensure functionality before pushing patch updates for 118 | review. 119 | 120 | For example: 121 | 122 | `yarn start --node-range node18 --arch x64 --output dist` 123 | -------------------------------------------------------------------------------- /lib/build.ts: -------------------------------------------------------------------------------- 1 | import { createGunzip } from 'zlib'; 2 | import crypto from 'crypto'; 3 | import fs from 'fs-extra'; 4 | import os from 'os'; 5 | import path from 'path'; 6 | import { pipeline } from 'stream'; 7 | import { promisify } from 'util'; 8 | import tar from 'tar-fs'; 9 | 10 | import { cachePath } from './places'; 11 | import { downloadUrl, hash, spawn } from './utils'; 12 | import { hostArch, hostPlatform } from './system'; 13 | import { log, wasReported } from './log'; 14 | import patchesJson from '../patches/patches.json'; 15 | 16 | const buildPath = path.resolve( 17 | process.env.PKG_BUILD_PATH || 18 | path.join(os.tmpdir(), `pkg.${crypto.randomBytes(12).toString('hex')}`) 19 | ); 20 | const nodePath = path.join(buildPath, 'node'); 21 | const patchesPath = path.resolve(__dirname, '../patches'); 22 | 23 | const nodeRepo = 'https://nodejs.org/dist'; 24 | const nodeArchivePath = path.join(cachePath, 'node'); 25 | 26 | function getMajor(nodeVersion: string) { 27 | const [, version] = nodeVersion.match(/^v?(\d+)/) || ['', 0]; 28 | return Number(version) | 0; 29 | } 30 | 31 | function 
getConfigureArgs(major: number, targetPlatform: string): string[] { 32 | const args: string[] = []; 33 | 34 | // first of all v8_inspector introduces the use 35 | // of `prime_rehash_policy` symbol that requires 36 | // GLIBCXX_3.4.18 on some systems 37 | // also we don't support any kind of debugging 38 | // against packaged apps, hence v8_inspector is useless 39 | args.push('--without-inspector'); 40 | 41 | if (hostPlatform === 'alpine') { 42 | // Statically Link against libgcc and libstdc++ libraries. See vercel/pkg#555. 43 | // libgcc and libstdc++ grant GCC Runtime Library Exception of GPL 44 | args.push('--partly-static'); 45 | } 46 | 47 | if (targetPlatform === 'linuxstatic') { 48 | args.push('--fully-static'); 49 | } 50 | 51 | // Link Time Optimization 52 | if (major >= 12) { 53 | if (hostPlatform !== 'win') { 54 | args.push('--enable-lto'); 55 | } 56 | } 57 | 58 | // production binaries do NOT take NODE_OPTIONS from end-users 59 | args.push('--without-node-options'); 60 | 61 | // The dtrace and etw support was removed in https://github.com/nodejs/node/commit/aa3a572e6bee116cde69508dc29478b40f40551a 62 | if (major <= 18) { 63 | // DTrace 64 | args.push('--without-dtrace'); 65 | } 66 | 67 | // bundled npm package manager 68 | args.push('--without-npm'); 69 | 70 | // Small ICU 71 | args.push('--with-intl=small-icu'); 72 | 73 | // Workaround for nodejs/node#39313 74 | // All supported macOS versions have zlib as a system library 75 | if (targetPlatform === 'macos') { 76 | args.push('--shared-zlib'); 77 | } 78 | 79 | return args; 80 | } 81 | 82 | async function tarFetch(nodeVersion: string) { 83 | log.info('Fetching Node.js source archive from nodejs.org...'); 84 | 85 | const distUrl = `${nodeRepo}/${nodeVersion}`; 86 | const tarName = `node-${nodeVersion}.tar.gz`; 87 | 88 | const archivePath = path.join(nodeArchivePath, tarName); 89 | const hashPath = path.join(nodeArchivePath, `${tarName}.sha256sum`); 90 | 91 | if (fs.existsSync(hashPath) && 
fs.existsSync(archivePath)) { 92 | return; 93 | } 94 | 95 | await fs.remove(hashPath).catch(() => undefined); 96 | await fs.remove(archivePath).catch(() => undefined); 97 | 98 | await downloadUrl(`${distUrl}/SHASUMS256.txt`, hashPath); 99 | 100 | await fs.writeFile( 101 | hashPath, 102 | (await fs.readFile(hashPath, 'utf8')) 103 | .split('\n') 104 | .filter((l) => l.includes(tarName))[0] 105 | ); 106 | 107 | await downloadUrl(`${distUrl}/${tarName}`, archivePath); 108 | } 109 | 110 | async function tarExtract(nodeVersion: string, suppressTarOutput: boolean) { 111 | log.info('Extracting Node.js source archive...'); 112 | 113 | const tarName = `node-${nodeVersion}.tar.gz`; 114 | 115 | const expectedHash = ( 116 | await fs.readFile( 117 | path.join(nodeArchivePath, `${tarName}.sha256sum`), 118 | 'utf8' 119 | ) 120 | ).split(' ')[0]; 121 | const actualHash = await hash(path.join(nodeArchivePath, tarName)); 122 | 123 | if (expectedHash !== actualHash) { 124 | await fs.remove(path.join(nodeArchivePath, tarName)); 125 | await fs.remove(path.join(nodeArchivePath, `${tarName}.sha256sum`)); 126 | throw wasReported(`Hash mismatch for ${tarName}`); 127 | } 128 | 129 | const pipe = promisify(pipeline); 130 | 131 | const source = fs.createReadStream(path.join(nodeArchivePath, tarName)); 132 | const gunzip = createGunzip(); 133 | const extract = tar.extract(nodePath, { 134 | strip: 1, 135 | map: (header) => { 136 | if (!suppressTarOutput) { 137 | log.info(header.name); 138 | } 139 | return header; 140 | }, 141 | }); 142 | 143 | await pipe(source, gunzip, extract); 144 | } 145 | 146 | async function applyPatches(nodeVersion: string) { 147 | log.info('Applying patches'); 148 | 149 | const storedPatches = patchesJson[nodeVersion as keyof typeof patchesJson] as 150 | | string[] 151 | | { patches: string[] } 152 | | { sameAs: string }; 153 | const storedPatch = 154 | 'patches' in storedPatches ? 
storedPatches.patches : storedPatches; 155 | const patches = 156 | 'sameAs' in storedPatch 157 | ? patchesJson[storedPatch.sameAs as keyof typeof patchesJson] 158 | : storedPatch; 159 | 160 | for (const patch of patches) { 161 | const patchPath = path.join(patchesPath, patch); 162 | const args = ['-p1', '-i', patchPath]; 163 | await spawn('patch', args, { cwd: nodePath, stdio: 'inherit' }); 164 | } 165 | } 166 | 167 | export async function fetchExtractApply(nodeVersion: string, quietExtraction: boolean) { 168 | await tarFetch(nodeVersion); 169 | await tarExtract(nodeVersion, quietExtraction); 170 | await applyPatches(nodeVersion); 171 | } 172 | 173 | async function compileOnWindows( 174 | nodeVersion: string, 175 | targetArch: string, 176 | targetPlatform: string 177 | ) { 178 | const args = ['/c', 'vcbuild.bat', targetArch]; 179 | const major = getMajor(nodeVersion); 180 | const config_flags = getConfigureArgs(major, targetPlatform); 181 | 182 | // The dtrace and etw support was removed in https://github.com/nodejs/node/commit/aa3a572e6bee116cde69508dc29478b40f40551a 183 | if (major <= 18) { 184 | // Event Tracing for Windows 185 | args.push('noetw'); 186 | } 187 | 188 | // Performance counters on Windows 189 | if (major <= 10) { 190 | args.push('noperfctr'); 191 | } 192 | 193 | // Link Time Code Generation 194 | if (major >= 12) { 195 | args.push('ltcg'); 196 | } 197 | 198 | // Can't cross compile for arm64 with small-icu 199 | if ( 200 | hostArch !== targetArch && 201 | !config_flags.includes('--with-intl=full-icu') 202 | ) { 203 | config_flags.push('--without-intl'); 204 | } 205 | 206 | await spawn('cmd', args, { 207 | cwd: nodePath, 208 | env: { ...process.env, config_flags: config_flags.join(' ') }, 209 | stdio: 'inherit', 210 | }); 211 | 212 | if (major <= 10) { 213 | return path.join(nodePath, 'Release/node.exe'); 214 | } 215 | 216 | return path.join(nodePath, 'out/Release/node.exe'); 217 | } 218 | 219 | const { MAKE_JOB_COUNT = os.cpus().length } = 
process.env; 220 | 221 | async function compileOnUnix( 222 | nodeVersion: string, 223 | targetArch: string, 224 | targetPlatform: string 225 | ) { 226 | const args = []; 227 | const cpu = { 228 | x86: 'ia32', 229 | x64: 'x64', 230 | armv6: 'arm', 231 | armv7: 'arm', 232 | arm64: 'arm64', 233 | ppc64: 'ppc64', 234 | s390x: 's390x', 235 | }[targetArch]; 236 | 237 | if (cpu) { 238 | args.push('--dest-cpu', cpu); 239 | } 240 | 241 | if (targetArch === 'armv7') { 242 | const { CFLAGS = '', CXXFLAGS = '' } = process.env; 243 | process.env.CFLAGS = `${CFLAGS} -marm -mcpu=cortex-a7`; 244 | process.env.CXXFLAGS = `${CXXFLAGS} -marm -mcpu=cortex-a7`; 245 | 246 | args.push('--with-arm-float-abi=hard'); 247 | args.push('--with-arm-fpu=vfpv3'); 248 | } 249 | 250 | if (hostArch !== targetArch) { 251 | log.warn('Cross compiling!'); 252 | log.warn('You are responsible for appropriate env like CC, CC_host, etc.'); 253 | args.push('--cross-compiling'); 254 | } 255 | 256 | args.push(...getConfigureArgs(getMajor(nodeVersion), targetPlatform)); 257 | 258 | // TODO same for windows? 259 | await spawn('/bin/sh', ['./configure', ...args], { 260 | cwd: nodePath, 261 | stdio: 'inherit', 262 | }); 263 | 264 | await spawn( 265 | hostPlatform === 'freebsd' ? 'gmake' : 'make', 266 | ['-j', String(MAKE_JOB_COUNT)], 267 | { 268 | cwd: nodePath, 269 | stdio: 'inherit', 270 | } 271 | ); 272 | 273 | const output = path.join(nodePath, 'out/Release/node'); 274 | 275 | await spawn( 276 | process.env.STRIP || 'strip', 277 | // global symbols are required for native bindings on macOS 278 | [...(targetPlatform === 'macos' ? ['-x'] : []), output], 279 | { 280 | stdio: 'inherit', 281 | } 282 | ); 283 | 284 | if (targetPlatform === 'macos') { 285 | // Newer versions of Apple Clang automatically ad-hoc sign the compiled executable. 286 | // However, for final executable to be signable, base binary MUST NOT have an existing signature. 
287 | await spawn('codesign', ['--remove-signature', output], { 288 | stdio: 'inherit', 289 | }); 290 | } 291 | 292 | return output; 293 | } 294 | 295 | async function compile( 296 | nodeVersion: string, 297 | targetArch: string, 298 | targetPlatform: string 299 | ) { 300 | log.info('Compiling Node.js from sources...'); 301 | const win = hostPlatform === 'win'; 302 | 303 | if (win) { 304 | return compileOnWindows(nodeVersion, targetArch, targetPlatform); 305 | } 306 | 307 | return compileOnUnix(nodeVersion, targetArch, targetPlatform); 308 | } 309 | 310 | export async function prepBuildPath() { 311 | await fs.remove(buildPath); 312 | await fs.mkdirp(nodePath); 313 | await fs.mkdirp(nodeArchivePath); 314 | } 315 | 316 | export default async function build( 317 | nodeVersion: string, 318 | targetArch: string, 319 | targetPlatform: string, 320 | local: string 321 | ) { 322 | await prepBuildPath(); 323 | await fetchExtractApply(nodeVersion, false); 324 | 325 | const output = await compile(nodeVersion, targetArch, targetPlatform); 326 | const outputHash = await hash(output); 327 | 328 | await fs.mkdirp(path.dirname(local)); 329 | await fs.copy(output, local); 330 | await fs.promises.writeFile( 331 | `${local}.sha256sum`, 332 | `${outputHash} ${path.basename(local)} 333 | ` 334 | ); 335 | await fs.remove(buildPath); 336 | } 337 | -------------------------------------------------------------------------------- /patches/node.v10.24.1.cpp.patch: -------------------------------------------------------------------------------- 1 | --- node/deps/v8/include/v8.h 2 | +++ node/deps/v8/include/v8.h 3 | @@ -8137,10 +8137,14 @@ 4 | */ 5 | static void SetFlagsFromCommandLine(int* argc, 6 | char** argv, 7 | bool remove_flags); 8 | 9 | + static void EnableCompilationForSourcelessUse(); 10 | + static void DisableCompilationForSourcelessUse(); 11 | + static void FixSourcelessScript(Isolate* v8_isolate, Local script); 12 | + 13 | /** Get the version string. 
*/ 14 | static const char* GetVersion(); 15 | 16 | /** 17 | * Initializes V8. This function needs to be called before the first Isolate 18 | --- node/deps/v8/src/api.cc 19 | +++ node/deps/v8/src/api.cc 20 | @@ -914,10 +914,42 @@ 21 | 22 | void V8::SetFlagsFromCommandLine(int* argc, char** argv, bool remove_flags) { 23 | i::FlagList::SetFlagsFromCommandLine(argc, argv, remove_flags); 24 | } 25 | 26 | +bool save_lazy; 27 | +bool save_predictable; 28 | + 29 | + 30 | +void V8::EnableCompilationForSourcelessUse() { 31 | + save_lazy = i::FLAG_lazy; 32 | + i::FLAG_lazy = false; 33 | + save_predictable = i::FLAG_predictable; 34 | + i::FLAG_predictable = true; 35 | + i::CpuFeatures::Reinitialize(); 36 | + i::CpuFeatures::Probe(true); 37 | +} 38 | + 39 | + 40 | +void V8::DisableCompilationForSourcelessUse() { 41 | + i::FLAG_lazy = save_lazy; 42 | + i::FLAG_predictable = save_predictable; 43 | + i::CpuFeatures::Reinitialize(); 44 | + i::CpuFeatures::Probe(false); 45 | +} 46 | + 47 | + 48 | +void V8::FixSourcelessScript(Isolate* v8_isolate, Local script) { 49 | + auto isolate = reinterpret_cast(v8_isolate); 50 | + auto object = i::Handle::cast(Utils::OpenHandle(*script)); 51 | + i::Handle function_info( 52 | + i::SharedFunctionInfo::cast(*object), object->GetIsolate()); 53 | + auto s = reinterpret_cast(function_info->script()); 54 | + s->set_source(isolate->heap()->undefined_value()); 55 | +} 56 | + 57 | + 58 | RegisteredExtension* RegisteredExtension::first_extension_ = nullptr; 59 | 60 | RegisteredExtension::RegisteredExtension(Extension* extension) 61 | : extension_(extension) { } 62 | 63 | --- node/deps/v8/src/assembler.h 64 | +++ node/deps/v8/src/assembler.h 65 | @@ -302,10 +302,15 @@ 66 | } 67 | 68 | static void PrintTarget(); 69 | static void PrintFeatures(); 70 | 71 | + static void Reinitialize() { 72 | + supported_ = 0; 73 | + initialized_ = false; 74 | + } 75 | + 76 | private: 77 | friend class ExternalReference; 78 | friend class AssemblerBase; 79 | // Flush 
instruction cache. 80 | static void FlushICache(void* start, size_t size); 81 | --- node/deps/v8/src/objects.cc 82 | +++ node/deps/v8/src/objects.cc 83 | @@ -13179,10 +13179,13 @@ 84 | 85 | // Check if we should print {function} as a class. 86 | Handle maybe_class_positions = JSReceiver::GetDataProperty( 87 | function, isolate->factory()->class_positions_symbol()); 88 | if (maybe_class_positions->IsTuple2()) { 89 | + if (Script::cast(shared_info->script())->source()->IsUndefined(isolate)) { 90 | + return isolate->factory()->NewStringFromAsciiChecked("class {}"); 91 | + } 92 | Tuple2* class_positions = Tuple2::cast(*maybe_class_positions); 93 | int start_position = Smi::ToInt(class_positions->value1()); 94 | int end_position = Smi::ToInt(class_positions->value2()); 95 | Handle script_source( 96 | String::cast(Script::cast(shared_info->script())->source()), isolate); 97 | --- node/deps/v8/src/parsing/parsing.cc 98 | +++ node/deps/v8/src/parsing/parsing.cc 99 | @@ -18,10 +18,11 @@ 100 | namespace parsing { 101 | 102 | bool ParseProgram(ParseInfo* info, Isolate* isolate) { 103 | DCHECK(info->is_toplevel()); 104 | DCHECK_NULL(info->literal()); 105 | + if (info->script()->source()->IsUndefined(isolate)) return false; 106 | 107 | VMState state(isolate); 108 | 109 | // Create a character stream for the parser. 110 | Handle source(String::cast(info->script()->source()), isolate); 111 | @@ -55,10 +56,11 @@ 112 | bool ParseFunction(ParseInfo* info, Handle shared_info, 113 | Isolate* isolate) { 114 | DCHECK(!info->is_toplevel()); 115 | DCHECK(!shared_info.is_null()); 116 | DCHECK_NULL(info->literal()); 117 | + if (info->script()->source()->IsUndefined(isolate)) return false; 118 | 119 | // Create a character stream for the parser. 
120 | Handle source(String::cast(info->script()->source()), isolate); 121 | source = String::Flatten(source); 122 | isolate->counters()->total_parse_size()->Increment(source->length()); 123 | --- node/deps/v8/src/snapshot/code-serializer.cc 124 | +++ node/deps/v8/src/snapshot/code-serializer.cc 125 | @@ -401,31 +401,46 @@ 126 | 127 | SerializedCodeData::SanityCheckResult SerializedCodeData::SanityCheck( 128 | Isolate* isolate, uint32_t expected_source_hash) const { 129 | if (this->size_ < kHeaderSize) return INVALID_HEADER; 130 | uint32_t magic_number = GetMagicNumber(); 131 | - if (magic_number != ComputeMagicNumber(isolate)) return MAGIC_NUMBER_MISMATCH; 132 | + if (magic_number != ComputeMagicNumber(isolate)) { 133 | + // base::OS::PrintError("Pkg: MAGIC_NUMBER_MISMATCH\n"); // TODO enable after solving v8-cache/ncc issue 134 | + return MAGIC_NUMBER_MISMATCH; 135 | + } 136 | uint32_t version_hash = GetHeaderValue(kVersionHashOffset); 137 | - uint32_t source_hash = GetHeaderValue(kSourceHashOffset); 138 | uint32_t cpu_features = GetHeaderValue(kCpuFeaturesOffset); 139 | uint32_t flags_hash = GetHeaderValue(kFlagHashOffset); 140 | uint32_t payload_length = GetHeaderValue(kPayloadLengthOffset); 141 | uint32_t c1 = GetHeaderValue(kChecksum1Offset); 142 | uint32_t c2 = GetHeaderValue(kChecksum2Offset); 143 | - if (version_hash != Version::Hash()) return VERSION_MISMATCH; 144 | - if (source_hash != expected_source_hash) return SOURCE_MISMATCH; 145 | - if (cpu_features != static_cast(CpuFeatures::SupportedFeatures())) { 146 | + if (version_hash != Version::Hash()) { 147 | + base::OS::PrintError("Pkg: VERSION_MISMATCH\n"); 148 | + return VERSION_MISMATCH; 149 | + } 150 | + uint32_t host_features = static_cast(CpuFeatures::SupportedFeatures()); 151 | + if (cpu_features & (~host_features)) { 152 | + base::OS::PrintError("Pkg: CPU_FEATURES_MISMATCH\n"); 153 | return CPU_FEATURES_MISMATCH; 154 | } 155 | - if (flags_hash != FlagList::Hash()) return FLAGS_MISMATCH; 156 | + if 
(flags_hash != FlagList::Hash()) { 157 | + base::OS::PrintError("Pkg: FLAGS_MISMATCH\n"); 158 | + return FLAGS_MISMATCH; 159 | + } 160 | uint32_t max_payload_length = 161 | this->size_ - 162 | POINTER_SIZE_ALIGN(kHeaderSize + 163 | GetHeaderValue(kNumReservationsOffset) * kInt32Size + 164 | GetHeaderValue(kNumCodeStubKeysOffset) * kInt32Size); 165 | - if (payload_length > max_payload_length) return LENGTH_MISMATCH; 166 | - if (!Checksum(DataWithoutHeader()).Check(c1, c2)) return CHECKSUM_MISMATCH; 167 | + if (payload_length > max_payload_length) { 168 | + base::OS::PrintError("Pkg: LENGTH_MISMATCH\n"); 169 | + return LENGTH_MISMATCH; 170 | + } 171 | + if (!Checksum(DataWithoutHeader()).Check(c1, c2)) { 172 | + base::OS::PrintError("Pkg: CHECKSUM_MISMATCH\n"); 173 | + return CHECKSUM_MISMATCH; 174 | + } 175 | return CHECK_SUCCESS; 176 | } 177 | 178 | uint32_t SerializedCodeData::SourceHash(Handle source) { 179 | return source->length(); 180 | --- node/lib/child_process.js 181 | +++ node/lib/child_process.js 182 | @@ -108,11 +108,11 @@ 183 | } 184 | 185 | options.execPath = options.execPath || process.execPath; 186 | options.shell = false; 187 | 188 | - return spawn(options.execPath, args, options); 189 | + return exports.spawn(options.execPath, args, options); 190 | }; 191 | 192 | 193 | exports._forkChild = function _forkChild(fd) { 194 | // set process.send() 195 | --- node/lib/internal/bootstrap/loaders.js 196 | +++ node/lib/internal/bootstrap/loaders.js 197 | @@ -337,11 +337,11 @@ 198 | 199 | // (code, filename, lineOffset, columnOffset 200 | // cachedData, produceCachedData, parsingContext) 201 | const script = new ContextifyScript( 202 | source, this.filename, 0, 0, 203 | - cache, false, undefined 204 | + cache, false, undefined, false 205 | ); 206 | 207 | // This will be used to create code cache in tools/generate_code_cache.js 208 | this.script = script; 209 | 210 | --- node/lib/internal/bootstrap/node.js 211 | +++ node/lib/internal/bootstrap/node.js 212 | @@ 
-213,10 +213,47 @@ 213 | 214 | // There are various modes that Node can run in. The most common two 215 | // are running from a script and running the REPL - but there are a few 216 | // others like the debugger or running --eval arguments. Here we decide 217 | // which mode we run in. 218 | + 219 | + (function () { 220 | + var fs = NativeModule.require('fs'); 221 | + var vm = NativeModule.require('vm'); 222 | + function readPrelude (fd) { 223 | + var PAYLOAD_POSITION = '// PAYLOAD_POSITION //' | 0; 224 | + var PAYLOAD_SIZE = '// PAYLOAD_SIZE //' | 0; 225 | + var PRELUDE_POSITION = '// PRELUDE_POSITION //' | 0; 226 | + var PRELUDE_SIZE = '// PRELUDE_SIZE //' | 0; 227 | + if (!PRELUDE_POSITION) { 228 | + // no prelude - remove entrypoint from argv[1] 229 | + process.argv.splice(1, 1); 230 | + return { undoPatch: true }; 231 | + } 232 | + var prelude = Buffer.alloc(PRELUDE_SIZE); 233 | + var read = fs.readSync(fd, prelude, 0, PRELUDE_SIZE, PRELUDE_POSITION); 234 | + if (read !== PRELUDE_SIZE) { 235 | + console.error('Pkg: Error reading from file.'); 236 | + process.exit(1); 237 | + } 238 | + var s = new vm.Script(prelude, { filename: 'pkg/prelude/bootstrap.js' }); 239 | + var fn = s.runInThisContext(); 240 | + return fn(process, NativeModule.require, 241 | + console, fd, PAYLOAD_POSITION, PAYLOAD_SIZE); 242 | + } 243 | + (function () { 244 | + var fd = fs.openSync(process.execPath, 'r'); 245 | + var result = readPrelude(fd); 246 | + if (result && result.undoPatch) { 247 | + var bindingFs = process.binding('fs'); 248 | + fs.internalModuleStat = bindingFs.internalModuleStat; 249 | + fs.internalModuleReadJSON = bindingFs.internalModuleReadJSON; 250 | + fs.closeSync(fd); 251 | + } 252 | + }()); 253 | + }()); 254 | + 255 | if (internalBinding('worker').getEnvMessagePort() !== undefined) { 256 | // This means we are in a Worker context, and any script execution 257 | // will be directed by the worker module. 
258 | NativeModule.require('internal/worker').setupChild(evalScript); 259 | } else if (NativeModule.exists('_third_party_main')) { 260 | --- node/lib/internal/modules/cjs/loader.js 261 | +++ node/lib/internal/modules/cjs/loader.js 262 | @@ -27,14 +27,12 @@ 263 | const vm = require('vm'); 264 | const assert = require('assert').ok; 265 | const fs = require('fs'); 266 | const internalFS = require('internal/fs/utils'); 267 | const path = require('path'); 268 | -const { 269 | - internalModuleReadJSON, 270 | - internalModuleStat 271 | -} = process.binding('fs'); 272 | +const internalModuleReadJSON = function (f) { return require('fs').internalModuleReadJSON(f); }; 273 | +const internalModuleStat = function (f) { return require('fs').internalModuleStat(f); }; 274 | const { safeGetenv } = process.binding('util'); 275 | const { 276 | makeRequireFunction, 277 | normalizeReferrerURL, 278 | requireDepth, 279 | --- node/lib/vm.js 280 | +++ node/lib/vm.js 281 | @@ -55,10 +55,11 @@ 282 | columnOffset = 0, 283 | cachedData, 284 | produceCachedData = false, 285 | importModuleDynamically, 286 | [kParsingContext]: parsingContext, 287 | + sourceless = false, 288 | } = options; 289 | 290 | if (typeof filename !== 'string') { 291 | throw new ERR_INVALID_ARG_TYPE('options.filename', 'string', filename); 292 | } 293 | @@ -84,11 +85,12 @@ 294 | filename, 295 | lineOffset, 296 | columnOffset, 297 | cachedData, 298 | produceCachedData, 299 | - parsingContext); 300 | + parsingContext, 301 | + sourceless); 302 | } catch (e) { 303 | throw e; /* node-do-not-add-exception-line */ 304 | } 305 | 306 | if (importModuleDynamically !== undefined) { 307 | --- node/src/inspector_agent.cc 308 | +++ node/src/inspector_agent.cc 309 | @@ -712,12 +712,10 @@ 310 | CHECK_EQ(0, uv_async_init(parent_env_->event_loop(), 311 | &start_io_thread_async, 312 | StartIoThreadAsyncCallback)); 313 | uv_unref(reinterpret_cast(&start_io_thread_async)); 314 | start_io_thread_async.data = this; 315 | - // Ignore failure, 
SIGUSR1 won't work, but that should not block node start. 316 | - StartDebugSignalHandler(); 317 | } 318 | 319 | bool wait_for_connect = options->wait_for_connect(); 320 | if (parent_handle_) { 321 | wait_for_connect = parent_handle_->WaitForConnect(); 322 | --- node/src/node.cc 323 | +++ node/src/node.cc 324 | @@ -2369,17 +2369,10 @@ 325 | } 326 | 327 | 328 | inline void PlatformInit() { 329 | #ifdef __POSIX__ 330 | -#if HAVE_INSPECTOR 331 | - sigset_t sigmask; 332 | - sigemptyset(&sigmask); 333 | - sigaddset(&sigmask, SIGUSR1); 334 | - const int err = pthread_sigmask(SIG_SETMASK, &sigmask, nullptr); 335 | -#endif // HAVE_INSPECTOR 336 | - 337 | // Make sure file descriptors 0-2 are valid before we start logging anything. 338 | for (int fd = STDIN_FILENO; fd <= STDERR_FILENO; fd += 1) { 339 | struct stat ignored; 340 | if (fstat(fd, &ignored) == 0) 341 | continue; 342 | @@ -2389,14 +2382,10 @@ 343 | ABORT(); 344 | if (fd != open("/dev/null", O_RDWR)) 345 | ABORT(); 346 | } 347 | 348 | -#if HAVE_INSPECTOR 349 | - CHECK_EQ(err, 0); 350 | -#endif // HAVE_INSPECTOR 351 | - 352 | #ifndef NODE_SHARED_MODE 353 | // Restore signal dispositions, the parent process may have changed them. 
354 | struct sigaction act; 355 | memset(&act, 0, sizeof(act)); 356 | 357 | --- node/src/node_contextify.cc 358 | +++ node/src/node_contextify.cc 359 | @@ -63,10 +63,11 @@ 360 | using v8::String; 361 | using v8::Symbol; 362 | using v8::TryCatch; 363 | using v8::Uint32; 364 | using v8::UnboundScript; 365 | +using v8::V8; 366 | using v8::Value; 367 | using v8::WeakCallbackInfo; 368 | using v8::WeakCallbackType; 369 | 370 | // The vm module executes code in a sandboxed environment with a different 371 | @@ -638,15 +639,16 @@ 372 | Local line_offset; 373 | Local column_offset; 374 | Local cached_data_buf; 375 | bool produce_cached_data = false; 376 | Local parsing_context = context; 377 | + bool sourceless = false; 378 | 379 | if (argc > 2) { 380 | // new ContextifyScript(code, filename, lineOffset, columnOffset, 381 | // cachedData, produceCachedData, parsingContext) 382 | - CHECK_EQ(argc, 7); 383 | + CHECK_EQ(argc, 8); 384 | CHECK(args[2]->IsNumber()); 385 | line_offset = args[2].As(); 386 | CHECK(args[3]->IsNumber()); 387 | column_offset = args[3].As(); 388 | if (!args[4]->IsUndefined()) { 389 | @@ -661,10 +663,11 @@ 390 | ContextifyContext::ContextFromContextifiedSandbox( 391 | env, args[6].As()); 392 | CHECK_NOT_NULL(sandbox); 393 | parsing_context = sandbox->context(); 394 | } 395 | + sourceless = args[7]->IsTrue(); 396 | } else { 397 | line_offset = Integer::New(isolate, 0); 398 | column_offset = Integer::New(isolate, 0); 399 | } 400 | 401 | @@ -715,10 +718,14 @@ 402 | 403 | TryCatch try_catch(isolate); 404 | Environment::ShouldNotAbortOnUncaughtScope no_abort_scope(env); 405 | Context::Scope scope(parsing_context); 406 | 407 | + if (sourceless && produce_cached_data) { 408 | + V8::EnableCompilationForSourcelessUse(); 409 | + } 410 | + 411 | MaybeLocal v8_script = ScriptCompiler::CompileUnboundScript( 412 | isolate, 413 | &source, 414 | compile_options); 415 | 416 | @@ -730,10 +737,17 @@ 417 | TRACING_CATEGORY_NODE2(vm, script), 418 | "ContextifyScript::New", 
419 | contextify_script); 420 | return; 421 | } 422 | + 423 | + if (sourceless && compile_options == ScriptCompiler::kConsumeCodeCache) { 424 | + if (!source.GetCachedData()->rejected) { 425 | + V8::FixSourcelessScript(env->isolate(), v8_script.ToLocalChecked()); 426 | + } 427 | + } 428 | + 429 | contextify_script->script_.Reset(isolate, v8_script.ToLocalChecked()); 430 | 431 | if (compile_options == ScriptCompiler::kConsumeCodeCache) { 432 | args.This()->Set( 433 | env->cached_data_rejected_string(), 434 | @@ -751,10 +765,15 @@ 435 | } 436 | args.This()->Set( 437 | env->cached_data_produced_string(), 438 | Boolean::New(isolate, cached_data_produced)); 439 | } 440 | + 441 | + if (sourceless && produce_cached_data) { 442 | + V8::DisableCompilationForSourcelessUse(); 443 | + } 444 | + 445 | TRACE_EVENT_NESTABLE_ASYNC_END0( 446 | TRACING_CATEGORY_NODE2(vm, script), 447 | "ContextifyScript::New", 448 | contextify_script); 449 | } 450 | --- node/src/node_main.cc 451 | +++ node/src/node_main.cc 452 | @@ -20,10 +20,12 @@ 453 | // USE OR OTHER DEALINGS IN THE SOFTWARE. 454 | 455 | #include "node.h" 456 | #include 457 | 458 | +int reorder(int argc, char** argv); 459 | + 460 | #ifdef _WIN32 461 | #include 462 | #include 463 | #include 464 | 465 | @@ -67,11 +69,11 @@ 466 | exit(1); 467 | } 468 | } 469 | argv[argc] = nullptr; 470 | // Now that conversion is done, we can finally start. 471 | - return node::Start(argc, argv); 472 | + return reorder(argc, argv); 473 | } 474 | #else 475 | // UNIX 476 | #ifdef __linux__ 477 | #include 478 | @@ -119,8 +121,75 @@ 479 | #endif 480 | // Disable stdio buffering, it interacts poorly with printf() 481 | // calls elsewhere in the program (e.g., any logging from V8.) 
482 | setvbuf(stdout, nullptr, _IONBF, 0); 483 | setvbuf(stderr, nullptr, _IONBF, 0); 484 | - return node::Start(argc, argv); 485 | + return reorder(argc, argv); 486 | } 487 | #endif 488 | + 489 | +#include 490 | + 491 | +int strlen2 (char* s) { 492 | + int len = 0; 493 | + while (*s) { 494 | + len += 1; 495 | + s += 1; 496 | + } 497 | + return len; 498 | +} 499 | + 500 | +bool should_set_dummy() { 501 | +#ifdef _WIN32 502 | + #define MAX_ENV_LENGTH 32767 503 | + char execpath_env[MAX_ENV_LENGTH]; 504 | + DWORD result = GetEnvironmentVariable("PKG_EXECPATH", execpath_env, MAX_ENV_LENGTH); 505 | + if (result == 0 && GetLastError() != ERROR_SUCCESS) return true; 506 | + return strcmp(execpath_env, "PKG_INVOKE_NODEJS") != 0; 507 | +#else 508 | + const char* execpath_env = getenv("PKG_EXECPATH"); 509 | + if (!execpath_env) return true; 510 | + return strcmp(execpath_env, "PKG_INVOKE_NODEJS") != 0; 511 | +#endif 512 | +} 513 | + 514 | +// for uv_setup_args 515 | +int adjacent(int argc, char** argv) { 516 | + size_t size = 0; 517 | + for (int i = 0; i < argc; i++) { 518 | + size += strlen(argv[i]) + 1; 519 | + } 520 | + char* args = new char[size]; 521 | + size_t pos = 0; 522 | + for (int i = 0; i < argc; i++) { 523 | + memcpy(&args[pos], argv[i], strlen(argv[i]) + 1); 524 | + argv[i] = &args[pos]; 525 | + pos += strlen(argv[i]) + 1; 526 | + } 527 | + return node::Start(argc, argv); 528 | +} 529 | + 530 | +volatile char* BAKERY = (volatile char*) "\0// BAKERY // BAKERY " \ 531 | + "// BAKERY // BAKERY // BAKERY // BAKERY // BAKERY // BAKERY " \ 532 | + "// BAKERY // BAKERY // BAKERY // BAKERY // BAKERY // BAKERY " \ 533 | + "// BAKERY // BAKERY // BAKERY // BAKERY // BAKERY // BAKERY "; 534 | + 535 | +int reorder(int argc, char** argv) { 536 | + int i; 537 | + char** nargv = new char*[argc + 64]; 538 | + int c = 0; 539 | + nargv[c++] = argv[0]; 540 | + char* bakery = (char*) BAKERY; 541 | + while (true) { 542 | + size_t width = strlen2(bakery); 543 | + if (width == 0) 
break; 544 | + nargv[c++] = bakery; 545 | + bakery += width + 1; 546 | + } 547 | + if (should_set_dummy()) { 548 | + nargv[c++] = (char*) "PKG_DUMMY_ENTRYPOINT"; 549 | + } 550 | + for (i = 1; i < argc; i++) { 551 | + nargv[c++] = argv[i]; 552 | + } 553 | + return adjacent(c, nargv); 554 | +} 555 | --- node/src/node_options.cc 556 | +++ node/src/node_options.cc 557 | @@ -55,10 +55,11 @@ 558 | // XXX: If you add an option here, please also add it to doc/node.1 and 559 | // doc/api/cli.md 560 | // TODO(addaleax): Make that unnecessary. 561 | 562 | DebugOptionsParser::DebugOptionsParser() { 563 | + return; 564 | #if HAVE_INSPECTOR 565 | AddOption("--inspect-port", 566 | "set host:port for inspector", 567 | &DebugOptions::host_port, 568 | kAllowedInEnvironment); 569 | -------------------------------------------------------------------------------- /patches/node.v8.17.0.cpp.patch: -------------------------------------------------------------------------------- 1 | --- node/deps/v8/include/v8.h 2 | +++ node/deps/v8/include/v8.h 3 | @@ -7911,10 +7911,14 @@ 4 | */ 5 | static void SetFlagsFromCommandLine(int* argc, 6 | char** argv, 7 | bool remove_flags); 8 | 9 | + static void EnableCompilationForSourcelessUse(); 10 | + static void DisableCompilationForSourcelessUse(); 11 | + static void FixSourcelessScript(Isolate* v8_isolate, Local script); 12 | + 13 | /** Get the version string. 
*/ 14 | static const char* GetVersion(); 15 | 16 | /** Callback function for reporting failed access checks.*/ 17 | V8_INLINE static V8_DEPRECATED( 18 | --- node/deps/v8/src/api.cc 19 | +++ node/deps/v8/src/api.cc 20 | @@ -830,10 +830,46 @@ 21 | void V8::SetFlagsFromCommandLine(int* argc, char** argv, bool remove_flags) { 22 | i::FlagList::SetFlagsFromCommandLine(argc, argv, remove_flags); 23 | } 24 | 25 | 26 | +bool save_lazy; 27 | +bool save_predictable; 28 | +bool save_serialize_toplevel; 29 | + 30 | + 31 | +void V8::EnableCompilationForSourcelessUse() { 32 | + save_lazy = i::FLAG_lazy; 33 | + i::FLAG_lazy = false; 34 | + save_predictable = i::FLAG_predictable; 35 | + i::FLAG_predictable = true; 36 | + save_serialize_toplevel = i::FLAG_serialize_toplevel; 37 | + i::FLAG_serialize_toplevel = true; 38 | + i::CpuFeatures::Reinitialize(); 39 | + i::CpuFeatures::Probe(true); 40 | +} 41 | + 42 | + 43 | +void V8::DisableCompilationForSourcelessUse() { 44 | + i::FLAG_lazy = save_lazy; 45 | + i::FLAG_predictable = save_predictable; 46 | + i::FLAG_serialize_toplevel = save_serialize_toplevel; 47 | + i::CpuFeatures::Reinitialize(); 48 | + i::CpuFeatures::Probe(false); 49 | +} 50 | + 51 | + 52 | +void V8::FixSourcelessScript(Isolate* v8_isolate, Local script) { 53 | + auto isolate = reinterpret_cast(v8_isolate); 54 | + auto object = i::Handle::cast(Utils::OpenHandle(*script)); 55 | + i::Handle function_info( 56 | + i::SharedFunctionInfo::cast(*object), object->GetIsolate()); 57 | + auto s = reinterpret_cast(function_info->script()); 58 | + s->set_source(isolate->heap()->undefined_value()); 59 | +} 60 | + 61 | + 62 | RegisteredExtension* RegisteredExtension::first_extension_ = NULL; 63 | 64 | 65 | RegisteredExtension::RegisteredExtension(Extension* extension) 66 | : extension_(extension) { } 67 | --- node/deps/v8/src/assembler.h 68 | +++ node/deps/v8/src/assembler.h 69 | @@ -297,10 +297,15 @@ 70 | } 71 | 72 | static void PrintTarget(); 73 | static void PrintFeatures(); 74 | 
75 | + static void Reinitialize() { 76 | + supported_ = 0; 77 | + initialized_ = false; 78 | + } 79 | + 80 | private: 81 | friend class ExternalReference; 82 | friend class AssemblerBase; 83 | // Flush instruction cache. 84 | static void FlushICache(void* start, size_t size); 85 | --- node/deps/v8/src/objects.cc 86 | +++ node/deps/v8/src/objects.cc 87 | @@ -13206,10 +13206,13 @@ 88 | 89 | // Check if we should print {function} as a class. 90 | Handle class_start_position = JSReceiver::GetDataProperty( 91 | function, isolate->factory()->class_start_position_symbol()); 92 | if (class_start_position->IsSmi()) { 93 | + if (Script::cast(shared_info->script())->source()->IsUndefined(isolate)) { 94 | + return isolate->factory()->NewStringFromAsciiChecked("class {}"); 95 | + } 96 | Handle class_end_position = JSReceiver::GetDataProperty( 97 | function, isolate->factory()->class_end_position_symbol()); 98 | Handle script_source( 99 | String::cast(Script::cast(shared_info->script())->source()), isolate); 100 | return isolate->factory()->NewSubString( 101 | --- node/deps/v8/src/parsing/parsing.cc 102 | +++ node/deps/v8/src/parsing/parsing.cc 103 | @@ -18,10 +18,11 @@ 104 | namespace parsing { 105 | 106 | bool ParseProgram(ParseInfo* info, Isolate* isolate) { 107 | DCHECK(info->is_toplevel()); 108 | DCHECK_NULL(info->literal()); 109 | + if (info->script()->source()->IsUndefined(isolate)) return false; 110 | 111 | VMState state(isolate); 112 | 113 | // Create a character stream for the parser. 114 | Handle source(String::cast(info->script()->source())); 115 | @@ -52,10 +53,11 @@ 116 | bool ParseFunction(ParseInfo* info, Handle shared_info, 117 | Isolate* isolate) { 118 | DCHECK(!info->is_toplevel()); 119 | DCHECK(!shared_info.is_null()); 120 | DCHECK_NULL(info->literal()); 121 | + if (info->script()->source()->IsUndefined(isolate)) return false; 122 | 123 | // Create a character stream for the parser. 
124 | Handle source(String::cast(info->script()->source())); 125 | source = String::Flatten(source); 126 | isolate->counters()->total_parse_size()->Increment(source->length()); 127 | --- node/deps/v8/src/snapshot/code-serializer.cc 128 | +++ node/deps/v8/src/snapshot/code-serializer.cc 129 | @@ -392,31 +392,46 @@ 130 | 131 | SerializedCodeData::SanityCheckResult SerializedCodeData::SanityCheck( 132 | Isolate* isolate, uint32_t expected_source_hash) const { 133 | if (this->size_ < kHeaderSize) return INVALID_HEADER; 134 | uint32_t magic_number = GetMagicNumber(); 135 | - if (magic_number != ComputeMagicNumber(isolate)) return MAGIC_NUMBER_MISMATCH; 136 | + if (magic_number != ComputeMagicNumber(isolate)) { 137 | + // base::OS::PrintError("Pkg: MAGIC_NUMBER_MISMATCH\n"); // TODO enable after solving v8-cache/ncc issue 138 | + return MAGIC_NUMBER_MISMATCH; 139 | + } 140 | uint32_t version_hash = GetHeaderValue(kVersionHashOffset); 141 | - uint32_t source_hash = GetHeaderValue(kSourceHashOffset); 142 | uint32_t cpu_features = GetHeaderValue(kCpuFeaturesOffset); 143 | uint32_t flags_hash = GetHeaderValue(kFlagHashOffset); 144 | uint32_t payload_length = GetHeaderValue(kPayloadLengthOffset); 145 | uint32_t c1 = GetHeaderValue(kChecksum1Offset); 146 | uint32_t c2 = GetHeaderValue(kChecksum2Offset); 147 | - if (version_hash != Version::Hash()) return VERSION_MISMATCH; 148 | - if (source_hash != expected_source_hash) return SOURCE_MISMATCH; 149 | - if (cpu_features != static_cast(CpuFeatures::SupportedFeatures())) { 150 | + if (version_hash != Version::Hash()) { 151 | + base::OS::PrintError("Pkg: VERSION_MISMATCH\n"); 152 | + return VERSION_MISMATCH; 153 | + } 154 | + uint32_t host_features = static_cast(CpuFeatures::SupportedFeatures()); 155 | + if (cpu_features & (~host_features)) { 156 | + base::OS::PrintError("Pkg: CPU_FEATURES_MISMATCH\n"); 157 | return CPU_FEATURES_MISMATCH; 158 | } 159 | - if (flags_hash != FlagList::Hash()) return FLAGS_MISMATCH; 160 | + if 
(flags_hash != FlagList::Hash()) { 161 | + base::OS::PrintError("Pkg: FLAGS_MISMATCH\n"); 162 | + return FLAGS_MISMATCH; 163 | + } 164 | uint32_t max_payload_length = 165 | this->size_ - 166 | POINTER_SIZE_ALIGN(kHeaderSize + 167 | GetHeaderValue(kNumReservationsOffset) * kInt32Size + 168 | GetHeaderValue(kNumCodeStubKeysOffset) * kInt32Size); 169 | - if (payload_length > max_payload_length) return LENGTH_MISMATCH; 170 | - if (!Checksum(DataWithoutHeader()).Check(c1, c2)) return CHECKSUM_MISMATCH; 171 | + if (payload_length > max_payload_length) { 172 | + base::OS::PrintError("Pkg: LENGTH_MISMATCH\n"); 173 | + return LENGTH_MISMATCH; 174 | + } 175 | + if (!Checksum(DataWithoutHeader()).Check(c1, c2)) { 176 | + base::OS::PrintError("Pkg: CHECKSUM_MISMATCH\n"); 177 | + return CHECKSUM_MISMATCH; 178 | + } 179 | return CHECK_SUCCESS; 180 | } 181 | 182 | uint32_t SerializedCodeData::SourceHash(Handle source) { 183 | return source->length(); 184 | --- node/lib/child_process.js 185 | +++ node/lib/child_process.js 186 | @@ -104,11 +104,11 @@ 187 | } 188 | 189 | options.execPath = options.execPath || process.execPath; 190 | options.shell = false; 191 | 192 | - return spawn(options.execPath, args, options); 193 | + return exports.spawn(options.execPath, args, options); 194 | }; 195 | 196 | 197 | exports._forkChild = function(fd) { 198 | // set process.send() 199 | --- node/lib/internal/bootstrap_node.js 200 | +++ node/lib/internal/bootstrap_node.js 201 | @@ -122,10 +122,46 @@ 202 | // There are various modes that Node can run in. The most common two 203 | // are running from a script and running the REPL - but there are a few 204 | // others like the debugger or running --eval arguments. Here we decide 205 | // which mode we run in. 
206 | 207 | + (function () { 208 | + var fs = NativeModule.require('fs'); 209 | + var vm = NativeModule.require('vm'); 210 | + function readPrelude (fd) { 211 | + var PAYLOAD_POSITION = '// PAYLOAD_POSITION //' | 0; 212 | + var PAYLOAD_SIZE = '// PAYLOAD_SIZE //' | 0; 213 | + var PRELUDE_POSITION = '// PRELUDE_POSITION //' | 0; 214 | + var PRELUDE_SIZE = '// PRELUDE_SIZE //' | 0; 215 | + if (!PRELUDE_POSITION) { 216 | + // no prelude - remove entrypoint from argv[1] 217 | + process.argv.splice(1, 1); 218 | + return { undoPatch: true }; 219 | + } 220 | + var prelude = new Buffer(PRELUDE_SIZE); 221 | + var read = fs.readSync(fd, prelude, 0, PRELUDE_SIZE, PRELUDE_POSITION); 222 | + if (read !== PRELUDE_SIZE) { 223 | + console.error('Pkg: Error reading from file.'); 224 | + process.exit(1); 225 | + } 226 | + var s = new vm.Script(prelude, { filename: 'pkg/prelude/bootstrap.js' }); 227 | + var fn = s.runInThisContext(); 228 | + return fn(process, NativeModule.require, 229 | + console, fd, PAYLOAD_POSITION, PAYLOAD_SIZE); 230 | + } 231 | + (function () { 232 | + var fd = fs.openSync(process.execPath, 'r'); 233 | + var result = readPrelude(fd); 234 | + if (result && result.undoPatch) { 235 | + var bindingFs = process.binding('fs'); 236 | + fs.internalModuleStat = bindingFs.internalModuleStat; 237 | + fs.internalModuleReadFile = bindingFs.internalModuleReadFile; 238 | + fs.closeSync(fd); 239 | + } 240 | + }()); 241 | + }()); 242 | + 243 | if (NativeModule.exists('_third_party_main')) { 244 | // To allow people to extend Node in different ways, this hook allows 245 | // one to drop a file lib/_third_party_main.js into the build 246 | // directory which will be executed instead of Node's normal loading. 
247 | process.nextTick(function() { 248 | --- node/lib/module.js 249 | +++ node/lib/module.js 250 | @@ -28,14 +28,12 @@ 251 | const vm = require('vm'); 252 | const assert = require('assert').ok; 253 | const fs = require('fs'); 254 | const internalFS = require('internal/fs'); 255 | const path = require('path'); 256 | -const { 257 | - internalModuleReadFile, 258 | - internalModuleStat 259 | -} = process.binding('fs'); 260 | +const internalModuleReadFile = function (f) { return require('fs').internalModuleReadFile(f); }; 261 | +const internalModuleStat = function (f) { return require('fs').internalModuleStat(f); }; 262 | const preserveSymlinks = !!process.binding('config').preserveSymlinks; 263 | const experimentalModules = !!process.binding('config').experimentalModules; 264 | 265 | const errors = require('internal/errors'); 266 | 267 | --- node/src/env.h 268 | +++ node/src/env.h 269 | @@ -266,10 +266,11 @@ 270 | V(shell_string, "shell") \ 271 | V(signal_string, "signal") \ 272 | V(size_string, "size") \ 273 | V(sni_context_err_string, "Invalid SNI context") \ 274 | V(sni_context_string, "sni_context") \ 275 | + V(sourceless_string, "sourceless") \ 276 | V(speed_string, "speed") \ 277 | V(stack_string, "stack") \ 278 | V(status_string, "status") \ 279 | V(stdio_string, "stdio") \ 280 | V(stream_string, "stream") \ 281 | --- node/src/inspector_agent.cc 282 | +++ node/src/inspector_agent.cc 283 | @@ -485,12 +485,10 @@ 284 | &start_io_thread_async, 285 | StartIoThreadAsyncCallback)); 286 | start_io_thread_async.data = this; 287 | uv_unref(reinterpret_cast(&start_io_thread_async)); 288 | 289 | - // Ignore failure, SIGUSR1 won't work, but that should not block node start. 290 | - StartDebugSignalHandler(); 291 | if (options.inspector_enabled()) { 292 | // This will return false if listen failed on the inspector port. 
293 | return StartIoThread(options.wait_for_connect()); 294 | } 295 | return true; 296 | --- node/src/node.cc 297 | +++ node/src/node.cc 298 | @@ -3726,17 +3726,10 @@ 299 | } 300 | 301 | 302 | inline void PlatformInit() { 303 | #ifdef __POSIX__ 304 | -#if HAVE_INSPECTOR 305 | - sigset_t sigmask; 306 | - sigemptyset(&sigmask); 307 | - sigaddset(&sigmask, SIGUSR1); 308 | - const int err = pthread_sigmask(SIG_SETMASK, &sigmask, nullptr); 309 | -#endif // HAVE_INSPECTOR 310 | - 311 | // Make sure file descriptors 0-2 are valid before we start logging anything. 312 | for (int fd = STDIN_FILENO; fd <= STDERR_FILENO; fd += 1) { 313 | struct stat ignored; 314 | if (fstat(fd, &ignored) == 0) 315 | continue; 316 | @@ -3746,14 +3739,10 @@ 317 | ABORT(); 318 | if (fd != open("/dev/null", O_RDWR)) 319 | ABORT(); 320 | } 321 | 322 | -#if HAVE_INSPECTOR 323 | - CHECK_EQ(err, 0); 324 | -#endif // HAVE_INSPECTOR 325 | - 326 | #ifndef NODE_SHARED_MODE 327 | // Restore signal dispositions, the parent process may have changed them. 
328 | struct sigaction act; 329 | memset(&act, 0, sizeof(act)); 330 | 331 | --- node/src/node_contextify.cc 332 | +++ node/src/node_contextify.cc 333 | @@ -57,10 +57,11 @@ 334 | using v8::String; 335 | using v8::Symbol; 336 | using v8::TryCatch; 337 | using v8::Uint8Array; 338 | using v8::UnboundScript; 339 | +using v8::V8; 340 | using v8::Value; 341 | using v8::WeakCallbackInfo; 342 | 343 | namespace { 344 | 345 | @@ -573,18 +574,20 @@ 346 | MaybeLocal lineOffset = GetLineOffsetArg(env, options); 347 | MaybeLocal columnOffset = GetColumnOffsetArg(env, options); 348 | Maybe maybe_display_errors = GetDisplayErrorsArg(env, options); 349 | MaybeLocal cached_data_buf = GetCachedData(env, options); 350 | Maybe maybe_produce_cached_data = GetProduceCachedData(env, options); 351 | + Maybe maybe_sourceless = GetSourceless(env, options); 352 | MaybeLocal maybe_context = GetContext(env, options); 353 | if (try_catch.HasCaught()) { 354 | try_catch.ReThrow(); 355 | return; 356 | } 357 | 358 | bool display_errors = maybe_display_errors.ToChecked(); 359 | bool produce_cached_data = maybe_produce_cached_data.ToChecked(); 360 | + bool sourceless = maybe_sourceless.ToChecked(); 361 | 362 | ScriptCompiler::CachedData* cached_data = nullptr; 363 | Local ui8; 364 | if (cached_data_buf.ToLocal(&ui8)) { 365 | ArrayBuffer::Contents contents = ui8->Buffer()->GetContents(); 366 | @@ -604,22 +607,37 @@ 367 | else if (produce_cached_data) 368 | compile_options = ScriptCompiler::kProduceCodeCache; 369 | 370 | Context::Scope scope(maybe_context.FromMaybe(env->context())); 371 | 372 | + if (sourceless && compile_options == ScriptCompiler::kProduceCodeCache) { 373 | + V8::EnableCompilationForSourcelessUse(); 374 | + } 375 | + 376 | MaybeLocal v8_script = ScriptCompiler::CompileUnboundScript( 377 | env->isolate(), 378 | &source, 379 | compile_options); 380 | 381 | + if (sourceless && compile_options == ScriptCompiler::kProduceCodeCache) { 382 | + V8::DisableCompilationForSourcelessUse(); 383 | + 
} 384 | + 385 | if (v8_script.IsEmpty()) { 386 | if (display_errors) { 387 | DecorateErrorStack(env, try_catch); 388 | } 389 | try_catch.ReThrow(); 390 | return; 391 | } 392 | + 393 | + if (sourceless && compile_options == ScriptCompiler::kConsumeCodeCache) { 394 | + if (!source.GetCachedData()->rejected) { 395 | + V8::FixSourcelessScript(env->isolate(), v8_script.ToLocalChecked()); 396 | + } 397 | + } 398 | + 399 | contextify_script->script_.Reset(env->isolate(), 400 | v8_script.ToLocalChecked()); 401 | 402 | if (compile_options == ScriptCompiler::kConsumeCodeCache) { 403 | args.This()->Set( 404 | @@ -913,10 +931,26 @@ 405 | Local value = maybe_value.ToLocalChecked(); 406 | return Just(value->IsTrue()); 407 | } 408 | 409 | 410 | + static Maybe GetSourceless(Environment* env, Local options) { 411 | + if (!options->IsObject()) { 412 | + return Just(false); 413 | + } 414 | + 415 | + MaybeLocal maybe_value = 416 | + options.As()->Get(env->context(), 417 | + env->sourceless_string()); 418 | + if (maybe_value.IsEmpty()) 419 | + return Nothing(); 420 | + 421 | + Local value = maybe_value.ToLocalChecked(); 422 | + return Just(value->IsTrue()); 423 | + } 424 | + 425 | + 426 | static MaybeLocal GetLineOffsetArg(Environment* env, 427 | Local options) { 428 | Local defaultLineOffset = Integer::New(env->isolate(), 0); 429 | 430 | if (!options->IsObject()) { 431 | --- node/src/node_debug_options.cc 432 | +++ node/src/node_debug_options.cc 433 | @@ -59,10 +59,11 @@ 434 | deprecated_debug_(false), 435 | break_first_line_(false), 436 | host_name_("127.0.0.1"), port_(-1) { } 437 | 438 | bool DebugOptions::ParseOption(const char* argv0, const std::string& option) { 439 | + return false; 440 | bool has_argument = false; 441 | std::string option_name; 442 | std::string argument; 443 | 444 | auto pos = option.find("="); 445 | --- node/src/node_main.cc 446 | +++ node/src/node_main.cc 447 | @@ -20,10 +20,12 @@ 448 | // USE OR OTHER DEALINGS IN THE SOFTWARE. 
449 | 450 | #include "node.h" 451 | #include 452 | 453 | +int reorder(int argc, char** argv); 454 | + 455 | #ifdef _WIN32 456 | #include 457 | #include 458 | #include 459 | 460 | @@ -67,11 +69,11 @@ 461 | exit(1); 462 | } 463 | } 464 | argv[argc] = nullptr; 465 | // Now that conversion is done, we can finally start. 466 | - return node::Start(argc, argv); 467 | + return reorder(argc, argv); 468 | } 469 | #else 470 | // UNIX 471 | #ifdef __linux__ 472 | #include 473 | @@ -119,8 +121,75 @@ 474 | #endif 475 | // Disable stdio buffering, it interacts poorly with printf() 476 | // calls elsewhere in the program (e.g., any logging from V8.) 477 | setvbuf(stdout, nullptr, _IONBF, 0); 478 | setvbuf(stderr, nullptr, _IONBF, 0); 479 | - return node::Start(argc, argv); 480 | + return reorder(argc, argv); 481 | } 482 | #endif 483 | + 484 | +#include 485 | + 486 | +int strlen2 (char* s) { 487 | + int len = 0; 488 | + while (*s) { 489 | + len += 1; 490 | + s += 1; 491 | + } 492 | + return len; 493 | +} 494 | + 495 | +bool should_set_dummy() { 496 | +#ifdef _WIN32 497 | + #define MAX_ENV_LENGTH 32767 498 | + char execpath_env[MAX_ENV_LENGTH]; 499 | + DWORD result = GetEnvironmentVariable("PKG_EXECPATH", execpath_env, MAX_ENV_LENGTH); 500 | + if (result == 0 && GetLastError() != ERROR_SUCCESS) return true; 501 | + return strcmp(execpath_env, "PKG_INVOKE_NODEJS") != 0; 502 | +#else 503 | + const char* execpath_env = getenv("PKG_EXECPATH"); 504 | + if (!execpath_env) return true; 505 | + return strcmp(execpath_env, "PKG_INVOKE_NODEJS") != 0; 506 | +#endif 507 | +} 508 | + 509 | +// for uv_setup_args 510 | +int adjacent(int argc, char** argv) { 511 | + size_t size = 0; 512 | + for (int i = 0; i < argc; i++) { 513 | + size += strlen(argv[i]) + 1; 514 | + } 515 | + char* args = new char[size]; 516 | + size_t pos = 0; 517 | + for (int i = 0; i < argc; i++) { 518 | + memcpy(&args[pos], argv[i], strlen(argv[i]) + 1); 519 | + argv[i] = &args[pos]; 520 | + pos += strlen(argv[i]) + 1; 521 | 
+ } 522 | + return node::Start(argc, argv); 523 | +} 524 | + 525 | +volatile char* BAKERY = (volatile char*) "\0// BAKERY // BAKERY " \ 526 | + "// BAKERY // BAKERY // BAKERY // BAKERY // BAKERY // BAKERY " \ 527 | + "// BAKERY // BAKERY // BAKERY // BAKERY // BAKERY // BAKERY " \ 528 | + "// BAKERY // BAKERY // BAKERY // BAKERY // BAKERY // BAKERY "; 529 | + 530 | +int reorder(int argc, char** argv) { 531 | + int i; 532 | + char** nargv = new char*[argc + 64]; 533 | + int c = 0; 534 | + nargv[c++] = argv[0]; 535 | + char* bakery = (char*) BAKERY; 536 | + while (true) { 537 | + size_t width = strlen2(bakery); 538 | + if (width == 0) break; 539 | + nargv[c++] = bakery; 540 | + bakery += width + 1; 541 | + } 542 | + if (should_set_dummy()) { 543 | + nargv[c++] = (char*) "PKG_DUMMY_ENTRYPOINT"; 544 | + } 545 | + for (i = 1; i < argc; i++) { 546 | + nargv[c++] = argv[i]; 547 | + } 548 | + return adjacent(c, nargv); 549 | +} 550 | -------------------------------------------------------------------------------- /patches/node.v14.21.3.cpp.patch: -------------------------------------------------------------------------------- 1 | --- node/common.gypi 2 | +++ node/common.gypi 3 | @@ -171,7 +171,7 @@ 4 | 'MSVC_runtimeType': 2 # MultiThreadedDLL (/MD) 5 | }], 6 | ['llvm_version=="0.0"', { 7 | - 'lto': ' -flto=4 -fuse-linker-plugin -ffat-lto-objects ', # GCC 8 | + 'lto': ' -flto=4 -ffat-lto-objects ', # GCC 9 | }, { 10 | 'lto': ' -flto ', # Clang 11 | }], 12 | --- node/deps/v8/include/v8.h 13 | +++ node/deps/v8/include/v8.h 14 | @@ -9633,6 +9633,10 @@ class V8_EXPORT V8 { 15 | char** argv, 16 | bool remove_flags); 17 | 18 | + static void EnableCompilationForSourcelessUse(); 19 | + static void DisableCompilationForSourcelessUse(); 20 | + static void FixSourcelessScript(Isolate* v8_isolate, Local script); 21 | + 22 | /** Get the version string. 
*/ 23 | static const char* GetVersion(); 24 | 25 | --- node/deps/v8/src/api/api.cc 26 | +++ node/deps/v8/src/api/api.cc 27 | @@ -840,6 +840,34 @@ void V8::SetFlagsFromCommandLine(int* argc, char** argv, bool remove_flags) { 28 | i::FlagList::SetFlagsFromCommandLine(argc, argv, remove_flags); 29 | } 30 | 31 | + 32 | +bool save_lazy; 33 | +bool save_predictable; 34 | + 35 | + 36 | +void V8::EnableCompilationForSourcelessUse() { 37 | + save_lazy = i::FLAG_lazy; 38 | + i::FLAG_lazy = false; 39 | + save_predictable = i::FLAG_predictable; 40 | + i::FLAG_predictable = true; 41 | +} 42 | + 43 | + 44 | +void V8::DisableCompilationForSourcelessUse() { 45 | + i::FLAG_lazy = save_lazy; 46 | + i::FLAG_predictable = save_predictable; 47 | +} 48 | + 49 | + 50 | +void V8::FixSourcelessScript(Isolate* v8_isolate, Local unbound_script) { 51 | + auto isolate = reinterpret_cast(v8_isolate); 52 | + auto function_info = 53 | + i::Handle::cast(Utils::OpenHandle(*unbound_script)); 54 | + i::Handle script(i::Script::cast(function_info->script()), isolate); 55 | + script->set_source(i::ReadOnlyRoots(isolate).undefined_value()); 56 | +} 57 | + 58 | + 59 | RegisteredExtension* RegisteredExtension::first_extension_ = nullptr; 60 | 61 | RegisteredExtension::RegisteredExtension(std::unique_ptr extension) 62 | --- node/deps/v8/src/codegen/compiler.cc 63 | +++ node/deps/v8/src/codegen/compiler.cc 64 | @@ -2365,7 +2365,7 @@ MaybeHandle Compiler::GetSharedFunctionInfoForScript( 65 | source, script_details.name_obj, script_details.line_offset, 66 | script_details.column_offset, origin_options, isolate->native_context(), 67 | language_mode); 68 | - if (!maybe_result.is_null()) { 69 | + if (!maybe_result.is_null() && source_length) { 70 | compile_timer.set_hit_isolate_cache(); 71 | } else if (can_consume_code_cache) { 72 | compile_timer.set_consuming_code_cache(); 73 | --- node/deps/v8/src/objects/js-objects.cc 74 | +++ node/deps/v8/src/objects/js-objects.cc 75 | @@ -5514,6 +5514,9 @@ Handle 
JSFunction::ToString(Handle function) { 76 | Handle maybe_class_positions = JSReceiver::GetDataProperty( 77 | function, isolate->factory()->class_positions_symbol()); 78 | if (maybe_class_positions->IsClassPositions()) { 79 | + if (String::cast(Script::cast(shared_info->script()).source()).IsUndefined(isolate)) { 80 | + return isolate->factory()->NewStringFromAsciiChecked("class {}"); 81 | + } 82 | ClassPositions class_positions = 83 | ClassPositions::cast(*maybe_class_positions); 84 | int start_position = class_positions.start(); 85 | --- node/deps/v8/src/objects/shared-function-info-inl.h 86 | +++ node/deps/v8/src/objects/shared-function-info-inl.h 87 | @@ -505,6 +505,14 @@ bool SharedFunctionInfo::ShouldFlushBytecode(BytecodeFlushMode mode) { 88 | Object data = function_data(); 89 | if (!data.IsBytecodeArray()) return false; 90 | 91 | + Object script_obj = script(); 92 | + if (!script_obj.IsUndefined()) { 93 | + Script script = Script::cast(script_obj); 94 | + if (script.source().IsUndefined()) { 95 | + return false; 96 | + } 97 | + } 98 | + 99 | if (mode == BytecodeFlushMode::kStressFlushBytecode) return true; 100 | 101 | BytecodeArray bytecode = BytecodeArray::cast(data); 102 | --- node/deps/v8/src/parsing/parsing.cc 103 | +++ node/deps/v8/src/parsing/parsing.cc 104 | @@ -42,6 +42,7 @@ bool ParseProgram(ParseInfo* info, Handle