├── .dockerignore ├── .gitignore ├── src ├── api │ ├── index.ts │ ├── bundler.test.ts │ └── bundler.ts ├── common │ ├── error.ts │ ├── buffer.ts │ ├── footer.ts │ └── filesystem.ts └── patches │ ├── thirdPartyMain.ts │ ├── nbin.ts │ └── fs.ts ├── .gitmodules ├── scripts ├── webpack_build.sh ├── test.sh ├── patch_apply.sh ├── patch_gen.sh ├── vars.sh ├── mac_build.sh ├── ci.sh ├── node_build.sh └── docker_build.sh ├── tsconfig.json ├── example ├── src │ └── hello.js └── build.ts ├── .npmignore ├── aarch64-alpine.dockerfile ├── armhf-alpine.dockerfile ├── alpine.dockerfile ├── aarch64.dockerfile ├── armhf.dockerfile ├── centos.dockerfile ├── LICENSE ├── package.json ├── .travis.yml ├── webpack.config.js ├── README.md ├── typings └── nbin.d.ts └── node.patch /.dockerignore: -------------------------------------------------------------------------------- 1 | * -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | yarn-error.log 2 | node_modules 3 | out 4 | build 5 | .vscode 6 | *.tgz -------------------------------------------------------------------------------- /src/api/index.ts: -------------------------------------------------------------------------------- 1 | import { Binary } from "./bundler"; 2 | 3 | export { Binary }; 4 | 5 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "lib/node"] 2 | path = lib/node 3 | url = https://github.com/nodejs/node 4 | -------------------------------------------------------------------------------- /scripts/webpack_build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | cd "$(dirname "$0")" 4 | cd .. 5 | npx webpack --config webpack.config.js $* -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es6", 4 | "module": "commonjs", 5 | "outDir": "out" 6 | } 7 | } -------------------------------------------------------------------------------- /scripts/test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | cd "$(dirname "$0")" 4 | cd .. 
5 | rm lib/node/test/fixtures/packages/unparseable/package.json 6 | npm run test 7 | -------------------------------------------------------------------------------- /scripts/patch_apply.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | cd "$(dirname "$0")" 4 | cd ../lib/node 5 | git apply --unidiff-zero --ignore-space-change ../../node.patch 6 | -------------------------------------------------------------------------------- /example/src/hello.js: -------------------------------------------------------------------------------- 1 | console.log("Hey there!"); 2 | 3 | console.log("here is a banana"); 4 | 5 | console.log("frogger"); 6 | 7 | console.log("built with nbin", process.versions.nbin); 8 | 9 | -------------------------------------------------------------------------------- /scripts/patch_gen.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | cd "$(dirname "$0")" 4 | cd ../lib/node 5 | git diff -w -- lib/internal/modules/cjs/loader.js lib/internal/bootstrap/node.js src/node.cc --unified=0 > ../../node.patch 6 | -------------------------------------------------------------------------------- /src/common/error.ts: -------------------------------------------------------------------------------- 1 | export const createNotFound = (): NodeJS.ErrnoException => { 2 | const e = new Error("File not found"); 3 | Object.defineProperty(e, "code", { value: "ENOENT" }); 4 | return e; 5 | }; 6 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | .gitignore 2 | .travis.yml 3 | .vscode 4 | lib 5 | yarn-error.log 6 | node_modules 7 | scripts 8 | src 9 | node.patch 10 | tsconfig.json 11 | webpack.config.js 12 | yarn.lock 13 | example 14 | build 15 | *.tgz -------------------------------------------------------------------------------- /scripts/vars.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | NODE_VERSION=10.15.1 4 | PACKAGE_VERSION=$(cat ../package.json \ 5 | | grep version \ 6 | | head -1 \ 7 | | awk -F: '{ print $2 }' \ 8 | | sed 's/[",]//g' \ 9 | | tr -d '[[:space:]]') -------------------------------------------------------------------------------- /aarch64-alpine.dockerfile: -------------------------------------------------------------------------------- 1 | FROM balenalib/aarch64-alpine-node:10.15-edge-build 2 | 3 | RUN ["cross-build-start"] 4 | 5 | RUN apk add --no-cache --no-progress bash gcc g++ ccache git make python linux-headers 6 | 7 | RUN ["cross-build-end"] 8 | -------------------------------------------------------------------------------- /armhf-alpine.dockerfile: -------------------------------------------------------------------------------- 1 | FROM balenalib/armv7hf-alpine-node:10.15-edge-build 2 | 3 | RUN ["cross-build-start"] 4 | 5 | RUN apk add --no-cache --no-progress bash gcc g++ ccache git make python linux-headers 6 | 7 | RUN ["cross-build-end"] 8 | -------------------------------------------------------------------------------- /alpine.dockerfile: -------------------------------------------------------------------------------- 1 | # Very basic image for building nbin target for alpine 2 | # Eventually should be automated within the CI 3 | FROM node:10.15.1-alpine 4 | 5 | RUN apk add --no-cache --no-progress bash gcc g++ ccache git make python linux-headers 6 | 
-------------------------------------------------------------------------------- /aarch64.dockerfile: -------------------------------------------------------------------------------- 1 | FROM balenalib/aarch64-debian-node:10.15-jessie-build 2 | 3 | RUN ["cross-build-start"] 4 | 5 | RUN apt-get update && apt-get -y install build-essential linux-headers-3.16.0-6-all-arm64 linux-headers-3.16.0-6-common gcc g++ ccache git make 6 | 7 | RUN ["cross-build-end"] 8 | -------------------------------------------------------------------------------- /armhf.dockerfile: -------------------------------------------------------------------------------- 1 | FROM balenalib/armv7hf-debian-node:10.15-jessie-build 2 | 3 | RUN ["cross-build-start"] 4 | 5 | RUN apt-get update && apt-get -y install build-essential linux-headers-3.16.0-9-all-armhf linux-headers-3.16.0-9-common gcc g++ ccache git make 6 | 7 | RUN ["cross-build-end"] 8 | -------------------------------------------------------------------------------- /scripts/mac_build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | cd "$(dirname "$0")" 4 | source ./vars.sh 5 | cd ../lib/node 6 | 7 | export CCACHE_DIR="/ccache" 8 | ./build.sh 9 | 10 | ARCH=$(uname -m) 11 | BINARY_NAME="node-${NODE_VERSION}-darwin-x64" 12 | 13 | cp ./out/Release/node ../../build/$PACKAGE_VERSION/$BINARY_NAME 14 | -------------------------------------------------------------------------------- /scripts/ci.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euxo pipefail 4 | 5 | cd "$(dirname "$0")" 6 | source ./vars.sh 7 | 8 | ./patch_apply.sh 9 | ./webpack_build.sh 10 | 11 | cp ./node_build.sh ../lib/node/build.sh 12 | mkdir -p ../build/$PACKAGE_VERSION 13 | rm ../lib/node/test/fixtures/packages/unparseable/package.json 14 | 15 | if [[ "$OSTYPE" == "darwin"* ]]; then 16 | ./mac_build.sh 17 | else 18 | ./docker_build.sh 19 | fi 20 | -------------------------------------------------------------------------------- /example/build.ts: -------------------------------------------------------------------------------- 1 | import { mkdirpSync, writeFileSync } from "fs-extra"; 2 | import * as path from "path"; 3 | import { Binary } from "../out/api"; 4 | 5 | const dirName = path.resolve(__dirname); 6 | const bin = new Binary({ 7 | nodePath: path.join(dirName, "..", "lib", "node", "out", "Release", "node"), 8 | mainFile: path.join(dirName, "src", "hello.js"), 9 | }); 10 | bin.writeFiles(path.join(dirName, "src/**")); 11 | bin.build().then((buffer) => { 12 | const outDir = path.join(dirName, "out"); 13 | mkdirpSync(outDir); 14 | writeFileSync(path.join(outDir, "example"), buffer, { 15 | mode: "755", 16 | }); 17 | }); -------------------------------------------------------------------------------- /src/patches/thirdPartyMain.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Application entrypoint. 3 | * 4 | * Patched to execute then continue normal node instantiation. 5 | * 6 | * We can set `process.argv` here to easily passthrough. 7 | */ 8 | import * as nbin from "nbin"; 9 | 10 | /** 11 | * Specify the version of nbin this binary was built with. This is 12 | * automatically replaced by webpack. 13 | */ 14 | process.versions.nbin = ''; 15 | 16 | /** 17 | * If bypassing nbin don't touch a thing. 
18 | */ 19 | if (!process.env.NBIN_BYPASS) { 20 | if (!process.send) { 21 | process.argv.splice(1, 0, nbin.mainFile); 22 | } 23 | } 24 | 25 | -------------------------------------------------------------------------------- /scripts/node_build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | cd "$(dirname "$0")" 4 | set -euxo pipefail 5 | 6 | export CC="ccache gcc" 7 | export CXX="ccache g++" 8 | if [[ "$OSTYPE" == "darwin"* ]]; then 9 | export CCACHE_DIR="~/.ccache" 10 | export cores="2" 11 | else 12 | export CCACHE_DIR="/ccache" 13 | export cores=$(cat /proc/cpuinfo | awk '/^processor/{print $3}' | wc -l) 14 | fi 15 | ./configure --link-module './nbin.js' --link-module './lib/_third_party_main.js' --dest-cpu=x64 --openssl-no-asm --openssl-use-def-ca-store 16 | make -j$cores & 17 | pid=$! 18 | ( 19 | while true; do 20 | echo Compiling... 21 | sleep 60 22 | done 23 | ) & 24 | subshell=$! 25 | wait "$pid" 26 | kill -9 $subshell 27 | -------------------------------------------------------------------------------- /centos.dockerfile: -------------------------------------------------------------------------------- 1 | FROM centos:7 2 | 3 | RUN yum install -y centos-release-scl 4 | RUN yum-config-manager --enable rhel-server-rhscl-7-rpms 5 | RUN yum install -y devtoolset-6 6 | RUN yum install -y gcc-c++ 7 | RUN rpm -Uvh http://dl.fedoraproject.org/pub/epel/7/x86_64/Packages/c/ccache-3.3.4-1.el7.x86_64.rpm 8 | 9 | RUN mkdir /root/node 10 | RUN cd /root/node && curl https://nodejs.org/dist/v10.15.3/node-v10.15.3-linux-x64.tar.xz | tar xJ --strip-components=1 -- 11 | RUN ln -s /root/node/bin/node /usr/bin/node 12 | ENV PATH "$PATH:/root/node/bin" 13 | RUN npm install -g yarn 14 | 15 | # VOLUME ~/.ccache /ccache 16 | # RUN source /opt/rh/devtoolset-6/enable && scripts/node_build.sh 17 | # RUN npm rebuild 18 | # RUN npm 19 | -------------------------------------------------------------------------------- /src/common/buffer.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Write a string to a buffer. 3 | */ 4 | export const writeString = (buffer: Buffer, value: string, offset: number = 0): number => { 5 | const len = Buffer.byteLength(value); 6 | buffer.writeUInt16BE(len, offset); 7 | offset += 2; 8 | offset += buffer.write(value, offset, "utf8"); 9 | return offset; 10 | }; 11 | 12 | /** 13 | * Read a string from a buffer. 14 | */ 15 | export const readString = (buffer: Buffer, offset: number): { 16 | readonly value: string; 17 | readonly offset: number; 18 | } => { 19 | const len = buffer.readUInt16BE(offset); 20 | offset += 2; 21 | const value = buffer.slice(offset, offset + len).toString("utf8"); 22 | offset += len; 23 | return { 24 | offset, 25 | value, 26 | }; 27 | }; 28 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License 2 | 3 | Copyright (c) 2019 Coder Technologies Inc. 
4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of 6 | this software and associated documentation files (the "Software"), to deal in 7 | the Software without restriction, including without limitation the rights to 8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software is furnished to do so, 10 | subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 17 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 18 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 19 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 20 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -------------------------------------------------------------------------------- /src/common/footer.ts: -------------------------------------------------------------------------------- 1 | import * as fs from "fs"; 2 | 3 | export interface Footer { 4 | readonly headerLength: number; 5 | readonly headerOffset: number; 6 | readonly contentLength: number; 7 | readonly contentOffset: number; 8 | } 9 | 10 | /** 11 | * Reads a footer from a file descriptor with the size of the file 12 | */ 13 | export const readFooter = (fd: number, size: number): Footer => { 14 | const buffer = Buffer.allocUnsafe(16); 15 | fs.readSync(fd, buffer, 0, 16, size - 16); 16 | const headerLength = buffer.readUInt32BE(0); 17 | const headerOffset = buffer.readUInt32BE(4); 18 | const contentLength = buffer.readUInt32BE(8); 19 | const contentOffset = buffer.readUInt32BE(12); 20 | 21 | return { 22 | headerLength, 23 | headerOffset, 24 | contentLength, 25 | contentOffset, 26 | }; 27 | }; 28 | 29 | /** 30 | * Creates a footer defining byte lengths and offsets 31 | */ 32 | export const createFooter = (headerLength: number, headerOffset: number, contentLength: number, contentOffset: number): Buffer => { 33 | const buffer = Buffer.allocUnsafe(16); 34 | buffer.writeUInt32BE(headerLength, 0); 35 | buffer.writeUInt32BE(headerOffset, 4); 36 | buffer.writeUInt32BE(contentLength, 8); 37 | buffer.writeUInt32BE(contentOffset, 12); 38 | return buffer; 39 | }; -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@coder/nbin", 3 | "version": "1.2.2", 4 | "description": "Fast and robust node.js binary compiler.", 5 | "main": "out/api.js", 6 | "scripts": { 7 | "prepare": "./scripts/webpack_build.sh", 8 | "test": "node node_modules/.bin/jest" 9 | }, 10 | "typings": "typings/nbin.d.ts", 11 | "author": "Coder", 12 | "license": "MIT", 13 | "devDependencies": { 14 | "@types/fs-extra": "^5.0.5", 15 | "@types/glob": "^7.1.1", 16 | "@types/jest": "^24.0.11", 17 | "@types/node": "^11.11.4", 18 | "@types/node-fetch": "^2.1.7", 19 | "@types/webpack": "^4.4.26", 20 | "happypack": "^5.0.1", 21 | "jest": "^24.5.0", 22 | "node-loader": "^0.6.0", 23 | "node-pty": "^0.8.1", 24 | "string-replace-loader": "^2.1.1", 25 | "ts-jest": "^24.0.0", 26 | "ts-loader": "^5.3.3", 27 | "typescript": "^3.3.4000", 28 | "webpack": "^4.29.6", 29 | 
"webpack-cli": "^3.3.3", 30 | "webpack-dev-server": "^3.2.1" 31 | }, 32 | "jest": { 33 | "moduleFileExtensions": [ 34 | "ts", 35 | "tsx", 36 | "js", 37 | "json" 38 | ], 39 | "transform": { 40 | "^.+\\.tsx?$": "ts-jest" 41 | }, 42 | "testPathIgnorePatterns": [ 43 | "/node_modules/", 44 | "/node/test" 45 | ], 46 | "testRegex": "src/.*\\.test\\.tsx?" 47 | }, 48 | "dependencies": { 49 | "@coder/logger": "^1.1.8", 50 | "fs-extra": "^7.0.1", 51 | "glob": "^7.1.3", 52 | "node-fetch": "^2.3.0", 53 | "ora": "^3.2.0" 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | osx_image: xcode10.2 3 | node_js: 4 | - 10.15.1 5 | services: 6 | - docker 7 | cache: 8 | directories: 9 | - /home/travis/.ccache/ 10 | - /home/travis/.centos-ccache/ 11 | - /home/travis/.alpine-ccache/ 12 | before_install: 13 | - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew install ccache; fi 14 | - export PATH="/usr/local/opt/ccache/libexec:$PATH" 15 | - eval "$(MATRIX_EVAL)" 16 | matrix: 17 | include: 18 | - os: linux 19 | env: 20 | - TARGET="alpine" 21 | # - os: linux 22 | # env: 23 | # - TARGET="armv7hf-alpine" 24 | # - os: linux 25 | # env: 26 | # - TARGET="aarch64-alpine" 27 | # - os: linux 28 | # env: 29 | # - TARGET="aarch64" 30 | # - os: linux 31 | # env: 32 | # - TARGET="armv7hf" 33 | - os: linux 34 | env: 35 | - TARGET="centos" 36 | - os: osx 37 | script: scripts/ci.sh 38 | deploy: 39 | provider: gcs 40 | access_key_id: GOOGZVNTORC5JOVTMMRVCXAM 41 | secret_access_key: 42 | secure: emlYmPkMTQZykYh1V+mDSoaSyPp5SRWXUxPnJqAvSqM5JZbKLCzjKS6zOpTd6YNZBgklViPud9hMuAkNXH1ApkpcDFqlBG/DzcvgdJO0m+a2n6uUfP9U9O0GBmGc78Fz5kWCip/uckZeA8ZnpU+2He695qIlt2q1ENmKOhIJX7jd6kCxYFfOSadugPkdDMrpmGGEJ4O5KZU1ceAL+bFT0k+nF5Aj578Gih80HzIIlyX/7lIdx4fBV+B3qpNSwgiIZompDeUgFaGyAbbuecSQKmUrG1/4sn9VlJrNR4pFK9ZAldG3K9xR6fshR9cF1GMheX2XbyOk2W/rgn6pVQIV5JLNtwS8trGyGL90wUwSdiSTUd6y/UaOGfudO9QgDo5/nzMf6Ag8AMMr3IIvgbBYR8dUVKFupbhtJOTWdOA0EDlnW800YDGILSGc9a0BjUkuVX3QeGhwAeWYiQ9P5oXA55MaRfADA30dFtVzj9/k/4NQvPME/rvUQxn2jZsalHFPkuaPUVpTfZXyUVwg71cxictRINA7EugfuaqEX/6Imd92NJlrAg1bwr/BQrrbvTnGD6j4YOPT5LlaGcJhmycMC6rxIVD/ss/yGz0X7NS8tv1L16CgjZ/QXoUUpj/jbGerm4LjjPoUOukO0s470NrgBy9ps0a34feWNhU2zitXXvQ= 43 | bucket: nbin.cdr.sh 44 | skip_cleanup: true 45 | local-dir: build 46 | on: 47 | branch: master 48 | repo: cdr/nbin 49 | -------------------------------------------------------------------------------- /webpack.config.js: -------------------------------------------------------------------------------- 1 | const path = require("path"); 2 | const os = require("os"); 3 | const environment = process.env.NODE_ENV || "development"; 4 | const HappyPack = require("happypack"); 5 | const webpack = require("webpack"); 6 | 7 | const root = path.resolve(__dirname); 8 | const pkg = require("./package.json"); 9 | 10 | const baseConfig = { 11 | context: root, 12 | devtool: "none", 13 | target: "node", 14 | module: { 15 | rules: [{ 16 | use: [ 17 | { 18 | loader: "string-replace-loader", 19 | options: { 20 | search: "", 21 | replace: pkg.version, 22 | }, 23 | }, 24 | "happypack/loader?id=ts", 25 | ], 26 | test: /(^.?|\.[^d]|[^.]d|[^.][^d])\.tsx?$/, 27 | }, { 28 | use: [{ 29 | loader: "node-loader", 30 | }], 31 | test: /\.node$/, 32 | }], 33 | }, 34 | mode: "production", 35 | node: { 36 | __dirname: true, 37 | }, 38 | plugins: [ 39 | new HappyPack({ 40 | id: "ts", 41 | threads: Math.max(os.cpus().length - 1, 1), 42 | 
loaders: [{ 43 | path: "ts-loader", 44 | query: { 45 | happyPackMode: true, 46 | }, 47 | }], 48 | }), 49 | new webpack.DefinePlugin({ 50 | "process.env.NODE_ENV": `"${environment}"`, 51 | }), 52 | ], 53 | resolve: { 54 | extensions: [".js", ".jsx", ".ts", ".tsx"], 55 | }, 56 | stats: { 57 | all: false, // Fallback for options not defined. 58 | errors: true, 59 | warnings: true, 60 | }, 61 | }; 62 | 63 | module.exports = [ 64 | { 65 | ...baseConfig, 66 | entry: path.join(root, "src", "patches", "thirdPartyMain.ts"), 67 | output: { 68 | path: path.join(root, "lib", "node", "lib"), 69 | filename: "_third_party_main.js", 70 | }, 71 | externals: { 72 | nbin: "commonjs nbin", 73 | }, 74 | }, 75 | { 76 | ...baseConfig, 77 | entry: path.join(root, "src", "patches", "nbin.ts"), 78 | output: { 79 | path: path.join(root, "lib", "node"), 80 | filename: "nbin.js", 81 | libraryTarget: "commonjs", 82 | }, 83 | externals: { 84 | nbin: "commonjs nbin", 85 | }, 86 | }, 87 | { 88 | ...baseConfig, 89 | entry: path.join(root, "src", "api", "index.ts"), 90 | output: { 91 | path: path.join(root, "out"), 92 | filename: "api.js", 93 | libraryTarget: "commonjs", 94 | }, 95 | } 96 | ]; 97 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # nbin 2 | 3 | [!["Open Issues"](https://img.shields.io/github/issues-raw/cdr/nbin.svg)](https://github.com/cdr/nbin/issues) 4 | [!["Version"](https://img.shields.io/npm/v/@coder/nbin.svg)](https://www.npmjs.com/package/@coder/nbin) 5 | [![MIT license](https://img.shields.io/badge/license-MIT-green.svg)](#) 6 | [![Discord](https://discordapp.com/api/guilds/463752820026376202/widget.png)](https://discord.gg/zxSwN8Z) 7 | 8 | Fast and robust node.js binary compiler. 9 | 10 | **WARNING:** This project was created for [code-server](https://github.com/cdr/code-server) and may provide limited support. 11 | 12 | Why was this made? Why not use `pkg` or `nexe`? 13 | 14 | - Support for native node modules. 15 | - No magic. The user specifies all customization. An example of this is overriding the filesystem. 16 | - First-class support for multiple platforms. 17 | 18 | ## Usage 19 | 20 | We *highly* recommend using webpack to bundle your sources. We do not scan source files for modules to include. 21 | 22 | When running within the binary, your application will have access to a module named [`nbin`](typings/nbin.d.ts). 23 | 24 | Two packages are provided: 25 | - `@coder/nbin` - available as an API to build binaries. 26 | - `nbin` - *ONLY* available within your binary. 27 | 28 | ### Example 29 | 30 | ```ts 31 | import { Binary } from "@coder/nbin"; 32 | 33 | const bin = new Binary({ 34 | mainFile: "out/cli.js", 35 | }); 36 | 37 | bin.writeFile("out/cli.js", Buffer.from("console.log('hi');")); 38 | const output = await bin.build(); 39 | ``` 40 | 41 | ### Forks 42 | 43 | `child_process.fork` works as expected. `nbin` will treat the compiled binary as the original node binary when the `process.send` function is available. 44 | 45 | ### Webpack 46 | 47 | If you are using webpack to bundle your `main`, you'll need to externalize modules. 48 | 49 | ```ts 50 | // webpack.config.js 51 | 52 | module.exports = { 53 | ... 54 | externals: { 55 | nbin: "commonjs nbin", 56 | // Additional modules to exclude 57 | }, 58 | }; 59 | ``` 60 | 61 | ### Environment 62 | 63 | You can pass [`NODE_OPTIONS`](https://nodejs.org/api/cli.html#cli_node_options_options).
64 | 65 | ```bash 66 | NODE_OPTIONS="--inspect-brk" ./path/to/bin 67 | ``` 68 | 69 | Gzip'd JavaScript files are supported to reduce bundle size. 70 | -------------------------------------------------------------------------------- /scripts/docker_build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eoux pipefail 4 | 5 | cd "$(dirname "$0")" 6 | source ./vars.sh 7 | 8 | # Variables to be set: 9 | # $CACHE_DIR 10 | # $IMAGE 11 | # $PREBUILD_COMMAND 12 | # $BINARY_NAME 13 | function docker_build() { 14 | case "$IMAGE" in 15 | *armv7hf* | armv7hf | aarch64 | *aarch64*) 16 | containerID=$(docker create --network=host -it -v $HOME/$CACHE_DIR:/ccache $IMAGE) 17 | docker start $containerID 18 | docker exec $containerID mkdir /src 19 | 20 | function exec() { 21 | docker exec $containerID bash -c "$@" 22 | } 23 | 24 | docker cp ../. $containerID:/src 25 | exec "cross-build-start" 26 | exec "$PREBUILD_COMMAND/src/lib/node/build.sh" 27 | exec "cd /src && npm rebuild" 28 | exec "cd /src && npm test" 29 | exec "cross-build-end" 30 | docker cp $containerID:/src/lib/node/out/Release/node ../build/$PACKAGE_VERSION/$BINARY_NAME 31 | ;; 32 | *) 33 | containerID=$(docker create --network=host -it -v $HOME/$CACHE_DIR:/ccache $IMAGE) 34 | docker start $containerID 35 | docker exec $containerID mkdir /src 36 | 37 | function exec() { 38 | docker exec $containerID bash -c "$@" 39 | } 40 | 41 | docker cp ../. $containerID:/src 42 | exec "$PREBUILD_COMMAND/src/lib/node/build.sh" 43 | exec "cd /src && npm rebuild" 44 | exec "cd /src && npm test" 45 | docker cp $containerID:/src/lib/node/out/Release/node ../build/$PACKAGE_VERSION/$BINARY_NAME 46 | ;; 47 | esac 48 | } 49 | 50 | if [[ "$TARGET" == "alpine" ]]; then 51 | CACHE_DIR=".ccache-alpine" 52 | IMAGE="codercom/nbin-alpine" 53 | PREBUILD_COMMAND="" 54 | BINARY_NAME="node-${NODE_VERSION}-alpine-x64" 55 | docker_build 56 | elif [[ "$TARGET" == "aarch64-alpine" ]]; then 57 | CACHE_DIR=".ccache-alpine" 58 | IMAGE="codercom/nbin-alpine-aarch64-alpine" 59 | PREBUILD_COMMAND="" 60 | BINARY_NAME="node-${NODE_VERSION}-alpine-aarch64" 61 | docker_build 62 | elif [[ "$TARGET" == "armv7hf-alpine" ]]; then 63 | CACHE_DIR=".ccache-alpine" 64 | IMAGE="codercom/nbin-alpine-armv7hf-alpine" 65 | PREBUILD_COMMAND="" 66 | BINARY_NAME="node-${NODE_VERSION}-alpine-armv7hf" 67 | docker_build 68 | elif [[ "$TARGET" == "aarch64" ]]; then 69 | CACHE_DIR=".ccache-aarch64" 70 | IMAGE="codercom/nbin-aarch64" 71 | PREBUILD_COMMAND="" 72 | BINARY_NAME="node-${NODE_VERSION}-aarch64" 73 | docker_build 74 | elif [[ "$TARGET" == "armv7hf" ]]; then 75 | CACHE_DIR=".ccache-armv7hf" 76 | IMAGE="codercom/nbin-armv7hf" 77 | PREBUILD_COMMAND="" 78 | BINARY_NAME="node-${NODE_VERSION}-armv7hf" 79 | docker_build 80 | else 81 | CACHE_DIR=".ccache-centos" 82 | IMAGE="codercom/nbin-centos" 83 | PREBUILD_COMMAND="source /opt/rh/devtoolset-6/enable &&" 84 | BINARY_NAME="node-${NODE_VERSION}-linux-x64" 85 | docker_build 86 | fi 87 | -------------------------------------------------------------------------------- /typings/nbin.d.ts: -------------------------------------------------------------------------------- 1 | declare module 'nbin' { 2 | /** 3 | * Returns the stat for a path. 4 | */ 5 | export interface Stat { 6 | readonly isDirectory: boolean; 7 | readonly isFile: boolean; 8 | readonly size: number; 9 | } 10 | 11 | export interface Disposable { 12 | dispose(): void; 13 | } 14 | 15 | /** 16 | * Checks if a file exists within the binary. 
17 | */ 18 | export const existsSync: (path: string) => boolean; 19 | 20 | /** 21 | * Performs a stat on paths within the binary. 22 | */ 23 | export const statSync: (path: string) => Stat; 24 | 25 | /** 26 | * Reads a directory within the binary. 27 | */ 28 | export const readdirSync: (path: string) => ReadonlyArray<string>; 29 | 30 | /** 31 | * Reads a file asynchronously from the binary. 32 | */ 33 | function readFile(path: string, encoding?: "buffer", offset?: number, length?: number): Promise<Buffer>; 34 | function readFile(path: string, encoding?: "utf8", offset?: number, length?: number): Promise<string>; 35 | 36 | /** 37 | * Reads a file synchronously from the binary. 38 | */ 39 | function readFileSync(path: string, encoding?: "buffer", offset?: number, length?: number): Buffer; 40 | function readFileSync(path: string, encoding?: "utf8", offset?: number, length?: number): string; 41 | 42 | /** 43 | * Uniquely generated ID for the packaged binary. 44 | */ 45 | export const id: string; 46 | 47 | /** 48 | * Returns the entrypoint of the application. 49 | */ 50 | export const mainFile: string; 51 | 52 | /** 53 | * Shims the native `fs` module for the path. 54 | */ 55 | export const shimNativeFs: (path: string) => void; 56 | } 57 | 58 | declare module '@coder/nbin' { 59 | export interface BinaryOptions { 60 | /** 61 | * Path of the node binary to bundle. 62 | * *Must* be a patched binary. 63 | */ 64 | readonly nodePath?: string; 65 | 66 | /** 67 | * Suppresses log output. 68 | */ 69 | readonly suppressOutput?: boolean; 70 | 71 | /** 72 | * Main file for your application. 73 | * Will be called as the entrypoint. 74 | */ 75 | readonly mainFile: string; 76 | 77 | /** 78 | * OS target 79 | */ 80 | readonly target?: "darwin" | "alpine" | "linux"; 81 | 82 | } 83 | 84 | /** 85 | * Create a new binary. 86 | */ 87 | export class Binary { 88 | public constructor( 89 | options: BinaryOptions, 90 | ); 91 | 92 | /** 93 | * Write a file to the bundle at a path. 94 | */ 95 | public writeFile(pathName: string, content: Buffer): void; 96 | 97 | /** 98 | * Writes files from an FS glob. 99 | * Calls back as files are written. 100 | * @example 101 | * writeFiles(path.join(__dirname, "dog/**")); 102 | * @returns number of files written 103 | */ 104 | public writeFiles(glob: string, callback?: (fileWritten: string) => void): number; 105 | 106 | /** 107 | * Will bundle a module based on path and name. 108 | * Allows you to do `writeModule("/test/bananas/node_modules/frog")` and 109 | * embed the `frog` module within the binary. 110 | * 111 | * All modules by default will be placed in `/node_modules` 112 | */ 113 | public writeModule(modulePath: string): void; 114 | 115 | /** 116 | * Bundles the binary. 117 | * @returns the content of the executable file.
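   * @example
   * // As in example/build.ts (writeFileSync and mkdirpSync come from fs-extra):
   * bin.build().then((buffer) => {
   *   mkdirpSync(outDir);
   *   writeFileSync(path.join(outDir, "example"), buffer, { mode: "755" });
   * });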
118 | */ 119 | public build(): Promise<Buffer>; 120 | } 121 | } 122 | -------------------------------------------------------------------------------- /src/patches/nbin.ts: -------------------------------------------------------------------------------- 1 | import * as fs from "fs"; 2 | import { Stat } from "nbin"; 3 | import * as path from "path"; 4 | import { readString } from "../common/buffer"; 5 | import { createNotFound } from "../common/error"; 6 | import { ReadableFilesystem } from "../common/filesystem"; 7 | import { readFooter } from "../common/footer"; 8 | import { fillFs } from "./fs"; 9 | 10 | const execPath = process.execPath; 11 | const execPathStat = fs.statSync(execPath); 12 | const nbinFd = fs.openSync(execPath, "r"); 13 | 14 | // Footer is located at the end of the file 15 | const footer = readFooter(nbinFd, execPathStat.size); 16 | 17 | // Contains the ID, mainFile and the filesystem 18 | const headerBuffer = Buffer.allocUnsafe(footer.headerLength); 19 | fs.readSync(nbinFd, headerBuffer, 0, footer.headerLength, footer.headerOffset); 20 | 21 | // Reading the ID. 22 | const id = readString(headerBuffer, 0); 23 | 24 | // Reading the main file 25 | const mainFile = readString(headerBuffer, id.offset); 26 | 27 | /** 28 | * Maximize read perf by storing before any overrides 29 | */ 30 | const originalRead = fs.read; 31 | const originalReadSync = fs.readSync; 32 | 33 | const fsBuffer = headerBuffer.slice(mainFile.offset); 34 | const readableFs = ReadableFilesystem.fromBuffer(fsBuffer, { 35 | readContents: (offset: number, length: number): Promise<Buffer> => { 36 | const buffer = Buffer.allocUnsafe(length); 37 | return new Promise((resolve, reject) => { 38 | originalRead(nbinFd, buffer, 0, length, offset + footer.contentOffset, (err, _, buffer) => { 39 | if (err) { 40 | return reject(err); 41 | } 42 | 43 | resolve(buffer); 44 | }); 45 | }); 46 | }, 47 | readContentsSync: (offset: number, length: number): Buffer => { 48 | const buffer = Buffer.alloc(length); 49 | originalReadSync(nbinFd, buffer, 0, length, offset + footer.contentOffset); 50 | return buffer; 51 | }, 52 | }); 53 | 54 | /** 55 | * Parses an entry from a readable FS. 56 | * Will split the inputted path and attempt to 57 | * nest down the tree.
58 | */ 59 | const parse = (fullPath: string): { 60 | readonly fs: ReadableFilesystem; 61 | readonly name: string; 62 | } | undefined => { 63 | const parts = path.normalize(fullPath).split(path.sep).filter(i => i.length); 64 | let fs = readableFs; 65 | for (let i = 0; i < parts.length; i++) { 66 | if (!fs) { 67 | return; 68 | } 69 | const part = parts[i]; 70 | if (i === parts.length - 1) { 71 | return { 72 | fs, 73 | name: part, 74 | }; 75 | } else { 76 | fs = fs.cd(part); 77 | } 78 | } 79 | }; 80 | 81 | const exported: typeof import("nbin") = { 82 | id: id.value, 83 | 84 | mainFile: mainFile.value, 85 | 86 | existsSync: (pathName: string): boolean => { 87 | const stat = exported.statSync(pathName); 88 | return stat.isFile || stat.isDirectory; 89 | }, 90 | 91 | readdirSync: (pathName: string): ReadonlyArray => { 92 | const res = parse(pathName); 93 | if (!res) { 94 | throw createNotFound(); 95 | } 96 | return res.fs.cd(res.name).ls(); 97 | }, 98 | 99 | readFile: async (pathName: string, encoding?: "utf8", offset?: number, length?: number): Promise | Promise => { 100 | const res = parse(pathName); 101 | if (!res) { 102 | throw createNotFound(); 103 | } 104 | const b = await res.fs.read(res.name, offset, length); 105 | if (encoding && encoding === "utf8") { 106 | return b.toString(); 107 | } 108 | return b; 109 | }, 110 | 111 | readFileSync: (pathName: string, encoding?: "utf8", offset?: number, length?: number): Buffer | string => { 112 | const res = parse(pathName); 113 | if (!res) { 114 | throw createNotFound(); 115 | } 116 | const b = res.fs.readSync(res.name, offset, length); 117 | if (encoding && encoding === "utf8") { 118 | return b.toString(); 119 | } 120 | return b; 121 | }, 122 | 123 | statSync: (pathName: string): Stat => { 124 | const res = parse(pathName); 125 | if (!res) { 126 | return { 127 | isDirectory: false, 128 | isFile: false, 129 | size: 0, 130 | }; 131 | } 132 | return res.fs.stat(res.name); 133 | }, 134 | 135 | shimNativeFs: (pathName: string): void => { 136 | fillFs(pathName); 137 | }, 138 | } as typeof import("nbin"); 139 | 140 | export = exported; 141 | -------------------------------------------------------------------------------- /src/api/bundler.test.ts: -------------------------------------------------------------------------------- 1 | import * as cp from "child_process"; 2 | import * as fs from "fs"; 3 | import * as os from "os"; 4 | import * as path from "path"; 5 | import { Binary } from "./bundler"; 6 | import * as zlib from "zlib"; 7 | 8 | const nodePath = path.join(__dirname, "../../lib/node/out/Release/node"); 9 | if (!fs.existsSync(nodePath)) { 10 | throw new Error("Node must be built locally to run bundler test"); 11 | } 12 | let binId = 0; 13 | const runBinary = async (binary: Binary): Promise> => { 14 | const tmpFile = path.join(os.tmpdir(), ".nbin-bundlertest" + binId++); 15 | fs.writeFileSync(tmpFile, await binary.build()); 16 | fs.chmodSync(tmpFile, "755"); 17 | return cp.spawnSync(tmpFile, { 18 | env: { 19 | NODE_OPTIONS: "--max-old-space-size=1024", 20 | }, 21 | }); 22 | }; 23 | 24 | it("should compile binary and execute it", async () => { 25 | const mainFile = "/example.js"; 26 | const bin = new Binary({ 27 | nodePath, 28 | mainFile, 29 | }); 30 | const output = "hello!"; 31 | bin.writeFile(mainFile, Buffer.from(`console.log("${output}");`)); 32 | const resp = await runBinary(bin); 33 | expect(resp.stdout.toString().trim()).toEqual(output); 34 | }); 35 | 36 | /** 37 | * TODO: this should work on other platforms 38 | */ 39 | if 
(process.platform === "linux") { 40 | it("should load native module", async () => { 41 | const mainFile = "/example.js"; 42 | const bin = new Binary({ 43 | nodePath, 44 | mainFile, 45 | }); 46 | bin.writeModule(path.join(__dirname, "../../node_modules", "node-pty")); 47 | bin.writeFile(mainFile, Buffer.from(`require("node-pty");console.log("hi");`)); 48 | const resp = await runBinary(bin); 49 | const stderr = resp.stderr.toString().trim(); 50 | if (stderr.length > 0) { 51 | console.error(stderr); 52 | } 53 | expect(stderr.length).toEqual(0); 54 | }); 55 | } 56 | 57 | it("should fork", async () => { 58 | const mainFile = "/example.js"; 59 | const bin = new Binary({ 60 | nodePath, 61 | mainFile, 62 | }); 63 | bin.writeFile(mainFile, Buffer.from(`const proc = require("child_process").fork("/test.js", [], { stdio: [null, null, null, "ipc"] }); 64 | proc.stdout.on("data", (d) => { 65 | console.log(d.toString("utf8")); 66 | setTimeout(() => process.exit(0), 10000); 67 | }); 68 | `)); 69 | bin.writeFile("/test.js", Buffer.from("console.log('hi');")); 70 | const resp = await runBinary(bin); 71 | expect(resp.stdout.toString().trim()).toEqual("hi"); 72 | }); 73 | 74 | /** 75 | * TODO: this should work on other platforms 76 | */ 77 | if (process.platform === "linux") { 78 | it("should fill fs", async () => { 79 | const mainFile = "/example.js"; 80 | const exampleContent = () => { 81 | const assert = require("assert") as typeof import("assert"); 82 | const fs = require("fs") as typeof import("fs"); 83 | const nbin = require("nbin") as typeof import("nbin"); 84 | 85 | try { 86 | fs.readFileSync("/donkey/frog"); 87 | // Fail if we read successfully 88 | process.exit(1); 89 | } catch (ex) { 90 | nbin.shimNativeFs("/donkey"); 91 | assert.equal(fs.readFileSync("/donkey/frog").toString(), "example"); 92 | try { 93 | fs.writeFileSync("/donkey/banana", "asdf"); 94 | process.exit(1); 95 | } catch (ex) { 96 | // Expected 97 | } 98 | } 99 | }; 100 | const bin = new Binary({ 101 | nodePath, 102 | mainFile, 103 | }); 104 | bin.writeFile(mainFile, Buffer.from(`(${exampleContent.toString()})()`)); 105 | bin.writeFile("/donkey/frog", Buffer.from("example")); 106 | const resp = await runBinary(bin); 107 | if (resp.stdout.length > 0) { 108 | console.log(resp.stdout.toString()); 109 | } 110 | expect(resp.stderr.toString().trim()).toEqual(""); 111 | }); 112 | } 113 | 114 | it("should fill fs and propogate errors", async () => { 115 | const mainFile = "/example.js"; 116 | const exampleContent = () => { 117 | const fs = require("fs") as typeof import("fs"); 118 | const nbin = require("nbin") as typeof import("nbin"); 119 | 120 | nbin.shimNativeFs("/home/kyle/node/coder/code-server/packages/server"); 121 | fs.open("/home/kyle/node/coder/code-server/packages/server/build/web/auth/__webpack_hmr", "r", (err) => { 122 | if (err) { 123 | // Expected 124 | process.exit(0); 125 | } 126 | 127 | process.exit(1); 128 | }); 129 | }; 130 | const bin = new Binary({ 131 | nodePath, 132 | mainFile, 133 | }); 134 | bin.writeFile(mainFile, Buffer.from(`(${exampleContent.toString()})()`)); 135 | const resp = await runBinary(bin); 136 | if (resp.stderr.length > 0) { 137 | console.log(resp.stderr.toString()); 138 | } 139 | expect(resp.stderr.toString().trim().length).toEqual(0); 140 | }); 141 | 142 | it("should load gzip'd javascript", async () => { 143 | const mainFile = "/example.js.gz"; 144 | const bin = new Binary({ 145 | nodePath, 146 | mainFile, 147 | }); 148 | bin.writeFile(mainFile, zlib.gzipSync(Buffer.from("process.exit(0);"))); 149 
| const resp = await runBinary(bin); 150 | if (resp.stderr.length > 0) { 151 | console.log(resp.stderr.toString()); 152 | } 153 | expect(resp.stderr.toString().trim().length).toEqual(0); 154 | }); 155 | -------------------------------------------------------------------------------- /node.patch: -------------------------------------------------------------------------------- 1 | diff --git a/lib/internal/bootstrap/node.js b/lib/internal/bootstrap/node.js 2 | index 885546c5d6..f21e7993f1 100644 3 | --- a/lib/internal/bootstrap/node.js 4 | +++ b/lib/internal/bootstrap/node.js 5 | @@ -215,11 +215,16 @@ 6 | // are running from a script and running the REPL - but there are a few 7 | // others like the debugger or running --eval arguments. Here we decide 8 | // which mode we run in. 9 | + 10 | + if (NativeModule.exists('_third_party_main')) { 11 | + NativeModule.require('_third_party_main'); 12 | + } 13 | + 14 | if (internalBinding('worker').getEnvMessagePort() !== undefined) { 15 | // This means we are in a Worker context, and any script execution 16 | // will be directed by the worker module. 17 | NativeModule.require('internal/worker').setupChild(evalScript); 18 | - } else if (NativeModule.exists('_third_party_main')) { 19 | + } else if (NativeModule.exists('_third_party_main_invalid')) { 20 | // To allow people to extend Node in different ways, this hook allows 21 | // one to drop a file lib/_third_party_main.js into the build 22 | // directory which will be executed instead of Node's normal loading. 23 | diff --git a/lib/internal/modules/cjs/loader.js b/lib/internal/modules/cjs/loader.js 24 | index fb3770b729..f4bad8d6de 100644 25 | --- a/lib/internal/modules/cjs/loader.js 26 | +++ b/lib/internal/modules/cjs/loader.js 27 | @@ -44,6 +44,9 @@ const { getOptionValue } = require('internal/options'); 28 | const preserveSymlinks = getOptionValue('--preserve-symlinks'); 29 | const preserveSymlinksMain = getOptionValue('--preserve-symlinks-main'); 30 | const experimentalModules = getOptionValue('--experimental-modules'); 31 | +const nbin = require('nbin'); 32 | +const os = require('os'); 33 | +const zlib = require('zlib'); 34 | 35 | const { 36 | ERR_INVALID_ARG_TYPE, 37 | @@ -87,7 +90,8 @@ function stat(filename) { 38 | const result = cache.get(filename); 39 | if (result !== undefined) return result; 40 | } 41 | - const result = internalModuleStat(filename); 42 | + const s = nbin.statSync(filename); 43 | + const result = s.isDirectory ? 1 : s.isFile ? 
0 : internalModuleStat(filename); 44 | if (cache !== null) cache.set(filename, result); 45 | return result; 46 | } 47 | @@ -154,8 +158,13 @@ function readPackage(requestPath) { 48 | if (entry) 49 | return entry; 50 | 51 | + let json; 52 | const jsonPath = path.resolve(requestPath, 'package.json'); 53 | - const json = internalModuleReadJSON(path.toNamespacedPath(jsonPath)); 54 | + if (nbin.existsSync(jsonPath)) { 55 | + json = nbin.readFileSync(jsonPath, 'utf8'); 56 | + } else { 57 | + json = internalModuleReadJSON(path.toNamespacedPath(jsonPath)); 58 | + } 59 | 60 | if (json === undefined) { 61 | return false; 62 | @@ -199,6 +208,9 @@ function tryFile(requestPath, isMain) { 63 | } 64 | 65 | function toRealPath(requestPath) { 66 | + if (nbin.existsSync(requestPath)) { 67 | + return requestPath; 68 | + } 69 | return fs.realpathSync(requestPath, { 70 | [internalFS.realpathCacheKey]: realpathCache 71 | }); 72 | @@ -696,14 +708,29 @@ Module.prototype._compile = function(content, filename) { 73 | 74 | // Native extension for .js 75 | Module._extensions['.js'] = function(module, filename) { 76 | - var content = fs.readFileSync(filename, 'utf8'); 77 | + let content; 78 | + if (nbin.existsSync(filename)) { 79 | + content = nbin.readFileSync(filename, 'utf8'); 80 | + } else { 81 | + content = fs.readFileSync(filename, 'utf8'); 82 | + } 83 | module._compile(stripBOM(content), filename); 84 | }; 85 | 86 | +// Native extension for .gz 87 | +Module._extensions['.gz'] = function(module, filename) { 88 | + const content = zlib.gunzipSync(nbin.readFileSync(filename)).toString("utf8"); 89 | + module._compile(stripBOM(content), filename); 90 | +}; 91 | 92 | // Native extension for .json 93 | Module._extensions['.json'] = function(module, filename) { 94 | - var content = fs.readFileSync(filename, 'utf8'); 95 | + let content; 96 | + if (nbin.existsSync(filename)) { 97 | + content = nbin.readFileSync(filename, 'utf8'); 98 | + } else { 99 | + content = fs.readFileSync(filename, 'utf8'); 100 | + } 101 | try { 102 | module.exports = JSON.parse(stripBOM(content)); 103 | } catch (err) { 104 | @@ -715,7 +742,16 @@ Module._extensions['.json'] = function(module, filename) { 105 | 106 | // Native extension for .node 107 | Module._extensions['.node'] = function(module, filename) { 108 | - return process.dlopen(module, path.toNamespacedPath(filename)); 109 | + let isInternal = false; 110 | + if (nbin.existsSync(filename)) { 111 | + const tmpFile = path.join(os.tmpdir(), `.nbin${nbin.id}-${path.basename(filename)}`); 112 | + if (!fs.existsSync(tmpFile)) { 113 | + fs.writeFileSync(tmpFile, nbin.readFileSync(filename)); 114 | + } 115 | + filename = tmpFile; 116 | + isInternal = true; 117 | + } 118 | + return process.dlopen(module, isInternal ? filename : path.toNamespacedPath(filename)); 119 | }; 120 | 121 | if (experimentalModules) { 122 | diff --git a/src/node.cc b/src/node.cc 123 | index 9fb5ab3b8e..d2c9ab3bc3 100644 124 | --- a/src/node.cc 125 | +++ b/src/node.cc 126 | @@ -2439,6 +2439,12 @@ void ProcessArgv(std::vector* args, 127 | std::vector v8_args; 128 | std::vector errors{}; 129 | 130 | + if (!getenv("NBIN_BYPASS") && !is_env) { 131 | + // *exec_argc = 0; 132 | + 133 | + return; 134 | + } 135 | + 136 | { 137 | // TODO(addaleax): The mutex here should ideally be held during the 138 | // entire function, but that doesn't play well with the exit() calls below. 
139 | -------------------------------------------------------------------------------- /src/api/bundler.ts: -------------------------------------------------------------------------------- 1 | import { field, logger } from "@coder/logger"; 2 | import * as nbin from "@coder/nbin"; 3 | import * as fs from "fs"; 4 | import * as fse from "fs-extra"; 5 | import * as glob from "glob"; 6 | import fetch from "node-fetch"; 7 | import * as os from "os"; 8 | import * as path from "path"; 9 | import { writeString } from "../common/buffer"; 10 | import { WritableFilesystem } from "../common/filesystem"; 11 | import { createFooter } from "../common/footer"; 12 | import ora, { Ora } from "ora"; 13 | 14 | export class Binary implements nbin.Binary { 15 | private readonly fs: WritableFilesystem = new WritableFilesystem(); 16 | 17 | public constructor( 18 | private readonly options: nbin.BinaryOptions, 19 | ) {} 20 | 21 | public writeFile(pathName: string, content: Buffer): void { 22 | const parts = path.normalize(pathName).split(path.sep).filter((i) => i.length); 23 | let writableFs: WritableFilesystem = this.fs; 24 | for (let i = 0; i < parts.length; i++) { 25 | const part = parts[i]; 26 | if (i === parts.length - 1) { 27 | writableFs.write(part, content); 28 | } else { 29 | writableFs = writableFs.cd(part); 30 | } 31 | } 32 | } 33 | 34 | public writeFiles(globName: string, callback?: (fileWritten: string) => void): number { 35 | const files = glob.sync(globName, { 36 | cwd: process.cwd(), 37 | }); 38 | let fileCount: number = 0; 39 | let spinner: Ora | undefined; 40 | if (this.canLog) { 41 | spinner = ora("Writing..."); 42 | } 43 | for (let i = 0; i < files.length; i++) { 44 | const file = files[i]; 45 | const stat = fs.statSync(file); 46 | if (!stat.isFile()) { 47 | continue; 48 | } 49 | this.writeFile(file, fs.readFileSync(file)); 50 | if (spinner) { 51 | spinner.text = `Wrote "${file}"!`; 52 | } 53 | if (callback) { 54 | callback(file); 55 | } 56 | fileCount++; 57 | } 58 | if (spinner) { 59 | spinner.succeed(`Wrote ${fileCount} ${fileCount === 1 ? "file" : "files"}!`); 60 | } 61 | return fileCount; 62 | } 63 | 64 | public writeModule(modulePath: string): void { 65 | if (!fs.existsSync(modulePath)) { 66 | throw new Error(`"${modulePath}" does not exist`); 67 | } 68 | const paths = glob.sync(path.join(modulePath, "**")) 69 | const moduleName = path.basename(modulePath); 70 | 71 | for (let i = 0; i < paths.length; i++) { 72 | const p = paths[i]; 73 | const newPath = path.join("/node_modules", moduleName, path.relative(modulePath, p)); 74 | const stat = fs.statSync(p); 75 | if (!stat.isFile()) { 76 | continue; 77 | } 78 | this.writeFile(newPath, fs.readFileSync(p)); 79 | } 80 | if (this.canLog) { 81 | logger.info("Packaged module", field("module", moduleName)); 82 | } 83 | } 84 | 85 | public async build(): Promise { 86 | const nodeBuffer = await this.cacheBinary(); 87 | 88 | // Create a buffer containing a (most likely) unique ID and its length. 
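    // (This ID is exposed at runtime as `nbin.id`; the patched `.node` extension loader
    // in node.patch uses it to name native modules extracted to the temp directory.)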
89 | const idLength = 6; 90 | const possible = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"; 91 | const id = Array(idLength).fill(1) 92 | .map(() => possible[Math.floor(Math.random() * possible.length)]) 93 | .join(""); 94 | const idBuffer = Buffer.alloc(2 + Buffer.byteLength(id)); 95 | writeString(idBuffer, id); 96 | 97 | // Writing the entrypoint 98 | const mainFileBuffer = Buffer.alloc(2 + Buffer.byteLength(this.options.mainFile)); 99 | writeString(mainFileBuffer, this.options.mainFile); 100 | 101 | if (this.canLog) { 102 | logger.info("Building filesystem"); 103 | } 104 | // Filesystem contents 105 | const fsBuffer = this.fs.build(); 106 | 107 | // Footer 108 | const footerBuffer = createFooter( 109 | fsBuffer.header.byteLength + idBuffer.byteLength + mainFileBuffer.byteLength, // Header byte length 110 | nodeBuffer.byteLength, // Header byte offset 111 | fsBuffer.fileContents.byteLength, // File contents length 112 | nodeBuffer.byteLength + fsBuffer.header.byteLength + idBuffer.byteLength + mainFileBuffer.byteLength, // File contents offset 113 | ); 114 | 115 | return Buffer.concat([nodeBuffer, idBuffer, mainFileBuffer, fsBuffer.header, fsBuffer.fileContents, footerBuffer]); 116 | } 117 | 118 | private async cacheBinary(): Promise { 119 | let nodeBinaryPath = this.options.nodePath || path.join(__dirname, "../../lib/node/out/Release/node"); 120 | const nodeBinaryName = this.nodeBinaryName; 121 | 122 | const cacheDir = process.env.XDG_DATA_HOME ? path.join(process.env.XDG_DATA_HOME, "nbin") : path.join(os.homedir(), ".nbin"); 123 | if (!fs.existsSync(nodeBinaryPath)) { 124 | if (!fs.existsSync(cacheDir)) { 125 | if (this.canLog) { 126 | logger.info("Creating node binary cache directory"); 127 | } 128 | fse.mkdirpSync(cacheDir); 129 | } 130 | nodeBinaryPath = path.join(cacheDir, nodeBinaryName); 131 | } 132 | 133 | if (fs.existsSync(nodeBinaryPath)) { 134 | if (this.canLog) { 135 | logger.info("Returning cached binary", field("binary-name", nodeBinaryName)); 136 | } 137 | return fs.readFileSync(nodeBinaryPath); 138 | } else { 139 | // The pulled binary we need doesn't exist 140 | const binary = await this.fetchNodeBinary(); 141 | fse.mkdirpSync(path.dirname(path.join(cacheDir, nodeBinaryName))); 142 | fse.writeFileSync(path.join(cacheDir, nodeBinaryName), binary); 143 | 144 | if (this.canLog) { 145 | logger.info("Wrote and cached binary", field("binary-name", nodeBinaryName), field("path", path.join(cacheDir, nodeBinaryName))); 146 | } 147 | return binary; 148 | } 149 | } 150 | 151 | private async fetchNodeBinary(): Promise { 152 | const binName = this.nodeBinaryName; 153 | const url = `https://nbin.cdr.sh/${binName}`; 154 | if (this.canLog) { 155 | logger.info("Fetching", field("url", url)); 156 | } 157 | 158 | const resp = await fetch(url); 159 | if (resp.status !== 200) { 160 | throw new Error(resp.statusText); 161 | } 162 | const buffer = await resp.arrayBuffer(); 163 | 164 | return Buffer.from(buffer); 165 | } 166 | 167 | private get nodeBinaryName(): string { 168 | const currentPlatform = this.options.target || os.platform(); 169 | let currentArchitecture = os.arch(); 170 | if (currentArchitecture === "x64") { 171 | currentArchitecture = "x86_64"; 172 | } 173 | const nodeVersion = "10.15.1"; 174 | const packageJson = require("../../package.json"); 175 | const packageVersion = packageJson.version; 176 | const binName = `${packageVersion}/node-${nodeVersion}-${currentPlatform}-${currentArchitecture}`; 177 | 178 | return binName; 179 | } 180 | 181 | private get 
canLog(): boolean { 182 | return !this.options.suppressOutput; 183 | } 184 | 185 | } 186 | -------------------------------------------------------------------------------- /src/common/filesystem.ts: -------------------------------------------------------------------------------- 1 | import { readString, writeString } from "./buffer"; 2 | import { createNotFound } from "./error"; 3 | 4 | interface Stat { 5 | readonly isFile: boolean; 6 | readonly isDirectory: boolean; 7 | readonly size: number; 8 | } 9 | 10 | interface File { 11 | readonly byteLength: number; 12 | readonly byteOffset: number; 13 | 14 | read(offset?: number, length?: number): Promise; 15 | readSync(offset?: number, length?: number): Buffer; 16 | } 17 | 18 | export abstract class Filesystem { 19 | protected readonly directories: Map = new Map(); 20 | } 21 | 22 | export interface ReadableFilesystemProvider { 23 | readContents: (offset: number, length: number) => Promise; 24 | readContentsSync: (offset: number, length: number) => Buffer; 25 | } 26 | 27 | export class ReadableFilesystem extends Filesystem { 28 | public static fromBuffer(buffer: Buffer, provider: ReadableFilesystemProvider): ReadableFilesystem { 29 | let offset = 0; 30 | const dirAmount = buffer.readUInt16BE(offset); 31 | offset += 2; 32 | const directory = new ReadableFilesystem(); 33 | for (let i = 0; i < dirAmount; i++) { 34 | const dirName = readString(buffer, offset); 35 | offset = dirName.offset; 36 | const dirSliceLen = buffer.readUInt32BE(offset); 37 | offset += 4; 38 | directory.directories.set(dirName.value, ReadableFilesystem.fromBuffer(buffer.slice(offset, offset + dirSliceLen), provider)); 39 | offset += dirSliceLen; 40 | } 41 | const fileAmount = buffer.readUInt16BE(offset); 42 | offset += 2; 43 | for (let i = 0; i < fileAmount; i++) { 44 | const fileName = readString(buffer, offset); 45 | offset = fileName.offset; 46 | const byteOffset = buffer.readUInt32BE(offset); 47 | offset += 4; 48 | const byteLength = buffer.readUInt32BE(offset); 49 | offset += 4; 50 | 51 | directory.files.set(fileName.value, { 52 | byteLength, 53 | byteOffset, 54 | 55 | read: (offset: number = 0, length: number = byteLength): Promise => { 56 | offset = Math.min(byteOffset + offset, byteOffset + byteLength); 57 | length = Math.min(length, byteLength, (byteOffset + byteLength) - offset); 58 | 59 | return provider.readContents(offset, length); 60 | }, 61 | 62 | readSync: (offset: number = 0, length: number = byteLength): Buffer => { 63 | offset = Math.min(byteOffset + offset, byteOffset + byteLength); 64 | length = Math.min(length, byteLength, (byteOffset + byteLength) - offset); 65 | 66 | return provider.readContentsSync(offset, length); 67 | }, 68 | }); 69 | } 70 | return directory; 71 | } 72 | 73 | protected readonly files: Map = new Map(); 74 | 75 | public ls(): ReadonlyArray { 76 | return [...Array.from(this.directories.keys()), ...Array.from(this.files.keys())]; 77 | } 78 | 79 | public stat(name: string): Stat { 80 | const isFile = this.files.has(name); 81 | return { 82 | isFile: isFile, 83 | isDirectory: this.directories.has(name), 84 | size: isFile ? 
this.files.get(name).byteLength : 0, 85 | }; 86 | } 87 | 88 | public cd(name: string): ReadableFilesystem | undefined { 89 | return this.directories.get(name) as ReadableFilesystem; 90 | } 91 | 92 | public read(name: string, offset?: number, length?: number): Promise { 93 | const file = this.files.get(name); 94 | if (!file) { 95 | return Promise.reject(createNotFound()); 96 | } 97 | return file.read(offset, length); 98 | } 99 | 100 | public readSync(name: string, offset?: number, length?: number): Buffer { 101 | const file = this.files.get(name); 102 | if (!file) { 103 | throw createNotFound(); 104 | } 105 | return file.readSync(offset, length); 106 | } 107 | } 108 | 109 | export class WritableFilesystem extends Filesystem { 110 | protected readonly files: Map = new Map(); 111 | private readonly contentBuffers: Buffer[] = []; 112 | private contentLength: number = 0; 113 | 114 | public constructor( 115 | private readonly parent?: WritableFilesystem, 116 | ) { 117 | super(); 118 | } 119 | 120 | /** 121 | * Write a file. 122 | */ 123 | public write(name: string, value: Buffer): void { 124 | this.files.set(name, value); 125 | } 126 | 127 | public cd(name: string): WritableFilesystem { 128 | if (this.directories.has(name)) { 129 | return this.directories.get(name) as WritableFilesystem; 130 | } 131 | 132 | const dir = new WritableFilesystem(this); 133 | this.directories.set(name, dir); 134 | 135 | return dir; 136 | } 137 | 138 | public build(): { 139 | readonly header: Buffer; 140 | readonly fileContents: Buffer; 141 | } { 142 | return { 143 | header: this.toBuffer(), 144 | fileContents: Buffer.concat(this.contentBuffers), 145 | }; 146 | } 147 | 148 | private toBuffer(): Buffer { 149 | const dirs = Array.from(this.directories.values()).map((dir: WritableFilesystem) => dir.toBuffer()); 150 | const dirNames = Array.from(this.directories.keys()); 151 | const files = Array.from(this.files.keys()); 152 | const headerSize = this.headerSize(dirs); 153 | const buffer = Buffer.alloc(headerSize); 154 | let offset = 0; 155 | 156 | // Storing the amount of directories 157 | offset = buffer.writeUInt16BE(dirs.length, offset); 158 | for (let i = 0; i < dirs.length; i++) { 159 | const dir = dirs[i]; 160 | 161 | // Storing the directory name 162 | offset = writeString(buffer, dirNames[i], offset); 163 | // Writing the length of the dir slice 164 | offset = buffer.writeUInt32BE(dir.byteLength, offset); 165 | // Up until here is fine 166 | // Writing the dirslice 167 | buffer.set(dir, offset); 168 | offset += dir.byteLength; 169 | } 170 | // Storing the amount of files 171 | offset = buffer.writeUInt16BE(files.length, offset); 172 | for (let i = 0; i < files.length; i++) { 173 | const file = this.files.get(files[i])!; 174 | // Writing the file path 175 | offset = writeString(buffer, files[i], offset); 176 | // Writing the resource length offset. 177 | // This offset is set from the beginning of the header. 
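      // (More precisely: store() bubbles the file content up to the root filesystem and
      // returns its offset within the concatenated file-contents section; at read time,
      // src/patches/nbin.ts adds footer.contentOffset to locate it inside the binary.)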
178 | const resourceOffset = this.store(file); 179 | offset = buffer.writeUInt32BE(resourceOffset, offset); 180 | offset = buffer.writeUInt32BE(file.byteLength, offset); 181 | } 182 | 183 | return buffer; 184 | } 185 | 186 | private headerSize(dirs: Buffer[]): number { 187 | const dirNames = Array.from(this.directories.keys()); 188 | const files = Array.from(this.files.keys()); 189 | const headerSize = 2 + // # of dirs 190 | dirs.map((d, i) => { 191 | // 2 byte for str len, then string 192 | const dirNameLen = 2 + Buffer.byteLength(dirNames[i], "utf8"); 193 | // Length of subdir slice 194 | const dirLen = 4; 195 | return dirNameLen + dirLen + d.byteLength; 196 | }).reduce((p, c) => p + c, 0) + 197 | 2 + 198 | files.map((f) => { 199 | const strLen = 2 + Buffer.byteLength(f, "utf8"); 200 | return strLen + 4 + 4; 201 | }).reduce((p, c) => p + c, 0); 202 | 203 | return headerSize; 204 | } 205 | 206 | /** 207 | * Bubbles to the main filesystem. Stores a buffer 208 | * and returns the offset it'll be stored at. 209 | */ 210 | private store(buffer: Buffer): number { 211 | if (this.parent) { 212 | return this.parent.store(buffer); 213 | } 214 | 215 | const plen = this.contentLength; 216 | this.contentBuffers.push(buffer); 217 | this.contentLength += buffer.byteLength; 218 | 219 | return plen; 220 | } 221 | } 222 | -------------------------------------------------------------------------------- /src/patches/fs.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * A patch for `fs` permitting certain directories to be routed within the binary. 3 | */ 4 | import * as fs from "fs"; 5 | import * as nbin from "nbin"; 6 | import * as path from "path"; 7 | import * as util from "util"; 8 | 9 | /** 10 | * Fills `${pathName}/*` with the binary stuff. 
11 | */ 12 | export const fillFs = (pathName: string): void => { 13 | const override = <T extends keyof typeof fs>(propertyName: T, callback: (callOld: () => any) => typeof fs[T], customPromisify?: (...args: any[]) => Promise<any>) => { 14 | const oldfunc = fs[propertyName]; 15 | /** 16 | * Overriding the fs function 17 | */ 18 | fs[propertyName] = (...args: any[]): any => { 19 | const callOld = (): any => { 20 | // @ts-ignore 21 | return oldfunc(...args); 22 | }; 23 | 24 | let realPath = args[0]; 25 | 26 | /** 27 | * If this is a number, it's likely a file descriptor 28 | */ 29 | if (typeof realPath === "number") { 30 | const newFd = args[0]; 31 | if (openFiles.has(newFd)) { 32 | realPath = openFiles.get(newFd).path; 33 | } 34 | } 35 | 36 | /** 37 | * If this is a string, it's likely a file path 38 | */ 39 | if (typeof realPath === "string") { 40 | const newPath = realPath; 41 | const rel = path.relative(pathName, newPath!); 42 | if (!rel.startsWith("..")) { 43 | // It's in the fill path, 44 | // so route the call through the override with the original arguments 45 | 46 | const func = callback(() => callOld()); 47 | 48 | // @ts-ignore 49 | return func(...args); 50 | } 51 | } 52 | 53 | return callOld(); 54 | }; 55 | 56 | if (customPromisify) { 57 | fs[propertyName][util.promisify.custom] = customPromisify; 58 | } 59 | }; 60 | 61 | let fdId = 0; 62 | interface OpenFile { 63 | readonly path: string; 64 | readLocation: number; 65 | } 66 | const openFiles = new Map<number, OpenFile>(); 67 | const findCb = (args: any[]): undefined | ((...args: any[]) => void) => { 68 | const cb = args.filter((d) => { 69 | return typeof d === "function"; 70 | }); 71 | if (cb.length === 0) { 72 | return; 73 | } 74 | return cb[0]; 75 | }; 76 | 77 | // @ts-ignore 78 | override("access", (callOld) => (pathName: string) => { 79 | if (!nbin.existsSync(pathName)) { 80 | return callOld(); 81 | } 82 | }); 83 | 84 | override("accessSync", (callOld) => (pathName: string) => { 85 | if (!nbin.existsSync(pathName)) { 86 | return callOld(); 87 | } 88 | }); 89 | 90 | // @ts-ignore 91 | override("close", (callOld) => (fd, callback) => { 92 | if (!openFiles.has(fd)) { 93 | return callOld(); 94 | } 95 | 96 | openFiles.delete(fd); 97 | callback(null); 98 | }); 99 | 100 | override("closeSync", (callOld) => (fd) => { 101 | if (!openFiles.has(fd)) { 102 | return callOld(); 103 | } 104 | 105 | openFiles.delete(fd); 106 | }); 107 | 108 | // @ts-ignore 109 | override("exists", (callOld) => (pathName: string, callback) => { 110 | callback(nbin.existsSync(pathName)); 111 | }, (pathName: string) => { 112 | return new Promise((resolve, reject) => { 113 | return fs.exists(pathName, (exists) => { 114 | resolve(exists); 115 | }); 116 | }); 117 | }); 118 | 119 | override("existsSync", (callOld) => (pathName: string) => { 120 | return nbin.existsSync(pathName); 121 | }); 122 | 123 | // @ts-ignore 124 | override("fstat", (callOld) => (fd, callback) => { 125 | if (!openFiles.has(fd)) { 126 | return callOld(); 127 | } 128 | 129 | const openFile = openFiles.get(fd); 130 | return fs.stat(openFile.path, callback); 131 | }); 132 | 133 | override("fstatSync", (callOld) => (fd) => { 134 | if (!openFiles.has(fd)) { 135 | return callOld(); 136 | } 137 | 138 | const openFile = openFiles.get(fd); 139 | return fs.statSync(openFile.path); 140 | }); 141 | 142 | // @ts-ignore 143 | override("lstat", (callOld) => (pathName, callback) => { 144 | return fs.stat(pathName, callback); 145 | }); 146 | 147 | override("lstatSync", (callOld) => (pathName) => { 148 | return fs.statSync(pathName); 149 | }); 150 | 151 | const doOpen = (pathName: string):
number => { 152 | const desc = fdId++; 153 | openFiles.set(desc, { 154 | path: pathName, 155 | readLocation: 0, 156 | }); 157 | return desc; 158 | }; 159 | 160 | // @ts-ignore 161 | override("open", (callOld) => (pathName: string, ...args: any[]) => { 162 | if (!nbin.existsSync(pathName)) { 163 | return callOld(); 164 | } 165 | const fd = doOpen(pathName); 166 | const cb = findCb(args); 167 | if (!cb) { 168 | return; 169 | } 170 | process.nextTick(() => { 171 | cb(null, fd); 172 | }); 173 | }); 174 | 175 | override("openSync", (callOld) => (pathName: string) => { 176 | if (!nbin.existsSync(pathName)) { 177 | return callOld(); 178 | } 179 | return doOpen(pathName); 180 | }); 181 | 182 | // @ts-ignore 183 | override("read", (callOld) => (fd, buffer: Buffer, offset, length, position, callback) => { 184 | const openFile = openFiles.get(fd); 185 | if (!openFile) { 186 | return callOld(); 187 | } 188 | 189 | let hadPosition = true; 190 | if (typeof position === "undefined" || position === null) { 191 | position = openFile.readLocation; 192 | hadPosition = false; 193 | } 194 | nbin.readFile(openFile.path, "buffer", position, length).then((content) => { 195 | buffer.set(content, offset); 196 | if (!hadPosition) { 197 | openFile.readLocation += content.byteLength; 198 | } 199 | // tslint:disable-next-line:no-any 200 | callback(null, content.byteLength, content as any); 201 | }).catch((ex) => { 202 | callback(ex, null, null); 203 | }); 204 | }, (fd, buffer: Buffer, offset, length, position) => { 205 | return new Promise((resolve, reject) => { 206 | return fs.read(fd, buffer, offset, length, position, (err, bytesRead, buffer) => { 207 | if (err) { 208 | return reject(err); 209 | } 210 | 211 | resolve({ 212 | bytesRead, 213 | buffer, 214 | }); 215 | }); 216 | }); 217 | }); 218 | 219 | override("readSync", (callOld) => (fd, buffer: Buffer, offset, length, position): number => { 220 | const openFile = openFiles.get(fd); 221 | if (!openFile) { 222 | return callOld(); 223 | } 224 | 225 | let hadPosition = true; 226 | if (typeof position === "undefined" || position === null) { 227 | position = openFile.readLocation; 228 | hadPosition = false; 229 | } 230 | const content = nbin.readFileSync(openFile.path, "buffer", position, length); 231 | buffer.set(content, offset); 232 | if (!hadPosition) { 233 | openFile.readLocation += content.byteLength; 234 | } 235 | return content.byteLength; 236 | }); 237 | 238 | // @ts-ignore 239 | override("readdir", (callOld) => (pathName: string, ...args: any[]) => { 240 | const cb = findCb(args); 241 | if (!cb) { 242 | return; 243 | } 244 | cb(null, nbin.readdirSync(pathName)); 245 | }); 246 | 247 | // @ts-ignore 248 | override("readdirSync", (callOld) => (pathName: string) => { 249 | return [...nbin.readdirSync(pathName)]; 250 | }); 251 | 252 | // @ts-ignore 253 | override("readFile", (callOld) => (pathName: string, ...args: any[]) => { 254 | let encoding: "utf8" | "buffer" = "buffer"; 255 | if (typeof args[0] === "string") { 256 | encoding = args[0]; 257 | } 258 | if (typeof args[0] === "object" && args[0] !== null) { 259 | const opts = args[0]; 260 | if (opts.encoding) { 261 | encoding = opts.encoding; 262 | } 263 | } 264 | const cb = findCb(args); 265 | if (!cb) { 266 | return; 267 | } 268 | nbin.readFile(pathName, encoding as "utf8").then((result) => { 269 | cb(null, result); 270 | }).catch((ex) => { 271 | cb(ex); 272 | }); 273 | }); 274 | 275 | // @ts-ignore 276 | override("readFileSync", (callOld) => (pathName: string, ...args: any[]) => { 277 | let encoding: "utf8" | 
"buffer" = "buffer"; 278 | if (typeof args[0] === "string") { 279 | encoding = args[0]; 280 | } 281 | if (typeof args[0] === "object" && args[0] !== null) { 282 | const opts = args[0]; 283 | if (opts.encoding) { 284 | encoding = opts.encoding; 285 | } 286 | } 287 | return nbin.readFileSync(pathName, encoding as "buffer"); 288 | }); 289 | 290 | // @ts-ignore 291 | override("realpath", (callOld) => (pathName: string, ...args: any[]) => { 292 | const cb = findCb(args); 293 | if (!cb) { 294 | return; 295 | } 296 | cb(null, pathName); 297 | }); 298 | 299 | // @ts-ignore 300 | override("realpathSync", (callOld) => (pathName: string) => { 301 | return pathName; 302 | }); 303 | 304 | const doStat = (pathName: string): fs.Stats => { 305 | const stat = nbin.statSync(pathName); 306 | const date = new Date(); 307 | 308 | return new class { 309 | isBlockDevice() { return false; } 310 | isCharacterDevice() { return false; } 311 | isDirectory() { return stat.isDirectory; } 312 | isFIFO() { return false; } 313 | isFile() { return stat.isFile; } 314 | isSocket() { return false; } 315 | isSymbolicLink() { return false; } 316 | 317 | public readonly atime = date; 318 | public readonly atimeMs = date.getTime(); 319 | public readonly birthtime = date; 320 | public readonly birthtimeMs = date.getTime(); 321 | public readonly blksize = null!; 322 | public readonly blocks = null!; 323 | public readonly ctime = date; 324 | public readonly ctimeMs = date.getTime(); 325 | public readonly dev = null!; 326 | public readonly gid = 0; 327 | public readonly ino = 0; 328 | public readonly mode = null!; 329 | public readonly mtime = date; 330 | public readonly mtimeMs = date.getTime(); 331 | public readonly nlink = null!; 332 | public readonly rdev = null!; 333 | public readonly size = stat.size; 334 | public readonly uid = 0; 335 | }; 336 | }; 337 | 338 | // @ts-ignore 339 | override("stat", (callOld) => (pathName: string, ...args: any[]) => { 340 | const cb = findCb(args); 341 | if (!cb) { 342 | return; 343 | } 344 | cb(null, doStat(pathName)); 345 | }); 346 | 347 | override("statSync", (callOld) => (pathName: string) => { 348 | return doStat(pathName); 349 | }); 350 | }; 351 | --------------------------------------------------------------------------------