├── .gitignore ├── Readme.md ├── package-lock.json ├── package.json ├── src ├── StableDiffusionPipeline.ts ├── index.ts ├── schedulers │ └── PNDMScheduler.ts ├── tokenizer.js ├── tqdm.d.ts └── txt2img.ts └── tsconfig.json /.gitignore: -------------------------------------------------------------------------------- 1 | /.idea/ 2 | /dist/ 3 | /models/ 4 | /node_modules/ 5 | /output.png 6 | -------------------------------------------------------------------------------- /Readme.md: -------------------------------------------------------------------------------- 1 | # THIS REPOSITORY IS OBSOLETE, USE https://github.com/dakenf/diffusers.js 2 | 3 | # Stable Diffusion for Node.js with GPU acceleration on Cuda or DirectML 4 | 5 | ## Info 6 | This is a pure TypeScript implementation of SD pipeline that runs ONNX versions of the model with [patched ONNX node runtime](https://github.com/dakenf/onnxruntime-node-gpu) 7 | 8 | ## Requirements 9 | Warning: this project requires Node 18 10 | 11 | ### Windows 12 | Works out of the box with DirectML. No additional libraries required 13 | 14 | You can speed up things by installing tfjs-node, but I haven't seen significant performance improvements https://github.com/tensorflow/tfjs/tree/master/tfjs-node 15 | 16 | tfjs-node might require installing Visual Studio build tools and python 2.7 https://community.chocolatey.org/packages/visualstudio2022buildtools 17 | 18 | ### Linux / WSL2 19 | 1. Install CUDA (tested only on 11.7, but 12 should be supported) https://docs.nvidia.com/cuda/cuda-installation-guide-linux/ 20 | 2. Install cuDNN https://developer.nvidia.com/rdp/cudnn-archive 21 | 3. 
Install onnxruntime-linux-x64-gpu-1.14.1 https://github.com/microsoft/onnxruntime/releases/tag/v1.14.1 22 | ### Mac OS M1 23 | No requirements but can run only on CPU which is quite slow (about 0.2s/it for fp32 and 0.1s/it for fp16) 24 | 25 | ## Usage 26 | ``` 27 | npm i stable-diffusion-nodejs 28 | ``` 29 | 30 | ### Basic windows with SD 2.1 31 | ```typescript 32 | import fs from 'fs'; import { PNG } from 'pngjs' 33 | import { StableDiffusionPipeline } from 'stable-diffusion-nodejs' 34 | 35 | const pipe = await StableDiffusionPipeline.fromPretrained( 36 | 'directml', // can be 'cuda' on linux or 'cpu' on mac os 37 | 'aislamov/stable-diffusion-2-1-base-onnx', // relative path or huggingface repo with onnx model 38 | ) 39 | 40 | const image = await pipe.run("A photo of a cat", undefined, 1, 9, 30) 41 | const p = new PNG({ width: 512, height: 512, inputColorType: 2 }) 42 | p.data = Buffer.from((await image[0].data())) 43 | p.pack().pipe(fs.createWriteStream('output.png')).on('finish', () => { 44 | console.log('Image saved as output.png'); 45 | }) 46 | ``` 47 | 48 | ### Accelerated with tfjs-node SD 2.1 49 | ```typescript 50 | import fs from 'fs'; import * as tf from "@tensorflow/tfjs-node" 51 | import { StableDiffusionPipeline } from 'stable-diffusion-nodejs' 52 | 53 | const pipe = await StableDiffusionPipeline.fromPretrained( 54 | 'directml', // can be 'cuda' on linux or 'cpu' on mac os 55 | 'aislamov/stable-diffusion-2-1-base-onnx', // relative path or huggingface repo with onnx model 56 | ) 57 | 58 | const image = await pipe.run("A photo of a cat", undefined, 1, 9, 30) 59 | const png = await tf.node.encodePng(image[0]) 60 | fs.writeFileSync("output.png", png); 61 | ``` 62 | 63 | ### To run 1.X models you need to pass huggingface hub revision and version number = 1 64 | ```typescript 65 | import fs from 'fs'; import * as tf from "@tensorflow/tfjs-node" 66 | import { StableDiffusionPipeline } from 'stable-diffusion-nodejs' 67 | 68 | const pipe = await StableDiffusionPipeline.fromPretrained( 69 | 'directml', // can be 'cuda' on 
linux or 'cpu' on mac os 70 | 'CompVis/stable-diffusion-v1-4', 71 | 'onnx', // hf hub revision 72 | 1, // SD version, cannot detect automatically yet 73 | ) 74 | 75 | const image = await pipe.run("A photo of a cat", undefined, 1, 9, 30) 76 | const png = await tf.node.encodePng(image[0]) 77 | fs.writeFileSync("output.png", png); 78 | ``` 79 | 80 | ## Command-line usage 81 | To test inference, run this command. It will download SD2.1 onnx version from huggingface hub 82 | ### Windows 83 | `npm run txt2img -- --prompt "an astronaut riding a horse" --provider directml` 84 | ### Linux 85 | `npm run txt2img -- --prompt "an astronaut riding a horse" --provider cuda` 86 | 87 | You can also use `--provider cpu` on a mac or if you don't have a supported video card 88 | 89 | ## Converting other models to ONNX 90 | You can use this tool to convert any HF hub model to ONNX https://github.com/Amblyopius/Stable-Diffusion-ONNX-FP16 91 | Use fp16 for Cuda/DirectML and fp32 for Apple M1 (it runs twice as fast but is still slow) 92 | 93 | ## Roadmap 94 | 1. Support different schedulers, like DDIM and UniPCMultistepScheduler 95 | 2. Support batch size > 1 96 | 3. ControlNet support 97 | 4. 
Add interop between ONNX backend and tensorflow.js to avoid copying data from and to GPU on each inference step 98 | -------------------------------------------------------------------------------- /package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "stable-diffusion-nodejs", 3 | "version": "1.0.3", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "stable-diffusion-nodejs", 9 | "version": "1.0.3", 10 | "license": "MIT", 11 | "dependencies": { 12 | "@huggingface/hub": "^0.5.1", 13 | "@tensorflow/tfjs": "^4.4.0", 14 | "onnxruntime-node-gpu": "^1.14.0", 15 | "pngjs": "^7.0.0", 16 | "tqdm": "^2.0.3" 17 | }, 18 | "devDependencies": { 19 | "@types/minimist": "^1.2.2", 20 | "@types/node": "^18.16.3", 21 | "@types/pngjs": "^6.0.1", 22 | "minimist": "^1.2.8", 23 | "ts-node": "^10.9.1", 24 | "typescript": "^5.0.4" 25 | } 26 | }, 27 | "node_modules/@cspotcode/source-map-support": { 28 | "version": "0.8.1", 29 | "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", 30 | "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", 31 | "dev": true, 32 | "dependencies": { 33 | "@jridgewell/trace-mapping": "0.3.9" 34 | }, 35 | "engines": { 36 | "node": ">=12" 37 | } 38 | }, 39 | "node_modules/@huggingface/hub": { 40 | "version": "0.5.1", 41 | "resolved": "https://registry.npmjs.org/@huggingface/hub/-/hub-0.5.1.tgz", 42 | "integrity": "sha512-ZaE2gY8NY+XwIOL7+gBhPq19PXG4gbGSSJ7zwWLoq6MKP+nsgkQk/c7fBFrxgBwR6lNd0AJMHPRCjwTndqsqWQ==", 43 | "dependencies": { 44 | "hash-wasm": "^4.9.0" 45 | }, 46 | "engines": { 47 | "node": ">=18" 48 | } 49 | }, 50 | "node_modules/@jridgewell/resolve-uri": { 51 | "version": "3.1.1", 52 | "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.1.tgz", 53 | "integrity": 
"sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA==", 54 | "dev": true, 55 | "engines": { 56 | "node": ">=6.0.0" 57 | } 58 | }, 59 | "node_modules/@jridgewell/sourcemap-codec": { 60 | "version": "1.4.15", 61 | "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz", 62 | "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==", 63 | "dev": true 64 | }, 65 | "node_modules/@jridgewell/trace-mapping": { 66 | "version": "0.3.9", 67 | "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", 68 | "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", 69 | "dev": true, 70 | "dependencies": { 71 | "@jridgewell/resolve-uri": "^3.0.3", 72 | "@jridgewell/sourcemap-codec": "^1.4.10" 73 | } 74 | }, 75 | "node_modules/@tensorflow/tfjs": { 76 | "version": "4.4.0", 77 | "resolved": "https://registry.npmjs.org/@tensorflow/tfjs/-/tfjs-4.4.0.tgz", 78 | "integrity": "sha512-EmCsnzdvawyk4b+4JKaLLuicHcJQRZtL1zSy9AWJLiiHTbDDseYgLxfaCEfLk8v2bUe7SBXwl3n3B7OjgvH11Q==", 79 | "dependencies": { 80 | "@tensorflow/tfjs-backend-cpu": "4.4.0", 81 | "@tensorflow/tfjs-backend-webgl": "4.4.0", 82 | "@tensorflow/tfjs-converter": "4.4.0", 83 | "@tensorflow/tfjs-core": "4.4.0", 84 | "@tensorflow/tfjs-data": "4.4.0", 85 | "@tensorflow/tfjs-layers": "4.4.0", 86 | "argparse": "^1.0.10", 87 | "chalk": "^4.1.0", 88 | "core-js": "3.29.1", 89 | "regenerator-runtime": "^0.13.5", 90 | "yargs": "^16.0.3" 91 | }, 92 | "bin": { 93 | "tfjs-custom-module": "dist/tools/custom_module/cli.js" 94 | } 95 | }, 96 | "node_modules/@tensorflow/tfjs-backend-cpu": { 97 | "version": "4.4.0", 98 | "resolved": "https://registry.npmjs.org/@tensorflow/tfjs-backend-cpu/-/tfjs-backend-cpu-4.4.0.tgz", 99 | "integrity": 
"sha512-d4eln500/qNym78z9IrUUzF0ITBoJGLrxV8xd92kLVoXhg35Mm+zqUXShjFcrH8joOHOFuST0qZ0TbDDqcPzPA==", 100 | "dependencies": { 101 | "@types/seedrandom": "^2.4.28", 102 | "seedrandom": "^3.0.5" 103 | }, 104 | "engines": { 105 | "yarn": ">= 1.3.2" 106 | }, 107 | "peerDependencies": { 108 | "@tensorflow/tfjs-core": "4.4.0" 109 | } 110 | }, 111 | "node_modules/@tensorflow/tfjs-backend-webgl": { 112 | "version": "4.4.0", 113 | "resolved": "https://registry.npmjs.org/@tensorflow/tfjs-backend-webgl/-/tfjs-backend-webgl-4.4.0.tgz", 114 | "integrity": "sha512-TzQKvfAPgGt9cMG+5bVoTckoG1xr/PVJM/uODkPvzcMqi3j97kuWDXwkYJIgXldStmfiKkU7f5CmyD3Cq3E6BA==", 115 | "dependencies": { 116 | "@tensorflow/tfjs-backend-cpu": "4.4.0", 117 | "@types/offscreencanvas": "~2019.3.0", 118 | "@types/seedrandom": "^2.4.28", 119 | "@types/webgl-ext": "0.0.30", 120 | "seedrandom": "^3.0.5" 121 | }, 122 | "engines": { 123 | "yarn": ">= 1.3.2" 124 | }, 125 | "peerDependencies": { 126 | "@tensorflow/tfjs-core": "4.4.0" 127 | } 128 | }, 129 | "node_modules/@tensorflow/tfjs-converter": { 130 | "version": "4.4.0", 131 | "resolved": "https://registry.npmjs.org/@tensorflow/tfjs-converter/-/tfjs-converter-4.4.0.tgz", 132 | "integrity": "sha512-JUjpRStrAuw37tgPd5UENu0UjQVuJT09yF7KpOur4BriJ0uQqrbEZHMPHmvUtr5nYzkqlXJTuXIyxvEY/olNpg==", 133 | "peerDependencies": { 134 | "@tensorflow/tfjs-core": "4.4.0" 135 | } 136 | }, 137 | "node_modules/@tensorflow/tfjs-core": { 138 | "version": "4.4.0", 139 | "resolved": "https://registry.npmjs.org/@tensorflow/tfjs-core/-/tfjs-core-4.4.0.tgz", 140 | "integrity": "sha512-Anxpc7cAOA0Q7EUXdTbQKMg3reFvrdkgDlaYzH9ZfkMq2CgLV4Au6E/s6HmbYn/VrAtWy9mLY5c/lLJqh4764g==", 141 | "dependencies": { 142 | "@types/long": "^4.0.1", 143 | "@types/offscreencanvas": "~2019.7.0", 144 | "@types/seedrandom": "^2.4.28", 145 | "@types/webgl-ext": "0.0.30", 146 | "@webgpu/types": "0.1.30", 147 | "long": "4.0.0", 148 | "node-fetch": "~2.6.1", 149 | "seedrandom": "^3.0.5" 150 | }, 151 | "engines": { 152 | 
"yarn": ">= 1.3.2" 153 | } 154 | }, 155 | "node_modules/@tensorflow/tfjs-core/node_modules/@types/offscreencanvas": { 156 | "version": "2019.7.0", 157 | "resolved": "https://registry.npmjs.org/@types/offscreencanvas/-/offscreencanvas-2019.7.0.tgz", 158 | "integrity": "sha512-PGcyveRIpL1XIqK8eBsmRBt76eFgtzuPiSTyKHZxnGemp2yzGzWpjYKAfK3wIMiU7eH+851yEpiuP8JZerTmWg==" 159 | }, 160 | "node_modules/@tensorflow/tfjs-data": { 161 | "version": "4.4.0", 162 | "resolved": "https://registry.npmjs.org/@tensorflow/tfjs-data/-/tfjs-data-4.4.0.tgz", 163 | "integrity": "sha512-aY4eq4cgrsrXeBU6ABZAAN3tV0fG4YcHd0z+cYuNXnCo+VEQLJnPmhn+xymZ4VQZQH4GXbVS4dV9pXMclFNRFw==", 164 | "dependencies": { 165 | "@types/node-fetch": "^2.1.2", 166 | "node-fetch": "~2.6.1", 167 | "string_decoder": "^1.3.0" 168 | }, 169 | "peerDependencies": { 170 | "@tensorflow/tfjs-core": "4.4.0", 171 | "seedrandom": "^3.0.5" 172 | } 173 | }, 174 | "node_modules/@tensorflow/tfjs-layers": { 175 | "version": "4.4.0", 176 | "resolved": "https://registry.npmjs.org/@tensorflow/tfjs-layers/-/tfjs-layers-4.4.0.tgz", 177 | "integrity": "sha512-OGC7shfiD9Gc698hINHK4y9slOJvu5m54tVNm4xf+WSNrw/avvgpar6yyoL5bakYIZNQvFNK75Yr8VRPR7oPeQ==", 178 | "peerDependencies": { 179 | "@tensorflow/tfjs-core": "4.4.0" 180 | } 181 | }, 182 | "node_modules/@tsconfig/node10": { 183 | "version": "1.0.9", 184 | "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz", 185 | "integrity": "sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==", 186 | "dev": true 187 | }, 188 | "node_modules/@tsconfig/node12": { 189 | "version": "1.0.11", 190 | "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", 191 | "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", 192 | "dev": true 193 | }, 194 | "node_modules/@tsconfig/node14": { 195 | "version": "1.0.3", 196 | "resolved": 
"https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", 197 | "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", 198 | "dev": true 199 | }, 200 | "node_modules/@tsconfig/node16": { 201 | "version": "1.0.3", 202 | "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.3.tgz", 203 | "integrity": "sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ==", 204 | "dev": true 205 | }, 206 | "node_modules/@types/long": { 207 | "version": "4.0.2", 208 | "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.2.tgz", 209 | "integrity": "sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA==" 210 | }, 211 | "node_modules/@types/minimist": { 212 | "version": "1.2.2", 213 | "resolved": "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.2.tgz", 214 | "integrity": "sha512-jhuKLIRrhvCPLqwPcx6INqmKeiA5EWrsCOPhrlFSrbrmU4ZMPjj5Ul/oLCMDO98XRUIwVm78xICz4EPCektzeQ==", 215 | "dev": true 216 | }, 217 | "node_modules/@types/node": { 218 | "version": "18.16.3", 219 | "resolved": "https://registry.npmjs.org/@types/node/-/node-18.16.3.tgz", 220 | "integrity": "sha512-OPs5WnnT1xkCBiuQrZA4+YAV4HEJejmHneyraIaxsbev5yCEr6KMwINNFP9wQeFIw8FWcoTqF3vQsa5CDaI+8Q==" 221 | }, 222 | "node_modules/@types/node-fetch": { 223 | "version": "2.6.3", 224 | "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.3.tgz", 225 | "integrity": "sha512-ETTL1mOEdq/sxUtgtOhKjyB2Irra4cjxksvcMUR5Zr4n+PxVhsCD9WS46oPbHL3et9Zde7CNRr+WUNlcHvsX+w==", 226 | "dependencies": { 227 | "@types/node": "*", 228 | "form-data": "^3.0.0" 229 | } 230 | }, 231 | "node_modules/@types/offscreencanvas": { 232 | "version": "2019.3.0", 233 | "resolved": "https://registry.npmjs.org/@types/offscreencanvas/-/offscreencanvas-2019.3.0.tgz", 234 | "integrity": 
"sha512-esIJx9bQg+QYF0ra8GnvfianIY8qWB0GBx54PK5Eps6m+xTj86KLavHv6qDhzKcu5UUOgNfJ2pWaIIV7TRUd9Q==" 235 | }, 236 | "node_modules/@types/pngjs": { 237 | "version": "6.0.1", 238 | "resolved": "https://registry.npmjs.org/@types/pngjs/-/pngjs-6.0.1.tgz", 239 | "integrity": "sha512-J39njbdW1U/6YyVXvC9+1iflZghP8jgRf2ndYghdJb5xL49LYDB+1EuAxfbuJ2IBbWIL3AjHPQhgaTxT3YaYeg==", 240 | "dev": true, 241 | "dependencies": { 242 | "@types/node": "*" 243 | } 244 | }, 245 | "node_modules/@types/seedrandom": { 246 | "version": "2.4.30", 247 | "resolved": "https://registry.npmjs.org/@types/seedrandom/-/seedrandom-2.4.30.tgz", 248 | "integrity": "sha512-AnxLHewubLVzoF/A4qdxBGHCKifw8cY32iro3DQX9TPcetE95zBeVt3jnsvtvAUf1vwzMfwzp4t/L2yqPlnjkQ==" 249 | }, 250 | "node_modules/@types/webgl-ext": { 251 | "version": "0.0.30", 252 | "resolved": "https://registry.npmjs.org/@types/webgl-ext/-/webgl-ext-0.0.30.tgz", 253 | "integrity": "sha512-LKVgNmBxN0BbljJrVUwkxwRYqzsAEPcZOe6S2T6ZaBDIrFp0qu4FNlpc5sM1tGbXUYFgdVQIoeLk1Y1UoblyEg==" 254 | }, 255 | "node_modules/@webgpu/types": { 256 | "version": "0.1.30", 257 | "resolved": "https://registry.npmjs.org/@webgpu/types/-/types-0.1.30.tgz", 258 | "integrity": "sha512-9AXJSmL3MzY8ZL//JjudA//q+2kBRGhLBFpkdGksWIuxrMy81nFrCzj2Am+mbh8WoU6rXmv7cY5E3rdlyru2Qg==" 259 | }, 260 | "node_modules/acorn": { 261 | "version": "8.8.2", 262 | "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.2.tgz", 263 | "integrity": "sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw==", 264 | "dev": true, 265 | "bin": { 266 | "acorn": "bin/acorn" 267 | }, 268 | "engines": { 269 | "node": ">=0.4.0" 270 | } 271 | }, 272 | "node_modules/acorn-walk": { 273 | "version": "8.2.0", 274 | "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz", 275 | "integrity": "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==", 276 | "dev": true, 277 | "engines": { 278 | "node": ">=0.4.0" 279 | } 
280 | }, 281 | "node_modules/ansi-regex": { 282 | "version": "5.0.1", 283 | "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", 284 | "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", 285 | "engines": { 286 | "node": ">=8" 287 | } 288 | }, 289 | "node_modules/ansi-styles": { 290 | "version": "4.3.0", 291 | "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", 292 | "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", 293 | "dependencies": { 294 | "color-convert": "^2.0.1" 295 | }, 296 | "engines": { 297 | "node": ">=8" 298 | }, 299 | "funding": { 300 | "url": "https://github.com/chalk/ansi-styles?sponsor=1" 301 | } 302 | }, 303 | "node_modules/arg": { 304 | "version": "4.1.3", 305 | "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", 306 | "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", 307 | "dev": true 308 | }, 309 | "node_modules/argparse": { 310 | "version": "1.0.10", 311 | "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", 312 | "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", 313 | "dependencies": { 314 | "sprintf-js": "~1.0.2" 315 | } 316 | }, 317 | "node_modules/asynckit": { 318 | "version": "0.4.0", 319 | "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", 320 | "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" 321 | }, 322 | "node_modules/chalk": { 323 | "version": "4.1.2", 324 | "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", 325 | "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", 326 | "dependencies": { 327 | "ansi-styles": "^4.1.0", 328 | "supports-color": "^7.1.0" 329 | 
}, 330 | "engines": { 331 | "node": ">=10" 332 | }, 333 | "funding": { 334 | "url": "https://github.com/chalk/chalk?sponsor=1" 335 | } 336 | }, 337 | "node_modules/cliui": { 338 | "version": "7.0.4", 339 | "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", 340 | "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", 341 | "dependencies": { 342 | "string-width": "^4.2.0", 343 | "strip-ansi": "^6.0.0", 344 | "wrap-ansi": "^7.0.0" 345 | } 346 | }, 347 | "node_modules/color-convert": { 348 | "version": "2.0.1", 349 | "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", 350 | "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", 351 | "dependencies": { 352 | "color-name": "~1.1.4" 353 | }, 354 | "engines": { 355 | "node": ">=7.0.0" 356 | } 357 | }, 358 | "node_modules/color-name": { 359 | "version": "1.1.4", 360 | "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", 361 | "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" 362 | }, 363 | "node_modules/combined-stream": { 364 | "version": "1.0.8", 365 | "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", 366 | "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", 367 | "dependencies": { 368 | "delayed-stream": "~1.0.0" 369 | }, 370 | "engines": { 371 | "node": ">= 0.8" 372 | } 373 | }, 374 | "node_modules/core-js": { 375 | "version": "3.29.1", 376 | "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.29.1.tgz", 377 | "integrity": "sha512-+jwgnhg6cQxKYIIjGtAHq2nwUOolo9eoFZ4sHfUH09BLXBgxnH4gA0zEd+t+BO2cNB8idaBtZFcFTRjQJRJmAw==", 378 | "hasInstallScript": true, 379 | "funding": { 380 | "type": "opencollective", 381 | "url": "https://opencollective.com/core-js" 382 | } 383 | }, 384 | 
"node_modules/create-require": { 385 | "version": "1.1.1", 386 | "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", 387 | "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", 388 | "dev": true 389 | }, 390 | "node_modules/delayed-stream": { 391 | "version": "1.0.0", 392 | "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", 393 | "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", 394 | "engines": { 395 | "node": ">=0.4.0" 396 | } 397 | }, 398 | "node_modules/diff": { 399 | "version": "4.0.2", 400 | "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", 401 | "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", 402 | "dev": true, 403 | "engines": { 404 | "node": ">=0.3.1" 405 | } 406 | }, 407 | "node_modules/emoji-regex": { 408 | "version": "8.0.0", 409 | "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", 410 | "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" 411 | }, 412 | "node_modules/escalade": { 413 | "version": "3.1.1", 414 | "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", 415 | "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", 416 | "engines": { 417 | "node": ">=6" 418 | } 419 | }, 420 | "node_modules/form-data": { 421 | "version": "3.0.1", 422 | "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", 423 | "integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", 424 | "dependencies": { 425 | "asynckit": "^0.4.0", 426 | "combined-stream": "^1.0.8", 427 | "mime-types": "^2.1.12" 428 | }, 429 | "engines": { 430 | "node": ">= 6" 431 | } 432 | }, 433 | 
"node_modules/get-caller-file": { 434 | "version": "2.0.5", 435 | "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", 436 | "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", 437 | "engines": { 438 | "node": "6.* || 8.* || >= 10.*" 439 | } 440 | }, 441 | "node_modules/has-flag": { 442 | "version": "4.0.0", 443 | "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", 444 | "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", 445 | "engines": { 446 | "node": ">=8" 447 | } 448 | }, 449 | "node_modules/hash-wasm": { 450 | "version": "4.9.0", 451 | "resolved": "https://registry.npmjs.org/hash-wasm/-/hash-wasm-4.9.0.tgz", 452 | "integrity": "sha512-7SW7ejyfnRxuOc7ptQHSf4LDoZaWOivfzqw+5rpcQku0nHfmicPKE51ra9BiRLAmT8+gGLestr1XroUkqdjL6w==" 453 | }, 454 | "node_modules/is-fullwidth-code-point": { 455 | "version": "3.0.0", 456 | "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", 457 | "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", 458 | "engines": { 459 | "node": ">=8" 460 | } 461 | }, 462 | "node_modules/long": { 463 | "version": "4.0.0", 464 | "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz", 465 | "integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==" 466 | }, 467 | "node_modules/make-error": { 468 | "version": "1.3.6", 469 | "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", 470 | "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", 471 | "dev": true 472 | }, 473 | "node_modules/mime-db": { 474 | "version": "1.52.0", 475 | "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", 476 | "integrity": 
"sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", 477 | "engines": { 478 | "node": ">= 0.6" 479 | } 480 | }, 481 | "node_modules/mime-types": { 482 | "version": "2.1.35", 483 | "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", 484 | "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", 485 | "dependencies": { 486 | "mime-db": "1.52.0" 487 | }, 488 | "engines": { 489 | "node": ">= 0.6" 490 | } 491 | }, 492 | "node_modules/minimist": { 493 | "version": "1.2.8", 494 | "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", 495 | "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", 496 | "dev": true, 497 | "funding": { 498 | "url": "https://github.com/sponsors/ljharb" 499 | } 500 | }, 501 | "node_modules/node-fetch": { 502 | "version": "2.6.9", 503 | "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.9.tgz", 504 | "integrity": "sha512-DJm/CJkZkRjKKj4Zi4BsKVZh3ValV5IR5s7LVZnW+6YMh0W1BfNA8XSs6DLMGYlId5F3KnA70uu2qepcR08Qqg==", 505 | "dependencies": { 506 | "whatwg-url": "^5.0.0" 507 | }, 508 | "engines": { 509 | "node": "4.x || >=6.0.0" 510 | }, 511 | "peerDependencies": { 512 | "encoding": "^0.1.0" 513 | }, 514 | "peerDependenciesMeta": { 515 | "encoding": { 516 | "optional": true 517 | } 518 | } 519 | }, 520 | "node_modules/onnxruntime-common": { 521 | "version": "1.14.0", 522 | "resolved": "https://registry.npmjs.org/onnxruntime-common/-/onnxruntime-common-1.14.0.tgz", 523 | "integrity": "sha512-3LJpegM2iMNRX2wUmtYfeX/ytfOzNwAWKSq1HbRrKc9+uqG/FsEA0bbKZl1btQeZaXhC26l44NWpNUeXPII7Ew==" 524 | }, 525 | "node_modules/onnxruntime-node-gpu": { 526 | "version": "1.14.0", 527 | "resolved": "https://registry.npmjs.org/onnxruntime-node-gpu/-/onnxruntime-node-gpu-1.14.0.tgz", 528 | "integrity": 
"sha512-12vkY4f/O3d+iZBzIsX4iezK1uqqOUoGFUiqaCpryildA3MAV2lv8MZTCiQq9r0sa8HvL1PPs/jsqaPqOaxHTA==", 529 | "hasInstallScript": true, 530 | "dependencies": { 531 | "onnxruntime-common": "1.14.0" 532 | } 533 | }, 534 | "node_modules/pngjs": { 535 | "version": "7.0.0", 536 | "resolved": "https://registry.npmjs.org/pngjs/-/pngjs-7.0.0.tgz", 537 | "integrity": "sha512-LKWqWJRhstyYo9pGvgor/ivk2w94eSjE3RGVuzLGlr3NmD8bf7RcYGze1mNdEHRP6TRP6rMuDHk5t44hnTRyow==", 538 | "engines": { 539 | "node": ">=14.19.0" 540 | } 541 | }, 542 | "node_modules/regenerator-runtime": { 543 | "version": "0.13.11", 544 | "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz", 545 | "integrity": "sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==" 546 | }, 547 | "node_modules/require-directory": { 548 | "version": "2.1.1", 549 | "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", 550 | "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", 551 | "engines": { 552 | "node": ">=0.10.0" 553 | } 554 | }, 555 | "node_modules/safe-buffer": { 556 | "version": "5.2.1", 557 | "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", 558 | "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", 559 | "funding": [ 560 | { 561 | "type": "github", 562 | "url": "https://github.com/sponsors/feross" 563 | }, 564 | { 565 | "type": "patreon", 566 | "url": "https://www.patreon.com/feross" 567 | }, 568 | { 569 | "type": "consulting", 570 | "url": "https://feross.org/support" 571 | } 572 | ] 573 | }, 574 | "node_modules/seedrandom": { 575 | "version": "3.0.5", 576 | "resolved": "https://registry.npmjs.org/seedrandom/-/seedrandom-3.0.5.tgz", 577 | "integrity": "sha512-8OwmbklUNzwezjGInmZ+2clQmExQPvomqjL7LFqOYqtmuxRgQYqOD3mHaU+MvZn5FLUeVxVfQjwLZW/n/JFuqg==" 578 | }, 579 | 
"node_modules/sprintf-js": { 580 | "version": "1.0.3", 581 | "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", 582 | "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==" 583 | }, 584 | "node_modules/string_decoder": { 585 | "version": "1.3.0", 586 | "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", 587 | "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", 588 | "dependencies": { 589 | "safe-buffer": "~5.2.0" 590 | } 591 | }, 592 | "node_modules/string-width": { 593 | "version": "4.2.3", 594 | "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", 595 | "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", 596 | "dependencies": { 597 | "emoji-regex": "^8.0.0", 598 | "is-fullwidth-code-point": "^3.0.0", 599 | "strip-ansi": "^6.0.1" 600 | }, 601 | "engines": { 602 | "node": ">=8" 603 | } 604 | }, 605 | "node_modules/strip-ansi": { 606 | "version": "6.0.1", 607 | "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", 608 | "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", 609 | "dependencies": { 610 | "ansi-regex": "^5.0.1" 611 | }, 612 | "engines": { 613 | "node": ">=8" 614 | } 615 | }, 616 | "node_modules/supports-color": { 617 | "version": "7.2.0", 618 | "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", 619 | "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", 620 | "dependencies": { 621 | "has-flag": "^4.0.0" 622 | }, 623 | "engines": { 624 | "node": ">=8" 625 | } 626 | }, 627 | "node_modules/tqdm": { 628 | "version": "2.0.3", 629 | "resolved": "https://registry.npmjs.org/tqdm/-/tqdm-2.0.3.tgz", 630 | "integrity": 
"sha512-Ju50G550gspkjd1AiJ/jFBHe2dii9s+KPntEsq0o73BqywqzNWPUM8/FD3zM1rOH7OGLoH7pGSGI90Ct+Yd/5Q==" 631 | }, 632 | "node_modules/tr46": { 633 | "version": "0.0.3", 634 | "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", 635 | "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" 636 | }, 637 | "node_modules/ts-node": { 638 | "version": "10.9.1", 639 | "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.1.tgz", 640 | "integrity": "sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==", 641 | "dev": true, 642 | "dependencies": { 643 | "@cspotcode/source-map-support": "^0.8.0", 644 | "@tsconfig/node10": "^1.0.7", 645 | "@tsconfig/node12": "^1.0.7", 646 | "@tsconfig/node14": "^1.0.0", 647 | "@tsconfig/node16": "^1.0.2", 648 | "acorn": "^8.4.1", 649 | "acorn-walk": "^8.1.1", 650 | "arg": "^4.1.0", 651 | "create-require": "^1.1.0", 652 | "diff": "^4.0.1", 653 | "make-error": "^1.1.1", 654 | "v8-compile-cache-lib": "^3.0.1", 655 | "yn": "3.1.1" 656 | }, 657 | "bin": { 658 | "ts-node": "dist/bin.js", 659 | "ts-node-cwd": "dist/bin-cwd.js", 660 | "ts-node-esm": "dist/bin-esm.js", 661 | "ts-node-script": "dist/bin-script.js", 662 | "ts-node-transpile-only": "dist/bin-transpile.js", 663 | "ts-script": "dist/bin-script-deprecated.js" 664 | }, 665 | "peerDependencies": { 666 | "@swc/core": ">=1.2.50", 667 | "@swc/wasm": ">=1.2.50", 668 | "@types/node": "*", 669 | "typescript": ">=2.7" 670 | }, 671 | "peerDependenciesMeta": { 672 | "@swc/core": { 673 | "optional": true 674 | }, 675 | "@swc/wasm": { 676 | "optional": true 677 | } 678 | } 679 | }, 680 | "node_modules/typescript": { 681 | "version": "5.0.4", 682 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.0.4.tgz", 683 | "integrity": "sha512-cW9T5W9xY37cc+jfEnaUvX91foxtHkza3Nw3wkoF4sSlKn0MONdkdEndig/qPBWXNkmplh3NzayQzCiHM4/hqw==", 684 | "dev": true, 685 | "bin": { 686 | "tsc": "bin/tsc", 
687 | "tsserver": "bin/tsserver" 688 | }, 689 | "engines": { 690 | "node": ">=12.20" 691 | } 692 | }, 693 | "node_modules/v8-compile-cache-lib": { 694 | "version": "3.0.1", 695 | "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", 696 | "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", 697 | "dev": true 698 | }, 699 | "node_modules/webidl-conversions": { 700 | "version": "3.0.1", 701 | "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", 702 | "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" 703 | }, 704 | "node_modules/whatwg-url": { 705 | "version": "5.0.0", 706 | "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", 707 | "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", 708 | "dependencies": { 709 | "tr46": "~0.0.3", 710 | "webidl-conversions": "^3.0.0" 711 | } 712 | }, 713 | "node_modules/wrap-ansi": { 714 | "version": "7.0.0", 715 | "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", 716 | "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", 717 | "dependencies": { 718 | "ansi-styles": "^4.0.0", 719 | "string-width": "^4.1.0", 720 | "strip-ansi": "^6.0.0" 721 | }, 722 | "engines": { 723 | "node": ">=10" 724 | }, 725 | "funding": { 726 | "url": "https://github.com/chalk/wrap-ansi?sponsor=1" 727 | } 728 | }, 729 | "node_modules/y18n": { 730 | "version": "5.0.8", 731 | "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", 732 | "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", 733 | "engines": { 734 | "node": ">=10" 735 | } 736 | }, 737 | "node_modules/yargs": { 738 | "version": "16.2.0", 739 | "resolved": 
"https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", 740 | "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", 741 | "dependencies": { 742 | "cliui": "^7.0.2", 743 | "escalade": "^3.1.1", 744 | "get-caller-file": "^2.0.5", 745 | "require-directory": "^2.1.1", 746 | "string-width": "^4.2.0", 747 | "y18n": "^5.0.5", 748 | "yargs-parser": "^20.2.2" 749 | }, 750 | "engines": { 751 | "node": ">=10" 752 | } 753 | }, 754 | "node_modules/yargs-parser": { 755 | "version": "20.2.9", 756 | "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", 757 | "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", 758 | "engines": { 759 | "node": ">=10" 760 | } 761 | }, 762 | "node_modules/yn": { 763 | "version": "3.1.1", 764 | "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", 765 | "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", 766 | "dev": true, 767 | "engines": { 768 | "node": ">=6" 769 | } 770 | } 771 | } 772 | } 773 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "stable-diffusion-nodejs", 3 | "version": "1.0.5", 4 | "description": "StableDiffusion on nodejs with GPU acceleration using Cuda or DirectML", 5 | "main": "dist/index.js", 6 | "scripts": { 7 | "build": "tsc", 8 | "txt2img": "ts-node src/txt2img.ts" 9 | }, 10 | "repository": { 11 | "type": "git", 12 | "url": "git+https://github.com/dakenf/stable-diffusion-nodejs.git" 13 | }, 14 | "author": "Arthur Islamov ", 15 | "license": "MIT", 16 | "bugs": { 17 | "url": "https://github.com/dakenf/stable-diffusion-nodejs/issues" 18 | }, 19 | "homepage": "https://github.com/dakenf/stable-diffusion-nodejs#readme", 20 | "devDependencies": { 21 | "@types/minimist": "^1.2.2", 22 
| "@types/node": "^18.16.3", 23 | "@types/pngjs": "^6.0.1", 24 | "minimist": "^1.2.8", 25 | "ts-node": "^10.9.1", 26 | "typescript": "^5.0.4" 27 | }, 28 | "dependencies": { 29 | "@huggingface/hub": "^0.5.1", 30 | "@tensorflow/tfjs": "^4.4.0", 31 | "onnxruntime-node-gpu": "^1.14.0", 32 | "pngjs": "^7.0.0", 33 | "tqdm": "^2.0.3" 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/StableDiffusionPipeline.ts: -------------------------------------------------------------------------------- 1 | import * as tf from '@tensorflow/tfjs' 2 | import { InferenceSession, Tensor } from 'onnxruntime-node-gpu'; 3 | import Tokenizer from './tokenizer' 4 | import { PNDMScheduler } from './schedulers/PNDMScheduler' 5 | import tqdm from 'tqdm' 6 | import fs from 'fs/promises' 7 | import { createWriteStream, ReadStream } from 'fs' 8 | import path from 'path' 9 | import { pipeline } from 'stream' 10 | import { promisify } from 'util' 11 | import { listFiles, downloadFile } from '@huggingface/hub' 12 | 13 | const pipelineAsync = promisify(pipeline) 14 | 15 | let MODEL_CACHE_DIR = process.env.MODEL_CACHE_DIR || 'models' 16 | if (MODEL_CACHE_DIR[MODEL_CACHE_DIR.length - 1] !== '/' || MODEL_CACHE_DIR[MODEL_CACHE_DIR.length - 1] !== '\\') { 17 | MODEL_CACHE_DIR = MODEL_CACHE_DIR + '/' 18 | } 19 | 20 | function extendArray(arr: number[], length: number) { 21 | return arr.concat(Array(length - arr.length).fill(0)); 22 | } 23 | 24 | async function fileExists (filePath: string) { 25 | try { 26 | await fs.access(filePath); 27 | return true; 28 | } catch (error) { 29 | return false; 30 | } 31 | } 32 | 33 | interface SchedulerConfig { 34 | "beta_end": number, 35 | "beta_schedule": string, 36 | "beta_start": number, 37 | "clip_sample": boolean, 38 | "num_train_timesteps": number, 39 | prediction_type?: "epsilon"|"v-preditcion", 40 | "set_alpha_to_one": boolean, 41 | "skip_prk_steps": boolean, 42 | "steps_offset": number, 43 | "trained_betas": null 
44 | } 45 | 46 | export class StableDiffusionPipeline { 47 | public unet: InferenceSession 48 | public vae: InferenceSession 49 | public textEncoder: InferenceSession 50 | public tokenizer: Tokenizer 51 | public scheduler: PNDMScheduler 52 | private sdVersion 53 | 54 | constructor(unet: InferenceSession, vae: InferenceSession, textEncoder: InferenceSession, tokenizer: Tokenizer, scheduler: PNDMScheduler, sdVersion: 1|2 = 2) { 55 | this.unet = unet 56 | this.vae = vae 57 | this.textEncoder = textEncoder 58 | this.tokenizer = tokenizer 59 | this.scheduler = scheduler 60 | this.sdVersion = sdVersion 61 | } 62 | 63 | static async createScheduler (config: SchedulerConfig) { 64 | const scheduler = new PNDMScheduler( 65 | { 66 | prediction_type: 'epsilon', 67 | ...config, 68 | }, 69 | config.num_train_timesteps, 70 | config.beta_start, 71 | config.beta_end, 72 | config.beta_schedule, 73 | ) 74 | await scheduler.setAlphasCumprod() 75 | 76 | return scheduler 77 | } 78 | 79 | static async downloadFromHub (targetDir: string, modelRepoOrPath: string, revision?: string) { 80 | const files = listFiles({ repo: modelRepoOrPath, recursive: true, revision }) 81 | 82 | await fs.mkdir(targetDir, { recursive: true }) 83 | try { 84 | for await (const file of files) { 85 | if (file.type === 'directory') { 86 | continue; 87 | } 88 | 89 | console.log(`Downloading ${file.path}...`) 90 | const response = await downloadFile({ repo: modelRepoOrPath, path: file.path, revision }) 91 | if (!response?.body) { 92 | throw new Error(`Error downloading ${file.path}`) 93 | } 94 | 95 | const targetFile = targetDir + '/' + file.path 96 | const targetPath = path.dirname(targetFile) 97 | if (!await fileExists(targetPath)) { 98 | await fs.mkdir(targetPath, { recursive: true }) 99 | } 100 | 101 | const writeStream = createWriteStream(targetDir + '/' + file.path); 102 | await pipelineAsync(response.body as unknown as ReadStream, writeStream); 103 | } 104 | } catch (e) { 105 | console.error(e) 106 | throw 
await e 107 | } 108 | } 109 | 110 | static async fromPretrained(executionProvider: 'cpu'|'cuda'|'directml' = 'cpu', modelRepoOrPath: string, revision?: string, sdVersion: 1|2 = 2) { 111 | let searchPath = modelRepoOrPath 112 | // let's check in the cache if path does not exist 113 | if (!await fileExists(`${searchPath}/text_encoder/model.onnx`) && searchPath[0] !== '.' && searchPath[0] !== '/') { 114 | searchPath = MODEL_CACHE_DIR + modelRepoOrPath 115 | if (!await fileExists(`${searchPath}/text_encoder/model.onnx`)) { 116 | console.log(`Model not found in cache dir ${searchPath}, downloading from hub...`) 117 | await StableDiffusionPipeline.downloadFromHub(searchPath, modelRepoOrPath, revision) 118 | } 119 | } 120 | 121 | if (!await fileExists(`${searchPath}/text_encoder/model.onnx`)) { 122 | throw new Error("Could not find model files. Maybe you are not using onnx version") 123 | } 124 | 125 | const sessionOption: InferenceSession.SessionOptions = { executionProviders: [executionProvider] } 126 | const textEncoder = await InferenceSession.create(`${searchPath}/text_encoder/model.onnx`, sessionOption) 127 | const unet = await InferenceSession.create(`${searchPath}/unet/model.onnx`, sessionOption) 128 | const vae = await InferenceSession.create(`${searchPath}/vae_decoder/model.onnx`, sessionOption) 129 | 130 | const schedulerConfig = await fs.readFile(`${searchPath}/scheduler/scheduler_config.json`) 131 | const scheduler = await StableDiffusionPipeline.createScheduler(JSON.parse(schedulerConfig.toString())) 132 | 133 | const merges = await fs.readFile(`${searchPath}/tokenizer/merges.txt`) 134 | const tokenizerConfig = await fs.readFile(`${searchPath}/tokenizer/tokenizer_config.json`) 135 | const vocab = await fs.readFile(`${searchPath}/tokenizer/vocab.json`) 136 | return new StableDiffusionPipeline(unet, vae, textEncoder, new Tokenizer(merges.toString(), JSON.parse(tokenizerConfig.toString()), JSON.parse(vocab.toString())), scheduler, sdVersion) 137 | } 138 | 139 | 
async encodePrompt (prompt: string): Promise { 140 | const tokens = this.tokenizer.encode(prompt) 141 | const tensorTokens = new Tensor('int32', Int32Array.from(extendArray([49406, ...tokens.slice(0, this.tokenizer.tokenMaxLen - 2), 49407], 77)), [1, 77]) 142 | const encoded = await this.textEncoder.run({ input_ids: tensorTokens }) 143 | return encoded.last_hidden_state as Tensor 144 | } 145 | 146 | async getPromptEmbeds (prompt: string, negativePrompt: string|undefined) { 147 | const promptEmbeds = await this.encodePrompt(prompt) 148 | const negativePromptEmbeds = await this.encodePrompt(negativePrompt || '') 149 | 150 | const newShape = [...promptEmbeds.dims] 151 | newShape[0] = 2 152 | return new Tensor('float32', [...negativePromptEmbeds.data as unknown as number[], ...promptEmbeds.data as unknown as number[]], newShape) 153 | } 154 | 155 | async run (prompt: string, negativePrompt: string|undefined, batchSize: number, guidanceScale: number, numInferenceSteps: number) { 156 | const width = 512 157 | const height = 512 158 | if (batchSize != 1) { 159 | throw new Error('Currently only batch size of 1 is supported.') 160 | } 161 | 162 | this.scheduler.setTimesteps(numInferenceSteps) 163 | 164 | const promptEmbeds = await this.getPromptEmbeds(prompt, negativePrompt) 165 | // console.log('promptEmbeds', promptEmbeds) 166 | const latentShape = [batchSize, 4, width / 8, height / 8] 167 | let latents = tf.randomNormal(latentShape, undefined, undefined, 'float32') 168 | 169 | const doClassifierFreeGuidance = guidanceScale > 1 170 | for (const step of tqdm(await this.scheduler.timesteps.data(), { total: this.scheduler.timesteps.size })) { 171 | // for some reason v1.4 takes int64 as timestep input. ideally we should get input dtype from the model 172 | const timestep = this.sdVersion == 2 173 | ? new Tensor('float32', [step]) 174 | : new Tensor(BigInt64Array.from([BigInt(step)]), [1]) 175 | 176 | const latentInputTf = doClassifierFreeGuidance ? 
latents.concat(latents.clone()) : latents 177 | const latentInput = new Tensor(await latentInputTf.data(), latentInputTf.shape) 178 | 179 | let noise = await this.unet.run( 180 | { sample: await latentInput, timestep, encoder_hidden_states: promptEmbeds }, 181 | ) 182 | 183 | let noisePred = Object.values(noise)[0].data as Float32Array 184 | 185 | let noisePredTf 186 | if (doClassifierFreeGuidance) { 187 | const len = Object.values(noise)[0].data.length / 2 188 | const [noisePredUncond, noisePredText] = [ 189 | tf.tensor(noisePred.slice(0, len), latentShape, 'float32'), 190 | tf.tensor(noisePred.slice(len, len * 2), latentShape, 'float32'), 191 | ] 192 | noisePredTf = noisePredUncond.add(noisePredText.sub(noisePredUncond).mul(guidanceScale)) 193 | } else { 194 | noisePredTf = tf.tensor(noisePred, latentShape, 'float32') 195 | } 196 | 197 | const schedulerOutput = this.scheduler.step( 198 | noisePredTf, 199 | step, 200 | latents, 201 | ) 202 | latents = schedulerOutput 203 | } 204 | latents = latents.mul(tf.tensor(1).div(0.18215)) 205 | 206 | const decoded = await this.vae.run({ latent_sample: new Tensor('float32', await latents.data(), [1, 4, width / 8, height / 8]) }) 207 | 208 | const decodedTensor = tf.tensor(decoded.sample.data as Float32Array, decoded.sample.dims as number[], decoded.sample.type as 'float32') 209 | const images = decodedTensor 210 | .div(2) 211 | .add(0.5) 212 | .mul(255).round().clipByValue(0, 255).cast('int32') 213 | .transpose([0, 2, 3, 1]) 214 | .split(batchSize) 215 | 216 | return images.map(i => i.reshape([width, height, 3])) as tf.Tensor3D[] 217 | } 218 | } 219 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | import { StableDiffusionPipeline } from './StableDiffusionPipeline' 2 | 3 | export { 4 | StableDiffusionPipeline, 5 | } 6 | 
-------------------------------------------------------------------------------- /src/schedulers/PNDMScheduler.ts: --------------------------------------------------------------------------------
import * as tf from '@tensorflow/tfjs'

/**
 * Beta schedule following a cosine alpha-bar curve ("squaredcos_cap_v2" in
 * diffusers), with each beta capped at maxBeta.
 */
function betasForAlphaBar (numDiffusionTimesteps: number, maxBeta = 0.999): tf.Tensor1D {
  const alphaBar = (timeStep: number): number =>
    Math.cos((timeStep + 0.008) / 1.008 * Math.PI / 2) ** 2;

  const betas: number[] = [];
  for (let i = 0; i < numDiffusionTimesteps; i++) {
    const t1 = i / numDiffusionTimesteps;
    const t2 = (i + 1) / numDiffusionTimesteps;
    betas.push(Math.min(1 - alphaBar(t2) / alphaBar(t1), maxBeta));
  }
  return tf.tensor1d(betas);
}

/**
 * Pseudo Numerical Methods for Diffusion Models (PNDM) scheduler — a
 * TypeScript port of diffusers' PNDMScheduler. Only the PLMS path is
 * implemented; the Runge-Kutta warm-up (PRK) path throws, so configs must set
 * skip_prk_steps (the shipped SD configs do).
 */
export class PNDMScheduler {
  betas: tf.Tensor1D;
  alphas: tf.Tensor1D;
  alphasCumprod!: Float32Array;   // cumulative product of alphas, filled by setAlphasCumprod()
  finalAlphaCumprod!: number;
  initNoiseSigma: number;
  pndmOrder: number;
  curModelOutput: number;
  counter: number;                // number of step() calls since setTimesteps()
  curSample: tf.Tensor|null;
  ets: tf.Tensor[];               // history of model outputs for the linear multistep formulas
  numInferenceSteps: number = 20;
  timesteps: tf.Tensor;
  prkTimesteps: tf.Tensor;
  plmsTimesteps: tf.Tensor;
  config: any; // raw scheduler_config.json contents

  constructor(
    config: any,
    numTrainTimesteps: number = 1000,
    betaStart: number = 0.00085,
    betaEnd: number = 0.012,
    betaSchedule: string = "scaled_linear",
    trainedBetas: tf.Tensor1D | null = null,
    skipPrkSteps: boolean = false,
    setAlphaToOne: boolean = false,
    predictionType: string = "epsilon",
    stepsOffset: number = 0
  ) {
    this.config = config;

    if (trainedBetas !== null) {
      // BUG FIX: the original ignored the supplied betas and fell back to a
      // linspace schedule.
      this.betas = trainedBetas;
    } else if (betaSchedule === "linear") {
      this.betas = tf.linspace(betaStart, betaEnd, numTrainTimesteps)
    } else if (betaSchedule === "scaled_linear") {
      this.betas = tf.linspace(betaStart ** 0.5, betaEnd ** 0.5, numTrainTimesteps).pow(2);
    } else if (betaSchedule === "squaredcos_cap_v2") {
      // BUG FIX: previously this silently used a plain linear schedule.
      this.betas = betasForAlphaBar(numTrainTimesteps);
    } else {
      throw new Error(`${betaSchedule} is not implemented for ${this.constructor}`);
    }

    this.alphas = tf.sub(1.0, this.betas)

    this.initNoiseSigma = 1.0;
    this.pndmOrder = 4;

    // running values
    this.curModelOutput = 0;
    this.counter = 0;
    this.curSample = null;
    this.ets = []

    // setable values (full training schedule until setTimesteps() is called)
    this.timesteps = tf.tensor(Array.from({ length: numTrainTimesteps }, (_, i) => i).reverse())
    this.prkTimesteps = tf.tensor([])
    this.plmsTimesteps = tf.tensor([])
  }

  /** Precomputes the cumulative alpha products (async because of data()). */
  async setAlphasCumprod () {
    this.alphasCumprod = await tf.cumprod(this.alphas).data() as Float32Array
    // BUG FIX: honor config.set_alpha_to_one instead of a hard-coded `false`.
    const setAlphaToOne = this.config.set_alpha_to_one ?? false
    this.finalAlphaCumprod = setAlphaToOne ? 1.0 : this.alphasCumprod[0]
  }

  /**
   * Builds the inference timestep schedule for `numInferenceSteps` steps and
   * resets all running state.
   */
  setTimesteps (numInferenceSteps: number) {
    this.numInferenceSteps = numInferenceSteps;
    const stepRatio = ~~(this.config.num_train_timesteps / this.numInferenceSteps)
    this.timesteps = tf.range(0, numInferenceSteps).mul(stepRatio).round();
    this.timesteps = this.timesteps.add(this.config.steps_offset);

    if (this.config.skip_prk_steps) {
      this.prkTimesteps = tf.tensor([]);
      // Mirror of diffusers: concat(timesteps[:-1], timesteps[-2:-1], timesteps[-1:])[::-1]
      const size = this.timesteps.size;
      this.plmsTimesteps = tf.concat([
        this.timesteps.slice(0, size-1),
        this.timesteps.slice(size-2, 1),
        this.timesteps.slice(size-1, 1)
      ]).reverse().clone();
    } else {
      const prkTimesteps = this.timesteps.slice(-this.pndmOrder)
        .tile([2])
        .add(
          tf.tensor([0, this.config.num_train_timesteps / numInferenceSteps / 2]).tile([this.pndmOrder])
        );
      this.prkTimesteps = prkTimesteps.slice(0, -1).tile([2]).slice(1, -1).reverse().clone();
      this.plmsTimesteps = this.timesteps.slice(0, -3).reverse().clone();
    }

    const timesteps = tf.concat([this.prkTimesteps, this.plmsTimesteps]).asType('int32');
    this.timesteps = timesteps;
    this.ets = []
    this.counter = 0;
    this.curModelOutput = 0;
  }

  /**
   * Advances the diffusion process by one step, dispatching to the PRK
   * warm-up or the PLMS multistep update depending on the counter.
   */
  step(
    modelOutput: tf.Tensor,
    timestep: number,
    sample: tf.Tensor,
    returnDict: boolean = true
  ): tf.Tensor {
    if (this.counter < this.prkTimesteps.shape[0] && !this.config.skip_prk_steps) {
      return this.stepPrk(modelOutput, timestep, sample, returnDict);
    } else {
      return this.stepPlms(modelOutput, timestep, sample, returnDict);
    }
  }

  /**
   * Runge-Kutta warm-up step. Not ported yet; models shipped with
   * skip_prk_steps=true never reach this path.
   */
  stepPrk(
    modelOutput: tf.Tensor,
    timestep: number,
    sample: tf.Tensor,
    returnDict: boolean = true
  ): tf.Tensor {
    throw new Error("Not implemented")
  }

  /**
   * Linear multistep (PLMS) update: combines up to four previous model
   * outputs (this.ets) using the Adams-Bashforth-style coefficients from the
   * PNDM paper, then applies formula (9) via _getPrevSample.
   */
  stepPlms(
    modelOutput: tf.Tensor,
    timestep: number,
    sample: tf.Tensor,
    returnDict: boolean = true
  ): tf.Tensor {
    let prevTimestep = timestep - ~~(this.config.num_train_timesteps / this.numInferenceSteps)

    if (this.counter !== 1) {
      // keep only the last 3 outputs, then record the newest
      this.ets = this.ets.slice(-3);
      this.ets.push(modelOutput);
    } else {
      // second call re-evaluates at the first timestep (see PNDM paper)
      prevTimestep = timestep;
      timestep = timestep + ~~(this.config.num_train_timesteps / this.numInferenceSteps)
    }

    if (this.ets.length === 1 && this.counter === 0) {
      modelOutput = modelOutput;
      this.curSample = sample;
    } else if (this.ets.length === 1 && this.counter === 1) {
      modelOutput = modelOutput.add(this.ets[this.ets.length - 1]).div(2);
      sample = this.curSample!;
      this.curSample = null;
    } else if (this.ets.length === 2) {
      modelOutput = this.ets[this.ets.length - 1].mul(3)
        .sub(this.ets[this.ets.length - 2])
        .div(2)
    } else if (this.ets.length === 3) {
      modelOutput =
        this.ets[this.ets.length - 1].mul(23)
          .sub(
            this.ets[this.ets.length - 2].mul(16)
          )
          .add(
            this.ets[this.ets.length - 3].mul(5)
          )
          .div(12)
    } else {
      // 4th-order coefficients (55, -59, 37, -9) / 24
      modelOutput =
        this.ets[this.ets.length - 1].mul(55)
          .sub(
            this.ets[this.ets.length - 2].mul(59)
          )
          .add(
            this.ets[this.ets.length - 3].mul(37)
          )
          .sub(
            this.ets[this.ets.length - 4].mul(9)
          ).mul(1 / 24);
    }
    const prevSample = this._getPrevSample(sample, timestep, prevTimestep, modelOutput);
    this.counter += 1;

    return prevSample
  }

  /**
   * Applies formula (9) of the PNDM paper to move `sample` from `timestep` to
   * `prevTimestep` given the (combined) model output.
   */
  _getPrevSample(sample: tf.Tensor, timestep: number, prevTimestep: number, modelOutput: tf.Tensor): tf.Tensor {
    const alphaProdT = this.alphasCumprod[timestep]
    const alphaProdTPrev = prevTimestep >= 0 ? this.alphasCumprod[prevTimestep] : this.finalAlphaCumprod;

    const betaProdT = 1 - alphaProdT;
    const betaProdTPrev = 1 - alphaProdTPrev;
    if (this.config.prediction_type === 'v_prediction') {
      // convert a v-prediction model output to an epsilon prediction
      modelOutput = modelOutput.mul(Math.sqrt(alphaProdT)).add(sample.mul(Math.sqrt(betaProdT)));
    } else if (this.config.prediction_type !== 'epsilon') {
      throw new Error(`prediction_type given as ${this.config.prediction_type} must be one of 'epsilon' or 'v_prediction'`);
    }
    const sampleCoeff = Math.sqrt(alphaProdTPrev / alphaProdT)

    // corresponds to denominator of e_θ(x_t, t) in formula (9)
    const modelOutputDenomCoeff = alphaProdT * Math.sqrt(betaProdTPrev)
      + Math.sqrt(alphaProdT * betaProdT * alphaProdTPrev)

    // full formula (9)
    const prevSample = sample
      .mul(sampleCoeff)
      .sub(modelOutput.mul(alphaProdTPrev - alphaProdT).div(modelOutputDenomCoeff));

    return prevSample;
  }

}
-------------------------------------------------------------------------------- /src/tokenizer.js: --------------------------------------------------------------------------------
// import { Html5Entities
as htmlEntities } from "https://deno.land/x/html_entities@v1.0/mod.js"; 2 | // import bpeVocabData from "./bpe_simple_vocab_16e6.mjs"; 3 | // import ftfy from "https://deno.land/x/ftfy_pyodide@v0.1.1/mod.js"; 4 | 5 | function ord(c) { 6 | return c.charCodeAt(0); 7 | } 8 | function range(start, stop, step=1) { 9 | if(stop === undefined) { 10 | stop = start; 11 | start = 0; 12 | } 13 | 14 | if((step > 0 && start >= stop) || (step < 0 && start <= stop)) { 15 | return []; 16 | } 17 | 18 | const result = []; 19 | for(let i = start; step > 0 ? i < stop : i > stop; i += step) { 20 | result.push(i); 21 | } 22 | 23 | return result; 24 | } 25 | 26 | 27 | 28 | function bytesToUnicode() { 29 | let bs = [ 30 | ...range(ord("!"), ord("~") + 1), 31 | ...range(ord("¡"), ord("¬") + 1), 32 | ...range(ord("®"), ord("ÿ") + 1), 33 | ]; 34 | let cs = bs.slice(0); 35 | let n = 0; 36 | for(let b of range(2**8)) { 37 | if(!bs.includes(b)) { 38 | bs.push(b); 39 | cs.push(2**8 + n); 40 | n += 1; 41 | } 42 | } 43 | cs = cs.map(n => String.fromCharCode(n)); 44 | return Object.fromEntries(bs.map((v, i) => [v, cs[i]])); 45 | } 46 | 47 | function getPairs(word) { 48 | let pairs = []; 49 | let prevChar = word[0]; 50 | for(let char of word.slice(1)) { 51 | pairs.push([prevChar, char]); 52 | prevChar = char; 53 | } 54 | return pairs; 55 | } 56 | 57 | function basicClean(text) { 58 | // text = ftfy.fix_text(text); 59 | // text = htmlEntities.decode(htmlEntities.decode(text)); 60 | return text.trim(); 61 | } 62 | 63 | function whitespaceClean(text) { 64 | return text.replace(/\s+/g, " ").trim(); 65 | } 66 | 67 | 68 | export default class { 69 | constructor (vocabMerges, config, vocab) { 70 | this.tokenMaxLen = config.model_max_length 71 | this.byteEncoder = bytesToUnicode(); 72 | this.byteDecoder = Object.fromEntries(Object.entries(this.byteEncoder).map(([k,v]) => [v,k])); 73 | let merges = vocabMerges.split("\n"); 74 | merges = merges.slice(1, 49152-256-2+1); 75 | merges = merges.map(merge => 
merge.split(" ")); 76 | // There was a bug related to the ordering of Python's .values() output. I'm lazy do I've just copy-pasted the Python output: 77 | // let vocab = ['!', '"', '#', '$', '%', '&', "'", '(', ')', '*', '+', ',', '-', '.', '/', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', ':', ';', '<', '=', '>', '?', '@', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '[', '\\', ']', '^', '_', '`', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '{', '|', '}', '~', '¡', '¢', '£', '¤', '¥', '¦', '§', '¨', '©', 'ª', '«', '¬', '®', '¯', '°', '±', '²', '³', '´', 'µ', '¶', '·', '¸', '¹', 'º', '»', '¼', '½', '¾', '¿', 'À', 'Á', 'Â', 'Ã', 'Ä', 'Å', 'Æ', 'Ç', 'È', 'É', 'Ê', 'Ë', 'Ì', 'Í', 'Î', 'Ï', 'Ð', 'Ñ', 'Ò', 'Ó', 'Ô', 'Õ', 'Ö', '×', 'Ø', 'Ù', 'Ú', 'Û', 'Ü', 'Ý', 'Þ', 'ß', 'à', 'á', 'â', 'ã', 'ä', 'å', 'æ', 'ç', 'è', 'é', 'ê', 'ë', 'ì', 'í', 'î', 'ï', 'ð', 'ñ', 'ò', 'ó', 'ô', 'õ', 'ö', '÷', 'ø', 'ù', 'ú', 'û', 'ü', 'ý', 'þ', 'ÿ', 'Ā', 'ā', 'Ă', 'ă', 'Ą', 'ą', 'Ć', 'ć', 'Ĉ', 'ĉ', 'Ċ', 'ċ', 'Č', 'č', 'Ď', 'ď', 'Đ', 'đ', 'Ē', 'ē', 'Ĕ', 'ĕ', 'Ė', 'ė', 'Ę', 'ę', 'Ě', 'ě', 'Ĝ', 'ĝ', 'Ğ', 'ğ', 'Ġ', 'ġ', 'Ģ', 'ģ', 'Ĥ', 'ĥ', 'Ħ', 'ħ', 'Ĩ', 'ĩ', 'Ī', 'ī', 'Ĭ', 'ĭ', 'Į', 'į', 'İ', 'ı', 'IJ', 'ij', 'Ĵ', 'ĵ', 'Ķ', 'ķ', 'ĸ', 'Ĺ', 'ĺ', 'Ļ', 'ļ', 'Ľ', 'ľ', 'Ŀ', 'ŀ', 'Ł', 'ł', 'Ń']; 78 | // vocab = [...vocab, ...vocab.map(v => v+'')]; 79 | // for(let merge of merges) { 80 | // vocab.push(merge.join("")); 81 | // } 82 | // vocab.push('<|startoftext|>', '<|endoftext|>'); 83 | this.encoder = vocab 84 | this.decoder = Object.fromEntries(Object.entries(this.encoder).map(([k,v]) => [v,k])); 85 | this.bpeRanks = Object.fromEntries(merges.map((v,i) => [v.join("·😎·"),i])); // ·😎· because js doesn't yet have tuples 86 | this.cache = {'<|startoftext|>': '<|startoftext|>', '<|endoftext|>': '<|endoftext|>'}; 87 | this.pat = 
/<\|startoftext\|>|<\|endoftext\|>|'s|'t|'re|'ve|'m|'ll|'d|[\p{L}]+|[\p{N}]|[^\s\p{L}\p{N}]+/gui; 88 | } 89 | 90 | bpe(token) { 91 | if(this.cache[token] !== undefined) { 92 | return this.cache[token]; 93 | } 94 | 95 | let word = [...token.slice(0, -1), token.slice(-1)+'']; 96 | let pairs = getPairs(word); 97 | 98 | if(pairs.length === 0) { 99 | return token+''; 100 | } 101 | 102 | while(1) { 103 | 104 | let bigram = null; 105 | let minRank = Infinity; 106 | for(let p of pairs) { 107 | let r = this.bpeRanks[p.join("·😎·")]; 108 | if(r === undefined) continue; 109 | if(r < minRank) { 110 | minRank = r; 111 | bigram = p; 112 | } 113 | } 114 | 115 | if(bigram === null) { 116 | break; 117 | } 118 | 119 | let [first, second] = bigram; 120 | let newWord = []; 121 | let i = 0; 122 | while(i < word.length) { 123 | 124 | let j = word.indexOf(first, i); 125 | 126 | if(j === -1) { 127 | newWord.push(...word.slice(i)); 128 | break; 129 | } 130 | 131 | newWord.push(...word.slice(i, j)); 132 | i = j; 133 | 134 | if(word[i] === first && i < word.length-1 && word[i+1] === second) { 135 | newWord.push(first+second); 136 | i += 2; 137 | } else { 138 | newWord.push(word[i]); 139 | i += 1; 140 | } 141 | } 142 | word = newWord; 143 | if(word.length === 1) { 144 | break; 145 | } else { 146 | pairs = getPairs(word); 147 | } 148 | } 149 | word = word.join(" "); 150 | this.cache[token] = word; 151 | return word; 152 | } 153 | 154 | encode(text) { 155 | let bpeTokens = [] 156 | text = whitespaceClean(basicClean(text)).toLowerCase(); 157 | for(let token of [...text.matchAll(this.pat)].map(m => m[0])) { 158 | token = [...token].map(b => this.byteEncoder[b.charCodeAt(0)]).join(""); 159 | bpeTokens.push(...this.bpe(token).split(' ').map(bpe_token => this.encoder[bpe_token])); 160 | } 161 | return bpeTokens; 162 | } 163 | 164 | // adds start and end token, and adds padding 0's and ensures it's 77 tokens long 165 | encodeForCLIP(text) { 166 | let tokens = this.encode(text); 167 | 
tokens.unshift(49406); // start token 168 | tokens = tokens.slice(0, 76); 169 | tokens.push(49407); // end token 170 | while(tokens.length < 77) tokens.push(0); 171 | return tokens; 172 | } 173 | 174 | decode(tokens) { 175 | let text = tokens.map(token => this.decoder[token]).join(""); 176 | text = [...text].map(c => this.byteDecoder[c]).map(v => String.fromCharCode(v)).join("").replaceAll('', ' '); 177 | return text; 178 | } 179 | } 180 | -------------------------------------------------------------------------------- /src/tqdm.d.ts: -------------------------------------------------------------------------------- 1 | export as namespace tqdm; 2 | 3 | 4 | 5 | declare function tqdm (iterable: Iterable, options?: Tqdm.Options): Iterable 6 | 7 | declare namespace Tqdm { 8 | interface Options { 9 | title?: string; 10 | total?: number; 11 | barFormat?: string; 12 | barLength?: number; 13 | barCompleteChar?: string; 14 | barIncompleteChar?: string; 15 | } 16 | function tqdm (iterable: Iterable, options?: Tqdm.Options): Iterable 17 | } 18 | export = tqdm; -------------------------------------------------------------------------------- /src/txt2img.ts: -------------------------------------------------------------------------------- 1 | import minimist from 'minimist'; 2 | import { StableDiffusionPipeline } from './StableDiffusionPipeline' 3 | import fs from 'fs' 4 | import { PNG } from 'pngjs' 5 | 6 | interface CommandLineArgs { 7 | m: string; 8 | prompt: string; 9 | negativePrompt?: string; 10 | provider?: 'cuda'|'cpu'|'directml'; 11 | rev?: string; 12 | version?: 1|2; 13 | steps: number 14 | } 15 | 16 | function parseCommandLineArgs(): CommandLineArgs { 17 | const args = minimist(process.argv.slice(2)); 18 | 19 | return { 20 | m: args.m || 'aislamov/stable-diffusion-2-1-base-onnx', 21 | prompt: args.prompt || 'an astronaut riding a horse', 22 | negativePrompt: args.negativePrompt || '', 23 | provider: args.provider || 'cpu', 24 | rev: args.rev, 25 | version: args.version 
|| 2, 26 | steps: args.steps || 30, 27 | } 28 | } 29 | 30 | async function main() { 31 | const args = parseCommandLineArgs(); 32 | const pipe = await StableDiffusionPipeline.fromPretrained( 33 | args.provider, 34 | args.m, 35 | args.rev, 36 | args.version, 37 | ) 38 | 39 | let image = await pipe.run(args.prompt, args.negativePrompt, 1, 9, args.steps) 40 | const p = new PNG({ width: 512, height: 512, inputColorType: 2 }) 41 | p.data = Buffer.from((await image[0].data())) 42 | p.pack().pipe(fs.createWriteStream('output.png')).on('finish', () => { 43 | console.log('Image saved as output.png'); 44 | }) 45 | } 46 | 47 | main(); 48 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | /* Visit https://aka.ms/tsconfig to read more about this file */ 4 | 5 | /* Projects */ 6 | // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ 7 | // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ 8 | // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ 9 | // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ 10 | // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ 11 | // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ 12 | 13 | /* Language and Environment */ 14 | "target": "es2016", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ 15 | // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. 
*/ 16 | // "jsx": "preserve", /* Specify what JSX code is generated. */ 17 | // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */ 18 | // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ 19 | // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ 20 | // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ 21 | // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ 22 | // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ 23 | // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ 24 | // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ 25 | // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ 26 | 27 | /* Modules */ 28 | "module": "commonjs", /* Specify what module code is generated. */ 29 | "rootDir": "./src", /* Specify the root folder within your source files. */ 30 | // "moduleResolution": "node10", /* Specify how TypeScript looks up a file from a given module specifier. */ 31 | "baseUrl": "./src", /* Specify the base directory to resolve non-relative module names. */ 32 | // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ 33 | // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ 34 | // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ 35 | "types": ["node"], /* Specify type package names to be included without being referenced in a source file. 
*/ 36 | // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ 37 | // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ 38 | // "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */ 39 | // "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */ 40 | // "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */ 41 | // "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */ 42 | // "resolveJsonModule": true, /* Enable importing .json files. */ 43 | // "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */ 44 | // "noResolve": true, /* Disallow 'import's, 'require's or '<reference>'s from expanding the number of files TypeScript should add to a project. */ 45 | 46 | /* JavaScript Support */ 47 | "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ 48 | // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ 49 | // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ 50 | 51 | /* Emit */ 52 | "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ 53 | // "declarationMap": true, /* Create sourcemaps for d.ts files. */ 54 | // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ 55 | "sourceMap": true, /* Create source map files for emitted JavaScript files. */ 56 | // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. 
*/ 57 | // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ 58 | "outDir": "dist", /* Specify an output folder for all emitted files. */ 59 | // "removeComments": true, /* Disable emitting comments. */ 60 | // "noEmit": true, /* Disable emitting files from a compilation. */ 61 | // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ 62 | // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ 63 | // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ 64 | // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ 65 | // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ 66 | // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ 67 | // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ 68 | // "newLine": "crlf", /* Set the newline character for emitting files. */ 69 | // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ 70 | // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ 71 | // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ 72 | // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ 73 | // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ 74 | // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. 
*/ 75 | 76 | /* Interop Constraints */ 77 | // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ 78 | // "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */ 79 | // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ 80 | "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */ 81 | // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ 82 | "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ 83 | 84 | /* Type Checking */ 85 | "strict": true, /* Enable all strict type-checking options. */ 86 | // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ 87 | // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ 88 | // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ 89 | // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ 90 | // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ 91 | // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ 92 | // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ 93 | // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ 94 | // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. 
*/ 95 | // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ 96 | // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ 97 | // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ 98 | // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ 99 | // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ 100 | // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ 101 | // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ 102 | // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ 103 | // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ 104 | 105 | /* Completeness */ 106 | // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ 107 | "skipLibCheck": true /* Skip type checking all .d.ts files. */ 108 | }, 109 | "exclude": ["node_modules"], 110 | "include": ["src"] 111 | } 112 | --------------------------------------------------------------------------------