├── .eslintrc.js ├── .github ├── FUNDING.yml ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md └── workflows │ ├── feature.yaml │ └── main.yaml ├── .gitignore ├── .releaserc ├── LICENSE ├── README.md ├── cspell.yaml ├── package-lock.json ├── package.json ├── src ├── Logger.ts ├── __fixtures__ │ └── killPsTree │ │ ├── badTree │ │ ├── a.js │ │ └── b.js │ │ └── goodTree │ │ ├── a.js │ │ └── b.js ├── backends │ ├── ChokidarWatcher.ts │ ├── FSWatcher.ts │ ├── FileWatchingBackend.test.ts │ ├── FileWatchingBackend.ts │ └── TurboWatcher.ts ├── bin │ └── turbowatch.ts ├── createFileChangeQueue.test.ts ├── createFileChangeQueue.ts ├── createSpawn.test.ts ├── createSpawn.ts ├── deduplicateFileChangeEvents.test.ts ├── deduplicateFileChangeEvents.ts ├── defineConfig.ts ├── errors.ts ├── findNearestDirectory.ts ├── generateShortId.ts ├── hashFile.test.ts ├── hashFile.ts ├── index.ts ├── isFSWatcherAvailable.ts ├── killPsTree.test.ts ├── killPsTree.ts ├── subscribe.test.ts ├── subscribe.ts ├── testExpression.test.ts ├── testExpression.ts ├── types.ts ├── watch.test.ts └── watch.ts ├── tsconfig.build.json ├── tsconfig.json └── vitest.config.ts /.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | extends: ['canonical/auto'], 3 | ignorePatterns: ['dist', 'package-lock.json'], 4 | root: true, 5 | }; 6 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: gajus 2 | patreon: gajus 3 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a bug report to help us improve Slonik 4 | title: '' 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | 11 | 12 | ## Expected 
Behavior 13 | 14 | 15 | ## Current Behavior 16 | 17 | 18 | ## Possible Solution 19 | 20 | 21 | ## Steps to Reproduce 22 | 23 | 24 | 25 | 1. 26 | 2. 27 | 3. 28 | 4. 29 | 30 | ## Logs 31 | 32 | 33 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Create an enhancement request to help us improve Slonik 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | 11 | 12 | ## Desired Behavior 13 | 14 | 15 | ## Motivation 16 | 17 | 18 | ## Implementation 19 | 20 | -------------------------------------------------------------------------------- /.github/workflows/feature.yaml: -------------------------------------------------------------------------------- 1 | jobs: 2 | lint: 3 | environment: release 4 | name: Lint 5 | runs-on: ubuntu-latest 6 | steps: 7 | - name: setup repository 8 | uses: actions/checkout@v2 9 | with: 10 | fetch-depth: 0 11 | - name: setup node.js 12 | uses: actions/setup-node@v2 13 | with: 14 | cache: 'npm' 15 | node-version: '19' 16 | - run: npm ci 17 | - run: npm run lint:eslint 18 | - run: npm run lint:tsc 19 | - run: npm run lint:cspell 20 | timeout-minutes: 10 21 | test: 22 | environment: release 23 | name: Test 24 | needs: 25 | - lint 26 | runs-on: ${{ matrix.os }} 27 | steps: 28 | - name: setup repository 29 | uses: actions/checkout@v2 30 | with: 31 | fetch-depth: 0 32 | - name: setup node.js 33 | uses: actions/setup-node@v2 34 | with: 35 | cache: 'npm' 36 | node-version: ${{ matrix.version }} 37 | - run: npm ci 38 | - env: 39 | ROARR_LOG: 'true' 40 | run: npm run test:vitest 41 | - run: npm run build 42 | strategy: 43 | matrix: 44 | os: 45 | - ubuntu-latest 46 | version: 47 | - 18 48 | - 19 49 | timeout-minutes: 10 50 | name: Test and build 51 | on: 52 | pull_request: 53 | branches: 54 | - main 55 | types: 56 | - opened 57 | - 
synchronize 58 | - reopened 59 | - ready_for_review -------------------------------------------------------------------------------- /.github/workflows/main.yaml: -------------------------------------------------------------------------------- 1 | jobs: 2 | test: 3 | environment: release 4 | name: Test 5 | runs-on: ubuntu-latest 6 | steps: 7 | - name: setup repository 8 | uses: actions/checkout@v2 9 | with: 10 | fetch-depth: 0 11 | - name: setup node.js 12 | uses: actions/setup-node@v2 13 | with: 14 | cache: 'npm' 15 | node-version: '19' 16 | - run: npm ci 17 | - run: npm run lint:eslint 18 | - run: npm run lint:tsc 19 | - run: npm run lint:cspell 20 | - env: 21 | ROARR_LOG: 'true' 22 | run: npm run test:vitest 23 | - run: npm run build 24 | - env: 25 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 26 | NPM_TOKEN: ${{ secrets.NPM_TOKEN }} 27 | run: npx semantic-release 28 | name: Test, build and release 29 | on: 30 | push: 31 | branches: 32 | - main -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | dist 2 | node_modules 3 | *.log 4 | .* 5 | !.eslintrc.js 6 | !.github 7 | !.gitignore 8 | !.README 9 | !.releaserc -------------------------------------------------------------------------------- /.releaserc: -------------------------------------------------------------------------------- 1 | { 2 | "branches": [ 3 | "main" 4 | ], 5 | "plugins": [ 6 | "@semantic-release/commit-analyzer", 7 | "@semantic-release/github", 8 | "@semantic-release/npm" 9 | ] 10 | } -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2023, Gajus Kuizinas (https://gajus.com/) 2 | All rights reserved. 
3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | * Redistributions of source code must retain the above copyright 7 | notice, this list of conditions and the following disclaimer. 8 | * Redistributions in binary form must reproduce the above copyright 9 | notice, this list of conditions and the following disclaimer in the 10 | documentation and/or other materials provided with the distribution. 11 | * Neither the name of the Gajus Kuizinas (https://gajus.com/) nor the 12 | names of its contributors may be used to endorse or promote products 13 | derived from this software without specific prior written permission. 14 | 15 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 16 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 17 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 18 | DISCLAIMED. IN NO EVENT SHALL GAJUS KUIZINAS BE LIABLE FOR ANY 19 | DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 20 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 21 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 22 | ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 23 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 24 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Turbowatch 🏎 2 | 3 | Extremely fast file change detector and task orchestrator for Node.js. 4 | 5 | If you ever wanted something like [Nodemon](https://nodemon.io/) but more capable, then you are at the right place. 
6 | 7 | Basic usage: 8 | 9 | ```bash 10 | npm install turbowatch 11 | cat > turbowatch.ts <<'EOD' 12 | import { defineConfig } from 'turbowatch'; 13 | 14 | export default defineConfig({ 15 | project: __dirname, 16 | triggers: [ 17 | { 18 | expression: ['match', '*.ts', 'basename'], 19 | name: 'build', 20 | onChange: async ({ spawn }) => { 21 | await spawn`tsc`; 22 | }, 23 | }, 24 | ], 25 | }); 26 | EOD 27 | npm exec turbowatch ./turbowatch.ts 28 | ``` 29 | 30 | > **Note** See [logging](#logging) instructions to print logs that explain what Turbowatch is doing. 31 | 32 | Refer to recipes: 33 | 34 | * [Rebuilding assets when file changes are detected](#rebuilding-assets-when-file-changes-are-detected) 35 | * [Restarting server when file changes are detected](#restarting-server-when-file-changes-are-detected) 36 | * [Retrying failing triggers](#retrying-failing-triggers) 37 | * [Gracefully terminating Turbowatch](#gracefully-terminating-turbowatch) 38 | * [Handling the `AbortSignal`](#handling-the-abortsignal) 39 | * [Tearing down project](#tearing-down-project) 40 | * [Throttling `spawn` output](#throttling-spawn-output) 41 | * [Watching multiple scripts](#watching-multiple-scripts) 42 | * [Using custom file watching backend](#using-custom-file-watching-backend) 43 | 44 | ||Turbowatch|Nodemon| 45 | |---|---|---| 46 | |[Node.js interface (scriptable)](#api)|✅|❌1| 47 | |[Graceful termination (teardown)](#gracefully-terminating-turbowatch)|✅|❌2| 48 | |[Scriptable child processes (zx)](#spawn)|✅|❌| 49 | |Retries|✅|❌| 50 | |Debounce|✅|❌| 51 | |Interruptible workflows|✅|❌| 52 | |Concurrent workflows|✅|❌| 53 | |[Log grouping](#throttling-spawn-output)|✅|❌| 54 | |[Bring-your-own backend](#using-custom-file-watching-backend)|✅|❌| 55 | |Works with long-running processes|✅|✅| 56 | |Works with build utilities and REPLs|✅|✅| 57 | |Watch specific files or directories|✅|✅| 58 | |Ignoring specific files or directories|✅|✅| 59 | |Open source and available|✅|✅| 60 | 61 | 1 Undocumented
62 | 2 Nodemon only provides the ability to [send a custom signal](https://github.com/remy/nodemon#gracefully-reloading-down-your-script) to the worker.
63 | 64 | ## API 65 | 66 | > **Note** `defineConfig` is used to export configuration for the consumption by `turbowatch` program. If you want to run Turbowatch programmatically, then use `watch`. The API of both methods is equivalent. 67 | 68 | Turbowatch [defaults](#recipes) are a good choice for most projects. However, Turbowatch has _many_ options that you should be familiar with for advance use cases. 69 | 70 | ```ts 71 | import { 72 | watch, 73 | type ChangeEvent, 74 | } from 'turbowatch'; 75 | 76 | void watch({ 77 | // Debounces triggers by 1 second. 78 | // Most multi-file spanning changes are non-atomic. Therefore, it is typically desirable to 79 | // batch together information about multiple file changes that happened in short succession. 80 | // Provide { debounce: { wait: 0 } } to disable debounce. 81 | debounce: { 82 | wait: 1000, 83 | }, 84 | // The base directory under which all files are matched. 85 | // Note: This is different from the "root project" (https://github.com/gajus/turbowatch#project-root). 86 | project: __dirname, 87 | triggers: [ 88 | { 89 | // Expression match files based on name. 90 | // https://github.com/gajus/turbowatch#expressions 91 | expression: [ 92 | 'allof', 93 | ['not', ['dirname', 'node_modules']], 94 | [ 95 | 'anyof', 96 | ['match', '*.ts', 'basename'], 97 | ['match', '*.tsx', 'basename'], 98 | ] 99 | ], 100 | // Indicates whether the onChange routine should be triggered on script startup. 101 | // Defaults to true. Set it to false if you would like onChange routine to not run until the first changes are detected. 102 | initialRun: true, 103 | // Determines what to do if a new file change is detected while the trigger is executing. 104 | // If {interruptible: true}, then AbortSignal will abort the current onChange routine. 105 | // If {interruptible: false}, then Turbowatch will wait until the onChange routine completes. 106 | // Defaults to true. 107 | interruptible: false, 108 | // Name of the trigger. 
Used for debugging 109 | // Must match /^[a-z0-9-_]+$/ pattern and must be unique. 110 | name: 'build', 111 | // Routine that is executed when file changes are detected. 112 | onChange: async ({ spawn }: ChangeEvent) => { 113 | await spawn`tsc`; 114 | await spawn`tsc-alias`; 115 | }, 116 | // Routine that is executed when shutdown signal is received. 117 | onTeardown: async ({ spawn }) => { 118 | await spawn`rm -fr ./dist`; 119 | }, 120 | // Label a task as persistent if it is a long-running process, such as a dev server or --watch mode. 121 | persistent: false, 122 | // Retry a task if it fails. Otherwise, watch program will throw an error if trigger fails. 123 | // Defaults to { retries: 3 } 124 | retry: { 125 | retries: 3, 126 | }, 127 | }, 128 | ], 129 | }); 130 | ``` 131 | 132 | ## Motivation 133 | 134 | To abstract the complexity of orchestrating file watching operations. 135 | 136 | For context, we are using [Turborepo](https://turbo.build/). The reason this project came to be is because Turborepo does not have "watch" mode (issue [#986](https://github.com/vercel/turbo/issues/986)). 137 | 138 | At first, we attempted to use a combination of `tsc --watch`, `concurrently` and Nodemon, but started to run into things breaking left and right, e.g. 139 | 140 | * services restarting prematurely (before all the assets are built) 141 | * services failing to gracefully shutdown and then failing to start, e.g. because ports are in use 142 | 143 | Furthermore, the setup for each workspace was repetitive and not straightforward, and debugging issues was not a great experience because you have many workspaces running in watch mode producing tons of logs. Many of the workspaces being dependencies of each other, this kept re-triggering watch operations causing the mentioned issues. 144 | 145 | In short, it quickly became clear that we need the ability to have more control over the orchestration of what/when needs to happen when files change. 
146 | 147 | We started with a script. At first I added _debounce_. That improved things. Then I added _graceful termination_ logic, which mostly made everything work. We still had occasional failures due to out-of-order events, but adding _retry_ logic fixed that too... At the end, while we got everything to work, it took a lot of effort and it still was a collection of hacky scripts that are hard to maintain and debug, and that's how Turbowatch came to be – 148 | 149 | Turbowatch is a toolbox for orchestrating and debugging file watching operations based on everything we learned along the way. 150 | 151 | > **Note** If you are working on a very simple project, i.e. just one build step or just one watch operation, then **you don't need Turbowatch**. Turbowatch is designed for monorepos or otherwise complex workspaces where you have dozens or hundreds of build steps that depend on each other (e.g. building and re-building dependencies, building/starting/stopping Docker containers, populating data, sending notifications, etc). 152 | 153 | We also [shared these learnings](https://github.com/vercel/turbo/issues/986#issuecomment-1477360394) with Turborepo team in hopes that it will help to design an embedded file watching experience. 154 | 155 | ## Use Cases 156 | 157 | Turbowatch can be used to automate any sort of operations that need to happen in response to files changing, e.g., 158 | 159 | * You can run (and conditionally restart) long-running processes (like your Node.js application) 160 | * You can build assets (like TypeScript and Docker images) 161 | 162 | ## `spawn` 163 | 164 | Turbowatch exposes `spawn` function that is an instance of [zx](https://github.com/google/zx). 
Use it to evaluate shell commands: 165 | 166 | ```ts 167 | async ({ spawn }: ChangeEvent) => { 168 | await spawn`tsc`; 169 | await spawn`tsc-alias`; 170 | }, 171 | ``` 172 | 173 | The reason Turbowatch abstracts `zx` is to enable graceful termination of child-processes when triggers are configured to be `interruptible`. 174 | 175 | ## Persistent tasks 176 | 177 | Your setup may include tasks that are not designed to exit, e.g. `next dev` (starts Next.js in development mode). 178 | 179 | It is important that these tasks are marked as `persistent` to distinguish them from tasks that run to completion as that changes how Turbowatch treats them. 180 | 181 | ||Persistent|Non-Persistent| 182 | |---|---|---| 183 | |Ignore `FileChangeEvent` if `{ interruptible: false }`|✅|❌| 184 | 185 | ## Expressions 186 | 187 | Expressions are used to match files. The most basic expression is `match` – it evaluates as true if a glob pattern matches the file, e.g. 188 | 189 | Match all files with `*.ts` extension: 190 | 191 | ```ts 192 | ['match', '*.ts', 'basename'] 193 | ``` 194 | 195 | Expressions can be combined using `allof` and `anyof`, e.g., 196 | 197 | Match all files with `*.ts` or `*.tsx` extensions: 198 | 199 | ```ts 200 | [ 201 | 'anyof', 202 | ['match', '*.ts', 'basename'], 203 | ['match', '*.tsx', 'basename'] 204 | ] 205 | ``` 206 | 207 | Finally, `not` evaluates as true if the sub-expression evaluated as false, i.e. inverts the sub-expression. 208 | 209 | Match all files with `*.ts` extension, but exclude `index.ts`: 210 | 211 | ```ts 212 | [ 213 | 'allof', 214 | ['match', '*.ts', 'basename'], 215 | [ 216 | 'not', 217 | ['match', 'index.ts', 'basename'] 218 | ] 219 | ] 220 | ``` 221 | 222 | This is the gist behind Turbowatch expressions. However, there are many more expressions. Inspect `Expression` type for further guidance. 223 | 224 | ```ts 225 | type Expression = 226 | // Evaluates as true if all of the grouped expressions also evaluated as true. 
227 | | ['allof', ...Expression[]] 228 | // Evaluates as true if any of the grouped expressions also evaluated as true. 229 | | ['anyof', ...Expression[]] 230 | // Evaluates as true if a given file has a matching parent directory. 231 | | ['dirname' | 'idirname', string] 232 | // Evaluates as true if a glob matches against the basename of the file. 233 | | ['match' | 'imatch', string, 'basename' | 'wholename'] 234 | // Evaluates as true if the sub-expression evaluated as false, i.e. inverts the sub-expression. 235 | | ['not', Expression]; 236 | ``` 237 | 238 | > **Note** Turbowatch expressions are a subset of [Watchman expressions](https://facebook.github.io/watchman/docs/expr/allof.html). Originally, Turbowatch was developed to leverage Watchman as a superior backend for watching a large number of files. However, along the way, we discovered that Watchman does not support symbolic links (issue [#105](https://github.com/facebook/watchman/issues/105#issuecomment-1469496330)). Unfortunately, that makes Watchman unsuitable for projects that utilize linked dependencies (which is the direction in which the ecosystem is moving for dependency management in monorepos). As such, Watchman was replaced with chokidar. We are hoping to provide Watchman as a backend in the future. Therefore, we made Turbowatch expressions syntax compatible with a subset of Watchman expressions. 239 | 240 | > **Note** Turbowatch uses [micromatch](https://github.com/micromatch/micromatch) for glob matching. Please note that you should be using forward slash (`/`) to separate paths, even on Windows. 
241 | 242 | ## Recipes 243 | 244 | ### Rebuilding assets when file changes are detected 245 | 246 | ```ts 247 | import { watch } from 'turbowatch'; 248 | 249 | void watch({ 250 | project: __dirname, 251 | triggers: [ 252 | { 253 | expression: [ 254 | 'allof', 255 | ['not', ['dirname', 'node_modules']], 256 | ['match', '*.ts', 'basename'], 257 | ], 258 | name: 'build', 259 | onChange: async ({ spawn }) => { 260 | await spawn`tsc`; 261 | await spawn`tsc-alias`; 262 | }, 263 | }, 264 | ], 265 | }); 266 | ``` 267 | 268 | ### Restarting server when file changes are detected 269 | 270 | ```ts 271 | import { watch } from 'turbowatch'; 272 | 273 | void watch({ 274 | project: __dirname, 275 | triggers: [ 276 | { 277 | expression: [ 278 | 'allof', 279 | ['not', ['dirname', 'node_modules']], 280 | [ 281 | 'anyof', 282 | ['match', '*.ts', 'basename'], 283 | ['match', '*.graphql', 'basename'], 284 | ] 285 | ], 286 | // Because of this setting, Turbowatch will kill the processes that spawn starts 287 | // when it detects a change. 288 | interruptible: true, 289 | name: 'start-server', 290 | onChange: async ({ spawn }) => { 291 | await spawn`tsx ./src/bin/wait.ts`; 292 | await spawn`tsx ./src/bin/server.ts`; 293 | }, 294 | }, 295 | ], 296 | }); 297 | ``` 298 | 299 | ### Watching `node_modules` 300 | 301 | There is more than one way to watch `node_modules`. However, through trial and error we found that the following set of rules work the best for a generalized solution. 
302 | 303 | ```ts 304 | import { watch } from 'turbowatch'; 305 | 306 | void watch({ 307 | project: path.resolve(__dirname, '../..'), 308 | triggers: [ 309 | { 310 | expression: [ 311 | 'anyof', 312 | [ 313 | 'allof', 314 | ['dirname', 'node_modules'], 315 | ['dirname', 'dist'], 316 | ['match', '*', 'basename'], 317 | ], 318 | [ 319 | 'allof', 320 | ['not', ['dirname', 'node_modules']], 321 | ['dirname', 'src'], 322 | ['match', '*', 'basename'], 323 | ], 324 | ], 325 | name: 'build', 326 | onChange: async ({ spawn }) => { 327 | return spawn`pnpm run build`; 328 | }, 329 | }, 330 | ], 331 | }); 332 | ``` 333 | 334 | This setup makes an assumption that your workspaces sources are in `src` directory and `build` task outputs to `dist` directory. 335 | 336 | ### Reusing expressions 337 | 338 | This might be common sense, but since Turbowatch scripts are regular JavaScript scripts, you can (and should) abstract your expressions and routines. 339 | 340 | How you do it is entirely up to you, e.g. You could abstract just expressions or you could go as far as abstracting the entire `trigger`: 341 | 342 | ```ts 343 | import { watch } from 'turbowatch'; 344 | import { 345 | buildTrigger, 346 | } from '@/turbowatch'; 347 | 348 | void watch({ 349 | project: __dirname, 350 | triggers: [ 351 | buildTrigger(), 352 | ], 353 | }); 354 | ``` 355 | 356 | Such abstraction helps to avoid errors that otherwise may occur due to duplicative code across workspaces. 357 | 358 | ### Reducing unnecessary reloads 359 | 360 | Something that is important to consider when orchestrating file watching triggers is how to avoid unnecessary reloads. 
Consider if this was your "build" script: 361 | 362 | ```bash 363 | rm -fr dist && tsc && tsc-alias 364 | ``` 365 | 366 | and let's assume that you are using an expression such as this one to detect when dependencies are updated: 367 | 368 | ```ts 369 | [ 370 | 'allof', 371 | ['dirname', 'node_modules'], 372 | ['dirname', 'dist'], 373 | ['match', '*'], 374 | ], 375 | ``` 376 | 377 | Running this script will produce at least 3 file change events: 378 | 379 | 1. when `rm -fr dist` completes 380 | 1. when `tsc` completes 381 | 1. when `tsc-alias` completes 382 | 383 | What's even worse is that even if the output has not changed, you are still going to trigger file change events (because `dist` get replaced). 384 | 385 | To some degree, `debounce` setting helps with this. However, it will only help if there is no more than 1 second (by default) inbetween every command. 386 | 387 | One way to avoid this entirely is by using an intermediate directory to output files and swapping only the files that changed. Here is how we do it: 388 | 389 | ```bash 390 | rm -fr .dist && tsc --project tsconfig.build.json && rsync -cr --delete .dist/ ./dist/ && rm -fr .dist 391 | ``` 392 | 393 | This "build" script will always produce at most 1 event, and won't produce any events if the outputs have not changed. 394 | 395 | This is not specific to Turbowatch, but something worth considering as you are designing your build pipeline. 396 | 397 | ### Retrying failing triggers 398 | 399 | Retries are configured by passing a `retry` property to the trigger configuration. 400 | 401 | ```ts 402 | /** 403 | * @property factor The exponential factor to use. Default is 2. 404 | * @property maxTimeout The maximum number of milliseconds between two retries. Default is 30,000. 405 | * @property minTimeout The number of milliseconds before starting the first retry. Default is 1000. 406 | * @property retries The maximum amount of times to retry the operation. Default is 3. 
Setting this to 1 means do it once, then retry it once. 407 | */ 408 | type Retry = { 409 | factor?: number, 410 | maxTimeout?: number, 411 | minTimeout?: number, 412 | retries?: number, 413 | } 414 | ``` 415 | 416 | ### Gracefully terminating Turbowatch 417 | 418 | > **Note** `SIGINT` is automatically handled if you are using `turbowatch` executable to evaluate your Turbowatch script. This example shows how to programmatically gracefully shutdown Turbowatch if you choose not to use `turbowatch` program to evaluate your watch scripts. 419 | 420 | > **Warning** Unfortunately, many tools do not allow processes to gracefully terminate. There are open support issues for this in npm ([#4603](https://github.com/npm/npm/issues/4603)), pnpm ([#2653](https://github.com/pnpm/pnpm/issues/2653#issuecomment-1476686711)) and yarn ([#4667](https://github.com/yarnpkg/yarn/issues/4667)), but they haven't been addressed. Therefore, do not wrap your `turbowatch` script execution using these tools if you require processes to gracefully terminate. 421 | 422 | `watch` returns an instance of `TurbowatchController`, which can be used to gracefully terminate the script: 423 | 424 | ```ts 425 | const { shutdown } = await watch({ 426 | project: __dirname, 427 | triggers: [ 428 | { 429 | name: 'test', 430 | expression: ['match', '*', 'basename'], 431 | onChange: async ({ spawn }) => { 432 | // `sleep 60` will receive `SIGTERM` as soon as `shutdown()` is called. 433 | await spawn`sleep 60`; 434 | }, 435 | } 436 | ], 437 | }); 438 | 439 | // SIGINT is the signal sent when we press Ctrl+C 440 | process.once('SIGINT', () => { 441 | void shutdown(); 442 | }); 443 | ``` 444 | 445 | Invoking `shutdown` will propagate an abort signal to all `onChange` handlers. The processes that were initiated using [`spawn`](#spawn) will receive `SIGTERM` signal. 
446 | 447 | ### Gracefully terminating Turbowatch using an `AbortController` 448 | 449 | In addition to being able to shut down Turbowatch using the `shutdown` routine, a Turbowatch instance can be shut down using an `AbortController`. The main difference is that `shutdown` can be awaited to know when the shutdown routine has run to completion. 450 | 451 | ```ts 452 | const abortController = new AbortController(); 453 | 454 | void watch({ 455 | abortController, 456 | project: __dirname, 457 | triggers: [ 458 | { 459 | name: 'test', 460 | expression: ['match', '*', 'basename'], 461 | onChange: async ({ spawn }) => { 462 | // `sleep 60` will receive `SIGTERM` as soon as `shutdown()` is called. 463 | await spawn`sleep 60`; 464 | }, 465 | } 466 | ], 467 | }); 468 | 469 | void abortController.abort(); 470 | ``` 471 | 472 | ### Handling the `AbortSignal` 473 | 474 | Workflow might be interrupted in two scenarios: 475 | 476 | * when Turbowatch is being gracefully shutdown 477 | * when routine is marked as `interruptible` and a new file change is detected 478 | 479 | Implementing interruptible workflows requires that you define `AbortSignal` handler. If you are using [`zx`](https://npmjs.com/zx), such abstraction could look like so: 480 | 481 | > **Note** Turbowatch already comes with [`zx`](https://npmjs.com/zx) bound to the `AbortSignal`. Just use `spawn`. Documentation demonstrates how to implement equivalent functionality. 
482 | 483 | ```ts 484 | import { type ProcessPromise } from 'zx'; 485 | 486 | const interrupt = async ( 487 | processPromise: ProcessPromise, 488 | abortSignal: AbortSignal, 489 | ) => { 490 | let aborted = false; 491 | 492 | const kill = () => { 493 | aborted = true; 494 | 495 | processPromise.kill(); 496 | }; 497 | 498 | abortSignal.addEventListener('abort', kill, { once: true }); 499 | 500 | try { 501 | await processPromise; 502 | } catch (error) { 503 | if (!aborted) { 504 | console.log(error); 505 | } 506 | } 507 | 508 | abortSignal.removeEventListener('abort', kill); 509 | }; 510 | ``` 511 | 512 | which you can then use to kill your scripts, e.g. 513 | 514 | ```ts 515 | export default watch({ 516 | project: __dirname, 517 | triggers: [ 518 | { 519 | expression: ['match', '*.ts', 'basename'], 520 | interruptible: false, 521 | name: 'sleep', 522 | onChange: async ({ abortSignal }) => { 523 | await interrupt($`sleep 30`, abortSignal); 524 | }, 525 | }, 526 | ], 527 | }); 528 | ``` 529 | 530 | ### Tearing down project 531 | 532 | `onTeardown` is going to be called when Turbowatch is gracefully terminated. Use it to "clean up" the project if necessary. 533 | 534 | > **Warning** There is no timeout for `onTeardown`. 535 | 536 | ```ts 537 | import { watch } from 'turbowatch'; 538 | 539 | export default watch({ 540 | abortController, 541 | project: __dirname, 542 | triggers: [ 543 | { 544 | expression: ['match', '*.ts', 'basename'], 545 | name: 'build', 546 | onChange: async ({ spawn }) => { 547 | await spawn`tsc`; 548 | }, 549 | onTeardown: async () => { 550 | await spawn`rm -fr ./dist`; 551 | }, 552 | }, 553 | ], 554 | }); 555 | ``` 556 | 557 | ### Throttling `spawn` output 558 | 559 | When multiple processes are sending logs in parallel, the log stream might be hard to read, e.g. 
560 | 561 | ```yaml 562 | redis:dev: 973191cf > #5 sha256:7f65636102fd1f499092cb075baa95784488c0bbc3e0abff2a6d853109e4a948 4.19MB / 9.60MB 22.3s 563 | api:dev: a1e4c6a7 > [18:48:37.171] 765ms debug @utilities #waitFor: Waiting for database to be ready... 564 | redis:dev: 973191cf > #5 sha256:d01ec855d06e16385fb33f299d9cc6eb303ea04378d0eea3a75d74e26c6e6bb9 0B / 1.39MB 22.7s 565 | api:dev: a1e4c6a7 > [18:48:37.225] 54ms debug @utilities #waitFor: Waiting for Redis to be ready... 566 | worker:dev: 2fb02d72 > [18:48:37.313] 88ms debug @utilities #waitFor: Waiting for database to be ready... 567 | redis:dev: 973191cf > #5 sha256:7f65636102fd1f499092cb075baa95784488c0bbc3e0abff2a6d853109e4a948 5.24MB / 9.60MB 22.9s 568 | worker:dev: 2fb02d72 > [18:48:37.408] 95ms debug @utilities #waitFor: Waiting for Redis to be ready... 569 | redis:dev: 973191cf > #5 sha256:7f65636102fd1f499092cb075baa95784488c0bbc3e0abff2a6d853109e4a948 6.29MB / 9.60MB 23.7s 570 | api:dev: a1e4c6a7 > [18:48:38.172] 764ms debug @utilities #waitFor: Waiting for database to be ready... 571 | api:dev: a1e4c6a7 > [18:48:38.227] 55ms debug @utilities #waitFor: Waiting for Redis to be ready... 572 | ``` 573 | 574 | In this example, `redis`, `api` and `worker` processes produce logs at almost the exact same time causing the log stream to switch between outputting from a different process every other line. This makes it hard to read the logs. 
575 | 576 | By default, Turbowatch throttles log output to at most once a second per task, producing much easier to follow log output: 577 | 578 | ```yaml 579 | redis:dev: 973191cf > #5 sha256:7f65636102fd1f499092cb075baa95784488c0bbc3e0abff2a6d853109e4a948 4.19MB / 9.60MB 22.3s 580 | redis:dev: 973191cf > #5 sha256:d01ec855d06e16385fb33f299d9cc6eb303ea04378d0eea3a75d74e26c6e6bb9 0B / 1.39MB 22.7s 581 | redis:dev: 973191cf > #5 sha256:7f65636102fd1f499092cb075baa95784488c0bbc3e0abff2a6d853109e4a948 5.24MB / 9.60MB 22.9s 582 | redis:dev: 973191cf > #5 sha256:7f65636102fd1f499092cb075baa95784488c0bbc3e0abff2a6d853109e4a948 6.29MB / 9.60MB 23.7s 583 | api:dev: a1e4c6a7 > [18:48:37.171] 765ms debug @utilities #waitFor: Waiting for database to be ready... 584 | api:dev: a1e4c6a7 > [18:48:37.225] 54ms debug @utilities #waitFor: Waiting for Redis to be ready... 585 | api:dev: a1e4c6a7 > [18:48:38.172] 764ms debug @utilities #waitFor: Waiting for database to be ready... 586 | api:dev: a1e4c6a7 > [18:48:38.227] 55ms debug @utilities #waitFor: Waiting for Redis to be ready... 587 | worker:dev: 2fb02d72 > [18:48:37.313] 88ms debug @utilities #waitFor: Waiting for database to be ready... 588 | worker:dev: 2fb02d72 > [18:48:37.408] 95ms debug @utilities #waitFor: Waiting for Redis to be ready... 589 | ``` 590 | 591 | However, this means that some logs might come out of order. To disable this feature, set `{ throttleOutput: { delay: 0 } }`. 592 | 593 | ### Watching multiple scripts 594 | 595 | By default, `turbowatch` will look for `turbowatch.ts` script in the current working directory. 
Many of the existing file watching solutions come with tradeoffs, e.g. Watchman does not track symbolic links ([#105](https://github.com/facebook/watchman/issues/105#issuecomment-1469496330)), chokidar is failing to register file changes ([#1240](https://github.com/paulmillr/chokidar/issues/1240)), `fs.watch` behavior is platform specific, etc. For this reason, Turbowatch provides several backends to choose from and allows you to bring your own backend by implementing the `FileWatchingBackend` interface.

By default, Turbowatch uses `fs.watch` on macOS (Node.js v19.1+) and falls back to [chokidar](https://github.com/paulmillr/chokidar) on other platforms.
648 | 649 | They are released to gather community feedback and may change at any point in future. 650 | 651 | > **Note** There are no active experiments at the moment. 652 | 653 | ## Alternatives 654 | 655 | The biggest benefit of using Turbowatch is that it provides a single abstraction for all file watching operations. That is, you might get away with Nodemon, concurrently, `--watch`, etc. running in parallel, but using Turbowatch will introduce consistency to how you perform watch operations. 656 | 657 | ### Why not use `X --watch`? 658 | 659 | Many tools provide built-in watch functionality, e.g. `tsc --watch`. However, there are couple of problems with relying on them: 660 | 661 | * Running many file watchers is inefficient and is probably draining your laptop's battery faster than you realize. Turbowatch uses a single server to watch all file changes. 662 | * Native tools do not allow to combine operations, e.g. If your build depends on `tsc --watch` and `tsc-alias --watch`, then you cannot combine them. On the other hand, Turbowatch allows you to chain arbitrary operations. 663 | 664 | > **Note** Turbowatch is not a replacement for services that implement Hot Module Replacement (HMR), e.g. Next.js. However, you should still wrap those operations in Turbowatch for consistency, e.g. 665 | > ```ts 666 | > void watch({ 667 | > project: __dirname, 668 | > triggers: [ 669 | > { 670 | > expression: ['dirname', __dirname], 671 | > // Marking this routine as non-interruptible will ensure that 672 | > // next dev is not restarted when file changes are detected. 673 | > interruptible: false, 674 | > name: 'start-server', 675 | > onChange: async ({ spawn }) => { 676 | > await spawn`next dev`; 677 | > }, 678 | > // Enabling this option modifies what Turbowatch logs and warns 679 | > // you if your configuration is incompatible with persistent tasks. 680 | > persistent: true, 681 | > }, 682 | > ], 683 | > }); 684 | > ``` 685 | 686 | ### Why not concurrently? 
However, there are a couple of problems with relying on them:

* Running many file watchers is inefficient and is probably draining your laptop's battery faster than you realize. Turbowatch uses a single server to watch all file changes.
* Native tools do not allow you to combine operations, e.g. If your build depends on `tsc --watch` and `tsc-alias --watch`, then you cannot combine them. On the other hand, Turbowatch allows you to chain arbitrary operations.
730 | -------------------------------------------------------------------------------- /cspell.yaml: -------------------------------------------------------------------------------- 1 | ignoreRegExpList: 2 | - /.*[0-9].*/ 3 | language: en 4 | version: '0.2' 5 | words: 6 | - allof 7 | - anyof 8 | - gajus 9 | - idirname 10 | - imatch 11 | - interruptible 12 | - jiti 13 | - kuizinas 14 | - pipefail 15 | - randomcolor 16 | - roarr 17 | - SIGINT 18 | - SIGTERM 19 | - turborepo 20 | - turbowatch 21 | - vitest 22 | - wholename 23 | - pidtree 24 | - pids -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "author": { 3 | "email": "gajus@gajus.com", 4 | "name": "Gajus Kuizinas", 5 | "url": "http://gajus.com" 6 | }, 7 | "bin": { 8 | "turbowatch": "./dist/bin/turbowatch.js" 9 | }, 10 | "dependencies": { 11 | "chalk": "^4.1.2", 12 | "chokidar": "^3.5.3", 13 | "find-process": "^1.4.7", 14 | "glob": "^9.3.1", 15 | "jiti": "^1.18.2", 16 | "micromatch": "^4.0.5", 17 | "pidtree": "^0.6.0", 18 | "randomcolor": "^0.6.2", 19 | "roarr": "^7.15.0", 20 | "semver": "^7.3.8", 21 | "serialize-error": "^11.0.0", 22 | "throttle-debounce": "^5.0.0", 23 | "ts-custom-error": "^3.3.1", 24 | "yargs": "^17.7.1", 25 | "zx": "^7.2.1" 26 | }, 27 | "devDependencies": { 28 | "@semantic-release/commit-analyzer": "^9.0.2", 29 | "@semantic-release/github": "^8.0.7", 30 | "@semantic-release/npm": "^9.0.2", 31 | "@types/node": "^18.15.3", 32 | "@types/sinon": "^10.0.13", 33 | "@types/yargs": "^17.0.22", 34 | "cspell": "^6.30.2", 35 | "eslint": "^8.36.0", 36 | "eslint-config-canonical": "^41.0.1", 37 | "semantic-release": "^20.1.3", 38 | "sinon": "^15.0.2", 39 | "typescript": "^5.0.2", 40 | "vitest": "^0.31.4" 41 | }, 42 | "engines": { 43 | "node": ">=18" 44 | }, 45 | "files": [ 46 | "./src", 47 | "./dist" 48 | ], 49 | "keywords": [ 50 | "watch", 51 | "files" 52 | ], 53 | 
"license": "BSD-3-Clause", 54 | "main": "./dist/index.js", 55 | "name": "turbowatch", 56 | "repository": { 57 | "type": "git", 58 | "url": "https://github.com/gajus/turbowatch" 59 | }, 60 | "scripts": { 61 | "build": "rm -fr ./dist && tsc --project tsconfig.build.json", 62 | "dev": "tsc --watch", 63 | "lint": "npm run lint:tsc && npm run lint:eslint && npm run lint:cspell", 64 | "lint:cspell": "cspell './**/*.{ts,tsx}' --no-progress --gitignore", 65 | "lint:eslint": "eslint --color .", 66 | "lint:tsc": "tsc", 67 | "test:vitest": "vitest --passWithNoTests --single-thread" 68 | }, 69 | "types": "./dist/index.d.ts", 70 | "version": "1.0.0" 71 | } 72 | -------------------------------------------------------------------------------- /src/Logger.ts: -------------------------------------------------------------------------------- 1 | import { Roarr } from 'roarr'; 2 | 3 | export const Logger = Roarr.child({ 4 | package: 'turbowatch', 5 | }); 6 | -------------------------------------------------------------------------------- /src/__fixtures__/killPsTree/badTree/a.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable no-console */ 2 | 3 | const { spawn } = require('node:child_process'); 4 | const { resolve } = require('node:path'); 5 | 6 | spawn('node', [resolve(__dirname, 'b.js')], { 7 | stdio: 'inherit', 8 | }); 9 | -------------------------------------------------------------------------------- /src/__fixtures__/killPsTree/badTree/b.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable no-console */ 2 | 3 | setInterval(() => { 4 | console.log('b'); 5 | }, 1_000); 6 | 7 | process.on('SIGTERM', () => { 8 | console.log('b: SIGTERM'); 9 | }); 10 | -------------------------------------------------------------------------------- /src/__fixtures__/killPsTree/goodTree/a.js: -------------------------------------------------------------------------------- 1 | /* 
eslint-disable no-console */ 2 | 3 | const { spawn } = require('node:child_process'); 4 | const { resolve } = require('node:path'); 5 | 6 | const b = spawn('node', [resolve(__dirname, 'b.js')]); 7 | 8 | b.stdout.on('data', (data) => { 9 | console.log(data.toString()); 10 | }); 11 | -------------------------------------------------------------------------------- /src/__fixtures__/killPsTree/goodTree/b.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable no-console */ 2 | 3 | setInterval(() => { 4 | console.log('b'); 5 | }, 1_000); 6 | -------------------------------------------------------------------------------- /src/backends/ChokidarWatcher.ts: -------------------------------------------------------------------------------- 1 | import { Logger } from '../Logger'; 2 | import { FileWatchingBackend } from './FileWatchingBackend'; 3 | import * as chokidar from 'chokidar'; 4 | 5 | const log = Logger.child({ 6 | namespace: 'ChokidarWatcher', 7 | }); 8 | export class ChokidarWatcher extends FileWatchingBackend { 9 | private chokidar: chokidar.FSWatcher; 10 | 11 | private indexingIntervalId: NodeJS.Timeout; 12 | 13 | public constructor(project: string) { 14 | super(); 15 | 16 | let discoveredFileCount = 0; 17 | 18 | this.indexingIntervalId = setInterval(() => { 19 | log.trace( 20 | 'indexed %s %s...', 21 | discoveredFileCount.toLocaleString('en-US'), 22 | discoveredFileCount === 1 ? 
'file' : 'files', 23 | ); 24 | }, 5_000); 25 | 26 | this.chokidar = chokidar.watch(project, { 27 | awaitWriteFinish: false, 28 | followSymlinks: true, 29 | }); 30 | 31 | let ready = false; 32 | 33 | this.chokidar.on('ready', () => { 34 | clearInterval(this.indexingIntervalId); 35 | 36 | ready = true; 37 | 38 | this.emitReady(); 39 | }); 40 | 41 | this.chokidar.on('all', (event, filename) => { 42 | if (!ready) { 43 | discoveredFileCount++; 44 | 45 | return; 46 | } 47 | 48 | if (event === 'addDir') { 49 | return; 50 | } 51 | 52 | this.emitChange({ filename }); 53 | }); 54 | } 55 | 56 | public close() { 57 | clearInterval(this.indexingIntervalId); 58 | 59 | return this.chokidar.close(); 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /src/backends/FSWatcher.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable canonical/filename-match-regex */ 2 | 3 | import { FileWatchingBackend } from './FileWatchingBackend'; 4 | import { glob } from 'glob'; 5 | import { type FSWatcher as NativeFSWatcher, watch } from 'node:fs'; 6 | import { lstat, realpath } from 'node:fs/promises'; 7 | import path from 'node:path'; 8 | 9 | const findSymlinks = async (project: string) => { 10 | const filenames = await glob('./**/*/', { 11 | absolute: true, 12 | cwd: project, 13 | dot: true, 14 | follow: false, 15 | }); 16 | 17 | const symlinks: Array<{ 18 | realpath: string; 19 | symlink: string; 20 | }> = []; 21 | 22 | for (const filename of filenames) { 23 | let stats; 24 | 25 | try { 26 | stats = await lstat(filename); 27 | } catch { 28 | continue; 29 | } 30 | 31 | if (stats.isSymbolicLink()) { 32 | let fileRealpath; 33 | 34 | try { 35 | fileRealpath = await realpath(filename); 36 | } catch { 37 | continue; 38 | } 39 | 40 | if (!symlinks.some((symlink) => symlink.symlink === fileRealpath)) { 41 | symlinks.push({ 42 | realpath: fileRealpath, 43 | symlink: filename, 44 | }); 45 | } 46 | } 47 | 
} 48 | 49 | return symlinks; 50 | }; 51 | 52 | export class FSWatcher extends FileWatchingBackend { 53 | private fsWatchers: NativeFSWatcher[] = []; 54 | 55 | private closed = false; 56 | 57 | public constructor(project: string) { 58 | super(); 59 | 60 | // eslint-disable-next-line unicorn/consistent-function-scoping 61 | const watchPath = (target: string) => { 62 | return watch( 63 | target, 64 | { 65 | encoding: 'utf8', 66 | persistent: true, 67 | recursive: true, 68 | }, 69 | (eventType, filename) => { 70 | this.emitChange({ filename: path.resolve(target, filename) }); 71 | }, 72 | ); 73 | }; 74 | 75 | this.fsWatchers.push(watchPath(project)); 76 | 77 | // TODO detect when a new symlink is added to the project 78 | // eslint-disable-next-line promise/prefer-await-to-then 79 | findSymlinks(project).then((symlinks) => { 80 | if (this.closed) { 81 | return; 82 | } 83 | 84 | for (const symlink of symlinks) { 85 | this.fsWatchers.push( 86 | watch( 87 | symlink.realpath, 88 | { 89 | encoding: 'utf8', 90 | persistent: true, 91 | recursive: true, 92 | }, 93 | (eventType, filename) => { 94 | const absolutePath = path.resolve(symlink.realpath, filename); 95 | 96 | this.emitChange({ 97 | filename: path.join( 98 | symlink.symlink, 99 | path.relative(symlink.realpath, absolutePath), 100 | ), 101 | }); 102 | }, 103 | ), 104 | ); 105 | } 106 | 107 | this.emitReady(); 108 | }); 109 | } 110 | 111 | public async close() { 112 | this.closed = true; 113 | 114 | for (const fsWatcher of this.fsWatchers) { 115 | fsWatcher.close(); 116 | } 117 | } 118 | } 119 | -------------------------------------------------------------------------------- /src/backends/FileWatchingBackend.test.ts: -------------------------------------------------------------------------------- 1 | import { isFSWatcherAvailable } from '../isFSWatcherAvailable'; 2 | import { ChokidarWatcher } from './ChokidarWatcher'; 3 | import { type FileWatchingBackend } from './FileWatchingBackend'; 4 | import { FSWatcher } from 
'./FSWatcher'; 5 | import { TurboWatcher } from './TurboWatcher'; 6 | import fs from 'node:fs/promises'; 7 | import path from 'node:path'; 8 | import { setTimeout } from 'node:timers/promises'; 9 | import * as sinon from 'sinon'; 10 | import { beforeEach, expect, it } from 'vitest'; 11 | 12 | const fixturesPath = path.resolve(__dirname, '.fixtures'); 13 | 14 | beforeEach(async () => { 15 | await fs.rm(fixturesPath, { 16 | force: true, 17 | recursive: true, 18 | }); 19 | 20 | await fs.mkdir(fixturesPath); 21 | }); 22 | 23 | const waitForReady = (watcher: FileWatchingBackend) => { 24 | return new Promise((resolve) => { 25 | watcher.on('ready', () => { 26 | resolve(null); 27 | }); 28 | }); 29 | }; 30 | 31 | const backends = [ 32 | { 33 | name: 'Chokidar', 34 | Watcher: ChokidarWatcher, 35 | }, 36 | { 37 | name: 'FS', 38 | Watcher: FSWatcher, 39 | }, 40 | { 41 | name: 'Turbo', 42 | Watcher: TurboWatcher, 43 | }, 44 | ]; 45 | 46 | for (const { Watcher, name } of backends) { 47 | if (isFSWatcherAvailable() === false && Watcher === FSWatcher) { 48 | continue; 49 | } 50 | 51 | it('[' + name + '] detects file change', async () => { 52 | const watcher = new Watcher(fixturesPath); 53 | 54 | await waitForReady(watcher); 55 | 56 | const onChange = sinon.stub(); 57 | 58 | watcher.on('change', onChange); 59 | 60 | await setTimeout(100); 61 | 62 | await fs.writeFile(path.join(fixturesPath, 'foo'), ''); 63 | 64 | await setTimeout(100); 65 | 66 | expect( 67 | onChange.calledWith( 68 | sinon.match({ 69 | filename: path.join(fixturesPath, 'foo'), 70 | }), 71 | ), 72 | ).toBe(true); 73 | 74 | await watcher.close(); 75 | }); 76 | 77 | it('[' + name + '] detects changes to a file that is replaced', async () => { 78 | const watcher = new Watcher(fixturesPath); 79 | 80 | await waitForReady(watcher); 81 | 82 | const onChange = sinon.stub(); 83 | 84 | watcher.on('change', onChange); 85 | 86 | await setTimeout(100); 87 | 88 | await fs.writeFile(path.join(fixturesPath, 'foo'), ''); 89 | 90 | 
await setTimeout(100); 91 | 92 | await fs.unlink(path.join(fixturesPath, 'foo')); 93 | 94 | await setTimeout(100); 95 | 96 | await fs.writeFile(path.join(fixturesPath, 'foo'), ''); 97 | 98 | await setTimeout(100); 99 | 100 | expect(onChange.callCount).toBeGreaterThanOrEqual(3); 101 | 102 | await watcher.close(); 103 | }); 104 | 105 | it('[' + name + '] detects hard link change (linked file)', async () => { 106 | await fs.mkdir(path.resolve(fixturesPath, 'foo')); 107 | 108 | await fs.writeFile(path.join(fixturesPath, 'bar'), ''); 109 | 110 | await fs.link( 111 | path.join(fixturesPath, 'bar'), 112 | path.join(fixturesPath, 'foo', 'bar'), 113 | ); 114 | 115 | const watcher = new Watcher(path.resolve(fixturesPath, 'foo')); 116 | 117 | await waitForReady(watcher); 118 | 119 | const onChange = sinon.stub(); 120 | 121 | watcher.on('change', onChange); 122 | 123 | await setTimeout(100); 124 | 125 | await fs.writeFile(path.join(fixturesPath, 'bar'), ''); 126 | 127 | await setTimeout(100); 128 | 129 | expect( 130 | onChange.calledWith( 131 | sinon.match({ 132 | filename: path.join(fixturesPath, 'foo', 'bar'), 133 | }), 134 | ), 135 | ).toBe(true); 136 | 137 | await watcher.close(); 138 | }); 139 | 140 | it('[' + name + '] detects symlink change (linked file)', async () => { 141 | await fs.mkdir(path.resolve(fixturesPath, 'foo')); 142 | 143 | await fs.writeFile(path.join(fixturesPath, 'bar'), ''); 144 | 145 | await fs.symlink( 146 | path.join(fixturesPath, 'bar'), 147 | path.join(fixturesPath, 'foo', 'bar'), 148 | ); 149 | 150 | const watcher = new Watcher(path.resolve(fixturesPath, 'foo')); 151 | 152 | await waitForReady(watcher); 153 | 154 | const onChange = sinon.stub(); 155 | 156 | watcher.on('change', onChange); 157 | 158 | await setTimeout(100); 159 | 160 | await fs.writeFile(path.join(fixturesPath, 'bar'), ''); 161 | 162 | await setTimeout(100); 163 | 164 | expect( 165 | onChange.calledWith( 166 | sinon.match({ 167 | filename: path.join(fixturesPath, 'foo', 'bar'), 
168 | }), 169 | ), 170 | ).toBe(true); 171 | 172 | await watcher.close(); 173 | }); 174 | 175 | it('[' + name + '] detects symlink change (linked path)', async () => { 176 | await fs.mkdir(path.resolve(fixturesPath, 'foo')); 177 | await fs.mkdir(path.resolve(fixturesPath, 'bar')); 178 | await fs.writeFile(path.join(fixturesPath, 'bar', 'baz'), ''); 179 | 180 | await fs.symlink( 181 | path.join(fixturesPath, 'bar'), 182 | path.join(fixturesPath, 'foo', 'bar'), 183 | ); 184 | 185 | const watcher = new Watcher(path.resolve(fixturesPath, 'foo')); 186 | 187 | await waitForReady(watcher); 188 | 189 | const onChange = sinon.stub(); 190 | 191 | watcher.on('change', onChange); 192 | 193 | await setTimeout(100); 194 | 195 | await fs.writeFile(path.join(fixturesPath, 'bar', 'baz'), ''); 196 | 197 | await setTimeout(100); 198 | 199 | expect( 200 | onChange.calledWith( 201 | sinon.match({ 202 | filename: path.join(fixturesPath, 'foo', 'bar', 'baz'), 203 | }), 204 | ), 205 | ).toBe(true); 206 | 207 | // TODO investigate why this is failing in GitHub CI 208 | // expect( 209 | // onChange.calledWith({ 210 | // filename: path.join(fixturesPath, 'foo', 'bar'), 211 | // }), 212 | // ).toBe(true); 213 | 214 | await watcher.close(); 215 | }); 216 | } 217 | -------------------------------------------------------------------------------- /src/backends/FileWatchingBackend.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/consistent-type-definitions */ 2 | /* eslint-disable @typescript-eslint/method-signature-style */ 3 | 4 | import { type FileChangeEvent } from '../types'; 5 | import { EventEmitter } from 'node:events'; 6 | import path from 'node:path'; 7 | 8 | interface BackendEventEmitter { 9 | on(name: 'ready', listener: () => void): this; 10 | on(name: 'change', listener: (event: FileChangeEvent) => void): this; 11 | } 12 | 13 | export abstract class FileWatchingBackend 14 | extends EventEmitter 15 | 
implements BackendEventEmitter 16 | { 17 | public constructor() { 18 | super(); 19 | } 20 | 21 | public abstract close(): Promise; 22 | 23 | protected emitReady(): void { 24 | this.emit('ready'); 25 | } 26 | 27 | protected emitChange(event: FileChangeEvent): void { 28 | if (!path.isAbsolute(event.filename)) { 29 | throw new Error('Watchers must emit absolute paths'); 30 | } 31 | 32 | this.emit('change', { 33 | filename: event.filename, 34 | }); 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /src/backends/TurboWatcher.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable canonical/filename-match-regex */ 2 | 3 | import { isFSWatcherAvailable } from '../isFSWatcherAvailable'; 4 | import { Logger } from '../Logger'; 5 | import { ChokidarWatcher } from './ChokidarWatcher'; 6 | import { FileWatchingBackend } from './FileWatchingBackend'; 7 | import { FSWatcher } from './FSWatcher'; 8 | 9 | const log = Logger.child({ 10 | namespace: 'TurboWatcher', 11 | }); 12 | 13 | export class TurboWatcher extends FileWatchingBackend { 14 | private backend: FileWatchingBackend; 15 | 16 | public constructor(project: string) { 17 | super(); 18 | 19 | if (isFSWatcherAvailable()) { 20 | log.info('using native FSWatcher'); 21 | this.backend = new FSWatcher(project); 22 | } else { 23 | log.info('using native ChokidarWatcher'); 24 | this.backend = new ChokidarWatcher(project); 25 | } 26 | 27 | this.backend.on('ready', () => { 28 | this.emit('ready'); 29 | }); 30 | 31 | this.backend.on('change', (event) => { 32 | this.emit('change', event); 33 | }); 34 | } 35 | 36 | public close() { 37 | return this.backend.close(); 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /src/bin/turbowatch.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | /* eslint-disable node/shebang */ 4 | /* 
eslint-disable require-atomic-updates */ 5 | 6 | import { Logger } from '../Logger'; 7 | import { 8 | type TurbowatchConfigurationInput, 9 | type TurbowatchController, 10 | } from '../types'; 11 | import { glob } from 'glob'; 12 | import jiti from 'jiti'; 13 | import { existsSync } from 'node:fs'; 14 | import path from 'node:path'; 15 | import { hideBin } from 'yargs/helpers'; 16 | import yargs from 'yargs/yargs'; 17 | 18 | const log = Logger.child({ 19 | namespace: 'turbowatch', 20 | }); 21 | 22 | // eslint-disable-next-line node/no-process-env 23 | if (process.env.ROARR_LOG !== 'true') { 24 | // eslint-disable-next-line no-console 25 | console.warn( 26 | '[turbowatch] running turbowatch without logging enabled; set ROARR_LOG=true to enable logging. Install @roarr/cli to pretty-print logs.', 27 | ); 28 | } 29 | 30 | const findTurbowatchScript = (inputPath: string): string | null => { 31 | let resolvedPath: string | null = null; 32 | 33 | const providedPath = path.resolve(process.cwd(), inputPath); 34 | 35 | const possiblePaths = [providedPath]; 36 | 37 | if (path.extname(providedPath) === '') { 38 | possiblePaths.push(providedPath + '.ts', providedPath + '.js'); 39 | } 40 | 41 | for (const possiblePath of possiblePaths) { 42 | if (existsSync(possiblePath)) { 43 | resolvedPath = possiblePath; 44 | } 45 | } 46 | 47 | return resolvedPath; 48 | }; 49 | 50 | const main = async () => { 51 | const abortController = new AbortController(); 52 | 53 | let terminating = false; 54 | 55 | process.once('SIGINT', () => { 56 | if (terminating) { 57 | log.warn('already terminating; ignoring SIGINT'); 58 | 59 | return; 60 | } 61 | 62 | terminating = true; 63 | 64 | log.warn('received SIGINT; gracefully terminating'); 65 | 66 | abortController.abort(); 67 | }); 68 | 69 | process.once('SIGTERM', () => { 70 | if (terminating) { 71 | log.warn('already terminating; ignoring SIGTERM'); 72 | 73 | return; 74 | } 75 | 76 | terminating = true; 77 | 78 | log.warn('received SIGTERM; gracefully 
terminating'); 79 | 80 | abortController.abort(); 81 | }); 82 | 83 | const { 84 | watch, 85 | }: { 86 | watch: ( 87 | configurationInput: TurbowatchConfigurationInput, 88 | ) => Promise; 89 | } = jiti(__filename)('../watch'); 90 | 91 | const argv = await yargs(hideBin(process.argv)) 92 | .command('$0 [patterns...]', 'Start Turbowatch', (commandYargs) => { 93 | commandYargs.positional('patterns', { 94 | array: true, 95 | default: ['turbowatch.ts'], 96 | describe: 97 | 'Script with Turbowatch instructions. Can provide multiple. It can also be a glob pattern, e.g. **/turbowatch.ts', 98 | type: 'string', 99 | }); 100 | }) 101 | .parse(); 102 | 103 | const patterns = argv.patterns as readonly string[]; 104 | 105 | const scriptPaths: string[] = []; 106 | 107 | for (const pattern of patterns) { 108 | if (pattern.includes('*')) { 109 | scriptPaths.push(...(await glob(pattern))); 110 | } else { 111 | scriptPaths.push(pattern); 112 | } 113 | } 114 | 115 | const resolvedScriptPaths: string[] = []; 116 | 117 | for (const scriptPath of scriptPaths) { 118 | const resolvedPath = findTurbowatchScript(scriptPath); 119 | 120 | if (!resolvedPath) { 121 | log.error('%s not found', scriptPath); 122 | 123 | process.exitCode = 1; 124 | 125 | return; 126 | } 127 | 128 | resolvedScriptPaths.push(resolvedPath); 129 | } 130 | 131 | for (const resolvedPath of resolvedScriptPaths) { 132 | const turbowatchConfiguration = jiti(__filename)(resolvedPath) 133 | .default as TurbowatchConfigurationInput; 134 | 135 | if (typeof turbowatchConfiguration?.Watcher !== 'function') { 136 | log.error( 137 | 'Expected user script to export an instance of TurbowatchController', 138 | ); 139 | 140 | process.exitCode = 1; 141 | 142 | return; 143 | } 144 | 145 | await watch({ 146 | abortController, 147 | cwd: path.dirname(resolvedPath), 148 | ...turbowatchConfiguration, 149 | }); 150 | } 151 | }; 152 | 153 | void main(); 154 | -------------------------------------------------------------------------------- 
/src/createFileChangeQueue.test.ts: -------------------------------------------------------------------------------- 1 | import { createFileChangeQueue } from './createFileChangeQueue'; 2 | import { type Subscription } from './types'; 3 | import { mkdir, rmdir, writeFile } from 'node:fs/promises'; 4 | import { join } from 'node:path'; 5 | import { setTimeout } from 'node:timers/promises'; 6 | import * as sinon from 'sinon'; 7 | import { beforeEach, expect, test } from 'vitest'; 8 | 9 | const FIXTURES_DIRECTORY = join(__dirname, '.createFileChangeQueueFixtures'); 10 | 11 | beforeEach(async () => { 12 | try { 13 | await rmdir(FIXTURES_DIRECTORY, { 14 | recursive: true, 15 | }); 16 | } catch { 17 | // 18 | } 19 | 20 | await mkdir(FIXTURES_DIRECTORY); 21 | }); 22 | 23 | test('deduplicates triggers', async () => { 24 | const fooFile = join(FIXTURES_DIRECTORY, 'foo'); 25 | 26 | await writeFile(fooFile, 'foo'); 27 | 28 | const abortController = new AbortController(); 29 | 30 | const trigger = sinon.stub().resolves(null); 31 | 32 | const subscription: Subscription = { 33 | activeTask: null, 34 | expression: ['match', '*'], 35 | initialRun: false, 36 | persistent: false, 37 | teardown: async () => {}, 38 | trigger, 39 | }; 40 | 41 | const fileChangeQueue = createFileChangeQueue({ 42 | abortSignal: abortController.signal, 43 | project: FIXTURES_DIRECTORY, 44 | subscriptions: [subscription], 45 | userDebounce: { 46 | wait: 100, 47 | }, 48 | }); 49 | 50 | fileChangeQueue.trigger({ 51 | filename: fooFile, 52 | hash: 'bar', 53 | }); 54 | 55 | fileChangeQueue.trigger({ 56 | filename: fooFile, 57 | hash: 'baz', 58 | }); 59 | 60 | await setTimeout(200); 61 | 62 | expect(trigger.callCount).toBe(1); 63 | 64 | expect(trigger.firstCall.args[0]).toEqual([ 65 | { 66 | filename: fooFile, 67 | hash: 'baz', 68 | }, 69 | ]); 70 | }); 71 | -------------------------------------------------------------------------------- /src/createFileChangeQueue.ts: 
-------------------------------------------------------------------------------- 1 | import { deduplicateFileChangeEvents } from './deduplicateFileChangeEvents'; 2 | import { hashFile } from './hashFile'; 3 | import { testExpression } from './testExpression'; 4 | import { 5 | type Debounce, 6 | type FileChangeEvent, 7 | type Subscription, 8 | } from './types'; 9 | import path from 'node:path'; 10 | import { debounce } from 'throttle-debounce'; 11 | 12 | export const createFileChangeQueue = ({ 13 | project, 14 | abortSignal, 15 | userDebounce, 16 | subscriptions, 17 | }: { 18 | abortSignal: AbortSignal; 19 | project: string; 20 | subscriptions: Subscription[]; 21 | userDebounce: Debounce; 22 | }) => { 23 | const fileHashMap: Record = {}; 24 | 25 | let queuedFileChangeEvents: FileChangeEvent[] = []; 26 | 27 | const evaluateSubscribers = debounce( 28 | userDebounce.wait, 29 | () => { 30 | const currentFileChangeEvents = deduplicateFileChangeEvents( 31 | queuedFileChangeEvents, 32 | ); 33 | 34 | queuedFileChangeEvents = []; 35 | 36 | const filesWithUnchangedHash: string[] = []; 37 | 38 | for (const fileChangeEvent of currentFileChangeEvents) { 39 | const { filename, hash } = fileChangeEvent; 40 | 41 | if (!hash) { 42 | continue; 43 | } 44 | 45 | const previousHash = fileHashMap[filename]; 46 | 47 | if (previousHash === hash) { 48 | filesWithUnchangedHash.push(filename); 49 | } else { 50 | fileHashMap[filename] = hash; 51 | } 52 | } 53 | 54 | for (const subscription of subscriptions) { 55 | const relevantEvents: FileChangeEvent[] = []; 56 | 57 | for (const fileChangeEvent of currentFileChangeEvents) { 58 | if (filesWithUnchangedHash.includes(fileChangeEvent.filename)) { 59 | continue; 60 | } 61 | 62 | if ( 63 | !testExpression( 64 | subscription.expression, 65 | path.relative(project, fileChangeEvent.filename), 66 | ) 67 | ) { 68 | continue; 69 | } 70 | 71 | relevantEvents.push(fileChangeEvent); 72 | } 73 | 74 | if (relevantEvents.length) { 75 | if 
(abortSignal?.aborted) { 76 | return; 77 | } 78 | 79 | void subscription.trigger(relevantEvents); 80 | } 81 | } 82 | }, 83 | { 84 | noLeading: true, 85 | }, 86 | ); 87 | 88 | return { 89 | trigger: (fileChangeEvent: FileChangeEvent) => { 90 | if (fileChangeEvent.hash === undefined) { 91 | // eslint-disable-next-line promise/prefer-await-to-then 92 | hashFile(fileChangeEvent.filename).then((hash) => { 93 | queuedFileChangeEvents.push({ 94 | ...fileChangeEvent, 95 | hash, 96 | }); 97 | 98 | evaluateSubscribers(); 99 | }); 100 | } else { 101 | queuedFileChangeEvents.push(fileChangeEvent); 102 | 103 | evaluateSubscribers(); 104 | } 105 | }, 106 | }; 107 | }; 108 | -------------------------------------------------------------------------------- /src/createSpawn.test.ts: -------------------------------------------------------------------------------- 1 | import { createSpawn } from './createSpawn'; 2 | import { expect, it } from 'vitest'; 3 | 4 | it('returns outputs', async () => { 5 | const spawn = createSpawn('foo'); 6 | 7 | const result = await spawn`echo 'Hello, World!'`; 8 | 9 | expect(String(result?.stdout)).toEqual('Hello, World!\n'); 10 | }); 11 | 12 | it('injects path to node_modules/.bin', async () => { 13 | const spawn = createSpawn('foo'); 14 | 15 | const result = await spawn`echo $PATH`; 16 | 17 | expect(String(result?.stdout)).toMatch(/node_modules\/\.bin/u); 18 | }); 19 | 20 | it('rejects if process produces an error', async () => { 21 | const spawn = createSpawn('foo'); 22 | 23 | await expect(spawn`does-not-exist`).rejects.toThrowError( 24 | 'Program exited with code 127.', 25 | ); 26 | }); 27 | 28 | const TIMEOUT = 100; 29 | 30 | it( 31 | 'terminates spawned process when it receives abort signal', 32 | async () => { 33 | const abortController = new AbortController(); 34 | 35 | const spawn = createSpawn('foo', { abortSignal: abortController.signal }); 36 | 37 | setTimeout(() => { 38 | void abortController.abort(); 39 | }, 50); 40 | 41 | await 
expect(spawn`sleep 10`).rejects.toThrowError(); 42 | }, 43 | TIMEOUT, 44 | ); 45 | 46 | it( 47 | 'waits for termination', 48 | async () => { 49 | const abortController = new AbortController(); 50 | 51 | const spawn = createSpawn('foo', { abortSignal: abortController.signal }); 52 | 53 | setTimeout(() => { 54 | void abortController.abort(); 55 | }, 50); 56 | 57 | const start = Date.now(); 58 | 59 | await expect( 60 | spawn`( trap '' TERM; exec sleep 0.1 )`, 61 | ).rejects.toThrowError(); 62 | 63 | expect(Date.now() - start).toBeGreaterThan(100); 64 | }, 65 | TIMEOUT * 2, 66 | ); 67 | -------------------------------------------------------------------------------- /src/createSpawn.ts: -------------------------------------------------------------------------------- 1 | // cspell:words nothrow 2 | 3 | import { AbortError, UnexpectedError } from './errors'; 4 | import { findNearestDirectory } from './findNearestDirectory'; 5 | import { killPsTree } from './killPsTree'; 6 | import { Logger } from './Logger'; 7 | import { type Throttle } from './types'; 8 | import chalk from 'chalk'; 9 | import randomColor from 'randomcolor'; 10 | import { throttle } from 'throttle-debounce'; 11 | import { $ } from 'zx'; 12 | 13 | const log = Logger.child({ 14 | namespace: 'createSpawn', 15 | }); 16 | 17 | const prefixLines = (subject: string, prefix: string): string => { 18 | const response: string[] = []; 19 | 20 | for (const fragment of subject.split('\n')) { 21 | response.push(prefix + fragment); 22 | } 23 | 24 | return response.join('\n'); 25 | }; 26 | 27 | export const createSpawn = ( 28 | taskId: string, 29 | { 30 | cwd = process.cwd(), 31 | abortSignal, 32 | throttleOutput, 33 | triggerName, 34 | triggerHexColor, 35 | }: { 36 | abortSignal?: AbortSignal; 37 | cwd?: string; 38 | throttleOutput?: Throttle; 39 | triggerHexColor?: string; 40 | triggerName?: string; 41 | } = {}, 42 | ) => { 43 | let stdoutBuffer: string[] = []; 44 | let stderrBuffer: string[] = []; 45 | 46 | const 
flush = () => { 47 | if (stdoutBuffer.length) { 48 | // eslint-disable-next-line no-console 49 | console.log(stdoutBuffer.join('\n')); 50 | } 51 | 52 | if (stderrBuffer.length) { 53 | // eslint-disable-next-line no-console 54 | console.error(stderrBuffer.join('\n')); 55 | } 56 | 57 | stdoutBuffer = []; 58 | stderrBuffer = []; 59 | }; 60 | 61 | const output = throttle( 62 | throttleOutput?.delay, 63 | () => { 64 | flush(); 65 | }, 66 | { 67 | noLeading: true, 68 | }, 69 | ); 70 | 71 | const colorText = chalk.hex( 72 | triggerHexColor || randomColor({ luminosity: 'dark' }), 73 | ); 74 | 75 | return async (pieces: TemplateStringsArray, ...args: any[]) => { 76 | const binPath = (await findNearestDirectory('node_modules', cwd)) + '/.bin'; 77 | 78 | $.cwd = cwd; 79 | 80 | $.prefix = `set -euo pipefail; export PATH="${binPath}:$PATH";`; 81 | 82 | let onStdout: (chunk: Buffer) => void; 83 | let onStderr: (chunk: Buffer) => void; 84 | 85 | const formatChunk = (chunk: Buffer) => { 86 | const prefixTriggerName = triggerName ? 
triggerName + ' ' : ''; 87 | 88 | return prefixLines( 89 | chunk.toString().trimEnd(), 90 | colorText(`${prefixTriggerName}${taskId}`) + ' > ', 91 | ); 92 | }; 93 | 94 | if (throttleOutput?.delay) { 95 | onStdout = (chunk: Buffer) => { 96 | stdoutBuffer.push(formatChunk(chunk)); 97 | output(); 98 | }; 99 | 100 | onStderr = (chunk: Buffer) => { 101 | stderrBuffer.push(formatChunk(chunk)); 102 | output(); 103 | }; 104 | } else { 105 | onStdout = (chunk: Buffer) => { 106 | // eslint-disable-next-line no-console 107 | console.log(formatChunk(chunk)); 108 | }; 109 | 110 | onStderr = (chunk: Buffer) => { 111 | // eslint-disable-next-line no-console 112 | console.error(formatChunk(chunk)); 113 | }; 114 | } 115 | 116 | if (abortSignal?.aborted) { 117 | throw new UnexpectedError( 118 | 'Attempted to spawn a process after the task was aborted.', 119 | ); 120 | } 121 | 122 | // eslint-disable-next-line promise/prefer-await-to-then 123 | const processPromise = $(pieces, ...args) 124 | .nothrow() 125 | .quiet(); 126 | 127 | processPromise.stdout.on('data', onStdout); 128 | processPromise.stderr.on('data', onStderr); 129 | 130 | if (abortSignal) { 131 | const kill = () => { 132 | const pid = processPromise.child?.pid; 133 | 134 | if (!pid) { 135 | log.warn('no process to kill'); 136 | 137 | return; 138 | } 139 | 140 | // TODO make this configurable 141 | // eslint-disable-next-line promise/prefer-await-to-then 142 | killPsTree(pid, 5_000).then(() => { 143 | log.debug('task %s was killed', taskId); 144 | 145 | processPromise.stdout.off('data', onStdout); 146 | processPromise.stderr.off('data', onStderr); 147 | }); 148 | }; 149 | 150 | abortSignal.addEventListener('abort', kill, { 151 | once: true, 152 | }); 153 | 154 | // eslint-disable-next-line promise/prefer-await-to-then 155 | processPromise.finally(() => { 156 | abortSignal.removeEventListener('abort', kill); 157 | }); 158 | } 159 | 160 | const result = await processPromise; 161 | 162 | flush(); 163 | 164 | if 
(result.exitCode === 0) { 165 | return result; 166 | } 167 | 168 | if (abortSignal?.aborted) { 169 | throw new AbortError('Program was aborted.'); 170 | } 171 | 172 | log.error('task %s exited with an error', taskId); 173 | 174 | throw new Error('Program exited with code ' + result.exitCode + '.'); 175 | }; 176 | }; 177 | -------------------------------------------------------------------------------- /src/deduplicateFileChangeEvents.test.ts: -------------------------------------------------------------------------------- 1 | import { deduplicateFileChangeEvents } from './deduplicateFileChangeEvents'; 2 | import { expect, it } from 'vitest'; 3 | 4 | it('keeps only the latest entry of a file change', async () => { 5 | expect( 6 | deduplicateFileChangeEvents([ 7 | { 8 | filename: '/foo', 9 | hash: '1', 10 | }, 11 | { 12 | filename: '/foo', 13 | hash: '2', 14 | }, 15 | { 16 | filename: '/foo', 17 | hash: '3', 18 | }, 19 | ]), 20 | ).toEqual([ 21 | { 22 | filename: '/foo', 23 | hash: '3', 24 | }, 25 | ]); 26 | }); 27 | -------------------------------------------------------------------------------- /src/deduplicateFileChangeEvents.ts: -------------------------------------------------------------------------------- 1 | import { type FileChangeEvent } from './types'; 2 | 3 | export const deduplicateFileChangeEvents = ( 4 | fileChangeEvents: readonly FileChangeEvent[], 5 | ): readonly FileChangeEvent[] => { 6 | const changedFilePaths: string[] = []; 7 | 8 | return fileChangeEvents 9 | .slice() 10 | .reverse() 11 | .filter((event) => { 12 | if (changedFilePaths.includes(event.filename)) { 13 | return false; 14 | } 15 | 16 | changedFilePaths.push(event.filename); 17 | 18 | return true; 19 | }) 20 | .reverse(); 21 | }; 22 | -------------------------------------------------------------------------------- /src/defineConfig.ts: -------------------------------------------------------------------------------- 1 | import { TurboWatcher } from './backends/TurboWatcher'; 2 | import 
{ type TurbowatchConfigurationInput } from './types'; 3 | 4 | export const defineConfig = ( 5 | configurationInput: TurbowatchConfigurationInput, 6 | ): TurbowatchConfigurationInput => { 7 | return { 8 | Watcher: TurboWatcher, 9 | ...configurationInput, 10 | }; 11 | }; 12 | -------------------------------------------------------------------------------- /src/errors.ts: -------------------------------------------------------------------------------- 1 | import { CustomError } from 'ts-custom-error'; 2 | 3 | export class TurbowatchError extends CustomError {} 4 | 5 | export class UnexpectedError extends TurbowatchError {} 6 | 7 | export class AbortError extends TurbowatchError {} 8 | -------------------------------------------------------------------------------- /src/findNearestDirectory.ts: -------------------------------------------------------------------------------- 1 | import fs from 'node:fs/promises'; 2 | import path from 'node:path'; 3 | 4 | /** 5 | * Iterates up the directory tree from the given path until it finds a directory 6 | * containing the given file. 
7 | */ 8 | export const findNearestDirectory = async ( 9 | fileName: string, 10 | startPath: string, 11 | ): Promise => { 12 | let currentPath = startPath; 13 | 14 | // eslint-disable-next-line no-constant-condition 15 | while (true) { 16 | const targetPath = path.join(currentPath, fileName); 17 | 18 | try { 19 | await fs.access(targetPath, fs.constants.F_OK); 20 | } catch { 21 | const nextPath = path.resolve(currentPath, '..'); 22 | 23 | if (nextPath === currentPath) { 24 | break; 25 | } 26 | 27 | currentPath = nextPath; 28 | 29 | continue; 30 | } 31 | 32 | return targetPath; 33 | } 34 | 35 | return null; 36 | }; 37 | -------------------------------------------------------------------------------- /src/generateShortId.ts: -------------------------------------------------------------------------------- 1 | import { randomUUID } from 'node:crypto'; 2 | 3 | export const generateShortId = (): string => { 4 | return randomUUID().split('-')[0]; 5 | }; 6 | -------------------------------------------------------------------------------- /src/hashFile.test.ts: -------------------------------------------------------------------------------- 1 | import { hashFile } from './hashFile'; 2 | import { resolve } from 'node:path'; 3 | import { expect, it } from 'vitest'; 4 | 5 | it('hashes file', async () => { 6 | await expect(hashFile(resolve(__dirname, 'Logger.ts'))).resolves.toBe( 7 | '8f8bf20d9e97101d36989916146db88c825b7922', 8 | ); 9 | }); 10 | 11 | it('resolves null if file cannot be read', async () => { 12 | await expect(hashFile(resolve(__dirname, 'does-not-exist.ts'))).resolves.toBe( 13 | null, 14 | ); 15 | }); 16 | -------------------------------------------------------------------------------- /src/hashFile.ts: -------------------------------------------------------------------------------- 1 | import { createHash } from 'node:crypto'; 2 | import { createReadStream } from 'node:fs'; 3 | 4 | export const hashFile = (filePath: string): Promise => { 5 | return new 
Promise((resolve) => { 6 | const fileDescriptor = createReadStream(filePath); 7 | 8 | const hash = createHash('sha1').setEncoding('hex'); 9 | 10 | fileDescriptor.on('error', () => { 11 | resolve(null); 12 | }); 13 | 14 | fileDescriptor.on('end', () => { 15 | hash.end(); 16 | 17 | resolve(hash.read()); 18 | }); 19 | 20 | fileDescriptor.pipe(hash); 21 | }); 22 | }; 23 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | export { ChokidarWatcher } from './backends/ChokidarWatcher'; 2 | export { FileWatchingBackend } from './backends/FileWatchingBackend'; 3 | export { FSWatcher } from './backends/FSWatcher'; 4 | export { TurboWatcher } from './backends/TurboWatcher'; 5 | export { defineConfig } from './defineConfig'; 6 | export { type ChangeEvent, type Expression, type TriggerInput } from './types'; 7 | export { watch } from './watch'; 8 | export { type ProcessPromise } from 'zx'; 9 | -------------------------------------------------------------------------------- /src/isFSWatcherAvailable.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable canonical/filename-match-regex */ 2 | 3 | import { platform } from 'node:os'; 4 | import * as semver from 'semver'; 5 | 6 | const isMacOs = () => { 7 | return platform() === 'darwin'; 8 | }; 9 | 10 | export const isFSWatcherAvailable = () => { 11 | return semver.gte(process.version, '19.1.0') && isMacOs(); 12 | }; 13 | -------------------------------------------------------------------------------- /src/killPsTree.test.ts: -------------------------------------------------------------------------------- 1 | import { killPsTree } from './killPsTree'; 2 | import { exec } from 'node:child_process'; 3 | import { join } from 'node:path'; 4 | import { setTimeout } from 'node:timers/promises'; 5 | import { test } from 'vitest'; 6 | 7 | test('kills a good 
process tree', async () => { 8 | const childProcess = exec( 9 | `node ${join(__dirname, '__fixtures__/killPsTree/goodTree/a.js')}`, 10 | ); 11 | 12 | if (!childProcess.pid) { 13 | throw new Error('Expected child process to have a pid'); 14 | } 15 | 16 | await setTimeout(500); 17 | 18 | await killPsTree(childProcess.pid); 19 | }); 20 | 21 | test('kills a bad process tree', async () => { 22 | const childProcess = exec( 23 | `node ${join(__dirname, '__fixtures__/killPsTree/badTree/a.js')}`, 24 | ); 25 | 26 | if (!childProcess.pid) { 27 | throw new Error('Expected child process to have a pid'); 28 | } 29 | 30 | await setTimeout(500); 31 | 32 | await killPsTree(childProcess.pid, 1_000); 33 | }); 34 | -------------------------------------------------------------------------------- /src/killPsTree.ts: -------------------------------------------------------------------------------- 1 | import { Logger } from './Logger'; 2 | import findProcess from 'find-process'; 3 | import { setTimeout as delay } from 'node:timers/promises'; 4 | import pidTree from 'pidtree'; 5 | 6 | const log = Logger.child({ 7 | namespace: 'killPsTree', 8 | }); 9 | 10 | export const killPsTree = async ( 11 | rootPid: number, 12 | gracefulTimeout: number = 30_000, 13 | ) => { 14 | const childPids = await pidTree(rootPid); 15 | 16 | const pids = [rootPid, ...childPids]; 17 | 18 | for (const pid of pids) { 19 | try { 20 | process.kill(pid, 'SIGTERM'); 21 | } catch (error) { 22 | if (error.code === 'ESRCH') { 23 | log.debug({ pid }, 'process already terminated'); 24 | } else { 25 | throw error; 26 | } 27 | } 28 | } 29 | 30 | let hangingPids = [...pids]; 31 | 32 | let hitTimeout = false; 33 | 34 | const timeoutId = setTimeout(() => { 35 | hitTimeout = true; 36 | 37 | log.debug({ hangingPids }, 'sending SIGKILL to processes...'); 38 | 39 | for (const pid of hangingPids) { 40 | try { 41 | process.kill(pid, 'SIGKILL'); 42 | } catch (error) { 43 | if (error.code === 'ESRCH') { 44 | log.debug({ pid }, 'process 
already terminated'); 45 | } else { 46 | throw error; 47 | } 48 | } 49 | } 50 | }, gracefulTimeout); 51 | 52 | // eslint-disable-next-line no-unmodified-loop-condition 53 | while (!hitTimeout && hangingPids.length > 0) { 54 | for (const hangingPid of hangingPids) { 55 | const processes = await findProcess('pid', hangingPid); 56 | 57 | if (processes.length === 0) { 58 | hangingPids = hangingPids.filter((pid) => pid !== hangingPid); 59 | } 60 | } 61 | 62 | await delay(100); 63 | } 64 | 65 | clearTimeout(timeoutId); 66 | 67 | log.debug('all processes terminated'); 68 | }; 69 | -------------------------------------------------------------------------------- /src/subscribe.test.ts: -------------------------------------------------------------------------------- 1 | import { subscribe } from './subscribe'; 2 | import { type Trigger } from './types'; 3 | import { setTimeout } from 'node:timers'; 4 | import * as sinon from 'sinon'; 5 | import { expect, it } from 'vitest'; 6 | 7 | const defaultTrigger = { 8 | abortSignal: new AbortController().signal, 9 | expression: ['match', 'foo', 'basename'], 10 | id: 'foo', 11 | initialRun: true, 12 | interruptible: false, 13 | name: 'foo', 14 | onChange: async () => {}, 15 | onTeardown: async () => {}, 16 | persistent: false, 17 | relativePath: 'foo', 18 | retry: { 19 | retries: 0, 20 | }, 21 | throttleOutput: { 22 | delay: 0, 23 | }, 24 | watch: 'foo', 25 | } as Trigger; 26 | 27 | const wait = (time: number) => { 28 | return new Promise((resolve) => { 29 | setTimeout(resolve, time); 30 | }); 31 | }; 32 | 33 | it('evaluates onChange', async () => { 34 | const trigger = { 35 | ...defaultTrigger, 36 | } as Trigger; 37 | 38 | const triggerMock = sinon.mock(trigger); 39 | 40 | const onChangeExpectation = triggerMock 41 | .expects('onChange') 42 | .once() 43 | .resolves(null); 44 | 45 | const subscription = subscribe(trigger); 46 | 47 | subscription.trigger([]); 48 | 49 | expect(triggerMock.verify()); 50 | 51 | 
expect(onChangeExpectation.args[0][0].taskId).toMatch(/^[a-z\d]{8}$/u); 52 | }); 53 | 54 | it('skips onChange if teardown is initiated', async () => { 55 | const trigger = { 56 | ...defaultTrigger, 57 | } as Trigger; 58 | 59 | const triggerMock = sinon.mock(trigger); 60 | 61 | const onChangeExpectation = triggerMock.expects('onChange').atLeast(1); 62 | 63 | onChangeExpectation.onFirstCall().resolves(wait(100)); 64 | 65 | onChangeExpectation.onSecondCall().resolves(null); 66 | 67 | const subscription = subscribe(trigger); 68 | 69 | subscription.trigger([{ filename: 'foo' }]); 70 | subscription.teardown(); 71 | subscription.trigger([{ filename: 'bar' }]); 72 | 73 | await wait(300); 74 | 75 | expect(onChangeExpectation.callCount).toBe(1); 76 | }); 77 | 78 | it('initiates teardown at most once', async () => { 79 | const trigger = { 80 | ...defaultTrigger, 81 | } as Trigger; 82 | 83 | const triggerMock = sinon.mock(trigger); 84 | 85 | const onTeardownExpectation = triggerMock.expects('onTeardown').atLeast(1); 86 | 87 | const subscription = subscribe(trigger); 88 | 89 | subscription.teardown(); 90 | subscription.teardown(); 91 | 92 | await wait(300); 93 | 94 | expect(onTeardownExpectation.callCount).toBe(1); 95 | }); 96 | 97 | it('swallow onChange errors', async () => { 98 | const trigger = { 99 | ...defaultTrigger, 100 | } as Trigger; 101 | 102 | const subscriptionMock = sinon.mock(trigger); 103 | 104 | subscriptionMock.expects('onChange').once().rejects(new Error('foo')); 105 | 106 | const subscription = subscribe(trigger); 107 | 108 | await subscription.trigger([]); 109 | 110 | expect(subscriptionMock.verify()); 111 | }); 112 | 113 | it('removes duplicates', async () => { 114 | const trigger = { 115 | ...defaultTrigger, 116 | }; 117 | 118 | const subscriptionMock = sinon.mock(trigger); 119 | 120 | const onChange = subscriptionMock.expects('onChange').once().resolves(null); 121 | 122 | const subscription = subscribe(trigger); 123 | 124 | subscription.trigger([ 125 | { 
126 | filename: '/foo', 127 | }, 128 | { 129 | filename: '/foo', 130 | }, 131 | { 132 | filename: '/bar', 133 | }, 134 | ]); 135 | 136 | expect(subscriptionMock.verify()); 137 | 138 | expect(onChange.args[0][0].files).toEqual([ 139 | { name: '/foo' }, 140 | { name: '/bar' }, 141 | ]); 142 | }); 143 | 144 | it('waits for onChange to complete when { interruptible: false }', async () => { 145 | const abortController = new AbortController(); 146 | 147 | const trigger = { 148 | ...defaultTrigger, 149 | abortSignal: abortController.signal, 150 | interruptible: false, 151 | }; 152 | 153 | const triggerMock = sinon.mock(trigger); 154 | 155 | const onChange = triggerMock.expects('onChange').twice(); 156 | 157 | let completed = false; 158 | 159 | onChange.onFirstCall().callsFake(async () => { 160 | await wait(100); 161 | 162 | completed = true; 163 | }); 164 | 165 | onChange.onSecondCall().callsFake(() => { 166 | expect(completed).toBe(true); 167 | 168 | abortController.abort(); 169 | }); 170 | 171 | const subscription = subscribe(trigger); 172 | 173 | await subscription.trigger([]); 174 | await subscription.trigger([]); 175 | 176 | expect(onChange.callCount).toBe(2); 177 | }); 178 | 179 | it('waits for onChange to complete when { interruptible: true } when it receives a shutdown signal', async () => { 180 | const abortController = new AbortController(); 181 | 182 | const trigger = { 183 | ...defaultTrigger, 184 | abortSignal: abortController.signal, 185 | }; 186 | 187 | let resolved = false; 188 | 189 | const subscriptionMock = sinon.mock(trigger); 190 | 191 | subscriptionMock 192 | .expects('onChange') 193 | .once() 194 | .callsFake(() => { 195 | return new Promise((resolve) => { 196 | setTimeout(() => { 197 | resolved = true; 198 | 199 | resolve(null); 200 | }, 100); 201 | }); 202 | }); 203 | 204 | const subscription = subscribe(trigger); 205 | 206 | setImmediate(() => { 207 | abortController.abort(); 208 | }); 209 | 210 | await subscription.trigger([]); 211 | 212 | 
expect(subscriptionMock.verify()); 213 | 214 | expect(resolved).toBe(true); 215 | }); 216 | 217 | it('retries failing routines', async () => { 218 | const trigger = { 219 | ...defaultTrigger, 220 | retry: { 221 | retries: 1, 222 | }, 223 | }; 224 | 225 | const subscriptionMock = sinon.mock(trigger); 226 | 227 | const onChange = subscriptionMock.expects('onChange').twice(); 228 | 229 | onChange.onFirstCall().rejects(new Error('foo')); 230 | onChange.onSecondCall().resolves(null); 231 | 232 | const subscription = await subscribe(trigger); 233 | 234 | await subscription.trigger([]); 235 | 236 | expect(onChange.verify()); 237 | }); 238 | 239 | it('reports { first: true } only for the first event', async () => { 240 | const trigger = { 241 | ...defaultTrigger, 242 | }; 243 | 244 | const subscriptionMock = sinon.mock(trigger); 245 | 246 | const onChange = subscriptionMock.expects('onChange').twice(); 247 | 248 | onChange.onFirstCall().resolves(null); 249 | 250 | onChange.onSecondCall().resolves(null); 251 | 252 | const subscription = subscribe(trigger); 253 | 254 | await subscription.trigger([]); 255 | await subscription.trigger([]); 256 | 257 | expect(onChange.args).toMatchObject([ 258 | [ 259 | { 260 | first: true, 261 | }, 262 | ], 263 | [ 264 | { 265 | first: false, 266 | }, 267 | ], 268 | ]); 269 | 270 | expect(subscriptionMock.verify()); 271 | }); 272 | 273 | it('retries persistent routine if it exits with success', async () => { 274 | const trigger = { 275 | ...defaultTrigger, 276 | persistent: true, 277 | retry: { 278 | maxTimeout: 100, 279 | retries: 1, 280 | }, 281 | }; 282 | 283 | const onChange = sinon.stub(trigger, 'onChange'); 284 | 285 | onChange.resolves(() => { 286 | return wait(100); 287 | }); 288 | 289 | const subscription = await subscribe(trigger); 290 | 291 | void subscription.trigger([]); 292 | 293 | await wait(500); 294 | 295 | subscription.activeTask?.abortController?.abort(); 296 | 297 | expect(onChange.callCount).toBeGreaterThan(2); 298 | }); 
299 | 300 | it('retries persistent routine if it exists with error', async () => { 301 | const trigger = { 302 | ...defaultTrigger, 303 | persistent: true, 304 | retry: { 305 | maxTimeout: 100, 306 | retries: 1, 307 | }, 308 | }; 309 | 310 | const onChange = sinon.stub(trigger, 'onChange'); 311 | 312 | onChange.resolves(async () => { 313 | await wait(100); 314 | 315 | throw new Error('foo'); 316 | }); 317 | 318 | const subscription = await subscribe(trigger); 319 | 320 | void subscription.trigger([]); 321 | 322 | await wait(500); 323 | 324 | subscription.activeTask?.abortController?.abort(); 325 | 326 | expect(onChange.callCount).toBeGreaterThan(2); 327 | }); 328 | 329 | it('stops retrying persistent routine if teardown is called', async () => { 330 | const trigger = { 331 | ...defaultTrigger, 332 | persistent: true, 333 | retry: { 334 | maxTimeout: 100, 335 | retries: 1, 336 | }, 337 | }; 338 | 339 | const onChange = sinon.stub(trigger, 'onChange'); 340 | 341 | onChange.resolves(async () => { 342 | await wait(100); 343 | }); 344 | 345 | const subscription = await subscribe(trigger); 346 | 347 | void subscription.trigger([]); 348 | 349 | await wait(500); 350 | 351 | await subscription.teardown(); 352 | 353 | await wait(100); 354 | 355 | const firstCallCount = onChange.callCount; 356 | 357 | await wait(500); 358 | 359 | expect(onChange.callCount).toBe(firstCallCount); 360 | }); 361 | 362 | it('does not begin the new routine until the interrupted routine has completed', async () => { 363 | const trigger = { 364 | ...defaultTrigger, 365 | interruptible: true, 366 | persistent: true, 367 | retry: { 368 | maxTimeout: 100, 369 | retries: 1, 370 | }, 371 | }; 372 | 373 | const onChange = sinon.stub(trigger, 'onChange'); 374 | 375 | onChange.resolves(async () => { 376 | await wait(100); 377 | }); 378 | 379 | const subscription = await subscribe(trigger); 380 | 381 | void subscription.trigger([]); 382 | 383 | await wait(10); 384 | 385 | void subscription.trigger([]); 386 | 
387 | await wait(10); 388 | 389 | subscription.activeTask?.abortController?.abort(); 390 | 391 | expect(onChange.callCount).toBe(1); 392 | }); 393 | 394 | it('does not begin the new routine until the interrupted routine has completed (multiple-triggers)', async () => { 395 | const trigger = { 396 | ...defaultTrigger, 397 | interruptible: true, 398 | persistent: true, 399 | retry: { 400 | maxTimeout: 100, 401 | retries: 1, 402 | }, 403 | }; 404 | 405 | const onChange = sinon.stub(trigger, 'onChange'); 406 | 407 | onChange.resolves(async () => { 408 | await wait(100); 409 | }); 410 | 411 | const subscription = await subscribe(trigger); 412 | 413 | void subscription.trigger([]); 414 | 415 | await wait(10); 416 | 417 | void subscription.trigger([]); 418 | void subscription.trigger([]); 419 | 420 | await wait(10); 421 | 422 | subscription.activeTask?.abortController?.abort(); 423 | 424 | expect(onChange.callCount).toBe(1); 425 | }); 426 | -------------------------------------------------------------------------------- /src/subscribe.ts: -------------------------------------------------------------------------------- 1 | import { createSpawn } from './createSpawn'; 2 | import { generateShortId } from './generateShortId'; 3 | import { Logger } from './Logger'; 4 | import { 5 | type ActiveTask, 6 | type FileChangeEvent, 7 | type Subscription, 8 | type Trigger, 9 | } from './types'; 10 | import { setTimeout } from 'node:timers/promises'; 11 | import { serializeError } from 'serialize-error'; 12 | 13 | const log = Logger.child({ 14 | namespace: 'subscribe', 15 | }); 16 | 17 | /** 18 | * Creates a trigger evaluation specific abort controller that inherits the abort signal from the trigger. 19 | * This abort controller is used to abort the the task that is currently running either because the trigger 20 | * has been interrupted or because the trigger has been triggered again. 
21 | */ 22 | const createAbortController = (trigger: Trigger) => { 23 | const abortController = new AbortController(); 24 | 25 | trigger.abortSignal.addEventListener('abort', () => { 26 | abortController.abort(); 27 | }); 28 | 29 | return abortController; 30 | }; 31 | 32 | const runTask = async ({ 33 | taskId, 34 | abortController, 35 | trigger, 36 | firstEvent, 37 | changedFiles, 38 | }: { 39 | abortController: AbortController; 40 | changedFiles: readonly string[]; 41 | firstEvent: boolean; 42 | taskId: string; 43 | trigger: Trigger; 44 | }) => { 45 | if (trigger.initialRun && firstEvent) { 46 | log.debug('%s (%s): initial run...', trigger.name, taskId); 47 | } else if (changedFiles.length > 10) { 48 | log.debug( 49 | { 50 | files: changedFiles.slice(0, 10), 51 | }, 52 | '%s (%s): %d files changed; showing first 10', 53 | trigger.name, 54 | taskId, 55 | changedFiles.length, 56 | ); 57 | } else { 58 | log.debug( 59 | { 60 | files: changedFiles, 61 | }, 62 | '%s (%s): %d %s changed', 63 | trigger.name, 64 | taskId, 65 | changedFiles.length, 66 | changedFiles.length === 1 ? 'file' : 'files', 67 | ); 68 | } 69 | 70 | let failedAttempts = -1; 71 | 72 | while (true) { 73 | if (abortController.signal.aborted) { 74 | log.warn('%s (%s): task aborted', trigger.name, taskId); 75 | 76 | return; 77 | } 78 | 79 | failedAttempts++; 80 | 81 | if (failedAttempts > 0) { 82 | const retryFactor = trigger.retry.factor ?? 2; 83 | const minTimeout = trigger.retry.minTimeout ?? 1_000; 84 | const maxTimeout = trigger.retry.maxTimeout ?? 30_000; 85 | const delay = Math.min( 86 | failedAttempts * retryFactor * minTimeout, 87 | trigger.retry.maxTimeout ?? 
maxTimeout, 88 | ); 89 | 90 | log.debug('delaying retry by %dms...', delay); 91 | 92 | await setTimeout(delay); 93 | } 94 | 95 | try { 96 | await trigger.onChange({ 97 | abortSignal: abortController?.signal, 98 | attempt: failedAttempts, 99 | files: changedFiles.map((changedFile) => { 100 | return { 101 | name: changedFile, 102 | }; 103 | }), 104 | first: firstEvent, 105 | log, 106 | spawn: createSpawn(taskId, { 107 | abortSignal: abortController?.signal, 108 | cwd: trigger.cwd, 109 | throttleOutput: trigger.throttleOutput, 110 | triggerHexColor: trigger.hexColor, 111 | triggerName: trigger.name, 112 | }), 113 | taskId, 114 | }); 115 | 116 | failedAttempts = 0; 117 | 118 | if (trigger.persistent) { 119 | log.debug( 120 | '%s (%s): re-running because the trigger is persistent', 121 | trigger.name, 122 | taskId, 123 | ); 124 | 125 | continue; 126 | } 127 | 128 | return; 129 | } catch (error) { 130 | if (error.name === 'AbortError') { 131 | log.warn('%s (%s): task aborted', trigger.name, taskId); 132 | 133 | return; 134 | } 135 | 136 | log.warn( 137 | { 138 | error: serializeError(error), 139 | }, 140 | '%s (%s): routine produced an error', 141 | trigger.name, 142 | taskId, 143 | ); 144 | 145 | if (trigger.persistent) { 146 | log.warn( 147 | '%s (%s): retrying because the trigger is persistent', 148 | trigger.name, 149 | taskId, 150 | ); 151 | 152 | continue; 153 | } 154 | 155 | const retriesLeft = trigger.retry.retries - failedAttempts; 156 | 157 | if (retriesLeft < 0) { 158 | throw new Error( 159 | 'Expected retries left to be greater than or equal to 0', 160 | ); 161 | } 162 | 163 | if (retriesLeft === 0) { 164 | log.warn( 165 | '%s (%s): task will not be retried; attempts exhausted', 166 | trigger.name, 167 | taskId, 168 | ); 169 | 170 | throw error; 171 | } 172 | 173 | if (retriesLeft > 0) { 174 | log.warn( 175 | '%s (%s): retrying task %d/%d...', 176 | trigger.name, 177 | taskId, 178 | trigger.retry.retries - retriesLeft, 179 | trigger.retry.retries, 180 | ); 
181 | 182 | continue; 183 | } 184 | 185 | throw new Error('Expected retries left to be greater than or equal to 0'); 186 | } 187 | } 188 | 189 | throw new Error('Expected while loop to be terminated by a return statement'); 190 | }; 191 | 192 | export const subscribe = (trigger: Trigger): Subscription => { 193 | /** 194 | * Indicates that the teardown process has been initiated. 195 | * This is used to prevent the trigger from being triggered again while the teardown process is running. 196 | */ 197 | let outerTeardownInitiated = false; 198 | 199 | /** 200 | * Stores the currently active task. 201 | */ 202 | let outerActiveTask: ActiveTask | null = null; 203 | 204 | /** 205 | * Identifies the first event in a series of events. 206 | */ 207 | let outerFirstEvent = true; 208 | 209 | /** 210 | * Stores the files that have changed since the last evaluation of the trigger 211 | */ 212 | let outerChangedFiles: string[] = []; 213 | 214 | const handleSubscriptionEvent = async () => { 215 | let firstEvent = outerFirstEvent; 216 | 217 | if (outerFirstEvent) { 218 | firstEvent = true; 219 | outerFirstEvent = false; 220 | } 221 | 222 | if (outerActiveTask) { 223 | if (trigger.interruptible) { 224 | log.debug('%s (%s): aborting task', trigger.name, outerActiveTask.id); 225 | 226 | if (!outerActiveTask.abortController) { 227 | throw new Error('Expected abort controller to be set'); 228 | } 229 | 230 | outerActiveTask.abortController.abort(); 231 | 232 | log.debug( 233 | '%s (%s): waiting for task to abort', 234 | trigger.name, 235 | outerActiveTask.id, 236 | ); 237 | 238 | if (outerActiveTask.queued) { 239 | return undefined; 240 | } 241 | 242 | outerActiveTask.queued = true; 243 | 244 | try { 245 | // Do not start a new task until the previous task has been 246 | // aborted and the shutdown routine has run to completion. 
247 | await outerActiveTask.promise; 248 | } catch { 249 | // nothing to do 250 | } 251 | } else { 252 | if (trigger.persistent) { 253 | log.warn( 254 | '%s (%s): ignoring event because the trigger is persistent', 255 | trigger.name, 256 | outerActiveTask.id, 257 | ); 258 | 259 | return undefined; 260 | } 261 | 262 | log.warn( 263 | '%s (%s): waiting for task to complete', 264 | trigger.name, 265 | outerActiveTask.id, 266 | ); 267 | 268 | if (outerActiveTask.queued) { 269 | return undefined; 270 | } 271 | 272 | outerActiveTask.queued = true; 273 | 274 | try { 275 | await outerActiveTask.promise; 276 | } catch { 277 | // nothing to do 278 | } 279 | } 280 | } 281 | 282 | if (outerTeardownInitiated) { 283 | log.warn('teardown already initiated'); 284 | 285 | return undefined; 286 | } 287 | 288 | const changedFiles = outerChangedFiles; 289 | 290 | outerChangedFiles = []; 291 | 292 | const taskId = generateShortId(); 293 | 294 | const abortController = createAbortController(trigger); 295 | 296 | const taskPromise = runTask({ 297 | abortController, 298 | changedFiles, 299 | firstEvent, 300 | taskId, 301 | trigger, 302 | }) // eslint-disable-next-line promise/prefer-await-to-then 303 | .finally(() => { 304 | if (taskId === outerActiveTask?.id) { 305 | log.debug('%s (%s): completed task', trigger.name, taskId); 306 | 307 | outerActiveTask = null; 308 | } 309 | }) 310 | // eslint-disable-next-line promise/prefer-await-to-then 311 | .catch((error) => { 312 | log.warn( 313 | { 314 | error: serializeError(error), 315 | }, 316 | '%s (%s): task failed', 317 | trigger.name, 318 | taskId, 319 | ); 320 | }); 321 | 322 | log.debug('%s (%s): started task', trigger.name, taskId); 323 | 324 | // eslint-disable-next-line require-atomic-updates 325 | outerActiveTask = { 326 | abortController, 327 | id: taskId, 328 | promise: taskPromise, 329 | queued: false, 330 | }; 331 | 332 | return taskPromise; 333 | }; 334 | 335 | return { 336 | activeTask: outerActiveTask, 337 | expression: 
trigger.expression, 338 | initialRun: trigger.initialRun, 339 | persistent: trigger.persistent, 340 | teardown: async () => { 341 | if (outerTeardownInitiated) { 342 | log.warn('teardown already initiated'); 343 | 344 | return; 345 | } 346 | 347 | outerTeardownInitiated = true; 348 | 349 | if (outerActiveTask?.abortController) { 350 | await outerActiveTask.abortController.abort(); 351 | } 352 | 353 | if (trigger.onTeardown) { 354 | const taskId = generateShortId(); 355 | 356 | try { 357 | await trigger.onTeardown({ 358 | spawn: createSpawn(taskId, { 359 | throttleOutput: trigger.throttleOutput, 360 | triggerHexColor: trigger.hexColor, 361 | triggerName: trigger.name, 362 | }), 363 | }); 364 | } catch (error) { 365 | log.error( 366 | { 367 | error, 368 | }, 369 | 'teardown produced an error', 370 | ); 371 | } 372 | } 373 | }, 374 | trigger: async (events: readonly FileChangeEvent[]) => { 375 | for (const event of events) { 376 | if (outerChangedFiles.includes(event.filename)) { 377 | continue; 378 | } 379 | 380 | outerChangedFiles.push(event.filename); 381 | } 382 | 383 | try { 384 | await handleSubscriptionEvent(); 385 | } catch (error) { 386 | log.error( 387 | { 388 | error, 389 | }, 390 | 'trigger produced an error', 391 | ); 392 | } 393 | }, 394 | }; 395 | }; 396 | -------------------------------------------------------------------------------- /src/testExpression.test.ts: -------------------------------------------------------------------------------- 1 | import { testExpression } from './testExpression'; 2 | import { expect, it } from 'vitest'; 3 | 4 | it('[allof] evaluates as true if all of the grouped expressions also evaluated as true (true)', () => { 5 | expect( 6 | testExpression(['allof', ['match', 'bar', 'basename']], 'foo/bar'), 7 | ).toBe(true); 8 | }); 9 | 10 | it('[allof] evaluates as true if all of the grouped expressions also evaluated as true (false, true)', () => { 11 | expect( 12 | testExpression( 13 | ['allof', ['match', 'foo', 'basename'], 
['match', 'bar', 'basename']], 14 | 'foo/bar', 15 | ), 16 | ).toBe(false); 17 | }); 18 | 19 | it('[allof] evaluates as true if all of the grouped expressions also evaluated as true (false)', () => { 20 | expect( 21 | testExpression(['allof', ['match', 'foo', 'basename']], 'foo/bar'), 22 | ).toBe(false); 23 | }); 24 | 25 | it('[anyof] evaluates as true if any of the grouped expressions also evaluated as true (true)', () => { 26 | expect( 27 | testExpression(['anyof', ['match', 'bar', 'basename']], 'foo/bar'), 28 | ).toBe(true); 29 | }); 30 | 31 | it('[anyof] evaluates as true if any of the grouped expressions also evaluated as true (false, true)', () => { 32 | expect( 33 | testExpression( 34 | ['anyof', ['match', 'foo', 'basename'], ['match', 'bar', 'basename']], 35 | 'foo/bar', 36 | ), 37 | ).toBe(true); 38 | }); 39 | 40 | it('[anyof] evaluates as true if any of the grouped expressions also evaluated as true (false)', () => { 41 | expect( 42 | testExpression(['anyof', ['match', 'foo', 'basename']], 'foo/bar'), 43 | ).toBe(false); 44 | }); 45 | 46 | it('[dirname] dot directory in subject does not break the pattern', () => { 47 | expect( 48 | testExpression(['dirname', 'node_modules'], 'node_modules/.dist/foo.js'), 49 | ).toBe(true); 50 | }); 51 | 52 | it('[dirname] evaluates as true if a given file has a matching parent directory (foo)', () => { 53 | expect(testExpression(['dirname', 'foo'], 'foo/bar')).toBe(true); 54 | expect(testExpression(['dirname', 'bar'], 'foo/bar/baz')).toBe(true); 55 | expect(testExpression(['dirname', 'bar/baz'], 'foo/bar/baz/qux')).toBe(true); 56 | expect(testExpression(['dirname', 'foo/bar'], 'foo/bar/baz/qux')).toBe(true); 57 | }); 58 | 59 | it('[dirname] evaluates as false if a given file does not have a matching parent directory (bar)', () => { 60 | expect(testExpression(['dirname', 'bar'], 'foo/bar')).toBe(false); 61 | expect(testExpression(['dirname', '/bar'], 'foo/bar/baz')).toBe(false); 62 | expect(testExpression(['dirname', 
'foo'], '.foo/bar')).toBe(false); 63 | }); 64 | 65 | it('[idirname] evaluates as true if a given file has a matching parent directory (foo)', () => { 66 | expect(testExpression(['idirname', 'FOO'], 'foo/bar')).toBe(true); 67 | }); 68 | 69 | it('[idirname] evaluates as false if a given file does not have a matching parent directory (bar)', () => { 70 | expect(testExpression(['idirname', 'BAR'], 'foo/bar')).toBe(false); 71 | }); 72 | 73 | it('[match] matches basename (bar)', () => { 74 | expect(testExpression(['match', 'bar', 'basename'], 'foo/bar')).toBe(true); 75 | }); 76 | 77 | it('[match] matches basename (b*r)', () => { 78 | expect(testExpression(['match', 'b*r', 'basename'], 'foo/bar')).toBe(true); 79 | }); 80 | 81 | it('[match] does not match basename (bar)', () => { 82 | expect(testExpression(['match', 'foo', 'basename'], 'foo/bar')).toBe(false); 83 | }); 84 | 85 | it('[match] matches basename (BAR) (case insensitive)', () => { 86 | expect(testExpression(['imatch', 'bar', 'basename'], 'foo/bar')).toBe(true); 87 | }); 88 | 89 | it('[match] matches basename (B*R) (case insensitive)', () => { 90 | expect(testExpression(['imatch', 'b*r', 'basename'], 'foo/bar')).toBe(true); 91 | }); 92 | 93 | it('[match] does not match basename (BAR) (case insensitive)', () => { 94 | expect(testExpression(['imatch', 'foo', 'basename'], 'foo/bar')).toBe(false); 95 | }); 96 | 97 | it('[not] evaluates as true if the sub-expression evaluated as false, i.e. inverts the sub-expression (true -> false)', () => { 98 | expect(testExpression(['not', ['match', 'bar', 'basename']], 'foo/bar')).toBe( 99 | false, 100 | ); 101 | }); 102 | 103 | it('[not] evaluates as true if the sub-expression evaluated as false, i.e. 
inverts the sub-expression (false -> true)', () => { 104 | expect(testExpression(['not', ['match', 'foo', 'basename']], 'foo/bar')).toBe( 105 | true, 106 | ); 107 | }); 108 | -------------------------------------------------------------------------------- /src/testExpression.ts: -------------------------------------------------------------------------------- 1 | // cspell:words nocase 2 | 3 | import { type Expression } from './types'; 4 | import micromatch from 'micromatch'; 5 | import path from 'node:path'; 6 | 7 | export const testExpression = (expression: Expression, fileName: string) => { 8 | if (path.isAbsolute(fileName)) { 9 | throw new Error('File name must be relative'); 10 | } 11 | 12 | const name = expression[0]; 13 | 14 | if (name === 'allof') { 15 | const nextExpressions = expression.slice(1) as Expression[]; 16 | 17 | return nextExpressions.every((nextExpression) => { 18 | return testExpression(nextExpression, fileName); 19 | }); 20 | } 21 | 22 | if (name === 'anyof') { 23 | const nextExpressions = expression.slice(1) as Expression[]; 24 | 25 | return nextExpressions.some((nextExpression) => { 26 | return testExpression(nextExpression, fileName); 27 | }); 28 | } 29 | 30 | if (name === 'dirname' || name === 'idirname') { 31 | return micromatch.isMatch( 32 | path.dirname(fileName), 33 | '**/' + expression[1] + '/**', 34 | { 35 | dot: true, 36 | nocase: name === 'idirname', 37 | }, 38 | ); 39 | } 40 | 41 | if (name === 'match' || name === 'imatch') { 42 | const pattern = expression[1]; 43 | const subject = 44 | expression[2] === 'wholename' ? 
fileName : path.basename(fileName); 45 | 46 | return micromatch.isMatch(subject, pattern, { 47 | dot: true, 48 | nocase: name === 'imatch', 49 | }); 50 | } 51 | 52 | if (name === 'not') { 53 | const subExpression = expression[1]; 54 | 55 | return !testExpression(subExpression, fileName); 56 | } 57 | 58 | throw new Error('Unknown expression'); 59 | }; 60 | -------------------------------------------------------------------------------- /src/types.ts: -------------------------------------------------------------------------------- 1 | // cspell:words idirname imatch iname wholename 2 | 3 | import { type FileWatchingBackend } from './backends/FileWatchingBackend'; 4 | import { type Logger } from 'roarr'; 5 | import { type ProcessOutput } from 'zx'; 6 | 7 | /* eslint-disable @typescript-eslint/sort-type-union-intersection-members */ 8 | export type Expression = 9 | // Evaluates as true if all of the grouped expressions also evaluated as true. 10 | // https://facebook.github.io/watchman/docs/expr/allof.html 11 | | ['allof', ...Expression[]] 12 | // Evaluates as true if any of the grouped expressions also evaluated as true. 13 | // https://facebook.github.io/watchman/docs/expr/anyof.html 14 | | ['anyof', ...Expression[]] 15 | // Evaluates as true if a given file has a matching parent directory. 16 | // https://facebook.github.io/watchman/docs/expr/dirname.html 17 | | ['dirname' | 'idirname', string] 18 | // Evaluates as true if a glob matches against the basename of the file. 19 | // https://facebook.github.io/watchman/docs/expr/match.html 20 | | ['match' | 'imatch', string] 21 | | ['match' | 'imatch', string, 'basename' | 'wholename'] 22 | // Evaluates as true if the sub-expression evaluated as false, i.e. inverts the sub-expression. 
23 | // https://facebook.github.io/watchman/docs/expr/not.html 24 | | ['not', Expression]; 25 | /* eslint-enable @typescript-eslint/sort-type-union-intersection-members */ 26 | 27 | type JsonValue = 28 | | JsonObject 29 | | JsonValue[] 30 | | boolean 31 | | number 32 | | string 33 | | readonly JsonValue[] 34 | | null 35 | | undefined; 36 | 37 | export type JsonObject = { 38 | [k: string]: JsonValue; 39 | }; 40 | 41 | type File = { 42 | name: string; 43 | }; 44 | 45 | /** 46 | * @property attempt Attempt number (starting with 0) indicating if trigger was retried. 47 | * @property files Describes the list of files that changed. 48 | * @property first Identifies if this is the first event. 49 | * @property signal Instance of AbortSignal used to signal when the routine should be aborted. 50 | * @property spawn Instance of zx bound to AbortSignal. 51 | */ 52 | export type ChangeEvent = { 53 | abortSignal?: AbortSignal; 54 | attempt: number; 55 | files: readonly File[]; 56 | first: boolean; 57 | log: Logger; 58 | spawn: ( 59 | pieces: TemplateStringsArray, 60 | ...args: any[] 61 | ) => Promise; 62 | taskId: string; 63 | }; 64 | 65 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 66 | type OnChangeEventHandler = (event: ChangeEvent) => Promise; 67 | 68 | export type TeardownEvent = { 69 | spawn: ( 70 | pieces: TemplateStringsArray, 71 | ...args: any[] 72 | ) => Promise; 73 | }; 74 | 75 | type OnTeardownEventHandler = (event: TeardownEvent) => Promise; 76 | 77 | /** 78 | * @property factor The exponential factor to use. Default is 2. 79 | * @property maxTimeout The maximum number of milliseconds between two retries. Default is 30,000. 80 | * @property minTimeout The number of milliseconds before starting the first retry. Default is 1000. 81 | * @property retries The maximum amount of times to retry the operation. Default is 10. Seting this to 1 means do it once, then retry it once. 
82 | */ 83 | type Retry = { 84 | factor?: number; 85 | maxTimeout?: number; 86 | minTimeout?: number; 87 | retries: number; 88 | }; 89 | 90 | export type Debounce = { 91 | wait: number; 92 | }; 93 | 94 | export type Throttle = { 95 | delay: number; 96 | }; 97 | 98 | /** 99 | * @property expression watchman expression, e.g. https://facebook.github.io/watchman/docs/expr/allof.html 100 | * @property interruptible Sends abort signal to an ongoing routine, if any. Otherwise, waits for routine to finish. (default: true) 101 | * @property initialRun Indicates whether onChange is run when the script is first initiated. 102 | * @property name Name of the trigger. Used for debugging. Must match /^[a-z0-9-_]+$/ pattern and must be unique. 103 | * @property hexColor Hex color code used for logging. 104 | * @property onChange Routine that is executed when file changes are detected. 105 | * @property persistent Label a task as persistent if it is a long-running process, such as a dev server or --watch mode. 
106 | */ 107 | export type TriggerInput = { 108 | expression: Expression; 109 | hexColor?: string; 110 | initialRun?: boolean; 111 | interruptible?: boolean; 112 | name: string; 113 | onChange: OnChangeEventHandler; 114 | onTeardown?: OnTeardownEventHandler; 115 | persistent?: boolean; 116 | retry?: Retry; 117 | throttleOutput?: Throttle; 118 | }; 119 | 120 | export type Trigger = { 121 | abortSignal: AbortSignal; 122 | cwd?: string; 123 | expression: Expression; 124 | hexColor?: string; 125 | id: string; 126 | initialRun: boolean; 127 | interruptible: boolean; 128 | name: string; 129 | onChange: OnChangeEventHandler; 130 | onTeardown?: OnTeardownEventHandler; 131 | persistent: boolean; 132 | retry: Retry; 133 | throttleOutput: Throttle; 134 | }; 135 | 136 | export type WatcherConstructable = new (project: string) => FileWatchingBackend; 137 | 138 | /** 139 | * @property project absolute path to the directory to watch 140 | */ 141 | export type TurbowatchConfigurationInput = { 142 | readonly Watcher?: WatcherConstructable; 143 | readonly abortController?: AbortController; 144 | readonly cwd?: string; 145 | readonly debounce?: Debounce; 146 | readonly project: string; 147 | readonly triggers: readonly TriggerInput[]; 148 | }; 149 | 150 | export type TurbowatchConfiguration = { 151 | readonly Watcher: WatcherConstructable; 152 | readonly abortController: AbortController; 153 | readonly cwd?: string; 154 | readonly debounce: Debounce; 155 | readonly project: string; 156 | readonly triggers: readonly TriggerInput[]; 157 | }; 158 | 159 | export type FileChangeEvent = { 160 | filename: string; 161 | hash?: string | null; 162 | }; 163 | 164 | /** 165 | * @property queued Indicates that a follow action has been queued. 
166 | */ 167 | export type ActiveTask = { 168 | abortController: AbortController | null; 169 | id: string; 170 | promise: Promise; 171 | queued: boolean; 172 | }; 173 | 174 | export type Subscription = { 175 | activeTask: ActiveTask | null; 176 | expression: Expression; 177 | initialRun: boolean; 178 | persistent: boolean; 179 | teardown: () => Promise; 180 | trigger: (events: readonly FileChangeEvent[]) => Promise; 181 | }; 182 | 183 | export type TurbowatchController = { 184 | shutdown: () => Promise; 185 | }; 186 | -------------------------------------------------------------------------------- /src/watch.test.ts: -------------------------------------------------------------------------------- 1 | import { watch } from './watch'; 2 | import fs from 'node:fs/promises'; 3 | import path from 'node:path'; 4 | import { setTimeout } from 'node:timers/promises'; 5 | import { type Message } from 'roarr'; 6 | import * as sinon from 'sinon'; 7 | import { afterEach, beforeEach, expect, it } from 'vitest'; 8 | 9 | const spyRoarr = () => { 10 | // eslint-disable-next-line node/no-process-env 11 | const { ROARR_LOG } = process.env; 12 | 13 | if (ROARR_LOG !== 'true') { 14 | throw new Error('ROARR_LOG must be set to "true"'); 15 | } 16 | 17 | const messages: Message[] = []; 18 | 19 | globalThis.ROARR.write = (message) => { 20 | const payload = JSON.parse(message); 21 | 22 | messages.push(payload); 23 | }; 24 | 25 | return { 26 | getMessages: () => { 27 | return messages; 28 | }, 29 | }; 30 | }; 31 | 32 | const fixturesPath = path.resolve(__dirname, '.fixtures'); 33 | 34 | beforeEach(async () => { 35 | await fs.rm(fixturesPath, { 36 | force: true, 37 | recursive: true, 38 | }); 39 | 40 | await fs.mkdir(fixturesPath); 41 | await fs.writeFile(path.join(fixturesPath, 'foo'), ''); 42 | }); 43 | 44 | afterEach(async () => { 45 | await fs.rm(fixturesPath, { 46 | force: true, 47 | recursive: true, 48 | }); 49 | }); 50 | 51 | it('detects file change', async () => { 52 | const onChange 
= sinon.stub(); 53 | 54 | const { shutdown } = await watch({ 55 | debounce: { 56 | wait: 100, 57 | }, 58 | project: fixturesPath, 59 | triggers: [ 60 | { 61 | expression: ['match', 'foo', 'basename'], 62 | initialRun: false, 63 | name: 'foo', 64 | onChange, 65 | }, 66 | ], 67 | }); 68 | 69 | await fs.writeFile(path.join(fixturesPath, 'foo'), ''); 70 | 71 | await setTimeout(1_000); 72 | 73 | expect(onChange.called).toBe(true); 74 | 75 | await shutdown(); 76 | }); 77 | 78 | it('ignores file change events if the file hash is the same', async () => { 79 | const onChange = sinon.stub(); 80 | 81 | const { shutdown } = await watch({ 82 | debounce: { 83 | wait: 100, 84 | }, 85 | project: fixturesPath, 86 | triggers: [ 87 | { 88 | expression: ['match', 'foo', 'basename'], 89 | initialRun: false, 90 | name: 'foo', 91 | onChange, 92 | }, 93 | ], 94 | }); 95 | 96 | await fs.writeFile(path.join(fixturesPath, 'foo'), ''); 97 | 98 | await setTimeout(1_000); 99 | 100 | await fs.writeFile(path.join(fixturesPath, 'foo'), ''); 101 | 102 | await setTimeout(1_000); 103 | 104 | expect(onChange.callCount).toBe(1); 105 | 106 | await shutdown(); 107 | }); 108 | 109 | // While desirable, at the moment this is not possible to implement. 110 | // Implementing this would require to index all files when the watch starts. 
111 | it.skip('ignores file change events if the file hash is the same; file existed before watch started', async () => { 112 | const onChange = sinon.stub(); 113 | 114 | await fs.writeFile(path.join(fixturesPath, 'foo'), ''); 115 | 116 | const { shutdown } = await watch({ 117 | debounce: { 118 | wait: 100, 119 | }, 120 | project: fixturesPath, 121 | triggers: [ 122 | { 123 | expression: ['match', 'foo', 'basename'], 124 | initialRun: false, 125 | name: 'foo', 126 | onChange, 127 | }, 128 | ], 129 | }); 130 | 131 | await fs.writeFile(path.join(fixturesPath, 'foo'), ''); 132 | 133 | await setTimeout(1_000); 134 | 135 | expect(onChange.callCount).toBe(0); 136 | 137 | await shutdown(); 138 | }); 139 | 140 | // https://github.com/gajus/turbowatch/issues/17 141 | // Not clear why this is failing in CI/CD. 142 | it.skip('does not log every file change', async () => { 143 | const onChange = sinon.stub(); 144 | 145 | const roarrSpy = spyRoarr(); 146 | 147 | const { shutdown } = await watch({ 148 | debounce: { 149 | wait: 100, 150 | }, 151 | project: fixturesPath, 152 | triggers: [ 153 | { 154 | expression: ['match', 'foo', 'basename'], 155 | initialRun: false, 156 | name: 'foo', 157 | onChange, 158 | }, 159 | ], 160 | }); 161 | 162 | for (let index = 0; index++ < 100; ) { 163 | await fs.writeFile(path.join(fixturesPath, 'foo'), ''); 164 | } 165 | 166 | await setTimeout(1_000); 167 | 168 | expect(onChange.called).toBe(true); 169 | 170 | expect(roarrSpy.getMessages().length).toBeLessThan(20); 171 | 172 | await shutdown(); 173 | }); 174 | 175 | it('executes the initial run (persistent)', async () => { 176 | const onChange = sinon.stub(); 177 | 178 | const { shutdown } = await watch({ 179 | debounce: { 180 | wait: 100, 181 | }, 182 | project: fixturesPath, 183 | triggers: [ 184 | { 185 | expression: ['match', 'foo', 'basename'], 186 | name: 'foo', 187 | onChange, 188 | persistent: true, 189 | }, 190 | ], 191 | }); 192 | 193 | expect(onChange.called).toBe(true); 194 | 195 | 
await shutdown(); 196 | }); 197 | 198 | it('executes the initial run (non-persistent)', async () => { 199 | const onChange = sinon.stub(); 200 | 201 | const { shutdown } = await watch({ 202 | debounce: { 203 | wait: 100, 204 | }, 205 | project: fixturesPath, 206 | triggers: [ 207 | { 208 | expression: ['match', 'foo', 'basename'], 209 | name: 'foo', 210 | onChange, 211 | persistent: false, 212 | }, 213 | ], 214 | }); 215 | 216 | expect(onChange.called).toBe(true); 217 | 218 | await shutdown(); 219 | }); 220 | -------------------------------------------------------------------------------- /src/watch.ts: -------------------------------------------------------------------------------- 1 | import { TurboWatcher } from './backends/TurboWatcher'; 2 | import { createFileChangeQueue } from './createFileChangeQueue'; 3 | import { generateShortId } from './generateShortId'; 4 | import { Logger } from './Logger'; 5 | import { subscribe } from './subscribe'; 6 | import { 7 | type JsonObject, 8 | type Subscription, 9 | type TurbowatchConfiguration, 10 | type TurbowatchConfigurationInput, 11 | type TurbowatchController, 12 | } from './types'; 13 | import { serializeError } from 'serialize-error'; 14 | 15 | const log = Logger.child({ 16 | namespace: 'watch', 17 | }); 18 | 19 | export const watch = ( 20 | configurationInput: TurbowatchConfigurationInput, 21 | ): Promise => { 22 | const { 23 | abortController, 24 | cwd, 25 | project, 26 | triggers, 27 | debounce: userDebounce, 28 | Watcher, 29 | }: TurbowatchConfiguration = { 30 | abortController: new AbortController(), 31 | // as far as I can tell, this is a bug in unicorn/no-unused-properties 32 | // https://github.com/sindresorhus/eslint-plugin-unicorn/issues/2051 33 | // eslint-disable-next-line unicorn/no-unused-properties 34 | debounce: { 35 | wait: 1_000, 36 | }, 37 | 38 | // eslint-disable-next-line unicorn/no-unused-properties 39 | Watcher: TurboWatcher, 40 | ...configurationInput, 41 | }; 42 | 43 | const abortSignal = 
abortController.signal; 44 | 45 | const subscriptions: Subscription[] = []; 46 | 47 | const watcher = new Watcher(project); 48 | 49 | let terminating = false; 50 | 51 | const shutdown = async () => { 52 | if (terminating) { 53 | return; 54 | } 55 | 56 | terminating = true; 57 | 58 | await watcher.close(); 59 | 60 | abortController.abort(); 61 | 62 | for (const subscription of subscriptions) { 63 | const { activeTask } = subscription; 64 | 65 | if (activeTask?.promise) { 66 | await activeTask?.promise; 67 | } 68 | } 69 | 70 | for (const subscription of subscriptions) { 71 | const { teardown } = subscription; 72 | 73 | if (teardown) { 74 | await teardown(); 75 | } 76 | } 77 | }; 78 | 79 | if (abortSignal) { 80 | abortSignal.addEventListener( 81 | 'abort', 82 | () => { 83 | shutdown(); 84 | }, 85 | { 86 | once: true, 87 | }, 88 | ); 89 | } 90 | 91 | for (const trigger of triggers) { 92 | const initialRun = trigger.initialRun ?? true; 93 | const persistent = trigger.persistent ?? false; 94 | 95 | if (persistent && !initialRun) { 96 | throw new Error('Persistent triggers must have initialRun set to true.'); 97 | } 98 | 99 | subscriptions.push( 100 | subscribe({ 101 | abortSignal, 102 | cwd, 103 | expression: trigger.expression, 104 | hexColor: trigger.hexColor, 105 | id: generateShortId(), 106 | initialRun, 107 | interruptible: trigger.interruptible ?? true, 108 | name: trigger.name, 109 | onChange: trigger.onChange, 110 | onTeardown: trigger.onTeardown, 111 | persistent, 112 | retry: { 113 | retries: 3, 114 | ...trigger.retry, 115 | }, 116 | throttleOutput: trigger.throttleOutput ?? 
{ delay: 1_000 }, 117 | }), 118 | ); 119 | } 120 | 121 | let ready = false; 122 | 123 | const fileChangeQueue = createFileChangeQueue({ 124 | abortSignal, 125 | project, 126 | subscriptions, 127 | userDebounce, 128 | }); 129 | 130 | watcher.on('change', (event) => { 131 | if (!ready) { 132 | log.warn('ignoring change event before ready'); 133 | 134 | return; 135 | } 136 | 137 | fileChangeQueue.trigger(event); 138 | }); 139 | 140 | return new Promise((resolve, reject) => { 141 | watcher.on('error', (error) => { 142 | log.error( 143 | { 144 | error: serializeError(error) as unknown as JsonObject, 145 | }, 146 | 'could not watch project', 147 | ); 148 | 149 | if (ready) { 150 | shutdown(); 151 | } else { 152 | reject(error); 153 | } 154 | }); 155 | 156 | watcher.on('ready', () => { 157 | ready = true; 158 | 159 | if (!terminating) { 160 | log.info('triggering initial runs'); 161 | 162 | for (const subscription of subscriptions) { 163 | if (subscription.initialRun) { 164 | void subscription.trigger([]); 165 | } 166 | } 167 | 168 | log.info('ready for file changes'); 169 | } 170 | 171 | resolve({ 172 | shutdown, 173 | }); 174 | }); 175 | }); 176 | }; 177 | -------------------------------------------------------------------------------- /tsconfig.build.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "noEmit": false, 4 | "outDir": "dist" 5 | }, 6 | "extends": "./tsconfig.json", 7 | "include": [ 8 | "src" 9 | ] 10 | } -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "allowSyntheticDefaultImports": true, 4 | "declaration": true, 5 | "declarationMap": true, 6 | "esModuleInterop": true, 7 | "lib": [ 8 | "es2021" 9 | ], 10 | "module": "commonjs", 11 | "moduleResolution": "node", 12 | "noEmit": true, 13 | "noImplicitAny": false, 14 | 
"noImplicitReturns": true, 15 | "skipLibCheck": true, 16 | "sourceMap": true, 17 | "strict": true, 18 | "target": "es2018", 19 | "useUnknownInCatchVariables": false 20 | }, 21 | "include": [ 22 | "src" 23 | ] 24 | } -------------------------------------------------------------------------------- /vitest.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from 'vitest/config'; 2 | 3 | export default defineConfig({ 4 | test: { 5 | singleThread: true, 6 | testTimeout: 5_000, 7 | }, 8 | }); 9 | --------------------------------------------------------------------------------