├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.yml
│   │   ├── config.yml
│   │   └── feature_request.yml
│   └── workflows
│       └── testing.yml
├── .gitignore
├── .npmignore
├── CHANGELOG.md
├── LICENSE
├── README.md
├── bench
│   ├── running-time
│   │   └── index.js
│   └── size
│       └── index.js
├── img
│   └── logo.svg
├── lib
│   ├── bin.js
│   ├── cmd-runner.js
│   ├── config.js
│   ├── errors.js
│   ├── executor.js
│   ├── git-workflow.js
│   ├── git.js
│   ├── glob-to-regex.js
│   ├── index.js
│   ├── renderer.js
│   ├── reporter.js
│   ├── runner.js
│   └── utils.js
├── package.json
├── pnpm-lock.yaml
└── test
    ├── cmd-runner.test.js
    ├── config.test.js
    ├── errors.test.js
    ├── fixtures
    │   ├── config
    │   │   ├── cjs-in-js
    │   │   │   └── nano-staged.js
    │   │   ├── cjs
    │   │   │   └── nano-staged.cjs
    │   │   ├── esm-in-js
    │   │   │   └── nano-staged.js
    │   │   ├── json
    │   │   │   └── nano-staged.json
    │   │   ├── mjs
    │   │   │   └── nano-staged.mjs
    │   │   ├── no-ext
    │   │   │   └── .nanostagedrc
    │   │   └── test-project
    │   │       ├── dir
    │   │       │   └── index.js
    │   │       ├── index.js
    │   │       └── package.json
    │   └── simple
    │       └── .gitignore
    ├── git-workflow.test.js
    ├── git.test.js
    ├── glob-to-regex.test.js
    ├── index.test.js
    ├── renderer.test.js
    ├── reporter.test.js
    ├── runner.test.js
    ├── utils.test.js
    └── utils
        └── index.js
/.github/ISSUE_TEMPLATE/bug_report.yml:
--------------------------------------------------------------------------------
1 | name: "\U0001F41E Bug report"
2 | description: Report an issue with Nano Staged
3 | labels: [pending triage]
4 | body:
5 | - type: markdown
6 | attributes:
7 | value: |
8 | Thanks for taking the time to fill out this bug report!
9 | - type: textarea
10 | id: bug-description
11 | attributes:
12 | label: Describe the bug
13 | description: A clear and concise description of what the bug is. If you intend to submit a PR for this issue, tell us in the description. Thanks!
14 | placeholder: Bug description
15 | validations:
16 | required: true
17 | - type: textarea
18 | id: reproduction
19 | attributes:
20 | label: Reproduction
21 | placeholder: Steps to reproduce
22 | validations:
23 | required: true
24 | - type: textarea
25 | id: system-info
26 | attributes:
27 | label: System Info
28 | description: Output of `npx envinfo --system --npmPackages '{nano-staged}' --binaries --browsers`
29 | render: shell
30 | placeholder: System, Binaries, Browsers
31 | validations:
32 | required: true
33 | - type: dropdown
34 | id: package-manager
35 | attributes:
36 | label: Used Package Manager
37 | description: Select the used package manager
38 | options:
39 | - npm
40 | - yarn
41 | - pnpm
42 | validations:
43 | required: true
44 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | blank_issues_enabled: false
2 | contact_links:
3 | - name: Discord Chat
4 | url: https://discord.gg/YeW2RA7UD4
5 | about: Ask questions and discuss with other Nano Staged users in real time.
6 | - name: Questions & Discussions
7 | url: https://github.com/usmanyunusov/nano-staged/discussions
8 | about: Use GitHub discussions for message-board style questions and discussions.
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.yml:
--------------------------------------------------------------------------------
1 | name: "\U0001F680 New feature proposal"
2 | description: Propose a new feature to be added to Nano Staged
3 | labels: ['enhancement: pending triage']
4 | body:
5 | - type: markdown
6 | attributes:
7 | value: |
8 | Thanks for your interest in the project and taking the time to fill out this feature report!
9 | - type: textarea
10 | id: feature-description
11 | attributes:
12 | label: Clear and concise description of the problem
13 | description: 'As a developer using Nano Staged I want [goal / wish] so that [benefit]. If you intend to submit a PR for this issue, tell us in the description. Thanks!'
14 | validations:
15 | required: true
16 | - type: textarea
17 | id: suggested-solution
18 | attributes:
19 | label: Suggested solution
20 | description: 'We could provide following implementation...'
21 | validations:
22 | required: true
23 | - type: textarea
24 | id: alternative
25 | attributes:
26 | label: Alternative
27 | description: Clear and concise description of any alternative solutions or features you've considered.
28 | - type: textarea
29 | id: additional-context
30 | attributes:
31 | label: Additional context
32 | description: Any other context or screenshots about the feature request here.
33 |
--------------------------------------------------------------------------------
/.github/workflows/testing.yml:
--------------------------------------------------------------------------------
1 | name: Testing
2 | on:
3 | push:
4 | branches: [master]
5 | pull_request:
6 | branches: [master]
7 | jobs:
8 | full:
9 | name: Node.js 17 Full
10 | runs-on: ubuntu-latest
11 | steps:
12 | - name: Checkout the repository
13 | uses: actions/checkout@v2
14 | - name: Install pnpm
15 | uses: pnpm/action-setup@v2
16 | with:
17 | version: 6.32.9
18 | - name: Install Node.js
19 | uses: actions/setup-node@v2
20 | with:
21 | node-version: 17
22 | cache: pnpm
23 | - name: Install dependencies
24 | run: pnpm install --frozen-lockfile
25 | - name: Run tests
26 | run: pnpm test
27 | env:
28 | FORCE_COLOR: 2
29 | short:
30 | runs-on: ${{ matrix.os }}
31 | strategy:
32 | matrix:
33 | node-version:
34 | - 12
35 | - 14
36 | - 16
37 | - 17
38 | os:
39 | - ubuntu-latest
40 | - macos-latest
41 | - windows-latest
42 | name: Node.js v${{ matrix.node-version }} on ${{ matrix.os }}
43 | steps:
44 | - if: matrix.os == 'windows-latest'
45 | run: git config --global core.autocrlf true
46 | - name: Checkout the repository
47 | uses: actions/checkout@v2
48 | - name: Install pnpm
49 | uses: pnpm/action-setup@v2
50 | with:
51 | version: 6.32.9
52 | - name: Install Node.js ${{ matrix.node-version }}
53 | uses: actions/setup-node@v2
54 | with:
55 | node-version: ${{ matrix.node-version }}
56 | cache: pnpm
57 | - name: Install dependencies
58 | run: pnpm install --frozen-lockfile
59 | - name: Run unit tests
60 | run: pnpm unit
61 | env:
62 | FORCE_COLOR: 2
63 | benchmark:
64 | name: Benchmark
65 | runs-on: ubuntu-latest
66 | steps:
67 | - name: Checkout the repository
68 | uses: actions/checkout@v2
69 | - name: Install pnpm
70 | uses: pnpm/action-setup@v2
71 | with:
72 | version: 6.32.9
73 | - name: Install Node.js
74 | uses: actions/setup-node@v2
75 | with:
76 | node-version: 16
77 | cache: pnpm
78 | - name: Install dependencies
79 | run: pnpm install --frozen-lockfile
80 | - name: Running time benchmark
81 | run: pnpm bench
82 | env:
83 | FORCE_COLOR: 2
84 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules/
2 |
3 | coverage/
4 | nano-staged-*
--------------------------------------------------------------------------------
/.npmignore:
--------------------------------------------------------------------------------
1 | pnpm-lock.yaml
2 |
3 | coverage/
4 | bench/
5 | test/
6 | img/
7 |
8 | **/*.test.*
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Change Log
2 |
3 | This project adheres to [Semantic Versioning](http://semver.org/).
4 |
5 | ## 0.8
6 |
7 | - Support `.nanostagedrc` config files (by Azat S.)
8 |
9 | ## 0.7
10 |
11 | - Support git config dir location if `.git` is a file
12 |
13 | ## 0.6
14 |
15 | - Add JS API
16 | - Add multispinner output
17 | - Support CI terminal
18 | - Support run script for `diff` files
19 |
20 | ## 0.5
21 |
22 | - Support run script for unstaged files
23 | - Added allowing empty git commit
24 | - Update docs
25 | - Replace yarn with pnpm
26 | - Add logo
27 |
28 | ## 0.4.5
29 |
30 | - Fix publish npm
31 |
32 | ## 0.4.4
33 |
34 | - Added support for no-color flags
35 |
36 | ## 0.4.3
37 |
38 | - Added run task output status
39 |
40 | ## 0.4.2
41 |
42 | - Better output on script error
43 | - Updated dependencies
44 | - Add support for no-colors output
45 |
46 | ## 0.4.1
47 |
48 | - Postpublish npm
49 |
50 | ## 0.4
51 |
52 | - Support windows platform
53 |
54 | ## 0.3.1
55 |
56 | - Fix glob parse
57 | - Added globstar to glob parse
58 | - Updated docs
59 |
60 | ## 0.3
61 |
62 | - Added config load from file
63 |
64 | ## 0.2.1
65 |
66 | - Fixed test
67 |
68 | ## 0.2
69 |
70 | - Added external configs (`nano-staged.json`, `.nano-staged.json`)
71 | - Update tests and doc
72 |
73 | ## 0.1.5
74 |
75 | - Update docs.
76 |
77 | ## 0.1.4
78 |
79 | - Fixed to run `./node_modules/.bin/nano-staged`.
80 |
81 | ## 0.1.3
82 |
83 | - Removed `globstar`.
84 |
85 | ## 0.1.2
86 |
87 | - Update docs.
88 |
89 | ## 0.1.1
90 |
91 | - Added size benchmarks.
92 |
93 | ## 0.1
94 |
95 | - Initial release.
96 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright 2021 Usman Yunusov
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy of
6 | this software and associated documentation files (the "Software"), to deal in
7 | the Software without restriction, including without limitation the rights to
8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
9 | the Software, and to permit persons to whom the Software is furnished to do so,
10 | subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
17 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
18 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
19 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
20 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | # Nano Staged
4 | Tiny tool to run commands for modified, staged, and committed files in a Git repository. It helps speed up running tests, linters, scripts, and more.
5 |
6 |
7 | ## Features
8 |
9 | - 📦 **Small**: [47kB](https://packagephobia.com/result?p=nano-staged) (142x+ lighter than **lint-staged**).
10 | - 🥇 **Single dependency** ([`picocolors`](https://github.com/alexeyraspopov/picocolors)).
11 | - ☯️ **Supports multiple file states: staged, unstaged, last-commit, changed, etc.**
12 |
13 | ## Benchmarks
14 |
15 | Benchmark of running time for 10 files:
16 |
17 | ```diff
18 | $ node bench/running-time/index.js
19 | - lint-staged 1.394 ms
20 | + nano-staged 0.968 ms
21 | ```
22 |
23 | The size of `node_modules` including sub-dependencies:
24 |
25 | ```diff
26 | $ node bench/size/index.js
27 | Data from packagephobia.com
28 | - lint-staged 6688 kB
29 | + nano-staged 47 kB
30 | ```
31 |
32 | The performance results were generated on an MBP Late 2013, 2.3 GHz Intel Core i7 by running `npm run bench` in the library folder. See [bench/running-time/index.js](https://github.com/usmanyunusov/nano-staged/blob/master/bench/running-time/index.js).
33 |
34 | ## Usage
35 |
36 | ### Getting Started
37 |
38 | 1. Install `nano-staged`:
39 |
40 | ```terminal
41 | npm install --save-dev nano-staged
42 | ```
43 | or
44 | ```terminal
45 | yarn add nano-staged -D
46 | ```
47 |
48 | 2. Add the `nano-staged` section and the commands to your `package.json`:
49 |
50 | For example:
51 |
52 | ```json
53 | "nano-staged": {
54 | "*.{js,ts}": "prettier --write",
55 | "*.css": ["stylelint", "eslint --fix"]
56 | },
57 | ```
58 |
59 | 3. Run commands with Nano Staged:
60 |
61 | ```terminal
62 | ./node_modules/.bin/nano-staged
63 | ```
64 |
65 | > By default, Nano Staged runs the commands from the config for staged files.
66 |
67 | ### Pre-commit Hook
68 |
69 | > You can use Nano Staged with pre-commit tools to run it automatically before every commit.
70 |
71 |
72 | #### Simple Git Hooks
73 |
74 | 1. Install `simple-git-hooks` as a dev dependency:
75 |
76 | ```terminal
77 | npm install simple-git-hooks --save-dev
78 | ```
79 |
80 | 2. Add the `simple-git-hooks` section to your `package.json` and fill in the `pre-commit` hook:
81 |
82 | For example:
83 |
84 | ```json
85 | "simple-git-hooks": {
86 | "pre-commit": "./node_modules/.bin/nano-staged"
87 | }
88 | ```
89 |
90 | 3. Run the CLI script to update the git hooks with the commands from the config:
91 |
92 | ```terminal
93 | npx simple-git-hooks
94 | ```
95 |
96 | 4. To automatically have Git hooks enabled after install, edit `package.json`:
97 |
98 | ```json
99 | "scripts": {
100 | "postinstall": "npx simple-git-hooks"
101 | }
102 | ```
103 |
104 |
105 |
106 |
107 | #### Husky
108 |
109 | 1. Install `husky` as a dev dependency:
110 |
111 | ```terminal
112 | npm install husky --save-dev
113 | ```
114 |
115 | 2. Enable Git hooks:
116 |
117 | ```terminal
118 | npx husky install
119 | ```
120 |
121 | 3. Add a command to a hook:
122 |
123 | ```terminal
124 | npx husky add .husky/pre-commit "./node_modules/.bin/nano-staged"
125 | ```
126 |
127 | 4. To automatically have Git hooks enabled after install, edit `package.json`:
128 |
129 | ```json
130 | "scripts": {
131 | "postinstall": "npx husky install"
132 | }
133 | ```
134 |
135 |
136 |
137 | ## Configuration
138 |
139 | Nano Staged supports multiple ways to define config.
140 |
141 | 1. `nano-staged` section in `package.json`:
142 |
143 | ```json
144 | "nano-staged": {
145 | "*": "your-cmd",
146 | "*.ext": ["your-cmd", "your-cmd"]
147 | }
148 | ```
149 |
150 | 2. or a separate `.nano-staged.json`, `nano-staged.json` or `.nanostagedrc` config file:
151 |
152 | ```json
153 | {
154 | "*": "your-cmd",
155 | "*.ext": ["your-cmd", "your-cmd"]
156 | }
157 | ```
158 |
159 | 3. or a more flexible `.nano-staged.cjs` or `nano-staged.cjs` config file using CommonJS modules:
160 |
161 | ```js
162 | module.exports = {
163 | '*': 'your-cmd',
164 | '*.ext': ['your-cmd', 'your-cmd'],
165 | }
166 | ```
167 |
168 | 4. or a more flexible `.nano-staged.mjs` or `nano-staged.mjs` config file using ECMAScript modules:
169 |
170 | ```js
171 | export default {
172 | '*': 'your-cmd',
173 | '*.ext': ['your-cmd', 'your-cmd'],
174 | }
175 | ```
176 |
177 | 5. or a more flexible `.nano-staged.js` or `nano-staged.js` config file:
178 |
179 | ```js
180 | // package.json => "type": "module"
181 | export default {
182 | '*': 'your-cmd',
183 | '*.ext': ['your-cmd', 'your-cmd'],
184 | }
185 |
186 | // package.json => "type": "commonjs"
187 | module.exports = {
188 | '*': 'your-cmd',
189 | '*.ext': ['your-cmd', 'your-cmd'],
190 | }
191 | ```
192 |
193 | ### Format priorities:
194 |
195 | If there are multiple configuration files in the same directory, Nano Staged will only use one. The priority order is as follows:
196 |
197 | 1. `.nano-staged.js`
198 | 2. `nano-staged.js`
199 | 3. `.nano-staged.cjs`
200 | 4. `nano-staged.cjs`
201 | 5. `.nano-staged.mjs`
202 | 6. `nano-staged.mjs`
203 | 7. `.nano-staged.json`
204 | 8. `nano-staged.json`
205 | 9. `.nanostagedrc`
206 | 10. `package.json`
207 |
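When no config is passed explicitly, Nano Staged starts in the current directory and walks up toward the filesystem root, taking the first file from the list above that exists. A simplified sketch of that lookup, mirroring the behaviour of `lib/config.js` (parsing and error handling omitted; `findConfigFile` is just an illustrative name):

```js
import { resolve } from 'path'
import fs from 'fs'

// Same file names, in the same priority order, as the list above
const places = [
  '.nano-staged.js', 'nano-staged.js',
  '.nano-staged.cjs', 'nano-staged.cjs',
  '.nano-staged.mjs', 'nano-staged.mjs',
  '.nano-staged.json', 'nano-staged.json',
  '.nanostagedrc', 'package.json',
]

function findConfigFile(cwd = process.cwd()) {
  let dir = resolve(cwd)
  while (true) {
    for (const place of places) {
      const file = resolve(dir, place)
      if (fs.existsSync(file) && fs.lstatSync(file).isFile()) return file
    }
    const parent = resolve(dir, '..')
    if (parent === dir) return undefined // reached the filesystem root without a match
    dir = parent
  }
}
```
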
208 | ### Config Function API:
209 |
210 | JS config files may export either a single function or an object:
211 |
212 | ```js
213 | export default (api) => {
214 | const jsFiles = api.filenames.filter((file) => path.extname(file) === '.js')
215 |
216 | return [`eslint --fix ${jsFiles.join(' ')}`, `prettier --write ${jsFiles.join(' ')}`]
217 | }
218 | ```
219 |
220 | ```js
221 | export default {
222 | '*.js': (api) => `eslint --fix ${api.filenames.join(' ')}`,
223 | }
224 | ```
225 |
226 | The `api` object exposes:
227 |
228 | `api.filenames` - the filenames being processed
229 |
230 | `api.type` - the run type: `staged`, `unstaged`, or `diff`
231 |
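Since the run type is exposed to function configs, a command can vary per mode; a minimal sketch (the `eslint` commands are only an illustration):

```js
export default {
  '*.js': (api) =>
    api.type === 'unstaged'
      ? `eslint ${api.filenames.join(' ')}` // check only, leave files untouched
      : `eslint --fix ${api.filenames.join(' ')}`,
}
```
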
232 | ## Command Line Interface
233 |
234 | #### `--config [path]` or `-c [path]`
235 |
236 | Path to the file that contains your configuration object. The path can be either absolute or relative to the directory that your process is running from.
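
For example, pointing Nano Staged at a config stored outside the project root (the path is only an illustration):

```terminal
./node_modules/.bin/nano-staged --config ./config/nano-staged.json
```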
237 |
238 | #### `--unstaged` or `-u`
239 |
240 | Run commands from the config only for unstaged git files. By default, Nano Staged uses only staged git files.
241 |
242 | #### `--diff [ref1 ref2]`
243 |
244 | Run commands on files changed between the working tree and the index or a tree, between the index and a tree, between two trees, or between two commits (commit hashes).
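
For example, to run the configured commands on files changed between the previous commit and `HEAD` (the refs are only an illustration):

```terminal
./node_modules/.bin/nano-staged --diff HEAD~1 HEAD
```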
245 |
246 | #### `--allow-empty`
247 |
248 | Allows creating an empty commit.
249 |
250 | ## Thanks
251 |
252 | Special thanks to [lint-staged](https://github.com/okonet/lint-staged). Some code was borrowed from it.
253 |
254 | ## Community
255 |
256 | The Nano Staged community can be found on [GitHub Discussions](https://github.com/usmanyunusov/nano-staged/discussions), where you can ask questions, voice ideas, and share your projects.
257 |
--------------------------------------------------------------------------------
/bench/running-time/index.js:
--------------------------------------------------------------------------------
1 | import { execFile } from 'child_process'
2 | import { resolve, dirname } from 'path'
3 | import { fileURLToPath } from 'url'
4 | import { promisify } from 'util'
5 | import { nanoid } from 'nanoid'
6 | import fs from 'fs-extra'
7 |
8 | import { createGit } from '../../lib/git.js'
9 |
10 | let spawn = promisify(execFile)
11 | let currentDir = dirname(fileURLToPath(import.meta.url))
12 | let cwd = resolve(currentDir, `nano-staged-${nanoid()}`)
13 | let runners = ['lint-staged', 'nano-staged']
14 | let before
15 |
16 | async function makeDir(dir = cwd) {
17 | await fs.mkdir(dir)
18 | }
19 |
20 | async function appendFile(filename, content, dir = cwd) {
21 | await fs.appendFile(resolve(dir, filename), content)
22 | }
23 |
24 | async function execGit(args) {
25 | let git = createGit(cwd)
26 | await git.exec(args, { cwd })
27 | }
28 |
29 | async function initGitRepo() {
30 | await execGit(['init'])
31 | await execGit(['config', 'user.name', '"test"'])
32 | await execGit(['config', 'user.email', '"test@test.com"'])
33 | await appendFile('README.md', '# Test\n')
34 | await appendFile('.gitignore', `node_modules/\n`)
35 | await execGit(['add', 'README.md'])
36 | await execGit(['commit', '-m initial commit'])
37 | }
38 |
39 | async function initProject() {
40 | await appendFile(
41 | 'package.json',
42 | `{
43 | "lint-staged": {
44 | "*.js": "prettier --write",
45 | "*.css": "prettier --write"
46 | },
47 | "nano-staged": {
48 | "*.js": "prettier --write",
49 | "*.css": "prettier --write"
50 | }
51 | }`
52 | )
53 |
54 | await spawn('yarn', ['add', 'lint-staged'], { cwd })
55 | await spawn('yarn', ['add', resolve(cwd, '../../../../nano-staged')], {
56 | cwd,
57 | })
58 | await appendFile('a.js', 'var test = {};')
59 | await appendFile('b.js', 'var test = {};')
60 | await appendFile('c.js', 'var test = {};')
61 | await appendFile('d.js', 'var test = {};')
62 | await appendFile('e.js', 'var test = {};')
63 | await appendFile('a.css', 'body {color: red;}')
64 | await appendFile('b.css', 'body {color: red;}')
65 | await appendFile('c.css', 'body {color: red;}')
66 | await appendFile('d.css', 'body {color: red;}')
67 | await appendFile('e.css', 'body {color: red;}')
68 |
69 | await execGit(['add', '--all'])
70 | }
71 |
72 | function showTime(name) {
73 | let prefix = name === 'nano-staged' ? '+ ' : '- '
74 | let after = performance.now()
75 | let time = (Math.round(after - before) / 1000)
76 | .toString()
77 | .replace(/\.\d$/, '$&00')
78 | .replace(/\.\d\d$/, '$&0')
79 | process.stdout.write(prefix + name + '\x1B[1m' + time.padStart(6) + '\x1B[22m ms\n')
80 | }
81 |
82 | async function run() {
83 | for (let runner of runners) {
84 | before = performance.now()
85 | await spawn(`./node_modules/.bin/${runner}`, { cwd })
86 | showTime(runner)
87 | }
88 | }
89 |
90 | await makeDir()
91 | await initGitRepo()
92 | await initProject()
93 | await run()
94 |
95 | await fs.remove(cwd)
96 |
--------------------------------------------------------------------------------
/bench/size/index.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | import { get } from 'https'
4 | import c from 'picocolors'
5 |
6 | async function getJSON(url) {
7 | const options = {
8 | headers: {
9 | 'User-Agent': 'nano-staged',
10 | },
11 | }
12 |
13 | return new Promise((resolve) => {
14 | get(url, options, (res) => {
15 | let text = ''
16 | res.on('data', (chunk) => {
17 | text += chunk
18 | })
19 | res.on('end', () => {
20 | resolve(JSON.parse(text))
21 | })
22 | })
23 | })
24 | }
25 |
26 | async function benchmark(lib) {
27 | let prefix = lib === 'nano-staged' ? '+ ' : '- '
28 | let data = await getJSON(`https://packagephobia.com/v2/api.json?p=${lib}`)
29 | let size = data.install.bytes
30 | process.stdout.write(
31 | prefix +
32 | lib.padEnd('lint-staged '.length) +
33 | c.bold(
34 | Math.round(size / 1024)
35 | .toString()
36 | .padStart(4)
37 | ) +
38 | ' kB\n'
39 | )
40 | }
41 |
42 | async function start() {
43 | process.stdout.write(c.gray('Data from packagephobia.com\n'))
44 | await benchmark('lint-staged')
45 | await benchmark('nano-staged')
46 | }
47 |
48 | start()
49 |
--------------------------------------------------------------------------------
/img/logo.svg:
--------------------------------------------------------------------------------
1 | (SVG logo markup omitted)
--------------------------------------------------------------------------------
/lib/bin.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | import nanoStaged from './index.js'
4 | import * as utils from './utils.js'
5 |
6 | const FORCE_COLOR_LEVEL = utils.getForceColorLevel()
7 |
8 | if (FORCE_COLOR_LEVEL) {
9 | process.env.FORCE_COLOR = FORCE_COLOR_LEVEL.toString()
10 | }
11 |
12 | process.on('SIGINT', () => {})
13 |
14 | function run() {
15 | let options = {}
16 |
17 | for (let i = 2; i < process.argv.length; i++) {
18 | let arg = process.argv[i]
19 |
20 | if (arg === '-c' || arg === '--config') {
21 | options.config = process.argv[++i]
22 | } else if (arg === '-u' || arg === '--unstaged') {
23 | options.unstaged = true
24 | } else if (arg === '--allow-empty') {
25 | options.allowEmpty = true
26 | } else if (arg === '--diff') {
27 | options.diff = []
28 | } else if (options.diff && options.diff.length !== 2) {
29 | options.diff.push(process.argv[i])
30 | }
31 | }
32 |
33 | return nanoStaged(options)
34 | }
35 |
36 | run().catch(() => {
37 | process.exitCode = 1
38 | })
39 |
--------------------------------------------------------------------------------
/lib/cmd-runner.js:
--------------------------------------------------------------------------------
1 | import { normalize, relative, resolve, isAbsolute } from 'path'
2 | import c from 'picocolors'
3 |
4 | import { globToRegex } from './glob-to-regex.js'
5 | import { stringArgvToArray } from './utils.js'
6 | import { TaskRunnerError } from './errors.js'
7 | import { executor } from './executor.js'
8 | import { toArray } from './utils.js'
9 |
10 | export function createCmdRunner({
11 | cwd = process.cwd(),
12 | type = 'staged',
13 | rootPath = '',
14 | config = {},
15 | files = [],
16 | } = {}) {
17 | const runner = {
18 | async generateCmdTasks() {
19 | const cmdTasks = []
20 |
21 | for (const [pattern, cmds] of Object.entries(config)) {
22 | const matches = globToRegex(pattern, { extended: true, globstar: pattern.includes('/') })
23 | const isFn = typeof cmds === 'function'
24 | const task_files = []
25 | const tasks = []
26 |
27 | for (let file of files) {
28 | file = normalize(relative(cwd, normalize(resolve(rootPath, file)))).replace(/\\/g, '/')
29 |
30 | if (!pattern.startsWith('../') && (file.startsWith('..') || isAbsolute(file))) {
31 | continue
32 | }
33 |
34 | if (matches.regex.test(file)) {
35 | task_files.push(resolve(cwd, file))
36 | }
37 | }
38 |
39 | const file_count = task_files.length
40 | const commands = toArray(isFn ? await cmds({ filenames: task_files, type }) : cmds)
41 | const suffix = file_count ? file_count + (file_count > 1 ? ' files' : ' file') : 'no files'
42 |
43 | for (const command of commands) {
44 | const [cmd, ...args] = stringArgvToArray(command)
45 |
46 | if (file_count) {
47 | tasks.push({
48 | title: command,
49 | run: async () =>
50 | executor(cmd, isFn ? args : args.concat(task_files), {
51 | cwd: rootPath,
52 | }),
53 | pattern,
54 | })
55 | }
56 | }
57 |
58 | cmdTasks.push({
59 | title: pattern + c.dim(` - ${suffix}`),
60 | file_count,
61 | tasks,
62 | })
63 | }
64 |
65 | return cmdTasks
66 | },
67 |
68 | async run(parentTask) {
69 | const errors = []
70 |
71 | try {
72 | await Promise.all(
73 | parentTask.tasks.map(async (task) => {
74 | task.parent = parentTask
75 |
76 | try {
77 | if (task.file_count) {
78 | task.state = 'run'
79 | await runner.runTask(task)
80 | task.state = 'done'
81 | } else {
82 | task.state = 'warn'
83 | }
84 | } catch (err) {
85 | task.state = 'fail'
86 | errors.push(...err)
87 | }
88 | })
89 | )
90 |
91 | if (errors.length) {
92 | throw new TaskRunnerError(errors.join('\n\n'))
93 | }
94 | } catch (err) {
95 | throw err
96 | }
97 | },
98 |
99 | async runTask(parentTask) {
100 | let skipped = false
101 | let errors = []
102 |
103 | for (const task of parentTask.tasks) {
104 | task.parent = parentTask
105 |
106 | try {
107 | if (skipped) {
108 | task.state = 'warn'
109 | continue
110 | }
111 |
112 | task.state = 'run'
113 | await task.run()
114 | task.state = 'done'
115 | } catch (error) {
116 | skipped = true
117 | task.title = c.red(task.title)
118 | task.state = 'fail'
119 | errors.push(`${c.red(task.pattern)} ${c.dim('>')} ${task.title}:\n` + error.trim())
120 | }
121 | }
122 |
123 | if (errors.length) {
124 | throw errors
125 | }
126 | },
127 | }
128 |
129 | return runner
130 | }
131 |
--------------------------------------------------------------------------------
/lib/config.js:
--------------------------------------------------------------------------------
1 | import { resolve, parse } from 'path'
2 | import { pathToFileURL } from 'url'
3 | import fs from 'fs'
4 |
5 | const places = [
6 | `.nano-staged.js`,
7 | `nano-staged.js`,
8 | `.nano-staged.cjs`,
9 | `nano-staged.cjs`,
10 | `.nano-staged.mjs`,
11 | `nano-staged.mjs`,
12 | `.nano-staged.json`,
13 | `nano-staged.json`,
14 | `.nanostagedrc`,
15 | 'package.json',
16 | ]
17 |
18 | async function readConfig(path) {
19 | if (fs.existsSync(path) && fs.lstatSync(path).isFile()) {
20 | const { ext, name } = parse(path)
21 |
22 | if (ext === '.json' || name === '.nanostagedrc') {
23 | const config = JSON.parse(fs.readFileSync(path, 'utf-8'))
24 | return name === 'package' ? config['nano-staged'] : config
25 | }
26 |
27 | if (ext === '.js' || ext === '.mjs' || ext === '.cjs') {
28 | const { default: config } = await import(pathToFileURL(path))
29 | return typeof config === 'function' ? { '*': config } : config
30 | }
31 | }
32 | }
33 |
34 | export async function getConfig(cwd = process.cwd(), config = undefined) {
35 | try {
36 | if (config) {
37 | return typeof config === 'string' ? await readConfig(resolve(config)) : config
38 | }
39 |
40 | let up = resolve(cwd)
41 |
42 | do {
43 | cwd = up
44 | for (const place of places) {
45 | config = await readConfig(resolve(cwd, place))
46 | if (config) return config
47 | }
48 | up = resolve(cwd, '..')
49 | } while (up !== cwd)
50 | } catch {
51 | return undefined
52 | }
53 | }
54 |
55 | export function validConfig(config) {
56 | return !!(
57 | config &&
58 | Object.keys(config).length &&
59 | Object.keys(config).every(
60 | (key) =>
61 | key &&
62 | typeof key === 'string' &&
63 | config[key] &&
64 | (typeof config[key] === 'string' ||
65 | typeof config[key] === 'function' ||
66 | (Array.isArray(config[key]) &&
67 | config[key].every((cmd) => cmd && typeof cmd === 'string')))
68 | )
69 | )
70 | }
71 |
--------------------------------------------------------------------------------
/lib/errors.js:
--------------------------------------------------------------------------------
1 | const MESSAGES = {
2 | noConfig: () => 'Create Nano Staged config.',
3 | noFileConfig: (path) => `Nano Staged config file *${path}* is not found.`,
4 | invalidConfig: () => 'Nano Staged config invalid.',
5 | noGitRepo: () => 'Nano Staged didn’t find git directory.',
6 | noFiles: (type) => `No ${type} files found.`,
7 | noMatchingFiles: () => 'No files match any configured task.',
8 | }
9 |
10 | export class NanoStagedError extends Error {
11 | constructor(type, ...args) {
12 | super(MESSAGES[type](...args))
13 | this.name = 'NanoStagedError'
14 | this.type = type
15 | }
16 | }
17 |
18 | export class TaskRunnerError extends Error {
19 | constructor(errors) {
20 | super(errors)
21 | this.name = 'TaskRunnerError'
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/lib/executor.js:
--------------------------------------------------------------------------------
1 | /* c8 ignore start */
2 | import { spawn } from 'child_process'
3 | import { promises as fs } from 'fs'
4 | import path from 'path'
5 |
6 | const IS_WINDOWS = process.platform === 'win32'
7 | const ENV_PATH_KEY = getPathKey(process.env)
8 | const RE_EXECUTABLE = /\.(?:com|exe)$/i
9 | const RE_META_CHARS = /([()\][%!^"`<>&|;, *?])/g
10 | const RE_IS_CMD_SHIM = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i
11 |
12 | function escapeCommand(arg) {
13 | return arg.replace(RE_META_CHARS, '^$1')
14 | }
15 |
16 | function escapeArgument(arg, doubleEscapeMetaChars = false) {
17 | arg = `"` + `${arg}`.replace(/(\\*)"/g, '$1$1\\"').replace(/(\\*)$/, '$1$1') + `"`
18 | arg = arg.replace(RE_META_CHARS, '^$1')
19 |
20 | if (doubleEscapeMetaChars) {
21 | arg = arg.replace(RE_META_CHARS, '^$1')
22 | }
23 |
24 | return arg
25 | }
26 |
27 | function getSpawnArgs(cmd, args) {
28 | if (IS_WINDOWS) {
29 | if (isCmdFile(cmd)) {
30 | let line = `/D /S /C "${escapeCommand(cmd)}`
31 | for (const arg of args) {
32 | line += ' '
33 | line += escapeArgument(arg, RE_IS_CMD_SHIM.test(cmd))
34 | }
35 | line += '"'
36 |
37 | return [line]
38 | }
39 | }
40 |
41 | return args
42 | }
43 |
44 | function endsWith(str, end) {
45 | return str.endsWith(end)
46 | }
47 |
48 | function isCmdFile(cmd) {
49 | let upperCMD = cmd.toUpperCase()
50 | return endsWith(upperCMD, '.CMD') || endsWith(upperCMD, '.BAT')
51 | }
52 |
53 | function getSpawnFileName(cmd) {
54 | if (IS_WINDOWS) {
55 | if (isCmdFile(cmd)) {
56 | return process.env['COMSPEC'] || 'cmd.exe'
57 | }
58 | }
59 |
60 | return cmd
61 | }
62 |
63 | async function getPrefix(root) {
64 | let original = (root = path.resolve(root))
65 |
66 | while (path.basename(root) === 'node_modules') {
67 | root = path.dirname(root)
68 | }
69 |
70 | if (original !== root) {
71 | return Promise.resolve(root)
72 | } else {
73 | return Promise.resolve(getPrefixFromTree(root))
74 | }
75 | }
76 |
77 | function getPrefixFromTree(current) {
78 | if (isRooted(current)) {
79 | return false
80 | } else {
81 | return Promise.all([
82 | fs.stat(path.join(current, 'package.json')).catch(() => ''),
83 | fs.stat(path.join(current, 'node_modules')).catch(() => ''),
84 | ]).then(([hasPkg, hasModules]) => {
85 | if (hasPkg || hasModules) {
86 | return current
87 | } else {
88 | return getPrefixFromTree(path.dirname(current))
89 | }
90 | })
91 | }
92 | }
93 |
94 | function getPathKey(env = process.env) {
95 | let pathKey = 'PATH'
96 |
97 | if (IS_WINDOWS) {
98 | pathKey = 'Path'
99 |
100 | for (const key in env) {
101 | if (key.toLowerCase() === 'path') {
102 | pathKey = key
103 | }
104 | }
105 | }
106 |
107 | return pathKey
108 | }
109 |
110 | function isRooted(p) {
111 | p = normalizeSeparators(p)
112 |
113 | if (IS_WINDOWS) {
114 | return p.match(/^[a-z]+:[/\\]?$/i)
115 | }
116 |
117 | return p === '/'
118 | }
119 |
120 | async function tryGetExecutablePath(filePath, extensions) {
121 | let stats = undefined
122 | try {
123 | stats = await fs.stat(filePath)
124 | } catch (err) {
125 | if (err.code !== 'ENOENT') {
126 | console.log(
127 | `Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`
128 | )
129 | }
130 | }
131 | if (stats && stats.isFile()) {
132 | if (IS_WINDOWS) {
133 | const upperExt = path.extname(filePath).toUpperCase()
134 | if (extensions.some((validExt) => validExt.toUpperCase() === upperExt)) {
135 | return filePath
136 | }
137 | } else {
138 | if (isUnixExecutable(stats)) {
139 | return filePath
140 | }
141 | }
142 | }
143 |
144 | const originalFilePath = filePath
145 | for (const extension of extensions) {
146 | filePath = originalFilePath + extension
147 |
148 | stats = undefined
149 | try {
150 | stats = await fs.stat(filePath)
151 | } catch (err) {
152 | if (err.code !== 'ENOENT') {
153 | console.log(
154 | `Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`
155 | )
156 | }
157 | }
158 |
159 | if (stats && stats.isFile()) {
160 | if (IS_WINDOWS) {
161 | try {
162 | const directory = path.dirname(filePath)
163 | const upperName = path.basename(filePath).toUpperCase()
164 | for (const actualName of await fs.readdir(directory)) {
165 | if (upperName === actualName.toUpperCase()) {
166 | filePath = path.join(directory, actualName)
167 | break
168 | }
169 | }
170 | } catch (err) {
171 | console.log(
172 | `Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`
173 | )
174 | }
175 |
176 | return filePath
177 | } else {
178 | if (isUnixExecutable(stats)) {
179 | return filePath
180 | }
181 | }
182 | }
183 | }
184 |
185 | return ''
186 | }
187 |
188 | function normalizeSeparators(p = '') {
189 | return IS_WINDOWS ? p.replace(/\//g, '\\').replace(/\\\\+/g, '\\') : p.replace(/\/\/+/g, '/')
190 | }
191 |
192 | function isUnixExecutable(stats) {
193 | return (
194 | (stats.mode & 1) > 0 ||
195 | ((stats.mode & 8) > 0 && stats.gid === process.getgid()) ||
196 | ((stats.mode & 64) > 0 && stats.uid === process.getuid())
197 | )
198 | }
199 |
200 | async function findInPath(tool) {
201 | let extensions = []
202 | let directories = []
203 | let matches = []
204 |
205 | if (IS_WINDOWS && process.env['PATHEXT']) {
206 | for (let extension of process.env['PATHEXT'].split(path.delimiter)) {
207 | if (extension) {
208 | extensions.push(extension)
209 | }
210 | }
211 | }
212 |
213 | if (isRooted(tool)) {
214 | let filePath = await tryGetExecutablePath(tool, extensions)
215 |
216 | if (filePath) {
217 | return [filePath]
218 | }
219 |
220 | return []
221 | }
222 |
223 | if (tool.includes(path.sep)) {
224 | return []
225 | }
226 |
227 | if (process.env[ENV_PATH_KEY]) {
228 | for (let p of process.env[ENV_PATH_KEY].split(path.delimiter)) {
229 | if (p) {
230 | directories.push(p)
231 | }
232 | }
233 | }
234 |
235 | for (let directory of directories) {
236 | let filePath = await tryGetExecutablePath(path.join(directory, tool), extensions)
237 |
238 | if (filePath) {
239 | matches.push(filePath)
240 | }
241 | }
242 |
243 | return matches
244 | }
245 |
246 | async function which(tool, check) {
247 | if (!tool) {
248 | throw `'tool' is required`
249 | }
250 |
251 | if (check) {
252 | let result = await which(tool, false)
253 |
254 | if (!result) {
255 | throw `${tool} does not exist`
256 | }
257 |
258 | return result
259 | }
260 |
261 | let matches = await findInPath(tool)
262 |
263 | if (matches && matches.length > 0) {
264 | return matches[0]
265 | }
266 |
267 | return ''
268 | }
269 |
270 | export async function executor(cmd, args = [], opts = {}) {
271 | let prefix = await getPrefix(process.cwd())
272 |
273 | if (prefix) {
274 | let local = path.join(prefix, 'node_modules', '.bin')
275 | process.env[ENV_PATH_KEY] = `${local}${path.delimiter}${process.env.PATH}`
276 | }
277 |
278 | let commandFile = await which(cmd, true)
279 |
280 | if (IS_WINDOWS && !RE_EXECUTABLE.test(commandFile)) {
281 | cmd = getSpawnFileName(commandFile)
282 | args = getSpawnArgs(commandFile, args)
283 | opts.windowsVerbatimArguments = true
284 | }
285 |
286 | let child = spawn(cmd, args, {
287 | ...opts,
288 | env: {
289 | ...process.env,
290 | ...opts.env,
291 | },
292 | })
293 |
294 | let output = ''
295 |
296 | if (child.stdout) {
297 | child.stdout.on('data', (data) => {
298 | output += data
299 | })
300 | }
301 |
302 | if (child.stderr) {
303 | child.stderr.on('data', (data) => {
304 | output += data
305 | })
306 | }
307 |
308 | return new Promise((resolve, reject) => {
309 | child.on('error', reject)
310 |
311 | child.on('close', (code) => {
312 | if (code === 0) {
313 | resolve(output)
314 | } else {
315 | reject(output)
316 | }
317 | })
318 | })
319 | }
320 |
321 | /* c8 ignore end */
322 |
--------------------------------------------------------------------------------
/lib/git-workflow.js:
--------------------------------------------------------------------------------
1 | import fs from 'fs'
2 | import { resolve } from 'path'
3 |
4 | import { createGit } from './git.js'
5 |
6 | export function createGitWorkflow({ allowEmpty = false, dotPath = '', rootPath = '' } = {}) {
7 | const git = createGit(rootPath)
8 | const patch = {
9 | unstaged: resolve(dotPath, './nano-staged_partial.patch'),
10 | original: resolve(dotPath, './nano-staged.patch'),
11 | }
12 |
13 | const workflow = {
14 | hasPatch(path = '') {
15 | let has = false
16 |
17 | if (path) {
18 | try {
19 | let buffer = fs.readFileSync(path)
20 | has = buffer && buffer.toString()
21 | } catch {
22 | has = false
23 | }
24 | }
25 |
26 | return Boolean(has)
27 | },
28 |
29 | async backupOriginalState() {
30 | try {
31 | await git.diff(patch.original)
32 | } catch (e) {
33 | throw e
34 | }
35 | },
36 |
37 | async backupUnstagedFiles(files = []) {
38 | if (files.length) {
39 | try {
40 | await git.diff(patch.unstaged, files)
41 | await git.checkout(files)
42 | } catch (e) {
43 | throw e
44 | }
45 | }
46 | },
47 |
48 | async applyModifications(files = []) {
49 | if (files.length) {
50 | try {
51 | if (!(await git.exec(['diff', 'HEAD'])) && !allowEmpty) {
52 | throw 'Prevented an empty git commit!'
53 | }
54 |
55 | await git.add(files)
56 | } catch (e) {
57 | throw e
58 | }
59 | }
60 | },
61 |
62 | async restoreUnstagedFiles(files = []) {
63 | if (files.length) {
64 | try {
65 | await git.apply(patch.unstaged)
66 | } catch {
67 | try {
68 | await git.apply(patch.unstaged, true)
69 | } catch {
70 | throw 'Merge conflict!!! Unstaged changes not restored.'
71 | }
72 | }
73 | }
74 | },
75 |
76 | async restoreOriginalState() {
77 | try {
78 | await git.checkout('.')
79 |
80 | if (workflow.hasPatch(patch.original)) {
81 | await git.apply(patch.original)
82 | }
83 | } catch (e) {
84 | throw e
85 | }
86 | },
87 |
88 | async cleanUp() {
89 | try {
90 | if (workflow.hasPatch(patch.original)) {
91 | fs.unlinkSync(patch.original)
92 | }
93 |
94 | if (workflow.hasPatch(patch.unstaged)) {
95 | fs.unlinkSync(patch.unstaged)
96 | }
97 | } catch (e) {
98 | throw e
99 | }
100 | },
101 | }
102 |
103 | return workflow
104 | }
105 |
--------------------------------------------------------------------------------
/lib/git.js:
--------------------------------------------------------------------------------
1 | import { join, normalize, resolve } from 'path'
2 | import fs from 'fs'
3 |
4 | import { executor } from './executor.js'
5 | import { toArray } from './utils.js'
6 |
7 | const ADDED = 'A'.charCodeAt(0)
8 | const COPIED = 'C'.charCodeAt(0)
9 | const DELETED = 'D'.charCodeAt(0)
10 | const MODIFIED = 'M'.charCodeAt(0)
11 | const RENAMED = 'R'.charCodeAt(0)
12 | const SPACE = ' '.charCodeAt(0)
13 |
14 | export const STAGED_CODE = 1 << 0
15 | export const CHANGED_CODE = 1 << 1
16 | export const DELETED_CODE = 1 << 2
17 |
18 | const APPLY_ARGS = ['-v', '--whitespace=nowarn', '--recount', '--unidiff-zero']
19 | const DIFF_ARGS = [
20 | '--binary',
21 | '--unified=0',
22 | '--no-color',
23 | '--no-ext-diff',
24 | '--src-prefix=a/',
25 | '--dst-prefix=b/',
26 | '--patch',
27 | '--submodule=short',
28 | ]
29 |
30 | function group(entries = []) {
31 | const deleted = []
32 | const changed = []
33 | const working = []
34 |
35 | for (let { path, type, rename } of entries) {
36 | path = rename || path
37 |
38 | if (!working.includes(path)) {
39 | if (type === CHANGED_CODE) {
40 | changed.push(path)
41 | }
42 |
43 | if (type === DELETED_CODE) {
44 | deleted.push(path)
45 | }
46 |
47 | working.push(path)
48 | }
49 | }
50 |
51 | return { working, deleted, changed }
52 | }
53 |
54 | export function createGit(cwd = process.cwd()) {
55 | const git = {
56 | cwd,
57 |
58 | async exec(args = [], opts = {}) {
59 | try {
60 | return await executor('git', args, {
61 | ...opts,
62 | cwd: opts.cwd || git.cwd,
63 | })
64 | } catch (e) {
65 | throw e
66 | }
67 | },
68 |
69 | async diff(fileName, files = [], opts = {}) {
70 | const args = ['diff', ...DIFF_ARGS, '--output', fileName]
71 |
72 | if (files.length) {
73 | args.push('--')
74 | args.push(...files)
75 | }
76 |
77 | await git.exec(args, opts)
78 | },
79 |
80 | async diffFileName(ref1, ref2, opts = {}) {
81 | const args = ['diff', '--name-only', '--no-ext-diff', '--diff-filter=ACMR', '-z']
82 |
83 | if (ref1) {
84 | args.push(ref1)
85 | }
86 | if (ref2) {
87 | args.push(ref2)
88 | }
89 |
90 | try {
91 | return await git.exec([...args, '--'], opts)
92 | } catch {
93 | return ''
94 | }
95 | },
96 |
97 | async apply(patch, allowConflicts = false, opts = {}) {
98 | const args = ['apply', ...APPLY_ARGS]
99 |
100 | if (allowConflicts) {
101 | args.push('-3')
102 | }
103 |
104 | if (patch) {
105 | args.push(patch)
106 | }
107 |
108 | await git.exec(args, opts)
109 | },
110 |
111 | async getGitPaths(opts = {}) {
112 | const paths = {
113 | root: null,
114 | dot: null,
115 | }
116 |
117 | delete process.env.GIT_DIR
118 | delete process.env.GIT_WORK_TREE
119 |
120 | try {
121 | const line = await git.exec(['rev-parse', '--show-toplevel'], opts)
122 | const git_path = line ? normalize(line.trimLeft().replace(/[\r\n]+$/, '')) : ''
123 | const git_config_path = normalize(fs.realpathSync(join(git_path, '.git')))
124 |
125 | if (git_path) {
126 | paths.root = git_path
127 | paths.dot = git_config_path
128 | }
129 |
130 | if (fs.lstatSync(git_config_path).isFile()) {
131 | const file = fs.readFileSync(git_config_path, 'utf-8').toString()
132 | const path = resolve(git_path, file.replace(/^gitdir: /, '')).trim()
133 | paths.dot = path
134 | }
135 |
136 | return paths
137 | } catch {
138 | return paths
139 | }
140 | },
141 |
142 | async add(paths, opts = {}) {
143 | paths = toArray(paths)
144 |
145 | if (paths.length) {
146 | const args = ['add', '-A', '--', ...paths]
147 | await git.exec(args, opts)
148 | }
149 | },
150 |
151 | async checkout(paths, opts = {}) {
152 | paths = toArray(paths)
153 |
154 | if (paths.length) {
155 | const args = ['checkout', '-q', '--force', '--', ...paths]
156 | await git.exec(args, opts)
157 | }
158 | },
159 |
160 | async status(opts = {}) {
161 | const env = { GIT_OPTIONAL_LOCKS: '0' }
162 | const args = ['status', '-z', '-u']
163 | const result = []
164 |
165 | try {
166 | const raw = await git.exec(args, { env, ...opts })
167 |
168 | let i = 0
169 | let lastIndex
170 |
171 | while (i < raw.length) {
172 | if (i + 4 >= raw.length) {
173 | return []
174 | }
175 |
176 | const entry = {
177 | x: raw.charCodeAt(i++),
178 | y: raw.charCodeAt(i++),
179 | path: '',
180 | rename: undefined,
181 | }
182 |
183 | i++
184 |
185 | if (entry.x === RENAMED || entry.x === COPIED) {
186 | lastIndex = raw.indexOf('\0', i)
187 |
188 | if (!~lastIndex) {
189 | return []
190 | }
191 |
192 | entry.rename = raw.substring(i, lastIndex)
193 | i = lastIndex + 1
194 | }
195 |
196 | lastIndex = raw.indexOf('\0', i)
197 |
198 | if (!~lastIndex) {
199 | return []
200 | }
201 |
202 | entry.path = raw.substring(i, lastIndex)
203 |
204 | if (entry.path[entry.path.length - 1] !== '/') {
205 | result.push(entry)
206 | }
207 |
208 | i = lastIndex + 1
209 | }
210 |
211 | return result
212 | } catch {
213 | return []
214 | }
215 | },
216 |
217 | async changedFiles(refs = [], opts = {}) {
218 | const [ref1, ref2] = refs
219 | const lines = await git.diffFileName(ref1, ref2, opts)
220 | const files = lines ? lines.replace(/\u0000$/, '').split('\u0000') : []
221 | const result = files.map((path) => ({ type: CHANGED_CODE, path, rename: undefined }))
222 |
223 | return group(result)
224 | },
225 |
226 | async stagedFiles(opts = {}) {
227 | const entries = await git.status(opts)
228 | const result = []
229 |
230 | for (const entry of entries) {
231 | const { x, y } = entry
232 |
233 | if (x === ADDED || x === MODIFIED || x === RENAMED || x === COPIED) {
234 | if (y === ADDED || y === COPIED || y === MODIFIED || y === RENAMED) {
235 | entry.type = CHANGED_CODE
236 | } else if (y === DELETED) {
237 | entry.type = DELETED_CODE
238 | } else {
239 | entry.type = STAGED_CODE
240 | }
241 |
242 | result.push(entry)
243 | }
244 | }
245 |
246 | return group(result)
247 | },
248 |
249 | async unstagedFiles(opts = {}) {
250 | const entries = await git.status(opts)
251 | const result = []
252 |
253 | for (const entry of entries) {
254 | const { y } = entry
255 |
256 | if (y !== SPACE && y !== DELETED) {
257 | entry.type = CHANGED_CODE
258 | result.push(entry)
259 | }
260 | }
261 |
262 | return group(result)
263 | },
264 | }
265 |
266 | return git
267 | }
268 |
--------------------------------------------------------------------------------
/lib/glob-to-regex.js:
--------------------------------------------------------------------------------
1 | const CLOSE_PARENTHESES = ')'.charCodeAt(0)
2 | const OPEN_PARENTHESES = '('.charCodeAt(0)
3 | const CLOSE_SQUARE = ']'.charCodeAt(0)
4 | const OPEN_SQUARE = '['.charCodeAt(0)
5 | const CLOSE_CURLY = '}'.charCodeAt(0)
6 | const OPEN_CURLY = '{'.charCodeAt(0)
7 | const BACKSLASH = '\\'.charCodeAt(0)
8 | const ASTERISK = '*'.charCodeAt(0)
9 | const QUESTION = '?'.charCodeAt(0)
10 | const DOLLAR = '$'.charCodeAt(0)
11 | const EQUALS = '='.charCodeAt(0)
12 | const CARRET = '^'.charCodeAt(0)
13 | const SLASH = '/'.charCodeAt(0)
14 | const COLON = ':'.charCodeAt(0)
15 | const POINT = '.'.charCodeAt(0)
16 | const PIPE = '|'.charCodeAt(0)
17 | const PLUS = '+'.charCodeAt(0)
18 | const BANG = '!'.charCodeAt(0)
19 | const COMA = ','.charCodeAt(0)
20 | const AT = '@'.charCodeAt(0)
21 |
22 | const GLOBSTAR = `((?:[^/]*(?:/|$))*)`
23 | const WILDCARD = `([^/]*)`
24 |
25 | export function globToRegex(glob, opts = {}) {
26 | let { extended = false, globstar = false, flags = '' } = opts
27 |
28 | let inRange = false
29 | let inGroup = false
30 | let stack = []
31 | let regex = ''
32 | let pos = 0
33 |
34 | let code, next
35 |
36 | while (pos < glob.length) {
37 | code = glob.charCodeAt(pos)
38 |
39 | switch (code) {
40 | case BACKSLASH:
41 | case DOLLAR:
42 | case CARRET:
43 | case EQUALS:
44 | case POINT: {
45 | regex += `\\${glob[pos]}`
46 | break
47 | }
48 |
49 | case SLASH: {
50 | regex += `\\${glob[pos]}`
51 | if (glob.charCodeAt(pos + 1) === SLASH) {
52 | regex += '?'
53 | }
54 | break
55 | }
56 |
57 | case OPEN_PARENTHESES: {
58 | if (stack.length) {
59 | regex += glob[pos]
60 | break
61 | }
62 |
63 | regex += `\\${glob[pos]}`
64 | break
65 | }
66 |
67 | case CLOSE_PARENTHESES: {
68 | if (stack.length) {
69 | regex += glob[pos]
70 |
71 | let type = stack.pop()
72 | if (type === '@') {
73 | regex += '{1}'
74 | } else if (type === '!') {
75 | regex += '([^/]*)'
76 | } else {
77 | regex += type
78 | }
79 | break
80 | }
81 |
82 | regex += `\\${glob[pos]}`
83 | break
84 | }
85 |
86 | case PIPE: {
87 | if (stack.length) {
88 | regex += glob[pos]
89 | break
90 | }
91 |
92 | regex += `\\${glob[pos]}`
93 | break
94 | }
95 |
96 | case PLUS: {
97 | if (glob.charCodeAt(pos + 1) === OPEN_PARENTHESES && extended) {
98 | stack.push(glob[pos])
99 | break
100 | }
101 |
102 | regex += `\\${glob[pos]}`
103 | break
104 | }
105 |
106 | case AT: {
107 | if (glob.charCodeAt(pos + 1) === OPEN_PARENTHESES && extended) {
108 | stack.push(glob[pos])
109 | break
110 | }
111 | }
112 |
113 | case BANG: {
114 | if (extended) {
115 | if (inRange) {
116 | regex += `^`
117 | break
118 | }
119 | if (glob.charCodeAt(pos + 1) === OPEN_PARENTHESES) {
120 | stack.push(glob[pos])
121 | regex += `(?!`
122 | pos++
123 | break
124 | }
125 |
126 | regex += `\\${glob[pos]}`
127 | break
128 | }
129 |
130 | regex += `\\${glob[pos]}`
131 | break
132 | }
133 |
134 | case QUESTION: {
135 | if (extended) {
136 | if (glob.charCodeAt(pos + 1) === OPEN_PARENTHESES) {
137 | stack.push(glob[pos])
138 | } else {
139 | regex += `.`
140 | }
141 | break
142 | }
143 |
144 | regex += `\\${glob[pos]}`
145 | break
146 | }
147 |
148 | case OPEN_SQUARE: {
149 | if (inRange && glob.charCodeAt(pos + 1) === COLON) {
150 | next = glob.indexOf(':', pos + 2)
151 |
152 | let value = glob.slice(pos + 2, next)
153 | if (value === 'alnum') {
154 | regex += `(\\w|\\d)`
155 | } else if (value === 'space') {
156 | regex += `\\s`
157 | } else if (value === 'digit') {
158 | regex += `\\d`
159 | }
160 |
161 | pos = next + 1
162 | break
163 | }
164 |
165 | if (extended) {
166 | inRange = true
167 | regex += glob[pos]
168 | break
169 | }
170 |
171 | regex += `\\${glob[pos]}`
172 | break
173 | }
174 |
175 | case CLOSE_SQUARE: {
176 | if (extended) {
177 | inRange = false
178 | regex += glob[pos]
179 | break
180 | }
181 |
182 | regex += `\\${glob[pos]}`
183 | break
184 | }
185 |
186 | case OPEN_CURLY: {
187 | if (extended) {
188 | inGroup = true
189 | regex += `(`
190 | break
191 | }
192 |
193 | regex += `\\${glob[pos]}`
194 | break
195 | }
196 |
197 | case CLOSE_CURLY: {
198 | if (extended) {
199 | inGroup = false
200 | regex += `)`
201 | break
202 | }
203 |
204 | regex += `\\${glob[pos]}`
205 | break
206 | }
207 |
208 | case COMA: {
209 | if (inGroup) {
210 | regex += `|`
211 | break
212 | }
213 |
214 | regex += `\\${glob[pos]}`
215 | break
216 | }
217 |
218 | case ASTERISK: {
219 | if (glob.charCodeAt(pos + 1) === OPEN_PARENTHESES && extended) {
220 | stack.push(glob[pos])
221 | break
222 | }
223 |
224 | let prevChar = glob[pos - 1]
225 | let starCount = 1
226 | while (glob.charCodeAt(pos + 1) === ASTERISK) {
227 | starCount++
228 | pos++
229 | }
230 | let nextChar = glob[pos + 1]
231 |
232 | if (!globstar) {
233 | regex += `.*`
234 | } else {
235 | let isGlobstar =
236 | starCount > 1 &&
237 | (prevChar === '/' || prevChar === undefined) &&
238 | (nextChar === '/' || nextChar === undefined)
239 | if (isGlobstar) {
240 | regex += GLOBSTAR
241 | pos++
242 | } else {
243 | regex += WILDCARD
244 | }
245 | }
246 |
247 | break
248 | }
249 |
250 | default: {
251 | regex += glob[pos]
252 | break
253 | }
254 | }
255 |
256 | pos++
257 | }
258 |
259 | if (!flags.includes('g')) {
260 | regex = `^${regex}$`
261 | }
262 |
263 | return { regex: new RegExp(regex, flags) }
264 | }
265 |
--------------------------------------------------------------------------------
/lib/index.js:
--------------------------------------------------------------------------------
1 | import { getConfig, validConfig } from './config.js'
2 | import { createReporter } from './reporter.js'
3 | import { NanoStagedError } from './errors.js'
4 | import { createRunner } from './runner.js'
5 | import { createGit } from './git.js'
6 | import { toArray } from './utils.js'
7 |
8 | export default async function (options) {
9 | const opts = {
10 | stream: process.stderr,
11 | cwd: process.cwd(),
12 | allowEmpty: false,
13 | config: undefined,
14 | unstaged: false,
15 | diff: false,
16 | ...options,
17 | }
18 |
19 | const reporter = createReporter(opts.stream)
20 | const git = createGit(opts.cwd)
21 |
22 | try {
23 | const config = await getConfig(opts.cwd, opts.config)
24 | const git_paths = await git.getGitPaths()
25 |
26 | if (!config) {
27 | if (typeof opts.config === 'string') {
28 | throw new NanoStagedError('noFileConfig', opts.config)
29 | } else {
30 | throw new NanoStagedError('noConfig')
31 | }
32 | }
33 |
34 | if (!validConfig(config)) {
35 | throw new NanoStagedError('invalidConfig')
36 | }
37 |
38 | if (!git_paths.root) {
39 | throw new NanoStagedError('noGitRepo')
40 | }
41 |
42 | let files, type
43 |
44 | if (opts.unstaged) {
45 | files = await git.unstagedFiles({ cwd: git_paths.root })
46 | type = 'unstaged'
47 | } else if (opts.diff && Array.isArray(opts.diff)) {
48 | files = await git.changedFiles(opts.diff, { cwd: git_paths.root })
49 | type = 'diff'
50 | } else {
51 | files = await git.stagedFiles({ cwd: git_paths.root })
52 | type = 'staged'
53 | }
54 |
55 | if (!files.working.length) {
56 | reporter.error(new NanoStagedError('noFiles', type))
57 | return
58 | }
59 |
60 | await createRunner({ ...opts, config, git_paths, files, type }).run()
61 | } catch (errors) {
62 | for (const error of toArray(errors)) {
63 | reporter.error(error)
64 | }
65 | throw errors
66 | }
67 | }
68 |
--------------------------------------------------------------------------------
/lib/renderer.js:
--------------------------------------------------------------------------------
1 | import readline from 'readline'
2 | import c from 'picocolors'
3 |
4 | const spinnerMap = new WeakMap()
5 | const spinnerFrames = ['-', '\\', '|', '/']
6 |
7 | function getSpinner() {
8 | let index = 0
9 |
10 | return () => {
11 | index = ++index % spinnerFrames.length
12 | return spinnerFrames[index]
13 | }
14 | }
15 |
16 | function getLines(str = '', width = 80) {
17 | return str
18 | .replace(/\u001b[^m]*?m/g, '')
19 | .split('\n')
20 | .reduce((col, l) => (col += Math.max(1, Math.ceil(l.length / width))), 0)
21 | }
22 |
23 | function getStateSymbol(task) {
24 | if (task.state === 'done') {
25 | return c.green('√')
26 | } else if (task.state === 'fail') {
27 | return c.red('×')
28 | } else if (task.state === 'warn') {
29 | return c.yellow('↓')
30 | } else if (task.state === 'run') {
31 | let spinner = spinnerMap.get(task)
32 |
33 | if (!spinner) {
34 | spinner = getSpinner()
35 | spinnerMap.set(task, spinner)
36 | }
37 |
38 | return c.yellow(spinner())
39 | } else {
40 | return c.gray('*')
41 | }
42 | }
43 |
44 | function getTitles(task) {
45 | const titles = [task.title]
46 | let current = task
47 |
48 | while (current.parent) {
49 | current = current.parent
50 | if (current.title) titles.unshift(current.title)
51 | }
52 |
53 | return titles
54 | }
55 |
56 | function renderTree(tasks, level = 0) {
57 | let output = []
58 |
59 | for (const task of tasks) {
60 | const title = task.title
61 | const prefix = `${getStateSymbol(task)} `
62 |
63 | output.push(' '.repeat(level) + prefix + title)
64 |
65 | if (task.tasks && task.tasks.length > 0) {
66 | if (task.state !== 'done') {
67 | output = output.concat(renderTree(task.tasks, level + 1))
68 | }
69 | }
70 | }
71 |
72 | return output.join('\n')
73 | }
74 |
75 | function renderCI(tasks) {
76 | let output = ''
77 |
78 | for (const task of tasks) {
79 | if (task.state && task.state !== 'end' && task.state !== 'run' && !task.tasks) {
80 | const title = getTitles(task).join(c.yellow(' ≫ '))
81 | const prefix = `${getStateSymbol(task)} `
82 |
83 | output += prefix + title + '\n'
84 | task.state = 'end'
85 | }
86 |
87 | if (task.tasks && task.tasks.length > 0) {
88 | output += renderCI(task.tasks)
89 | }
90 | }
91 |
92 | return output
93 | }
94 |
95 | export function createRenderer(stream, { isTTY = true } = {}) {
96 | let tasks = []
97 | let lines = 0
98 | let timer
99 |
100 | return {
101 | clear() {
102 | for (let i = 0; i < lines; i++) {
103 | i > 0 && readline.moveCursor(stream, 0, -1)
104 | readline.cursorTo(stream, 0)
105 | readline.clearLine(stream, 0)
106 | }
107 | lines = 0
108 | },
109 |
110 | write(str, clear = false) {
111 | if (clear) {
112 | this.clear()
113 | }
114 |
115 | stream.write(str)
116 | },
117 |
118 | render() {
119 | const output = isTTY ? renderTree(tasks) : renderCI(tasks)
120 |
121 | if (isTTY) {
122 | this.write(output, true)
123 | lines = getLines(output, stream.columns)
124 | } else {
125 | this.write(output)
126 | }
127 |
128 | return this
129 | },
130 |
131 | spin(task) {
132 | task && tasks.push(task)
133 | return this.render()
134 | },
135 |
136 | loop() {
137 | timer = setTimeout(() => this.loop(), 130)
138 | return this.spin()
139 | },
140 |
141 | start(task) {
142 | tasks.push(task)
143 |
144 | if (timer) return this
145 | if (isTTY) stream.write(`\x1b[?25l`)
146 |
147 | return this.loop()
148 | },
149 |
150 | stop() {
151 | if (timer) timer = clearTimeout(timer)
152 |
153 | if (isTTY) {
154 | this.write(`${renderTree(tasks)}\n`, true)
155 | this.write(`\x1b[?25h`)
156 | } else {
157 | this.write(renderCI(tasks))
158 | }
159 |
160 | return this
161 | },
162 | }
163 | }
164 |
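For orientation, `createRenderer` above drives the spinner/tree output from plain task objects with a `title`, a `state` (`'run'`, `'done'`, `'fail'`, `'warn'`) and optional nested `tasks`, which is how `runner.js` below feeds it. A minimal standalone sketch, not part of the repository — the import path is relative to the repo root and the timing value is illustrative:

```js
import { createRenderer } from './lib/renderer.js'

// Hand-rolled task object in the same shape runner.js produces.
const task = { title: 'Running prettier --write', state: 'run' }

const renderer = createRenderer(process.stderr, { isTTY: process.stderr.isTTY })
renderer.start(task) // hides the cursor and starts the 130 ms redraw loop

setTimeout(() => {
  task.state = 'done' // the next frame picks up the state change automatically
  renderer.stop() // prints the final tree and restores the cursor
}, 500)
```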
--------------------------------------------------------------------------------
/lib/reporter.js:
--------------------------------------------------------------------------------
1 | import c from 'picocolors'
2 |
3 | import { NanoStagedError, TaskRunnerError } from './errors.js'
4 |
5 | export function createReporter(stream = process.stderr) {
6 | function print(lines) {
7 | stream.write(lines)
8 | }
9 |
10 | const reporter = {
11 | error(err) {
12 | if (err instanceof NanoStagedError) {
13 | const msg = err.message.replace(/\*([^*]+)\*/g, c.yellow('$1'))
14 |
15 | if (['noFiles', 'noMatchingFiles'].includes(err.type)) {
16 | print(`${c.cyan(`-`)} ${msg}\n`)
17 | } else {
18 | print(`${c.red('×')} ${c.red(msg)}\n`)
19 | }
20 | } else if (err instanceof TaskRunnerError) {
21 | print(`\n${err.message || err}\n`)
22 | } else {
23 | print(`\n${c.red(err.message || err)}\n`)
24 | }
25 | },
26 | }
27 |
28 | return reporter
29 | }
30 |
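`createReporter` exposes only `error()`, and the formatting depends on the error class: a `NanoStagedError` with a `noFiles`/`noMatchingFiles` type is treated as informational, `TaskRunnerError` messages are printed as-is, and anything else is printed in red. A small usage sketch, not part of the repository; the message strings are illustrative:

```js
import { createReporter } from './lib/reporter.js'
import { NanoStagedError, TaskRunnerError } from './lib/errors.js'

const reporter = createReporter(process.stderr)

// Informational: rendered with a cyan dash instead of a red cross.
reporter.error(new NanoStagedError('noMatchingFiles'))

// Task failures keep the coloring they were thrown with (see cmd-runner tests below).
reporter.error(new TaskRunnerError('prettier --write failed'))

// Any other error is printed in red.
reporter.error(new Error('unexpected failure'))
```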
--------------------------------------------------------------------------------
/lib/runner.js:
--------------------------------------------------------------------------------
1 | import { createGitWorkflow } from './git-workflow.js'
2 | import { createCmdRunner } from './cmd-runner.js'
3 | import { createReporter } from './reporter.js'
4 | import { createRenderer } from './renderer.js'
5 | import { NanoStagedError } from './errors.js'
6 |
7 | export function createRunner({ allowEmpty, git_paths, config, stream, files, type, cwd }) {
8 | const reporter = createReporter(stream)
9 | const renderer = createRenderer(stream, { isTTY: !process.env.CI })
10 |
11 | const runner = {
12 | async run() {
13 | const changes = [...files.changed, ...files.deleted]
14 |
15 | const gitWorkflow = createGitWorkflow({
16 | allowEmpty,
17 | rootPath: git_paths.root,
18 | dotPath: git_paths.dot,
19 | })
20 |
21 | const cmdRunner = createCmdRunner({
22 | rootPath: git_paths.root,
23 | files: files.working,
24 | config,
25 | type,
26 | cwd,
27 | })
28 |
29 | const cmdTasks = await cmdRunner.generateCmdTasks()
30 |
31 | if (!cmdTasks.some((task) => task.file_count > 0)) {
32 | reporter.error(new NanoStagedError('noMatchingFiles'))
33 | return
34 | }
35 |
36 | let enabled = false
37 | let revert = false
38 | let clear = true
39 | let errors = []
40 | let tasks = []
41 |
42 | tasks.push({
43 | title: `Preparing nano-staged`,
44 | run: async () => {
45 | try {
46 | await gitWorkflow.backupOriginalState()
47 | } catch (e) {
48 | enabled = true
49 | throw e
50 | }
51 | },
52 | })
53 |
54 | tasks.push({
55 | title: `Backing up unstaged changes for staged files`,
56 | run: async () => {
57 | try {
58 | await gitWorkflow.backupUnstagedFiles(changes)
59 | } catch (e) {
60 | revert = true
61 | throw e
62 | }
63 | },
64 | skipped: () => enabled || type === 'unstaged' || type === 'diff' || changes.length === 0,
65 | })
66 |
67 | tasks.push({
68 | title: `Running tasks for ${type} files`,
69 | run: async (task) => {
70 | task.tasks = cmdTasks
71 |
72 | try {
73 | await cmdRunner.run(task)
74 | } catch (e) {
75 | revert = true
76 | throw e
77 | }
78 | },
79 | skipped: () => enabled || revert,
80 | })
81 |
82 | tasks.push({
83 | title: `Applying modifications from tasks`,
84 | run: async () => {
85 | try {
86 | await gitWorkflow.applyModifications(files.working)
87 | } catch (e) {
88 | revert = true
89 | throw e
90 | }
91 | },
92 | skipped: () => enabled || revert || type === 'unstaged' || type === 'diff',
93 | })
94 |
95 | tasks.push({
96 | title: `Restoring unstaged changes for staged files`,
97 | run: async () => {
98 | try {
99 | await gitWorkflow.restoreUnstagedFiles(changes)
100 | } catch (e) {
101 | throw e
102 | }
103 | },
104 | skipped: () =>
105 | enabled || revert || type === 'unstaged' || type === 'diff' || changes.length === 0,
106 | })
107 |
108 | tasks.push({
109 | title: `Restoring to original state because of errors`,
110 | run: async () => {
111 | try {
112 | await gitWorkflow.restoreOriginalState()
113 | } catch (e) {
114 | clear = false
115 | throw e
116 | }
117 | },
118 | skipped: () => enabled || !revert,
119 | })
120 |
121 | tasks.push({
122 |         title: `Cleaning up temporary patch files`,
123 | run: async () => {
124 | try {
125 | await gitWorkflow.cleanUp()
126 | } catch (e) {
127 | throw e
128 | }
129 | },
130 | skipped: () => enabled || !clear,
131 | })
132 |
133 | for (const task of tasks) {
134 | if (task.skipped ? !task.skipped() : true) {
135 | renderer.start(task)
136 |
137 | try {
138 | task.state = 'run'
139 | await task.run(task)
140 | task.state = 'done'
141 | } catch (e) {
142 | task.state = 'fail'
143 | errors.push(e)
144 | }
145 | }
146 | }
147 |
148 | renderer.stop()
149 |
150 | if (errors.length) {
151 | throw errors
152 | }
153 | },
154 | }
155 |
156 | return runner
157 | }
158 |
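`createRunner` expects the caller to have resolved the file lists and git paths already; the shapes used below match what `git.stagedFiles()` and `git.getGitPaths()` return in `test/git.test.js`. A hedged wiring sketch, not part of the repository — `lib/index.js` is the real entry point and may differ, and the config is an assumed example:

```js
import { createRunner } from './lib/runner.js'
import { createGit } from './lib/git.js'

const cwd = process.cwd()
const git = createGit(cwd)

const git_paths = await git.getGitPaths() // { root, dot }
const files = await git.stagedFiles() // { working, deleted, changed }

const runner = createRunner({
  allowEmpty: false,
  git_paths,
  config: { '*.js': 'prettier --write' }, // assumed example config
  stream: process.stderr,
  files,
  type: 'staged', // 'unstaged' and 'diff' skip the stash/restore steps above
  cwd,
})

await runner.run() // throws an array of errors if any task failed
```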
--------------------------------------------------------------------------------
/lib/utils.js:
--------------------------------------------------------------------------------
1 | import { fileURLToPath } from 'url'
2 | import { readFileSync } from 'fs'
3 | import process from 'process'
4 | import { join } from 'path'
5 | import c from 'picocolors'
6 | import tty from 'tty'
7 | import os from 'os'
8 |
9 | const REG_STR = /([^\s'"]([^\s'"]*(['"])([^\3]*?)\3)+[^\s'"]*)|[^\s'"]+|(['"])([^\5]*?)\5/gi
10 |
11 | export function toArray(val) {
12 | return Array.isArray(val) ? val : [val]
13 | }
14 |
15 | export function showVersion(print) {
16 | let pkg = readFileSync(join(fileURLToPath(import.meta.url), '../..', 'package.json'))
17 | let pkgJson = JSON.parse(pkg.toString())
18 | print.write(`Nano Staged ${c.bold(`v${pkgJson.version}`)}\n`)
19 | }
20 |
21 | export function stringArgvToArray(str = '') {
22 | let args = []
23 | let match
24 |
25 | while (true) {
26 | match = REG_STR.exec(str)
27 |
28 | if (!match) {
29 | return args
30 | }
31 |
32 | for (let arg of [match[1], match[6], match[0]]) {
33 | if (typeof arg === 'string') {
34 | args.push(arg)
35 | }
36 | }
37 | }
38 | }
39 |
40 | function hasFlags(...flags) {
41 | return flags
42 | .reduce((acc, flag) => [...acc, '-' + flag, '--' + flag], [])
43 | .some((flag) => process.argv.includes(flag))
44 | }
45 |
46 | export function getForceColorLevel() {
47 | if (hasFlags('no-color', 'no-colors', 'color=false', 'color=never')) {
48 | return 0
49 | } else if (process.env.FORCE_COLOR) {
50 | return Math.min(Number.parseInt(process.env.FORCE_COLOR, 10), 3)
51 | } else if (process.env.FORCE_NO_COLOR) {
52 | return 0
53 | } else if (!tty.isatty(1)) {
54 | return 0
55 | } else if (process.env.TERM === 'dumb') {
56 | return 0
57 | } else if (process.platform === 'win32') {
58 | const osRelease = os.release().split('.')
59 | if (Number(osRelease[0]) >= 10 && Number(osRelease[2]) >= 10_586) {
60 | return Number(osRelease[2]) >= 14_931 ? 3 : 2
61 | }
62 | return 1
63 | } else if (process.env.COLORTERM === 'truecolor') {
64 | return 3
65 | } else if (/-256(color)?$/i.test(process.env.TERM)) {
66 | return 2
67 | } else if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(process.env.TERM)) {
68 | return 1
69 | } else if (process.env.COLORTERM) {
70 | return 1
71 | } else {
72 | return 0
73 | }
74 | }
75 |
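The argument helpers above are small but easy to misread, so here is a quick sketch of how they behave — not part of the repository, and the expected outputs assume `stringArgvToArray` takes only the first matching capture per token:

```js
import { toArray, stringArgvToArray } from './lib/utils.js'

toArray('prettier --write') // => ['prettier --write']
toArray(['eslint --fix', 'prettier --write']) // => unchanged

// Whitespace splits arguments; quoted segments stay together with the quotes stripped.
stringArgvToArray('prettier --write "src/my file.js"')
// => ['prettier', '--write', 'src/my file.js']
```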
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "nano-staged",
3 | "version": "0.8.0",
4 | "description": "Tiny tool to run commands for modified, staged, and committed git files.",
5 | "author": "Usman Yunusov ",
6 | "license": "MIT",
7 | "repository": "usmanyunusov/nano-staged",
8 | "type": "module",
9 | "bin": "./lib/bin.js",
10 | "exports": "./lib/index.js",
11 | "engines": {
12 | "node": "^12.20.0 || ^14.13.1 || >=16.0.0"
13 | },
14 | "scripts": {
15 | "lint": "prettier --write lib/**/*.js",
16 | "unit": "cross-env CI=true node --loader=esmock --no-warnings ./node_modules/uvu/bin.js test \"\\.test\\.js$\"",
17 | "test": "c8 pnpm unit",
18 | "bench": "node bench/running-time/index.js && node bench/size/index.js"
19 | },
20 | "dependencies": {
21 | "picocolors": "^1.0.0"
22 | },
23 | "devDependencies": {
24 | "c8": "^7.11.2",
25 | "clean-publish": "^3.4.5",
26 | "cross-env": "^7.0.3",
27 | "esmock": "^1.7.5",
28 | "fs-extra": "^10.1.0",
29 | "nanodelay": "^2.0.2",
30 | "nanoid": "^3.3.3",
31 | "prettier": "^2.6.2",
32 | "uvu": "^0.5.3"
33 | },
34 | "clean-publish": {
35 | "cleanDocs": true
36 | },
37 | "prettier": {
38 | "printWidth": 100,
39 | "semi": false,
40 | "singleQuote": true
41 | },
42 | "c8": {
43 | "include": [
44 | "lib/**/*"
45 | ],
46 | "lines": 100,
47 | "check-coverage": true
48 | },
49 | "keywords": [
50 | "runner",
51 | "lint",
52 | "git",
53 | "staged",
54 | "unstaged",
55 | "diff",
56 | "eslint",
57 | "prettier",
58 | "stylelint"
59 | ]
60 | }
61 |
--------------------------------------------------------------------------------
/pnpm-lock.yaml:
--------------------------------------------------------------------------------
1 | lockfileVersion: 5.3
2 |
3 | specifiers:
4 | c8: ^7.11.2
5 | clean-publish: ^3.4.5
6 | cross-env: ^7.0.3
7 | esmock: ^1.7.5
8 | fs-extra: ^10.1.0
9 | nanodelay: ^2.0.2
10 | nanoid: ^3.3.3
11 | picocolors: ^1.0.0
12 | prettier: ^2.6.2
13 | uvu: ^0.5.3
14 |
15 | dependencies:
16 | picocolors: 1.0.0
17 |
18 | devDependencies:
19 | c8: 7.11.2
20 | clean-publish: 3.4.5
21 | cross-env: 7.0.3
22 | esmock: 1.7.5
23 | fs-extra: 10.1.0
24 | nanodelay: 2.0.2
25 | nanoid: 3.3.3
26 | prettier: 2.6.2
27 | uvu: 0.5.3
28 |
29 | packages:
30 |
31 | /@bcoe/v8-coverage/0.2.3:
32 | resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==}
33 | dev: true
34 |
35 | /@istanbuljs/schema/0.1.3:
36 | resolution: {integrity: sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==}
37 | engines: {node: '>=8'}
38 | dev: true
39 |
40 | /@jridgewell/resolve-uri/3.0.6:
41 | resolution: {integrity: sha512-R7xHtBSNm+9SyvpJkdQl+qrM3Hm2fea3Ef197M3mUug+v+yR+Rhfbs7PBtcBUVnIWJ4JcAdjvij+c8hXS9p5aw==}
42 | engines: {node: '>=6.0.0'}
43 | dev: true
44 |
45 | /@jridgewell/sourcemap-codec/1.4.11:
46 | resolution: {integrity: sha512-Fg32GrJo61m+VqYSdRSjRXMjQ06j8YIYfcTqndLYVAaHmroZHLJZCydsWBOTDqXS2v+mjxohBWEMfg97GXmYQg==}
47 | dev: true
48 |
49 | /@jridgewell/trace-mapping/0.3.9:
50 | resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==}
51 | dependencies:
52 | '@jridgewell/resolve-uri': 3.0.6
53 | '@jridgewell/sourcemap-codec': 1.4.11
54 | dev: true
55 |
56 | /@nodelib/fs.scandir/2.1.5:
57 | resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==}
58 | engines: {node: '>= 8'}
59 | dependencies:
60 | '@nodelib/fs.stat': 2.0.5
61 | run-parallel: 1.2.0
62 | dev: true
63 |
64 | /@nodelib/fs.stat/2.0.5:
65 | resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==}
66 | engines: {node: '>= 8'}
67 | dev: true
68 |
69 | /@nodelib/fs.walk/1.2.8:
70 | resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==}
71 | engines: {node: '>= 8'}
72 | dependencies:
73 | '@nodelib/fs.scandir': 2.1.5
74 | fastq: 1.13.0
75 | dev: true
76 |
77 | /@types/istanbul-lib-coverage/2.0.4:
78 | resolution: {integrity: sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g==}
79 | dev: true
80 |
81 | /ansi-regex/5.0.1:
82 | resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==}
83 | engines: {node: '>=8'}
84 | dev: true
85 |
86 | /ansi-styles/4.3.0:
87 | resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==}
88 | engines: {node: '>=8'}
89 | dependencies:
90 | color-convert: 2.0.1
91 | dev: true
92 |
93 | /balanced-match/1.0.2:
94 | resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==}
95 | dev: true
96 |
97 | /brace-expansion/1.1.11:
98 | resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==}
99 | dependencies:
100 | balanced-match: 1.0.2
101 | concat-map: 0.0.1
102 | dev: true
103 |
104 | /braces/3.0.2:
105 | resolution: {integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==}
106 | engines: {node: '>=8'}
107 | dependencies:
108 | fill-range: 7.0.1
109 | dev: true
110 |
111 | /c8/7.11.2:
112 | resolution: {integrity: sha512-6ahJSrhS6TqSghHm+HnWt/8Y2+z0hM/FQyB1ybKhAR30+NYL9CTQ1uwHxuWw6U7BHlHv6wvhgOrH81I+lfCkxg==}
113 | engines: {node: '>=10.12.0'}
114 | hasBin: true
115 | dependencies:
116 | '@bcoe/v8-coverage': 0.2.3
117 | '@istanbuljs/schema': 0.1.3
118 | find-up: 5.0.0
119 | foreground-child: 2.0.0
120 | istanbul-lib-coverage: 3.2.0
121 | istanbul-lib-report: 3.0.0
122 | istanbul-reports: 3.1.4
123 | rimraf: 3.0.2
124 | test-exclude: 6.0.0
125 | v8-to-istanbul: 9.0.0
126 | yargs: 16.2.0
127 | yargs-parser: 20.2.9
128 | dev: true
129 |
130 | /clean-publish/3.4.5:
131 | resolution: {integrity: sha512-wxtaEqCy/B7lch+bpTPmx9wI8owTODa619UOVA1XfyNaM2bjwv1SkR1IJNDA6rtf89bjiz6UkpfU5HcEL8YJIw==}
132 | engines: {node: ^12.0.0 || ^14.0.0 || >= 16.0.0}
133 | hasBin: true
134 | dependencies:
135 | cross-spawn: 7.0.3
136 | fast-glob: 3.2.11
137 | fs-extra: 10.1.0
138 | hosted-git-info: 4.1.0
139 | lilconfig: 2.0.5
140 | dev: true
141 |
142 | /cliui/7.0.4:
143 | resolution: {integrity: sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==}
144 | dependencies:
145 | string-width: 4.2.3
146 | strip-ansi: 6.0.1
147 | wrap-ansi: 7.0.0
148 | dev: true
149 |
150 | /color-convert/2.0.1:
151 | resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==}
152 | engines: {node: '>=7.0.0'}
153 | dependencies:
154 | color-name: 1.1.4
155 | dev: true
156 |
157 | /color-name/1.1.4:
158 | resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==}
159 | dev: true
160 |
161 | /concat-map/0.0.1:
162 | resolution: {integrity: sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=}
163 | dev: true
164 |
165 | /convert-source-map/1.8.0:
166 | resolution: {integrity: sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA==}
167 | dependencies:
168 | safe-buffer: 5.1.2
169 | dev: true
170 |
171 | /cross-env/7.0.3:
172 | resolution: {integrity: sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==}
173 | engines: {node: '>=10.14', npm: '>=6', yarn: '>=1'}
174 | hasBin: true
175 | dependencies:
176 | cross-spawn: 7.0.3
177 | dev: true
178 |
179 | /cross-spawn/7.0.3:
180 | resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==}
181 | engines: {node: '>= 8'}
182 | dependencies:
183 | path-key: 3.1.1
184 | shebang-command: 2.0.0
185 | which: 2.0.2
186 | dev: true
187 |
188 | /dequal/2.0.2:
189 | resolution: {integrity: sha512-q9K8BlJVxK7hQYqa6XISGmBZbtQQWVXSrRrWreHC94rMt1QL/Impruc+7p2CYSYuVIUr+YCt6hjrs1kkdJRTug==}
190 | engines: {node: '>=6'}
191 | dev: true
192 |
193 | /diff/5.0.0:
194 | resolution: {integrity: sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==}
195 | engines: {node: '>=0.3.1'}
196 | dev: true
197 |
198 | /emoji-regex/8.0.0:
199 | resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==}
200 | dev: true
201 |
202 | /escalade/3.1.1:
203 | resolution: {integrity: sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==}
204 | engines: {node: '>=6'}
205 | dev: true
206 |
207 | /esmock/1.7.5:
208 | resolution: {integrity: sha512-vvPplt3s8nSQ+Pre8QiMFj3NogTwjQRQCv90DQzjlqfNUoyA0Z72tJux9MRUQKlGc4omd2Dsp27OJ6XXIucgmA==}
209 | dependencies:
210 | resolvewithplus: 0.8.0
211 | dev: true
212 |
213 | /fast-glob/3.2.11:
214 | resolution: {integrity: sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew==}
215 | engines: {node: '>=8.6.0'}
216 | dependencies:
217 | '@nodelib/fs.stat': 2.0.5
218 | '@nodelib/fs.walk': 1.2.8
219 | glob-parent: 5.1.2
220 | merge2: 1.4.1
221 | micromatch: 4.0.5
222 | dev: true
223 |
224 | /fastq/1.13.0:
225 | resolution: {integrity: sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==}
226 | dependencies:
227 | reusify: 1.0.4
228 | dev: true
229 |
230 | /fill-range/7.0.1:
231 | resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==}
232 | engines: {node: '>=8'}
233 | dependencies:
234 | to-regex-range: 5.0.1
235 | dev: true
236 |
237 | /find-up/5.0.0:
238 | resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==}
239 | engines: {node: '>=10'}
240 | dependencies:
241 | locate-path: 6.0.0
242 | path-exists: 4.0.0
243 | dev: true
244 |
245 | /foreground-child/2.0.0:
246 | resolution: {integrity: sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==}
247 | engines: {node: '>=8.0.0'}
248 | dependencies:
249 | cross-spawn: 7.0.3
250 | signal-exit: 3.0.7
251 | dev: true
252 |
253 | /fs-extra/10.1.0:
254 | resolution: {integrity: sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==}
255 | engines: {node: '>=12'}
256 | dependencies:
257 | graceful-fs: 4.2.10
258 | jsonfile: 6.1.0
259 | universalify: 2.0.0
260 | dev: true
261 |
262 | /fs.realpath/1.0.0:
263 | resolution: {integrity: sha1-FQStJSMVjKpA20onh8sBQRmU6k8=}
264 | dev: true
265 |
266 | /get-caller-file/2.0.5:
267 | resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==}
268 | engines: {node: 6.* || 8.* || >= 10.*}
269 | dev: true
270 |
271 | /glob-parent/5.1.2:
272 | resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==}
273 | engines: {node: '>= 6'}
274 | dependencies:
275 | is-glob: 4.0.3
276 | dev: true
277 |
278 | /glob/7.2.0:
279 | resolution: {integrity: sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==}
280 | dependencies:
281 | fs.realpath: 1.0.0
282 | inflight: 1.0.6
283 | inherits: 2.0.4
284 | minimatch: 3.1.2
285 | once: 1.4.0
286 | path-is-absolute: 1.0.1
287 | dev: true
288 |
289 | /graceful-fs/4.2.10:
290 | resolution: {integrity: sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==}
291 | dev: true
292 |
293 | /has-flag/4.0.0:
294 | resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==}
295 | engines: {node: '>=8'}
296 | dev: true
297 |
298 | /hosted-git-info/4.1.0:
299 | resolution: {integrity: sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==}
300 | engines: {node: '>=10'}
301 | dependencies:
302 | lru-cache: 6.0.0
303 | dev: true
304 |
305 | /html-escaper/2.0.2:
306 | resolution: {integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==}
307 | dev: true
308 |
309 | /inflight/1.0.6:
310 | resolution: {integrity: sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=}
311 | dependencies:
312 | once: 1.4.0
313 | wrappy: 1.0.2
314 | dev: true
315 |
316 | /inherits/2.0.4:
317 | resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==}
318 | dev: true
319 |
320 | /is-extglob/2.1.1:
321 | resolution: {integrity: sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=}
322 | engines: {node: '>=0.10.0'}
323 | dev: true
324 |
325 | /is-fullwidth-code-point/3.0.0:
326 | resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==}
327 | engines: {node: '>=8'}
328 | dev: true
329 |
330 | /is-glob/4.0.3:
331 | resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==}
332 | engines: {node: '>=0.10.0'}
333 | dependencies:
334 | is-extglob: 2.1.1
335 | dev: true
336 |
337 | /is-number/7.0.0:
338 | resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
339 | engines: {node: '>=0.12.0'}
340 | dev: true
341 |
342 | /isexe/2.0.0:
343 | resolution: {integrity: sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=}
344 | dev: true
345 |
346 | /istanbul-lib-coverage/3.2.0:
347 | resolution: {integrity: sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==}
348 | engines: {node: '>=8'}
349 | dev: true
350 |
351 | /istanbul-lib-report/3.0.0:
352 | resolution: {integrity: sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==}
353 | engines: {node: '>=8'}
354 | dependencies:
355 | istanbul-lib-coverage: 3.2.0
356 | make-dir: 3.1.0
357 | supports-color: 7.2.0
358 | dev: true
359 |
360 | /istanbul-reports/3.1.4:
361 | resolution: {integrity: sha512-r1/DshN4KSE7xWEknZLLLLDn5CJybV3nw01VTkp6D5jzLuELlcbudfj/eSQFvrKsJuTVCGnePO7ho82Nw9zzfw==}
362 | engines: {node: '>=8'}
363 | dependencies:
364 | html-escaper: 2.0.2
365 | istanbul-lib-report: 3.0.0
366 | dev: true
367 |
368 | /jsonfile/6.1.0:
369 | resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==}
370 | dependencies:
371 | universalify: 2.0.0
372 | optionalDependencies:
373 | graceful-fs: 4.2.10
374 | dev: true
375 |
376 | /kleur/4.1.4:
377 | resolution: {integrity: sha512-8QADVssbrFjivHWQU7KkMgptGTl6WAcSdlbBPY4uNF+mWr6DGcKrvY2w4FQJoXch7+fKMjj0dRrL75vk3k23OA==}
378 | engines: {node: '>=6'}
379 | dev: true
380 |
381 | /lilconfig/2.0.5:
382 | resolution: {integrity: sha512-xaYmXZtTHPAw5m+xLN8ab9C+3a8YmV3asNSPOATITbtwrfbwaLJj8h66H1WMIpALCkqsIzK3h7oQ+PdX+LQ9Eg==}
383 | engines: {node: '>=10'}
384 | dev: true
385 |
386 | /locate-path/6.0.0:
387 | resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==}
388 | engines: {node: '>=10'}
389 | dependencies:
390 | p-locate: 5.0.0
391 | dev: true
392 |
393 | /lru-cache/6.0.0:
394 | resolution: {integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==}
395 | engines: {node: '>=10'}
396 | dependencies:
397 | yallist: 4.0.0
398 | dev: true
399 |
400 | /make-dir/3.1.0:
401 | resolution: {integrity: sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==}
402 | engines: {node: '>=8'}
403 | dependencies:
404 | semver: 6.3.0
405 | dev: true
406 |
407 | /merge2/1.4.1:
408 | resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==}
409 | engines: {node: '>= 8'}
410 | dev: true
411 |
412 | /micromatch/4.0.5:
413 | resolution: {integrity: sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==}
414 | engines: {node: '>=8.6'}
415 | dependencies:
416 | braces: 3.0.2
417 | picomatch: 2.3.1
418 | dev: true
419 |
420 | /minimatch/3.1.2:
421 | resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==}
422 | dependencies:
423 | brace-expansion: 1.1.11
424 | dev: true
425 |
426 | /mri/1.2.0:
427 | resolution: {integrity: sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==}
428 | engines: {node: '>=4'}
429 | dev: true
430 |
431 | /nanodelay/2.0.2:
432 | resolution: {integrity: sha512-6AS5aCSXsjoxq2Jr9CdaAeT60yoYDOTp6po9ziqeOeY6vf6uTEHYSqWql6EFILrM3fEfXgkZ4KqE9L0rTm/wlA==}
433 | engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0}
434 | dev: true
435 |
436 | /nanoid/3.3.3:
437 | resolution: {integrity: sha512-p1sjXuopFs0xg+fPASzQ28agW1oHD7xDsd9Xkf3T15H3c/cifrFHVwrh74PdoklAPi+i7MdRsE47vm2r6JoB+w==}
438 | engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1}
439 | hasBin: true
440 | dev: true
441 |
442 | /once/1.4.0:
443 | resolution: {integrity: sha1-WDsap3WWHUsROsF9nFC6753Xa9E=}
444 | dependencies:
445 | wrappy: 1.0.2
446 | dev: true
447 |
448 | /p-limit/3.1.0:
449 | resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==}
450 | engines: {node: '>=10'}
451 | dependencies:
452 | yocto-queue: 0.1.0
453 | dev: true
454 |
455 | /p-locate/5.0.0:
456 | resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==}
457 | engines: {node: '>=10'}
458 | dependencies:
459 | p-limit: 3.1.0
460 | dev: true
461 |
462 | /path-exists/4.0.0:
463 | resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==}
464 | engines: {node: '>=8'}
465 | dev: true
466 |
467 | /path-is-absolute/1.0.1:
468 | resolution: {integrity: sha1-F0uSaHNVNP+8es5r9TpanhtcX18=}
469 | engines: {node: '>=0.10.0'}
470 | dev: true
471 |
472 | /path-key/3.1.1:
473 | resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==}
474 | engines: {node: '>=8'}
475 | dev: true
476 |
477 | /picocolors/1.0.0:
478 | resolution: {integrity: sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==}
479 | dev: false
480 |
481 | /picomatch/2.3.1:
482 | resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==}
483 | engines: {node: '>=8.6'}
484 | dev: true
485 |
486 | /prettier/2.6.2:
487 | resolution: {integrity: sha512-PkUpF+qoXTqhOeWL9fu7As8LXsIUZ1WYaJiY/a7McAQzxjk82OF0tibkFXVCDImZtWxbvojFjerkiLb0/q8mew==}
488 | engines: {node: '>=10.13.0'}
489 | hasBin: true
490 | dev: true
491 |
492 | /queue-microtask/1.2.3:
493 | resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==}
494 | dev: true
495 |
496 | /require-directory/2.1.1:
497 | resolution: {integrity: sha1-jGStX9MNqxyXbiNE/+f3kqam30I=}
498 | engines: {node: '>=0.10.0'}
499 | dev: true
500 |
501 | /resolvewithplus/0.8.0:
502 | resolution: {integrity: sha512-k2ZTNmYyHjSVMswoitU9I5C0It/Jgs/4+oVSz2Tg9zgW4oURz3FSeHjAAbSau8W+njdBwfWVjFxl45B9cprWTw==}
503 | dev: true
504 |
505 | /reusify/1.0.4:
506 | resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==}
507 | engines: {iojs: '>=1.0.0', node: '>=0.10.0'}
508 | dev: true
509 |
510 | /rimraf/3.0.2:
511 | resolution: {integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==}
512 | hasBin: true
513 | dependencies:
514 | glob: 7.2.0
515 | dev: true
516 |
517 | /run-parallel/1.2.0:
518 | resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==}
519 | dependencies:
520 | queue-microtask: 1.2.3
521 | dev: true
522 |
523 | /sade/1.8.1:
524 | resolution: {integrity: sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==}
525 | engines: {node: '>=6'}
526 | dependencies:
527 | mri: 1.2.0
528 | dev: true
529 |
530 | /safe-buffer/5.1.2:
531 | resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==}
532 | dev: true
533 |
534 | /semver/6.3.0:
535 | resolution: {integrity: sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==}
536 | hasBin: true
537 | dev: true
538 |
539 | /shebang-command/2.0.0:
540 | resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==}
541 | engines: {node: '>=8'}
542 | dependencies:
543 | shebang-regex: 3.0.0
544 | dev: true
545 |
546 | /shebang-regex/3.0.0:
547 | resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==}
548 | engines: {node: '>=8'}
549 | dev: true
550 |
551 | /signal-exit/3.0.7:
552 | resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==}
553 | dev: true
554 |
555 | /string-width/4.2.3:
556 | resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==}
557 | engines: {node: '>=8'}
558 | dependencies:
559 | emoji-regex: 8.0.0
560 | is-fullwidth-code-point: 3.0.0
561 | strip-ansi: 6.0.1
562 | dev: true
563 |
564 | /strip-ansi/6.0.1:
565 | resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==}
566 | engines: {node: '>=8'}
567 | dependencies:
568 | ansi-regex: 5.0.1
569 | dev: true
570 |
571 | /supports-color/7.2.0:
572 | resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==}
573 | engines: {node: '>=8'}
574 | dependencies:
575 | has-flag: 4.0.0
576 | dev: true
577 |
578 | /test-exclude/6.0.0:
579 | resolution: {integrity: sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==}
580 | engines: {node: '>=8'}
581 | dependencies:
582 | '@istanbuljs/schema': 0.1.3
583 | glob: 7.2.0
584 | minimatch: 3.1.2
585 | dev: true
586 |
587 | /to-regex-range/5.0.1:
588 | resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==}
589 | engines: {node: '>=8.0'}
590 | dependencies:
591 | is-number: 7.0.0
592 | dev: true
593 |
594 | /universalify/2.0.0:
595 | resolution: {integrity: sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==}
596 | engines: {node: '>= 10.0.0'}
597 | dev: true
598 |
599 | /uvu/0.5.3:
600 | resolution: {integrity: sha512-brFwqA3FXzilmtnIyJ+CxdkInkY/i4ErvP7uV0DnUVxQcQ55reuHphorpF+tZoVHK2MniZ/VJzI7zJQoc9T9Yw==}
601 | engines: {node: '>=8'}
602 | hasBin: true
603 | dependencies:
604 | dequal: 2.0.2
605 | diff: 5.0.0
606 | kleur: 4.1.4
607 | sade: 1.8.1
608 | dev: true
609 |
610 | /v8-to-istanbul/9.0.0:
611 | resolution: {integrity: sha512-HcvgY/xaRm7isYmyx+lFKA4uQmfUbN0J4M0nNItvzTvH/iQ9kW5j/t4YSR+Ge323/lrgDAWJoF46tzGQHwBHFw==}
612 | engines: {node: '>=10.12.0'}
613 | dependencies:
614 | '@jridgewell/trace-mapping': 0.3.9
615 | '@types/istanbul-lib-coverage': 2.0.4
616 | convert-source-map: 1.8.0
617 | dev: true
618 |
619 | /which/2.0.2:
620 | resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==}
621 | engines: {node: '>= 8'}
622 | hasBin: true
623 | dependencies:
624 | isexe: 2.0.0
625 | dev: true
626 |
627 | /wrap-ansi/7.0.0:
628 | resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==}
629 | engines: {node: '>=10'}
630 | dependencies:
631 | ansi-styles: 4.3.0
632 | string-width: 4.2.3
633 | strip-ansi: 6.0.1
634 | dev: true
635 |
636 | /wrappy/1.0.2:
637 | resolution: {integrity: sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=}
638 | dev: true
639 |
640 | /y18n/5.0.8:
641 | resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==}
642 | engines: {node: '>=10'}
643 | dev: true
644 |
645 | /yallist/4.0.0:
646 | resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==}
647 | dev: true
648 |
649 | /yargs-parser/20.2.9:
650 | resolution: {integrity: sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==}
651 | engines: {node: '>=10'}
652 | dev: true
653 |
654 | /yargs/16.2.0:
655 | resolution: {integrity: sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==}
656 | engines: {node: '>=10'}
657 | dependencies:
658 | cliui: 7.0.4
659 | escalade: 3.1.1
660 | get-caller-file: 2.0.5
661 | require-directory: 2.1.1
662 | string-width: 4.2.3
663 | y18n: 5.0.8
664 | yargs-parser: 20.2.9
665 | dev: true
666 |
667 | /yocto-queue/0.1.0:
668 | resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==}
669 | engines: {node: '>=10'}
670 | dev: true
671 |
--------------------------------------------------------------------------------
/test/cmd-runner.test.js:
--------------------------------------------------------------------------------
1 | import { is, equal } from 'uvu/assert'
2 | import { homedir } from 'os'
3 | import { join } from 'path'
4 | import esmock from 'esmock'
5 | import { test } from 'uvu'
6 |
7 | import { createStdout } from './utils/index.js'
8 |
9 | let stdout = createStdout()
10 |
11 | test.before.each(() => {
12 | stdout.out = ''
13 | })
14 |
15 | test('should create runner and resolve tasks', async () => {
16 | const { createCmdRunner } = await esmock('../lib/cmd-runner.js')
17 |
18 | let runner = createCmdRunner({
19 | rootPath: join(homedir(), 'test'),
20 | cwd: join(homedir(), 'test'),
21 | files: ['a.js', '../../b.css'],
22 | config: { '*.js': ['prettier --write'], '../*.css': 'prettier --write' },
23 | stream: stdout,
24 | })
25 |
26 | const cmdTasks = await runner.generateCmdTasks()
27 |
28 | is(
29 | JSON.stringify(cmdTasks),
30 | JSON.stringify([
31 | {
32 | title: '*.js\u001b[2m - 1 file\u001b[22m',
33 | file_count: 1,
34 | tasks: [{ title: 'prettier --write', pattern: '*.js' }],
35 | },
36 | { title: '../*.css\u001b[2m - no files\u001b[22m', file_count: 0, tasks: [] },
37 | ])
38 | )
39 | })
40 |
41 | test('should handle run error', async () => {
42 | const { createCmdRunner } = await esmock('../lib/cmd-runner.js', {
43 | '../lib/executor.js': {
44 | executor: async () => Promise.reject('Run error'),
45 | },
46 | })
47 |
48 | let runner = await createCmdRunner({
49 | repoPath: 'test',
50 | files: ['a.js', '../../b.css'],
51 | config: { '*.js': ['prettier --write', 'prettier --write'], '*.css': () => 'prettier --write' },
52 | stream: stdout,
53 | })
54 |
55 | const cmdTasks = await runner.generateCmdTasks()
56 | const task = { tasks: cmdTasks }
57 |
58 | try {
59 | await runner.run(task)
60 | } catch (error) {
61 | is(
62 | error.message,
63 | '\x1B[31m*.js\x1B[39m \x1B[2m>\x1B[22m \x1B[31mprettier --write\x1B[39m:\nRun error'
64 | )
65 | equal(
66 | task.tasks.map((t) => ({ state: t.state })),
67 | [{ state: 'fail' }, { state: 'warn' }]
68 | )
69 | }
70 | })
71 |
72 | test('should handle run success', async () => {
73 | const { createCmdRunner } = await esmock('../lib/cmd-runner.js', {
74 | '../lib/executor.js': {
75 | executor: async () => Promise.resolve('Run done'),
76 | },
77 | })
78 |
79 | let runner = await createCmdRunner({
80 | repoPath: 'test',
81 | files: ['a.js', 'b.js', '../../b.css'],
82 | config: { '*.js': ['prettier --write', 'prettier --write'], '*.css': () => 'prettier --write' },
83 | stream: stdout,
84 | })
85 |
86 | const cmdTasks = await runner.generateCmdTasks()
87 | const task = { tasks: cmdTasks }
88 |
89 | await runner.run(task)
90 |
91 | equal(
92 | task.tasks.map((t) => ({ state: t.state })),
93 | [{ state: 'done' }, { state: 'warn' }]
94 | )
95 | })
96 |
97 | test.run()
98 |
--------------------------------------------------------------------------------
/test/config.test.js:
--------------------------------------------------------------------------------
1 | import { equal, is } from 'uvu/assert'
2 | import { homedir } from 'os'
3 | import esmock from 'esmock'
4 | import { join } from 'path'
5 | import { test } from 'uvu'
6 |
7 | import { getConfig, validConfig } from '../lib/config.js'
8 | import { fixture } from './utils/index.js'
9 |
10 | test('should return "undefined" when config file is not found', async () => {
11 | is(await getConfig(join(homedir(), 'test')), undefined)
12 | })
13 |
14 | test('should load config from "package.json"', async () => {
15 | equal(await getConfig(fixture('config/test-project/dir')), {
16 | '*': 'my-tasks',
17 | })
18 | })
19 |
20 | test('should return "object" config', async () => {
21 | equal(await getConfig(process.cwd(), { '*': 'my-tasks' }), {
22 | '*': 'my-tasks',
23 | })
24 | })
25 |
26 | test('should load JSON config file', async () => {
27 | let config = await getConfig(fixture('config/json'))
28 | equal(config, { '*': 'my-tasks' })
29 | })
30 |
31 | test('should load ESM config file from .js file', async () => {
32 | let config = await getConfig(fixture('config/esm-in-js'))
33 | equal(config['*'](), 'my-tasks')
34 | })
35 |
36 | test('should load ESM config file from .mjs file', async () => {
37 | let config = await getConfig(fixture('config/mjs'))
38 | equal(config['*'](), 'my-tasks')
39 | })
40 |
41 | test('should load CJS config file from .cjs file', async () => {
42 | let config = await getConfig(fixture('config/cjs'))
43 | equal(config, { '*': 'my-tasks' })
44 | })
45 |
46 | test('should load CJS config file from absolute path', async () => {
47 | let config = await getConfig(process.cwd(), fixture('config/cjs/nano-staged.cjs'))
48 | equal(config, { '*': 'my-tasks' })
49 | })
50 |
51 | test('should load CJS config file from relative path', async () => {
52 | let config = await getConfig(
53 | process.cwd(),
54 | join('test', 'fixtures', 'config', 'cjs', 'nano-staged.cjs')
55 | )
56 | equal(config, { '*': 'my-tasks' })
57 | })
58 |
59 | test('should load no extension config file', async () => {
60 | let config = await getConfig(fixture('config/no-ext'))
61 | equal(config, { '*': 'my-tasks' })
62 | })
63 |
64 | test('should return "undefined" when error', async () => {
65 | const { getConfig } = await esmock('../lib/config.js', {
66 | fs: {
67 | promises: {
68 | readFile: async () => Promise.reject(),
69 | },
70 | },
71 | })
72 |
73 | is(await getConfig(), undefined)
74 | })
75 |
76 | test('config undefined', async () => {
77 | is(validConfig(), false)
78 | })
79 |
80 | test('config empty', async () => {
81 | is(validConfig({}), false)
82 | })
83 |
84 | test('config single cmd', async () => {
85 | is(
86 | validConfig({
87 | '*': 'my-tasks',
88 | }),
89 | true
90 | )
91 | })
92 |
93 | test('config array cmds', async () => {
94 | is(
95 | validConfig({
96 | '*': ['my-tasks'],
97 | }),
98 | true
99 | )
100 | })
101 |
102 | test('config glob empty', async () => {
103 | is(
104 | validConfig({
105 | '': ['my-tasks'],
106 | }),
107 | false
108 | )
109 | })
110 |
111 | test('config single cmd empty', async () => {
112 | is(
113 | validConfig({
114 | '*': '',
115 | }),
116 | false
117 | )
118 | })
119 |
120 | test('config array cmds empty', async () => {
121 | is(
122 | validConfig({
123 | '*': ['', ''],
124 | }),
125 | false
126 | )
127 | })
128 |
129 | test('config cmd not string', async () => {
130 | is(
131 | validConfig({
132 | '': 1,
133 | }),
134 | false
135 | )
136 | })
137 |
138 | test('config glob and cmd empty', async () => {
139 | is(
140 | validConfig({
141 | '': '',
142 | }),
143 | false
144 | )
145 | })
146 |
147 | test('config one task invalid', async () => {
148 | is(
149 | validConfig({
150 | '*': '',
151 | '*.js': 'my-task',
152 | }),
153 | false
154 | )
155 | })
156 |
157 | test.run()
158 |
--------------------------------------------------------------------------------
/test/errors.test.js:
--------------------------------------------------------------------------------
1 | import { is } from 'uvu/assert'
2 | import { test } from 'uvu'
3 |
4 | import { NanoStagedError, TaskRunnerError } from '../lib/errors.js'
5 | import { createStdout } from './utils/index.js'
6 |
7 | let stdout = createStdout()
8 |
9 | test.before.each(() => {
10 | stdout.out = ''
11 | })
12 |
13 | test('has mark', () => {
14 | let err = new NanoStagedError('noConfig')
15 | is(err.name, 'NanoStagedError')
16 | })
17 |
18 | test('has message', () => {
19 | let err = new NanoStagedError('noConfig')
20 | is(err.message, 'Create Nano Staged config.')
21 | })
22 |
23 | test('has type', () => {
24 | let err = new NanoStagedError('noConfig')
25 | is(err.type, 'noConfig')
26 | })
27 |
28 | test('has error for unknown option', () => {
29 | let err = new NanoStagedError('noFileConfig', 'no-config.js')
30 | is(err.message, 'Nano Staged config file *no-config.js* is not found.')
31 | })
32 |
33 | test('has error for task', () => {
34 | let err = new TaskRunnerError('task error')
35 | is(err.message, 'task error')
36 | })
37 |
38 | test.run()
39 |
--------------------------------------------------------------------------------
/test/fixtures/config/cjs-in-js/nano-staged.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | '*': 'my-tasks',
3 | }
4 |
--------------------------------------------------------------------------------
/test/fixtures/config/cjs/nano-staged.cjs:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | '*': 'my-tasks',
3 | }
4 |
--------------------------------------------------------------------------------
/test/fixtures/config/esm-in-js/nano-staged.js:
--------------------------------------------------------------------------------
1 | export default {
2 | '*': () => {
3 | return 'my-tasks'
4 | },
5 | }
6 |
--------------------------------------------------------------------------------
/test/fixtures/config/json/nano-staged.json:
--------------------------------------------------------------------------------
1 | {
2 | "*": "my-tasks"
3 | }
4 |
--------------------------------------------------------------------------------
/test/fixtures/config/mjs/nano-staged.mjs:
--------------------------------------------------------------------------------
1 | export default () => {
2 | return 'my-tasks'
3 | }
4 |
--------------------------------------------------------------------------------
/test/fixtures/config/no-ext/.nanostagedrc:
--------------------------------------------------------------------------------
1 | {
2 | "*": "my-tasks"
3 | }
4 |
--------------------------------------------------------------------------------
/test/fixtures/config/test-project/dir/index.js:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/usmanyunusov/nano-staged/f5cb38904d1e25323ea4be6726f440f47684244a/test/fixtures/config/test-project/dir/index.js
--------------------------------------------------------------------------------
/test/fixtures/config/test-project/index.js:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/usmanyunusov/nano-staged/f5cb38904d1e25323ea4be6726f440f47684244a/test/fixtures/config/test-project/index.js
--------------------------------------------------------------------------------
/test/fixtures/config/test-project/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "nano-staged": {
3 | "*": "my-tasks"
4 | }
5 | }
6 |
--------------------------------------------------------------------------------
/test/fixtures/simple/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules/
--------------------------------------------------------------------------------
/test/git-workflow.test.js:
--------------------------------------------------------------------------------
1 | import { join, resolve } from 'path'
2 | import { is } from 'uvu/assert'
3 | import esmock from 'esmock'
4 | import { test } from 'uvu'
5 | import fs from 'fs-extra'
6 |
7 | import { writeFile, makeDir, appendFile, fixture, removeFile } from './utils/index.js'
8 | import { createGitWorkflow } from '../lib/git-workflow.js'
9 | import { createGit } from '../lib/git.js'
10 |
11 | let cwd = fixture('simple/git-workflow-test')
12 |
13 | async function execGit(args) {
14 | let git = createGit(cwd)
15 | return await git.exec(args, { cwd })
16 | }
17 |
18 | test.before.each(async () => {
19 | await makeDir(cwd)
20 | await execGit(['init'])
21 | await execGit(['config', 'user.name', '"test"'])
22 | await execGit(['config', 'user.email', '"test@test.com"'])
23 | await appendFile('README.md', '# Test\n', cwd)
24 | await execGit(['add', 'README.md'])
25 | await execGit(['commit', '-m initial commit'])
26 | })
27 |
28 | test.after.each(async () => {
29 | await removeFile(cwd)
30 | })
31 |
32 | test('should create patch file for original state', async () => {
33 | let gitWorkflow = createGitWorkflow({
34 | dotPath: resolve(cwd, './.git'),
35 | allowEmpty: false,
36 | rootPath: cwd,
37 | })
38 | await gitWorkflow.backupOriginalState()
39 |
40 | is(fs.existsSync(resolve(cwd, './.git/nano-staged.patch')), true)
41 | })
42 |
43 | test('should backup original state handle errors', async () => {
44 | let gitWorkflow = createGitWorkflow({
45 | dotPath: resolve(cwd, './.test'),
46 | allowEmpty: false,
47 | rootPath: cwd,
48 | })
49 |
50 | try {
51 | await gitWorkflow.backupOriginalState()
52 | } catch (error) {
53 | is(!!error, true)
54 | }
55 | })
56 |
57 | test('should create patch file for unstaged files', async () => {
58 | let gitWorkflow = createGitWorkflow({
59 | dotPath: resolve(cwd, './.git'),
60 | allowEmpty: false,
61 | rootPath: cwd,
62 | })
63 |
64 | await writeFile('README.md', '# Test\n# Test', cwd)
65 | await gitWorkflow.backupUnstagedFiles([join(cwd, 'README.md')])
66 |
67 | is(fs.existsSync(resolve(cwd, './.git/nano-staged_partial.patch')), true)
68 | })
69 |
70 | test('should backup unstaged files handle errors', async () => {
71 | let gitWorkflow = createGitWorkflow({
72 | dotPath: resolve(cwd, './.git'),
73 | allowEmpty: false,
74 | rootPath: cwd,
75 | })
76 |
77 | try {
78 | await gitWorkflow.backupUnstagedFiles([`random_file-${Date.now().toString()}`])
79 | } catch (error) {
80 | is(!!error, true)
81 | }
82 | })
83 |
84 | test('should apply changed files', async () => {
85 | let gitWorkflow = createGitWorkflow({
86 | dotPath: resolve(cwd, './.git'),
87 | allowEmpty: false,
88 | rootPath: cwd,
89 | })
90 |
91 | await writeFile('README.md', '# Test\n# Test', cwd)
92 | await gitWorkflow.applyModifications([join(cwd, 'README.md')])
93 |
94 | is(await execGit(['diff', '--name-only', '--staged']), 'README.md\n')
95 | })
96 |
97 | test('should apply empty files', async () => {
98 | let gitWorkflow = createGitWorkflow({
99 | dotPath: resolve(cwd, './.git'),
100 | allowEmpty: false,
101 | rootPath: cwd,
102 | })
103 |
104 | try {
105 | await gitWorkflow.applyModifications([join(cwd, 'README.md')])
106 | } catch (error) {
107 | is(error, 'Prevented an empty git commit!')
108 | }
109 | })
110 |
111 | test('should apply changed files handle errors', async () => {
112 | let gitWorkflow = createGitWorkflow({
113 | dotPath: resolve(cwd, './.git'),
114 | allowEmpty: false,
115 | rootPath: cwd,
116 | })
117 |
118 | try {
119 | await writeFile('README.md', '# Test\n# Test', cwd)
120 | await gitWorkflow.applyModifications([join(cwd, 'error.md')])
121 | } catch (error) {
122 | is(!!error, true)
123 | }
124 | })
125 |
126 | test('should restore unstaged files handle errors', async () => {
127 | let gitWorkflow = createGitWorkflow({
128 | dotPath: resolve(cwd, './.git'),
129 | allowEmpty: false,
130 | rootPath: cwd,
131 | })
132 |
133 | try {
134 | await gitWorkflow.restoreUnstagedFiles([`random_file-${Date.now().toString()}`])
135 | } catch (error) {
136 | is(error, 'Merge conflict!!! Unstaged changes not restored.')
137 | }
138 | })
139 |
140 | test('should restore original state', async () => {
141 | let gitWorkflow = createGitWorkflow({
142 | dotPath: resolve(cwd, './.git'),
143 | allowEmpty: false,
144 | rootPath: cwd,
145 | })
146 |
147 | await writeFile('README.md', '# Test\n# Test', cwd)
148 | await gitWorkflow.backupOriginalState()
149 | await gitWorkflow.restoreOriginalState()
150 |
151 | is(await execGit(['diff', '--name-only', 'HEAD']), 'README.md\n')
152 | })
153 |
154 | test('should restore original state handle error', async () => {
155 | const { createGitWorkflow } = await esmock('../lib/git-workflow.js', {
156 | '../lib/git.js': {
157 | createGit: () => ({
158 | checkout: async () => Promise.reject('Checkout error'),
159 | }),
160 | },
161 | })
162 |
163 | let gitWorkflow = createGitWorkflow({
164 | dotPath: resolve(cwd, './.git'),
165 | allowEmpty: false,
166 | rootPath: cwd,
167 | })
168 |
169 | await gitWorkflow.restoreOriginalState().catch((error) => {
170 | is(error, 'Checkout error')
171 | })
172 | })
173 |
174 | test('should remove original and unstaged patch', async () => {
175 | let gitWorkflow = createGitWorkflow({
176 | dotPath: resolve(cwd, './.git'),
177 | allowEmpty: false,
178 | rootPath: cwd,
179 | })
180 |
181 | await writeFile('README.md', '# Test\n# Test', cwd)
182 | await gitWorkflow.backupOriginalState()
183 | await gitWorkflow.backupUnstagedFiles([[join(cwd, 'README.md')]])
184 | await gitWorkflow.cleanUp()
185 |
186 | is(fs.existsSync(resolve(cwd, './.git/nano-staged_partial.patch')), false)
187 | is(fs.existsSync(resolve(cwd, './.git/nano-staged.patch')), false)
188 | })
189 |
190 | test('should clean up handle errors', async () => {
191 | let gitWorkflow = createGitWorkflow({
192 | dotPath: resolve(cwd, './.git'),
193 | allowEmpty: false,
194 | rootPath: cwd,
195 | })
196 |
197 | gitWorkflow.hasPatch = () => {
198 | throw new Error('Clean up error')
199 | }
200 |
201 | try {
202 | await gitWorkflow.cleanUp()
203 | } catch (error) {
204 | is(error.message, 'Clean up error')
205 | }
206 | })
207 |
208 | test('hasPatch return false when no patch file', async () => {
209 | let gitWorkflow = createGitWorkflow({
210 | dotPath: resolve(cwd, './.git'),
211 | allowEmpty: false,
212 | rootPath: cwd,
213 | })
214 |
215 | is(gitWorkflow.hasPatch('./test.patch'), false)
216 | })
217 |
218 | test.run()
219 |
--------------------------------------------------------------------------------
/test/git.test.js:
--------------------------------------------------------------------------------
1 | import { equal, is } from 'uvu/assert'
2 | import { join, resolve } from 'path'
3 | import { test } from 'uvu'
4 | import fs from 'fs-extra'
5 |
6 | import { writeFile, makeDir, appendFile, fixture, removeFile } from './utils/index.js'
7 | import { createGit } from '../lib/git.js'
8 |
9 | let cwd = fixture('simple/git-test')
10 | let patchPath = join(cwd, 'nano-staged.patch')
11 |
12 | async function execGit(args) {
13 | let git = createGit(cwd)
14 | await git.exec(args, { cwd })
15 | }
16 |
17 | test.before.each(async () => {
18 | await makeDir(cwd)
19 | await execGit(['init'])
20 | await execGit(['config', 'user.name', '"test"'])
21 | await execGit(['config', 'user.email', '"test@test.com"'])
22 | await appendFile('README.md', '# Test\n', cwd)
23 | await execGit(['add', 'README.md'])
24 | await execGit(['commit', '-m initial commit'])
25 | })
26 |
27 | test.after.each(async () => {
28 | await removeFile(cwd)
29 | })
30 |
31 | test('should return "null" when git dir is not found', async () => {
32 | let git = createGit(cwd)
33 | git.exec = async () => null
34 |
35 | let git_paths = await git.getGitPaths()
36 |
37 | is(git_paths.root, null)
38 | is(git_paths.dot, null)
39 | })
40 |
41 | test('should return "null" when run error', async () => {
42 | let git = createGit(cwd)
43 | git.exec = async () => Promise.reject()
44 |
45 | let git_paths = await git.getGitPaths()
46 |
47 | is(git_paths.root, null)
48 | is(git_paths.dot, null)
49 | })
50 |
51 | test('should return path when git dir is found', async () => {
52 | let git = createGit(cwd)
53 | let git_paths = await git.getGitPaths()
54 |
55 | is(git_paths.root, fixture('simple/git-test'))
56 | is(
57 | git_paths.dot,
58 | process.platform === 'win32'
59 | ? fixture('simple/git-test') + '\\.git'
60 | : fixture('simple/git-test') + '/.git'
61 | )
62 | })
63 |
64 | test('should create patch for file', async () => {
65 | let git = createGit(cwd)
66 |
67 | await writeFile('README.md', '# Test\n## Test', cwd)
68 | await git.diff(patchPath)
69 |
70 | let patch = await fs.readFile(patchPath)
71 | is(
72 | patch.toString(),
73 | 'diff --git a/README.md b/README.md\n' +
74 | 'index 8ae0569..a07c500 100644\n' +
75 | '--- a/README.md\n' +
76 | '+++ b/README.md\n' +
77 | '@@ -1,0 +2 @@\n' +
78 | '+## Test\n' +
79 | '\\ No newline at end of file\n'
80 | )
81 | })
82 |
83 | test('should create patch for files', async () => {
84 | let git = createGit(cwd)
85 |
86 | await appendFile('a.js', 'let a = {};', cwd)
87 | await git.add(join(cwd, 'a.js'))
88 | await removeFile(join(cwd, 'a.js'))
89 | await git.diff(patchPath, [join(cwd, 'a.js')])
90 |
91 | let patch = await fs.readFile(patchPath)
92 | is(
93 | patch.toString(),
94 | 'diff --git a/a.js b/a.js\n' +
95 | 'deleted file mode 100644\n' +
96 | 'index 36b56ef..0000000\n' +
97 | '--- a/a.js\n' +
98 | '+++ /dev/null\n' +
99 | '@@ -1 +0,0 @@\n' +
100 | '-let a = {};\n' +
101 | '\\ No newline at end of file\n'
102 | )
103 | })
104 |
105 | test('should checkout files', async () => {
106 | let git = createGit(cwd)
107 |
108 | await appendFile('a.js', 'let a = {};', cwd)
109 | await git.add('.')
110 | await writeFile('a.js', 'let b = {};', cwd)
111 | await git.checkout(join(cwd, 'a.js'))
112 |
113 | equal(await git.status(), [{ x: 65, y: 32, path: 'a.js', rename: undefined }])
114 | })
115 |
116 | test('should apply patch file', async () => {
117 | let git = createGit(cwd)
118 |
119 | await writeFile('README.md', '# Test\n## Test', cwd)
120 | await git.diff(patchPath)
121 | await git.apply(patchPath)
122 |
123 | is((await fs.stat(patchPath)).isFile(), true)
124 | })
125 |
126 | test('should error when patch file cannot be applied', async () => {
127 | let git = createGit(cwd)
128 |
129 | try {
130 | await git.apply('test.patch', true)
131 | } catch (error) {
132 | is(error, "error: can't open patch 'test.patch': No such file or directory\n")
133 | }
134 | })
135 |
136 | test('should add files', async () => {
137 | let git = createGit(cwd)
138 |
139 | await appendFile('a.js', 'let a = {};', cwd)
140 | await git.add(['.'])
141 |
142 | equal(await git.status(), [{ x: 65, y: 32, path: 'a.js', rename: undefined }])
143 | })
144 |
145 | test('should parse status correctly', async () => {
146 | let git = createGit(cwd)
147 |
148 | await appendFile('a.js', 'let a = {};', cwd)
149 | await appendFile('b.js', 'let a = {};', cwd)
150 | await git.add(['b.js'])
151 |
152 | equal(await git.status(), [
153 | { x: 65, y: 32, path: 'b.js', rename: undefined },
154 | { x: 63, y: 63, path: 'a.js', rename: undefined },
155 | ])
156 |
157 | git.exec = async () => ''
158 | equal(await git.status(), [])
159 |
160 | git.exec = async () => ' '
161 | equal(await git.status(), [])
162 |
163 | git.exec = async () => 'M rename.js'
164 | equal(await git.status(), [])
165 |
166 | git.exec = async () => 'RM rename.js'
167 | equal(await git.status(), [])
168 |
169 | git.exec = async () => ' '
170 | equal(await git.status(), [])
171 |
172 | git.exec = async () => {
173 | throw new Error('fatal: not a git repository (or any of the parent directories): .git')
174 | }
175 | equal(await git.status(), [])
176 | })
177 |
178 | test('should diff to file correctly', async () => {
179 | let git = createGit(cwd)
180 |
181 | is(await git.diffFileName(), '')
182 |
183 | await writeFile('README.md', '# Test\n## Test', cwd)
184 | await execGit(['add', 'README.md'])
185 | await execGit(['commit', '-m change README.md'])
186 |
187 | is(await git.diffFileName('HEAD', 'HEAD^1'), 'README.md\x00')
188 |
189 | git.exec = async () => {
190 | throw new Error('Error')
191 | }
192 |
193 | is(await git.diffFileName(), '')
194 | })
195 |
196 | test('should get diff file correctly', async () => {
197 | let git = createGit(cwd)
198 |
199 | git.diffFileName = async () => 'add.js\x00'
200 | equal(await git.changedFiles(), { working: ['add.js'], deleted: [], changed: ['add.js'] })
201 |
202 | git.diffFileName = async () => ''
203 | equal(await git.changedFiles(), { working: [], deleted: [], changed: [] })
204 | })
205 |
206 | test('should get staged files correctly', async () => {
207 | let git = createGit(cwd)
208 |
209 | git.exec = async () =>
210 | '?? new.js\x00A stage.js\x00MM mod.js\x00AM test/add.js\x00RM rename.js\x00origin.js\x00CM' +
211 | ' test/copy.js\x00test/base.js\x00MD remove.js\x00D delete.js\x00'
212 |
213 | equal(await git.stagedFiles(), {
214 | working: ['stage.js', 'mod.js', 'test/add.js', 'rename.js', 'test/copy.js', 'remove.js'],
215 | deleted: ['remove.js'],
216 | changed: ['mod.js', 'test/add.js', 'rename.js', 'test/copy.js'],
217 | })
218 | })
219 |
220 | test('should get unstaged files correctly', async () => {
221 | let git = createGit(cwd)
222 |
223 | git.exec = async () =>
224 | 'A add.js\x00AD add_remove.js\x00MM mod.js\x00?? test/add.js\x00RM rename.js\x00origin.js\x00CM' +
225 | ' test/copy.js\x00test/base.js\x00MD remove.js\x00D delete.js\x00'
226 |
227 | equal(await git.unstagedFiles(), {
228 | working: ['mod.js', 'test/add.js', 'rename.js', 'test/copy.js'],
229 | deleted: [],
230 | changed: ['mod.js', 'test/add.js', 'rename.js', 'test/copy.js'],
231 | })
232 | })
233 |
234 | test('should handle git worktrees', async () => {
235 | let git = createGit(cwd)
236 | let work_tree_dir = resolve(cwd, 'worktree')
237 |
238 | await execGit(['branch', 'test'])
239 | await execGit(['worktree', 'add', work_tree_dir, 'test'])
240 |
241 | equal(await git.getGitPaths({ cwd: work_tree_dir }), {
242 | root: fixture('simple/git-test/worktree'),
243 | dot: fixture('simple/git-test/.git/worktrees/worktree'),
244 | })
245 | })
246 |
247 | test.run()
248 |
--------------------------------------------------------------------------------
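The git.test.js suite above exercises the createGit wrapper directly: add() stages paths, status() returns entries whose x/y fields appear to be the character codes of git's short-status columns (65 = 'A', 63 = '?', 32 = ' '), and stagedFiles()/unstagedFiles()/changedFiles() all resolve to { working, deleted, changed } arrays. A minimal usage sketch assuming only the methods seen in those tests (the staged file name is hypothetical):

import { createGit } from '../lib/git.js'

const git = createGit(process.cwd())

await git.add(['a.js'])             // stage a hypothetical file
const entries = await git.status()  // e.g. [{ x: 65, y: 32, path: 'a.js', rename: undefined }]
const { working, deleted, changed } = await git.stagedFiles()

console.log(entries, working, deleted, changed)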
/test/glob-to-regex.test.js:
--------------------------------------------------------------------------------
1 | import { is } from 'uvu/assert'
2 | import { test } from 'uvu'
3 |
4 | import { globToRegex } from '../lib/glob-to-regex.js'
5 |
6 | function match(glob, path, opts = {}) {
7 | let regex = globToRegex(glob, opts)
8 | return regex.regex.test(path)
9 | }
10 |
11 | test('Standard * matching', () => {
12 | is(match('*', 'foo'), true)
13 | is(match('f*', 'foo'), true)
14 | is(match('*o', 'foo'), true)
15 | is(match('f*uck', 'firetruck'), true)
16 | is(match('uc', 'firetruck'), false)
17 | is(match('f*uck', 'fuck'), true)
18 | })
19 |
20 | test('advanced * matching', () => {
21 | is(match('*.min.js', 'http://example.com/jquery.min.js', { globstar: false }), true)
22 | is(match('*.min.*', 'http://example.com/jquery.min.js', { globstar: false }), true)
23 | is(match('*/js/*.js', 'http://example.com/js/jquery.min.js', { globstar: false }), true)
24 | is(match('*.min.js', 'http://example.com/jquery.min.js'), true)
25 | is(match('*.min.*', 'http://example.com/jquery.min.js'), true)
26 | is(match('*/js/*.js', 'http://example.com/js/jquery.min.js'), true)
27 |
28 | const str = '\\/$^+?.()=!|{},[].*'
29 | is(match(str, str), true)
30 |
31 | is(match('.min.', 'http://example.com/jquery.min.js'), false)
32 | is(match('*.min.*', 'http://example.com/jquery.min.js'), true)
33 | is(match('http:', 'http://example.com/jquery.min.js'), false)
34 | is(match('http:*', 'http://example.com/jquery.min.js'), true)
35 | is(match('min.js', 'http://example.com/jquery.min.js'), false)
36 | is(match('*.min.js', 'http://example.com/jquery.min.js'), true)
37 | is(match('/js*jq*.js', 'http://example.com/js/jquery.min.js'), false)
38 | })
39 |
40 | test('? match one character, no more and no less', () => {
41 | is(match('f?o', 'foo', { extended: true }), true)
42 | is(match('f?o', 'fooo', { extended: true }), false)
43 | is(match('f?oo', 'foo', { extended: true }), false)
44 |
45 | const tester = (globstar) => {
46 | is(match('f?o', 'foo', { extended: true, globstar, flags: 'g' }), true)
47 | is(match('f?o', 'fooo', { extended: true, globstar, flags: 'g' }), true)
48 | is(match('f?o?', 'fooo', { extended: true, globstar, flags: 'g' }), true)
49 |
50 | is(match('?fo', 'fooo', { extended: true, globstar, flags: 'g' }), false)
51 | is(match('f?oo', 'foo', { extended: true, globstar, flags: 'g' }), false)
52 | is(match('foo?', 'foo', { extended: true, globstar, flags: 'g' }), false)
53 | }
54 |
55 | tester(true)
56 | tester(false)
57 | })
58 |
59 | test('[] match a character range', () => {
60 | is(match('fo[oz]', 'foo', { extended: true }), true)
61 | is(match('fo[oz]', 'foz', { extended: true }), true)
62 | is(match('fo[oz]', 'fog', { extended: true }), false)
63 | is(match('fo[a-z]', 'fob', { extended: true }), true)
64 | is(match('fo[a-d]', 'fot', { extended: true }), false)
65 | is(match('fo[!tz]', 'fot', { extended: true }), false)
66 | is(match('fo[!tz]', 'fob', { extended: true }), true)
67 |
68 | const tester = (globstar) => {
69 | is(match('fo[oz]', 'foo', { extended: true, globstar, flags: 'g' }), true)
70 | is(match('fo[oz]', 'foz', { extended: true, globstar, flags: 'g' }), true)
71 | is(match('fo[oz]', 'fog', { extended: true, globstar, flags: 'g' }), false)
72 | }
73 |
74 | tester(true)
75 | tester(false)
76 | })
77 |
78 | test('[] extended character ranges', () => {
79 | is(match('[[:alnum:]]/bar.txt', 'a/bar.txt', { extended: true }), true)
80 | is(match('@([[:alnum:]abc]|11)/bar.txt', '11/bar.txt', { extended: true }), true)
81 | is(match('@([[:alnum:]abc]|11)/bar.txt', 'a/bar.txt', { extended: true }), true)
82 | is(match('@([[:alnum:]abc]|11)/bar.txt', 'b/bar.txt', { extended: true }), true)
83 | is(match('@([[:alnum:]abc]|11)/bar.txt', 'c/bar.txt', { extended: true }), true)
84 | is(match('@([[:alnum:]abc]|11)/bar.txt', 'abc/bar.txt', { extended: true }), false)
85 | is(match('@([[:alnum:]abc]|11)/bar.txt', '3/bar.txt', { extended: true }), true)
86 | is(match('[[:digit:]]/bar.txt', '1/bar.txt', { extended: true }), true)
87 | is(match('[[:digit:]b]/bar.txt', 'b/bar.txt', { extended: true }), true)
88 | is(match('[![:digit:]b]/bar.txt', 'a/bar.txt', { extended: true }), true)
89 | is(match('[[:alnum:]]/bar.txt', '!/bar.txt', { extended: true }), false)
90 | is(match('[[:digit:]]/bar.txt', 'a/bar.txt', { extended: true }), false)
91 | is(match('[[:space:]b]/bar.txt', 'a/bar.txt', { extended: true }), false)
92 | })
93 |
94 | test('{} match a choice of different substrings', () => {
95 | is(match('foo{bar,baaz}', 'foobaaz', { extended: true }), true)
96 | is(match('foo{bar,baaz}', 'foobar', { extended: true }), true)
97 | is(match('foo{bar,baaz}', 'foobuzz', { extended: true }), false)
98 | is(match('foo{bar,b*z}', 'foobuzz', { extended: true }), true)
99 |
100 | const tester = (globstar) => {
101 | is(match('foo{bar,baaz}', 'foobaaz', { extended: true, globstar, flags: 'g' }), true)
102 | is(match('foo{bar,baaz}', 'foobar', { extended: true, globstar, flags: 'g' }), true)
103 | is(match('foo{bar,baaz}', 'foobuzz', { extended: true, globstar, flags: 'g' }), false)
104 | is(match('foo{bar,b*z}', 'foobuzz', { extended: true, globstar, flags: 'g' }), true)
105 | }
106 |
107 | tester(true)
108 | tester(false)
109 | })
110 |
111 | test('complex extended matches', () => {
112 | is(
113 | match('http://?o[oz].b*z.com/{*.js,*.html}', 'http://foo.baaz.com/jquery.min.js', {
114 | extended: true,
115 | }),
116 | true
117 | )
118 | is(
119 | match('http://?o[oz].b*z.com/{*.js,*.html}', 'http://moz.buzz.com/index.html', {
120 | extended: true,
121 | }),
122 | true
123 | )
124 | is(
125 | match('http://?o[oz].b*z.com/{*.js,*.html}', 'http://moz.buzz.com/index.htm', {
126 | extended: true,
127 | }),
128 | false
129 | )
130 | is(
131 | match('http://?o[oz].b*z.com/{*.js,*.html}', 'http://moz.bar.com/index.html', {
132 | extended: true,
133 | }),
134 | false
135 | )
136 | is(
137 | match('http://?o[oz].b*z.com/{*.js,*.html}', 'http://flozz.buzz.com/index.html', {
138 | extended: true,
139 | }),
140 | false
141 | )
142 |
143 | const tester = (globstar) => {
144 | is(
145 | match('http://?o[oz].b*z.com/{*.js,*.html}', 'http://foo.baaz.com/jquery.min.js', {
146 | extended: true,
147 | globstar,
148 | flags: 'g',
149 | }),
150 | true
151 | )
152 | is(
153 | match('http://?o[oz].b*z.com/{*.js,*.html}', 'http://moz.buzz.com/index.html', {
154 | extended: true,
155 | globstar,
156 | flags: 'g',
157 | }),
158 | true
159 | )
160 | is(
161 | match('http://?o[oz].b*z.com/{*.js,*.html}', 'http://moz.buzz.com/index.htm', {
162 | extended: true,
163 | globstar,
164 | flags: 'g',
165 | }),
166 | false
167 | )
168 | is(
169 | match('http://?o[oz].b*z.com/{*.js,*.html}', 'http://moz.bar.com/index.html', {
170 | extended: true,
171 | globstar,
172 | flags: 'g',
173 | }),
174 | false
175 | )
176 | is(
177 | match('http://?o[oz].b*z.com/{*.js,*.html}', 'http://flozz.buzz.com/index.html', {
178 | extended: true,
179 | globstar,
180 | flags: 'g',
181 | }),
182 | false
183 | )
184 | }
185 |
186 | tester(true)
187 | tester(false)
188 | })
189 |
190 | test('standard globstar', () => {
191 | const tester = (globstar) => {
192 | is(
193 | match('http://foo.com/**/{*.js,*.html}', 'http://foo.com/bar/jquery.min.js', {
194 | extended: true,
195 | globstar,
196 | flags: 'g',
197 | }),
198 | true
199 | )
200 | is(
201 | match('http://foo.com/**/{*.js,*.html}', 'http://foo.com/bar/baz/jquery.min.js', {
202 | extended: true,
203 | globstar,
204 | flags: 'g',
205 | }),
206 | true
207 | )
208 | is(
209 | match('http://foo.com/**', 'http://foo.com/bar/baz/jquery.min.js', {
210 | extended: true,
211 | globstar,
212 | flags: 'g',
213 | }),
214 | true
215 | )
216 | }
217 |
218 | tester(true)
219 | tester(false)
220 | })
221 |
222 | test('remaining chars should match themselves', () => {
223 | const tester = (globstar) => {
224 | const testExtStr = '\\/$^+.()=!|,.*'
225 | is(match(testExtStr, testExtStr, { extended: true }), true)
226 | is(match(testExtStr, testExtStr, { extended: true, globstar, flags: 'g' }), true)
227 | }
228 |
229 | tester(true)
230 | tester(false)
231 | })
232 |
233 | test('globstar advanced testing', () => {
234 | is(match('/foo/*', '/foo/bar.txt', { globstar: true }), true)
235 | is(match('/foo/**', '/foo/bar.txt', { globstar: true }), true)
236 | is(match('/foo/**', '/foo/bar/baz.txt', { globstar: true }), true)
237 | is(match('/foo/**', '/foo/bar/baz.txt', { globstar: true }), true)
238 | is(match('/foo/*/*.txt', '/foo/bar/baz.txt', { globstar: true }), true)
239 | is(match('/foo/**/*.txt', '/foo/bar/baz.txt', { globstar: true }), true)
240 | is(match('/foo/**/*.txt', '/foo/bar/baz/qux.txt', { globstar: true }), true)
241 | is(match('/foo/**/bar.txt', '/foo/bar.txt', { globstar: true }), true)
242 | is(match('/foo/**/**/bar.txt', '/foo/bar.txt', { globstar: true }), true)
243 | is(match('/foo/**/*/baz.txt', '/foo/bar/baz.txt', { globstar: true }), true)
244 | is(match('/foo/**/*.txt', '/foo/bar.txt', { globstar: true }), true)
245 | is(match('/foo/**/**/*.txt', '/foo/bar.txt', { globstar: true }), true)
246 | is(match('/foo/**/*/*.txt', '/foo/bar/baz.txt', { globstar: true }), true)
247 | is(match('**/*.txt', '/foo/bar/baz/qux.txt', { globstar: true }), true)
248 | is(match('**/foo.txt', 'foo.txt', { globstar: true }), true)
249 | is(match('**/*.txt', 'foo.txt', { globstar: true }), true)
250 | is(match('/foo/*', '/foo/bar/baz.txt', { globstar: true }), false)
251 | is(match('/foo/*.txt', '/foo/bar/baz.txt', { globstar: true }), false)
252 | is(match('/foo/*/*.txt', '/foo/bar/baz/qux.txt', { globstar: true }), false)
253 | is(match('/foo/*/bar.txt', '/foo/bar.txt', { globstar: true }), false)
254 | is(match('/foo/*/*/baz.txt', '/foo/bar/baz.txt', { globstar: true }), false)
255 | is(match('/foo/**.txt', '/foo/bar/baz/qux.txt', { globstar: true }), false)
256 | is(match('/foo/bar**/*.txt', '/foo/bar/baz/qux.txt', { globstar: true }), false)
257 | is(match('/foo/bar**', '/foo/bar/baz.txt', { globstar: true }), false)
258 | is(match('**/.txt', '/foo/bar/baz/qux.txt', { globstar: true }), false)
259 | is(match('*/*.txt', '/foo/bar/baz/qux.txt', { globstar: true }), false)
260 | is(match('*/*.txt', 'foo.txt', { globstar: true }), false)
261 | is(
262 | match('http://foo.com/*', 'http://foo.com/bar/baz/jquery.min.js', {
263 | extended: true,
264 | globstar: true,
265 | }),
266 | false
267 | )
268 | is(match('http://foo.com/*', 'http://foo.com/bar/baz/jquery.min.js', { globstar: true }), false)
269 | is(match('http://foo.com/*', 'http://foo.com/bar/baz/jquery.min.js', { globstar: false }), true)
270 | is(match('http://foo.com/**', 'http://foo.com/bar/baz/jquery.min.js', { globstar: true }), true)
271 | is(
272 | match('http://foo.com/*/*/jquery.min.js', 'http://foo.com/bar/baz/jquery.min.js', {
273 | globstar: true,
274 | }),
275 | true
276 | )
277 | is(
278 | match('http://foo.com/**/jquery.min.js', 'http://foo.com/bar/baz/jquery.min.js', {
279 | globstar: true,
280 | }),
281 | true
282 | )
283 | is(
284 | match('http://foo.com/*/*/jquery.min.js', 'http://foo.com/bar/baz/jquery.min.js', {
285 | globstar: false,
286 | }),
287 | true
288 | )
289 | is(
290 | match('http://foo.com/*/jquery.min.js', 'http://foo.com/bar/baz/jquery.min.js', {
291 | globstar: false,
292 | }),
293 | true
294 | )
295 | is(
296 | match('http://foo.com/*/jquery.min.js', 'http://foo.com/bar/baz/jquery.min.js', {
297 | globstar: true,
298 | }),
299 | false
300 | )
301 | })
302 |
303 | test('extended extglob ?', () => {
304 | is(match('(foo).txt', '(foo).txt', { extended: true }), true)
305 | is(match('?(foo).txt', 'foo.txt', { extended: true }), true)
306 | is(match('?(foo).txt', '.txt', { extended: true }), true)
307 | is(match('?(foo|bar)baz.txt', 'foobaz.txt', { extended: true }), true)
308 | is(match('?(ba[zr]|qux)baz.txt', 'bazbaz.txt', { extended: true }), true)
309 | is(match('?(ba[zr]|qux)baz.txt', 'barbaz.txt', { extended: true }), true)
310 | is(match('?(ba[zr]|qux)baz.txt', 'quxbaz.txt', { extended: true }), true)
311 | is(match('?(ba[!zr]|qux)baz.txt', 'batbaz.txt', { extended: true }), true)
312 | is(match('?(ba*|qux)baz.txt', 'batbaz.txt', { extended: true }), true)
313 | is(match('?(ba*|qux)baz.txt', 'batttbaz.txt', { extended: true }), true)
314 | is(match('?(ba*|qux)baz.txt', 'quxbaz.txt', { extended: true }), true)
315 | is(match('?(ba?(z|r)|qux)baz.txt', 'bazbaz.txt', { extended: true }), true)
316 | is(match('?(ba?(z|?(r))|qux)baz.txt', 'bazbaz.txt', { extended: true }), true)
317 | is(match('?(foo).txt', 'foo.txt', { extended: false }), false)
318 | is(match('?(foo|bar)baz.txt', 'foobarbaz.txt', { extended: true }), false)
319 | is(match('?(ba[zr]|qux)baz.txt', 'bazquxbaz.txt', { extended: true }), false)
320 | is(match('?(ba[!zr]|qux)baz.txt', 'bazbaz.txt', { extended: true }), false)
321 | })
322 |
323 | test('extended extglob *', () => {
324 | is(match('*(foo).txt', 'foo.txt', { extended: true }), true)
325 | is(match('*foo.txt', 'bofoo.txt', { extended: true }), true)
326 | is(match('*(foo).txt', 'foofoo.txt', { extended: true }), true)
327 | is(match('*(foo).txt', '.txt', { extended: true }), true)
328 | is(match('*(fooo).txt', '.txt', { extended: true }), true)
329 | is(match('*(fooo).txt', 'foo.txt', { extended: true }), false)
330 | is(match('*(foo|bar).txt', 'foobar.txt', { extended: true }), true)
331 | is(match('*(foo|bar).txt', 'barbar.txt', { extended: true }), true)
332 | is(match('*(foo|bar).txt', 'barfoobar.txt', { extended: true }), true)
333 | is(match('*(foo|bar).txt', '.txt', { extended: true }), true)
334 | is(match('*(foo|ba[rt]).txt', 'bat.txt', { extended: true }), true)
335 | is(match('*(foo|b*[rt]).txt', 'blat.txt', { extended: true }), true)
336 | is(match('*(foo|b*[rt]).txt', 'tlat.txt', { extended: true }), false)
337 | is(match('*(*).txt', 'whatever.txt', { extended: true, globstar: true }), true)
338 | is(
339 | match('*(foo|bar)/**/*.txt', 'foo/hello/world/bar.txt', { extended: true, globstar: true }),
340 | true
341 | )
342 | is(match('*(foo|bar)/**/*.txt', 'foo/world/bar.txt', { extended: true, globstar: true }), true)
343 | })
344 |
345 | test('extended extglob +', () => {
346 | is(match('+(foo).txt', 'foo.txt', { extended: true }), true)
347 | is(match('+foo.txt', '+foo.txt', { extended: true }), true)
348 | is(match('+(foo).txt', '.txt', { extended: true }), false)
349 | is(match('+(foo|bar).txt', 'foobar.txt', { extended: true }), true)
350 | })
351 |
352 | test('extended extglob @', () => {
353 | is(match('@(foo).txt', 'foo.txt', { extended: true }), true)
354 | is(match('@foo.txt', '@foo.txt', { extended: true }), true)
355 | is(match('@(foo|baz)bar.txt', 'foobar.txt', { extended: true }), true)
356 | is(match('@(foo|baz)bar.txt', 'foobazbar.txt', { extended: true }), false)
357 | is(match('@(foo|baz)bar.txt', 'foofoobar.txt', { extended: true }), false)
358 | is(match('@(foo|baz)bar.txt', 'toofoobar.txt', { extended: true }), false)
359 | })
360 |
361 | test('extended extglob !', () => {
362 | is(match('!(boo).txt', 'foo.txt', { extended: true }), true)
363 | is(match('!(foo|baz)bar.txt', 'buzbar.txt', { extended: true }), true)
364 | is(match('!bar.txt', '!bar.txt', { extended: true }), true)
365 | is(match('!({foo,bar})baz.txt', 'notbaz.txt', { extended: true }), true)
366 | is(match('!({foo,bar})baz.txt', 'foobaz.txt', { extended: true }), false)
367 | })
368 |
369 | test('stress testing', () => {
370 | is(match('**/*/?yfile.{md,js,txt}', 'foo/bar/baz/myfile.md', { extended: true }), true)
371 | is(match('**/*/?yfile.{md,js,txt}', 'foo/baz/myfile.md', { extended: true }), true)
372 | is(match('**/*/?yfile.{md,js,txt}', 'foo/baz/tyfile.js', { extended: true }), true)
373 | is(match('[[:digit:]_.]/file.js', '1/file.js', { extended: true }), true)
374 | is(match('[[:digit:]_.]/file.js', '2/file.js', { extended: true }), true)
375 | is(match('[[:digit:]_.]/file.js', '_/file.js', { extended: true }), true)
376 | is(match('[[:digit:]_.]/file.js', './file.js', { extended: true }), true)
377 | is(match('[[:digit:]_.]/file.js', 'z/file.js', { extended: true }), false)
378 | })
379 |
380 | test.run()
381 |
--------------------------------------------------------------------------------
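As the match() helper above shows, globToRegex(glob, opts) returns an object whose regex property is a RegExp compiled from the glob; extended, globstar, and flags are the only options the suite exercises. A small sketch with expected results inferred from the cases above, not from any documented API:

import { globToRegex } from '../lib/glob-to-regex.js'

// Plain * matches any run of characters.
const simple = globToRegex('*.js')
console.log(simple.regex.test('index.js')) // true
console.log(simple.regex.test('index.md')) // false

// With globstar enabled, * stays inside one path segment while ** spans segments
// (inferred from the "globstar advanced testing" cases).
const deep = globToRegex('/foo/**/*.txt', { extended: true, globstar: true })
console.log(deep.regex.test('/foo/bar/baz.txt')) // true
console.log(deep.regex.test('/foo/bar/baz.js'))  // false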
/test/index.test.js:
--------------------------------------------------------------------------------
1 | import { is } from 'uvu/assert'
2 | import esmock from 'esmock'
3 | import { test } from 'uvu'
4 |
5 | import { createStdout } from './utils/index.js'
6 |
7 | let stdout = createStdout()
8 |
9 | test.before.each(() => {
10 | stdout.out = ''
11 | })
12 |
13 | test('should return when config undefined', async () => {
14 | const nanoStaged = await esmock('../lib/index.js', {
15 | '../lib/config.js': {
16 | getConfig: async () => undefined,
17 | },
18 | })
19 |
20 | try {
21 | await nanoStaged({ stream: stdout })
22 | } catch (error) {
23 | is(stdout.out, '\x1B[31m×\x1B[39m \x1B[31mCreate Nano Staged config.\x1B[39m\n')
24 | }
25 | })
26 |
27 | test('should return when config path error', async () => {
28 | const nanoStaged = await esmock('../lib/index.js', {
29 | '../lib/config.js': {
30 | getConfig: async () => undefined,
31 | },
32 | })
33 |
34 | try {
35 | await nanoStaged({ stream: stdout, config: 'config.json' })
36 | } catch (error) {
37 | is(
38 | stdout.out,
39 | '\x1B[31m×\x1B[39m \x1B[31mNano Staged config file \x1B[33mconfig.json\x1B[31m is not found.\x1B[39m\n'
40 | )
41 | }
42 | })
43 |
44 | test('should report invalid config', async () => {
45 | const nanoStaged = await esmock('../lib/index.js', {
46 | '../lib/config.js': {
47 | getConfig: async () => true,
48 | },
49 | })
50 |
51 | try {
52 | await nanoStaged({ stream: stdout })
53 | } catch (error) {
54 | is(stdout.out, '\x1B[31m×\x1B[39m \x1B[31mNano Staged config invalid.\x1B[39m\n')
55 | }
56 | })
57 |
58 | test('should return when git not found', async () => {
59 | const nanoStaged = await esmock('../lib/index.js', {
60 | '../lib/config.js': {
61 | getConfig: async () => true,
62 | validConfig: async () => true,
63 | },
64 |
65 | '../lib/git.js': {
66 | createGit: () => ({
67 | getGitPaths: async () => ({ root: null, dot: null }),
68 | }),
69 | },
70 | })
71 |
72 | try {
73 | await nanoStaged({ stream: stdout })
74 | } catch (error) {
75 | is(error.message, 'Nano Staged didn’t find git directory.')
76 | }
77 | })
78 |
79 | test('should return when no files found for staged/unstaged/diff', async () => {
80 | const nanoStaged = await esmock('../lib/index.js', {
81 | '../lib/config.js': {
82 | getConfig: async () => true,
83 | validConfig: async () => true,
84 | },
85 |
86 | '../lib/git.js': {
87 | createGit: () => ({
88 | getGitPaths: async () => ({ root: 'dir', dot: 'dir/.git' }),
89 | unstagedFiles: async () => ({ working: [], deleted: [], changed: [] }),
90 | stagedFiles: async () => ({ working: [], deleted: [], changed: [] }),
91 | changedFiles: async () => ({ working: [], deleted: [], changed: [] }),
92 | }),
93 | },
94 | })
95 |
96 | try {
97 | await nanoStaged({ stream: stdout })
98 | } catch (error) {
99 | is(error.message, 'No staged files found.')
100 | stdout.out = ''
101 | }
102 |
103 | try {
104 | await nanoStaged({ stream: stdout, unstaged: true })
105 | } catch (error) {
106 | is(error.message, 'No unstaged files found.')
107 | stdout.out = ''
108 | }
109 |
110 | try {
111 | await nanoStaged({ stream: stdout, diff: ['1', '2'] })
112 | } catch (error) {
113 | is(error.message, 'No diff files found.')
114 | stdout.out = ''
115 | }
116 | })
117 |
118 | test('should run staged runner', async () => {
119 | const nanoStaged = await esmock('../lib/index.js', {
120 | '../lib/config.js': {
121 | getConfig: async () => true,
122 | validConfig: async () => true,
123 | getGitPaths: async () => ({ root: 'dir', dot: 'dir/.git' }),
124 | },
125 |
126 | '../lib/git.js': {
127 | createGit: () => ({
128 | getGitPaths: async () => ({ root: 'dir', dot: 'dir/.git' }),
129 | stagedFiles: async () => ({ working: ['a.js'], deleted: [], changed: ['a.js'] }),
130 | }),
131 | },
132 |
133 | '../lib/runner.js': {
134 | createRunner: () => ({
135 | run: async () => stdout.write('staged'),
136 | }),
137 | },
138 | })
139 |
140 | await nanoStaged({ stream: stdout })
141 | is(stdout.out, 'staged')
142 | })
143 |
144 | test('should run unstaged runner', async () => {
145 | const nanoStaged = await esmock('../lib/index.js', {
146 | '../lib/config.js': {
147 | getConfig: async () => true,
148 | validConfig: async () => true,
149 | },
150 |
151 | '../lib/git.js': {
152 | createGit: () => ({
153 | getGitPaths: async () => ({ root: 'dir', dot: 'dir/.git' }),
154 | unstagedFiles: async () => ({ working: ['a.js'], deleted: [], changed: ['a.js'] }),
155 | }),
156 | },
157 |
158 | '../lib/runner.js': {
159 | createRunner: () => ({
160 | run: async () => stdout.write('unstaged'),
161 | }),
162 | },
163 | })
164 |
165 | await nanoStaged({ stream: stdout, unstaged: true })
166 | is(stdout.out, 'unstaged')
167 | })
168 |
169 | test('should run diff runner', async () => {
170 | const nanoStaged = await esmock('../lib/index.js', {
171 | '../lib/config.js': {
172 | getConfig: async () => true,
173 | validConfig: async () => true,
174 | },
175 |
176 | '../lib/git.js': {
177 | createGit: () => ({
178 | getGitPaths: async () => ({ root: 'dir', dot: 'dir/.git' }),
179 | changedFiles: async () => ({ working: ['a.js'], deleted: [], changed: ['a.js'] }),
180 | }),
181 | },
182 |
183 | '../lib/runner.js': {
184 | createRunner: () => ({
185 | run: async () => stdout.write('diff'),
186 | }),
187 | },
188 | })
189 |
190 | await nanoStaged({ stream: stdout, diff: [] })
191 | is(stdout.out, 'diff')
192 | })
193 |
194 | test('should report runner run error', async () => {
195 | const nanoStaged = await esmock('../lib/index.js', {
196 | '../lib/config.js': {
197 | getConfig: async () => true,
198 | validConfig: async () => true,
199 | },
200 |
201 | '../lib/git.js': {
202 | createGit: () => ({
203 | getGitPaths: async () => ({ root: 'dir', dot: 'dir/.git' }),
204 | stagedFiles: async () => ({ working: ['a.js'], deleted: [], changed: ['a.js'] }),
205 | }),
206 | },
207 |
208 | '../lib/runner.js': {
209 | createRunner: () => ({
210 | run: async () => {
211 | let taskError = new Error('Task error')
212 | taskError.name = 'TaskError'
213 |
214 | throw taskError
215 | },
216 | }),
217 | },
218 | })
219 |
220 | try {
221 | await nanoStaged({ stream: stdout })
222 | } catch (error) {
223 | is(stdout.out, '\n\x1B[31mTask error\x1B[39m\n')
224 | }
225 | })
226 |
227 | test.run()
228 |
--------------------------------------------------------------------------------
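Taken together, the index.test.js cases above pin down how the entry point dispatches: staged files by default, unstagedFiles() when unstaged: true is passed, changedFiles() when a diff array is given, and errors are reported to the injected stream. A call sketch assuming lib/index.js default-exports the function the mocked suites invoke (the diff refs are hypothetical):

import nanoStaged from '../lib/index.js'

// Default mode: run configured tasks against staged files.
await nanoStaged({ stream: process.stdout })

// Unstaged and diff modes, mirroring the options exercised above.
await nanoStaged({ stream: process.stdout, unstaged: true })
await nanoStaged({ stream: process.stdout, diff: ['HEAD^1', 'HEAD'] })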
/test/renderer.test.js:
--------------------------------------------------------------------------------
1 | import { is, equal } from 'uvu/assert'
2 | import { test } from 'uvu'
3 | import { delay } from 'nanodelay'
4 |
5 | import { createRenderer } from '../lib/renderer.js'
6 | import { createStdout } from './utils/index.js'
7 |
8 | let stdout = createStdout()
9 |
10 | test.before.each(() => {
11 | stdout.out = ''
12 | })
13 |
14 | test('should create TTY renderer', async () => {
15 | const renderer = createRenderer(stdout)
16 | const task = {
17 | title: 'Test',
18 | tasks: [
19 | {
20 | title: 'Sub test',
21 | },
22 | ],
23 | }
24 |
25 | renderer.spin(task)
26 | task.state = 'run'
27 |
28 | renderer.spin(task)
29 | task.state = 'warn'
30 |
31 | renderer.spin(task)
32 | task.state = 'done'
33 |
34 | renderer.spin(task)
35 | task.state = 'fail'
36 |
37 | renderer.stop()
38 |
39 | is(
40 | stdout.out,
41 | '\x1B[90m*\x1B[39m Test\n' +
42 | ' \x1B[90m*\x1B[39m Sub test\x1B[1G\x1B[2K\x1B[1A\x1B[1G\x1B[2K\x1B[33m\\\x1B[39m Test\n' +
43 | ' \x1B[90m*\x1B[39m Sub test\n' +
44 | '\x1B[33m|\x1B[39m Test\n' +
45 | ' \x1B[90m*\x1B[39m Sub test\x1B[1G\x1B[2K\x1B[1A\x1B[1G\x1B[2K\x1B[1A\x1B[1G\x1B[2K\x1B[1A\x1B[1G\x1B[2K\x1B[33m↓\x1B[39m Test\n' +
46 | ' \x1B[90m*\x1B[39m Sub test\n' +
47 | '\x1B[33m↓\x1B[39m Test\n' +
48 | ' \x1B[90m*\x1B[39m Sub test\n' +
49 | '\x1B[33m↓\x1B[39m Test\n' +
50 | ' \x1B[90m*\x1B[39m Sub test\x1B[1G\x1B[2K\x1B[1A\x1B[1G\x1B[2K\x1B[1A\x1B[1G\x1B[2K\x1B[1A\x1B[1G\x1B[2K\x1B[1A\x1B[1G\x1B[2K\x1B[1A\x1B[1G\x1B[2K\x1B[32m√\x1B[39m Test\n' +
51 | '\x1B[32m√\x1B[39m Test\n' +
52 | '\x1B[32m√\x1B[39m Test\n' +
53 | '\x1B[32m√\x1B[39m Test\x1B[1G\x1B[2K\x1B[1A\x1B[1G\x1B[2K\x1B[1A\x1B[1G\x1B[2K\x1B[1A\x1B[1G\x1B[2K\x1B[31m×\x1B[39m Test\n' +
54 | ' \x1B[90m*\x1B[39m Sub test\n' +
55 | '\x1B[31m×\x1B[39m Test\n' +
56 | ' \x1B[90m*\x1B[39m Sub test\n' +
57 | '\x1B[31m×\x1B[39m Test\n' +
58 | ' \x1B[90m*\x1B[39m Sub test\n' +
59 | '\x1B[31m×\x1B[39m Test\n' +
60 | ' \x1B[90m*\x1B[39m Sub test\n' +
61 | '\x1B[?25h'
62 | )
63 | })
64 |
65 | test('should create CI renderer', async () => {
66 | const renderer = createRenderer(stdout, { isTTY: false })
67 | const tasks = [
68 | {
69 | title: 'Test',
70 | state: 'done',
71 | },
72 | {
73 | title: 'Test 2',
74 | state: 'done',
75 | tasks: [
76 | {
77 | title: 'Sub test',
78 | state: 'done',
79 | tasks: [
80 | {
81 | title: 'Sub sub test',
82 | state: 'done',
83 | parent: {
84 | title: 'Sub test',
85 | },
86 | },
87 | ],
88 | },
89 | ],
90 | },
91 | ]
92 |
93 | for (const task of tasks) {
94 | renderer.spin(task)
95 | }
96 |
97 | renderer.stop()
98 |
99 | is(
100 | stdout.out,
101 | '\x1B[32m√\x1B[39m Test\n\x1B[32m√\x1B[39m Sub test\x1B[33m ≫ \x1B[39mSub sub test\n'
102 | )
103 | })
104 |
105 | test.run()
106 |
--------------------------------------------------------------------------------
/test/reporter.test.js:
--------------------------------------------------------------------------------
1 | import { is } from 'uvu/assert'
2 | import { test } from 'uvu'
3 |
4 | import { NanoStagedError, TaskRunnerError } from '../lib/errors.js'
5 | import { createReporter } from '../lib/reporter.js'
6 | import { createStdout } from './utils/index.js'
7 |
8 | let stdout = createStdout()
9 | let report = createReporter(stdout)
10 |
11 | test.before.each(() => {
12 | stdout.out = ''
13 | })
14 |
15 | test('should report error correctly', () => {
16 | let err = new Error('Error')
17 |
18 | report.error(err)
19 | is(stdout.out, '\n\x1B[31mError\x1B[39m\n')
20 | })
21 |
22 | test('should report TaskRunnerError correctly', () => {
23 | let err = new Error('TaskRunnerError')
24 | err.name = 'TaskRunnerError'
25 |
26 | report.error(err)
27 | is(stdout.out, '\n\x1B[31mTaskRunnerError\x1B[39m\n')
28 | })
29 |
30 | test('should report NanoStagedError correctly', () => {
31 | report.error(new NanoStagedError('noFiles'))
32 | is(stdout.out, '\x1B[36m-\x1B[39m No undefined files found.\n')
33 |
34 | stdout.out = ''
35 | report.error(new TaskRunnerError('task error'))
36 | is(stdout.out, '\ntask error\n')
37 |
38 | stdout.out = ''
39 | report.error(new NanoStagedError('invalidConfig'))
40 | is(stdout.out, '\x1B[31m×\x1B[39m \x1B[31mNano Staged config invalid.\x1B[39m\n')
41 | })
42 |
43 | test.run()
44 |
--------------------------------------------------------------------------------
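The reporter above is a thin formatter: createReporter(stream).error(err) renders plain errors and TaskRunnerError messages in red, while NanoStagedError codes such as 'invalidConfig' map to friendlier messages. A usage sketch grounded in those expectations:

import { NanoStagedError } from '../lib/errors.js'
import { createReporter } from '../lib/reporter.js'

const reporter = createReporter(process.stdout)

reporter.error(new Error('Something broke'))         // "\n<red>Something broke<reset>\n"
reporter.error(new NanoStagedError('invalidConfig')) // "× Nano Staged config invalid."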
/test/runner.test.js:
--------------------------------------------------------------------------------
1 | import { is, equal } from 'uvu/assert'
2 | import esmock from 'esmock'
3 | import { test } from 'uvu'
4 |
5 | import { createStdout } from './utils/index.js'
6 |
7 | let stdout = createStdout()
8 |
9 | test.before.each(() => {
10 | stdout.out = ''
11 | })
12 |
13 | test('should return when no files match any configured task', async () => {
14 | const git_paths = { root: 'dir', dot: 'dir/.git' }
15 | const files = { working: ['a.js'], deleted: [], changed: ['a.js'] }
16 |
17 | const { createRunner } = await esmock('../lib/runner.js', {
18 | '../lib/cmd-runner.js': {
19 | createCmdRunner: () => ({
20 | generateCmdTasks: async () => [{ file_count: 0 }],
21 | }),
22 | },
23 | })
24 |
25 | try {
26 | await createRunner({ stream: stdout, git_paths, files }).run()
27 | } catch (error) {
28 | is(error.message, 'No files match any configured task.')
29 | }
30 | })
31 |
32 | test('should run all steps successfully', async () => {
33 | const git_paths = { root: 'dir', dot: 'dir/.git' }
34 | const files = { working: ['a.js'], deleted: [], changed: ['a.js'] }
35 |
36 | const { createRunner } = await esmock('../lib/runner.js', {
37 | '../lib/cmd-runner.js': {
38 | createCmdRunner: () => ({
39 | generateCmdTasks: async () => [{ file_count: 1 }],
40 | run: async () => Promise.resolve(),
41 | }),
42 | },
43 | '../lib/git-workflow.js': {
44 | createGitWorkflow: () => ({
45 | backupOriginalState: async () => Promise.resolve(),
46 | backupUnstagedFiles: async () => Promise.resolve(),
47 | applyModifications: async () => Promise.resolve(),
48 | restoreUnstagedFiles: async () => Promise.resolve(),
49 | restoreOriginalState: async () => Promise.resolve(),
50 | cleanUp: async () => Promise.resolve(),
51 | }),
52 | },
53 | })
54 |
55 | await createRunner({ stream: stdout, git_paths, files }).run()
56 |
57 | is(
58 | stdout.out,
59 | '\x1B[32m√\x1B[39m Preparing nano-staged\n' +
60 | '\x1B[32m√\x1B[39m Backing up unstaged changes for staged files\n' +
61 | '\x1B[32m√\x1B[39m Applying modifications from tasks\n' +
62 | '\x1B[32m√\x1B[39m Restoring unstaged changes for staged files\n' +
63 | '\x1B[32m√\x1B[39m Cleaning up temporary to patch files\n'
64 | )
65 | })
66 |
67 | test('should backupOriginalState error', async () => {
68 | const git_paths = { root: 'dir', dot: 'dir/.git' }
69 | const files = { working: ['a.js'], deleted: [], changed: ['a.js'] }
70 |
71 | const { createRunner } = await esmock('../lib/runner.js', {
72 | '../lib/cmd-runner.js': {
73 | createCmdRunner: () => ({
74 | generateCmdTasks: async () => [{ file_count: 1 }],
75 | run: async () => Promise.resolve(),
76 | }),
77 | },
78 | '../lib/git-workflow.js': {
79 | createGitWorkflow: () => ({
80 | backupOriginalState: async () => Promise.reject('backupOriginalState fail'),
81 | }),
82 | },
83 | })
84 |
85 | try {
86 | await createRunner({ stream: stdout, git_paths, files }).run()
87 | } catch {
88 | is(stdout.out, '\x1B[31m×\x1B[39m Preparing nano-staged\n')
89 | }
90 | })
91 |
92 | test('should backupUnstagedFiles error', async () => {
93 | const git_paths = { root: 'dir', dot: 'dir/.git' }
94 | const files = { working: ['a.js'], deleted: [], changed: ['a.js'] }
95 |
96 | const { createRunner } = await esmock('../lib/runner.js', {
97 | '../lib/cmd-runner.js': {
98 | createCmdRunner: () => ({
99 | generateCmdTasks: async () => [{ file_count: 1 }],
100 | run: async () => Promise.resolve(),
101 | }),
102 | },
103 | '../lib/git-workflow.js': {
104 | createGitWorkflow: () => ({
105 | backupOriginalState: async () => Promise.resolve(),
106 | backupUnstagedFiles: async () => Promise.reject('backupUnstagedFiles fail'),
107 | restoreOriginalState: async () => Promise.resolve(),
108 | cleanUp: async () => Promise.resolve(),
109 | }),
110 | },
111 | })
112 |
113 | try {
114 | await createRunner({ stream: stdout, git_paths, files }).run()
115 | } catch {
116 | is(
117 | stdout.out,
118 | '\x1B[32m√\x1B[39m Preparing nano-staged\n' +
119 | '\x1B[31m×\x1B[39m Backing up unstaged changes for staged files\n' +
120 | '\x1B[32m√\x1B[39m Restoring to original state because of errors\n' +
121 | '\x1B[32m√\x1B[39m Cleaning up temporary to patch files\n'
122 | )
123 | }
124 | })
125 |
126 | test('should applyModifications error', async () => {
127 | const git_paths = { root: 'dir', dot: 'dir/.git' }
128 | const files = { working: ['a.js'], deleted: [], changed: ['a.js'] }
129 |
130 | const { createRunner } = await esmock('../lib/runner.js', {
131 | '../lib/cmd-runner.js': {
132 | createCmdRunner: () => ({
133 | generateCmdTasks: async () => [{ file_count: 1 }],
134 | run: async () => Promise.resolve(),
135 | }),
136 | },
137 | '../lib/git-workflow.js': {
138 | createGitWorkflow: () => ({
139 | backupOriginalState: async () => Promise.resolve(),
140 | backupUnstagedFiles: async () => Promise.resolve(),
141 | applyModifications: async () => Promise.reject('applyModifications fail'),
142 | restoreOriginalState: async () => Promise.resolve(),
143 | cleanUp: async () => Promise.resolve(),
144 | }),
145 | },
146 | })
147 |
148 | try {
149 | await createRunner({ stream: stdout, git_paths, files }).run()
150 | } catch {
151 | is(
152 | stdout.out,
153 | '\x1B[32m√\x1B[39m Preparing nano-staged\n' +
154 | '\x1B[32m√\x1B[39m Backing up unstaged changes for staged files\n' +
155 | '\x1B[31m×\x1B[39m Applying modifications from tasks\n' +
156 | '\x1B[32m√\x1B[39m Restoring to original state because of errors\n' +
157 | '\x1B[32m√\x1B[39m Cleaning up temporary to patch files\n'
158 | )
159 | }
160 | })
161 |
162 | test('should restoreUnstagedFiles error', async () => {
163 | const git_paths = { root: 'dir', dot: 'dir/.git' }
164 | const files = { working: ['a.js'], deleted: [], changed: ['a.js'] }
165 |
166 | const { createRunner } = await esmock('../lib/runner.js', {
167 | '../lib/cmd-runner.js': {
168 | createCmdRunner: () => ({
169 | generateCmdTasks: async () => [{ file_count: 1 }],
170 | run: async () => Promise.resolve(),
171 | }),
172 | },
173 | '../lib/git-workflow.js': {
174 | createGitWorkflow: () => ({
175 | backupOriginalState: async () => Promise.resolve(),
176 | backupUnstagedFiles: async () => Promise.resolve(),
177 | applyModifications: async () => Promise.resolve(),
178 | restoreUnstagedFiles: async () => Promise.reject('restoreUnstagedFiles fail'),
179 | cleanUp: async () => Promise.resolve(),
180 | }),
181 | },
182 | })
183 |
184 | try {
185 | await createRunner({ stream: stdout, git_paths, files }).run()
186 | } catch {
187 | is(
188 | stdout.out,
189 | '\x1B[32m√\x1B[39m Preparing nano-staged\n' +
190 | '\x1B[32m√\x1B[39m Backing up unstaged changes for staged files\n' +
191 | '\x1B[32m√\x1B[39m Applying modifications from tasks\n' +
192 | '\x1B[31m×\x1B[39m Restoring unstaged changes for staged files\n' +
193 | '\x1B[32m√\x1B[39m Cleaning up temporary to patch files\n'
194 | )
195 | }
196 | })
197 |
198 | test('should restoreOriginalState error', async () => {
199 | const git_paths = { root: 'dir', dot: 'dir/.git' }
200 | const files = { working: ['a.js'], deleted: [], changed: ['a.js'] }
201 |
202 | const { createRunner } = await esmock('../lib/runner.js', {
203 | '../lib/cmd-runner.js': {
204 | createCmdRunner: () => ({
205 | generateCmdTasks: async () => [{ file_count: 1 }],
206 | run: async () => Promise.resolve(),
207 | }),
208 | },
209 | '../lib/git-workflow.js': {
210 | createGitWorkflow: () => ({
211 | backupOriginalState: async () => Promise.resolve(),
212 | backupUnstagedFiles: async () => Promise.reject('backupUnstagedFiles fail'),
213 | restoreOriginalState: async () => Promise.reject('restoreOriginalState fail'),
214 | }),
215 | },
216 | })
217 |
218 | try {
219 | await createRunner({ stream: stdout, git_paths, files }).run()
220 | } catch {
221 | is(
222 | stdout.out,
223 | '\x1B[32m√\x1B[39m Preparing nano-staged\n' +
224 | '\x1B[31m×\x1B[39m Backing up unstaged changes for staged files\n' +
225 | '\x1B[31m×\x1B[39m Restoring to original state because of errors\n'
226 | )
227 | }
228 | })
229 |
230 | test('should handle task runner error', async () => {
231 | const git_paths = { root: 'dir', dot: 'dir/.git' }
232 | const files = { working: ['a.js'], deleted: [], changed: ['a.js'] }
233 |
234 | const { createRunner } = await esmock('../lib/runner.js', {
235 | '../lib/cmd-runner.js': {
236 | createCmdRunner: () => ({
237 | generateCmdTasks: async () => [{ file_count: 1 }],
238 | run: async () => Promise.reject('Task runner error'),
239 | }),
240 | },
241 | '../lib/git-workflow.js': {
242 | createGitWorkflow: () => ({
243 | backupOriginalState: async () => Promise.resolve(),
244 | backupUnstagedFiles: async () => Promise.resolve(),
245 | restoreOriginalState: async () => Promise.resolve(),
246 | cleanUp: async () => Promise.resolve(),
247 | }),
248 | },
249 | })
250 |
251 | try {
252 | await createRunner({ stream: stdout, git_paths, files }).run()
253 | } catch (error) {
254 | equal(error, ['Task runner error'])
255 | is(
256 | stdout.out,
257 | '\x1B[32m√\x1B[39m Preparing nano-staged\n' +
258 | '\x1B[32m√\x1B[39m Backing up unstaged changes for staged files\n' +
259 | '\x1B[32m√\x1B[39m Restoring to original state because of errors\n' +
260 | '\x1B[32m√\x1B[39m Cleaning up temporary to patch files\n'
261 | )
262 | }
263 | })
264 |
265 | test('should cleanUp error', async () => {
266 | const git_paths = { root: 'dir', dot: 'dir/.git' }
267 | const files = { working: ['a.js'], deleted: [], changed: ['a.js'] }
268 |
269 | const { createRunner } = await esmock('../lib/runner.js', {
270 | '../lib/cmd-runner.js': {
271 | createCmdRunner: () => ({
272 | generateCmdTasks: async () => [{ file_count: 1 }],
273 | run: async () => Promise.resolve(),
274 | }),
275 | },
276 | '../lib/git-workflow.js': {
277 | createGitWorkflow: () => ({
278 | backupOriginalState: async () => Promise.resolve(),
279 | backupUnstagedFiles: async () => Promise.resolve(),
280 | applyModifications: async () => Promise.resolve(),
281 | restoreUnstagedFiles: async () => Promise.resolve(),
282 | cleanUp: async () => Promise.reject(),
283 | }),
284 | },
285 | })
286 |
287 | try {
288 | await createRunner({ stream: stdout, git_paths, files }).run()
289 | } catch {
290 | is(
291 | stdout.out,
292 | '\x1B[32m√\x1B[39m Preparing nano-staged\n' +
293 | '\x1B[32m√\x1B[39m Backing up unstaged changes for staged files\n' +
294 | '\x1B[32m√\x1B[39m Applying modifications from tasks\n' +
295 | '\x1B[32m√\x1B[39m Restoring unstaged changes for staged files\n' +
296 | '\x1B[31m×\x1B[39m Cleaning up temporary to patch files\n'
297 | )
298 | }
299 | })
300 |
301 | test.run()
302 |
--------------------------------------------------------------------------------
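Read as a group, the runner tests above fix the git-workflow step order and its failure handling: prepare, back up unstaged changes, run the command tasks, apply their modifications, restore unstaged changes, and always try to clean up; a task or step failure switches to restoring the original state, and collected errors are re-thrown. A condensed control-flow sketch distilled from those expected outputs (workflow and cmdTasks are hypothetical stand-ins for createGitWorkflow(...) and the generated tasks, not the actual lib/runner.js code):

async function runSteps(workflow, cmdTasks) {
  const errors = []

  await workflow.backupOriginalState()      // "Preparing nano-staged" (fatal if it fails)

  try {
    await workflow.backupUnstagedFiles()    // "Backing up unstaged changes for staged files"
    await cmdTasks.run()                    // run the configured commands
    await workflow.applyModifications()     // "Applying modifications from tasks"
  } catch (err) {
    errors.push(err)
    await workflow.restoreOriginalState()   // "Restoring to original state because of errors"
  }

  if (errors.length === 0) {
    try {
      await workflow.restoreUnstagedFiles() // "Restoring unstaged changes for staged files"
    } catch (err) {
      errors.push(err)
    }
  }

  await workflow.cleanUp()                  // "Cleaning up temporary to patch files"
  if (errors.length > 0) throw errors
}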
/test/utils.test.js:
--------------------------------------------------------------------------------
1 | import { equal, is } from 'uvu/assert'
2 | import process from 'process'
3 | import { test } from 'uvu'
4 | import tty from 'tty'
5 | import os from 'os'
6 |
7 | import { toArray, showVersion, stringArgvToArray, getForceColorLevel } from '../lib/utils.js'
8 | import { createStdout } from './utils/index.js'
9 |
10 | test.before.each(() => {
11 | Object.defineProperty(process, 'platform', {
12 | value: 'linux',
13 | })
14 | process.env = {}
15 | process.argv = []
16 | tty.isatty = () => true
17 | })
18 |
19 | test('single to array', () => {
20 | equal(toArray('path'), ['path'])
21 | equal(toArray(['path']), ['path'])
22 | })
23 |
24 | test('print version', () => {
25 | let stdout = createStdout()
26 | showVersion(stdout)
27 | is(stdout.out.replace(/\d+\.\d+\.\d+/, '0.1.0'), 'Nano Staged \x1B[1mv0.1.0\x1B[22m\n')
28 | })
29 |
30 | test('string to args', () => {
31 | equal(stringArgvToArray('cmd --test config --test'), ['cmd', '--test', 'config', '--test'])
32 | equal(stringArgvToArray(''), [])
33 | equal(stringArgvToArray(), [])
34 | })
35 |
36 | test('FORCE_COLOR: 1', () => {
37 | process.env = { FORCE_COLOR: '1' }
38 | is(getForceColorLevel(), 1)
39 |
40 | process.env = { FORCE_COLOR: '' }
41 | is(getForceColorLevel(), 0)
42 |
43 | process.env = { FORCE_COLOR: '256' }
44 | is(getForceColorLevel(), 3)
45 |
46 | process.env = { FORCE_NO_COLOR: true }
47 | is(getForceColorLevel(), 0)
48 | })
49 |
50 | test('tty.isatty: false', () => {
51 | tty.isatty = () => false
52 | is(getForceColorLevel(), 0)
53 | })
54 |
55 | test('Windows 10 build 10586', () => {
56 | Object.defineProperty(process, 'platform', {
57 | value: 'win32',
58 | })
59 | Object.defineProperty(process.versions, 'node', {
60 | value: '8.0.0',
61 | })
62 | os.release = () => '10.0.10586'
63 |
64 | is(getForceColorLevel(), 2)
65 | })
66 |
67 | test('Windows 10 build 14931', () => {
68 | Object.defineProperty(process, 'platform', {
69 | value: 'win32',
70 | })
71 | Object.defineProperty(process.versions, 'node', {
72 | value: '8.0.0',
73 | })
74 | os.release = () => '10.0.14931'
75 |
76 | is(getForceColorLevel(), 3)
77 | })
78 |
79 | test('Windows 10 build 10240', () => {
80 | Object.defineProperty(process, 'platform', {
81 | value: 'win32',
82 | })
83 | Object.defineProperty(process.versions, 'node', {
84 | value: '8.0.0',
85 | })
86 | os.release = () => '10.0.10240'
87 |
88 | is(getForceColorLevel(), 1)
89 | })
90 |
91 | test('COLORTERM', () => {
92 | process.env = { COLORTERM: true }
93 | is(getForceColorLevel(), 1)
94 | })
95 |
96 | test('COLORTERM:truecolor', () => {
97 | process.env = { COLORTERM: 'truecolor' }
98 | is(getForceColorLevel(), 3)
99 | })
100 |
101 | test('TERM:dumb', () => {
102 | process.env = { TERM: 'dumb' }
103 | is(getForceColorLevel(), 0)
104 | })
105 |
106 | test('TERM:xterm-256color', () => {
107 | process.env = { TERM: 'xterm-256color' }
108 | is(getForceColorLevel(), 2)
109 | })
110 |
111 | test('TERM:screen-256color', () => {
112 | process.env = { TERM: 'screen-256color' }
113 | is(getForceColorLevel(), 2)
114 | })
115 |
116 | test('support putty-256color', () => {
117 | process.env = { TERM: 'putty-256color' }
118 | is(getForceColorLevel(), 2)
119 | })
120 |
121 | test('TERM:rxvt', () => {
122 | process.env.TERM = 'rxvt'
123 | is(getForceColorLevel(), 1)
124 | })
125 |
126 | test('default', () => {
127 | is(getForceColorLevel(), 0)
128 | })
129 |
130 | test('prefer level 2/xterm over COLORTERM', () => {
131 | process.env = { COLORTERM: '1', TERM: 'xterm-256color' }
132 | is(getForceColorLevel(), 2)
133 | })
134 |
135 | test('return level 1 when `TERM` is set to dumb and `FORCE_COLOR` is set', () => {
136 | process.env = { FORCE_COLOR: '1', TERM: 'dumb' }
137 | is(getForceColorLevel(), 1)
138 | })
139 |
140 | test('--no-color', () => {
141 | process.env = { TERM: 'xterm-256color' }
142 | process.argv = ['--no-color']
143 | is(getForceColorLevel(), 0)
144 | })
145 |
146 | test('--no-colors', () => {
147 | process.env = { TERM: 'xterm-256color' }
148 | process.argv = ['--no-colors']
149 | is(getForceColorLevel(), 0)
150 | })
151 |
152 | test('--color=false', () => {
153 | process.env = { TERM: 'xterm-256color' }
154 | process.argv = ['--color=false']
155 | is(getForceColorLevel(), 0)
156 | })
157 |
158 | test('--color=never', () => {
159 | process.env = { TERM: 'xterm-256color' }
160 | process.argv = ['--color=never']
161 | is(getForceColorLevel(), 0)
162 | })
163 |
164 | test.run()
165 |
--------------------------------------------------------------------------------
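The color tests above outline the precedence getForceColorLevel follows: --no-color/--no-colors/--color=false/--color=never and a non-TTY stream force 0, FORCE_COLOR wins over a dumb TERM, and otherwise COLORTERM, TERM, and the Windows build number pick a level from 1 to 3 (1 ≈ 16 colors, 2 ≈ 256 colors, 3 ≈ truecolor, inferred from the expectations). A small probe, assuming the function takes no arguments and reads process.env/process.argv directly, as the tests do:

import { getForceColorLevel } from '../lib/utils.js'

process.env.TERM = 'xterm-256color'
console.log(getForceColorLevel()) // 2 when stdout is a TTY

process.argv.push('--color=never')
console.log(getForceColorLevel()) // 0 — the CLI flag overrides TERM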
/test/utils/index.js:
--------------------------------------------------------------------------------
1 | import { resolve, dirname } from 'path'
2 | import { fileURLToPath } from 'url'
3 | import fs from 'fs-extra'
4 |
5 | const DIRNAME = dirname(fileURLToPath(import.meta.url))
6 |
7 | export async function appendFile(filename, content, dir = process.cwd()) {
8 | await fs.appendFile(resolve(dir, filename), content)
9 | }
10 |
11 | export async function makeDir(dir = process.cwd()) {
12 | await fs.mkdir(dir)
13 | }
14 |
15 | export async function writeFile(filename, content, dir = process.cwd()) {
16 | await fs.writeFile(resolve(dir, filename), content)
17 | }
18 |
19 | export async function removeFile(dir) {
20 | if (dir) {
21 | await fs.remove(dir)
22 | }
23 | }
24 |
25 | export function fixture(name) {
26 | return resolve(DIRNAME, '../fixtures', name)
27 | }
28 |
29 | export function createStdout() {
30 | let result = {
31 | out: '',
32 | write: (symbols) => {
33 | result.out += symbols
34 | },
35 | }
36 |
37 | return result
38 | }
39 |
--------------------------------------------------------------------------------
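createStdout above is the capture helper every suite relies on: it exposes a write(symbols) method compatible with the runner's stream option and accumulates everything written into out, which tests reset in before.each. For example:

import { createStdout } from './utils/index.js'

const stdout = createStdout()
stdout.write('hello ')
stdout.write('world')

console.log(stdout.out) // "hello world"
stdout.out = ''         // reset between tests, as the suites above do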