├── .editorconfig
├── .eslintignore
├── .gitattributes
├── .github
├── FUNDING.yml
├── dependabot.yml
└── workflows
│ └── bevry.yml
├── .gitignore
├── .npmignore
├── .prettierignore
├── CONTRIBUTING.md
├── HISTORY.md
├── LICENSE.md
├── README.md
├── SECURITY.md
├── bin.mjs
├── package-lock.json
├── package.json
└── source
├── answers.js
├── base.js
├── bin.js
├── ci.js
├── data.js
├── editions.js
├── fs.js
├── get-git.js
├── index.js
├── log.js
├── package.js
├── questions.js
├── runtime.js
├── state.js
├── test.js
├── util.js
├── versions.js
└── website.js
/.editorconfig:
--------------------------------------------------------------------------------
1 | # 2023 June 22
2 | # https://github.com/bevry/base
3 |
4 | root = true
5 |
6 | [*]
7 | end_of_line = lf
8 | charset = utf-8
9 | trim_trailing_whitespace = true
10 | insert_final_newline = false
11 | indent_style = tab
12 |
13 | [{*.mk,*.py}]
14 | indent_style = tab
15 | indent_size = 4
16 |
17 | [*.md]
18 | indent_style = space
19 | indent_size = 4
20 |
21 | [{*.json,*.lsrules,*.yaml,*.yml,*.bowerrc,*.babelrc,*.code-workspace}]
22 | indent_style = space
23 | indent_size = 2
24 |
25 | [{*.json,*.lsrules}]
26 | insert_final_newline = true
27 |
--------------------------------------------------------------------------------
/.eslintignore:
--------------------------------------------------------------------------------
1 | source/bin.js
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | # 2024 January 3
2 | # https://github.com/bevry/base
3 |
4 | # never modify any line ending in any file, disregarding all nonsense from eol, autocrlf, renormalize, safecrlf, usecrlfattr, allBinary
5 | * -text
6 |
7 | # =====================================
8 | # CUSTOM
9 |
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | github: [balupton]
2 | liberapay: bevry
3 | ko_fi: balupton
4 | open_collective: bevry
5 | tidelift: npm/boundation
6 | custom: ['https://bevry.me/fund']
7 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: github-actions
4 | directory: /
5 | schedule:
6 | interval: weekly
7 | day: sunday
8 | time: '00:00'
9 | timezone: Australia/Perth
10 | - package-ecosystem: npm
11 | directory: /
12 | schedule:
13 | interval: weekly
14 | day: sunday
15 | time: '00:00'
16 | timezone: Australia/Perth
17 | open-pull-requests-limit: 0
18 |
--------------------------------------------------------------------------------
/.github/workflows/bevry.yml:
--------------------------------------------------------------------------------
1 | name: bevry
2 | 'on':
3 | - push
4 | - pull_request
5 | jobs:
6 | test:
7 | strategy:
8 | matrix:
9 | os:
10 | - ubuntu-latest
11 | - macos-latest
12 | - windows-latest
13 | node:
14 | - '18'
15 | - '20'
16 | - '21'
17 | runs-on: ${{ matrix.os }}
18 | steps:
19 | - uses: actions/checkout@v4
20 | - name: Install desired Node.js version
21 | uses: actions/setup-node@v4
22 | with:
23 | node-version: '20'
24 | - name: Verify Node.js Versions
25 | run: >-
26 | printf '%s' 'node: ' && node --version && printf '%s' 'npm: ' && npm
27 | --version && node -e 'console.log(process.versions)'
28 | - run: npm run our:setup
29 | - run: npm run our:compile
30 | - run: npm run our:verify
31 | - name: Install targeted Node.js
32 | if: ${{ matrix.node != 20 }}
33 | uses: actions/setup-node@v4
34 | with:
35 | node-version: ${{ matrix.node }}
36 | - name: Verify Node.js Versions
37 | run: >-
38 | printf '%s' 'node: ' && node --version && printf '%s' 'npm: ' && npm
39 | --version && node -e 'console.log(process.versions)'
40 | - run: npm test
41 | publish:
42 | if: ${{ github.event_name == 'push' }}
43 | needs: test
44 | runs-on: ubuntu-latest
45 | steps:
46 | - uses: actions/checkout@v4
47 | - name: Install desired Node.js version
48 | uses: actions/setup-node@v4
49 | with:
50 | node-version: '20'
51 | - name: Verify Node.js Versions
52 | run: >-
53 | printf '%s' 'node: ' && node --version && printf '%s' 'npm: ' && npm
54 | --version && node -e 'console.log(process.versions)'
55 | - run: npm run our:setup
56 | - run: npm run our:compile
57 | - run: npm run our:meta
58 | - name: publish to npm
59 | uses: bevry-actions/npm@v1.1.7
60 | with:
61 | npmAuthToken: ${{ secrets.NPM_AUTH_TOKEN }}
62 | npmBranchTag: ':next'
63 | automerge:
64 | permissions:
65 | contents: write
66 | pull-requests: write
67 | runs-on: ubuntu-latest
68 | if: github.actor == 'dependabot[bot]'
69 | steps:
70 | - name: Enable auto-merge for Dependabot PRs
71 | run: gh pr merge --auto --squash "$PR_URL"
72 | env:
73 | PR_URL: ${{github.event.pull_request.html_url}}
74 | GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
75 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # 2020 June 3
2 | # https://github.com/bevry/base
3 |
4 | # System Files
5 | **/.DS_Store
6 |
7 | # Temp Files
8 | **/.docpad.db
9 | **/*.log
10 | **/*.cpuprofile
11 | **/*.heapsnapshot
12 |
13 | # Editor Files
14 | .c9/
15 | .vscode/
16 |
17 | # Yarn Files
18 | .yarn/*
19 | !.yarn/releases
20 | !.yarn/plugins
21 | !.yarn/sdks
22 | !.yarn/versions
23 | .pnp.*
24 | .pnp/
25 |
26 | # Private Files
27 | .env
28 | .idea
29 | .cake_task_cache
30 |
31 | # Build Caches
32 | build/
33 | bower_components/
34 | node_modules/
35 | .next/
36 |
37 | # -------------------------------------
38 | # CDN Inclusions, Git Exclusions
39 |
40 | # Build Outputs
41 | **/out.*
42 | **/*.out.*
43 | **/out/
44 | **/output/
45 | *compiled*
46 | edition*/
47 | coffeejs/
48 | coffee/
49 | es5/
50 | es2015/
51 | esnext/
52 | docs/
53 |
54 | # =====================================
55 | # CUSTOM
56 |
57 | # None
--------------------------------------------------------------------------------
/.npmignore:
--------------------------------------------------------------------------------
1 | # 2020 May 5
2 | # https://github.com/bevry/base
3 |
4 | # System Files
5 | **/.DS_Store
6 |
7 | # Temp Files
8 | **/.docpad.db
9 | **/*.log
10 | **/*.cpuprofile
11 | **/*.heapsnapshot
12 |
13 | # Editor Files
14 | .c9/
15 | .vscode/
16 |
17 | # Private Files
18 | .env
19 | .idea
20 | .cake_task_cache
21 |
22 | # Build Caches
23 | build/
24 | components/
25 | bower_components/
26 | node_modules/
27 | .pnp/
28 | .pnp.js
29 |
30 | # Ecosystem Files
31 | .dependabot
32 | .github
33 |
34 | # -------------------------------------
35 | # CDN Inclusions, Package Exclusions
36 |
37 | # Documentation Files
38 | docs/
39 | guides/
40 | BACKERS.md
41 | CONTRIBUTING.md
42 | HISTORY.md
43 |
44 | # Development Files
45 | web/
46 | **/example*
47 | **/test*
48 | .babelrc*
49 | .editorconfig
50 | .eslintrc*
51 | .jshintrc
52 | .jscrc
53 | coffeelint*
54 | .travis*
55 | nakefile*
56 | Cakefile
57 | Makefile
58 |
59 | # Other Package Definitions
60 | template.js
61 | component.json
62 | bower.json
63 |
64 | # =====================================
65 | # CUSTOM
66 |
67 | # None
--------------------------------------------------------------------------------
/.prettierignore:
--------------------------------------------------------------------------------
1 | # 2023 November 13
2 | # https://github.com/bevry/base
3 |
4 | # VCS Files
5 | .git
6 | .svn
7 | .hg
8 |
9 | # System Files
10 | **/.DS_Store
11 |
12 | # Temp Files
13 | **/.docpad.db
14 | **/*.log
15 | **/*.cpuprofile
16 | **/*.heapsnapshot
17 |
18 | # Yarn Files
19 | .yarn/*
20 | !.yarn/releases
21 | !.yarn/plugins
22 | !.yarn/sdks
23 | !.yarn/versions
24 | .pnp.*
25 | .pnp/
26 |
27 | # Build Caches
28 | build/
29 | components/
30 | bower_components/
31 | node_modules/
32 |
33 | # Build Outputs
34 | **/*.cjs
35 | **/*.mjs
36 | **/out.*
37 | **/*.out.*
38 | **/out/
39 | **/output/
40 | *compiled*
41 | edition*/
42 | coffeejs/
43 | coffee/
44 | es5/
45 | es2015/
46 | esnext/
47 | docs/
48 |
49 | # Development Files
50 | test/
51 | **/*fixtures*
52 |
53 | # Ecosystem Caches
54 | .trunk/*/
55 |
56 | # =====================================
57 | # CUSTOM
58 |
59 | # None
60 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 |
5 |
6 | # Before You Post!
7 |
8 | ## Support
9 |
10 | We offer support through our [Official Support Channels](https://bevry.me/support). Do not use GitHub Issues for support; your issue will be closed.
11 |
12 | ## Contribute
13 |
14 | Our [Contributing Guide](https://bevry.me/contribute) contains useful tips and suggestions for how to contribute to this project; it's worth the read.
15 |
16 | ## Development
17 |
18 | ### Setup
19 |
20 | 1. [Install Node.js](https://bevry.me/install/node)
21 |
22 | 1. Fork the project and clone your fork - [guide](https://help.github.com/articles/fork-a-repo/)
23 |
24 | 1. Setup the project for development
25 |
26 | ```bash
27 | npm run our:setup
28 | ```
29 |
30 | ### Developing
31 |
32 | 1. Compile changes
33 |
34 | ```bash
35 | npm run our:compile
36 | ```
37 |
38 | 1. Run tests
39 |
40 | ```bash
41 | npm test
42 | ```
43 |
44 | ### Publishing
45 |
46 | Follow these steps to get your changes/improvements into your desired project:
47 |
48 | #### Preparation
49 |
50 | 1. Make sure your changes are on their own branch that is branched off from master.
51 |
52 | 1. You can do this by: `git checkout master; git checkout -b your-new-branch`
53 | 1. And push the changes up by: `git push origin your-new-branch`
54 |
55 | 1. Ensure all tests pass:
56 |
57 | ```bash
58 | npm test
59 | ```
60 |
61 | > If possible, add tests for your change; if you don't know how, mention this in your pull request.
62 |
63 | 1. Ensure the project is ready for publishing:
64 |
65 | ```bash
66 | npm run our:release:prepare
67 | ```
68 |
69 | #### Pull Request
70 |
71 | To send your changes for the project owner to merge in:
72 |
73 | 1. Submit your pull request
74 | 1. When submitting, if the original project has a `dev` or `integrate` branch, use that as the target branch for your pull request instead of the default `master`
75 | 1. By submitting a pull request, you agree to license your changes under the same license as the original project
76 |
77 | #### Publish
78 |
79 | To publish your changes as the project owner:
80 |
81 | 1. Switch to the master branch:
82 |
83 | ```bash
84 | git checkout master
85 | ```
86 |
87 | 1. Merge in the changes of the feature branch (if applicable)
88 |
89 | 1. Increment the version number in the `package.json` file according to the [semantic versioning](http://semver.org) standard, that is:
90 |
91 | 1. `x.0.0` MAJOR version when you make incompatible API changes (note: DocPad plugins must use v2 as the major version, as v2 corresponds to the current DocPad v6.x releases)
92 | 1. `x.y.0` MINOR version when you add functionality in a backwards-compatible manner
93 | 1. `x.y.z` PATCH version when you make backwards-compatible bug fixes
94 |
95 | 1. Add an entry to the changelog following the format of the previous entries, an example of this is:
96 |
97 | ```markdown
98 | ## v6.29.0 2013 April 1
99 |
100 | - Progress on [issue #474](https://github.com/docpad/docpad/issues/474)
101 | - DocPad will now set permissions based on the process's ability
102 | - Thanks to [Avi Deitcher](https://github.com/deitch), [Stephan Lough](https://github.com/stephanlough) for [issue #165](https://github.com/docpad/docpad/issues/165)
103 | - Updated dependencies
104 | ```
105 |
106 | 1. Commit the changes with the commit title set to something like `v6.29.0. Bugfix. Improvement.` and commit description set to the changelog entry
107 |
108 | 1. Ensure the project is ready for publishing:
109 |
110 | ```bash
111 | npm run our:release:prepare
112 | ```
113 |
114 | 1. Prepare the release and publish it to npm and git:
115 |
116 | ```bash
117 | npm run our:release
118 | ```
119 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | # License
4 |
5 | Unless stated otherwise all works are:
6 |
7 | - Copyright © [Benjamin Lupton](https://balupton.com)
8 |
9 | and licensed under:
10 |
11 | - [Artistic License 2.0](http://spdx.org/licenses/Artistic-2.0.html)
12 |
13 | ## The Artistic License 2.0
14 |
15 |
16 | Copyright (c) 2000-2006, The Perl Foundation.
17 |
18 | Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
19 |
20 | Preamble
21 |
22 | This license establishes the terms under which a given free software Package may be copied, modified, distributed, and/or redistributed. The intent is that the Copyright Holder maintains some artistic control over the development of that Package while still keeping the Package available as open source and free software.
23 |
24 | You are always permitted to make arrangements wholly outside of this license directly with the Copyright Holder of a given Package. If the terms of this license do not permit the full use that you propose to make of the Package, you should contact the Copyright Holder and seek a different licensing arrangement.
25 |
26 | Definitions
27 |
28 | "Copyright Holder" means the individual(s) or organization(s) named in the copyright notice for the entire Package.
29 |
30 | "Contributor" means any party that has contributed code or other material to the Package, in accordance with the Copyright Holder's procedures.
31 |
32 | "You" and "your" means any person who would like to copy, distribute, or modify the Package.
33 |
34 | "Package" means the collection of files distributed by the Copyright Holder, and derivatives of that collection and/or of those files. A given Package may consist of either the Standard Version, or a Modified Version.
35 |
36 | "Distribute" means providing a copy of the Package or making it accessible to anyone else, or in the case of a company or organization, to others outside of your company or organization.
37 |
38 | "Distributor Fee" means any fee that you charge for Distributing this Package or providing support for this Package to another party. It does not mean licensing fees.
39 |
40 | "Standard Version" refers to the Package if it has not been modified, or has been modified only in ways explicitly requested by the Copyright Holder.
41 |
42 | "Modified Version" means the Package, if it has been changed, and such changes were not explicitly requested by the Copyright Holder.
43 |
44 | "Original License" means this Artistic License as Distributed with the Standard Version of the Package, in its current version or as it may be modified by The Perl Foundation in the future.
45 |
46 | "Source" form means the source code, documentation source, and configuration files for the Package.
47 |
48 | "Compiled" form means the compiled bytecode, object code, binary, or any other form resulting from mechanical transformation or translation of the Source form.
49 |
50 | Permission for Use and Modification Without Distribution
51 |
52 | (1) You are permitted to use the Standard Version and create and use Modified Versions for any purpose without restriction, provided that you do not Distribute the Modified Version.
53 |
54 | Permissions for Redistribution of the Standard Version
55 |
56 | (2) You may Distribute verbatim copies of the Source form of the Standard Version of this Package in any medium without restriction, either gratis or for a Distributor Fee, provided that you duplicate all of the original copyright notices and associated disclaimers. At your discretion, such verbatim copies may or may not include a Compiled form of the Package.
57 |
58 | (3) You may apply any bug fixes, portability changes, and other modifications made available from the Copyright Holder. The resulting Package will still be considered the Standard Version, and as such will be subject to the Original License.
59 |
60 | Distribution of Modified Versions of the Package as Source
61 |
62 | (4) You may Distribute your Modified Version as Source (either gratis or for a Distributor Fee, and with or without a Compiled form of the Modified Version) provided that you clearly document how it differs from the Standard Version, including, but not limited to, documenting any non-standard features, executables, or modules, and provided that you do at least ONE of the following:
63 |
64 | (a) make the Modified Version available to the Copyright Holder of the Standard Version, under the Original License, so that the Copyright Holder may include your modifications in the Standard Version.
65 | (b) ensure that installation of your Modified Version does not prevent the user installing or running the Standard Version. In addition, the Modified Version must bear a name that is different from the name of the Standard Version.
66 | (c) allow anyone who receives a copy of the Modified Version to make the Source form of the Modified Version available to others under
67 |
68 | (i) the Original License or
69 | (ii) a license that permits the licensee to freely copy, modify and redistribute the Modified Version using the same licensing terms that apply to the copy that the licensee received, and requires that the Source form of the Modified Version, and of any works derived from it, be made freely available in that license fees are prohibited but Distributor Fees are allowed.
70 |
71 | Distribution of Compiled Forms of the Standard Version or Modified Versions without the Source
72 |
73 | (5) You may Distribute Compiled forms of the Standard Version without the Source, provided that you include complete instructions on how to get the Source of the Standard Version. Such instructions must be valid at the time of your distribution. If these instructions, at any time while you are carrying out such distribution, become invalid, you must provide new instructions on demand or cease further distribution. If you provide valid instructions or cease distribution within thirty days after you become aware that the instructions are invalid, then you do not forfeit any of your rights under this license.
74 |
75 | (6) You may Distribute a Modified Version in Compiled form without the Source, provided that you comply with Section 4 with respect to the Source of the Modified Version.
76 |
77 | Aggregating or Linking the Package
78 |
79 | (7) You may aggregate the Package (either the Standard Version or Modified Version) with other packages and Distribute the resulting aggregation provided that you do not charge a licensing fee for the Package. Distributor Fees are permitted, and licensing fees for other components in the aggregation are permitted. The terms of this license apply to the use and Distribution of the Standard or Modified Versions as included in the aggregation.
80 |
81 | (8) You are permitted to link Modified and Standard Versions with other works, to embed the Package in a larger work of your own, or to build stand-alone binary or bytecode versions of applications that include the Package, and Distribute the result without restriction, provided the result does not expose a direct interface to the Package.
82 |
83 | Items That are Not Considered Part of a Modified Version
84 |
85 | (9) Works (including, but not limited to, modules and scripts) that merely extend or make use of the Package, do not, by themselves, cause the Package to be a Modified Version. In addition, such works are not considered parts of the Package itself, and are not subject to the terms of this license.
86 |
87 | General Provisions
88 |
89 | (10) Any use, modification, and distribution of the Standard or Modified Versions is governed by this Artistic License. By using, modifying or distributing the Package, you accept this license. Do not use, modify, or distribute the Package, if you do not accept this license.
90 |
91 | (11) If your Modified Version has been derived from a Modified Version made by someone other than you, you are nevertheless required to ensure that your Modified Version complies with the requirements of this license.
92 |
93 | (12) This license does not grant you the right to use any trademark, service mark, tradename, or logo of the Copyright Holder.
94 |
95 | (13) This license includes the non-exclusive, worldwide, free-of-charge patent license to make, have made, use, offer to sell, sell, import and otherwise transfer the Package with respect to any patent claims licensable by the Copyright Holder that are necessarily infringed by the Package. If you institute patent litigation (including a cross-claim or counterclaim) against any party alleging that the Package constitutes direct or contributory patent infringement, then this Artistic License to you shall terminate on the date that such litigation is filed.
96 |
97 | (14) Disclaimer of Warranty:
98 | THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS IS" AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
99 |
100 |
101 |
102 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | # boundation
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 | Automatic scaffolding and upgrading of your JavaScript ecosystem projects using Bevry's best practices
29 |
30 |
31 |
32 |
33 | ## Usage
34 |
35 | Install the package globally on Node.js v18 or higher using `npm install --global boundation`, then run `boundation` on your project or in an empty directory.
36 |
37 | It will ask you several questions about your project, then initialise or upgrade the project with the latest Bevry best-practices.
38 |
39 | ## Features
40 |
41 | - Supports JavaScript, TypeScript, CoffeeScript, and Website projects
42 | - Automatic [Editions](https://github.com/bevry/editions) setup and upgrades for automatic selection of the best edition for the environment, allowing you to develop for the latest environment with the latest technology, then automatically test on and support older environments
43 | - Automated edition generation so a single package can be used across Node.js, Web Browsers, and Deno where applicable
44 | - Automated ES6 Import and CJS Require compatibility generation
45 | - Automated compatible editions generated for Web Browsers, Node.js, Deno
46 | - Each generated edition is targeted specifically for each version of each target that you intend to support, with each edition's compatibility tested by [Testen](https://github.com/bevry/testen) and redundant editions trimmed
47 | - Uses [Projectz](https://github.com/bevry/projectz) to automatically generate and maintain your readme, license, badges, and the contributing file
48 | - Automatically configures sensible defaults based on the features that your project is using, while maintaining support for your extensions and customisations; supported tooling includes:
49 | - TSConfig for JavaScript and TypeScript projects
50 | - ESLint for JavaScript and TypeScript projects
51 | - Flow for JavaScript projects
52 | - CoffeeLint for CoffeeScript projects
53 | - Zeit's Now and Next.js
54 | - DocPad Plugins
55 | - Automatically gives you documentation generation and publishing for the following:
56 | - TypeDoc for TypeScript projects
57 | - JSDoc for JavaScript projects
58 | - YUIDoc for new CoffeeScript projects, and Biscotto for older projects
59 | - Automated GitHub Actions setup and configuration for a variety of projects
60 | - Automated package dependency upgrades and migrations to compatible versions
61 | - Powerful NPM Scripts
62 | - `npm run our:setup` for setting up the project for development
63 | - automatic addition of your `my:setup:*` scripts
64 | - `npm run our:compile` for compiling the project
65 | - automatic addition of your `my:compile:*` scripts
66 | - `npm run our:deploy` for deploying the project
67 | - automatic addition of your `my:deploy:*` scripts
68 | - `npm run our:meta` for compiling the meta files
69 | - automatic addition of your `my:meta:*` scripts
70 | - `npm run our:verify` for linting and tests
71 | - automatic addition of your `my:verify:*` scripts
72 | - `npm run our:release` for releasing your project
73 | - on code projects, it will run verify, check for uncommitted changes and a changelog entry, then perform the git tag and git push automatically
74 | - on website projects, it will run verify and git push
75 | - automatic addition of your `my:release:*` scripts
76 |
77 |
78 |
79 | ## Install
80 |
81 | ### [npm](https://npmjs.com "npm is a package manager for javascript")
82 |
83 | #### Install Globally
84 |
85 | - Install: `npm install --global boundation`
86 | - Executable: `boundation`
87 |
88 | #### Install Locally
89 |
90 | - Install: `npm install --save boundation`
91 | - Executable: `npx boundation`
92 | - Import: `import pkg from 'boundation'`
93 | - Require: `const pkg = require('boundation').default`
94 |
95 | ### [Editions](https://editions.bevry.me "Editions are the best way to produce and consume packages you care about.")
96 |
97 | This package is published with the following editions:
98 | - `boundation` aliases `boundation/source/index.js`
99 | - `boundation/source/index.js` is [ESNext](https://en.wikipedia.org/wiki/ECMAScript#ES.Next "ECMAScript Next") source code for [Node.js](https://nodejs.org "Node.js is a JavaScript runtime built on Chrome's V8 JavaScript engine") 18 || 20 || 21 with [Import](https://babeljs.io/docs/learn-es2015/#modules "ECMAScript Modules") for modules
100 |
101 |
102 |
103 |
104 |
105 | ## History
106 |
107 | [Discover the release history by heading on over to the `HISTORY.md` file.](https://github.com/bevry/boundation/blob/HEAD/HISTORY.md#files)
108 |
109 |
110 |
111 |
112 |
113 | ## Backers
114 |
115 | ### Code
116 |
117 | [Discover how to contribute via the `CONTRIBUTING.md` file.](https://github.com/bevry/boundation/blob/HEAD/CONTRIBUTING.md#files)
118 |
119 | #### Authors
120 |
121 | - [Benjamin Lupton](https://balupton.com) — Accelerating collaborative wisdom.
122 |
123 | #### Maintainers
124 |
125 | - [Benjamin Lupton](https://balupton.com) — Accelerating collaborative wisdom.
126 |
127 | #### Contributors
128 |
129 | - [Benjamin Lupton](https://github.com/balupton) — [view contributions](https://github.com/bevry/boundation/commits?author=balupton "View the GitHub contributions of Benjamin Lupton on repository bevry/boundation")
130 |
131 | ### Finances
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 | #### Sponsors
142 |
143 | - [Andrew Nesbitt](https://nesbitt.io) — Software engineer and researcher
144 | - [Balsa](https://balsa.com) — We're Balsa, and we're building tools for builders.
145 | - [Codecov](https://codecov.io) — Empower developers with tools to improve code quality and testing.
146 | - [Frontend Masters](https://FrontendMasters.com) — The training platform for web app engineering skills – from front-end to full-stack! 🚀
147 | - [Mr. Henry](https://mrhenry.be)
148 | - [Poonacha Medappa](https://poonachamedappa.com)
149 | - [Rob Morris](https://github.com/Rob-Morris)
150 | - [Sentry](https://sentry.io) — Real-time crash reporting for your web apps, mobile apps, and games.
151 | - [Syntax](https://syntax.fm) — Syntax Podcast
152 |
153 | #### Donors
154 |
155 | - [Andrew Nesbitt](https://nesbitt.io)
156 | - [Ángel González](https://univunix.com)
157 | - [Armen Mkrtchian](https://mogoni.dev)
158 | - [Balsa](https://balsa.com)
159 | - [Chad](https://opencollective.com/chad8)
160 | - [Codecov](https://codecov.io)
161 | - [dr.dimitru](https://veliovgroup.com)
162 | - [Elliott Ditman](https://elliottditman.com)
163 | - [entroniq](https://gitlab.com/entroniq)
164 | - [Frontend Masters](https://FrontendMasters.com)
165 | - [GitHub](https://github.com/about)
166 | - [Hunter Beast](https://cryptoquick.com)
167 | - [Jean-Luc Geering](https://github.com/jlgeering)
168 | - [Lee Driscoll](https://leedriscoll.me)
169 | - [Michael Duane Mooring](https://mdm.cc)
170 | - [Michael Harry Scepaniak](https://michaelscepaniak.com)
171 | - [Mohammed Shah](https://github.com/smashah)
172 | - [Mr. Henry](https://mrhenry.be)
173 | - [Nermal](https://arjunaditya.vercel.app)
174 | - [Pleo](https://pleo.io)
175 | - [Poonacha Medappa](https://poonachamedappa.com)
176 | - [Robert de Forest](https://github.com/rdeforest)
177 | - [Rob Morris](https://github.com/Rob-Morris)
178 | - [Scott Kempson](https://github.com/scokem)
179 | - [Sentry](https://sentry.io)
180 | - [ServieJS](https://github.com/serviejs)
181 | - [Skunk Team](https://skunk.team)
182 | - [Syntax](https://syntax.fm)
183 | - [WriterJohnBuck](https://github.com/WriterJohnBuck)
184 |
185 |
186 |
187 |
188 |
189 | ## License
190 |
191 | Unless stated otherwise all works are:
192 |
193 | - Copyright © [Benjamin Lupton](https://balupton.com)
194 |
195 | and licensed under:
196 |
197 | - [Artistic License 2.0](http://spdx.org/licenses/Artistic-2.0.html)
198 |
199 |
200 |
--------------------------------------------------------------------------------
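
The npm-script bullets in the README above describe how boundation folds any `my:*` scripts a project defines into the generated `our:*` scripts. A minimal sketch of what that wiring can look like in a consumer project, expressed as a JavaScript object for brevity (the script names and exact chaining format are illustrative assumptions, not boundation output):

```javascript
// Hypothetical consumer project scripts: boundation appends every `my:<stage>:*`
// script it finds to the matching generated `our:<stage>` script.
const scripts = {
	// your custom compile step, picked up because of the `my:compile:` prefix
	'my:compile:assets': 'node ./tools/build-assets.js',
	// regenerated by boundation so that it also runs your my:compile:* scripts
	'our:compile': 'npm run my:compile:assets',
	// likewise for the other stages: our:setup, our:deploy, our:meta, our:verify, our:release
}
console.log(scripts)
```
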
/SECURITY.md:
--------------------------------------------------------------------------------
1 | # Security Policy
2 |
3 | ## Security Practices
4 |
5 | This project meets standardized secure software development practices, including 2FA for all members, password managers with monitoring, secure secret retrieval instead of storage. [Learn about our practices.](https://tidelift.com/funding/github/npm/boundation)
6 |
7 | ## Supported Versions
8 |
9 | This project uses [Bevry's automated tooling](https://github.com/bevry/boundation) to deliver the latest updates, fixes, and improvements inside the latest release while still maintaining widespread ecosystem compatibility.
10 |
11 | [Refer to supported ecosystem versions: `Editions` section in `README.md`](https://github.com/bevry/boundation/blob/master/README.md#Editions)
12 |
13 | [Refer to automated support of ecosystem versions: `boundation` entries in `HISTORY.md`](https://github.com/bevry/boundation/blob/master/HISTORY.md)
14 |
15 | Besides testing and verification, our CI also [auto-merges](https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions) [Dependabot security updates](https://docs.github.com/en/code-security/dependabot/dependabot-security-updates/about-dependabot-security-updates) and [auto-publishes](https://github.com/bevry-actions/npm) successful builds of the [`master` branch](https://github.com/bevry/boundation/actions?query=branch%3Amaster) to the [`next` version tag](https://www.npmjs.com/package/boundation?activeTab=versions), offering immediate resolutions before scheduled maintenance releases.
16 |
17 | ## Reporting a Vulnerability
18 |
19 | [Report the vulnerability to the project owners.](https://github.com/bevry/boundation/security/advisories)
20 |
21 | [Report the vulnerability to Tidelift.](https://tidelift.com/security)
22 |
--------------------------------------------------------------------------------
/bin.mjs:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 | // auto-generated by boundation, do not update manually
3 | export * from './source/bin.js'
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "boundation",
3 | "version": "1.91.0",
4 | "license": "Artistic-2.0",
5 | "description": "Automatic scaffolding and upgrading of your JavaScript ecosystem projects using Bevry's best practices",
6 | "homepage": "https://github.com/bevry/boundation",
7 | "funding": "https://bevry.me/fund",
8 | "repository": {
9 | "type": "git",
10 | "url": "git+https://github.com/bevry/boundation.git"
11 | },
12 | "bugs": {
13 | "url": "https://github.com/bevry/boundation/issues"
14 | },
15 | "keywords": [
16 | ".travis.yml",
17 | "esnext",
18 | "export-default",
19 | "node",
20 | "npm",
21 | "package.json",
22 | "scaffold",
23 | "scaffolding",
24 | "travis",
25 | "travis.yml",
26 | "travisci"
27 | ],
28 | "badges": {
29 | "list": [
30 | "githubworkflow",
31 | "npmversion",
32 | "npmdownloads",
33 | "---",
34 | "githubsponsors",
35 | "thanksdev",
36 | "liberapay",
37 | "buymeacoffee",
38 | "opencollective",
39 | "crypto",
40 | "paypal",
41 | "---",
42 | "discord",
43 | "twitch"
44 | ],
45 | "config": {
46 | "githubWorkflow": "bevry",
47 | "githubSponsorsUsername": "balupton",
48 | "thanksdevGithubUsername": "bevry",
49 | "liberapayUsername": "bevry",
50 | "buymeacoffeeUsername": "balupton",
51 | "opencollectiveUsername": "bevry",
52 | "cryptoURL": "https://bevry.me/crypto",
53 | "paypalURL": "https://bevry.me/paypal",
54 | "discordServerID": "1147436445783560193",
55 | "discordServerInvite": "nQuXddV7VP",
56 | "twitchUsername": "balupton",
57 | "githubUsername": "bevry",
58 | "githubRepository": "boundation",
59 | "githubSlug": "bevry/boundation",
60 | "npmPackageName": "boundation"
61 | }
62 | },
63 | "author": "Benjamin Lupton (https://balupton.com) (https://github.com/balupton)",
64 | "authors": [
65 | "Benjamin Lupton (https://balupton.com) (https://github.com/balupton): Accelerating collaborative wisdom."
66 | ],
67 | "maintainers": [
68 | "Benjamin Lupton (https://balupton.com) (https://github.com/balupton): Accelerating collaborative wisdom."
69 | ],
70 | "contributors": [
71 | "Benjamin Lupton (https://balupton.com) (https://github.com/balupton)"
72 | ],
73 | "sponsors": [
74 | "Andrew Nesbitt (https://nesbitt.io) (https://github.com/andrew): Software engineer and researcher",
75 | "Balsa (https://balsa.com) (https://github.com/balsa): We're Balsa, and we're building tools for builders.",
76 | "Codecov (https://codecov.io) (https://github.com/codecov): Empower developers with tools to improve code quality and testing.",
77 | "Frontend Masters (https://FrontendMasters.com) (https://github.com/FrontendMasters) (https://thanks.dev/d/gh/FrontendMasters): The training platform for web app engineering skills – from front-end to full-stack! 🚀",
78 | "Mr. Henry (https://mrhenry.be) (https://github.com/mrhenry)",
79 | "Poonacha Medappa (https://poonachamedappa.com) (https://github.com/km-Poonacha)",
80 | "Rob Morris (https://github.com/Rob-Morris)",
81 | "Sentry (https://sentry.io) (https://github.com/getsentry): Real-time crash reporting for your web apps, mobile apps, and games.",
82 | "Syntax (https://syntax.fm) (https://github.com/syntaxfm): Syntax Podcast"
83 | ],
84 | "donors": [
85 | "Andrew Nesbitt (https://nesbitt.io) (https://github.com/andrew)",
86 | "Ángel González (https://univunix.com) (https://github.com/Aglezabad) (https://twitter.com/Aglezabad)",
87 | "Armen Mkrtchian (https://mogoni.dev) (https://github.com/Armenm) (https://twitter.com/armen_mkrtchian)",
88 | "Balsa (https://balsa.com) (https://github.com/balsa)",
89 | "Chad (https://opencollective.com/chad8)",
90 | "Codecov (https://codecov.io) (https://github.com/codecov)",
91 | "dr.dimitru (https://veliovgroup.com) (https://github.com/dr-dimitru)",
92 | "Elliott Ditman (https://elliottditman.com) (https://github.com/elliottditman) (https://patreon.com/user?u=15026448)",
93 | "entroniq (https://gitlab.com/entroniq) (https://thanks.dev/d/gl/entroniq)",
94 | "Frontend Masters (https://FrontendMasters.com) (https://github.com/FrontendMasters) (https://thanks.dev/d/gh/FrontendMasters)",
95 | "GitHub (https://github.com/about) (https://github.com/github)",
96 | "Hunter Beast (https://cryptoquick.com) (https://github.com/cryptoquick)",
97 | "Jean-Luc Geering (https://github.com/jlgeering) (https://opencollective.com/jlgeering) (https://twitter.com/jlgeering)",
98 | "Lee Driscoll (https://leedriscoll.me) (https://github.com/leedriscoll) (https://patreon.com/user?u=5292556)",
99 | "Michael Duane Mooring (https://mdm.cc) (https://github.com/mikeumus) (https://opencollective.com/mikeumus) (https://twitter.com/mikeumus)",
100 | "Michael Harry Scepaniak (https://michaelscepaniak.com) (https://github.com/hispanic)",
101 | "Mohammed Shah (https://github.com/smashah) (https://thanks.dev/d/gh/smashah) (https://twitter.com/smashah)",
102 | "Mr. Henry (https://mrhenry.be) (https://github.com/mrhenry)",
103 | "Nermal (https://arjunaditya.vercel.app) (https://github.com/nermalcat69)",
104 | "Pleo (https://pleo.io) (https://github.com/pleo-io)",
105 | "Poonacha Medappa (https://poonachamedappa.com) (https://github.com/km-Poonacha)",
106 | "Robert de Forest (https://github.com/rdeforest)",
107 | "Rob Morris (https://github.com/Rob-Morris)",
108 | "Scott Kempson (https://github.com/scokem) (https://twitter.com/scokem)",
109 | "Sentry (https://sentry.io) (https://github.com/getsentry)",
110 | "ServieJS (https://github.com/serviejs) (https://thanks.dev/d/gh/serviejs)",
111 | "Skunk Team (https://skunk.team) (https://github.com/skunkteam)",
112 | "Syntax (https://syntax.fm) (https://github.com/syntaxfm)",
113 | "WriterJohnBuck (https://github.com/WriterJohnBuck)"
114 | ],
115 | "engines": {
116 | "node": ">=18"
117 | },
118 | "editions": [
119 | {
120 | "description": "ESNext source code for Node.js 18 || 20 || 21 with Import for modules",
121 | "directory": "source",
122 | "entry": "index.js",
123 | "tags": [
124 | "source",
125 | "javascript",
126 | "esnext",
127 | "import"
128 | ],
129 | "engines": {
130 | "node": "18 || 20 || 21"
131 | }
132 | }
133 | ],
134 | "bin": "bin.mjs",
135 | "type": "module",
136 | "main": "source/index.js",
137 | "exports": {
138 | "node": {
139 | "import": "./source/index.js"
140 | }
141 | },
142 | "dependencies": {
143 | "@bevry/ansi": "^6.9.0",
144 | "@bevry/ecmascript-versions": "^4.8.0",
145 | "@bevry/fs-accessible": "^2.5.0",
146 | "@bevry/fs-read": "^1.6.0",
147 | "@bevry/fs-readable": "^2.5.0",
148 | "@bevry/fs-remove": "^1.7.0",
149 | "@bevry/fs-unlink": "^1.6.0",
150 | "@bevry/fs-write": "^2.0.0",
151 | "@bevry/github-api": "^11.4.0",
152 | "@bevry/github-orgs": "^3.6.0",
153 | "@bevry/list": "^2.5.0",
154 | "@bevry/nodejs-ecmascript-compatibility": "^5.8.0",
155 | "@bevry/nodejs-versions": "^3.5.0",
156 | "@bevry/testen": "^9.4.0",
157 | "arrange-package-json": "^5.2.0",
158 | "arrangekeys": "^6.7.0",
159 | "errlop": "^8.4.0",
160 | "fellow": "^7.4.0",
161 | "filedirname": "^3.4.0",
162 | "get-cli-arg": "^8.6.0",
163 | "inquirer": "^9.2.12",
164 | "js-yaml": "^4.1.0",
165 | "safeps": "^11.6.0",
166 | "sortobject": "^5.6.0",
167 | "trim-empty-keys": "^1.2.0",
168 | "typechecker": "^9.3.0",
169 | "version-compare": "^3.11.0",
170 | "version-range": "^4.14.0"
171 | },
172 | "devDependencies": {
173 | "assert-helpers": "^11.12.0",
174 | "eslint": "^8.56.0",
175 | "eslint-config-bevry": "^5.5.0",
176 | "eslint-config-prettier": "^9.1.0",
177 | "eslint-plugin-prettier": "^5.1.3",
178 | "kava": "^7.9.0",
179 | "prettier": "^3.1.1",
180 | "projectz": "^4.2.0",
181 | "valid-directory": "^4.9.0"
182 | },
183 | "scripts": {
184 | "our:bin": "node ./bin.mjs",
185 | "our:clean": "rm -rf ./docs ./edition* ./es2015 ./es5 ./out ./.next",
186 | "our:compile": "printf '%s\n' 'no need for this project'",
187 | "our:deploy": "printf '%s\n' 'no need for this project'",
188 | "our:meta": "npm run our:meta:projectz",
189 | "our:meta:projectz": "projectz --offline",
190 | "our:release": "npm run our:release:prepare && npm run our:release:check-changelog && npm run our:release:check-dirty && npm run our:release:tag && npm run our:release:push",
191 | "our:release:check-changelog": "cat ./HISTORY.md | grep \"v$npm_package_version\" || (printf '%s\n' \"add a changelog entry for v$npm_package_version\" && exit -1)",
192 | "our:release:check-dirty": "git diff --exit-code",
193 | "our:release:prepare": "npm run our:clean && npm run our:compile && npm run our:test && npm run our:meta",
194 | "our:release:push": "git push origin && git push origin --tags",
195 | "our:release:tag": "export MESSAGE=$(cat ./HISTORY.md | sed -n \"/## v$npm_package_version/,/##/p\" | sed 's/## //' | awk 'NR>1{print buf}{buf = $0}') && test \"$MESSAGE\" || (printf '%s\n' 'proper changelog entry not found' && exit -1) && git tag \"v$npm_package_version\" -am \"$MESSAGE\"",
196 | "our:setup": "npm run our:setup:install",
197 | "our:setup:install": "npm install",
198 | "our:test": "npm run our:verify && npm test",
199 | "our:verify": "npm run our:verify:eslint && npm run our:verify:prettier",
200 | "our:verify:eslint": "eslint --fix --ignore-pattern '**/*.d.ts' --ignore-pattern '**/vendor/' --ignore-pattern '**/node_modules/' --ext .mjs,.js,.jsx,.ts,.tsx ./source",
201 | "our:verify:prettier": "prettier --write .",
202 | "test": "node ./source/test.js"
203 | },
204 | "boundation": {
205 | "maintainedNodeVersions": true,
206 | "expandNodeVersions": false,
207 | "compileNode": false
208 | },
209 | "eslintConfig": {
210 | "extends": [
211 | "bevry"
212 | ],
213 | "rules": {
214 | "require-atomic-updates": 0,
215 | "no-console": 0,
216 | "no-use-before-define": 1,
217 | "valid-jsdoc": 0
218 | }
219 | },
220 | "prettier": {
221 | "semi": false,
222 | "singleQuote": true,
223 | "trailingComma": "all",
224 | "endOfLine": "lf"
225 | }
226 | }
227 |
--------------------------------------------------------------------------------
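
The `our:release:tag` script in the package.json above packs the changelog extraction into a single sed/awk pipeline. A rough JavaScript equivalent, included only to unpack what that one-liner does (a sketch assuming it runs as an npm script with `npm_package_version` set; it is not code from this repository):

```javascript
// Extract the HISTORY.md section for the current version so it can be used as
// the annotated git tag message, mirroring the sed/awk pipeline in our:release:tag.
import { readFileSync } from 'node:fs'

const version = process.env.npm_package_version // provided by npm when run as a script
const lines = readFileSync('./HISTORY.md', 'utf8').split('\n')
const start = lines.findIndex((line) => line.startsWith(`## v${version}`))
if (start === -1) {
	throw new Error(`proper changelog entry not found for v${version}`)
}
const end = lines.findIndex((line, index) => index > start && line.startsWith('## '))
const message = lines
	.slice(start, end === -1 ? undefined : end)
	.join('\n')
	.replace(/^## /, '') // drop the heading marker, like the `sed 's/## //'` step
	.trim()
console.log(message) // passed to: git tag "v$npm_package_version" -am "$MESSAGE"
```
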
/source/answers.js:
--------------------------------------------------------------------------------
1 | // external
2 | import inquirer from 'inquirer'
3 | import * as ansi from '@bevry/ansi'
4 | import Errlop from 'errlop'
5 | import getArgValue from 'get-cli-arg'
6 |
7 | // local
8 | import { hiddenConfigurationProperties } from './data.js'
9 |
10 | // vars
11 | const skipAllArg = '--auto'
12 | const skipAll = process.argv.includes(skipAllArg)
13 |
14 | // Fetch: resolve a question property that may be a function (called with the question as `this` and the given args)
15 | function fetch(q, value, ...args) {
16 | return typeof value === 'function' ? value.apply(q, args) : value
17 | }
18 |
19 | // Action: ask the questions, automating answers from CLI args, the package.json boundation config, skip/when/ignore rules, and --auto
20 | export default async function getAnswers(questions, user = {}) {
21 | try {
22 | // dereference user, so our modifications don't apply to it
23 | user = JSON.parse(JSON.stringify(user))
24 | // find defaults
25 | const defaults = {}
26 | questions.forEach(function (question) {
27 | const { name, skip, when, ignore, arg } = question
28 | if (typeof question.default === 'function') {
29 | const qc = question.choices
30 | if (typeof question.choices === 'function')
31 | question.choices = function (answers) {
32 | const values = Object.assign({}, defaults, answers)
33 | const value = fetch(question, qc, values)
34 | return value
35 | }
36 | const qd = question.default
37 | question.default = function (answers) {
38 | const values = Object.assign({}, defaults, answers)
39 | const value = fetch(question, qd, values)
40 | return value
41 | }
42 | }
43 | question.when = async function (answers) {
44 | let reason,
45 | result,
46 | opaque = false
47 |
48 | // fetch values
49 | const value = await Promise.resolve(
50 | fetch(question, question.default, answers),
51 | )
52 | const values = Object.assign({ [name]: value }, defaults, answers)
53 |
54 | // check args
55 | const args = arg ? [name, arg] : [name]
56 | for (const _arg of args) {
57 | const _value = getArgValue(_arg)
58 | if (_value != null) {
59 | opaque = true
60 | defaults[name] = _value === 0.1 ? '0.10' : _value
61 | reason = 'arg'
62 | }
63 | }
64 |
65 | // check user
66 | if (typeof user[name] !== 'undefined') {
67 | if (reason) {
68 | console.warn(
69 | `package.json:boundation:${name}=${JSON.stringify(
70 | user[name],
71 | )} taking preference over ${reason} value of ${JSON.stringify(
72 | defaults[name],
73 | )}`,
74 | )
75 | }
76 | defaults[name] = user[name]
77 | delete user[name]
78 | reason = 'package'
79 | }
80 |
81 | // fallback to other checks if no arg
82 | if (!reason) {
83 | // check when and ignore
84 | if (when || ignore) {
85 | // check when
86 | if (when != null) {
87 | result = fetch(question, when, values)
88 | if (!result) reason = 'when'
89 | }
90 | // check ignore
91 | if (!reason && ignore != null) {
92 | result = fetch(question, ignore, values)
93 | if (result) reason = 'ignore'
94 | }
95 | }
96 |
97 | // check skip
98 | if (!reason) {
99 | // check skip
100 | if (skip != null) {
101 | result = fetch(question, skip, values)
102 | if (result) {
103 | reason = 'skip'
104 | opaque = true
105 | }
106 | }
107 | // check skip all
108 | if (!reason && skipAll) {
109 | reason = skipAllArg
110 | opaque = true
111 | }
112 | // store value
113 | if (reason) {
114 | defaults[name] = value
115 | }
116 | }
117 | }
118 |
119 | // if we are not proceeding then ignore
120 | if (reason) {
121 | const value = defaults[name]
122 | const color = opaque ? (v) => v : ansi.dim
123 | const message = [
124 | 'Automated',
125 | ansi.bold(ansi.underline(name)),
126 | 'via',
127 | reason,
128 | 'to',
129 | // type="checkbox" returns arrays
130 | // values could also be null, undefined, true, or false
131 | ansi.bold(
132 | ansi.green(
133 | question.type === 'password'
134 | ? '[hidden]'
135 | : typeof value === 'string'
136 | ? value
137 | : JSON.stringify(value),
138 | ),
139 | ),
140 | ]
141 | .map((v) => color(v))
142 | .join(' ')
143 | console.log(message)
144 | }
145 | return !reason
146 | }
147 | })
148 |
149 | // get answers
150 | const answers = await inquirer.prompt(questions)
151 |
152 | // merge in defaults
153 | const values = Object.assign({}, defaults, answers)
154 |
155 | // check if we had any unknown properties
156 | const unknownProperties = new Set(Object.keys(user))
157 | for (const hidden of hiddenConfigurationProperties)
158 | unknownProperties.delete(hidden)
159 | if (unknownProperties.size) {
160 | console.log(user)
161 | throw new Error(
162 | `package.json:boundation had unknown properties: ${Array.from(
163 | unknownProperties.values(),
164 | ).join(', ')}`,
165 | )
166 | }
167 |
168 | // return merge
169 | return values
170 | } catch (err) {
171 | throw new Errlop('Failed to fetch the answers from the user', err)
172 | }
173 | }
174 |
--------------------------------------------------------------------------------
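
A usage sketch for the `getAnswers` default export above, showing how an answer can be automated instead of prompted. The question list and configuration below are hypothetical; the `skip` and `user` handling follow the logic in the module:

```javascript
// Hypothetical invocation of getAnswers from ./answers.js (Node ESM, top-level await).
import getAnswers from './answers.js'

const questions = [
	{
		name: 'name',
		message: 'What will the package name be?',
		default: 'my-package',
	},
	{
		name: 'language',
		type: 'list',
		choices: ['esnext', 'typescript', 'coffeescript'],
		default: 'esnext',
		// `skip` is boundation's extension to the inquirer question shape: when it
		// returns true, the answer is automated to the default instead of prompted
		skip(values) {
			return String(values.name || '').startsWith('docpad-plugin-')
		},
	},
]

// the second argument is the package.json `boundation` configuration; any
// property present there wins over prompts and CLI args such as --language=<value>
const answers = await getAnswers(questions, { language: 'esnext' })
console.log(answers) // e.g. { name: 'my-package', language: 'esnext' }
```
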
/source/base.js:
--------------------------------------------------------------------------------
1 | /* eslint-disable camelcase */
2 |
3 | // builtin
4 | import * as pathUtil from 'node:path'
5 | import * as urlUtil from 'node:url'
6 |
7 | // external
8 | import Errlop from 'errlop'
9 | import { is as isBevryOrganisation } from '@bevry/github-orgs'
10 | import { isAccessible } from '@bevry/fs-accessible'
11 | import write from '@bevry/fs-write'
12 | import read from '@bevry/fs-read'
13 | import unlink from '@bevry/fs-unlink'
14 | import remove from '@bevry/fs-remove'
15 | import mkdirp from '@bevry/fs-mkdirp'
16 |
17 | // local
18 | import { trimOrgName } from './util.js'
19 | import { status } from './log.js'
20 | import { rename } from './fs.js'
21 |
22 | export async function download(opts) {
23 | try {
24 | if (typeof opts === 'string') opts = { url: opts }
25 | const response = await fetch(opts.url, {})
26 | let data = await response.text()
27 | const file =
28 | opts.file || pathUtil.basename(urlUtil.parse(opts.url).pathname)
29 | if (await isAccessible(file)) {
30 | if (opts.overwrite === false) {
31 | return Promise.resolve()
32 | }
33 | const localData = (await read(file)).toString()
34 | const localLines = localData.split('\n')
35 | const localCustomIndex = localLines.findIndex((line) =>
36 | /^# CUSTOM/i.test(line),
37 | )
38 | if (localCustomIndex !== -1) {
39 | const remoteLines = data.split('\n')
40 | const remoteCustomIndex = remoteLines.findIndex((line) =>
41 | /^# CUSTOM/i.test(line),
42 | )
43 | data = remoteLines
44 | .slice(0, remoteCustomIndex)
45 | .concat(localLines.slice(localCustomIndex))
46 | .join('\n')
47 | }
48 | }
49 | return write(file, data)
50 | } catch (err) {
51 | throw new Errlop(`Download of ${opts.url} FAILED`, err)
52 | }
53 | }
54 |
55 | export async function updateBaseFiles({ answers, packageData }) {
56 | // clean
57 | status('removing old files...')
58 | const purgeList = [
59 | // old ecosystem files
60 | '.babelrc',
61 | '.eslintrc.js',
62 | '.jscrc',
63 | '.jshintrc',
64 | '.stylelintrc.js',
65 | 'Cakefile',
66 | 'cyclic.js',
67 | 'docpad-setup.sh',
68 | 'esnextguardian.js',
69 | 'nakefile.js',
70 | 'next.config.js',
71 | // package manager logs
72 | 'npm-debug.log',
73 | 'yarn-error.log',
74 | // package manager caches
75 | // npm (don't trim node_modules if we are boundation, as otherwise we can't run ourself)
76 | answers.name === 'boundation' ? '' : 'node_modules/',
77 | 'package-lock.json',
78 | // pnpm
79 | 'pnpm-lock.yaml',
80 | // yarn
81 | '.pnp',
82 | '.pnp.js',
83 | 'yarn.lock',
84 | ].filter((i) => i)
85 | if (answers.packageManager !== 'yarn')
86 | purgeList.push('./.yarnrc', './.yarnrc.yml', './.yarn/')
87 | await remove(purgeList.map((i) => `./${i}`))
88 | status('...removed old files')
89 |
90 | // rename old files
91 | status('renaming old files...')
92 |
93 | if (await isAccessible('src')) {
94 | await rename('src', 'source')
95 | }
96 |
97 | if (await isAccessible('history.md')) {
98 | await rename('history.md', 'HISTORY.md')
99 | }
100 |
101 | if (answers.docpadPlugin) {
102 | const docpadMainEntry =
103 | packageData.name.replace(/^docpad-plugin-/, '') + '.plugin'
104 | if (await isAccessible(`./source/${docpadMainEntry}.coffee`)) {
105 | await rename(
106 | `./source/${docpadMainEntry}.coffee`,
107 | './source/index.coffee',
108 | )
109 | } else if (await isAccessible(`./source/${docpadMainEntry}.js`)) {
110 | await rename(`./source/${docpadMainEntry}.js`, './source/index.js')
111 | }
112 |
113 | const docpadTestEntry =
114 | packageData.name.replace(/^docpad-plugin-/, '') + '.test'
115 | if (await isAccessible(`./source/${docpadTestEntry}.coffee`)) {
116 | await rename(`./source/${docpadTestEntry}.coffee`, './source/test.coffee')
117 | } else if (await isAccessible(`./source/${docpadTestEntry}.js`)) {
118 | await rename(`./source/${docpadTestEntry}.js`, './source/test.js')
119 | }
120 |
121 | const docpadTesterEntry =
122 | packageData.name.replace(/^docpad-plugin-/, '') + '.tester'
123 | if (await isAccessible(`./source/${docpadTesterEntry}.coffee`)) {
124 | await rename(
125 | `./source/${docpadTesterEntry}.coffee`,
126 | './source/tester.coffee',
127 | )
128 | } else if (await isAccessible(`./source/${docpadTesterEntry}.js`)) {
129 | await rename(`./source/${docpadTesterEntry}.js`, './source/tester.js')
130 | }
131 | }
132 | status('...renamed old files')
133 |
134 | status('downloading files...')
135 | /** @type {Array} */
136 | const downloads = [
137 | 'https://raw.githubusercontent.com/bevry/base/HEAD/.editorconfig',
138 | {
139 | url: 'https://raw.githubusercontent.com/bevry/base/HEAD/.gitattributes',
140 | custom: true,
141 | },
142 | {
143 | url: 'https://raw.githubusercontent.com/bevry/base/HEAD/.gitignore',
144 | custom: true,
145 | },
146 | {
147 | url: 'https://raw.githubusercontent.com/bevry/base/HEAD/.prettierignore',
148 | custom: true,
149 | },
150 | 'https://raw.githubusercontent.com/bevry/base/HEAD/LICENSE.md',
151 | 'https://raw.githubusercontent.com/bevry/base/HEAD/CONTRIBUTING.md',
152 | ]
153 | if (answers.type === 'package') {
154 | downloads.push({
155 | url: 'https://raw.githubusercontent.com/bevry/base/HEAD/HISTORY.md',
156 | overwrite: false,
157 | })
158 | }
159 | if (answers.npm) {
160 | downloads.push({
161 | url: 'https://raw.githubusercontent.com/bevry/base/HEAD/.npmignore',
162 | custom: true,
163 | })
164 | } else {
165 | await unlink('.npmignore')
166 | }
167 | if (answers.flowtype) {
168 | downloads.push(
169 | 'https://raw.githubusercontent.com/bevry/base/HEAD/.flowconfig',
170 | )
171 | } else {
172 | await unlink('.flowconfig')
173 | }
174 | if (answers.languages.includes('coffeescript')) {
175 | downloads.push(
176 | 'https://raw.githubusercontent.com/bevry/base/34fc820c8d87f1f21706ce7e26882b6cd5437368/coffeelint.json',
177 | )
178 | } else {
179 | await unlink('coffeelint.json')
180 | }
181 | await Promise.all(downloads.map((i) => download(i)))
182 | status('...downloaded files')
183 |
184 | // write the readme file
185 | // trim say `@bevry/update-contributors` to `update-contributors` for API doc links
186 | let newDocumentationLink = ''
187 | if (answers.docs && ['bevry', 'surge'].includes(answers.deploymentStrategy)) {
188 | const newDocumentationPrefix =
189 | answers.deploymentStrategy === 'bevry'
190 | ? `https://cdn.bevry.me/${trimOrgName(answers.name)}/${
191 | answers.defaultBranch
192 | }/`
193 | : `http://${answers.defaultBranch}.${trimOrgName(answers.name)}.${
194 | answers.githubUsername
195 | }.surge.sh/`
196 | const newDocumentationSuffix = `docs/${
197 | answers.deploymentStrategy === 'bevry' ? 'index.html' : ''
198 | }` // don't use globals.html, it may not always be present
199 | const newDocumentationURL = newDocumentationPrefix + newDocumentationSuffix
200 | newDocumentationLink = `[Complete API Documentation.](${newDocumentationURL})`
201 | }
202 | if ((await isAccessible('README.md')) === false) {
203 | status('writing readme file...')
204 | await write(
205 | 'README.md',
206 | [
207 | '',
208 | '',
209 | '',
210 | '## Usage',
211 | answers.docs && newDocumentationLink,
212 | '',
213 | '',
214 | '',
215 | '',
216 | '',
217 | ]
218 | .filter((i) => i)
219 | .join('\n\n'),
220 | )
221 | status('...wrote readme file')
222 | } else {
223 | // update the existing readme file
224 | status('updating readme file...')
225 | // read
226 | let content = await read('README.md')
227 | content = content.toString()
228 | // remove old documentation link, should come before the changes below
229 | if (newDocumentationLink) {
230 | content = content.replace(
231 | /\[(Complete )?(Technical )?(API )?Documentation\.?\]\([^)]+\)/g,
232 | '',
233 | )
234 | }
235 | // update old documentation names
236 | content = content
237 | .replace(/\[Web Demonstration\.?\]/g, '[Web Browser Demonstration.]')
238 | .replace(
239 | /\[(Tutorials & Guides|Documentation)\.?\]/g,
240 | '[Tutorials & Guides.]',
241 | )
242 | // insert new documentation under usage
243 | if (newDocumentationLink) {
244 | content = content.replace('## Usage', function (found) {
245 | return found + '\n\n' + newDocumentationLink
246 | })
247 | }
248 | // move the install section before the history section
249 | let install = ''
250 | content = content.replace(
251 | /<!-- INSTALL\/ -->.+?<!-- \/INSTALL -->/s,
252 | function (found) {
253 | install = found
254 | return ''
255 | },
256 | )
257 | content = content.replace('<!-- HISTORY/ -->', function (found) {
258 | return install + '\n\n' + found
259 | })
260 | // write
261 | await write('README.md', content)
262 | status('...updated readme file')
263 | }
264 |
265 | // convert the history file
266 | if (await isAccessible('HISTORY.md')) {
267 | status('updating history file...')
268 | let historyContent = await read('HISTORY.md')
269 | historyContent = historyContent.toString()
270 | if (/^##/m.test(historyContent) === false) {
271 | historyContent = historyContent.replace(/^-/gm, '##').replace(/^\t/gm, '')
272 | }
273 | historyContent = historyContent.replace(
274 | /^(## v\d+\.\d+\.\d+) ([a-z]+ \d+), (\d+)$/gim,
275 | '$1 $3 $2',
276 | )
277 | await write('HISTORY.md', historyContent)
278 | status('...updated history file')
279 | }
280 |
281 | // write bevry specific files
282 | if (isBevryOrganisation(answers.githubUsername)) {
283 | // security
284 | if (answers.npm) {
285 | status('writing security file...')
286 | await write(
287 | 'SECURITY.md',
288 | [
289 | '# Security Policy',
290 | '',
291 | '## Security Practices',
292 | '',
293 | `This project meets standardized secure software development practices, including 2FA for all members, password managers with monitoring, secure secret retrieval instead of storage. [Learn about our practices.](https://tidelift.com/funding/github/npm/${answers.name})`,
294 | '',
295 | '## Supported Versions',
296 | '',
297 | `This project uses [Bevry's automated tooling](https://github.com/bevry/boundation) to deliver the latest updates, fixes, and improvements inside the latest release while still maintaining widespread ecosystem compatibility.`,
298 | '',
299 | `[Refer to supported ecosystem versions: \`Editions\` section in \`README.md\`](https://github.com/${answers.githubSlug}/blob/${answers.defaultBranch}/README.md#Editions)`,
300 | '',
301 | `[Refer to automated support of ecosystem versions: \`boundation\` entries in \`HISTORY.md\`](https://github.com/${answers.githubSlug}/blob/${answers.defaultBranch}/HISTORY.md)`,
302 | '',
303 | `Besides testing and verification, our CI also [auto-merges](https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions) [Dependabot security updates](https://docs.github.com/en/code-security/dependabot/dependabot-security-updates/about-dependabot-security-updates) and [auto-publishes](https://github.com/bevry-actions/npm) successful builds of the [\`${answers.defaultBranch}\` branch](https://github.com/${answers.githubSlug}/actions?query=branch%3A${answers.defaultBranch}) to the [\`next\` version tag](https://www.npmjs.com/package/${answers.name}?activeTab=versions), offering immediate resolutions before scheduled maintenance releases.`,
304 | '',
305 | '## Reporting a Vulnerability',
306 | '',
307 | `[Report the vulnerability to the project owners.](https://github.com/${answers.githubSlug}/security/advisories)`,
308 | '',
309 | '[Report the vulnerability to Tidelift.](https://tidelift.com/security)',
310 | ]
311 | .filter((i) => i)
312 | .join('\n\n'),
313 | )
314 | status('...wrote security file')
315 | }
316 |
317 | // funding
318 | // https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/displaying-a-sponsor-button-in-your-repository
319 | await mkdirp('.github')
320 | await write(
321 | '.github/FUNDING.yml',
322 | [
323 | 'github: [balupton]',
324 | // doesn't support thanksdev
325 | 'liberapay: bevry',
326 | 'ko_fi: balupton',
327 | // doesn't support buymeacoffee
328 | 'open_collective: bevry',
329 | answers.npm ? `tidelift: npm/${answers.name}` : '',
330 | "custom: ['https://bevry.me/fund']",
331 | ]
332 | .filter(Boolean)
333 | .join('\n'),
334 | )
335 | }
336 | }
337 |
--------------------------------------------------------------------------------
/source/bin.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | // builtin
4 | import { resolve, join } from 'node:path'
5 |
6 | // get root with imports
7 | import filedirname from 'filedirname'
8 | const [file, dir] = filedirname()
9 | const root = resolve(dir, '..')
10 | const pkgPath = join(root, 'package.json')
11 |
12 | // internal
13 | import { readJSON } from './fs.js'
14 | import boundation from './index.js'
15 | import state from './state.js'
16 | import { pwd } from './data.js'
17 |
18 | // // Process unhandled rejections
19 | // process.on('unhandledRejection', function unhandledRejection(error) {
20 | // console.error(new Errlop('An unhandled promise failed', error))
21 | // process.exit(-1)
22 | // })
23 |
24 | async function main() {
25 | // boundation
26 | const { version } = await readJSON(pkgPath)
27 | console.log(`Boundation v${version} [${root}]`)
28 |
29 | // app
30 | console.log(`Running on [${pwd}]`)
31 |
32 | // run
33 | await boundation(state)
34 | }
35 |
36 | main().catch((err) => {
37 | console.error(err)
38 | process.exit(1)
39 | })
40 |
--------------------------------------------------------------------------------
/source/ci.js:
--------------------------------------------------------------------------------
1 | /* eslint no-template-curly-in-string:0 */
2 |
3 | // external
4 | import { filterNodeVersions } from '@bevry/nodejs-versions'
5 | import unlink from '@bevry/fs-unlink'
6 | import { isAccessible } from '@bevry/fs-accessible'
7 | import mkdirp from '@bevry/fs-mkdirp'
8 | import trimEmptyKeys from 'trim-empty-keys'
9 |
10 | // local
11 | import { status } from './log.js'
12 | import { writeYAML } from './fs.js'
13 |
14 | // github actions no longer supports node versions prior to 16
15 | // https://github.blog/changelog/2023-06-13-github-actions-all-actions-will-run-on-node16-instead-of-node12-by-default/
16 | function filterSetupNodeVersions(nodeVersions) {
17 | return filterNodeVersions(nodeVersions, { gte: 16 })
18 | }
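// Illustrative sketch, not part of the source: assuming filterNodeVersions keeps only versions
// satisfying the given range, a hypothetical call
//   filterSetupNodeVersions(['14', '16', '18', '20']) //=> ['16', '18', '20']
// would drop Node.js 14, which GitHub Actions no longer supports (see the comment above).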
19 |
20 | // generate the json file
21 | function generateGitHubActionsJSON(state) {
22 | // extract
23 | const { packageData, answers } = state
24 |
25 | // prepare vars
26 | const actionsOperatingSystems = answers.npm
27 | ? ['ubuntu-latest', 'macos-latest', 'windows-latest']
28 | : ['ubuntu-latest']
29 | const actionsOperatingSystemsOptional = []
30 | /* @todo: make optional operating systems a hidden question, as we now want to guarantee support on all operating systems unless extenuating circumstances prevent it:
31 | = intersect(actionsOperatingSystems, [
32 | 'macos-latest',
33 | 'windows-latest',
34 | ]) */
35 | const { desiredNodeVersion } = answers
36 | const actionsNodeVersions = filterSetupNodeVersions(
37 | answers.nodeVersionsTested,
38 | )
39 | const actionsNodeVersionsOptional = filterSetupNodeVersions(
40 | state.nodeVersionsOptional,
41 | )
42 | const continueOnErrors = [
43 | actionsNodeVersionsOptional.length
44 | ? `contains('${actionsNodeVersionsOptional.join(' ')}', matrix.node)`
45 | : '',
46 | actionsOperatingSystemsOptional.length
47 | ? `contains('${actionsOperatingSystemsOptional.join(' ')}', matrix.os)`
48 | : '',
49 | ]
50 | .filter((i) => i)
51 | .join(' || ')
52 | const continueOnError = continueOnErrors
53 | ? `\${{ ${continueOnErrors} }}`
54 | : null
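// Illustrative sketch, not part of the source: with hypothetical optional Node.js versions ['21']
// and no optional operating systems, continueOnError would be the expression string
//   "${{ contains('21', matrix.node) }}"
// which GitHub Actions evaluates per matrix entry, letting those jobs fail without failing the run.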
55 |
56 | // standard actions
57 | const preTestSteps = [
58 | {
59 | run: 'npm run our:setup',
60 | },
61 | {
62 | run: 'npm run our:compile',
63 | },
64 | {
65 | run: 'npm run our:verify',
66 | },
67 | ]
68 | const verifyNodeVersionSteps = [
69 | {
70 | name: 'Verify Node.js Versions',
71 | run: "printf '%s' 'node: ' && node --version && printf '%s' 'npm: ' && npm --version && node -e 'console.log(process.versions)'",
72 | },
73 | ]
74 | const testSteps = [
75 | {
76 | run: 'npm test',
77 | },
78 | ]
79 | const prePublishSteps = [
80 | {
81 | run: 'npm run our:setup',
82 | },
83 | {
84 | run: 'npm run our:compile',
85 | },
86 | {
87 | run: 'npm run our:meta',
88 | },
89 | ]
90 |
91 | // inject custom conf into test steps
92 | if (packageData.boundation && packageData.boundation.githubActionTestEnv) {
93 | for (const step of testSteps) {
94 | step.env = packageData.boundation.githubActionTestEnv
95 | }
96 | }
97 |
98 | // bevry actions
99 | const npmPublishSteps = [
100 | {
101 | name: 'publish to npm',
102 | uses: 'bevry-actions/npm@v1.1.7',
103 | with: {
104 | npmAuthToken: '${{ secrets.NPM_AUTH_TOKEN }}',
105 | npmBranchTag: answers.npm ? ':next' : null,
106 | },
107 | },
108 | ]
109 | const surgePublishSteps = [
110 | {
111 | name: 'publish to surge',
112 | uses: 'bevry-actions/surge@v1.1.0',
113 | with: {
114 | surgeLogin: '${{ secrets.SURGE_LOGIN }}',
115 | surgeToken: '${{ secrets.SURGE_TOKEN }}',
116 | },
117 | },
118 | ]
119 | const customPublishSteps = [
120 | {
121 | run: 'npm run my:deploy',
122 | },
123 | ]
124 | const publishSteps = []
125 | // @todo turn bevry cdn into its own github action
126 | // https://github.com/bevry-actions/npm/blob/2811aea332baf2e7994ae4f118e23a52e4615cf9/action.bash#L110
127 | if (answers.npm || answers.deploymentStrategy === 'bevry') {
128 | publishSteps.push(...npmPublishSteps)
129 | }
130 | if (answers.deploymentStrategy === 'surge') {
131 | publishSteps.push(...surgePublishSteps)
132 | }
133 | if (answers.deploymentStrategy === 'custom') {
134 | publishSteps.push(...customPublishSteps)
135 | }
136 |
137 | // github actions
138 | const setupSteps = [
139 | {
140 | uses: 'actions/checkout@v4',
141 | },
142 | ]
143 | const desiredNodeSteps = [
144 | {
145 | name: 'Install desired Node.js version',
146 | uses: 'actions/setup-node@v4',
147 | with: {
148 | 'node-version': desiredNodeVersion,
149 | },
150 | },
151 | ...verifyNodeVersionSteps,
152 | ]
153 | const targetNodeSteps = [
154 | {
155 | name: 'Install targeted Node.js',
156 |
157 | if: `\${{ matrix.node != ${desiredNodeVersion} }}`,
158 | uses: 'actions/setup-node@v4',
159 | with: {
160 | 'node-version': '${{ matrix.node }}',
161 | },
162 | },
163 | ...verifyNodeVersionSteps,
164 | ]
165 | const setupDenoSteps = [
166 | {
167 | name: 'Install Deno',
168 | uses: 'denoland/setup-deno@v1',
169 | with: {
170 | 'deno-version': 'vx.x.x',
171 | },
172 | },
173 | ]
174 |
175 | // add deno steps if needed
176 | if (answers.keywords.has('deno')) {
177 | setupSteps.push(...setupDenoSteps)
178 | }
179 |
180 | // merge
181 | return trimEmptyKeys({
182 | name: 'bevry',
183 | on: ['push', 'pull_request'],
184 | jobs: {
185 | test: {
186 | strategy: {
187 | matrix: {
188 | os: actionsOperatingSystems,
189 | node: actionsNodeVersions,
190 | },
191 | },
192 | 'runs-on': '${{ matrix.os }}',
193 | 'continue-on-error': continueOnError,
194 | steps: [
195 | ...setupSteps,
196 | ...desiredNodeSteps,
197 | ...preTestSteps,
198 | ...targetNodeSteps,
199 | ...testSteps,
200 | ],
201 | },
202 | publish: publishSteps.length
203 | ? {
204 | if: "${{ github.event_name == 'push' }}",
205 | needs: 'test',
206 | 'runs-on': 'ubuntu-latest',
207 | steps: [
208 | ...setupSteps,
209 | ...desiredNodeSteps,
210 | ...prePublishSteps,
211 | ...publishSteps,
212 | ],
213 | }
214 | : null,
215 | automerge: {
216 | permissions: {
217 | contents: 'write',
218 | 'pull-requests': 'write',
219 | },
220 | 'runs-on': 'ubuntu-latest',
221 | if: "github.actor == 'dependabot[bot]'",
222 | steps: [
223 | {
224 | name: 'Enable auto-merge for Dependabot PRs',
225 | run: 'gh pr merge --auto --squash "$PR_URL"',
226 | env: {
227 | PR_URL: '${{github.event.pull_request.html_url}}',
228 | GITHUB_TOKEN: '${{secrets.GITHUB_TOKEN}}',
229 | },
230 | },
231 | ],
232 | },
233 | },
234 | })
235 | }
236 |
237 | // Actions
238 | export async function updateCI(state) {
239 | status('customising ci...')
240 |
241 | // wiping old ci files and prep new ones
242 | await unlink([
243 | '.travis.yml',
244 | '.mergify.yml',
245 | '.dependabot/config.yml',
246 | '.github/workflows/automerge.yml',
247 | ])
248 | await mkdirp('.github/workflows')
249 |
250 | // dependabot v2 file
251 | // https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
252 | await writeYAML('.github/dependabot.yml', {
253 | version: 2,
254 | updates: [
255 | {
256 | 'package-ecosystem': 'github-actions',
257 | directory: '/',
258 | schedule: {
259 | interval: 'weekly',
260 | day: 'sunday',
261 | time: '00:00',
262 | timezone: 'Australia/Perth',
263 | },
264 | },
265 | {
266 | 'package-ecosystem': 'npm',
267 | directory: '/',
268 | schedule: {
269 | interval: 'weekly',
270 | day: 'sunday',
271 | time: '00:00',
272 | timezone: 'Australia/Perth',
273 | },
274 | // only allow security updates
275 | // this is because of the lag it causes on the bevry org
276 | // as well as that github only supports the maintained node.js versions, so dependabot could merge a dependency that breaks unmaintained node.js versions that our package still supports
277 | 'open-pull-requests-limit': 0,
278 | },
279 | ],
280 | })
281 |
282 | // add github actions if a custom one is not present
283 | if (await isAccessible('.github/workflows/custom.yml')) {
284 | state.githubWorkflow = 'custom'
285 | console.log('skipping writing github actions as a custom workflow exists')
286 | } else {
287 | await writeYAML(
288 | '.github/workflows/bevry.yml',
289 | generateGitHubActionsJSON(state),
290 | )
291 | }
292 |
293 | // log
294 | status('...customised ci')
295 | }
296 |
--------------------------------------------------------------------------------
/source/data.js:
--------------------------------------------------------------------------------
1 | // builtin
2 | import { cwd } from 'node:process'
3 |
4 | // external
5 | import {
6 | getESVersionsByDate,
7 | getESVersionByDate,
8 | getESVersionsByNow,
9 | getDateWithYearOffset,
10 | } from '@bevry/ecmascript-versions'
11 |
12 | export const pwd = cwd()
13 |
14 | export const pastBevrySponsors = [
15 | // fetched manually with incognito mode from: https://github.com/sponsors/balupton
16 | // as there isn't an api for this yet; once there is one, add it to github-api
17 | { githubUsername: 'dr-dimitru' },
18 | { githubUsername: 'elliottditman' },
19 | { githubUsername: 'Armenm' },
20 | { githubUsername: 'WriterJohnBuck' },
21 | { githubUsername: 'cryptoquick' },
22 | { githubUsername: 'rdeforest' },
23 | { githubUsername: 'hispanic' },
24 | { githubUsername: 'github' },
25 | { githubUsername: 'pleo-io' },
26 | { githubUsername: 'mrhenry' },
27 | { githubUsername: 'nermalcat69' },
28 | { githubUsername: 'skunkteam' },
29 | // fetched manually from: https://www.patreon.com/members via bevry creator account
30 | // github associations performed manually
31 | {
32 | githubUsername: 'elliottditman',
33 | // from patreon:
34 | email: 'elliottditman@gmail.com',
35 | patreonId: '15026448',
36 | },
37 | {
38 | githubUsername: 'Armenm',
39 | // from patreon:
40 | nomen: 'Armen Mkrtchian',
41 | email: 'armen.mkrtchian@gmail.com',
42 | twitterUsername: 'armen_mkrtchian',
43 | },
44 | {
45 | githubUsername: 'leedriscoll',
46 | // from patreon:
47 | nomen: 'Lee Driscoll',
48 | email: 'lsdriscoll@icloud.com',
49 | patreonId: '5292556',
50 | },
51 | {
52 | githubUsername: 'Aglezabad',
53 | // from patreon:
54 | nomen: 'Ángel González',
55 | email: 'aglezabad@gmail.com',
56 | twitterUsername: 'Aglezabad',
57 | },
58 | {
59 | githubUsername: 'scokem',
60 | twitterUsername: 'scokem',
61 | // from patreon:
62 | nomen: 'Scott Kempson',
63 | email: 'scottkempson@gmail.com',
64 | },
65 | ]
66 |
67 | export const hiddenConfigurationProperties = [
68 | 'comment',
69 | 'versions',
70 | 'githubActionTestEnv',
71 | ]
72 |
73 | // fill this with a map of dependency package names to versions that are busted
74 | // so that if they are necessary, a previous version is used instead
75 | export const bustedVersions = {}
76 |
77 | export const allLanguages = [
78 | 'typescript',
79 | 'esnext',
80 | 'es5',
81 | 'coffeescript',
82 | 'json',
83 | 'react',
84 | 'jsx',
85 | 'mdx',
86 | 'html',
87 | 'css',
88 | ]
89 |
90 | // both browsers and typescript lag behind
91 | // as of 2023-11-01, ES2023 isn't available to typescript, even though it has been ratified (versions get ratified in the middle of the year)
92 | const aYearAgo = getDateWithYearOffset(-1)
93 |
94 | // we reverse, to make sure it is newest first
95 | export const allTypescriptEcmascriptTargets = [
96 | 'ESNext',
97 | ...getESVersionsByDate(aYearAgo).reverse(),
98 | ]
99 |
100 | // we reverse, to make sure it is newest first
101 | export const allEcmascriptVersions = [
102 | 'ESNext',
103 | ...getESVersionsByNow().reverse(),
104 | ]
105 |
106 | export const defaultCoffeeEcmascriptTarget = 'ESNext'
107 |
108 | // previous year
109 | export const defaultBrowserTarget = getESVersionByDate(aYearAgo)
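// Illustrative sketch, not part of the source: assuming getESVersionByDate returns the latest
// ECMAScript edition ratified on or before the given date, then as of the 2023-11-01 comment above,
// aYearAgo would be 2022-11-01 and defaultBrowserTarget would resolve to ES2022.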
110 |
111 | export const languageNames = {
112 | typescript: 'TypeScript',
113 | esnext: 'ESNext',
114 | es5: 'ES5',
115 | coffeescript: 'CoffeeScript',
116 | }
117 |
--------------------------------------------------------------------------------
/source/editions.js:
--------------------------------------------------------------------------------
1 | // builtin
2 | import * as pathUtil from 'node:path'
3 |
4 | // external
5 | import { add, has, intersect } from '@bevry/list'
6 | import { isAccessible } from '@bevry/fs-accessible'
7 | import write from '@bevry/fs-write'
8 | import { fetchAllCompatibleESVersionsForNodeVersions } from '@bevry/nodejs-ecmascript-compatibility'
9 | import { filterNodeVersions } from '@bevry/nodejs-versions'
10 |
11 | // local
12 | import { status } from './log.js'
13 | import {
14 | strip,
15 | addExtension,
16 | fixTsc,
17 | useStrict,
18 | exportOrExports,
19 | importOrRequire,
20 | cojoin,
21 | set,
22 | unjoin,
23 | } from './util.js'
24 | import { newPackageBinEntry } from './package.js'
25 | import {
26 | allTypescriptEcmascriptTargets,
27 | languageNames,
28 | defaultBrowserTarget,
29 | defaultCoffeeEcmascriptTarget,
30 | } from './data.js'
31 | import { spawn, exec, unlinkIfContains } from './fs.js'
32 | import state from './state.js'
33 |
34 | async function writeLoader({
35 | entry = 'index',
36 | autoloader = false,
37 | exportDefault = false,
38 | typesPath = '',
39 | targetEntry = '',
40 | targetPath = '',
41 | }) {
42 | const bin = entry.startsWith('bin')
43 | const mjs = entry.endsWith('.mjs')
44 | const cjs = !mjs
45 | const lines = [
46 | bin && '#!/usr/bin/env node',
47 | cjs && "'use strict'",
48 | '// auto-generated by boundation, do not update manually',
49 | ]
50 | if (autoloader) {
51 | if (mjs) {
52 | // https://github.com/bevry/editions/issues/83
53 | throw new Error('autoloader does not yet support mjs')
54 | }
55 | lines.push(`/** @type {typeof import("${cojoin('.', typesPath)}") } */`)
56 | if (targetEntry) {
57 | lines.push(
58 | `module.exports = require('editions').requirePackage(__dirname, require, '${targetEntry}')`,
59 | )
60 | } else {
61 | lines.push(
62 | `module.exports = require('editions').requirePackage(__dirname, require)`,
63 | )
64 | }
65 | } else {
66 | if (mjs) lines.push(`export * from './${targetPath}'`)
67 | if (exportDefault)
68 | lines.push(
69 | `import d from './${targetPath}'`,
70 | // cjs exports {default} instead of default
71 | 'export default ' + (cjs ? 'd.default || d' : 'd'),
72 | )
73 | if (cjs) lines.push(`module.exports = require('./${targetPath}')`)
74 | }
75 | await write(entry, lines.filter((i) => i).join('\n'))
76 | }
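// Illustrative sketch, not part of the source: assuming cojoin('.', path) prefixes './', a
// hypothetical call such as
//   writeLoader({ entry: 'index.cjs', autoloader: true, typesPath: 'source/index.ts', targetEntry: 'index.js' })
// would write an index.cjs containing roughly:
//   'use strict'
//   // auto-generated by boundation, do not update manually
//   /** @type {typeof import("./source/index.ts") } */
//   module.exports = require('editions').requirePackage(__dirname, require, 'index.js')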
77 |
78 | async function writeRootEntry({
79 | entry = 'index',
80 | autoloader = false,
81 | always = false,
82 | exportDefault = false,
83 | sourceEdition,
84 | typesEdition,
85 | nodeEditionRequire,
86 | nodeEditionImport,
87 | }) {
88 | // prepare
89 | const typesPath = typesEdition
90 | ? typesEdition[entry + 'Path']
91 | : sourceEdition[entry + 'Path']
92 | // resolve
93 | let resolved
94 | if (nodeEditionRequire) {
95 | if (autoloader || always) {
96 | const entryWithExtension = entry + '.cjs'
97 | await writeLoader({
98 | entry: entryWithExtension,
99 | autoloader,
100 | typesPath,
101 | targetEntry: nodeEditionRequire[entry],
102 | targetPath: nodeEditionRequire[entry + 'Path'],
103 | })
104 | resolved = entryWithExtension
105 | } else {
106 | resolved = nodeEditionRequire[entry + 'Path']
107 | }
108 | } else if (nodeEditionImport) {
109 | if (autoloader) {
110 | throw new Error('autoloader does not yet support only mjs')
111 | } else if (always) {
112 | const entryWithExtension = entry + '.mjs'
113 | await writeLoader({
114 | entry: entryWithExtension,
115 | autoloader,
116 | typesPath,
117 | targetEntry: nodeEditionImport[entry],
118 | targetPath: nodeEditionImport[entry + 'Path'],
119 | })
120 | resolved = entryWithExtension
121 | } else {
122 | // package.json:exports.import dismisses the need for a .mjs loader file
123 | resolved = nodeEditionImport[entry + 'Path']
124 | }
125 | }
126 | // always resolve, as node doesn't support extensionless entries inside package.json
127 | return resolved
128 | }
129 |
130 | // Helpers
131 | class Edition {
132 | constructor(opts) {
133 | Object.defineProperty(this, 'description', {
134 | enumerable: true,
135 | get() {
136 | // ensure description exists
137 | const edition = this
138 | const browserSupport = edition.engines && edition.engines.browsers
139 | const nodeSupport = edition.engines && edition.engines.node
140 | const esSupport = edition.targets && edition.targets.es
141 | const description = [
142 | languageNames[state.answers.language] || state.answers.language,
143 | edition.directory === state.answers.sourceDirectory
144 | ? 'source code'
145 | : 'compiled',
146 | ]
147 | if (esSupport && typeof esSupport === 'string') {
148 | // what the typescript compiler targets
149 | description.push(`against ${esSupport}`)
150 | }
151 | if (browserSupport) {
152 | description.push(`for web browsers`)
153 | if (
154 | typeof browserSupport === 'string' &&
155 | browserSupport !== 'defaults'
156 | ) {
157 | description.push(`[${browserSupport}]`)
158 | }
159 | }
160 | if (nodeSupport) {
161 | description.push(browserSupport ? 'and' : 'for', `Node.js`)
162 | if (typeof nodeSupport === 'string') {
163 | // typescript compiler will be true, as typescript doesn't compile to specific node versions
164 | description.push(`${nodeSupport}`)
165 | }
166 | }
167 | if (has(edition.tags, 'types')) {
168 | description.push('Types')
169 | }
170 | if (has(edition.tags, 'require')) {
171 | description.push('with Require for modules')
172 | } else if (has(edition.tags, 'import')) {
173 | description.push('with Import for modules')
174 | }
175 | return description.join(' ')
176 | },
177 | })
178 |
179 | Object.defineProperty(this, 'targets', {
180 | enumerable: false,
181 | writable: true,
182 | })
183 |
184 | Object.defineProperty(this, 'dependencies', {
185 | enumerable: false,
186 | writable: true,
187 | })
188 |
189 | Object.defineProperty(this, 'devDependencies', {
190 | enumerable: false,
191 | writable: true,
192 | })
193 |
194 | Object.defineProperty(this, 'compiler', {
195 | enumerable: false,
196 | writable: true,
197 | })
198 |
199 | Object.defineProperty(this, 'scripts', {
200 | enumerable: false,
201 | writable: true,
202 | })
203 |
204 | Object.defineProperty(this, 'compiler', {
205 | enumerable: false,
206 | writable: true,
207 | })
208 |
209 | Object.defineProperty(this, 'babel', {
210 | enumerable: false,
211 | writable: true,
212 | })
213 |
214 | Object.defineProperty(this, 'active', {
215 | enumerable: false,
216 | writable: true,
217 | })
218 |
219 | Object.defineProperty(this, 'entry', {
220 | enumerable: false,
221 | get() {
222 | const engines = []
223 | for (const [name, supported] of Object.entries(this.engines)) {
224 | if (supported) engines.push(name)
225 | }
226 | const entry =
227 | engines.length !== 1 ? this.index : this[engines[0]] || this.index
228 | return entry
229 | },
230 | })
231 |
232 | Object.defineProperty(this, 'index', {
233 | enumerable: false,
234 | writable: true,
235 | })
236 |
237 | Object.defineProperty(this, 'node', {
238 | enumerable: false,
239 | writable: true,
240 | })
241 |
242 | Object.defineProperty(this, 'browser', {
243 | enumerable: false,
244 | writable: true,
245 | })
246 |
247 | Object.defineProperty(this, 'test', {
248 | enumerable: false,
249 | writable: true,
250 | })
251 |
252 | Object.defineProperty(this, 'bin', {
253 | enumerable: false,
254 | writable: true,
255 | })
256 |
257 | Object.defineProperty(this, 'indexPath', {
258 | enumerable: false,
259 | get() {
260 | return this.index && pathUtil.join(this.directory || '.', this.index)
261 | },
262 | })
263 |
264 | // entry path is an indexPath that actually has engines
265 | Object.defineProperty(this, 'entryPath', {
266 | enumerable: false,
267 | get() {
268 | return this.entry && pathUtil.join(this.directory || '.', this.entry)
269 | },
270 | })
271 |
272 | Object.defineProperty(this, 'nodePath', {
273 | enumerable: false,
274 | get() {
275 | return this.node && pathUtil.join(this.directory || '.', this.node)
276 | },
277 | })
278 |
279 | Object.defineProperty(this, 'browserPath', {
280 | enumerable: false,
281 | get() {
282 | return (
283 | this.browser && pathUtil.join(this.directory || '.', this.browser)
284 | )
285 | },
286 | })
287 |
288 | Object.defineProperty(this, 'testPath', {
289 | enumerable: false,
290 | get() {
291 | return this.test && pathUtil.join(this.directory || '.', this.test)
292 | },
293 | })
294 |
295 | Object.defineProperty(this, 'binPath', {
296 | enumerable: false,
297 | get() {
298 | return this.bin && pathUtil.join(this.directory || '.', this.bin)
299 | },
300 | })
301 |
302 | Object.defineProperty(this, 'compileCommand', {
303 | enumerable: false,
304 | writable: true,
305 | })
306 |
307 | opts.tags = new Set(opts.tags || [])
308 | opts.dependencies = new Set(opts.dependencies || [])
309 | opts.devDependencies = new Set(opts.devDependencies || [])
310 |
311 | Object.assign(this, { scripts: {}, active: true }, opts)
312 | }
313 | }
314 |
315 | // Actions
316 | export async function generateEditions(state) {
317 | const { answers, packageData } = state
318 |
319 | // log
320 | status('updating editions...')
321 |
322 | // source edition
323 | if (answers.website) {
324 | delete packageData.main
325 | state.editions = [
326 | new Edition({
327 | directory: '.',
328 | tags: [
329 | 'source',
330 | 'website',
331 | ...answers.languages,
332 | answers.sourceModule ? 'import' : 'require',
333 | ],
334 | }),
335 | ]
336 | } else {
337 | const editions = new Map()
338 |
339 | // Generate source edition based on language
340 | if (answers.language === 'es5') {
341 | const edition = new Edition({
342 | directory: answers.sourceDirectory,
343 | index: addExtension(answers.indexEntry, `js`),
344 | node: addExtension(answers.nodeEntry, `js`),
345 | browser: addExtension(answers.browserEntry, `js`),
346 | test: addExtension(answers.testEntry, `js`),
347 | bin: addExtension(answers.binEntry, `js`),
348 | tags: [
349 | 'source',
350 | 'javascript',
351 | 'es5',
352 | answers.sourceModule ? 'import' : 'require',
353 | ],
354 | engines: {
355 | node: true,
356 | browsers: answers.browsersTargeted,
357 | },
358 | })
359 |
360 | if (answers.flowtype) {
361 | add(edition.tags, 'flow type comments')
362 | }
363 |
364 | editions.set('source', edition)
365 | } else if (answers.language === 'esnext') {
366 | const edition = new Edition({
367 | directory: answers.sourceDirectory,
368 | index: addExtension(answers.indexEntry, `js`),
369 | node: addExtension(answers.nodeEntry, `js`),
370 | browser: addExtension(answers.browserEntry, `js`),
371 | test: addExtension(answers.testEntry, `js`),
372 | bin: addExtension(answers.binEntry, `js`),
373 | tags: [
374 | 'source',
375 | 'javascript',
376 | 'esnext',
377 | answers.sourceModule ? 'import' : 'require',
378 | ],
379 | engines: {
380 | node: true,
381 | browsers: answers.browsersTargeted && !answers.compilerBrowser,
382 | },
383 | })
384 |
385 | if (answers.flowtype) {
386 | add(edition.tags, 'flow type comments')
387 | }
388 |
389 | editions.set('source', edition)
390 | } else if (answers.language === 'typescript') {
391 | editions.set(
392 | 'source',
393 | new Edition({
394 | directory: answers.sourceDirectory,
395 | index: addExtension(answers.indexEntry, `ts`),
396 | node: addExtension(answers.nodeEntry, `ts`),
397 | browser: addExtension(answers.browserEntry, `ts`),
398 | test: addExtension(answers.testEntry, `ts`),
399 | bin: addExtension(answers.binEntry, `js`),
400 | tags: ['source', 'typescript', 'import'],
401 | engines: false,
402 | }),
403 | )
404 | } else if (answers.language === 'coffeescript') {
405 | editions.set(
406 | 'source',
407 | new Edition({
408 | directory: answers.sourceDirectory,
409 | index: addExtension(answers.indexEntry, `coffee`),
410 | node: addExtension(answers.nodeEntry, `coffee`),
411 | browser: addExtension(answers.browserEntry, `coffee`),
412 | test: addExtension(answers.testEntry, `coffee`),
413 | bin: addExtension(answers.binEntry, `coffee`),
414 | tags: ['source', 'coffeescript', 'require'],
415 | engines: false,
416 | }),
417 | )
418 | } else if (answers.language === 'json') {
419 | editions.set(
420 | 'source',
421 | new Edition({
422 | directory: answers.sourceDirectory,
423 | index: addExtension(answers.indexEntry, `json`),
424 | node: addExtension(answers.nodeEntry, `json`),
425 | browser: addExtension(answers.browserEntry, `json`),
426 | test: addExtension(answers.testEntry, `js`),
427 | bin: addExtension(answers.binEntry, `js`),
428 | tags: ['source', 'json', 'es5'],
429 | engines: {
430 | node: true,
431 | browsers: answers.browsersTargeted && !answers.compilerBrowser,
432 | },
433 | }),
434 | )
435 | } else {
436 | throw new Error('language should have been defined, but it was missing')
437 | }
438 |
439 | // add browser edition
440 | if (answers.compilerBrowser) {
441 | editions.set(
442 | 'browser',
443 | new Edition({
444 | compiler: answers.compilerBrowser,
445 | // for legacy b/c reasons this is not "edition-browser"
446 | directory: 'edition-browsers',
447 | index: addExtension(answers.browserEntry, `js`),
448 | browser: addExtension(answers.browserEntry, `js`),
449 | test: addExtension(answers.testEntry, `js`),
450 | bin: addExtension(answers.binEntry, `js`),
451 | tags: [
452 | 'compiled',
453 | 'javascript',
454 | answers.sourceModule ? 'import' : 'require',
455 | ],
456 | targets: {
457 | es: defaultBrowserTarget,
458 | browsers: answers.browsersTargeted,
459 | },
460 | engines: {
461 | node: false,
462 | browsers: answers.browsersTargeted,
463 | },
464 | }),
465 | )
466 | }
467 |
468 | // add coffeescript edition
469 | if (answers.compilerNode === 'coffeescript') {
470 | const esVersionTargetLower = defaultCoffeeEcmascriptTarget.toLowerCase()
471 | const directory = `edition-${esVersionTargetLower}`
472 | editions.set(
473 | 'coffeescript',
474 | new Edition({
475 | compiler: 'coffeescript',
476 | directory,
477 | index: addExtension(answers.indexEntry, `js`),
478 | node: addExtension(answers.nodeEntry, `js`),
479 | browser: addExtension(answers.browserEntry, `js`),
480 | test: addExtension(answers.testEntry, `js`),
481 | bin: addExtension(answers.binEntry, `js`),
482 | tags: ['compiled', 'javascript', esVersionTargetLower, 'require'],
483 | engines: {
484 | node: true,
485 | browsers: answers.browsersTargeted,
486 | },
487 | }),
488 | )
489 | }
490 | // add edition for each babel/typescript target
491 | else if (
492 | answers.compilerNode === 'babel' ||
493 | answers.compilerNode === 'typescript'
494 | ) {
495 | for (const targetModule of answers.targetModules) {
496 | /* eslint no-undefined:0 */
497 | const nodeVersionsTargets = filterNodeVersions(
498 | answers.nodeVersionsTargeted,
499 | {
500 | esm: targetModule === 'import',
501 | range:
502 | targetModule === 'import'
503 | ? answers.nodeVersionsTargetedImportRange
504 | : targetModule === 'require'
505 | ? answers.nodeVersionsTargetedRequireRange
506 | : undefined,
507 | },
508 | )
509 | .slice()
510 | .reverse() // reverse modifies the actual array, hence need for slice
511 | if (answers.compilerNode === 'babel') {
512 | for (const nodeVersionTarget of nodeVersionsTargets) {
513 | // fetch the es version, which is essential for an accurate prettier configuration (e.g. node v6 targets es5)
514 | const esVersionTargetLower = (
515 | await fetchAllCompatibleESVersionsForNodeVersions([
516 | nodeVersionTarget,
517 | ])
518 | )
519 | .reverse()[0]
520 | .toLowerCase()
521 | const directory =
522 | `edition-node-${nodeVersionTarget}` +
523 | (targetModule === 'import' ? '-esm' : '')
524 | editions.set(
525 | directory,
526 | new Edition({
527 | compiler: 'babel',
528 | directory,
529 | index: addExtension(answers.indexEntry, `js`),
530 | node: addExtension(answers.nodeEntry, `js`),
531 | browser: addExtension(answers.browserEntry, `js`),
532 | test: addExtension(answers.testEntry, `js`),
533 | bin: addExtension(answers.binEntry, `js`),
534 | tags: [
535 | 'compiled',
536 | 'javascript',
537 | esVersionTargetLower,
538 | targetModule,
539 | ],
540 | targets: {
541 | node: nodeVersionTarget,
542 | },
543 | engines: {
544 | node: true,
545 | browsers: false,
546 | },
547 | }),
548 | )
549 | }
550 | } else if (answers.compilerNode === 'typescript') {
551 | const esVersionsTargets = new Set()
552 | for (const nodeVersionTarget of nodeVersionsTargets) {
553 | // fetch the latest es version for the node.js version target that typescript supports
554 | const esVersionTarget = intersect(
555 | allTypescriptEcmascriptTargets,
556 | await fetchAllCompatibleESVersionsForNodeVersions([
557 | nodeVersionTarget,
558 | ]),
559 | )[0]
560 | // check that typescript supported it
561 | if (!esVersionTarget) continue
562 | // check that we haven't already generated an edition for this es version target
563 | if (esVersionsTargets.has(esVersionTarget)) continue
564 | esVersionsTargets.add(esVersionTarget)
565 | // generate the edition
566 | const esVersionTargetLower = esVersionTarget.toLowerCase()
567 | const directory =
568 | `edition-${esVersionTargetLower}` +
569 | (targetModule === 'import' ? '-esm' : '')
570 | editions.set(
571 | directory,
572 | new Edition({
573 | compiler: 'typescript',
574 | directory,
575 | index: addExtension(answers.indexEntry, `js`),
576 | node: addExtension(answers.nodeEntry, `js`),
577 | browser: addExtension(answers.browserEntry, `js`),
578 | test: addExtension(answers.testEntry, `js`),
579 | bin: addExtension(answers.binEntry, `js`),
580 | tags: [
581 | 'compiled',
582 | 'javascript',
583 | esVersionTargetLower,
584 | targetModule,
585 | ],
586 | targets: {
587 | node: nodeVersionTarget,
588 | es: esVersionTarget,
589 | },
590 | engines: {
591 | node: true,
592 | browsers: false,
593 | },
594 | }),
595 | )
596 | }
597 | } else {
598 | throw new Error(`invalid target for the compiler`)
599 | }
600 | }
601 | }
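// Illustrative sketch, not part of the source: the loops above name compiled editions after their
// target, e.g. a hypothetical babel edition for Node.js 18 ESM lands in 'edition-node-18-esm',
// while a typescript edition targeting ES2022 with require lands in 'edition-es2022'; the exact ES
// target depends on what fetchAllCompatibleESVersionsForNodeVersions reports.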
602 |
603 | // add types
604 | if (answers.language === 'typescript') {
605 | editions.set(
606 | 'types',
607 | new Edition({
608 | compiler: 'types',
609 | directory: 'edition-types',
610 | index: addExtension(answers.indexEntry, `d.ts`),
611 | node: addExtension(answers.nodeEntry, `d.ts`),
612 | browser: addExtension(answers.browserEntry, `d.ts`),
613 | test: addExtension(answers.testEntry, `d.ts`),
614 | bin: addExtension(answers.binEntry, `d.ts`),
615 | tags: ['compiled', 'types', 'import'],
616 | engines: false,
617 | }),
618 | )
619 | } else {
620 | // define the possible locations
621 | // do note that they must exist throughout boundation's run, which for a compiled directory is only sporadically the case
622 | const sourceEdition = editions.get('source')
623 | const typePaths = [
624 | // e.g. index.d.ts
625 | pathUtil.join(answers.indexEntry + '.d.ts'),
626 | // e.g. source/index.d.ts
627 | sourceEdition &&
628 | pathUtil.join(sourceEdition.directory, answers.indexEntry + '.d.ts'),
629 | ].filter((i) => i)
630 | // fetch their existing status and convert back into the original location
631 | const typePathsExisting = await Promise.all(
632 | typePaths.map((i) => isAccessible(i).then((e) => e && i)),
633 | )
634 | // find the first location that exists
635 | const typePath = typePathsExisting.find((i) => i)
636 | // and if exists, add our types edition
637 | if (typePath) {
638 | editions.set(
639 | 'types',
640 | new Edition({
641 | directory: '.',
642 | index: typePath,
643 | tags: ['types', answers.sourceModule ? 'import' : 'require'],
644 | engines: false,
645 | }),
646 | )
647 | }
648 | }
649 |
650 | // update state
651 | state.editions = Array.from(editions.values())
652 | }
653 |
654 | // log
655 | console.log(
656 | 'editions:',
657 | state.editions.map((edition) => edition.directory).join(', '),
658 | )
659 | status('...updated editions')
660 | }
661 |
662 | export function updateEditionFields(state) {
663 | const { answers, editions } = state
664 |
665 | // autogenerate various fields
666 | editions.forEach(function (edition) {
667 | const compileScriptName = `our:compile:${edition.directory}`
668 |
669 | // add compilation details
670 | if (edition.compiler === 'coffeescript') {
671 | edition.scripts[compileScriptName] =
672 | `coffee -bco ./${edition.directory} ./${answers.sourceDirectory}`
673 | } else if (edition.compiler === 'types') {
674 | edition.scripts[compileScriptName] = [
675 | 'tsc',
676 | '--emitDeclarationOnly',
677 | '--declaration',
678 | '--declarationMap',
679 | `--declarationDir ./${edition.directory}`,
680 | `--project ${answers.tsconfig}`,
681 | ...fixTsc(edition.directory, answers.sourceDirectory),
682 | ]
683 | .filter((part) => part)
684 | .join(' ')
685 | } else if (edition.compiler === 'typescript') {
686 | edition.scripts[compileScriptName] = [
687 | 'tsc',
688 | has(edition.tags, 'require') ? '--module commonjs' : '--module ESNext',
689 | `--target ${edition.targets.es}`,
690 | `--outDir ./${edition.directory}`,
691 | `--project ${answers.tsconfig}`,
692 | ...fixTsc(edition.directory, answers.sourceDirectory),
693 | // doesn't work: '|| true', // fixes failures where types may be temporarily missing
694 | ]
695 | .filter((part) => part)
696 | .join(' ')
697 | } else if (edition.compiler === 'babel') {
698 | if (answers.language === 'coffeescript') {
699 | // add coffee compile script
700 | edition.scripts[compileScriptName] = [
701 | `env BABEL_ENV=${edition.directory}`,
702 | 'coffee -bcto',
703 | `./${edition.directory}/`,
704 | `./${answers.sourceDirectory}`,
705 | ]
706 | .filter((part) => part)
707 | .join(' ')
708 | } else {
709 | // add babel compile script
710 | edition.scripts[compileScriptName] = [
711 | `env BABEL_ENV=${edition.directory}`,
712 | 'babel',
713 | answers.language === 'typescript' ? '--extensions ".ts,.tsx"' : '',
714 | `--out-dir ./${edition.directory}`,
715 | `./${answers.sourceDirectory}`,
716 | ]
717 | .filter((part) => part)
718 | .join(' ')
719 | }
720 |
721 | // populate babel
722 | edition.babel = {
723 | sourceType: answers.sourceModule ? 'module' : 'script',
724 | presets: [
725 | [
726 | '@babel/preset-env',
727 | {
728 | targets: strip(edition.targets, 'es'),
729 | modules: has(edition.tags, 'import')
730 | ? answers.sourceModule
731 | ? false
732 | : 'auto'
733 | : 'commonjs',
734 | },
735 | ],
736 | ],
737 | plugins: ['@babel/plugin-transform-object-rest-spread'],
738 | }
739 |
740 | add(
741 | edition.devDependencies,
742 | '@babel/cli',
743 | '@babel/core',
744 | '@babel/plugin-transform-object-rest-spread',
745 | '@babel/preset-env',
746 | )
747 |
748 | if (answers.language === 'typescript') {
749 | add(edition.babel.presets, '@babel/preset-typescript')
750 | add(
751 | edition.babel.plugins,
752 | '@babel/plugin-proposal-class-properties',
753 | '@babel/plugin-proposal-optional-chaining',
754 | )
755 | add(
756 | edition.devDependencies,
757 | '@babel/core',
758 | '@babel/plugin-proposal-class-properties',
759 | '@babel/plugin-proposal-optional-chaining',
760 | '@babel/plugin-transform-object-rest-spread',
761 | '@babel/preset-typescript',
762 | )
763 | }
764 | }
765 |
766 | // add the package.json type information to the edition
767 | if (edition.engines.node && edition.scripts[compileScriptName]) {
768 | const packageType = has(edition.tags, 'require') ? 'commonjs' : 'module'
769 | edition.scripts[compileScriptName] +=
770 | ` && printf '%s' '{"type": "${packageType}"}' > ${edition.directory}/package.json`
771 | }
772 |
773 | // note the compiler command
774 | edition.compileCommand = [answers.packageManager, 'run', compileScriptName]
775 | })
776 | }
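// Illustrative sketch, not part of the source: for a hypothetical typescript edition with directory
// 'edition-es2022', a 'require' tag, targets.es of 'ES2022', and answers.tsconfig of 'tsconfig.json',
// the generated 'our:compile:edition-es2022' script would look roughly like
//   tsc --module commonjs --target ES2022 --outDir ./edition-es2022 --project tsconfig.json
//     && printf '%s' '{"type": "commonjs"}' > edition-es2022/package.json
// plus whatever extra flags fixTsc contributes, which are not shown here.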
777 |
778 | // Helpers
779 | export function updateEditionEntries(state) {
780 | const {
781 | typesEdition,
782 | nodeEditionRequire,
783 | nodeEditionImport,
784 | browserEdition,
785 | packageData,
786 | } = state
787 |
788 | // reset
789 | delete packageData.node
790 | delete packageData.mjs
791 | delete packageData.cjs
792 |
793 | // https://nodejs.org/api/esm.html#esm_conditional_exports
794 | // https://devblogs.microsoft.com/typescript/announcing-typescript-4-7/#package-json-exports-imports-and-self-referencing
795 | // https://nodejs.org/api/packages.html#packages_exports
796 | // https://nodejs.org/api/packages.html#package-entry-points
797 | // https://nodejs.org/api/packages.html#subpath-exports
798 | // https://nodejs.org/api/packages.html#conditional-exports
799 | packageData.exports = {}
800 |
801 | // types
802 | // https://www.typescriptlang.org/docs/handbook/declaration-files/publishing.html
803 | const typesIndexPath = cojoin('.', typesEdition && typesEdition.indexPath)
804 | if (typesIndexPath) {
805 | packageData.types = typesEdition.indexPath // don't prefix the ./
806 | }
807 |
808 | // node exports
809 | const autoloaderPath = cojoin(
810 | '.',
811 | state.useEditionsAutoloader && packageData.main,
812 | )
813 | const nodeImportPath = cojoin(
814 | '.',
815 | nodeEditionImport && nodeEditionImport.indexPath,
816 | )
817 | const nodeRequirePath = cojoin(
818 | '.',
819 | nodeEditionRequire && nodeEditionRequire.indexPath,
820 | )
821 | if (nodeImportPath || autoloaderPath || nodeRequirePath) {
822 | const nodeExports = {}
823 | set(nodeExports, 'types', typesIndexPath || null)
824 | set(nodeExports, 'import', nodeImportPath || null)
825 | set(nodeExports, 'default', autoloaderPath || null) // default before require, as require should be direct, whereas the autoloader is indirect, as intended
826 | set(nodeExports, 'require', nodeRequirePath || null)
827 | set(packageData.exports, 'node', nodeExports)
828 | }
829 |
830 | // browser exports
831 | const browserPath = cojoin('.', browserEdition && browserEdition.indexPath)
832 | const browserImportPath =
833 | has(browserEdition && browserEdition.tags, 'import') && browserPath
834 | const browserRequirePath =
835 | has(browserEdition && browserEdition.tags, 'require') && browserPath
836 | if (browserImportPath || browserRequirePath) {
837 | const browserExports = {}
838 | set(browserExports, 'types', typesIndexPath || null)
839 | set(browserExports, 'import', browserImportPath || null)
840 | set(browserExports, 'require', browserRequirePath || null)
841 | set(packageData.exports, 'browser', browserExports)
842 | }
843 | set(packageData, 'browser', unjoin('.', browserPath) || null)
844 | set(packageData, 'module', unjoin('.', browserImportPath) || null)
845 |
846 | // // default exports
847 | // const activePath = cojoin('.', activeEdition && activeEdition.indexPath)
848 | // const activeImportPath =
849 | // has(activeEdition && activeEdition.tags, 'import') && activePath
850 | // const activeRequirePath =
851 | // has(activeEdition && activeEdition.tags, 'require') && activePath
852 | // if (activeImportPath || activeRequirePath) {
853 | // const defaultExports = {}
854 | // set(defaultExports, 'types', typesIndexPath || null)
855 | // set(defaultExports, 'import', activeImportPath || null)
856 | // set(defaultExports, 'require', activeRequirePath || null)
857 | // set(packageData.exports, 'default', defaultExports)
858 | // }
859 | // ^ this never worked, as activeEdition did not resolve due to a missing getter; currently activeEdition resolves to the typescript source edition, which isn't what we want, so just ignore it for now.
860 |
861 | // delete the exports if we don't need it
862 | // this is required for eslint-config-bevry/adapt.js
863 | // as when node.js handles exports, it only allows the exported paths and nothing else
864 | if (!nodeImportPath) delete packageData.exports
865 | }
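// Illustrative sketch, not part of the source: assuming set() only assigns non-null values (as its
// use above implies), with hypothetical require and import editions at './edition-es2022/index.js'
// and './edition-es2022-esm/index.js' plus an autoloader main of 'index.cjs', the node exports
// block would resemble
//   packageData.exports.node = {
//     import: './edition-es2022-esm/index.js',
//     default: './index.cjs',
//     require: './edition-es2022/index.js',
//   }
// with a 'types' entry prepended whenever a types edition exists.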
866 |
867 | export async function scaffoldEditions(state) {
868 | // fetch
869 | const {
870 | typesEdition,
871 | sourceEdition,
872 | nodeEdition,
873 | nodeEditionRequire,
874 | nodeEditionImport,
875 | activeEditions,
876 | packageData,
877 | answers,
878 | } = state
879 |
880 | // clean old edition files
881 | await unlinkIfContains(
882 | [
883 | 'bin.js',
884 | 'bin.cjs',
885 | 'bin.mjs',
886 | 'index.js',
887 | 'index.cjs',
888 | 'index.mjs',
889 | 'test.js',
890 | 'test.cjs',
891 | 'test.mjs',
892 | ],
893 | 'auto-generated by boundation',
894 | )
895 |
896 | // export default
897 | let exportDefault = false
898 | answers.keywords.delete('export-default')
899 | if (answers.sourceModule) {
900 | try {
901 | await exec(`cat ${sourceEdition.indexPath} | grep 'export default'`)
902 | exportDefault = true
903 | answers.keywords.add('export-default')
904 | } catch (err) {}
905 | }
906 |
907 | // handle
908 | if (activeEditions.length) {
909 | // log
910 | status('scaffolding edition files...')
911 |
912 | // scaffold edition directories
913 | await spawn(
914 | ['mkdir', '-p'].concat(
915 | activeEditions.map((edition) => edition.directory || '.'),
916 | ),
917 | )
918 |
919 | // move or scaffold edition main path if needed
920 | if (sourceEdition.indexPath) {
921 | if ((await isAccessible(sourceEdition.indexPath)) === false) {
922 | // edition index entry doesn't exist, but it is a docpad plugin
923 | if (answers.docpadPlugin) {
924 | await write(
925 | sourceEdition.indexPath,
926 | [
927 | useStrict(answers.sourceModule),
928 | exportOrExports(
929 | "class MyPlugin extends require('docpad-baseplugin') {",
930 | answers.sourceModule,
931 | ),
932 | "\tget name () { return 'myplugin' }",
933 | '\tget initialConfig () { return {} }',
934 | '}',
935 | '',
936 | ].join('\n'),
937 | )
938 | }
939 | // edition index entry doesn't exist, so create an empty file
940 | else
941 | await write(
942 | sourceEdition.indexPath,
943 | [
944 | useStrict(answers.sourceModule),
945 | exportOrExports("'@todo'", answers.sourceModule),
946 | '',
947 | ].join('\n'),
948 | )
949 | }
950 | }
951 |
952 | // move or scaffold edition test path if needed
953 | if (sourceEdition.testPath) {
954 | if (answers.docpadPlugin === false) {
955 | if ((await isAccessible(sourceEdition.testPath)) === false) {
956 | // edition test entry doesn't exist, so create a basic test file
957 | if (answers.kava) {
958 | await write(
959 | sourceEdition.testPath,
960 | [
961 | useStrict(answers.sourceModule),
962 | importOrRequire(
963 | '{equal}',
964 | 'assert-helpers',
965 | answers.sourceModule,
966 | ),
967 | importOrRequire('kava', 'kava', answers.sourceModule),
968 | '',
969 | `kava.suite('${packageData.name}', function (suite, test) {`,
970 | "\ttest('no tests yet', function () {",
971 | "\t\tconsole.log('no tests yet')",
972 | '\t})',
973 | '})',
974 | '',
975 | ].join('\n'),
976 | )
977 | } else {
978 | await write(
979 | sourceEdition.testPath,
980 | [
981 | useStrict(answers.sourceModule),
982 | exportOrExports("'@todo'", answers.sourceModule),
983 | '',
984 | ].join('\n'),
985 | )
986 | }
987 | }
988 | }
989 | }
990 |
991 | // setup paths
992 | if (nodeEdition) {
993 | packageData.main = await writeRootEntry({
994 | entry: 'index',
995 | autoloader: state.useEditionsAutoloader,
996 | exportDefault,
997 | typesEdition,
998 | sourceEdition,
999 | nodeEditionRequire,
1000 | nodeEditionImport,
1001 | })
1002 |
1003 | // bin
1004 | if (answers.binEntry) {
1005 | packageData.bin = newPackageBinEntry(
1006 | packageData,
1007 | await writeRootEntry({
1008 | entry: 'bin',
1009 | always: true,
1010 | autoloader: state.useEditionsAutoloader,
1011 | exportDefault,
1012 | typesEdition,
1013 | sourceEdition,
1014 | nodeEditionRequire,
1015 | nodeEditionImport,
1016 | }),
1017 | )
1018 | } else {
1019 | delete packageData.bin
1020 | }
1021 |
1022 | // don't bother testing with docpad plugins
1023 | // as they have their own testing solution
1024 | if (answers.docpadPlugin === false) {
1025 | state.test = await writeRootEntry({
1026 | entry: 'test',
1027 | autoloader: state.useEditionsAutoloader,
1028 | exportDefault,
1029 | typesEdition,
1030 | sourceEdition,
1031 | nodeEditionRequire,
1032 | nodeEditionImport,
1033 | })
1034 | }
1035 | }
1036 | // no node edition, so no testing
1037 | else {
1038 | delete packageData.main
1039 | delete packageData.bin
1040 | delete packageData.test
1041 | delete state.test
1042 | }
1043 |
1044 | // make the type what the source edition is
1045 | // as the compiled editions get their own package.json file
1046 | // this does however require that example files get their appropriate extension
1047 | packageData.type = has(sourceEdition.tags, 'import') ? 'module' : 'commonjs'
1048 |
1049 | // browser path
1050 | updateEditionEntries(state)
1051 |
1052 | // log
1053 | status('...scaffolded edition files')
1054 | }
1055 | // no editions
1056 | else {
1057 | // delete type, as no way of determining what it should be
1058 | delete packageData.type
1059 |
1060 | // go directly to source
1061 | if (answers.indexEntry) {
1062 | packageData.main = answers.indexEntry + '.js'
1063 | }
1064 | updateEditionEntries(state)
1065 | if (answers.testEntry) {
1066 | state.test = answers.testEntry + '.js'
1067 | }
1068 | if (answers.binEntry) {
1069 | packageData.bin = newPackageBinEntry(
1070 | packageData,
1071 | answers.binEntry + '.js',
1072 | )
1073 | } else {
1074 | delete packageData.bin
1075 | }
1076 | }
1077 | }
1078 |
--------------------------------------------------------------------------------
/source/fs.js:
--------------------------------------------------------------------------------
1 | // builtin
2 | import { resolve, basename } from 'node:path'
3 | import { rename as _rename } from 'node:fs'
4 |
5 | // external
6 | import yaml from 'js-yaml'
7 | import safeps from 'safeps'
8 | import Errlop from 'errlop'
9 | import { isAccessible } from '@bevry/fs-accessible'
10 | import unlink from '@bevry/fs-unlink'
11 | import read from '@bevry/fs-read'
12 | import write from '@bevry/fs-write'
13 |
14 | // local
15 | import { status } from './log.js'
16 | import { pwd } from './data.js'
17 |
18 | export async function contains(file, data) {
19 | return (await read(file)).toString().includes(data)
20 | }
21 |
22 | export async function echoExists(file) {
23 | const e = await isAccessible(file)
24 | return e ? file : ''
25 | }
26 |
27 | export async function unlinkIfContains(file, what) {
28 | if (Array.isArray(file)) {
29 | return Promise.all(file.map((i) => unlinkIfContains(i, what)))
30 | }
31 | const path = resolve(pwd, file)
32 | if (await isAccessible(path)) {
33 | if (await contains(path, what)) {
34 | console.log(path, 'will be removed because it contains:', what)
35 | return unlink(path)
36 | } else {
37 | console.log(
38 | path,
39 | 'will not be removed because it does not contain:',
40 | what,
41 | )
42 | }
43 | }
44 | }
45 |
46 | export function rename(source, target) {
47 | source = resolve(pwd, source)
48 | target = resolve(pwd, target)
49 | return new Promise(function (resolve, reject) {
50 | _rename(source, target, function (error) {
51 | if (error) return reject(error)
52 | return resolve()
53 | })
54 | })
55 | }
56 |
57 | export async function readJSON(file) {
58 | const exist = await isAccessible(file)
59 | if (!exist) return {}
60 | const data = await read(file)
61 | return JSON.parse(data)
62 | }
63 |
64 | export async function readYAML(file) {
65 | const exist = await isAccessible(file)
66 | if (!exist) return {}
67 | const data = await read(file)
68 | return yaml.load(data)
69 | }
70 |
71 | export function writeYAML(file, data) {
72 | return write(file, yaml.dump(data, { noRefs: true }))
73 | }
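// Illustrative usage sketch, not part of the source: the readers return {} when the file is absent,
// and the writers resolve once the file is on disk, e.g.
//   const pkg = await readJSON('package.json')
//   await writeYAML('example.yml', pkg)
// (the filenames here are hypothetical).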
74 |
75 | export function spawn(command, opts = {}) {
76 | opts.cwd = opts.cwd || pwd
77 | opts.stdio = opts.stdio == null ? 'inherit' : opts.stdio
78 | return new Promise(function (resolve, reject) {
79 | safeps.spawn(command, opts, function (err, stdout = '', stderr = '') {
80 | if (err) {
81 | const message = `spawn failed: ${command.join(' ')}`
82 | if (stderr) {
83 | const errorMessage = stderr.toLowerCase()
84 | if (
85 | errorMessage.includes('enoent') ||
86 | errorMessage.includes('etarget') ||
87 | errorMessage.includes('timeout') ||
88 | errorMessage.includes('econn')
89 | ) {
90 | console.log(
91 | 'trying again due to poor internet connection or caching',
92 | )
93 | return spawn(command, opts).then(resolve).catch(reject)
94 | }
95 | }
96 | return reject(new Errlop(message, err))
97 | }
98 | return resolve(stdout)
99 | })
100 | })
101 | }
102 |
103 | export function exec(command, opts = {}) {
104 | opts.cwd = opts.cwd || pwd
105 | return new Promise(function (resolve, reject) {
106 | safeps.exec(command, opts, function (err, stdout) {
107 | if (err) return reject(new Errlop(`exec failed: ${command}`, err))
108 | return resolve(stdout)
109 | })
110 | })
111 | }
112 |
113 | export async function parse(file) {
114 | const path = resolve(pwd, file)
115 | const filename = basename(path)
116 | status(`reading the ${filename} file...`)
117 | try {
118 | if (await isAccessible(path)) {
119 | const data = JSON.parse(await read(path))
120 | status(`...read the ${filename} file...`)
121 | return data
122 | } else {
123 | status(`...missing the ${path} file...`)
124 | }
125 | } catch (err) {
126 | status(`...skipped the ${filename} file`)
127 | return null
128 | }
129 | }
130 |
--------------------------------------------------------------------------------
/source/get-git.js:
--------------------------------------------------------------------------------
1 | // local
2 | import { pwd } from './data.js'
3 | import { exec } from './fs.js'
4 | import { repoToUsername, repoToProject } from './package.js'
5 |
6 | // Cache the results for different repositories
7 | const details = {}
8 |
9 | export async function getGitOriginUrl(cwd = pwd) {
10 | const detail = (details[cwd] = details[cwd] || {})
11 | if (detail.origin) return detail.origin
12 | try {
13 | const stdout = await exec('git remote get-url origin', {
14 | cwd,
15 | stdio: ['ignore', 'pipe', 'ignore'],
16 | })
17 | const result = (stdout && stdout.toString().trim()) || null
18 | detail.origin = result
19 | return result
20 | } catch (error) {
21 | return null
22 | }
23 | }
24 |
25 | export async function getGitOrganisation(cwd = pwd) {
26 | return repoToUsername(await getGitOriginUrl(cwd)) || null
27 | }
28 |
29 | export async function getGitProject(cwd = pwd) {
30 | return repoToProject(await getGitOriginUrl(cwd)) || null
31 | }
32 |
33 | export async function getGitUsername(cwd = pwd) {
34 | const detail = (details[cwd] = details[cwd] || {})
35 | if (detail.username) return detail.username
36 | try {
37 | const stdout = await exec('git config --global user.name', {
38 | cwd,
39 | stdio: ['ignore', 'pipe', 'ignore'],
40 | })
41 | const result = (stdout && stdout.toString().trim()) || null
42 | detail.username = result
43 | return result
44 | } catch (error) {
45 | return null
46 | }
47 | }
48 |
49 | async function getGitGlobalConfigDefaultBranch(cwd = pwd) {
50 | try {
51 | const stdout = await exec('git config --global init.defaultBranch', {
52 | cwd,
53 | stdio: ['ignore', 'pipe', 'ignore'],
54 | })
55 | const result = (stdout && stdout.toString().trim()) || null
56 | return result
57 | } catch (error) {
58 | return null
59 | }
60 | }
61 |
62 | async function getGitLocalConfigDefaultBranch(cwd = pwd) {
63 | try {
64 | const stdout = await exec('git config init.defaultBranch', {
65 | cwd,
66 | stdio: ['ignore', 'pipe', 'ignore'],
67 | })
68 | const result = (stdout && stdout.toString().trim()) || null
69 | return result
70 | } catch (error) {
71 | return null
72 | }
73 | }
74 |
75 | async function getGitActiveBranch(cwd = pwd) {
76 | try {
77 | const stdout = await exec('git rev-parse --abbrev-ref HEAD', {
78 | cwd,
79 | stdio: ['ignore', 'pipe', 'ignore'],
80 | })
81 | const result = (stdout && stdout.toString().trim()) || null
82 | return result
83 | } catch (error) {
84 | return null
85 | }
86 | }
87 |
88 | export async function getGitDefaultBranch(cwd = pwd) {
89 | const detail = (details[cwd] = details[cwd] || {})
90 | if (detail.branch) return detail.branch
91 | try {
92 | const result =
93 | (await getGitActiveBranch()) ||
94 | (await getGitLocalConfigDefaultBranch()) ||
95 | (await getGitGlobalConfigDefaultBranch()) ||
96 | 'main'
97 | detail.branch = result
98 | return result
99 | } catch (error) {
100 | return null
101 | }
102 | }
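// Illustrative sketch, not part of the source: getGitDefaultBranch() prefers the currently
// checked-out branch, then the local init.defaultBranch, then the global one, and only falls back
// to 'main' when none of those resolve, e.g. inside an older repository checked out on 'master' it
// would return 'master'.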
103 |
104 | export async function getGitEmail(cwd = pwd) {
105 | const detail = (details[cwd] = details[cwd] || {})
106 | if (detail.email) return detail.email
107 | try {
108 | const stdout = await exec('git config --global user.email', {
109 | cwd,
110 | stdio: ['ignore', 'pipe', 'ignore'],
111 | })
112 | const result = (stdout && stdout.toString().trim()) || null
113 | detail.email = result
114 | return result
115 | } catch (error) {
116 | return null
117 | }
118 | }
119 |
--------------------------------------------------------------------------------
/source/index.js:
--------------------------------------------------------------------------------
1 | // external
2 | import { preloadNodeVersions } from '@bevry/nodejs-versions'
3 |
4 | // local
5 | import { status, success } from './log.js'
6 | import { spawn } from './fs.js'
7 | import { readPackage, updatePackageData } from './package.js'
8 | import { updateCI } from './ci.js'
9 | import { getAnswers } from './questions.js'
10 | import { updateBaseFiles } from './base.js'
11 | import { generateEditions } from './editions.js'
12 | import { readWebsite, updateWebsite } from './website.js'
13 | import { updateRuntime } from './runtime.js'
14 |
15 | export default async function init(state) {
16 | await preloadNodeVersions()
17 |
18 | await readPackage(state)
19 |
20 | await readWebsite(state)
21 |
22 | await getAnswers(state)
23 |
24 | await updatePackageData(state)
25 |
26 | await generateEditions(state)
27 |
28 | await updateWebsite(state)
29 |
30 | await updateBaseFiles(state)
31 |
32 | await updateRuntime(state)
33 |
34 | await updateCI(state)
35 |
36 | // and finish it all up
37 | status('running release prepare...')
38 | await spawn([state.answers.packageManager, 'run', 'our:release:prepare'])
39 | status('...ran release prepare')
40 |
41 | // log
42 | success('all done!')
43 | }
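// A minimal invocation sketch (illustrative only; the actual CLI wiring lives in
// bin.mjs and may differ):
//
//   import init from './index.js'
//   import state from './state.js'
//
//   init(state).catch((error) => {
//     console.error(error)
//     process.exit(1)
//   })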
44 |
--------------------------------------------------------------------------------
/source/log.js:
--------------------------------------------------------------------------------
1 | // external
2 | import * as ansi from '@bevry/ansi'
3 |
4 | export function status(...messages) {
5 | for (const message of messages) {
6 | process.stdout.write(ansi.bold(ansi.underline(message)))
7 | }
8 | process.stdout.write('\n')
9 | }
10 |
11 | export function note(...messages) {
12 | for (const message of messages) {
13 | process.stderr.write(ansi.bold(ansi.yellow(message)))
14 | }
15 | process.stderr.write('\n')
16 | }
17 |
18 | export function warn(...messages) {
19 | for (const message of messages) {
20 | process.stderr.write(ansi.bold(ansi.underline(ansi.magenta(message))))
21 | }
22 | process.stderr.write('\n')
23 | }
24 |
25 | export function error(...messages) {
26 | for (const message of messages) {
27 | process.stderr.write(ansi.bold(ansi.underline(ansi.red(message))))
28 | }
29 | process.stderr.write('\n')
30 | }
31 |
32 | export function success(...messages) {
33 | for (const message of messages) {
34 | process.stderr.write(ansi.bold(ansi.underline(ansi.green(message))))
35 | }
36 | process.stderr.write('\n')
37 | }
38 |
39 | export function fatal(...messages) {
40 | error(...messages)
41 | process.exit(1)
42 | }
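// Usage sketch (illustrative): status writes to stdout, the rest write to stderr,
// and fatal logs then exits the process:
//
//   status('updating package.json...')
//   note('skipping optional step')
//   warn('deprecated option detected')
//   success('all done!')
//   fatal('cannot continue') // red, underlined, then process.exit(1)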
43 |
--------------------------------------------------------------------------------
/source/package.js:
--------------------------------------------------------------------------------
1 | // builtin
2 | import * as pathUtil from 'node:path'
3 |
4 | // external
5 | import { is as isBevryOrganisation } from '@bevry/github-orgs'
6 | import { complement, has } from '@bevry/list'
7 | import arrangekeys from 'arrangekeys'
8 | import arrangePackageData from 'arrange-package-json'
9 | import { isAccessible } from '@bevry/fs-accessible'
10 | import write from '@bevry/fs-write'
11 | import {
12 | Fellow,
13 | getBackers,
14 | renderBackers,
15 | getGitHubSlugFromPackageData,
16 | getGitHubSlugFromUrl,
17 | getRepositoryIssuesUrlFromGitHubSlugOrUrl,
18 | getRepositoryUrlFromPackageData,
19 | getRepositoryUrlFromUrlOrGitHubSlug,
20 | getRepositoryWebsiteUrlFromGitHubSlugOrUrl,
21 | hasCredentials,
22 | } from '@bevry/github-api'
23 | import trimEmptyKeys from 'trim-empty-keys'
24 |
25 | // local
26 | import {
27 | defaultDeploy,
28 | ensureScript,
29 | fixBalupton,
30 | fixAuthor,
31 | fixAuthors,
32 | } from './util.js'
33 | import { pwd, pastBevrySponsors } from './data.js'
34 | import { status } from './log.js'
35 | import { echoExists, parse } from './fs.js'
36 | import { getVercelName } from './website.js'
37 |
38 | // Prepare
39 | const mandatoryScriptsList =
40 | 'our:setup our:compile our:meta our:verify our:deploy our:release test'.split(
41 | ' ',
42 | )
43 |
44 | // ====================================
45 | // Helpers
46 |
47 | export async function isNPM() {
48 | const npmlock = await isAccessible(`./package-lock.json`)
49 | return npmlock
50 | }
51 |
52 | export async function isPNPM() {
53 | const pnpm = await isAccessible(`./pnpm-lock.yaml`)
54 | return pnpm
55 | }
56 |
57 | export async function isYARN() {
58 | const pnpjs = await isAccessible(`./.pnp.js`)
59 | const pnp = await isAccessible(`./.pnp`)
60 | const yarnlock = await isAccessible(`./yarn.lock`)
61 | const yarn = yarnlock || pnp || pnpjs
62 | return yarn
63 | }
64 |
65 | export function isGitUrl(input) {
66 | return /\.git$/.test(input)
67 | }
68 |
69 | export function getRepoUrl(input = '') {
70 | return getRepositoryUrlFromUrlOrGitHubSlug(input) || null
71 | }
72 |
73 | export function slugToWebsite(githubSlug = '') {
74 | return getRepositoryWebsiteUrlFromGitHubSlugOrUrl(githubSlug) || null
75 | }
76 |
77 | export function slugToIssues(githubSlug = '') {
78 | return getRepositoryIssuesUrlFromGitHubSlugOrUrl(githubSlug) || null
79 | }
80 |
81 | export function repoToSlug(input = '') {
82 | return getGitHubSlugFromUrl(input) || null
83 | }
84 |
85 | export function repoToUsername(input = '') {
86 | const githubSlug = getGitHubSlugFromUrl(input)
87 | return (githubSlug && githubSlug.split('/')[0]) || null
88 | }
89 |
90 | export function repoToProject(input = '') {
91 | const githubSlug = getGitHubSlugFromUrl(input)
92 | return (githubSlug && githubSlug.split('/')[1]) || null
93 | }
94 |
95 | export function getPackageName(packageData) {
96 | return packageData.name || null
97 | }
98 |
99 | export function getPackageDescription(packageData) {
100 | return packageData.description || null
101 | }
102 |
103 | export function getPackageKeywords(packageData) {
104 | return (packageData.keywords && packageData.keywords.join(', ')) || null
105 | }
106 |
107 | export function getPackageNodeEngine(packageData) {
108 | return (packageData.engines && packageData.engines.node) || null
109 | }
110 |
111 | export function getPackageNodeEngineVersion(packageData) {
112 | const nodeEngine = getPackageNodeEngine(packageData)
113 | if (nodeEngine) return nodeEngine.replace(/[^0-9]+/, '') || null
114 | return null
115 | }
116 |
117 | export function setPackageNodeEngine(packageData, nodeEngine) {
118 | if (!packageData.engines) packageData.engines = {}
119 | packageData.engines.node = nodeEngine
120 | }
121 |
122 | export function getPackageDocumentationDependency(packageData) {
123 | if (packageData.devDependencies) {
124 | if (
125 | packageData.devDependencies.documentation ||
126 | packageData.devDependencies.yuidocjs ||
127 | packageData.devDependencies.biscotto
128 | ) {
129 | return true
130 | }
131 | }
132 | return false
133 | }
134 |
135 | export function getPackageFlowtypeDependency(packageData) {
136 | return (
137 | (packageData.devDependencies &&
138 | Boolean(packageData.devDependencies['flow-bin'])) ||
139 | null
140 | )
141 | }
142 |
143 | export function hasSyntax(packageData, syntax) {
144 | const edition =
145 | packageData.editions &&
146 | packageData.editions.length &&
147 | packageData.editions[0]
148 | const tags = (edition && (edition.tags || edition.syntaxes)) || []
149 | return has(tags, syntax)
150 | }
151 |
152 | /** Does the source code use ESM? */
153 | export function isSourceModule(packageData) {
154 | return hasSyntax(packageData, 'import')
155 | }
156 |
157 | /** Does the exported package use ESM by default? */
158 | export function isPackageModule(packageData) {
159 | return packageData.type === 'module'
160 | }
161 |
162 | export function getPackageRepoUrl(packageData) {
163 | return getRepositoryUrlFromPackageData(packageData) || null
164 | }
165 |
166 | export function getPackageAuthor(packageData) {
167 | return packageData.author || null
168 | }
169 |
170 | export function hasEditions(packageData) {
171 | return packageData.editions && Boolean(packageData.editions.length)
172 | }
173 |
174 | export function isES5(packageData) {
175 | return (
176 | packageData.editions &&
177 | packageData.editions[0] &&
178 | has(packageData.editions[0].tags, 'es5')
179 | )
180 | }
181 |
182 | export function getPackageScript(packageData, key) {
183 | return (packageData.scripts && packageData.scripts[key]) || null
184 | }
185 |
186 | export function hasPackageScript(packageData, key) {
187 | return Boolean(getPackageScript(packageData, key))
188 | }
189 |
190 | export function hasPackageScriptPrefix(packageData, key) {
191 | return Boolean(
192 | Object.keys(packageData.scripts || {}).find((value) =>
193 | value.startsWith(key),
194 | ),
195 | )
196 | }
197 |
198 | export function hasDocumentation(packageData) {
199 | return hasPackageScript(packageData, 'our:meta:docs')
200 | }
201 |
202 | export function hasMultipleEditions(packageData) {
203 | if (packageData.editions) {
204 | return packageData.editions.length > 1
205 | }
206 | return null
207 | }
208 |
209 | export function isPackageJavaScript(packageData) {
210 | return hasSyntax(packageData, 'esnext')
211 | }
212 |
213 | export function isPackageTypeScript(packageData) {
214 | if (packageData) {
215 | if (/\.ts$/.test(packageData.main)) {
216 | return true
217 | }
218 | if (packageData.devDependencies) {
219 | if (packageData.devDependencies.typescript) {
220 | return true
221 | }
222 | }
223 | if (hasSyntax(packageData, 'typescript')) {
224 | return true
225 | }
226 | }
227 | return false
228 | }
229 |
230 | export function isPackageJSON(packageData) {
231 | return /\.json$/.test(packageData.main) || false
232 | }
233 |
234 | export function isPackageCoffee(packageData) {
235 | if (packageData) {
236 | if (/\.coffee$/.test(packageData.main)) {
237 | return true
238 | }
239 | if (packageData.devDependencies) {
240 | if (
241 | packageData.devDependencies['coffee-script'] ||
242 | packageData.devDependencies.coffeescript
243 | ) {
244 | return true
245 | }
246 | }
247 | if (hasSyntax(packageData, 'coffeescript')) {
248 | return true
249 | }
250 | }
251 | return false
252 | }
253 |
254 | export function getPackageProperty(packageData, key) {
255 | return packageData[key]
256 | }
257 |
258 | export function getPackageOrganisation(packageData) {
259 | return repoToUsername(getGitHubSlugFromPackageData(packageData)) || null
260 | }
261 |
262 | export function isPackageDocPadPlugin(packageData) {
263 | return (
264 | (packageData.name && packageData.name.startsWith('docpad-plugin-')) || false
265 | )
266 | }
267 |
268 | export function hasPackageDependency(packageData, key) {
269 | const {
270 | dependencies = {},
271 | devDependencies = {},
272 | peerDependencies = {},
273 | } = packageData
274 | return (
275 | Boolean(dependencies[key]) ||
276 | Boolean(devDependencies[key]) ||
277 | Boolean(peerDependencies[key])
278 | )
279 | }
280 |
281 | export function getBasename(path) {
282 | // remove dirname, then remove extension
283 | return (
284 | (typeof path === 'string' &&
285 | path.replace(/^.+\//, '').replace(/\.[^.]+$/, '')) ||
286 | null
287 | )
288 | }
289 |
290 | export function getPackageTestEntry(packageData) {
291 | if (packageData) {
292 | if (isPackageDocPadPlugin(packageData)) {
293 | return 'test'
294 | } else if (packageData.scripts && packageData.scripts.test) {
295 | const result = packageData.scripts.test.match(
296 | /^node(?: --[a-zA-Z0-9_]+)* (?:[^/]+\/)*([^.]+)\.js/,
297 | ) /* fetches filename without ext */
298 | return (result && result[1]) || null
299 | }
300 | }
301 | return null
302 | }
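// Example (hypothetical package data): { scripts: { test: 'node ./compiled/test.js --reporter tap' } }
// yields 'test', while a DocPad plugin always yields 'test' regardless of its scripts.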
303 |
304 | // return the new bin entry as a string (if there is a single bin entry), or as an object that maps every existing bin name to the same new entry (if there are multiple bin names)
305 | export function newPackageBinEntry(packageData, binEntry) {
306 | if (!binEntry) return null
307 | if (typeof packageData.bin === 'string') {
308 | return binEntry
309 | } else if (typeof packageData.bin === 'object') {
310 | const result = {}
311 | for (const key of Object.keys(packageData.bin)) {
312 | result[key] = binEntry
313 | }
314 | return result
315 | } else {
316 | // not yet created, so add
317 | return binEntry
318 | }
319 | }
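// Example (hypothetical data): with packageData.bin = { foo: './bin.js', bar: './bin.js' },
// newPackageBinEntry(packageData, 'compiled/bin.js') returns
// { foo: 'compiled/bin.js', bar: 'compiled/bin.js' }; with a string or missing bin
// field it simply returns 'compiled/bin.js'.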
320 |
321 | export function getPackageBinEntry(packageData, basename = true) {
322 | const bin = packageData.bin
323 | if (bin) {
324 | const entry = typeof bin === 'string' ? bin : Object.values(bin)[0]
325 | return basename ? getBasename(entry) : entry
326 | }
327 | return null
328 | }
329 |
330 | export async function getPackageIndexEntry(packageData) {
331 | if (packageData && isPackageDocPadPlugin(packageData)) {
332 | return 'index'
333 | }
334 | return getBasename(packageData && packageData.main)
335 | }
336 |
337 | export async function getPackageNodeEntry(packageData) {
338 | if (packageData && isPackageDocPadPlugin(packageData)) {
339 | return 'index'
340 | }
341 | return getBasename(
342 | (await echoExists('source/node.ts')) ||
343 | (await echoExists('source/node.coffee')) ||
344 | (await echoExists('source/node.mjs')) ||
345 | (await echoExists('source/node.js')),
346 | )
347 | // don't use packageData.node
348 | // have them set it via package.json:boundation:nodeEntry
349 | 	// as otherwise, when you delete the node entry file (to, say, fall back to the index entry file), the change won't be detected automatically
350 | }
351 |
352 | export async function getPackageDenoEntry(packageData) {
353 | return getBasename(
354 | (await echoExists('source/deno.ts')) || (packageData && packageData.deno),
355 | )
356 | // don't use packageData.deno
357 | // have them set it via package.json:boundation:denoEntry
358 | 	// as otherwise, when you delete the deno entry file (to, say, fall back to the index entry file), the change won't be detected automatically
359 | }
360 |
361 | export async function getPackageBrowserEntry() {
362 | return getBasename(
363 | (await echoExists('source/browser.ts')) ||
364 | (await echoExists('source/browser.coffee')) ||
365 | (await echoExists('source/browser.mjs')) ||
366 | (await echoExists('source/browser.js')),
367 | )
368 | // don't use packageData.browser
369 | // have them set it via package.json:boundation:browserEntry
370 | 	// as otherwise, when you delete the browser entry file (to, say, fall back to the index entry file), the change won't be detected automatically
371 | }
372 |
373 | export function getWebsiteType(packageData, vercelConfig) {
374 | if (hasPackageDependency(packageData, 'next')) {
375 | return 'vercel: next.js'
376 | }
377 | if (hasPackageDependency(packageData, 'docpad')) {
378 | return 'vercel: docpad'
379 | }
380 | if (getVercelName(vercelConfig)) {
381 | if (
382 | vercelConfig.builds &&
383 | vercelConfig.builds.length &&
384 | vercelConfig.builds[0].use === '@vercel/static'
385 | ) {
386 | return 'vercel: static'
387 | }
388 | return 'vercel: custom'
389 | }
390 | if (hasPackageDependency(packageData, 'surge')) {
391 | return 'surge'
392 | }
393 | return 'custom'
394 | }
395 |
396 | export function getProjectType(packageData, vercelConfig) {
397 | if (hasPackageScript(packageData, 'start') || getVercelName(vercelConfig)) {
398 | return 'website'
399 | }
400 | return 'package'
401 | }
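// Example (hypothetical vercel.json, assuming getVercelName reads its `name` field):
// { name: 'my-site', builds: [{ use: '@vercel/static' }] } with no next/docpad/surge
// dependency yields getWebsiteType(...) === 'vercel: static', and because the vercel
// name is truthy, getProjectType(...) === 'website'.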
402 |
403 | // ====================================
405 | // Arrange
405 |
406 | export function arrangePackage(state) {
407 | let packageData = JSON.parse(JSON.stringify(state.packageData))
408 |
409 | // Keywords
410 | packageData.keywords = Array.from(state.answers.keywords.values()).sort()
411 |
412 | // ---------------------------------
413 | // Editions
414 |
415 | const activeEditions = state.activeEditions
416 |
417 | // inject edition properties into package data
418 | if (state.activeEditions.length) {
419 | // add targets to babel, while supporting custom configuration
420 | packageData.babel = packageData.babel || {}
421 | packageData.babel.env = {}
422 | for (const edition of state.babelEditions) {
423 | packageData.babel.env[edition.directory] = edition.babel
424 | }
425 |
426 | // trim babel if empty
427 | if (Object.keys(packageData.babel.env).length === 0) {
428 | delete packageData.babel
429 | }
430 |
431 | // arrange keys of editions
432 | packageData.editions = activeEditions.map(function (edition) {
433 | const result = arrangekeys(
434 | edition,
435 | 'description directory entry tags engines',
436 | )
437 | if (result.tags) result.tags = Array.from(result.tags.values())
438 | return result
439 | })
440 | } else {
441 | delete packageData.editions
442 | }
443 |
444 | // trim empty keys
445 | trimEmptyKeys(packageData)
446 |
447 | // ---------------------------------
448 | // Arrange
449 |
450 | // package keys
451 | packageData = arrangePackageData(packageData)
452 |
453 | // ---------------------------------
454 | // Scripts
455 |
456 | // scripts
457 | let scripts = Object.assign({}, state.userScripts, state.scripts)
458 |
459 | // merge in editions[].scripts
460 | Object.assign(
461 | scripts,
462 | ...activeEditions.map((edition) => edition.scripts || {}),
463 | )
464 |
465 | // inject empty mandatory scripts if they don't exist
466 | // to ensure they are sorted correctly
467 | for (const key of mandatoryScriptsList) {
468 | if (!scripts[key]) scripts[key] = false
469 | }
470 |
471 | // cycle through the scripts
472 | // done via a list and for of loop, as we want to run on new entries
473 | const merge = {}
474 | const list = new Set(Object.keys(scripts))
475 | for (const key of list) {
476 | const value = scripts[key]
477 | const parts = key.split(':')
478 | if (parts.length >= 2) {
479 | 			// if a my: script exists with the same name as an our: script
480 | 			// then tell the our: script to use the my: script instead
481 | 			// this is a way to achieve custom (non-alphabetical) sort orders
482 | 			// while still allowing user overrides
483 | if (parts[0] === 'my') {
484 | if (value) {
485 | const ourKey = 'our:' + parts.slice(1).join(':')
486 | if (!scripts[ourKey]) {
487 | scripts[ourKey] = `${state.answers.packageManager} run ${key}`
488 | list.add(ourKey)
489 | }
490 | } else {
491 | delete scripts[key]
492 | }
493 | }
494 |
495 | 			// mark the prefixes with a false placeholder if not already set
496 | 			// so that we can fill them in later, once everything is sorted into the right spots,
497 | 			// and note which keys need to be merged into which prefixes
498 | else if (
499 | parts.length >= 3 /* don't concat down to `our` */ &&
500 | parts[0] === 'our'
501 | ) {
502 | const prefix = parts.slice(0, -1).join(':')
503 | if (!scripts[prefix]) {
504 | scripts[prefix] = false
505 | merge[key] = prefix
506 | list.add(prefix)
507 | }
508 | }
509 | }
510 | }
511 |
512 | // perform the alpha sort, with my: scripts first, then our: scripts, then everything else
513 | const myScripts = Array.from(list)
514 | .filter((key) => key.startsWith('my:'))
515 | .sort()
516 | const ourScripts = Array.from(list)
517 | .filter((key) => key.startsWith('our:'))
518 | .sort()
519 | scripts = arrangekeys(scripts, myScripts.concat(ourScripts))
520 |
521 | // use new order, to merge scripts into a set, to prevent duplicates
522 | const sortedList = Object.keys(scripts)
523 | for (const key of sortedList) {
524 | const prefix = merge[key]
525 | // check if this key is one that is to be merged
526 | if (prefix) {
527 | const value = scripts[prefix] || false
528 | if (typeof value === 'string') {
529 | // ignore, keep the user override
530 | } else {
531 | if (!value) scripts[prefix] = new Set()
532 | scripts[prefix].add(`${state.answers.packageManager} run ${key}`)
533 | }
534 | }
535 | }
536 |
537 | // then combine them into a string once done
538 | for (const key of sortedList) {
539 | const script = scripts[key]
540 | if (script && script instanceof Set) {
541 | scripts[key] = Array.from(script).join(' && ')
542 | }
543 | }
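	// Example (hypothetical scripts, assuming answers.packageManager === 'npm'):
	// a user script { 'my:compile:types': 'tsc' } plus a generated
	// { 'our:compile:babel': '...' } produce
	//   our:compile:types = 'npm run my:compile:types'
	//   our:compile       = 'npm run our:compile:babel && npm run our:compile:types'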
544 |
545 | // if the mandatory scripts didn't have anything to merge, then prefill them
546 | for (const key of mandatoryScriptsList) {
547 | ensureScript(scripts, key)
548 | }
549 |
550 | // result
551 | packageData.scripts = scripts
552 |
553 | // ---------------------------------
554 | // Done
555 |
556 | return packageData
557 | }
558 |
559 | // ====================================
560 | // Update
561 |
562 | export async function readPackage(state) {
563 | const path = pathUtil.resolve(pwd, 'package.json')
564 | const special = ['start', 'test']
565 |
566 | // read
567 | let packageData = {}
568 | try {
569 | if (await isAccessible(path)) packageData = (await parse(path)) || {}
570 | } catch (err) {}
571 |
572 | // adjust
573 | const userScripts = {}
574 | if (packageData.scripts) {
575 | // deploy to my:deploy
576 | if (
577 | packageData.scripts.deploy &&
578 | packageData.scripts.deploy !== defaultDeploy
579 | ) {
580 | userScripts['my:deploy'] = packageData.scripts.deploy
581 | delete packageData.scripts.deploy
582 | }
583 | if (packageData.scripts['my:deploy']) {
584 | packageData.scripts['my:deploy'] = packageData.scripts[
585 | 'my:deploy'
586 | ].replace('npm run our:compile && ', '')
587 | packageData.scripts.deploy = defaultDeploy
588 | }
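		// Example (hypothetical script): scripts.deploy = 'surge ./www' (a custom deploy,
		// not the default) is moved to userScripts['my:deploy'] and removed from
		// packageData.scripts, so the custom command survives while the standard
		// deploy script can be regenerated.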
589 |
590 | // keep my:* scripts, and scripts with no parts
591 | Object.keys(packageData.scripts).forEach(function (key) {
592 | const value = packageData.scripts[key]
593 | if (special.includes(key)) {
594 | userScripts[key] = value
595 | } else if (key.startsWith('my:')) {
596 | userScripts[key] = value
597 | } else if (!key.includes(':')) {
598 | userScripts[key] = value
599 | }
600 | })
601 | }
602 |
603 | // apply
604 | state.packageData = packageData
605 | state.userScripts = userScripts
606 |
607 | // return
608 | return packageData
609 | }
610 |
611 | export async function writePackage(state) {
612 | const path = pathUtil.resolve(pwd, 'package.json')
613 |
614 | status('writing the package.json file...')
615 | await write(path, JSON.stringify(arrangePackage(state), null, ' '))
616 | status('...wrote the package.json file')
617 | }
618 |
619 | export async function updatePackageData(state) {
620 | const packageDataLocal = state.packageData
621 | const { answers } = state
622 |
623 | // note
624 | status('customising package data...')
625 |
626 | // package data
627 | const packageData = Object.assign(
628 | {
629 | version: '1.0.0',
630 | license: 'Artistic-2.0',
631 | engines: {},
632 | dependencies: {},
633 | devDependencies: {},
634 | scripts: {},
635 | },
636 | packageDataLocal || {},
637 | {
638 | name: answers.name,
639 | author: answers.author,
640 | description: answers.description,
641 | repository: {
642 | type: 'git',
643 | url: answers.repoUrl,
644 | },
645 | },
646 | )
647 |
648 | 	// prepare badge removals: defunct badge services, plus the npm badges when the package won't be published to npm
649 | const removeBadges = ['gratipay', 'daviddm', 'daviddmdev']
650 | if (!answers.npm) removeBadges.push('npmversion', 'npmdownloads')
651 |
652 | // homepage, issues
653 | const homepage = slugToWebsite(answers.githubSlug)
654 | if (homepage) packageData.homepage = homepage
655 | const issues = slugToIssues(answers.githubSlug)
656 | if (issues) packageData.bugs = { url: issues }
657 |
658 | // remove old fields
659 | delete packageData.nakeConfiguration
660 | delete packageData.cakeConfiguration
661 | delete packageData.directories
662 | delete packageData.preferGlobal
663 |
664 | // moved to vercel.json
665 | delete packageData.now
666 | delete packageData.vercel
667 |
668 | // remove old docpad engines convention, replaced by peer dependency
669 | delete packageData.engines.docpad
670 |
671 | // license
672 | if (packageData.license && packageData.license.type) {
673 | packageData.license = packageData.license.type
674 | }
675 |
676 | // private
677 | if (answers.npm) {
678 | delete packageData.private
679 | } else {
680 | packageData.private = true
681 | }
682 |
683 | // prepare backers
684 | packageData.author = fixAuthor(packageData.author)
685 | packageData.authors = fixAuthors(packageData.authors || [])
686 | if (!packageData.contributors) packageData.contributors = []
687 | if (!packageData.maintainers) packageData.maintainers = []
688 | if (!packageData.funders) packageData.funders = []
689 | if (!packageData.sponsors) packageData.sponsors = []
690 | if (!packageData.donors) packageData.donors = []
691 |
692 | // correct backer fields
693 | if (packageData.maintainers.length === 0)
694 | packageData.maintainers = [packageData.author.split(', ')[0]]
695 | packageData.maintainers = packageData.maintainers.map(fixBalupton)
696 | packageData.contributors = packageData.contributors.map(fixBalupton)
697 |
698 | // bevry org customisations
699 | if (isBevryOrganisation(answers.githubUsername)) {
700 | console.log('applying bevry customisations')
701 |
702 | // past donors
703 | packageData.donors = Fellow.add(packageData.donors, pastBevrySponsors).map(
704 | (fellow) => fellow.toString(),
705 | )
706 |
707 | // funding
708 | packageData.funding = 'https://bevry.me/fund'
709 |
710 | // change license
711 | if (packageData.license === 'MIT') packageData.license = 'Artistic-2.0'
712 |
713 | // badges
714 | packageData.badges = {
715 | list: [
716 | 'githubworkflow',
717 | 'npmversion',
718 | 'npmdownloads',
719 | '---',
720 | 'githubsponsors',
721 | 'thanksdev',
722 | 'liberapay',
723 | // doesn't support kofi
724 | 'buymeacoffee',
725 | 'opencollective',
726 | 'crypto',
727 | 'paypal',
728 | '---',
729 | 'discord',
730 | 'twitch',
731 | ],
732 | config: {
733 | githubWorkflow: state.githubWorkflow,
734 | githubSponsorsUsername: 'balupton',
735 | thanksdevGithubUsername: answers.githubUsername,
736 | liberapayUsername: 'bevry',
737 | buymeacoffeeUsername: 'balupton',
738 | opencollectiveUsername: 'bevry',
739 | cryptoURL: 'https://bevry.me/crypto',
740 | paypalURL: 'https://bevry.me/paypal',
741 | discordServerID: '1147436445783560193',
742 | discordServerInvite: 'nQuXddV7VP',
743 | twitchUsername: 'balupton',
744 | },
745 | }
746 | }
747 |
748 | // default badges
749 | if (
750 | !packageData.badges ||
751 | !packageData.badges.list ||
752 | !packageData.badges.list.length
753 | ) {
754 | packageData.badges = {
755 | list: ['npmversion', 'npmdownloads'],
756 | }
757 | }
758 |
759 | // apply badge removals
760 | packageData.badges.list = complement(packageData.badges.list, removeBadges)
761 | delete packageData.badges.gratipayUsername
762 |
763 | // merge with latest backers
764 | if (hasCredentials()) {
765 | 		console.log('fetching latest backers...')
766 | const backers = await getBackers({
767 | githubSlug: answers.githubSlug,
768 | packageData,
769 | })
770 | Object.assign(packageData, renderBackers(backers, { format: 'package' }))
771 | 		console.log('...fetched latest backers')
772 | }
773 |
774 | // note
775 | status('...customised package data')
776 |
777 | // apply
778 | state.packageData = packageData
779 | }
780 |
--------------------------------------------------------------------------------
/source/questions.js:
--------------------------------------------------------------------------------
1 | // builtin
2 | import * as pathUtil from 'node:path'
3 |
4 | // external
5 | import versionCompare from 'version-compare'
6 | import { unique, last, first } from '@bevry/list'
7 | import {
8 | filterNodeVersions,
9 | filterSignificantNodeVersions,
10 | } from '@bevry/nodejs-versions'
11 |
12 | // local
13 | import _getAnswers from './answers.js'
14 | import { pwd, allLanguages } from './data.js'
15 | import { hasScript, isNumber, isSpecified } from './util.js'
16 | import {
17 | getGitDefaultBranch,
18 | getGitEmail,
19 | getGitOriginUrl,
20 | getGitProject,
21 | getGitUsername,
22 | } from './get-git.js'
23 | import {
24 | getPackageAuthor,
25 | getPackageBinEntry,
26 | getPackageBrowserEntry,
27 | getPackageDescription,
28 | getPackageFlowtypeDependency,
29 | getPackageIndexEntry,
30 | getPackageKeywords,
31 | getPackageName,
32 | getPackageNodeEngine,
33 | getPackageNodeEntry,
34 | getPackageProperty,
35 | getPackageRepoUrl,
36 | getPackageTestEntry,
37 | getProjectType,
38 | getRepoUrl,
39 | getWebsiteType,
40 | hasDocumentation,
41 | hasEditions,
42 | hasPackageDependency,
43 | isES5,
44 | isGitUrl,
45 | isPackageCoffee,
46 | isPackageDocPadPlugin,
47 | isPackageJavaScript,
48 | isPackageJSON,
49 | isPackageTypeScript,
50 | isSourceModule,
51 | repoToSlug,
52 | repoToUsername,
53 | } from './package.js'
54 | import { getVercelAliases, getVercelName } from './website.js'
55 |
56 | // ====================================
57 | // Questions
58 |
59 | export async function getQuestions(state) {
60 | const { packageData, vercelConfig } = state
61 | const browsers = getPackageProperty(packageData, 'browsers')
62 | const browser = Boolean(
63 | browsers || getPackageProperty(packageData, 'browser'),
64 | )
65 | const browsersList = typeof browsers === 'string' ? browsers : 'defaults'
66 | const editioned = hasEditions(packageData)
67 | const nodeEngine = getPackageNodeEngine(packageData)
68 | return [
69 | {
70 | name: 'name',
71 | message: 'What will be the package name?',
72 | validate: isSpecified,
73 | default: getPackageName(packageData) || pathUtil.basename(pwd),
74 | },
75 | {
76 | name: 'description',
77 | message: 'and the package description?',
78 | validate: isSpecified,
79 | default: getPackageDescription(packageData),
80 | },
81 | {
82 | name: 'keywords',
83 | message: 'What are some keywords to describe the project?',
84 | validate: isSpecified,
85 | default: getPackageKeywords(packageData),
86 | skip({ keywords }) {
87 | return Boolean(keywords)
88 | },
89 | },
90 | {
91 | name: 'repoUrl',
92 | message: 'What will the git URL be?',
93 | validate: isGitUrl,
94 | default:
95 | getRepoUrl(await getGitOriginUrl()) || getPackageRepoUrl(packageData),
96 | },
97 | {
98 | name: 'defaultBranch',
99 | message: 'What is the default branch for the repository?',
100 | validate: isSpecified,
101 | default: (await getGitDefaultBranch()) || 'main',
102 | async skip() {
103 | return Boolean(await getGitDefaultBranch()) // not an issue due to caching
104 | },
105 | },
106 | {
107 | name: 'githubSlug',
108 | message: 'What is the GitHub Repository slug?',
109 | validate: isSpecified,
110 | skip: true,
111 | default({ repoUrl }) {
112 | return repoToSlug(repoUrl)
113 | },
114 | },
115 | {
116 | name: 'githubUsername',
117 | message: 'What is the GitHub username for the package?',
118 | validate: isSpecified,
119 | default({ repoUrl }) {
120 | return repoToUsername(repoUrl)
121 | },
122 | skip: true,
123 | },
124 | {
125 | name: 'author',
126 | message: 'Who will the package author be?',
127 | validate: isSpecified,
128 | default:
129 | getPackageAuthor(packageData) ||
130 | `${new Date().getFullYear()}+ ${(await getGitUsername()) || 'name'} <${
131 | (await getGitEmail()) || 'email'
132 | }>`,
133 | },
134 | {
135 | name: 'type',
136 | type: 'list',
137 | choices: ['package', 'website'],
138 | message: 'What type of project will this be?',
139 | validate: isSpecified,
140 | default: getProjectType(packageData, vercelConfig),
141 | },
142 | {
143 | name: 'website',
144 | type: 'list',
145 | choices: [
146 | 'vercel: next.js',
147 | 'vercel: docpad',
148 | 'vercel: static',
149 | 'vercel: custom',
150 | 'surge',
151 | 'custom',
152 | 'external',
153 | ],
154 | message: 'What type of website will this be?',
155 | default: getWebsiteType(packageData, vercelConfig),
156 | when({ type }) {
157 | return type === 'website'
158 | },
159 | },
160 | {
161 | name: 'docpadWebsite',
162 | type: 'confirm',
163 | message: 'Will it be a DocPad website?',
164 | default({ website }) {
165 | return Boolean(website && website.includes('docpad'))
166 | },
167 | skip: true,
168 | when({ docpadWebsite }) {
169 | return docpadWebsite
170 | },
171 | },
172 | {
173 | name: 'staticWebsite',
174 | type: 'confirm',
175 | message: 'Will it be a static website?',
176 | default({ website }) {
177 | return Boolean(
178 | website && (website.includes('static') || website === 'surge'),
179 | )
180 | },
181 | skip: true,
182 | when({ staticWebsite }) {
183 | return staticWebsite
184 | },
185 | },
186 | {
187 | name: 'staticDirectory',
188 | message:
189 | 'For the static website, which directory contains the site to be deployed?',
190 | validate: isSpecified,
191 | default({ website }) {
192 | return website && website.includes('docpad') ? 'out' : 'www'
193 | },
194 | when({ staticWebsite }) {
195 | return staticWebsite
196 | },
197 | },
198 | {
199 | name: 'deployTarget',
200 | message: 'For the static website, what is the deploy target?',
201 | validate: isSpecified,
202 | when({ staticDirectory, website }) {
203 | return staticDirectory && website === 'surge'
204 | },
205 | },
206 | {
207 | name: 'vercelWebsite',
208 | type: 'confirm',
209 | message: 'Will it be a Vercel website?',
210 | default({ website }) {
211 | return Boolean(website && website.includes('vercel'))
212 | },
213 | skip: true,
214 | when({ vercelWebsite }) {
215 | return vercelWebsite
216 | },
217 | },
218 | {
219 | name: 'vercelName',
220 | message: 'What label should be used for the site?',
221 | validate: isSpecified,
222 | default: getVercelName(vercelConfig) || (await getGitProject()),
223 | skip: getVercelName(vercelConfig),
224 | when({ vercelWebsite }) {
225 | return vercelWebsite
226 | },
227 | },
228 | {
229 | name: 'vercelAliases',
230 | message: 'What aliases should be used for the site?',
231 | default: getVercelAliases(vercelConfig).join(', '),
232 | skip({ vercelAliases }) {
233 | return vercelAliases
234 | },
235 | when({ vercelWebsite }) {
236 | return vercelWebsite
237 | },
238 | },
239 | {
240 | name: 'nextWebsite',
241 | type: 'confirm',
242 | message: 'Will it be a Next.js website?',
243 | default({ website }) {
244 | return Boolean(website && website.includes('next'))
245 | },
246 | skip: true,
247 | when({ nextWebsite }) {
248 | return nextWebsite
249 | },
250 | },
251 | {
252 | name: 'docpadPlugin',
253 | type: 'confirm',
254 | message: 'Will it be a DocPad plugin?',
255 | default: isPackageDocPadPlugin(packageData),
256 | skip({ docpadPlugin }) {
257 | return docpadPlugin || editioned
258 | },
259 | ignore({ website }) {
260 | return website
261 | },
262 | },
263 | {
264 | name: 'packageManager',
265 | type: 'list',
266 | message: 'Which package manager to use?',
267 | choices: ['npm', 'yarn'],
268 | default: 'npm',
269 | // async default() {
270 | // // having a different package manager for dev and production is not feasible
271 | // // as npm scripts and dev commands are constantly overriding each other
272 | // const yarn = await isYARN()
273 | // const npm = await isNPM()
274 | // if (yarn && !npm) return 'yarn'
275 | // return 'npm'
276 | // },
277 | },
278 | {
279 | name: 'languages',
280 | type: 'checkbox',
281 | choices: allLanguages,
282 | message: 'What programming languages will the source code be written in?',
283 | validate: isSpecified,
284 | default({ website, nextWebsite }) {
285 | const types = [
286 | isES5(packageData) && 'es5',
287 | isPackageTypeScript(packageData) && 'typescript',
288 | isPackageJavaScript(packageData) && 'esnext',
289 | isPackageCoffee(packageData) && 'coffeescript',
290 | isPackageJSON(packageData) && 'json',
291 | (hasPackageDependency(packageData, 'react') || nextWebsite) &&
292 | 'react',
293 | (hasPackageDependency(packageData, 'react') || nextWebsite) && 'jsx',
294 | website && 'html',
295 | website && 'css',
296 | ]
297 | const typesString =
298 | types.filter((value) => value).join(' ') || 'typescript'
299 | return typesString.split(' ')
300 | },
301 | },
302 | {
303 | name: 'language',
304 | type: 'list',
305 | message: 'Which programming language will be the primary one?',
306 | validate: isSpecified,
307 | choices({ languages }) {
308 | return languages
309 | },
310 | default({ languages }) {
311 | return languages[0]
312 | },
313 | skip({ languages }) {
314 | return languages.length === 1
315 | },
316 | ignore({ website }) {
317 | return website
318 | },
319 | },
320 | {
321 | name: 'tsconfig',
322 | message: 'What should the path of the tsconfig file be?',
323 | validate: isSpecified,
324 | default: 'tsconfig.json',
325 | ignore({ languages }) {
326 | return languages.includes('typescript') === false
327 | },
328 | },
329 | {
330 | name: 'sourceModule',
331 | type: 'confirm',
332 | message: 'Will the source code use ESM (import instead of require)?',
333 | default({ languages }) {
334 | return Boolean(
335 | languages.includes('typescript') ? true : isSourceModule(packageData),
336 | )
337 | },
338 | skip({ language }) {
339 | return language !== 'esnext'
340 | },
341 | },
342 | {
343 | name: 'flowtype',
344 | type: 'confirm',
345 | message: 'Will it use flow type for strong type checking?',
346 | default({ language }) {
347 | return Boolean(
348 | language === 'esnext' && getPackageFlowtypeDependency(packageData),
349 | )
350 | },
351 | skip({ language }) {
352 | return language !== 'esnext'
353 | },
354 | ignore({ website }) {
355 | return website
356 | },
357 | },
358 | {
359 | name: 'npm',
360 | type: 'confirm',
361 | message: 'Will it be published to npm?',
362 | default: !getPackageProperty(packageData, 'private'),
363 | skip({ npm }) {
364 | return !npm || editioned
365 | },
366 | ignore({ website }) {
367 | return website
368 | },
369 | },
370 | {
371 | name: 'browser',
372 | type: 'confirm',
373 | message: 'Will it be used on the client-side inside web browsers?',
374 | default({ language }) {
375 | return language === 'json' || browser
376 | },
377 | ignore({ website }) {
378 | return website
379 | },
380 | skip({ language }) {
381 | return language === 'json'
382 | },
383 | },
384 | {
385 | name: 'browsersTargeted',
386 | message: 'Which web browsers will be supported/targeted?',
387 | default: browsersList,
388 | ignore({ browser }) {
389 | return !browser
390 | },
391 | skip({ language }) {
392 | return language === 'json'
393 | },
394 | },
395 | {
396 | name: 'dom',
397 | type: 'confirm',
398 | message: 'Will you make use of any DOM APIs?',
399 | default: false,
400 | // @todo check the tsconfig for it in lib, or check keywords
401 | 			when({ browser, tsconfig }) {
402 | 				return browser && tsconfig
403 | },
404 | },
405 | {
406 | name: 'compileNode',
407 | type: 'confirm',
408 | message: 'Would you like to compile your source code for Node.js?',
409 | default: true,
410 | skip({ language }) {
411 | return ['typescript', 'coffeescript'].includes(language)
412 | },
413 | when({ website, language }) {
414 | return (
415 | !website &&
416 | ['esnext', 'typescript', 'coffeescript'].includes(language)
417 | )
418 | },
419 | },
420 | {
421 | name: 'compilerNode',
422 | type: 'list',
423 | message: 'Which compiler to use for Node.js?',
424 | validate: isSpecified,
425 | choices({ language }) {
426 | return language === 'typescript'
427 | ? ['typescript', 'babel']
428 | : language === 'coffeescript'
429 | ? ['babel', 'coffeescript']
430 | : ['babel']
431 | },
432 | default({ language }) {
433 | return language === 'typescript'
434 | ? 'typescript'
435 | : language === 'coffeescript'
436 | ? 'coffeescript'
437 | : 'babel'
438 | },
439 | when({ compileNode }) {
440 | return compileNode
441 | },
442 | },
443 | {
444 | name: 'compileBrowser',
445 | type: 'confirm',
446 | message: 'Would you like to compile your source code for web browsers?',
447 | default: true,
448 | skip({ language }) {
449 | return ['typescript', 'coffeescript'].includes(language)
450 | },
451 | when({ browser, language }) {
452 | return (
453 | browser && ['esnext', 'typescript', 'coffeescript'].includes(language)
454 | )
455 | },
456 | },
457 | {
458 | name: 'compilerBrowser',
459 | type: 'list',
460 | message: 'Which compiler to use for the browser edition?',
461 | validate: isSpecified,
462 | choices({ language }) {
463 | return language === 'typescript'
464 | ? ['typescript', 'babel']
465 | : language === 'coffeescript'
466 | ? ['babel', 'coffeescript']
467 | : ['babel']
468 | },
469 | default({ language }) {
470 | return language === 'typescript' ? 'typescript' : 'babel'
471 | },
472 | when({ compileBrowser }) {
473 | return compileBrowser
474 | },
475 | },
476 | {
477 | name: 'targetModules',
478 | type: 'checkbox',
479 | message: 'Which module formats should we target?',
480 | validate: isSpecified,
481 | choices({ sourceModule, docpadPlugin, compileNode, compileBrowser }) {
482 | if (docpadPlugin) return ['require']
483 | if (sourceModule) {
484 | // require first, as editions currently doesn't support loading esm:
485 | // Error [ERR_REQUIRE_ESM]: require() of ES Module /Users/balupton/Projects/auto/errlop/edition-es2017-esm/index.js not supported.
486 | // Instead change the require of index.js in null to a dynamic import() which is available in all CommonJS modules.
487 | if (compileNode || compileBrowser) return ['require', 'import']
488 | return ['import']
489 | }
490 | return ['require']
491 | },
492 | default(opts) {
493 | return this.choices(opts)
494 | },
495 | ignore({ website }) {
496 | return website
497 | },
498 | },
499 | {
500 | name: 'sourceDirectory',
501 | message: 'Which directory will the source code be located in?',
502 | validate: isSpecified,
503 | default: 'source',
504 | ignore({ website }) {
505 | return website
506 | },
507 | },
508 | {
509 | name: 'indexEntry',
510 | message: 'What is the default entry filename (without extension)?',
511 | validate: isSpecified, // @todo attempt to remove this
512 | default: (await getPackageIndexEntry(packageData)) || 'index',
513 | skip: editioned,
514 | ignore({ website }) {
515 | return website
516 | },
517 | },
518 | {
519 | name: 'nodeEntry',
520 | message:
521 | 'What is the entry filename (without extension) to use for Node.js?',
522 | validate: isSpecified,
523 | async default({ indexEntry }) {
524 | return (await getPackageNodeEntry(packageData)) || indexEntry
525 | },
526 | skip: editioned,
527 | ignore({ website, indexEntry }) {
528 | return website && indexEntry
529 | },
530 | },
531 | {
532 | name: 'browserEntry',
533 | message:
534 | 'What is the entry filename (without extension) to use for Web Browsers?',
535 | validate: isSpecified,
536 | async default({ indexEntry }) {
537 | return (await getPackageBrowserEntry()) || indexEntry
538 | },
539 | skip: editioned,
540 | when({ browser, indexEntry }) {
541 | return browser && indexEntry
542 | },
543 | },
544 | {
545 | name: 'testEntry',
546 | message:
547 | 'What is the entry filename (without extension) to use for tests?',
548 | validate: isSpecified,
549 | default: getPackageTestEntry(packageData) || 'test',
550 | skip: editioned,
551 | ignore({ website }) {
552 | return website
553 | },
554 | },
555 | {
556 | name: 'bin',
557 | message: 'Will there be a bin/executable/CLI?',
558 | type: 'confirm',
559 | default: Boolean(getPackageBinEntry(packageData)),
560 | skip({ bin }) {
561 | return bin
562 | },
563 | when({ npm }) {
564 | return npm
565 | },
566 | },
567 | {
568 | name: 'binEntry',
569 | message:
570 | 'What is the filename of the bin/executable/CLI entry (without extension)?',
571 | validate: isSpecified,
572 | default: getPackageBinEntry(packageData) || 'bin',
573 | skip() {
574 | return getPackageBinEntry(packageData)
575 | },
576 | when({ bin }) {
577 | return bin
578 | },
579 | },
580 | {
581 | name: 'nodeVersionsRange',
582 | message:
583 | 'What range (if any) do you wish to restrict the Node.js versions to?',
584 | ignore({ vercelWebsite, targetModules }) {
585 | return vercelWebsite || targetModules.join('') === 'import'
586 | },
587 | },
588 | {
589 | name: 'nodeVersions',
590 | message:
591 | 'Automated property to provide Node.js versions for the upcoming questions',
592 | type: 'checkbox',
593 | validate: isSpecified,
594 | choices({ vercelWebsite, targetModules, nodeVersionsRange }) {
595 | // use released flag just in case something ever changes
596 | if (vercelWebsite)
597 | return filterSignificantNodeVersions({ released: true, vercel: true })
598 | if (targetModules.join('') === 'import')
599 | return filterSignificantNodeVersions({
600 | released: true,
601 | maintainedOrLTS: true,
602 | esm: true,
603 | })
604 | return filterSignificantNodeVersions({
605 | released: true,
606 | maintainedOrLTS: true,
607 | gte: '4', // minimum supported editions and assert-helpers version
608 | range: nodeVersionsRange,
609 | })
610 | },
611 | async default(opts) {
612 | const choices = await this.choices(opts)
613 | return choices
614 | },
615 | skip: true,
616 | },
617 | {
618 | name: 'desiredNodeVersion',
619 | message: 'What is the desired Node.js version?',
620 | type: 'list',
621 | validate: isNumber,
622 | choices({ nodeVersions }) {
623 | return nodeVersions
624 | },
625 | default({ nodeVersions }) {
626 | // prefer the active LTS
627 | const preference = last(
628 | filterNodeVersions(nodeVersions, { active: true }),
629 | )
630 | if (preference) return preference
631 | // otherwise fallback to the latest preselected
632 | return last(nodeVersions)
633 | },
634 | },
635 | {
636 | name: 'desiredNodeOnly',
637 | message: `Should we only support the desired Node.js version?`,
638 | type: 'confirm',
639 | default({ website }) {
640 | return Boolean(website)
641 | },
642 | },
643 | {
644 | name: 'maintainedNodeVersions',
645 | message: 'Should we aim to support only maintained Node.js versions?',
646 | type: 'confirm',
647 | default({ desiredNodeOnly }) {
648 | if (desiredNodeOnly) return false
649 | return false
650 | },
651 | skip({ desiredNodeOnly }) {
652 | return Boolean(desiredNodeOnly)
653 | },
654 | },
655 | {
656 | name: 'expandNodeVersions',
657 | message:
658 | 'Should other Node.js versions be supported if they pass the tests?',
659 | type: 'confirm',
660 | default({ desiredNodeOnly }) {
661 | if (desiredNodeOnly) return false
662 | return false
663 | },
664 | skip({ desiredNodeOnly }) {
665 | return Boolean(desiredNodeOnly)
666 | },
667 | },
668 | {
669 | name: 'shrinkNodeVersions',
670 | message:
671 | 'Should unsupported Node.js versions be trimmed if they fail the tests?',
672 | type: 'confirm',
673 | default({ desiredNodeOnly }) {
674 | if (desiredNodeOnly) return false
675 | return false
676 | },
677 | skip({ desiredNodeOnly }) {
678 | return Boolean(desiredNodeOnly)
679 | },
680 | },
681 | {
682 | name: 'nodeVersionsSupportedRange',
683 | message:
684 | 'What range (if any) do you wish to restrict the supported Node.js versions to?',
685 | default: nodeEngine,
686 | skip({ desiredNodeOnly }) {
687 | return desiredNodeOnly
688 | },
689 | },
690 | {
691 | name: 'nodeVersionsSupported',
692 | message: 'Which Node.js versions must your package support?',
693 | type: 'checkbox',
694 | validate: isSpecified,
695 | choices({
696 | nodeVersions,
697 | desiredNodeOnly,
698 | desiredNodeVersion,
699 | maintainedNodeVersions,
700 | nodeVersionsSupportedRange,
701 | }) {
702 | if (desiredNodeOnly) {
703 | return [desiredNodeVersion]
704 | } else if (maintainedNodeVersions) {
705 | return filterNodeVersions(nodeVersions, {
706 | maintained: true,
707 | range: nodeVersionsSupportedRange,
708 | })
709 | } else {
710 | return filterNodeVersions(nodeVersions, {
711 | range: nodeVersionsSupportedRange,
712 | })
713 | }
714 | },
715 | default(opts) {
716 | return this.choices(opts)
717 | },
718 | skip({ maintainedNodeVersions, nodeVersionsSupportedRange }) {
719 | return Boolean(maintainedNodeVersions || nodeVersionsSupportedRange)
720 | },
721 | },
722 | {
723 | name: 'nodeVersionSupportedMinimum',
724 | message:
725 | 'Automated property (do not override) for constraining the minimum Node.js version to be supported',
726 | type: 'list',
727 | validate: isNumber,
728 | choices({ nodeVersionsSupported }) {
729 | return nodeVersionsSupported
730 | },
731 | default(opts) {
732 | return first(this.choices(opts))
733 | },
734 | skip: true,
735 | },
736 | {
737 | name: 'nodeVersionSupportedMaximum',
738 | message:
739 | 				'Automated property (do not override) for constraining the maximum Node.js version to be supported',
740 | type: 'list',
741 | validate: isNumber,
742 | choices({ nodeVersionsSupported }) {
743 | return nodeVersionsSupported
744 | },
745 | default(opts) {
746 | return last(this.choices(opts))
747 | },
748 | skip: true,
749 | },
750 | {
751 | name: 'nodeVersionsTestedRange',
752 | message:
753 | 'What range (if any) do you wish to restrict the tested Node.js versions to?',
754 | skip({ desiredNodeOnly }) {
755 | return desiredNodeOnly
756 | },
757 | },
758 | {
759 | name: 'nodeVersionsTested',
760 | message: 'Which Node.js versions must your package test against?',
761 | type: 'checkbox',
762 | validate: isSpecified,
763 | choices({
764 | language,
765 | desiredNodeOnly,
766 | desiredNodeVersion,
767 | nodeVersions,
768 | nodeVersionsSupported,
769 | maintainedNodeVersions,
770 | expandNodeVersions,
771 | nodeVersionsTestedRange,
772 | }) {
773 | if (language === 'json' || desiredNodeOnly) {
774 | return [desiredNodeVersion]
775 | } else if (expandNodeVersions) {
776 | return filterNodeVersions(nodeVersions, {
777 | range: nodeVersionsTestedRange,
778 | })
779 | } else if (maintainedNodeVersions) {
780 | return filterNodeVersions(nodeVersionsSupported, {
781 | range: nodeVersionsTestedRange,
782 | })
783 | } else {
784 | return filterNodeVersions(nodeVersions, {
785 | range: nodeVersionsTestedRange,
786 | })
787 | }
788 | },
789 | default(opts) {
790 | return this.choices(opts)
791 | },
792 | skip({ nodeVersionsTestedRange }) {
793 | return Boolean(nodeVersionsTestedRange)
794 | },
795 | },
796 | {
797 | name: 'nodeVersionTestedMinimum',
798 | message:
799 | 'Automated property (do not override) for constraining the minimum Node.js version for testing',
800 | type: 'list',
801 | validate: isNumber,
802 | choices({ nodeVersionsTested }) {
803 | return nodeVersionsTested
804 | },
805 | default(opts) {
806 | return first(this.choices(opts))
807 | },
808 | skip: true,
809 | },
810 | {
811 | name: 'nodeVersionTestedMaximum',
812 | message:
813 | 'Automated property (do not override) for constraining the maximum Node.js version for testing',
814 | type: 'list',
815 | validate: isNumber,
816 | choices({ nodeVersionsTested }) {
817 | return nodeVersionsTested
818 | },
819 | default(opts) {
820 | return last(this.choices(opts))
821 | },
822 | skip: true,
823 | },
824 | {
825 | name: 'nodeVersionsTargetedRange',
826 | message:
827 | 'What range (if any) do you wish to restrict the targeted Node.js versions to?',
828 | skip({ desiredNodeOnly }) {
829 | return desiredNodeOnly
830 | },
831 | },
832 | {
833 | name: 'nodeVersionsTargeted',
834 | message:
835 | 				'Which Node.js versions must your package target for compilation?',
836 | type: 'checkbox',
837 | validate: isSpecified,
838 | choices({
839 | language,
840 | desiredNodeOnly,
841 | desiredNodeVersion,
842 | maintainedNodeVersions,
843 | nodeVersionsSupported,
844 | nodeVersionsTested,
845 | nodeVersionsTargetedRange,
846 | }) {
847 | if (language === 'json' || desiredNodeOnly) {
848 | return [desiredNodeVersion]
849 | } else if (maintainedNodeVersions) {
850 | return filterNodeVersions(nodeVersionsSupported, {
851 | range: nodeVersionsTargetedRange,
852 | })
853 | } else {
854 | const supportedAndTestedVersions = unique([
855 | ...nodeVersionsSupported,
856 | ...nodeVersionsTested,
857 | ]).sort(versionCompare)
858 | return filterNodeVersions(supportedAndTestedVersions, {
859 | range: nodeVersionsTargetedRange,
860 | })
861 | }
862 | },
863 | default(opts) {
864 | return this.choices(opts)
865 | },
866 | skip({ nodeVersionsTargetedRange }) {
867 | return Boolean(nodeVersionsTargetedRange)
868 | },
869 | },
870 | {
871 | name: 'nodeVersionsTargetedImportRange',
872 | message:
873 | 'What range (if any) do you wish to restrict the targeted Node.js versions for Import modules to?',
874 | skip({ desiredNodeOnly }) {
875 | return desiredNodeOnly
876 | },
877 | ignore({ targetModules }) {
878 | return targetModules.includes('import') === false
879 | },
880 | },
881 | {
882 | name: 'nodeVersionsTargetedRequireRange',
883 | message:
884 | 'What range (if any) do you wish to restrict the targeted Node.js versions for Require modules to?',
885 | skip({ desiredNodeOnly }) {
886 | return desiredNodeOnly
887 | },
888 | ignore({ targetModules }) {
889 | return targetModules.includes('require') === false
890 | },
891 | },
892 | {
893 | name: 'editionsAutoloader',
894 | type: 'confirm',
895 | message:
896 | 'Use the Editions Autoloader to support the case where multiple editions may be required to support all intended Node.js versions?',
897 | default: true,
898 | when({ nodeVersionsTargeted }) {
899 | return nodeVersionsTargeted.length > 1
900 | },
901 | },
902 | {
903 | name: 'kava',
904 | type: 'confirm',
905 | message: "Use Bevry's testing tools?",
906 | default: true,
907 | ignore({ website, vercelWebsite, docpadPlugin }) {
908 | return docpadPlugin || (website && !vercelWebsite)
909 | },
910 | },
911 | {
912 | name: 'docs',
913 | type: 'confirm',
914 | message: 'Will there be inline source code documentation?',
915 | default({ language }) {
916 | return hasDocumentation(packageData) || language === 'typescript'
917 | },
918 | skip({ language }) {
919 | return language === 'typescript'
920 | },
921 | ignore({ website }) {
922 | return website
923 | },
924 | },
925 | {
926 | name: 'deploymentStrategy',
927 | message:
928 | 'Which deployment strategy should be used for the project and its documentation?',
929 | choices: ['surge', 'bevry', 'custom', 'none'],
930 | validate: isSpecified,
931 | default: hasScript(packageData.scripts, 'my:deploy') ? 'custom' : 'surge',
932 | when({ docs, website }) {
933 | return docs || website
934 | },
935 | },
936 | ]
937 | }
938 |
939 | export async function getAnswers(state) {
940 | // Fetch
941 | const answers = await _getAnswers(
942 | await getQuestions(state),
943 | state.packageData && state.packageData.boundation,
944 | )
945 |
946 | // Apply
947 | state.answers = answers
948 | answers.keywords = new Set((answers.keywords || '').split(/,\s*/))
949 |
950 | 	// ensure the desired Node.js version never exceeds the maximum supported version (e.g. when a new Node.js major is about to be released but the supported range has not caught up yet)
951 | if (answers.desiredNodeVersion && answers.nodeVersionSupportedMaximum) {
952 | if (
953 | versionCompare(
954 | answers.desiredNodeVersion,
955 | answers.nodeVersionSupportedMaximum,
956 | ) === 1
957 | ) {
958 | console.log(
959 | 'constrained desiredNodeVersion to the nodeVersionSupportedMaximum of',
960 | answers.nodeVersionSupportedMaximum,
961 | )
962 | answers.desiredNodeVersion = answers.nodeVersionSupportedMaximum
963 | }
964 | }
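	// Example (hypothetical values): desiredNodeVersion '21' with a
	// nodeVersionSupportedMaximum of '20' gives versionCompare('21', '20') === 1,
	// so desiredNodeVersion is lowered to '20'.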
965 |
966 | // sanity check versions (== instead of === for number/string compare)
967 | /* eslint eqeqeq:0 */
968 | if (
969 | first(answers.nodeVersionsSupported) !=
970 | answers.nodeVersionSupportedMinimum ||
971 | last(answers.nodeVersionsSupported) !=
972 | answers.nodeVersionSupportedMaximum ||
973 | first(answers.nodeVersionsTested) != answers.nodeVersionTestedMinimum ||
974 | last(answers.nodeVersionsTested) != answers.nodeVersionTestedMaximum
975 | ) {
976 | console.error(
977 | first(answers.nodeVersionsSupported),
978 | answers.nodeVersionSupportedMinimum,
979 | last(answers.nodeVersionsSupported),
980 | answers.nodeVersionSupportedMaximum,
981 | first(answers.nodeVersionsTested),
982 | answers.nodeVersionTestedMinimum,
983 | last(answers.nodeVersionsTested),
984 | answers.nodeVersionTestedMaximum,
985 | )
986 | throw new Error(
987 | 'do not use nodeVersion*Minimum and nodeVersion*Maximum, use nodeVersions*Range instead',
988 | )
989 | }
990 |
991 | // return
992 | return answers
993 | }
994 |
--------------------------------------------------------------------------------
/source/state.js:
--------------------------------------------------------------------------------
1 | // external
2 | import { has } from '@bevry/list'
3 |
4 | // export
5 | export const state = {
6 | cleaned: false,
7 | githubWorkflow: 'bevry', // will change if custom was detected
8 | answers: null,
9 | nodeVersionsOptional: [],
10 | packageData: {},
11 | vercelConfig: {},
12 | editions: [],
13 | get useEditionsAutoloader() {
14 | return this.nodeEditionsRequire.length >= 2
15 | },
16 | 	// `active` does not mean loadable; it only controls whether the edition is kept (editions with active === false are dropped)
17 | get activeEditions() {
18 | return this.editions.filter((edition) => edition.active !== false)
19 | },
20 | // get activeEdition() {
21 | // return this.activeEditions[0]
22 | // },
23 | get typesEditions() {
24 | return this.activeEditions.filter((edition) => has(edition.tags, 'types'))
25 | },
26 | get typesEdition() {
27 | const typesEditions = this.typesEditions
28 | if (typesEditions.length > 1) {
29 | console.error(typesEditions)
30 | throw new Error(
31 | 'there is more than one edition catered towards types, not sure what to do here...',
32 | )
33 | }
34 | return typesEditions[0]
35 | },
36 | get babelEditions() {
37 | return this.activeEditions.filter((edition) => edition.babel)
38 | },
39 | // get babelEdition() {
40 | // return this.babelEditions[0]
41 | // },
42 | get compiledEditions() {
43 | return this.activeEditions.filter(
44 | (edition) =>
45 | edition.engines && (edition.engines.node || edition.engines.browsers),
46 | )
47 | },
48 | // get compiledEdition() {
49 | // return this.compiledEditions[this.compiledEditions.length - 1]
50 | // },
51 | get nodeEditions() {
52 | return this.activeEditions.filter(
53 | (edition) => edition.engines && edition.engines.node,
54 | )
55 | },
56 | get nodeEdition() {
57 | return this.nodeEditions[0]
58 | },
59 | get nodeEditionsRequire() {
60 | return this.nodeEditions.filter((edition) => has(edition.tags, 'require'))
61 | },
62 | get nodeEditionRequire() {
63 | return this.nodeEditionsRequire[0]
64 | },
65 | get nodeEditionsImport() {
66 | return this.nodeEditions.filter((edition) => has(edition.tags, 'import'))
67 | },
68 | get nodeEditionImport() {
69 | return this.nodeEditionsImport[0]
70 | },
71 | get browserEditions() {
72 | return this.activeEditions.filter(
73 | (edition) => edition.engines && edition.engines.browsers,
74 | )
75 | },
76 | get browserEdition() {
77 | const browserEditions = this.browserEditions
78 | if (browserEditions.length > 1) {
79 | console.error(browserEditions)
80 | throw new Error(
81 | 'there is more than one edition catered towards browsers, not sure what to do here...',
82 | )
83 | }
84 | return browserEditions[0]
85 | },
86 | get sourceEdition() {
87 | const sourceEdition = this.editions[0]
88 | if (sourceEdition && sourceEdition.active === false) {
89 | throw new Error('source edition had .active=false which is not allowed')
90 | }
91 | return sourceEdition
92 | },
93 | userScripts: {},
94 | scripts: {},
95 | }
96 |
97 | export default state
98 |
--------------------------------------------------------------------------------
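
A brief usage sketch of the getters above, with hypothetical edition objects shaped the way the getters read them (illustrative only, not a guaranteed schema):

    import state from './state.js'

    // hypothetical editions, for illustration only
    state.editions = [
    	{ directory: 'source', tags: ['import'], engines: false },
    	{ directory: 'edition-es2022', tags: ['require'], engines: { node: '18 || 20 || 21' } },
    ]

    state.sourceEdition.directory // 'source' (first edition; must not be inactive)
    state.nodeEditions.map((e) => e.directory) // ['edition-es2022'] (only editions with engines.node)
    state.nodeEditionRequire.directory // 'edition-es2022'
    state.useEditionsAutoloader // false (fewer than two require editions)
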
/source/test.js:
--------------------------------------------------------------------------------
1 | console.log('requiring boundation...')
2 | import boundation from './index.js'
3 | console.log('...required boundation')
4 |
--------------------------------------------------------------------------------
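
Note that static ESM imports are hoisted, so ./index.js is evaluated before the first console.log above runs and both messages print only after the module has already loaded. If the goal is to time the load itself, a dynamic import defers evaluation; a minimal sketch, assuming the package runs as ESM with top-level await available:

    console.log('importing boundation...')
    const { default: boundation } = await import('./index.js')
    console.log('...imported boundation', typeof boundation)
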
/source/util.js:
--------------------------------------------------------------------------------
1 | // external
2 | import * as typeChecker from 'typechecker'
3 | import Fellow from 'fellow'
4 |
5 | export function nodeMajorVersion(value) {
6 | if (typeof value === 'number') {
7 | value = String(value)
8 | } else if (typeof value !== 'string') {
9 | return null
10 | }
11 | return value.startsWith('0')
12 | ? value.split('.').slice(0, 2).join('.')
13 | : value.split('.')[0]
14 | }
15 |
16 | export function nodeMajorVersions(array) {
17 | return array.map((version) => nodeMajorVersion(version))
18 | }
19 |
20 | /** Ensure that the suffix path does not start with the prefix directory path. */
21 | export function unjoin(prefix, suffix) {
22 | if (!suffix) return null
23 | const start = prefix.endsWith('/') ? prefix : prefix + '/'
24 | const result = suffix.startsWith(start) ? suffix.slice(start.length) : suffix
25 | return result
26 | }
27 |
28 | /** Ensure that the suffix path starts with the prefix directory path. */
29 | export function cojoin(prefix, suffix) {
30 | if (!suffix) return null
31 | const start = prefix.endsWith('/') ? prefix : prefix + '/'
32 | const result = suffix.startsWith(start) ? suffix : start + suffix
33 | return result
34 | }
35 |
36 | /** Is the value empty? */
37 | export function isEmpty(value) {
38 | if (value == null) return true
39 | if (value === '') return true
40 | if (typeChecker.isPlainObject(value) && typeChecker.isEmptyPlainObject(value))
41 | return true
42 | return false
43 | }
44 |
45 | /** Set the property inside the object to the value; however, if the value is empty, delete the property instead. */
46 | export function set(obj, key, value) {
47 | if (isEmpty(value)) delete obj[key]
48 | else obj[key] = value
49 | }
50 |
51 | /**
52 | * Get the import/require statement text
53 |  * @returns an ESM import if `isESM` is truthy, otherwise a CJS require
54 | */
55 | export function importOrRequire(left, right, isESM = true) {
56 | return isESM
57 | ? `import ${left} from '${right}'`
58 | : `const ${left} = require('${right}')`
59 | }
60 |
61 | /**
62 | * Get the export statement text
63 |  * @returns an ESM export if `isESM` is truthy, otherwise a CJS export
64 | */
65 | export function exportOrExports(content, isESM = true) {
66 | return isESM ? `export default ${content}` : `module.exports = ${content}`
67 | }
68 |
69 | /** Get the `use strict` header text, but only if it is needed. */
70 | export function useStrict(isESM = true) {
71 | return isESM ? '' : "'use strict'\n"
72 | }
73 |
74 | /** Get packages from both `dependencies` and `devDependencies` */
75 | export function getAllDepNames(packageData) {
76 | if (!packageData.dependencies) packageData.dependencies = {}
77 | if (!packageData.devDependencies) packageData.devDependencies = {}
78 | const depNames = Object.keys(packageData.dependencies)
79 | const devDepNames = Object.keys(packageData.devDependencies)
80 | return depNames.concat(devDepNames)
81 | }
82 |
83 | /** Get packages that exist in both `dependencies` and `devDependencies` */
84 | export function getDuplicateDeps(packageData) {
85 | const allDepNames = new Set(getAllDepNames(packageData))
86 | const duplicateDepNames = []
87 | for (const key of allDepNames) {
88 | if (packageData.devDependencies[key] && packageData.dependencies[key]) {
89 | duplicateDepNames.push(key)
90 | }
91 | }
92 | return duplicateDepNames
93 | }
94 |
95 | /** Decrement the version number by the specified arguments */
96 | export function getPreviousVersion(version, major = 0, minor = 1) {
97 | const parts = String(version)
98 | .split('.')
99 | .map((i) => Number(i))
100 | if (major) {
101 | parts[0] -= major
102 | if (parts[0] < 0) parts[0] = 0
103 | }
104 | if (minor) {
105 | parts[1] -= minor
106 | if (parts[1] < 0) parts[1] = 0
107 | }
108 | return parts.join('.')
109 | }
110 |
111 | /** Fix typescript embedding the source directory inside the output */
112 | export function fixTsc(editionDirectory, sourceDirectory) {
113 | return [
114 | '&&',
115 | '(', // begin fix
116 | `test ! -d ${editionDirectory}/${sourceDirectory}`,
117 | '||',
118 | '(', // begin move
119 | `mv ${editionDirectory}/${sourceDirectory} edition-temp`,
120 | `&& rm -rf ${editionDirectory}`,
121 | `&& mv edition-temp ${editionDirectory}`,
122 | `)`, // end move
123 | ')', // end fix
124 | ]
125 | }
126 |
127 | /** If only one person, then no need to display the years */
128 | export function fixAuthors(people) {
129 | const fellows = Fellow.add(people)
130 | const opts = { displayYears: fellows.length !== 1 }
131 | return fellows.map((fellow) => fellow.toString(opts))
132 | }
133 |
134 | /** Convert Bevry to Benjamin Lupton */
135 | export function fixAuthor(input) {
136 | const people = input
137 | .split(', ')
138 | .map((person) =>
139 | person
140 | .replace('Bevry Pty Ltd', 'Benjamin Lupton')
141 | .replace('', '')
142 | .replace('://bevry.me', '://balupton.com'),
143 | )
144 | .join(', ')
145 | return fixAuthors(people).join(', ')
146 | }
147 |
148 | /** Fix various bad forms of Benjamin Lupton */
149 | export function fixBalupton(person) {
150 | return person
151 | .replace(
152 | /^Benjamin Lupton( )?$/,
153 | 'Benjamin Lupton (https://github.com/balupton)',
154 | )
155 | .replace(
156 | /^Benjamin Lupton( )? \(https?:\/\/github.com\/balupton\)$/,
157 | 'Benjamin Lupton (https://github.com/balupton)',
158 | )
159 | .replace(
160 | /^Benjamin Lupton( )? \(https?:\/\/balupton.com\/?\)$/,
161 | 'Benjamin Lupton (https://github.com/balupton)',
162 | )
163 | }
164 |
165 | /** Trim the organisation/scope name from the package name */
166 | export function trimOrgName(str) {
167 | if (str[0] === '@') return str.split('/').slice(1).join('/')
168 | return str
169 | }
170 |
171 | /** Strip the specified keys from the object */
172 | export function strip(obj, ...keys) {
173 | for (const key of keys) {
174 | delete obj[key]
175 | }
176 | return obj
177 | }
178 |
179 | export function addExtension(file, extension) {
180 | return file ? `${file}.${extension}` : file
181 | }
182 |
183 | /** Trim the string */
184 | export function trim(input = '') {
185 | return input.trim()
186 | }
187 |
188 | export function slugit(input) {
189 | return (
190 | (input && input !== 'undefined' && input.replace(/[^a-zA-Z0-9.-]+/g, '')) ||
191 | ''
192 | )
193 | }
194 |
195 | export function isSpecified(input) {
196 | return slugit(Array.isArray(input) ? input.join(' ') : input).length !== 0
197 | }
198 |
199 | /** Is the string representing a non-negative number (digits and dots only)? */
200 | export function isNumber(input) {
201 | return /^[0-9.]+$/.test(input)
202 | }
203 |
204 | export const defaultScript = "printf '%s\n' 'no need for this project'"
205 |
206 | export const defaultDeploy =
207 | 'npm run our:compile && npm run our:test && npm run our:deploy'
208 |
209 | export function hasScript(scripts, name) {
210 | return scripts && scripts[name] && scripts[name] !== defaultScript
211 | }
212 |
213 | export function ensureScript(scripts, name) {
214 | if (scripts && !scripts[name]) scripts[name] = defaultScript
215 | }
216 |
--------------------------------------------------------------------------------
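
A short usage sketch of several helpers above, with illustrative inputs and the results the code would produce shown as comments:

    import {
    	nodeMajorVersion,
    	unjoin,
    	cojoin,
    	importOrRequire,
    	getPreviousVersion,
    	trimOrgName,
    } from './util.js'

    nodeMajorVersion('0.12.7') // '0.12' (legacy 0.x versions keep two segments)
    nodeMajorVersion(18) // '18'
    unjoin('source', 'source/index.js') // 'index.js'
    cojoin('source', 'index.js') // 'source/index.js'
    importOrRequire('pkg', './pkg.js', false) // "const pkg = require('./pkg.js')"
    getPreviousVersion('4.2.1') // '4.1.1' (minor decremented by default)
    getPreviousVersion('4.2.1', 1, 0) // '3.2.1'
    trimOrgName('@bevry/boundation') // 'boundation'
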
/source/versions.js:
--------------------------------------------------------------------------------
1 | // builtin
2 | import { join } from 'node:path'
3 |
4 | // external
5 | import { Versions } from '@bevry/testen'
6 | import { complement, intersect } from '@bevry/list'
7 | import { filterNodeVersions } from '@bevry/nodejs-versions'
8 | import versionCompare from 'version-compare'
9 | import { isAccessible } from '@bevry/fs-accessible'
10 |
11 | // local
12 | import { status, note } from './log.js'
13 | import {
14 | writePackage,
15 | getPackageNodeEngine,
16 | setPackageNodeEngine,
17 | } from './package.js'
18 | import { updateRuntime } from './runtime.js'
19 | import { nodeMajorVersion, nodeMajorVersions } from './util.js'
20 | import { spawn } from './fs.js'
21 |
22 | // Update engines
23 | export async function updateEngines(state) {
24 | const { answers, nodeEditionsRequire, nodeEditionsImport, packageData } =
25 | state
26 | const allPassedVersions = new Set()
27 | const serial =
28 | ['testen', 'safefs', 'lazy-require'].includes(answers.name) ||
29 | answers.name.includes('docpad')
30 | let useSpecificEngineVersions = false
31 |
32 | // =================================
33 | // run each edition against the supported node version
34 | // to fetch the engines for each edition
35 |
36 | // if we have no editions suitable use `npm test` instead
37 | if (nodeEditionsRequire.length === 0 && nodeEditionsImport.length === 0) {
38 | // this can be the case if it is a website, or an mjs package
39 | status('determining engines for project...')
40 | const versions = new Versions(answers.nodeVersionsTested)
41 | await versions.load()
42 | await versions.install() // @todo if this fails (e.g. no internet), it still continues; it should not
43 | const numbers = versions.array.map((version) => version.version)
44 | await versions.test(`${answers.packageManager} test`, serial)
45 | const passed = versions.json.passed || []
46 | if (passed.length === 0) {
47 | console.error(versions.messages.join('\n\n'))
48 | throw new Error(
49 | `There were no node versions [${numbers.join(
50 | ', ',
51 | )}] on which the project's tests passed`,
52 | )
53 | } else {
54 | useSpecificEngineVersions = true
55 | }
56 |
57 | // add the versions to the list
58 | passed.forEach((version) =>
59 | allPassedVersions.add(nodeMajorVersion(version)),
60 | )
61 |
62 | // log
63 | status(
64 | `...determined engines for project as [${getPackageNodeEngine(
65 | packageData,
66 | )}] against [${numbers.join(', ')}]`,
67 | )
68 | } else {
69 | let recompile = false
70 |
71 | /* eslint no-loop-func:0 */
72 | // to determine the import edition (there can only be one), use onlyAllSupported mode: remove every edition that does not support everything (or at least the supported versions), or fail
73 | // to determine the require editions (there can be many), use allUnique mode: remove duplicate editions
74 | for (const { list, nodeVersions, mode } of [
75 | {
76 | list: nodeEditionsImport,
77 | nodeVersions: filterNodeVersions(answers.nodeVersionsTested, {
78 | esm: true,
79 | }),
80 | mode: 'onlyAllSupported',
81 | },
82 | {
83 | list: nodeEditionsRequire,
84 | nodeVersions: answers.nodeVersionsTested,
85 | mode: answers.editionsAutoloader ? 'allUnique' : 'onlyAllSupported',
86 | },
87 | ]) {
88 | // Skip when we do not care about that module type
89 | if (list.length === 0) continue
90 |
91 | // Prepare
92 | const listPassedVersions = new Set()
93 | let skipRemainderBecausePassedEverything = false
94 | let debug = ''
95 |
96 | // Determine
97 | for (const edition of list) {
98 | // check if we need to skip because passed everything
99 | if (skipRemainderBecausePassedEverything) {
100 | note(
101 | `The edition [${edition.directory}] will be trimmed, as a previous edition already passed all targets`,
102 | )
103 | edition.active = false
104 | recompile = true
105 | continue
106 | }
107 |
108 | // target specified versions and the edition target
109 | const target =
110 | (edition.targets && nodeMajorVersion(edition.targets.node)) || null
111 | const targets = nodeVersions.concat(target || [])
112 |
113 | // check if we need to skip because unnecessary target
114 | if (target && listPassedVersions.has(target)) {
115 | note(
116 | `The edition [${edition.directory}] will be trimmed, as a previous edition already passed its target of ${target}`,
117 | )
118 | edition.active = false
119 | recompile = true
120 | continue
121 | }
122 |
123 | // log
124 | status(`determining engines for edition [${edition.directory}]...`)
125 |
126 | // run compile if needed
127 | if (edition.compileCommand && !(await isAccessible(edition.testPath))) {
128 | await spawn(edition.compileCommand)
129 | }
130 |
131 | // determine the test script for the edition
132 | const test = answers.docpadPlugin
133 | ? `npx docpad-plugintester --edition=${edition.directory}`
134 | : `node ./${join(edition.directory || '.', edition.test)}`
135 |
136 | // install and test the versions
137 | const versions = new Versions(targets)
138 | await versions.load()
139 | await versions.install()
140 | const numbers = versions.array.map((version) => version.version)
141 | await versions.test(test, serial)
142 | const passed = versions.json.passed || []
143 | const failed = versions.json.failed || []
144 |
145 | // update the sets
146 | const passedUnique = passed.filter(
147 | (version) =>
148 | listPassedVersions.has(nodeMajorVersion(version)) === false,
149 | )
150 | const failedUnique = failed.filter(
151 | (version) =>
152 | listPassedVersions.has(nodeMajorVersion(version)) === false,
153 | )
154 | const range = nodeMajorVersions(passed).join(' || ')
155 | skipRemainderBecausePassedEverything = failed.length === 0
156 |
157 | // make engines of the edition the passed versions
158 | edition.engines.node = range
159 |
160 | // log the results
161 | debug += versions.messages.join('\n\n')
162 | console.log(
163 | [
164 | `target: ${target || '*'}`,
165 | `passed: ${passed.join(', ')}`,
166 | `.unique: ${passedUnique.join(', ')}`,
167 | `failed: ${failed.join(', ')}`,
168 | `.unique: ${failedUnique.join(', ')}`,
169 | `range: ${range}`,
170 | ].join('\n'),
171 | )
172 |
173 | // trim non-unique version
174 | if (passedUnique.length === 0) {
175 | // if this one has no unique passes, then it is redundant and can be trimmed
176 | note(
177 | `The edition [${edition.directory}] will be trimmed, as it has no unique passing versions`,
178 | )
179 | edition.active = false
180 | recompile = true
181 | } else {
182 | // had unique passing
183 |
184 | // handle onlyAllSupported mode
185 | if (
186 | mode === 'onlyAllSupported' &&
187 | skipRemainderBecausePassedEverything
188 | ) {
189 | // if this one passes for all targets, then trim on all prior targets
190 | for (const priorEdition of list) {
191 | if (priorEdition === edition) break
192 | if (!priorEdition.active) continue
193 | note(
194 | `The prior edition [${priorEdition.directory}] will be trimmed, as it was partial`,
195 | )
196 | priorEdition.active = false
197 | recompile = true
198 | }
199 | }
200 |
201 | // add the unique versions to the list
202 | passedUnique.forEach((version) =>
203 | listPassedVersions.add(nodeMajorVersion(version)),
204 | )
205 |
206 | // log
207 | status(
208 | `...determined engines for edition [${edition.directory}] as [${
209 | edition.engines.node
210 | }] against [${numbers.join(', ')}]`,
211 | )
212 | }
213 | }
214 |
215 | // fetch the editions we've kept
216 | const keptEditions = Array.from(list.values()).filter(
217 | (edition) => edition.active,
218 | )
219 |
220 | // if we only want one edition, verify we only have one edition
221 | if (mode === 'onlyAllSupported' && keptEditions.length !== 1) {
222 | console.error(debug.trim())
223 | if (keptEditions.length === 0) {
224 | throw new Error(`No editions were kept; there should have been one.`)
225 | }
226 | throw new Error(
227 | `Multiple editions were kept [${keptEditions
228 | .map((edition) => edition.directory)
229 | .join(', ')}] when only one should have been.`,
230 | )
231 | }
232 |
233 | // verify we have editions that pass on our targets
234 | for (const version of nodeVersions) {
235 | if (
236 | !listPassedVersions.has(version) &&
237 | answers.nodeVersionsSupported.includes(version)
238 | ) {
239 | // all kept editions should pass for all supported versions
240 | console.error(debug.trim())
241 | throw new Error(
242 | `The kept editions [${keptEditions
243 | .map((edition) => edition.directory)
244 | .join(
245 | ', ',
246 | )}] still did not pass for the required node version [${version}]`,
247 | )
248 | }
249 | }
250 |
251 | // add this list's passed versions to the overall set of passed versions
252 | listPassedVersions.forEach((i) => allPassedVersions.add(i))
253 | }
254 |
255 | // if there has been an editions change, try again with an updated runtime
256 | if (recompile) {
257 | return await updateRuntime(state)
258 | }
259 | }
260 |
261 | // =================================
262 | // update engines.node
263 |
264 | const supported = answers.nodeVersionsSupported
265 | const tested = answers.nodeVersionsTested
266 | const testedAndPassed = Array.from(allPassedVersions.values()).sort(
267 | versionCompare,
268 | )
269 | const testedAndFailed = complement(tested, testedAndPassed)
270 | const testedAndSupported = intersect(tested, supported)
271 | const failedAndSupported = intersect(testedAndSupported, testedAndFailed)
272 | const passedAndUnsupported = complement(testedAndPassed, supported)
273 | const failedAndUnsupported = complement(testedAndFailed, supported)
274 |
275 | if (failedAndSupported.length) {
276 | throw new Error(
277 | `The project does not support the required versions: ${failedAndSupported.join(
278 | ', ',
279 | )}`,
280 | )
281 | }
282 |
283 | if (passedAndUnsupported.length) {
284 | note(
285 | `The project supports the extra versions: ${passedAndUnsupported.join(
286 | ', ',
287 | )}`,
288 | )
289 | }
290 |
291 | if (failedAndUnsupported.length) {
292 | note(
293 | `The project failed on the unsupported versions: ${failedAndUnsupported.join(
294 | ', ',
295 | )}`,
296 | )
297 | }
298 |
299 | // @todo use state instead of mutating
300 |
301 | // handle expansion
302 | // @todo there is a bug when using expandNodeVersions, as it defaults to Node.js 18, 20, 21
303 | // so if engines is >=10 and the tests only pass on 16 and above, it will change to >=16, as 18 is greater than 16.
304 | if (
305 | answers.expandNodeVersions &&
306 | versionCompare(answers.nodeVersionSupportedMinimum, testedAndPassed[0]) ===
307 | 1
308 | ) {
309 | // our tests revealed we support a lower version than originally supported so expand
310 | const oldValue = getPackageNodeEngine(packageData)
311 | const newValue = '>=' + testedAndPassed[0]
312 | setPackageNodeEngine(packageData, newValue)
313 | if (oldValue !== newValue) {
314 | note(
315 | `The project's Node.js engine has expanded from ${oldValue} to ${newValue}`,
316 | )
317 | } else {
318 | note(`The project's Node.js engine has stayed as ${oldValue}`)
319 | }
320 | } else {
321 | const oldValue = getPackageNodeEngine(packageData)
322 | const newValue =
323 | (useSpecificEngineVersions &&
324 | nodeMajorVersions(testedAndPassed).join(' || ')) ||
325 | (answers.website && `>=${answers.desiredNodeVersion}`) ||
326 | `>=${answers.nodeVersionSupportedMinimum}`
327 | setPackageNodeEngine(packageData, newValue)
328 | if (oldValue !== newValue) {
329 | note(
330 | `The project's Node.js engine has changed from ${oldValue} to ${newValue}`,
331 | )
332 | } else {
333 | note(`The project's Node.js engine has stayed as ${oldValue}`)
334 | }
335 | }
336 |
337 | // handle shrinking in case min test version failed
338 | if (
339 | answers.shrinkNodeVersions &&
340 | versionCompare(answers.nodeVersionTestedMinimum, testedAndPassed[0]) === -1
341 | ) {
342 | const oldValue = answers.nodeVersionTestedMinimum
343 | const newValue = testedAndPassed[0]
344 | answers.nodeVersionTestedMinimum = newValue
345 | answers.nodeVersionsTested = testedAndPassed
346 | if (answers.expandNodeVersions === false) {
347 | state.nodeVersionsOptional = passedAndUnsupported
348 | }
349 | note(
350 | `The project's Node.js tests have been shrunk from ${oldValue} to ${newValue}`,
351 | )
352 | } else {
353 | state.nodeVersionsOptional = failedAndUnsupported
354 | }
355 |
356 | // =================================
357 | // update the package.json file
358 |
359 | await writePackage(state)
360 | }
361 |
--------------------------------------------------------------------------------
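
A worked example of the set arithmetic near the end of updateEngines, with hypothetical version lists, assuming (as the surrounding calls imply) that complement(a, b) keeps the items of a not in b and intersect(a, b) keeps the items common to both:

    import { complement, intersect } from '@bevry/list'

    // hypothetical inputs
    const supported = ['18', '20', '21'] // answers.nodeVersionsSupported
    const tested = ['16', '18', '20', '21'] // answers.nodeVersionsTested
    const testedAndPassed = ['18', '20', '21'] // majors collected into allPassedVersions

    const testedAndFailed = complement(tested, testedAndPassed) // ['16']
    const testedAndSupported = intersect(tested, supported) // ['18', '20', '21']
    const failedAndSupported = intersect(testedAndSupported, testedAndFailed) // [] => no error thrown
    const passedAndUnsupported = complement(testedAndPassed, supported) // [] => nothing extra to note
    const failedAndUnsupported = complement(testedAndFailed, supported) // ['16'] => noted, becomes optional
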
/source/website.js:
--------------------------------------------------------------------------------
1 | // builtin
2 | import * as pathUtil from 'node:path'
3 |
4 | // local
5 | import { pwd } from './data.js'
6 | import { parse } from './fs.js'
7 |
8 | export function getVercelName(vercelConfig) {
9 | return vercelConfig.name || null
10 | }
11 |
12 | export function parseVercelAliases(alias) {
13 | if (alias) {
14 | return Array.isArray(alias) ? alias : alias.split(/[,\s]+/)
15 | }
16 | return null
17 | }
18 |
19 | export function getVercelAliases(vercelConfig) {
20 | return parseVercelAliases(vercelConfig.alias) || []
21 | }
22 |
23 | export async function readWebsite(state) {
24 | const { packageData } = state
25 | state.vercelConfig = Object.assign(
26 | {},
27 | packageData.now || {},
28 | packageData.vercel || {},
29 | (await parse(pathUtil.resolve(pwd, 'now.json'))) || {},
30 | (await parse(pathUtil.resolve(pwd, 'vercel.json'))) || {},
31 | )
32 | }
33 |
34 | export async function updateWebsite(state) {
35 | const { answers, vercelConfig } = state
36 | if (!vercelConfig) {
37 | throw new Error('updateWebsite was called before readWebsite')
38 | }
39 |
40 | // add website deployment strategies
41 | if (answers.vercelWebsite) {
42 | // trim version 1 fields
43 | if (vercelConfig.version !== 2) {
44 | delete vercelConfig.type
45 | delete vercelConfig.public
46 | delete vercelConfig.files
47 | delete vercelConfig.static
48 | }
49 |
50 | // add the versions we know
51 | vercelConfig.version = 2
52 | vercelConfig.name = answers.vercelName
53 | vercelConfig.alias = parseVercelAliases(answers.vercelAliases)
54 |
55 | // next.js builder
56 | if (answers.website.includes('next')) {
57 | // remove old routes as they are no longer needed now that a public directory exists
58 | if (vercelConfig.routes)
59 | vercelConfig.routes = vercelConfig.routes.filter(
60 | (route) =>
61 | ['/favicon.ico', '/robots.txt'].includes(route.src) === false,
62 | )
63 | // delete old format
64 | delete vercelConfig.build
65 | }
66 |
67 | // static builder
68 | if (answers.staticWebsite) {
69 | if (!vercelConfig.builds)
70 | vercelConfig.builds = [
71 | { src: `${answers.staticDirectory}/**`, use: '@vercel/static' },
72 | ]
73 | }
74 | }
75 | }
76 |
--------------------------------------------------------------------------------
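
A small usage sketch of the Vercel helpers above (illustrative inputs only). Note that readWebsite merges configuration with Object.assign, so later sources win: vercel.json overrides now.json, which overrides the package.json now/vercel fields.

    import { parseVercelAliases, getVercelAliases, getVercelName } from './website.js'

    parseVercelAliases('example.com, www.example.com') // ['example.com', 'www.example.com']
    parseVercelAliases(['example.com']) // ['example.com'] (already an array, returned as-is)
    getVercelAliases({}) // [] (no alias configured)
    getVercelName({ name: 'my-site' }) // 'my-site'
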