├── .clippy.toml ├── .cspell.json ├── .deny.toml ├── .editorconfig ├── .git-blame-ignore-revs ├── .gitattributes ├── .github ├── .cspell │ ├── project-dictionary.txt │ └── rust-dependencies.txt ├── dependabot.yml └── workflows │ ├── ci.yml │ └── release.yml ├── .gitignore ├── .markdownlint-cli2.yaml ├── .rustfmt.toml ├── .shellcheckrc ├── .taplo.toml ├── CHANGELOG.md ├── Cargo.toml ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── src ├── error.rs └── lib.rs ├── tests └── test.rs └── tools ├── .tidy-check-license-headers ├── publish.sh └── tidy.sh /.clippy.toml: -------------------------------------------------------------------------------- 1 | # Clippy configuration 2 | # https://doc.rust-lang.org/nightly/clippy/lint_configuration.html 3 | 4 | allow-private-module-inception = true 5 | avoid-breaking-exported-api = false 6 | disallowed-names = [] 7 | disallowed-macros = [ 8 | { path = "std::dbg", reason = "it is okay to use during development, but please do not include it in main branch" }, 9 | ] 10 | disallowed-methods = [ 11 | ] 12 | disallowed-types = [ 13 | ] 14 | -------------------------------------------------------------------------------- /.cspell.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2", 3 | "gitignoreRoot": ".", 4 | "useGitignore": true, 5 | "dictionaryDefinitions": [ 6 | { 7 | "name": "organization-dictionary", 8 | "path": "https://raw.githubusercontent.com/taiki-e/github-actions/HEAD/.github/.cspell/organization-dictionary.txt", 9 | "addWords": true 10 | }, 11 | { 12 | "name": "project-dictionary", 13 | "path": "./.github/.cspell/project-dictionary.txt", 14 | "addWords": true 15 | }, 16 | { 17 | "name": "rust-dependencies", 18 | "path": "./.github/.cspell/rust-dependencies.txt", 19 | "addWords": true 20 | } 21 | ], 22 | "dictionaries": [ 23 | "organization-dictionary", 24 | "project-dictionary", 25 | "rust-dependencies" 26 | ], 27 | "ignoreRegExpList": [ 28 | // Copyright notice 29 | "Copyright .*", 30 | "SPDX-(File|Snippet)CopyrightText: .*", 31 | // GHA actions/workflows 32 | "uses: .+@[\\w_.-]+", 33 | // GHA context (repo name, owner name, etc.) 34 | "github.[\\w_.-]+ (=|!)= '[^']+'", 35 | // GH username 36 | "( |\\[)@[\\w_-]+", 37 | // Git config username 38 | "git config( --[^ ]+)? 
user.name .*", 39 | // Username in TODO|FIXME comment 40 | "(TODO|FIXME)\\([\\w_., -]+\\)", 41 | // Cargo.toml authors 42 | "authors *= *\\[[^\\]]*\\]", 43 | "\"[^\"]* <[\\w_.+-]+@[\\w.-]+>\"" 44 | ], 45 | "languageSettings": [ 46 | { 47 | "languageId": ["*"], 48 | "dictionaries": ["bash", "cpp-refined", "rust"] 49 | } 50 | ], 51 | "ignorePaths": [] 52 | } 53 | -------------------------------------------------------------------------------- /.deny.toml: -------------------------------------------------------------------------------- 1 | # https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html 2 | [advisories] 3 | yanked = "deny" 4 | git-fetch-with-cli = true 5 | ignore = [ 6 | ] 7 | 8 | # https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html 9 | [bans] 10 | multiple-versions = "warn" 11 | wildcards = "deny" 12 | allow-wildcard-paths = true 13 | build.executables = "deny" 14 | build.interpreted = "deny" 15 | build.include-dependencies = true 16 | build.include-workspace = false # covered by tools/tidy.sh 17 | build.include-archives = true 18 | build.allow-build-scripts = [ 19 | { name = "proc-macro2" }, 20 | ] 21 | build.bypass = [ 22 | ] 23 | 24 | # https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html 25 | [licenses] 26 | unused-allowed-license = "deny" 27 | private.ignore = true 28 | allow = [ 29 | "Apache-2.0", 30 | "MIT", 31 | "Unicode-3.0", # unicode-ident 32 | ] 33 | 34 | # https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html 35 | [sources] 36 | unknown-registry = "deny" 37 | unknown-git = "deny" 38 | allow-git = [ 39 | ] 40 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # EditorConfig configuration 2 | # https://editorconfig.org 3 | 4 | root = true 5 | 6 | [*] 7 | charset = utf-8 8 | end_of_line = lf 9 | indent_size = 4 10 | indent_style = space 11 | insert_final_newline = true 12 | trim_trailing_whitespace = true 13 | 14 | [*.{css,html,json,md,rb,sh,yml,yaml}] 15 | indent_size = 2 16 | 17 | [*.{js,yml,yaml}] 18 | quote_type = single 19 | 20 | [*.sh] 21 | # https://google.github.io/styleguide/shellguide.html#s5.3-pipelines 22 | binary_next_line = true 23 | # https://google.github.io/styleguide/shellguide.html#s5.5-case-statement 24 | switch_case_indent = true 25 | -------------------------------------------------------------------------------- /.git-blame-ignore-revs: -------------------------------------------------------------------------------- 1 | # Change indent size of shell script files to match scripts in CI config 2 | c6c7771eb7ea4f48ebda79a3c7a423b34af1f97c 3 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | * text=auto eol=lf 2 | tools/tidy.sh linguist-detectable=false 3 | .github/.cspell/rust-dependencies.txt linguist-generated 4 | -------------------------------------------------------------------------------- /.github/.cspell/project-dictionary.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/taiki-e/negative-impl/028725f920bd684751749afd525d04e733a3b60b/.github/.cspell/project-dictionary.txt -------------------------------------------------------------------------------- /.github/.cspell/rust-dependencies.txt: 
-------------------------------------------------------------------------------- 1 | // This file is @generated by tidy.sh. 2 | // It is not intended for manual editing. 3 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: cargo 4 | directory: / 5 | schedule: 6 | interval: daily 7 | commit-message: 8 | prefix: '' 9 | labels: [] 10 | - package-ecosystem: github-actions 11 | directory: / 12 | schedule: 13 | interval: daily 14 | commit-message: 15 | prefix: '' 16 | labels: [] 17 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | pull_request: 8 | push: 9 | branches: 10 | - main 11 | - dev 12 | schedule: 13 | - cron: '0 2 * * *' 14 | workflow_dispatch: 15 | 16 | env: 17 | CARGO_INCREMENTAL: 0 18 | CARGO_NET_GIT_FETCH_WITH_CLI: true 19 | CARGO_NET_RETRY: 10 20 | CARGO_TERM_COLOR: always 21 | RUST_BACKTRACE: 1 22 | RUSTDOCFLAGS: -D warnings 23 | RUSTFLAGS: -D warnings 24 | RUSTUP_MAX_RETRIES: 10 25 | 26 | defaults: 27 | run: 28 | shell: bash --noprofile --norc -CeEuxo pipefail {0} 29 | 30 | concurrency: 31 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} 32 | cancel-in-progress: true 33 | 34 | jobs: 35 | miri: 36 | uses: taiki-e/github-actions/.github/workflows/miri.yml@main 37 | msrv: 38 | uses: taiki-e/github-actions/.github/workflows/msrv.yml@main 39 | test: 40 | uses: taiki-e/github-actions/.github/workflows/test.yml@main 41 | with: 42 | rust: 1.56,stable,beta,nightly 43 | doc-nightly-only: true 44 | tidy: 45 | uses: taiki-e/github-actions/.github/workflows/tidy.yml@main 46 | permissions: 47 | contents: read 48 | pull-requests: write # for gh pr edit --add-assignee 49 | repository-projects: read # for gh pr edit --add-assignee 50 | secrets: inherit 51 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | push: 8 | tags: 9 | - v[0-9]+.* 10 | 11 | defaults: 12 | run: 13 | shell: bash --noprofile --norc -CeEuxo pipefail {0} 14 | 15 | jobs: 16 | create-release: 17 | if: github.repository_owner == 'taiki-e' 18 | uses: taiki-e/github-actions/.github/workflows/create-release.yml@main 19 | permissions: 20 | contents: write 21 | secrets: inherit 22 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | Cargo.lock 3 | 4 | # For platform and editor specific settings, it is recommended to add to 5 | # a global .gitignore file. 
6 | # Refs: https://docs.github.com/en/github/using-git/ignoring-files#configuring-ignored-files-for-all-repositories-on-your-computer 7 | -------------------------------------------------------------------------------- /.markdownlint-cli2.yaml: -------------------------------------------------------------------------------- 1 | # https://github.com/DavidAnson/markdownlint/blob/HEAD/doc/Rules.md 2 | config: 3 | line-length: false # MD013 4 | no-duplicate-heading: false # MD024 5 | no-blanks-blockquote: false # MD028 (this warns valid GFM alerts usage) 6 | no-inline-html: false # MD033 7 | no-emphasis-as-heading: false # MD036 8 | 9 | # https://github.com/DavidAnson/markdownlint-cli2#markdownlint-cli2jsonc 10 | noBanner: true 11 | noProgress: true 12 | -------------------------------------------------------------------------------- /.rustfmt.toml: -------------------------------------------------------------------------------- 1 | # Rustfmt configuration 2 | # https://github.com/rust-lang/rustfmt/blob/HEAD/Configurations.md 3 | 4 | # Rustfmt cannot format long lines inside macros, but this option detects this. 5 | # This is unstable (tracking issue: https://github.com/rust-lang/rustfmt/issues/3391) 6 | error_on_line_overflow = true 7 | 8 | # Override the default formatting style. 9 | # See https://internals.rust-lang.org/t/running-rustfmt-on-rust-lang-rust-and-other-rust-lang-repositories/8732/81. 10 | use_small_heuristics = "Max" 11 | # This is unstable (tracking issue: https://github.com/rust-lang/rustfmt/issues/3370) 12 | overflow_delimited_expr = true 13 | # This is unstable (tracking issue: https://github.com/rust-lang/rustfmt/issues/4991). 14 | imports_granularity = "Crate" 15 | # This is unstable (tracking issue: https://github.com/rust-lang/rustfmt/issues/5083). 16 | group_imports = "StdExternalCrate" 17 | 18 | # Apply rustfmt to more places. 19 | # This is unstable (tracking issue: https://github.com/rust-lang/rustfmt/issues/3348). 20 | format_code_in_doc_comments = true 21 | 22 | # Automatically fix deprecated style. 23 | use_field_init_shorthand = true 24 | use_try_shorthand = true 25 | 26 | # Set the default settings again to always apply the proper formatting without 27 | # being affected by the editor settings. 
28 | edition = "2021" 29 | style_edition = "2024" 30 | hard_tabs = false 31 | newline_style = "Unix" 32 | tab_spaces = 4 33 | -------------------------------------------------------------------------------- /.shellcheckrc: -------------------------------------------------------------------------------- 1 | # ShellCheck configuration 2 | # https://github.com/koalaman/shellcheck/blob/HEAD/shellcheck.1.md#rc-files 3 | 4 | # See also: 5 | # https://github.com/koalaman/shellcheck/wiki/Optional 6 | # https://google.github.io/styleguide/shellguide.html 7 | 8 | # https://github.com/koalaman/shellcheck/wiki/SC2249 9 | # enable=add-default-case 10 | 11 | # https://github.com/koalaman/shellcheck/wiki/SC2244 12 | enable=avoid-nullary-conditions 13 | 14 | # https://github.com/koalaman/shellcheck/wiki/SC2312 15 | # enable=check-extra-masked-returns 16 | 17 | # https://github.com/koalaman/shellcheck/wiki/SC2310 18 | # https://github.com/koalaman/shellcheck/wiki/SC2311 19 | # enable=check-set-e-suppressed 20 | 21 | # enable=check-unassigned-uppercase 22 | 23 | # https://github.com/koalaman/shellcheck/wiki/SC2230 24 | enable=deprecate-which 25 | 26 | # https://github.com/koalaman/shellcheck/wiki/SC2248 27 | enable=quote-safe-variables 28 | 29 | # https://github.com/koalaman/shellcheck/wiki/SC2292 30 | # https://google.github.io/styleguide/shellguide.html#s6.3-tests 31 | enable=require-double-brackets 32 | 33 | # https://github.com/koalaman/shellcheck/wiki/SC2250 34 | # https://google.github.io/styleguide/shellguide.html#s5.6-variable-expansion 35 | enable=require-variable-braces 36 | -------------------------------------------------------------------------------- /.taplo.toml: -------------------------------------------------------------------------------- 1 | # Taplo configuration 2 | # https://taplo.tamasfe.dev/configuration/formatter-options.html 3 | 4 | [formatting] 5 | align_comments = false 6 | allowed_blank_lines = 1 7 | array_auto_collapse = false 8 | array_auto_expand = false 9 | indent_string = " " 10 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | All notable changes to this project will be documented in this file. 4 | 5 | This project adheres to [Semantic Versioning](https://semver.org). 6 | 7 | Releases may be yanked if there is a security bug, a soundness bug, or a regression. 8 | 9 |  12 | 13 | ## [Unreleased] 14 | 15 | ## [0.1.6] - 2024-08-23 16 | 17 | - Disable `derive` and `clone-impls` features of `syn` dependency. 18 | 19 | ## [0.1.5] - 2024-04-13 20 | 21 | - Diagnostic improvements. 22 | 23 | ## [0.1.4] - 2023-06-29 24 | 25 | - Fix build error from dependency when built with `-Z minimal-versions`. 26 | 27 | ## [0.1.3] - 2023-03-26 28 | 29 | - Update `syn` dependency to 2. This increases the minimum supported Rust version from Rust 1.37 to Rust 1.56. 30 | 31 | ## [0.1.2] - 2022-02-05 32 | 33 | - Support `UnwindSafe` and `RefUnwindSafe` in no-std at Rust 1.56+. ([#3](https://github.com/taiki-e/negative-impl/pull/3)) 34 | 35 | ## [0.1.1] - 2021-11-10 36 | 37 | - Suppress `clippy::non_send_fields_in_send_ty` lint in generated code.
([#2](https://github.com/taiki-e/negative-impl/pull/2)) 38 | 39 | ## [0.1.0] - 2021-03-27 40 | 41 | Initial release 42 | 43 | [Unreleased]: https://github.com/taiki-e/negative-impl/compare/v0.1.6...HEAD 44 | [0.1.6]: https://github.com/taiki-e/negative-impl/compare/v0.1.5...v0.1.6 45 | [0.1.5]: https://github.com/taiki-e/negative-impl/compare/v0.1.4...v0.1.5 46 | [0.1.4]: https://github.com/taiki-e/negative-impl/compare/v0.1.3...v0.1.4 47 | [0.1.3]: https://github.com/taiki-e/negative-impl/compare/v0.1.2...v0.1.3 48 | [0.1.2]: https://github.com/taiki-e/negative-impl/compare/v0.1.1...v0.1.2 49 | [0.1.1]: https://github.com/taiki-e/negative-impl/compare/v0.1.0...v0.1.1 50 | [0.1.0]: https://github.com/taiki-e/negative-impl/releases/tag/v0.1.0 51 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "negative-impl" 3 | version = "0.1.6" #publish:version 4 | edition = "2021" 5 | rust-version = "1.56" # For syn 6 | license = "Apache-2.0 OR MIT" 7 | repository = "https://github.com/taiki-e/negative-impl" 8 | keywords = ["macros"] 9 | categories = ["no-std", "no-std::no-alloc", "rust-patterns"] 10 | exclude = ["/.*", "/tools"] 11 | description = """ 12 | Negative trait implementations on stable Rust. 13 | """ 14 | 15 | [package.metadata.docs.rs] 16 | targets = ["x86_64-unknown-linux-gnu"] 17 | 18 | [lib] 19 | proc-macro = true 20 | 21 | [dependencies] 22 | proc-macro2 = "1.0.60" 23 | quote = "1.0.25" 24 | syn = { version = "2", default-features = false, features = ["parsing", "printing", "proc-macro", "full"] } 25 | 26 | [dev-dependencies] 27 | static_assertions = "1" 28 | 29 | [lints] 30 | workspace = true 31 | 32 | [workspace] 33 | resolver = "2" 34 | 35 | # This table is shared by projects under github.com/taiki-e. 36 | # Except for unexpected_cfgs.check-cfg, it is not intended for manual editing.
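# Crates in this workspace opt in to it via a `[lints]` table with `workspace = true` (see the package section above).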
37 | [workspace.lints.rust] 38 | deprecated_safe = "warn" 39 | improper_ctypes = "warn" 40 | improper_ctypes_definitions = "warn" 41 | non_ascii_idents = "warn" 42 | rust_2018_idioms = "warn" 43 | single_use_lifetimes = "warn" 44 | unexpected_cfgs = { level = "warn", check-cfg = [ 45 | ] } 46 | unnameable_types = "warn" 47 | unreachable_pub = "warn" 48 | # unsafe_op_in_unsafe_fn = "warn" # Set at crate-level instead since https://github.com/rust-lang/rust/pull/100081 merged in Rust 1.65 is not available on MSRV 49 | [workspace.lints.clippy] 50 | all = "warn" # Downgrade deny-by-default lints 51 | pedantic = "warn" 52 | as_ptr_cast_mut = "warn" 53 | as_underscore = "warn" 54 | default_union_representation = "warn" 55 | inline_asm_x86_att_syntax = "warn" 56 | trailing_empty_array = "warn" 57 | transmute_undefined_repr = "warn" 58 | undocumented_unsafe_blocks = "warn" 59 | unused_trait_names = "warn" 60 | # Suppress buggy or noisy clippy lints 61 | bool_assert_comparison = { level = "allow", priority = 1 } 62 | borrow_as_ptr = { level = "allow", priority = 1 } # https://github.com/rust-lang/rust-clippy/issues/8286 63 | cast_lossless = { level = "allow", priority = 1 } # https://godbolt.org/z/Pv6vbGG6E 64 | declare_interior_mutable_const = { level = "allow", priority = 1 } # https://github.com/rust-lang/rust-clippy/issues/7665 65 | doc_markdown = { level = "allow", priority = 1 } 66 | float_cmp = { level = "allow", priority = 1 } # https://github.com/rust-lang/rust-clippy/issues/7725 67 | incompatible_msrv = { level = "allow", priority = 1 } # buggy: doesn't consider cfg, https://github.com/rust-lang/rust-clippy/issues/12280, https://github.com/rust-lang/rust-clippy/issues/12257#issuecomment-2093667187 68 | lint_groups_priority = { level = "allow", priority = 1 } # https://github.com/rust-lang/rust-clippy/issues/12920 69 | manual_assert = { level = "allow", priority = 1 } 70 | manual_range_contains = { level = "allow", priority = 1 } # https://github.com/rust-lang/rust-clippy/issues/6455#issuecomment-1225966395 71 | missing_errors_doc = { level = "allow", priority = 1 } 72 | module_name_repetitions = { level = "allow", priority = 1 } # buggy: https://github.com/rust-lang/rust-clippy/issues?q=is%3Aissue+is%3Aopen+module_name_repetitions 73 | naive_bytecount = { level = "allow", priority = 1 } 74 | nonminimal_bool = { level = "allow", priority = 1 } # buggy: https://github.com/rust-lang/rust-clippy/issues?q=is%3Aissue+is%3Aopen+nonminimal_bool 75 | range_plus_one = { level = "allow", priority = 1 } # buggy: https://github.com/rust-lang/rust-clippy/issues?q=is%3Aissue+is%3Aopen+range_plus_one 76 | similar_names = { level = "allow", priority = 1 } 77 | single_match = { level = "allow", priority = 1 } 78 | single_match_else = { level = "allow", priority = 1 } 79 | struct_excessive_bools = { level = "allow", priority = 1 } 80 | struct_field_names = { level = "allow", priority = 1 } 81 | too_many_arguments = { level = "allow", priority = 1 } 82 | too_many_lines = { level = "allow", priority = 1 } 83 | type_complexity = { level = "allow", priority = 1 } 84 | unreadable_literal = { level = "allow", priority = 1 } 85 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 
9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 
176 | 177 | END OF TERMS AND CONDITIONS 178 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | Permission is hereby granted, free of charge, to any 2 | person obtaining a copy of this software and associated 3 | documentation files (the "Software"), to deal in the 4 | Software without restriction, including without 5 | limitation the rights to use, copy, modify, merge, 6 | publish, distribute, sublicense, and/or sell copies of 7 | the Software, and to permit persons to whom the Software 8 | is furnished to do so, subject to the following 9 | conditions: 10 | 11 | The above copyright notice and this permission notice 12 | shall be included in all copies or substantial portions 13 | of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 16 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 17 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 18 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT 19 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 20 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 21 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 22 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 23 | DEALINGS IN THE SOFTWARE. 24 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # negative-impl 2 | 3 | [![crates.io](https://img.shields.io/crates/v/negative-impl?style=flat-square&logo=rust)](https://crates.io/crates/negative-impl) 4 | [![docs.rs](https://img.shields.io/badge/docs.rs-negative--impl-blue?style=flat-square&logo=docs.rs)](https://docs.rs/negative-impl) 5 | [![license](https://img.shields.io/badge/license-Apache--2.0_OR_MIT-blue?style=flat-square)](#license) 6 | [![msrv](https://img.shields.io/badge/msrv-1.56-blue?style=flat-square&logo=rust)](https://www.rust-lang.org) 7 | [![github actions](https://img.shields.io/github/actions/workflow/status/taiki-e/negative-impl/ci.yml?branch=main&style=flat-square&logo=github)](https://github.com/taiki-e/negative-impl/actions) 8 | 9 | 10 | 11 | Negative trait implementations on stable Rust. 12 | 13 | This crate emulates the [unstable `negative_impls` feature](https://doc.rust-lang.org/nightly/unstable-book/language-features/negative-impls.html) 14 | by [generating a trait implementation with a condition that will never be true](https://github.com/taiki-e/negative-impl/issues/6#issuecomment-1669714453). 15 | 16 | ## Usage 17 | 18 | Add this to your `Cargo.toml`: 19 | 20 | ```toml 21 | [dependencies] 22 | negative-impl = "0.1" 23 | ``` 24 | 25 | ## Examples 26 | 27 | ```rust 28 | use negative_impl::negative_impl; 29 | 30 | pub struct Type {} 31 | 32 | #[negative_impl] 33 | impl !Send for Type {} 34 | #[negative_impl] 35 | impl !Sync for Type {} 36 | ``` 37 | 38 | ## Supported traits 39 | 40 | Currently this crate only supports [auto traits](https://doc.rust-lang.org/reference/special-types-and-traits.html#auto-traits). 
41 | 42 | - [`Send`](https://doc.rust-lang.org/std/marker/trait.Send.html) 43 | - [`Sync`](https://doc.rust-lang.org/std/marker/trait.Sync.html) 44 | - [`Unpin`](https://doc.rust-lang.org/std/marker/trait.Unpin.html) 45 | - [`UnwindSafe`](https://doc.rust-lang.org/std/panic/trait.UnwindSafe.html) 46 | - [`RefUnwindSafe`](https://doc.rust-lang.org/std/panic/trait.RefUnwindSafe.html) 47 | 48 | ## Limitations 49 | 50 | ### Conflicting implementations 51 | 52 | The following code cannot compile due to `impl Trait for T` and 53 | `impl Trait for Type` conflict. 54 | 55 | 56 | ```rust 57 | use negative_impl::negative_impl; 58 | 59 | pub struct Type {} 60 | 61 | #[negative_impl] 62 | impl !Send for Type {} 63 | 64 | trait Trait {} 65 | 66 | impl Trait for T {} 67 | impl Trait for Type {} 68 | ``` 69 | 70 | ```text 71 | error[E0119]: conflicting implementations of trait `Trait` for type `Type`: 72 | --> src/lib.rs:60:1 73 | | 74 | 14 | impl Trait for T {} 75 | | ------------------------- first implementation here 76 | 15 | impl Trait for Type {} 77 | | ^^^^^^^^^^^^^^^^^^^ conflicting implementation for `Type` 78 | ``` 79 | 80 | The above code can be compiled using the unstable `negative_impls` feature. 81 | 82 | ```rust 83 | #![feature(negative_impls)] 84 | 85 | pub struct Type {} 86 | 87 | impl !Send for Type {} 88 | 89 | trait Trait {} 90 | 91 | impl Trait for T {} 92 | impl Trait for Type {} 93 | ``` 94 | 95 | 96 | 97 | ## License 98 | 99 | Licensed under either of [Apache License, Version 2.0](LICENSE-APACHE) or 100 | [MIT license](LICENSE-MIT) at your option. 101 | 102 | Unless you explicitly state otherwise, any contribution intentionally submitted 103 | for inclusion in the work by you, as defined in the Apache-2.0 license, shall 104 | be dual licensed as above, without any additional terms or conditions. 105 | -------------------------------------------------------------------------------- /src/error.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 OR MIT 2 | 3 | macro_rules! format_err { 4 | ($span:expr, $msg:expr $(,)?) => { 5 | syn::Error::new_spanned(&$span as &dyn quote::ToTokens, &$msg as &dyn std::fmt::Display) 6 | }; 7 | ($span:expr, $($tt:tt)*) => { 8 | format_err!($span, format!($($tt)*)) 9 | }; 10 | } 11 | 12 | macro_rules! bail { 13 | ($($tt:tt)*) => { 14 | return Err(format_err!($($tt)*)) 15 | }; 16 | } 17 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 OR MIT 2 | 3 | /*! 4 | 6 | 7 | 8 | Negative trait implementations on stable Rust. 9 | 10 | This crate emulates the [unstable `negative_impls` feature](https://doc.rust-lang.org/nightly/unstable-book/language-features/negative-impls.html) 11 | by [generating a trait implementation with a condition that will never be true](https://github.com/taiki-e/negative-impl/issues/6#issuecomment-1669714453). 
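For a rough picture of the trick: the attribute rewrites the annotated impl into a positive impl whose `where` clause can never be satisfied. A simplified sketch of what `#[negative_impl] impl !Send for Type {}` turns into (identifier names are illustrative; the exact output comes from this crate's proc-macro code):

```
pub struct Type {}

const _: () = {
    // A wrapper that is `Send` only when `T` is `Send`.
    pub struct Wrapper<'a, T: ?Sized>(::core::marker::PhantomData<&'a ()>, T);
    unsafe impl<T: ?Sized> Send for Wrapper<'_, T> where T: Send {}

    // `*const ()` is never `Send`, so this bound never holds and the impl never
    // applies; mentioning `'a` keeps rustc from rejecting it as a trivial bound.
    unsafe impl<'a> Send for Type where Wrapper<'a, *const ()>: Send {}
};
```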
12 | 13 | ## Usage 14 | 15 | Add this to your `Cargo.toml`: 16 | 17 | ```toml 18 | [dependencies] 19 | negative-impl = "0.1" 20 | ``` 21 | 22 | ## Examples 23 | 24 | ``` 25 | use negative_impl::negative_impl; 26 | 27 | pub struct Type {} 28 | 29 | #[negative_impl] 30 | impl !Send for Type {} 31 | #[negative_impl] 32 | impl !Sync for Type {} 33 | ``` 34 | 35 | ## Supported traits 36 | 37 | Currently this crate only supports [auto traits](https://doc.rust-lang.org/reference/special-types-and-traits.html#auto-traits). 38 | 39 | - [`Send`](https://doc.rust-lang.org/std/marker/trait.Send.html) 40 | - [`Sync`](https://doc.rust-lang.org/std/marker/trait.Sync.html) 41 | - [`Unpin`](https://doc.rust-lang.org/std/marker/trait.Unpin.html) 42 | - [`UnwindSafe`](https://doc.rust-lang.org/std/panic/trait.UnwindSafe.html) 43 | - [`RefUnwindSafe`](https://doc.rust-lang.org/std/panic/trait.RefUnwindSafe.html) 44 | 45 | ## Limitations 46 | 47 | ### Conflicting implementations 48 | 49 | The following code cannot compile due to `impl Trait for T` and 50 | `impl Trait for Type` conflict. 51 | 52 | ```compile_fail,E0119 53 | use negative_impl::negative_impl; 54 | 55 | pub struct Type {} 56 | 57 | #[negative_impl] 58 | impl !Send for Type {} 59 | 60 | trait Trait {} 61 | 62 | impl Trait for T {} 63 | impl Trait for Type {} 64 | ``` 65 | 66 | ```text 67 | error[E0119]: conflicting implementations of trait `Trait` for type `Type`: 68 | --> src/lib.rs:60:1 69 | | 70 | 14 | impl Trait for T {} 71 | | ------------------------- first implementation here 72 | 15 | impl Trait for Type {} 73 | | ^^^^^^^^^^^^^^^^^^^ conflicting implementation for `Type` 74 | ``` 75 | 76 | The above code can be compiled using the unstable `negative_impls` feature. 77 | 78 | ``` 79 | #![feature(negative_impls)] 80 | 81 | pub struct Type {} 82 | 83 | impl !Send for Type {} 84 | 85 | trait Trait {} 86 | 87 | impl Trait for T {} 88 | impl Trait for Type {} 89 | ``` 90 | 91 | 92 | */ 93 | 94 | #![doc(test( 95 | no_crate_inject, 96 | attr( 97 | deny(warnings, rust_2018_idioms, single_use_lifetimes), 98 | allow(dead_code, unused_variables) 99 | ) 100 | ))] 101 | #![forbid(unsafe_code)] 102 | 103 | #[macro_use] 104 | mod error; 105 | 106 | use proc_macro::TokenStream; 107 | use proc_macro2::{Span, TokenStream as TokenStream2}; 108 | use quote::{format_ident, quote}; 109 | use syn::{ 110 | Error, Generics, ItemImpl, Lifetime, LifetimeParam, Path, Result, Token, Type, parse_quote, 111 | token, 112 | }; 113 | 114 | #[proc_macro_attribute] 115 | pub fn negative_impl(args: TokenStream, input: TokenStream) -> TokenStream { 116 | attribute(&args.into(), syn::parse_macro_input!(input)) 117 | .unwrap_or_else(Error::into_compile_error) 118 | .into() 119 | } 120 | 121 | fn attribute(args: &TokenStream2, mut impl_: ItemImpl) -> Result { 122 | parse_as_empty(args)?; 123 | 124 | let (not_token, trait_path, for_token) = match impl_.trait_.take() { 125 | Some((Some(not_token), path, for_token)) => (not_token, path, for_token), 126 | Some((_, path, _)) => bail!(path, "may only be used on negative trait impls"), 127 | None => bail!(impl_, "may only be used on negative trait impls"), 128 | }; 129 | // https://github.com/rust-lang/rust/issues/80481 130 | impl_.attrs.push(parse_quote!(#[doc(hidden)])); 131 | 132 | if impl_.unsafety.is_some() { 133 | bail!(quote!(#not_token #trait_path), "negative impls cannot be unsafe"); 134 | } 135 | if let Some(item) = impl_.items.first() { 136 | bail!(item, "negative impls cannot have any items"); 137 | } 138 | 139 | let TraitInfo 
{ trivial_bounds, unsafety, maybe_unsized, full_path } = 140 | TraitInfo::new(&trait_path)?; 141 | 142 | let wrapper_lifetime = Lifetime::new("'__wrapper", Span::call_site()); 143 | let wrapper_ident = format_ident!("__Wrapper"); 144 | 145 | let trivial_bounds = parse_quote!( 146 | #wrapper_ident<#wrapper_lifetime, #trivial_bounds>: #full_path 147 | ); 148 | impl_.generics.make_where_clause().predicates.push(trivial_bounds); 149 | 150 | insert_lifetime(&mut impl_.generics, wrapper_lifetime); 151 | 152 | let unsafety = if unsafety { Some(::default()) } else { None }; 153 | 154 | let sized = if maybe_unsized { Some(quote!(: ?Sized)) } else { None }; 155 | let wrapper = quote! { 156 | pub struct #wrapper_ident<'a, T #sized>(::core::marker::PhantomData<&'a ()>, T); 157 | #unsafety impl #full_path for #wrapper_ident<'_, T> 158 | where T: #full_path {} 159 | }; 160 | 161 | impl_.trait_ = Some((None, full_path, for_token)); 162 | impl_.unsafety = unsafety; 163 | Ok(quote! { 164 | const _: () = { 165 | #wrapper 166 | // This is false positive as we generate a trait implementation with a condition that will never be true. 167 | #[allow(clippy::non_send_fields_in_send_ty)] 168 | #impl_ 169 | }; 170 | }) 171 | } 172 | 173 | struct TraitInfo { 174 | trivial_bounds: Type, 175 | unsafety: bool, 176 | maybe_unsized: bool, 177 | full_path: Path, 178 | } 179 | 180 | impl TraitInfo { 181 | fn new(path: &Path) -> Result { 182 | match &*path.segments.last().unwrap().ident.to_string() { 183 | "Send" => Ok(Self { 184 | // https://github.com/rust-lang/rust/blob/1.37.0/src/libcore/marker.rs#L41 185 | // https://github.com/rust-lang/rust/blob/1.84.0/library/core/src/marker.rs#L88 186 | trivial_bounds: parse_quote!(*const ()), 187 | unsafety: true, 188 | maybe_unsized: true, 189 | full_path: parse_quote!(::core::marker::Send), 190 | }), 191 | "Sync" => Ok(Self { 192 | // https://github.com/rust-lang/rust/blob/1.37.0/src/libcore/marker.rs#L380 193 | // https://github.com/rust-lang/rust/blob/1.84.0/library/core/src/marker.rs#L613 194 | trivial_bounds: parse_quote!(*const ()), 195 | unsafety: true, 196 | maybe_unsized: true, 197 | full_path: parse_quote!(::core::marker::Sync), 198 | }), 199 | "Unpin" => Ok(Self { 200 | // https://github.com/rust-lang/rust/blob/1.37.0/src/libcore/marker.rs#L650 201 | // https://github.com/rust-lang/rust/blob/1.84.0/library/core/src/marker.rs#L936 202 | trivial_bounds: parse_quote!(::core::marker::PhantomPinned), 203 | unsafety: false, 204 | maybe_unsized: true, 205 | full_path: parse_quote!(::core::marker::Unpin), 206 | }), 207 | "UnwindSafe" => Ok(Self { 208 | // https://github.com/rust-lang/rust/blob/1.37.0/src/libstd/panic.rs#L203 209 | // https://github.com/rust-lang/rust/blob/1.84.0/library/core/src/panic/unwind_safe.rs#L181 210 | trivial_bounds: parse_quote!(&'static mut ()), 211 | unsafety: false, 212 | maybe_unsized: true, 213 | full_path: parse_quote!(::core::panic::UnwindSafe), 214 | }), 215 | "RefUnwindSafe" => Ok(Self { 216 | // https://github.com/rust-lang/rust/blob/1.37.0/src/libstd/panic.rs#L234 217 | // https://github.com/rust-lang/rust/blob/1.84.0/library/core/src/panic/unwind_safe.rs#L200 218 | trivial_bounds: parse_quote!(::core::cell::UnsafeCell<()>), 219 | unsafety: false, 220 | maybe_unsized: true, 221 | full_path: parse_quote!(::core::panic::RefUnwindSafe), 222 | }), 223 | _ => bail!(path, "non auto traits are not supported"), 224 | } 225 | } 226 | } 227 | 228 | /// Inserts a `lifetime` at position `0` of `generics.params`. 
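///
/// The inserted lifetime is only referenced from the generated `where` clause
/// (`__Wrapper<'__wrapper, ...>: Trait`); tying the never-satisfiable bound to a
/// generic parameter of the impl is what lets stable rustc accept it rather than
/// reject it as a trivial bound.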
229 | fn insert_lifetime(generics: &mut Generics, lifetime: Lifetime) { 230 | generics.lt_token.get_or_insert_with(token::Lt::default); 231 | generics.gt_token.get_or_insert_with(token::Gt::default); 232 | generics.params.insert(0, LifetimeParam::new(lifetime).into()); 233 | } 234 | 235 | /// Checks if `tokens` is an empty `TokenStream`. 236 | /// 237 | /// This is almost equivalent to `syn::parse2::()`, but produces 238 | /// a better error message and does not require ownership of `tokens`. 239 | fn parse_as_empty(tokens: &TokenStream2) -> Result<()> { 240 | if tokens.is_empty() { Ok(()) } else { bail!(tokens, "unexpected token: `{}`", tokens) } 241 | } 242 | -------------------------------------------------------------------------------- /tests/test.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 OR MIT 2 | 3 | #![allow(dead_code)] 4 | 5 | use std::panic::{RefUnwindSafe, UnwindSafe}; 6 | 7 | use negative_impl::negative_impl; 8 | use static_assertions::assert_not_impl_all as assert_not_impl; 9 | 10 | mod basic { 11 | use super::*; 12 | 13 | struct Foo(T); 14 | 15 | #[negative_impl] 16 | impl !Send for Foo {} 17 | assert_not_impl!(Foo<()>: Send); 18 | 19 | #[negative_impl] 20 | impl !Sync for Foo {} 21 | assert_not_impl!(Foo<()>: Sync); 22 | 23 | #[negative_impl] 24 | impl !Unpin for Foo {} 25 | assert_not_impl!(Foo<()>: Unpin); 26 | 27 | #[negative_impl] 28 | impl !UnwindSafe for Foo {} 29 | assert_not_impl!(Foo<()>: UnwindSafe); 30 | 31 | #[negative_impl] 32 | impl !RefUnwindSafe for Foo {} 33 | assert_not_impl!(Foo<()>: RefUnwindSafe); 34 | } 35 | -------------------------------------------------------------------------------- /tools/.tidy-check-license-headers: -------------------------------------------------------------------------------- 1 | git ls-files 2 | -------------------------------------------------------------------------------- /tools/publish.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # SPDX-License-Identifier: Apache-2.0 OR MIT 3 | set -CeEuo pipefail 4 | IFS=$'\n\t' 5 | trap -- 's=$?; printf >&2 "%s\n" "${0##*/}:${LINENO}: \`${BASH_COMMAND}\` exit with ${s}"; exit ${s}' ERR 6 | cd -- "$(dirname -- "$0")"/.. 7 | 8 | # Publish a new release. 9 | # 10 | # USAGE: 11 | # ./tools/publish.sh 12 | # 13 | # Note: This script requires the following tools: 14 | # - parse-changelog 15 | 16 | retry() { 17 | for i in {1..10}; do 18 | if "$@"; then 19 | return 0 20 | else 21 | sleep "${i}" 22 | fi 23 | done 24 | "$@" 25 | } 26 | bail() { 27 | printf >&2 'error: %s\n' "$*" 28 | exit 1 29 | } 30 | 31 | version="${1:?}" 32 | version="${version#v}" 33 | tag_prefix="v" 34 | tag="${tag_prefix}${version}" 35 | changelog="CHANGELOG.md" 36 | if [[ ! "${version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[0-9A-Za-z\.-]+)?(\+[0-9A-Za-z\.-]+)?$ ]]; then 37 | bail "invalid version format '${version}'" 38 | fi 39 | if [[ $# -gt 1 ]]; then 40 | bail "invalid argument '$2'" 41 | fi 42 | if { sed --help 2>&1 || true; } | grep -Eq -e '-i extension'; then 43 | in_place=(-i '') 44 | else 45 | in_place=(-i) 46 | fi 47 | 48 | # Make sure there is no uncommitted change. 49 | git diff --exit-code 50 | git diff --exit-code --staged 51 | 52 | # Make sure the same release has not been created in the past. 
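# (`gh release view` exits non-zero when the tag has no release yet, so the check below only trips for an already-published release.)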
53 | if gh release view "${tag}" &>/dev/null; then 54 | bail "tag '${tag}' has already been created and pushed" 55 | fi 56 | 57 | # Make sure that the release was created from an allowed branch. 58 | if ! git branch | grep -Eq '\* main$'; then 59 | bail "current branch is not 'main'" 60 | fi 61 | if ! git remote -v | grep -F origin | grep -Eq 'github\.com[:/]taiki-e/'; then 62 | bail "cannot publish a new release from fork repository" 63 | fi 64 | 65 | release_date=$(date -u '+%Y-%m-%d') 66 | tags=$(git --no-pager tag | { grep -E "^${tag_prefix}[0-9]+" || true; }) 67 | if [[ -n "${tags}" ]]; then 68 | # Make sure the same release does not exist in changelog. 69 | if grep -Eq "^## \\[${version//./\\.}\\]" "${changelog}"; then 70 | bail "release ${version} already exist in ${changelog}" 71 | fi 72 | if grep -Eq "^\\[${version//./\\.}\\]: " "${changelog}"; then 73 | bail "link to ${version} already exist in ${changelog}" 74 | fi 75 | # Update changelog. 76 | remote_url=$(grep -E '^\[Unreleased\]: https://' "${changelog}" | sed -E 's/^\[Unreleased\]: //; s/\.\.\.HEAD$//') 77 | prev_tag="${remote_url#*/compare/}" 78 | remote_url="${remote_url%/compare/*}" 79 | sed -E "${in_place[@]}" \ 80 | -e "s/^## \\[Unreleased\\]/## [Unreleased]\\n\\n## [${version}] - ${release_date}/" \ 81 | -e "s#^\[Unreleased\]: https://.*#[Unreleased]: ${remote_url}/compare/${tag}...HEAD\\n[${version}]: ${remote_url}/compare/${prev_tag}...${tag}#" "${changelog}" 82 | if ! grep -Eq "^## \\[${version//./\\.}\\] - ${release_date}$" "${changelog}"; then 83 | bail "failed to update ${changelog}" 84 | fi 85 | if ! grep -Eq "^\\[${version//./\\.}\\]: " "${changelog}"; then 86 | bail "failed to update ${changelog}" 87 | fi 88 | else 89 | # Make sure the release exists in changelog. 90 | if ! grep -Eq "^## \\[${version//./\\.}\\] - ${release_date}$" "${changelog}"; then 91 | bail "release ${version} does not exist in ${changelog} or has wrong release date" 92 | fi 93 | if ! grep -Eq "^\\[${version//./\\.}\\]: " "${changelog}"; then 94 | bail "link to ${version} does not exist in ${changelog}" 95 | fi 96 | fi 97 | 98 | # Make sure that a valid release note for this version exists. 99 | # https://github.com/taiki-e/parse-changelog 100 | changes=$(parse-changelog "${changelog}" "${version}") 101 | if [[ -z "${changes}" ]]; then 102 | bail "changelog for ${version} has no body" 103 | fi 104 | printf '============== CHANGELOG ==============\n' 105 | printf '%s\n' "${changes}" 106 | printf '=======================================\n' 107 | 108 | metadata=$(cargo metadata --format-version=1 --no-deps) 109 | prev_version='' 110 | docs=() 111 | for readme in $(git ls-files '*README.md'); do 112 | docs+=("${readme}") 113 | lib="$(dirname -- "${readme}")/src/lib.rs" 114 | if [[ -f "${lib}" ]]; then 115 | docs+=("${lib}") 116 | fi 117 | done 118 | changed_paths=("${changelog}" "${docs[@]}") 119 | # Publishing is unrestricted if null, and forbidden if an empty array. 120 | for pkg in $(jq -c '. as $metadata | .workspace_members[] as $id | $metadata.packages[] | select(.id == $id and .publish != [])' <<<"${metadata}"); do 121 | eval "$(jq -r '@sh "NAME=\(.name) ACTUAL_VERSION=\(.version) manifest_path=\(.manifest_path)"' <<<"${pkg}")" 122 | if [[ -z "${prev_version}" ]]; then 123 | prev_version="${ACTUAL_VERSION}" 124 | fi 125 | # Make sure that the version number of all publishable workspace members matches. 
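# (${prev_version} was captured from the first publishable member in this loop, so every later member must use the same version.)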
126 | if [[ "${ACTUAL_VERSION}" != "${prev_version}" ]]; then 127 | bail "publishable workspace members must be version '${prev_version}', but package '${NAME}' is version '${ACTUAL_VERSION}'" 128 | fi 129 | 130 | changed_paths+=("${manifest_path}") 131 | # Update version in Cargo.toml. 132 | if ! grep -Eq "^version = \"${prev_version}\" #publish:version" "${manifest_path}"; then 133 | bail "not found '#publish:version' in version in ${manifest_path}" 134 | fi 135 | sed -E "${in_place[@]}" "s/^version = \"${prev_version}\" #publish:version/version = \"${version}\" #publish:version/g" "${manifest_path}" 136 | # Update '=' requirement in Cargo.toml. 137 | for manifest in $(git ls-files '*Cargo.toml'); do 138 | if grep -Eq "^${NAME} = \\{ version = \"=${prev_version}\"" "${manifest}"; then 139 | sed -E "${in_place[@]}" "s/^${NAME} = \\{ version = \"=${prev_version}\"/${NAME} = { version = \"=${version}\"/g" "${manifest}" 140 | fi 141 | done 142 | # Update version in readme and lib.rs. 143 | for path in "${docs[@]}"; do 144 | # TODO: handle pre-release 145 | if [[ "${version}" == "0.0."* ]]; then 146 | # 0.0.x -> 0.0.y 147 | if grep -Eq "^${NAME} = \"${prev_version}\"" "${path}"; then 148 | sed -E "${in_place[@]}" "s/^${NAME} = \"${prev_version}\"/${NAME} = \"${version}\"/g" "${path}" 149 | fi 150 | if grep -Eq "^${NAME} = \\{ version = \"${prev_version}\"" "${path}"; then 151 | sed -E "${in_place[@]}" "s/^${NAME} = \\{ version = \"${prev_version}\"/${NAME} = { version = \"${version}\"/g" "${path}" 152 | fi 153 | elif [[ "${version}" == "0."* ]]; then 154 | prev_major_minor="${prev_version%.*}" 155 | major_minor="${version%.*}" 156 | if [[ "${prev_major_minor}" != "${major_minor}" ]]; then 157 | # 0.x -> 0.y 158 | # 0.x.* -> 0.y 159 | if grep -Eq "^${NAME} = \"${prev_major_minor}(\\.[0-9]+)?\"" "${path}"; then 160 | sed -E "${in_place[@]}" "s/^${NAME} = \"${prev_major_minor}(\\.[0-9]+)?\"/${NAME} = \"${major_minor}\"/g" "${path}" 161 | fi 162 | if grep -Eq "^${NAME} = \\{ version = \"${prev_major_minor}(\\.[0-9]+)?\"" "${path}"; then 163 | sed -E "${in_place[@]}" "s/^${NAME} = \\{ version = \"${prev_major_minor}(\\.[0-9]+)?\"/${NAME} = { version = \"${major_minor}\"/g" "${path}" 164 | fi 165 | fi 166 | else 167 | prev_major="${prev_version%%.*}" 168 | major="${version%%.*}" 169 | if [[ "${prev_major}" != "${major}" ]]; then 170 | # x -> y 171 | # x.* -> y 172 | # x.*.* -> y 173 | if grep -Eq "^${NAME} = \"${prev_major}(\\.[0-9]+(\\.[0-9]+)?)?\"" "${path}"; then 174 | sed -E "${in_place[@]}" "s/^${NAME} = \"${prev_major}(\\.[0-9]+(\\.[0-9]+)?)?\"/${NAME} = \"${major}\"/g" "${path}" 175 | fi 176 | if grep -Eq "^${NAME} = \\{ version = \"${prev_major}(\\.[0-9]+(\\.[0-9]+)?)?\"" "${path}"; then 177 | sed -E "${in_place[@]}" "s/^${NAME} = \\{ version = \"${prev_major}(\\.[0-9]+(\\.[0-9]+)?)?\"/${NAME} = { version = \"${major}\"/g" "${path}" 178 | fi 179 | fi 180 | fi 181 | done 182 | done 183 | 184 | if [[ -n "${tags}" ]]; then 185 | # Create a release commit. 
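# It stages everything collected in changed_paths: the changelog, the README/lib.rs version references, and the bumped Cargo.toml files.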
186 | ( 187 | set -x 188 | git add "${changed_paths[@]}" 189 | git commit -m "Release ${version}" 190 | ) 191 | fi 192 | 193 | set -x 194 | 195 | git tag "${tag}" 196 | retry git push origin refs/heads/main 197 | retry git push origin refs/tags/"${tag}" 198 | -------------------------------------------------------------------------------- /tools/tidy.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # SPDX-License-Identifier: Apache-2.0 OR MIT 3 | # shellcheck disable=SC2046 4 | set -CeEuo pipefail 5 | IFS=$'\n\t' 6 | trap -- 's=$?; printf >&2 "%s\n" "${0##*/}:${LINENO}: \`${BASH_COMMAND}\` exit with ${s}"; exit ${s}' ERR 7 | trap -- 'printf >&2 "%s\n" "${0##*/}: trapped SIGINT"; exit 1' SIGINT 8 | cd -- "$(dirname -- "$0")"/.. 9 | 10 | # USAGE: 11 | # ./tools/tidy.sh 12 | # 13 | # Note: This script requires the following tools: 14 | # - git 1.8+ 15 | # - jq 1.6+ 16 | # - npm (node 18+) 17 | # - python 3.6+ and pipx 18 | # - shfmt 19 | # - shellcheck 20 | # - cargo, rustfmt (if Rust code exists) 21 | # - clang-format (if C/C++/Protobuf code exists) 22 | # - parse-dockerfile (if Dockerfile exists) 23 | # 24 | # This script is shared by projects under github.com/taiki-e, so there may also 25 | # be checks for files not included in this repository, but they will be skipped 26 | # if the corresponding files do not exist. 27 | # It is not intended for manual editing. 28 | 29 | retry() { 30 | for i in {1..10}; do 31 | if "$@"; then 32 | return 0 33 | else 34 | sleep "${i}" 35 | fi 36 | done 37 | "$@" 38 | } 39 | error() { 40 | if [[ -n "${GITHUB_ACTIONS:-}" ]]; then 41 | printf '::error::%s\n' "$*" 42 | else 43 | printf >&2 'error: %s\n' "$*" 44 | fi 45 | should_fail=1 46 | } 47 | warn() { 48 | if [[ -n "${GITHUB_ACTIONS:-}" ]]; then 49 | printf '::warning::%s\n' "$*" 50 | else 51 | printf >&2 'warning: %s\n' "$*" 52 | fi 53 | } 54 | info() { 55 | printf >&2 'info: %s\n' "$*" 56 | } 57 | print_fenced() { 58 | printf '=======================================\n' 59 | printf '%s' "$*" 60 | printf '=======================================\n\n' 61 | } 62 | check_diff() { 63 | if [[ -n "${GITHUB_ACTIONS:-}" ]]; then 64 | if ! git -c color.ui=always --no-pager diff --exit-code "$@"; then 65 | should_fail=1 66 | fi 67 | elif [[ -n "${CI:-}" ]]; then 68 | if ! git --no-pager diff --exit-code "$@"; then 69 | should_fail=1 70 | fi 71 | else 72 | local res 73 | res=$(git --no-pager diff --exit-code --name-only "$@" || true) 74 | if [[ -n "${res}" ]]; then 75 | warn "please commit changes made by formatter/generator if exists on the following files" 76 | print_fenced "${res}"$'\n' 77 | should_fail=1 78 | fi 79 | fi 80 | } 81 | check_config() { 82 | if [[ ! -e "$1" ]]; then 83 | error "could not found $1 in the repository root${2:-}" 84 | fi 85 | } 86 | check_install() { 87 | for tool in "$@"; do 88 | if ! 
type -P "${tool}" >/dev/null; then 89 | if [[ "${tool}" == 'python3' ]]; then 90 | if type -P python >/dev/null; then 91 | continue 92 | fi 93 | fi 94 | error "'${tool}' is required to run this check" 95 | return 1 96 | fi 97 | done 98 | } 99 | check_unused() { 100 | local kind="$1" 101 | shift 102 | local res 103 | res=$(ls_files "$@") 104 | if [[ -n "${res}" ]]; then 105 | error "the following files are unused because there is no ${kind}; consider removing them" 106 | print_fenced "${res}"$'\n' 107 | fi 108 | } 109 | check_alt() { 110 | local recommended=$1 111 | local not_recommended=$2 112 | if [[ -n "$3" ]]; then 113 | error "please use ${recommended} instead of ${not_recommended} for consistency" 114 | print_fenced "$3"$'\n' 115 | fi 116 | } 117 | check_hidden() { 118 | local res 119 | for file in "$@"; do 120 | check_alt ".${file}" "${file}" "$(comm -23 <(ls_files "*${file}") <(ls_files "*.${file}"))" 121 | done 122 | } 123 | sed_rhs_escape() { 124 | sed 's/\\/\\\\/g; s/\&/\\\&/g; s/\//\\\//g' <<<"$1" 125 | } 126 | 127 | if [[ $# -gt 0 ]]; then 128 | cat </dev/null; then 137 | py_suffix=3 138 | fi 139 | yq() { 140 | pipx run yq "$@" 141 | } 142 | tomlq() { 143 | pipx run --spec yq tomlq "$@" 144 | } 145 | case "$(uname -s)" in 146 | Linux) 147 | if [[ "$(uname -o)" == 'Android' ]]; then 148 | ostype=android 149 | else 150 | ostype=linux 151 | fi 152 | ;; 153 | Darwin) ostype=macos ;; 154 | FreeBSD) ostype=freebsd ;; 155 | NetBSD) ostype=netbsd ;; 156 | OpenBSD) ostype=openbsd ;; 157 | DragonFly) ostype=dragonfly ;; 158 | SunOS) 159 | if [[ "$(/usr/bin/uname -o)" == 'illumos' ]]; then 160 | ostype=illumos 161 | else 162 | ostype=solaris 163 | # Solaris /usr/bin/* are not POSIX-compliant (e.g., grep has no -q, -E, -F), 164 | # and POSIX-compliant commands are in /usr/xpg{4,6,7}/bin. 165 | # https://docs.oracle.com/cd/E88353_01/html/E37853/xpg-7.html 166 | if [[ "${PATH}" != *'/usr/xpg4/bin'* ]]; then 167 | export PATH="/usr/xpg4/bin:${PATH}" 168 | fi 169 | # GNU/BSD grep/sed is required to run some checks, but most checks are okay with other POSIX grep/sed. 170 | # Solaris /usr/xpg4/bin/grep has -q, -E, -F, but no -o (non-POSIX). 171 | # Solaris /usr/xpg4/bin/sed has no -E (POSIX.1-2024) yet. 172 | for tool in sed grep; do 173 | if type -P "g${tool}" >/dev/null; then 174 | eval "${tool}() { g${tool} \"\$@\"; }" 175 | fi 176 | done 177 | fi 178 | ;; 179 | MINGW* | MSYS* | CYGWIN* | Windows_NT) 180 | ostype=windows 181 | if type -P jq >/dev/null; then 182 | # https://github.com/jqlang/jq/issues/1854 183 | _tmp=$(jq -r .a <<<'{}') 184 | if [[ "${_tmp}" != 'null' ]]; then 185 | _tmp=$(jq -b -r .a 2>/dev/null <<<'{}' || true) 186 | if [[ "${_tmp}" == 'null' ]]; then 187 | jq() { command jq -b "$@"; } 188 | else 189 | jq() { command jq "$@" | tr -d '\r'; } 190 | fi 191 | yq() { 192 | pipx run yq "$@" | tr -d '\r' 193 | } 194 | tomlq() { 195 | pipx run --spec yq tomlq "$@" | tr -d '\r' 196 | } 197 | fi 198 | fi 199 | ;; 200 | *) error "unrecognized os type '$(uname -s)' for \`\$(uname -s)\`" ;; 201 | esac 202 | 203 | check_install git 204 | exclude_from_ls_files=() 205 | # - `find` lists symlinks. `! ( -name -prune )` (.i.e., ignore ) are manually listed from .gitignore. 206 | # - `git submodule status` lists submodules. Use sed to remove the first character indicates status ( |+|-). 207 | # - `git ls-files --deleted` lists removed files. 208 | while IFS=$'\n' read -r line; do exclude_from_ls_files+=("${line}"); done < <({ 209 | find . \! \( -name .git -prune \) \! 
\( -name target -prune \) \! \( -name tmp -prune \) -type l | cut -c3- 210 | git submodule status | sed 's/^.//' | cut -d' ' -f2 211 | git ls-files --deleted 212 | } | LC_ALL=C sort -u) 213 | exclude_from_ls_files_no_symlink=() 214 | while IFS=$'\n' read -r line; do exclude_from_ls_files_no_symlink+=("${line}"); done < <({ 215 | git submodule status | sed 's/^.//' | cut -d' ' -f2 216 | git ls-files --deleted 217 | } | LC_ALL=C sort -u) 218 | ls_files() { 219 | if [[ "${1:-}" == '--include-symlink' ]]; then 220 | shift 221 | comm -23 <(git ls-files "$@" | LC_ALL=C sort) <(printf '%s\n' ${exclude_from_ls_files_no_symlink[@]+"${exclude_from_ls_files_no_symlink[@]}"}) 222 | else 223 | comm -23 <(git ls-files "$@" | LC_ALL=C sort) <(printf '%s\n' ${exclude_from_ls_files[@]+"${exclude_from_ls_files[@]}"}) 224 | fi 225 | } 226 | 227 | # Rust (if exists) 228 | if [[ -n "$(ls_files '*.rs')" ]]; then 229 | info "checking Rust code style" 230 | check_config .rustfmt.toml "; consider adding with reference to https://github.com/taiki-e/cargo-hack/blob/HEAD/.rustfmt.toml" 231 | check_config .clippy.toml "; consider adding with reference to https://github.com/taiki-e/cargo-hack/blob/HEAD/.clippy.toml" 232 | if check_install cargo jq python3 pipx; then 233 | # `cargo fmt` cannot recognize files not included in the current workspace and modules 234 | # defined inside macros, so run rustfmt directly. 235 | # We need to use nightly rustfmt because we use the unstable formatting options of rustfmt. 236 | rustc_version=$(rustc -vV | grep -E '^release:' | cut -d' ' -f2) 237 | if [[ "${rustc_version}" =~ nightly|dev ]] || ! type -P rustup >/dev/null; then 238 | if type -P rustup >/dev/null; then 239 | retry rustup component add rustfmt &>/dev/null 240 | fi 241 | info "running \`rustfmt \$(git ls-files '*.rs')\`" 242 | rustfmt $(ls_files '*.rs') 243 | else 244 | if type -P rustup >/dev/null; then 245 | retry rustup component add rustfmt --toolchain nightly &>/dev/null 246 | fi 247 | info "running \`rustfmt +nightly \$(git ls-files '*.rs')\`" 248 | rustfmt +nightly $(ls_files '*.rs') 249 | fi 250 | check_diff $(ls_files '*.rs') 251 | cast_without_turbofish=$(grep -Fn '.cast()' $(ls_files '*.rs') || true) 252 | if [[ -n "${cast_without_turbofish}" ]]; then 253 | error "please replace \`.cast()\` with \`.cast::<T>()\`:" 254 | printf '%s\n' "${cast_without_turbofish}" 255 | fi 256 | # Make sure that public Rust crates don't contain executables and binaries. 257 | executables='' 258 | binaries='' 259 | metadata=$(cargo metadata --format-version=1 --no-deps) 260 | root_manifest='' 261 | if [[ -f Cargo.toml ]]; then 262 | root_manifest=$(cargo locate-project --message-format=plain --manifest-path Cargo.toml) 263 | fi 264 | exclude='' 265 | has_public_crate='' 266 | has_root_crate='' 267 | for pkg in $(jq -c '. as $metadata | .workspace_members[] as $id | $metadata.packages[] | select(.id == $id)' <<<"${metadata}"); do 268 | eval "$(jq -r '@sh "publish=\(.publish) manifest_path=\(.manifest_path)"' <<<"${pkg}")" 269 | if [[ "$(tomlq -c '.lints' "${manifest_path}")" == 'null' ]]; then 270 | error "no [lints] table in ${manifest_path}; please add '[lints]' with 'workspace = true'" 271 | fi 272 | # Publishing is unrestricted if null, and forbidden if an empty array. 273 | if [[ -z "${publish}" ]]; then 274 | continue 275 | fi 276 | has_public_crate=1 277 | if [[ "${manifest_path}" == "${root_manifest}" ]]; then 278 | has_root_crate=1 279 | exclude=$(tomlq -r '.package.exclude[]' "${manifest_path}") 280 | if !
grep -Eq '^/\.\*$' <<<"${exclude}"; then 281 | error "top-level Cargo.toml of non-virtual workspace should have 'exclude' field with \"/.*\"" 282 | fi 283 | if [[ -e tools ]] && ! grep -Eq '^/tools$' <<<"${exclude}"; then 284 | error "top-level Cargo.toml of non-virtual workspace should have 'exclude' field with \"/tools\" if it exists" 285 | fi 286 | if [[ -e target-specs ]] && ! grep -Eq '^/target-specs$' <<<"${exclude}"; then 287 | error "top-level Cargo.toml of non-virtual workspace should have 'exclude' field with \"/target-specs\" if it exists" 288 | fi 289 | fi 290 | done 291 | if [[ -n "${has_public_crate}" ]]; then 292 | check_config .deny.toml "; consider adding with reference to https://github.com/taiki-e/cargo-hack/blob/HEAD/.deny.toml" 293 | info "checking public crates don't contain executables and binaries" 294 | for p in $(ls_files --include-symlink); do 295 | # Skip directories. 296 | if [[ -d "${p}" ]]; then 297 | continue 298 | fi 299 | # Top-level hidden files/directories and tools/* are excluded from crates.io (ensured by the above check). 300 | # TODO: fully respect exclude field in Cargo.toml. 301 | case "${p}" in 302 | .* | tools/* | target-specs/*) continue ;; 303 | */*) ;; 304 | *) 305 | # If there is no crate at root, executables at the repository root directory are always okay. 306 | if [[ -z "${has_root_crate}" ]]; then 307 | continue 308 | fi 309 | ;; 310 | esac 311 | if [[ -x "${p}" ]]; then 312 | executables+="${p}"$'\n' 313 | fi 314 | # Use `diff` instead of `file` because `file` treats an empty file as a binary. 315 | # https://unix.stackexchange.com/questions/275516/is-there-a-convenient-way-to-classify-files-as-binary-or-text#answer-402870 316 | if { diff .gitattributes "${p}" || true; } | grep -Eq '^Binary file'; then 317 | binaries+="${p}"$'\n' 318 | fi 319 | done 320 | if [[ -n "${executables}" ]]; then 321 | error "file-permissions-check failed: executables are only allowed to be present in directories that are excluded from crates.io" 322 | print_fenced "${executables}" 323 | fi 324 | if [[ -n "${binaries}" ]]; then 325 | error "file-permissions-check failed: binaries are only allowed to be present in directories that are excluded from crates.io" 326 | print_fenced "${binaries}" 327 | fi 328 | fi 329 | fi 330 | # Sync markdown to rustdoc. 331 | first=1 332 | for markdown in $(ls_files '*.md'); do 333 | markers=$(grep -En '^' "${markdown}" || true) 334 | # BSD wc's -l emits spaces before number. 335 | if [[ !
"$(LC_ALL=C wc -l <<<"${markers}")" =~ ^\ *2$ ]]; then 336 | if [[ -n "${markers}" ]]; then 337 | error "inconsistent '' marker found in ${markdown}" 338 | printf '%s\n' "${markers}" 339 | fi 340 | continue 341 | fi 342 | start_marker=$(head -n1 <<<"${markers}") 343 | end_marker=$(head -n2 <<<"${markers}" | tail -n1) 344 | if [[ "${start_marker}" == *"tidy:sync-markdown-to-rustdoc:end"* ]] || [[ "${end_marker}" == *"tidy:sync-markdown-to-rustdoc:start"* ]]; then 345 | error "inconsistent '' marker found in ${markdown}" 346 | printf '%s\n' "${markers}" 347 | continue 348 | fi 349 | if [[ -n "${first}" ]]; then 350 | first='' 351 | info "syncing markdown to rustdoc" 352 | fi 353 | lib="${start_marker#*:<\!-- tidy:sync-markdown-to-rustdoc:start:}" 354 | if [[ "${start_marker}" == "${lib}" ]]; then 355 | error "missing path in '' marker in ${markdown}" 356 | printf '%s\n' "${markers}" 357 | continue 358 | fi 359 | lib="${lib% -->}" 360 | lib="$(dirname -- "${markdown}")/${lib}" 361 | markers=$(grep -En '^' "${lib}" || true) 362 | # BSD wc's -l emits spaces before number. 363 | if [[ ! "$(LC_ALL=C wc -l <<<"${markers}")" =~ ^\ *2$ ]]; then 364 | if [[ -n "${markers}" ]]; then 365 | error "inconsistent '' marker found in ${lib}" 366 | printf '%s\n' "${markers}" 367 | else 368 | error "missing '' marker in ${lib}" 369 | fi 370 | continue 371 | fi 372 | start_marker=$(head -n1 <<<"${markers}") 373 | end_marker=$(head -n2 <<<"${markers}" | tail -n1) 374 | if [[ "${start_marker}" == *"tidy:sync-markdown-to-rustdoc:end"* ]] || [[ "${end_marker}" == *"tidy:sync-markdown-to-rustdoc:start"* ]]; then 375 | error "inconsistent '' marker found in ${lib}" 376 | printf '%s\n' "${markers}" 377 | continue 378 | fi 379 | new=''$'\a' 380 | empty_line_re='^ *$' 381 | gfm_alert_re='^> {0,4}\[!.*\] *$' 382 | rust_code_block_re='^ *```(rust|rs) *$' 383 | code_block_attr='' 384 | in_alert='' 385 | first_line=1 386 | ignore='' 387 | while IFS='' read -rd$'\a' line; do 388 | if [[ -n "${ignore}" ]]; then 389 | if [[ "${line}" == ''* ]]; then 390 | ignore='' 391 | fi 392 | continue 393 | fi 394 | if [[ -n "${first_line}" ]]; then 395 | # Ignore start marker. 396 | first_line='' 397 | continue 398 | elif [[ -n "${in_alert}" ]]; then 399 | if [[ "${line}" =~ ${empty_line_re} ]]; then 400 | in_alert='' 401 | new+=$'\a'""$'\a' 402 | fi 403 | elif [[ "${line}" =~ ${gfm_alert_re} ]]; then 404 | alert="${line#*[\!}" 405 | alert="${alert%%]*}" 406 | alert=$(tr '[:lower:]' '[:upper:]' <<<"${alert%%]*}") 407 | alert_lower=$(tr '[:upper:]' '[:lower:]' <<<"${alert}") 408 | case "${alert}" in 409 | NOTE | TIP | IMPORTANT) alert_sign='ⓘ' ;; 410 | WARNING | CAUTION) alert_sign='⚠' ;; 411 | *) 412 | error "unknown alert type '${alert}' found; please use one of the types listed in " 413 | new+="${line}"$'\a' 414 | continue 415 | ;; 416 | esac 417 | in_alert=1 418 | new+="
"$'\a\a' 419 | new+="> **${alert_sign} ${alert:0:1}${alert_lower:1}**"$'\a>\a' 420 | continue 421 | fi 422 | if [[ "${line}" =~ ${rust_code_block_re} ]]; then 423 | code_block_attr="${code_block_attr#<\!-- tidy:sync-markdown-to-rustdoc:code-block:}" 424 | code_block_attr="${code_block_attr%% -->*}" 425 | new+="${line/\`\`\`*/\`\`\`}${code_block_attr}"$'\a' 426 | code_block_attr='' 427 | continue 428 | fi 429 | if [[ -n "${code_block_attr}" ]]; then 430 | error "'${code_block_attr}' ignored because there is no subsequent Rust code block" 431 | code_block_attr='' 432 | fi 433 | if [[ "${line}" == ''* ]]; then 434 | code_block_attr="${line}" 435 | continue 436 | fi 437 | if [[ "${line}" == ''* ]]; then 438 | if [[ "${new}" == *$'\a\a' ]]; then 439 | new="${new%$'\a'}" 440 | fi 441 | ignore=1 442 | continue 443 | fi 444 | new+="${line}"$'\a' 445 | done < <(tr '\n' '\a' <"${markdown}" | grep -Eo '.*') 446 | new+='' 447 | new=$(tr '\n' '\a' <"${lib}" | sed "s/.*/$(sed_rhs_escape "${new}")/" | tr '\a' '\n') 448 | printf '%s\n' "${new}" >|"${lib}" 449 | check_diff "${lib}" 450 | done 451 | printf '\n' 452 | else 453 | check_unused "Rust code" '*.cargo*' '*clippy.toml' '*deny.toml' '*rustfmt.toml' '*Cargo.toml' '*Cargo.lock' 454 | fi 455 | check_hidden clippy.toml deny.toml rustfmt.toml 456 | 457 | # C/C++/Protobuf (if exists) 458 | clang_format_ext=('*.c' '*.h' '*.cpp' '*.hpp' '*.proto') 459 | if [[ -n "$(ls_files "${clang_format_ext[@]}")" ]]; then 460 | info "checking C/C++/Protobuf code style" 461 | check_config .clang-format 462 | if check_install clang-format; then 463 | IFS=' ' 464 | info "running \`clang-format -i \$(git ls-files ${clang_format_ext[*]})\`" 465 | IFS=$'\n\t' 466 | clang-format -i $(ls_files "${clang_format_ext[@]}") 467 | check_diff $(ls_files "${clang_format_ext[@]}") 468 | fi 469 | printf '\n' 470 | else 471 | check_unused "C/C++/Protobuf code" '*.clang-format*' 472 | fi 473 | check_alt '.clang-format' '_clang-format' "$(ls_files '*_clang-format')" 474 | # https://gcc.gnu.org/onlinedocs/gcc/Overall-Options.html 475 | check_alt '.cpp extension' 'other extensions' "$(ls_files '*.cc' '*.cp' '*.cxx' '*.C' '*.CPP' '*.c++')" 476 | check_alt '.hpp extension' 'other extensions' "$(ls_files '*.hh' '*.hp' '*.hxx' '*.H' '*.HPP' '*.h++')" 477 | 478 | # YAML/HTML/CSS/JavaScript/JSON (if exists) 479 | prettier_ext=('*.css' '*.html' '*.js' '*.json' '*.yml' '*.yaml') 480 | if [[ -n "$(ls_files "${prettier_ext[@]}")" ]]; then 481 | info "checking YAML/HTML/CSS/JavaScript/JSON code style" 482 | check_config .editorconfig 483 | if check_install npm; then 484 | IFS=' ' 485 | info "running \`npx -y prettier -l -w \$(git ls-files ${prettier_ext[*]})\`" 486 | IFS=$'\n\t' 487 | npx -y prettier -l -w $(ls_files "${prettier_ext[@]}") 488 | check_diff $(ls_files "${prettier_ext[@]}") 489 | fi 490 | printf '\n' 491 | else 492 | check_unused "YAML/HTML/CSS/JavaScript/JSON file" '*.prettierignore' 493 | fi 494 | # https://prettier.io/docs/en/configuration 495 | check_alt '.editorconfig' 'other configs' "$(ls_files '*.prettierrc*' '*prettier.config.*')" 496 | check_alt '.yml extension' '.yaml extension' "$(ls_files '*.yaml' | { grep -Fv '.markdownlint-cli2.yaml' || true; })" 497 | 498 | # TOML (if exists) 499 | if [[ -n "$(ls_files '*.toml' | { grep -Fv '.taplo.toml' || true; })" ]]; then 500 | info "checking TOML style" 501 | check_config .taplo.toml 502 | if check_install npm; then 503 | info "running \`npx -y @taplo/cli fmt \$(git ls-files '*.toml')\`" 504 | RUST_LOG=warn npx -y @taplo/cli fmt 
$(ls_files '*.toml') 505 | check_diff $(ls_files '*.toml') 506 | fi 507 | printf '\n' 508 | else 509 | check_unused "TOML file" '*taplo.toml' 510 | fi 511 | check_hidden taplo.toml 512 | 513 | # Markdown (if exists) 514 | if [[ -n "$(ls_files '*.md')" ]]; then 515 | info "checking markdown style" 516 | check_config .markdownlint-cli2.yaml 517 | if check_install npm; then 518 | info "running \`npx -y markdownlint-cli2 \$(git ls-files '*.md')\`" 519 | if ! npx -y markdownlint-cli2 $(ls_files '*.md'); then 520 | error "check failed; please resolve the above markdownlint error(s)" 521 | fi 522 | fi 523 | printf '\n' 524 | else 525 | check_unused "markdown file" '*.markdownlint-cli2.yaml' 526 | fi 527 | # https://github.com/DavidAnson/markdownlint-cli2#configuration 528 | check_alt '.markdownlint-cli2.yaml' 'other configs' "$(ls_files '*.markdownlint-cli2.jsonc' '*.markdownlint-cli2.cjs' '*.markdownlint-cli2.mjs' '*.markdownlint.*')" 529 | check_alt '.md extension' '*.markdown extension' "$(ls_files '*.markdown')" 530 | 531 | # Shell scripts 532 | info "checking shell scripts" 533 | shell_files=() 534 | docker_files=() 535 | bash_files=() 536 | grep_ere_files=() 537 | sed_ere_files=() 538 | for p in $(ls_files '*.sh' '*Dockerfile*'); do 539 | case "${p}" in 540 | tests/fixtures/* | */tests/fixtures/* | *.json) continue ;; 541 | esac 542 | case "${p##*/}" in 543 | *.sh) 544 | shell_files+=("${p}") 545 | re='^#!/.*bash' 546 | if [[ "$(head -1 "${p}")" =~ ${re} ]]; then 547 | bash_files+=("${p}") 548 | fi 549 | ;; 550 | *Dockerfile*) 551 | docker_files+=("${p}") 552 | bash_files+=("${p}") # TODO 553 | ;; 554 | esac 555 | if grep -Eq '(^|[^0-9A-Za-z\."'\''-])(grep) -[A-Za-z]*E[^\)]' "${p}"; then 556 | grep_ere_files+=("${p}") 557 | fi 558 | if grep -Eq '(^|[^0-9A-Za-z\."'\''-])(sed) -[A-Za-z]*E[^\)]' "${p}"; then 559 | sed_ere_files+=("${p}") 560 | fi 561 | done 562 | workflows=() 563 | actions=() 564 | if [[ -d .github/workflows ]]; then 565 | for p in .github/workflows/*.yml; do 566 | workflows+=("${p}") 567 | bash_files+=("${p}") # TODO 568 | done 569 | fi 570 | if [[ -n "$(ls_files '*action.yml')" ]]; then 571 | for p in $(ls_files '*action.yml'); do 572 | if [[ "${p##*/}" == 'action.yml' ]]; then 573 | actions+=("${p}") 574 | if ! 
grep -Fq 'shell: sh' "${p}"; then 575 | bash_files+=("${p}") 576 | fi 577 | fi 578 | done 579 | fi 580 | # correctness 581 | res=$({ grep -En '(\[\[ .* ]]|(^|[^\$])\(\(.*\)\))( +#| *$)' "${bash_files[@]}" || true; } | { grep -Ev '^[^ ]+: *(#|//)' || true; } | LC_ALL=C sort) 582 | if [[ -n "${res}" ]]; then 583 | error "bare [[ ]] and (( )) may not work as intended: see https://github.com/koalaman/shellcheck/issues/2360 for more" 584 | print_fenced "${res}"$'\n' 585 | fi 586 | # TODO: chmod|chown 587 | res=$({ grep -En '(^|[^0-9A-Za-z\."'\''-])(basename|cat|cd|cp|dirname|ln|ls|mkdir|mv|pushd|rm|rmdir|tee|touch|kill|trap)( +-[0-9A-Za-z]+)* +[^<>\|-]' "${bash_files[@]}" || true; } | { grep -Ev '^[^ ]+: *(#|//)' || true; } | LC_ALL=C sort) 588 | if [[ -n "${res}" ]]; then 589 | error "use \`--\` before path(s): see https://github.com/koalaman/shellcheck/issues/2707 / https://github.com/koalaman/shellcheck/issues/2612 / https://github.com/koalaman/shellcheck/issues/2305 / https://github.com/koalaman/shellcheck/issues/2157 / https://github.com/koalaman/shellcheck/issues/2121 / https://github.com/koalaman/shellcheck/issues/314 for more" 590 | print_fenced "${res}"$'\n' 591 | fi 592 | res=$({ grep -En '(^|[^0-9A-Za-z\."'\''-])(LINES|RANDOM|PWD)=' "${bash_files[@]}" || true; } | { grep -Ev '^[^ ]+: *(#|//)' || true; } | LC_ALL=C sort) 593 | if [[ -n "${res}" ]]; then 594 | error "do not modify these built-in bash variables: see https://github.com/koalaman/shellcheck/issues/2160 / https://github.com/koalaman/shellcheck/issues/2559 for more" 595 | print_fenced "${res}"$'\n' 596 | fi 597 | # perf 598 | res=$({ grep -En '(^|[^\\])\$\((cat) ' "${bash_files[@]}" || true; } | { grep -Ev '^[^ ]+: *(#|//)' || true; } | LC_ALL=C sort) 599 | if [[ -n "${res}" ]]; then 600 | error "use faster \`\$(' "${bash_files[@]}" || true; } | { grep -Ev '^[^ ]+: *(#|//)' || true; } | LC_ALL=C sort) 609 | if [[ -n "${res}" ]]; then 610 | error "\`type -P\` doesn't output to stderr; use \`>\` instead of \`&>\`" 611 | print_fenced "${res}"$'\n' 612 | fi 613 | # TODO: multi-line case 614 | res=$({ grep -En '(^|[^0-9A-Za-z\."'\''-])(echo|printf )[^;)]* \|[^\|]' "${bash_files[@]}" || true; } | { grep -Ev '^[^ ]+: *(#|//)' || true; } | LC_ALL=C sort) 615 | if [[ -n "${res}" ]]; then 616 | error "use faster \`<<<...\` instead of \`echo ... |\`/\`printf ... |\`: see https://github.com/koalaman/shellcheck/issues/2593 for more" 617 | print_fenced "${res}"$'\n' 618 | fi 619 | # style 620 | if [[ ${#grep_ere_files[@]} -gt 0 ]]; then 621 | # We intentionally do not check for occurrences in any other order (e.g., -iE, -i -E) here. 622 | # This enforces the style and makes it easier to search. 
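# For example (an illustrative editorial sketch; the file name below is hypothetical), in a script that already contains
#     grep -E '^release: ' version.txt
# a later BRE invocation such as
#     grep '^release' version.txt
# is reported by the check below, and the fix is simply
#     grep -E '^release' version.txt
# so that every grep (and likewise every sed) in that file consistently uses ERE.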
623 | res=$({ grep -En '(^|[^0-9A-Za-z\."'\''-])(grep) +([^-]|-[^EFP-]|--[^hv])' "${grep_ere_files[@]}" || true; } | { grep -Ev '^[^ ]+: *(#|//)' || true; } | LC_ALL=C sort) 624 | if [[ -n "${res}" ]]; then 625 | error "please always use ERE (grep -E) instead of BRE for code consistency within a file" 626 | print_fenced "${res}"$'\n' 627 | fi 628 | fi 629 | if [[ ${#sed_ere_files[@]} -gt 0 ]]; then 630 | res=$({ grep -En '(^|[^0-9A-Za-z\."'\''-])(sed) +([^-]|-[^E-]|--[^hv])' "${sed_ere_files[@]}" || true; } | { grep -Ev '^[^ ]+: *(#|//)' || true; } | LC_ALL=C sort) 631 | if [[ -n "${res}" ]]; then 632 | error "please always use ERE (sed -E) instead of BRE for code consistency within a file" 633 | print_fenced "${res}"$'\n' 634 | fi 635 | fi 636 | if check_install shfmt; then 637 | check_config .editorconfig 638 | info "running \`shfmt -w \$(git ls-files '*.sh')\`" 639 | if ! shfmt -w "${shell_files[@]}"; then 640 | error "check failed; please resolve the shfmt error(s)" 641 | fi 642 | check_diff "${shell_files[@]}" 643 | fi 644 | if [[ "${ostype}" == 'solaris' ]] && [[ -n "${CI:-}" ]] && ! type -P shellcheck >/dev/null; then 645 | warn "this check is skipped on Solaris because haskell/shellcheck is not available in the upstream package manager" 646 | elif check_install shellcheck; then 647 | check_config .shellcheckrc 648 | info "running \`shellcheck \$(git ls-files '*.sh')\`" 649 | if ! shellcheck "${shell_files[@]}"; then 650 | error "check failed; please resolve the above shellcheck error(s)" 651 | fi 652 | # Check scripts in dockerfile. 653 | if [[ ${#docker_files[@]} -gt 0 ]]; then 654 | # Exclude SC2096 due to the way the temporary script is created. 655 | shellcheck_exclude=SC2096 656 | info "running \`shellcheck --exclude ${shellcheck_exclude}\` for scripts in \`\$(git ls-files '*Dockerfile*')\`" 657 | if check_install jq python3 parse-dockerfile; then 658 | shellcheck_for_dockerfile() { 659 | local text=$1 660 | local shell=$2 661 | local display_path=$3 662 | if [[ "${text}" == 'null' ]]; then 663 | return 664 | fi 665 | text="#!${shell}"$'\n'"${text}" 666 | case "${ostype}" in 667 | windows) text=${text//$'\r'/} ;; # Parse error on git bash/msys2 bash. 668 | esac 669 | local color=auto 670 | if [[ -t 1 ]] || [[ -n "${GITHUB_ACTIONS:-}" ]]; then 671 | color=always 672 | fi 673 | # We don't use <(printf '%s\n' "${text}") here because: 674 | # Windows: failed to find fd created by <() ("/proc/*/fd/* (git bash/msys2 bash) /dev/fd/* (cygwin bash): openBinaryFile: does not exist (No such file or directory)" error) 675 | # DragonFly BSD: hang 676 | # Others: false negative 677 | trap -- 'rm -- ./tools/.tidy-tmp; printf >&2 "%s\n" "${0##*/}: trapped SIGINT"; exit 1' SIGINT 678 | printf '%s\n' "${text}" >|./tools/.tidy-tmp 679 | if !
shellcheck --color="${color}" --exclude "${shellcheck_exclude}" ./tools/.tidy-tmp | sed "s/\.\/tools\/\.tidy-tmp/$(sed_rhs_escape "${display_path}")/g"; then 680 | error "check failed; please resolve the above shellcheck error(s)" 681 | fi 682 | rm -- ./tools/.tidy-tmp 683 | trap -- 'printf >&2 "%s\n" "${0##*/}: trapped SIGINT"; exit 1' SIGINT 684 | } 685 | for dockerfile_path in ${docker_files[@]+"${docker_files[@]}"}; do 686 | dockerfile=$(parse-dockerfile "${dockerfile_path}") 687 | normal_shell='' 688 | for instruction in $(jq -c '.instructions[]' <<<"${dockerfile}"); do 689 | instruction_kind=$(jq -r '.kind' <<<"${instruction}") 690 | case "${instruction_kind}" in 691 | FROM) 692 | # https://docs.docker.com/reference/dockerfile/#from 693 | # > Each FROM instruction clears any state created by previous instructions. 694 | normal_shell='' 695 | continue 696 | ;; 697 | ADD | ARG | CMD | COPY | ENTRYPOINT | ENV | EXPOSE | HEALTHCHECK | LABEL) ;; 698 | # https://docs.docker.com/reference/build-checks/maintainer-deprecated/ 699 | MAINTAINER) error "MAINTAINER instruction is deprecated in favor of using label" ;; 700 | RUN) ;; 701 | SHELL) 702 | normal_shell='' 703 | for argument in $(jq -c '.arguments[]' <<<"${instruction}"); do 704 | value=$(jq -r '.value' <<<"${argument}") 705 | if [[ -z "${normal_shell}" ]]; then 706 | case "${value}" in 707 | cmd | cmd.exe | powershell | powershell.exe) 708 | # not unix shell 709 | normal_shell="${value}" 710 | break 711 | ;; 712 | esac 713 | else 714 | normal_shell+=' ' 715 | fi 716 | normal_shell+="${value}" 717 | done 718 | ;; 719 | STOPSIGNAL | USER | VOLUME | WORKDIR) ;; 720 | *) error "unknown instruction ${instruction_kind}" ;; 721 | esac 722 | arguments='' 723 | # only shell-form RUN/ENTRYPOINT/CMD is run in a shell 724 | case "${instruction_kind}" in 725 | RUN) 726 | if [[ "$(jq -r '.arguments.shell' <<<"${instruction}")" == 'null' ]]; then 727 | continue 728 | fi 729 | arguments=$(jq -r '.arguments.shell.value' <<<"${instruction}") 730 | if [[ -z "${arguments}" ]]; then 731 | if [[ "$(jq -r '.here_docs[0]' <<<"${instruction}")" == 'null' ]]; then 732 | error "empty RUN is useless (${dockerfile_path})" 733 | continue 734 | fi 735 | if [[ "$(jq -r '.here_docs[1]' <<<"${instruction}")" != 'null' ]]; then 736 | # TODO: 737 | error "multi here-docs without command is not yet supported (${dockerfile_path})" 738 | fi 739 | arguments=$(jq -r '.here_docs[0].value' <<<"${instruction}") 740 | if [[ "${arguments}" == '#!'* ]]; then 741 | # TODO: 742 | error "here-docs with shebang is not yet supported (${dockerfile_path})" 743 | continue 744 | fi 745 | else 746 | if [[ "$(jq -r '.here_docs[0]' <<<"${instruction}")" != 'null' ]]; then 747 | # TODO: 748 | error "sh/bash command with here-docs is not yet checked (${dockerfile_path})" 749 | fi 750 | fi 751 | ;; 752 | ENTRYPOINT | CMD) 753 | if [[ "$(jq -r '.arguments.shell' <<<"${instruction}")" == 'null' ]]; then 754 | continue 755 | fi 756 | arguments=$(jq -r '.arguments.shell.value' <<<"${instruction}") 757 | if [[ -z "${normal_shell}" ]] && [[ -n "${arguments}" ]]; then 758 | # https://docs.docker.com/reference/build-checks/json-args-recommended/ 759 | error "JSON arguments recommended for ENTRYPOINT/CMD to prevent unintended behavior related to OS signals" 760 | fi 761 | ;; 762 | HEALTHCHECK) 763 | if [[ "$(jq -r '.arguments.kind' <<<"${instruction}")" != "CMD" ]]; then 764 | continue 765 | fi 766 | if [[ "$(jq -r '.arguments.arguments.shell' <<<"${instruction}")" == 'null' ]]; then 767 | continue 768 | 
fi 769 | arguments=$(jq -r '.arguments.arguments.shell.value' <<<"${instruction}") 770 | ;; 771 | *) continue ;; 772 | esac 773 | case "${normal_shell}" in 774 | # not unix shell 775 | cmd | cmd.exe | powershell | powershell.exe) continue ;; 776 | # https://docs.docker.com/reference/dockerfile/#shell 777 | '') shell='/bin/sh -c' ;; 778 | *) shell="${normal_shell}" ;; 779 | esac 780 | shellcheck_for_dockerfile "${arguments}" "${shell}" "${dockerfile_path}" 781 | done 782 | done 783 | fi 784 | fi 785 | # Check scripts in YAML. 786 | if [[ ${#workflows[@]} -gt 0 ]] || [[ ${#actions[@]} -gt 0 ]]; then 787 | # Exclude SC2096 due to the way the temporary script is created. 788 | shellcheck_exclude=SC2086,SC2096,SC2129 789 | info "running \`shellcheck --exclude ${shellcheck_exclude}\` for scripts in .github/workflows/*.yml and **/action.yml" 790 | if check_install jq python3 pipx; then 791 | shellcheck_for_gha() { 792 | local text=$1 793 | local shell=$2 794 | local display_path=$3 795 | if [[ "${text}" == 'null' ]]; then 796 | return 797 | fi 798 | case "${shell}" in 799 | bash* | sh*) ;; 800 | *) return ;; 801 | esac 802 | text="#!/usr/bin/env ${shell%' {0}'}"$'\n'"${text}" 803 | # Use python because sed doesn't support .*?. 804 | text=$( 805 | "python${py_suffix}" - <&2 "%s\n" "${0##*/}: trapped SIGINT"; exit 1' SIGINT 823 | printf '%s\n' "${text}" >|./tools/.tidy-tmp 824 | if ! shellcheck --color="${color}" --exclude "${shellcheck_exclude}" ./tools/.tidy-tmp | sed "s/\.\/tools\/\.tidy-tmp/$(sed_rhs_escape "${display_path}")/g"; then 825 | error "check failed; please resolve the above shellcheck error(s)" 826 | fi 827 | rm -- ./tools/.tidy-tmp 828 | trap -- 'printf >&2 "%s\n" "${0##*/}: trapped SIGINT"; exit 1' SIGINT 829 | } 830 | for workflow_path in ${workflows[@]+"${workflows[@]}"}; do 831 | workflow=$(yq -c '.' "${workflow_path}") 832 | # The top-level permissions must be weak as they are referenced by all jobs. 833 | permissions=$(jq -c '.permissions' <<<"${workflow}") 834 | case "${permissions}" in 835 | '{"contents":"read"}' | '{"contents":"none"}') ;; 836 | null) error "${workflow_path}: top level permissions not found; it must be 'contents: read' or weaker permissions" ;; 837 | *) error "${workflow_path}: only 'contents: read' and weaker permissions are allowed at top level, but found '${permissions}'; if you want to use stronger permissions, please set job-level permissions" ;; 838 | esac 839 | default_shell=$(jq -r -c '.defaults.run.shell' <<<"${workflow}") 840 | # github's default is https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#defaultsrunshell 841 | re='^bash --noprofile --norc -CeEux?o pipefail \{0}$' 842 | if [[ ! 
"${default_shell}" =~ ${re} ]]; then 843 | error "${workflow_path}: defaults.run.shell should be 'bash --noprofile --norc -CeEuxo pipefail {0}' or 'bash --noprofile --norc -CeEuo pipefail {0}'" 844 | continue 845 | fi 846 | # .steps == null means the job is the caller of reusable workflow 847 | for job in $(jq -c '.jobs | to_entries[] | select(.value.steps)' <<<"${workflow}"); do 848 | name=$(jq -r '.key' <<<"${job}") 849 | job=$(jq -r '.value' <<<"${job}") 850 | n=0 851 | job_default_shell=$(jq -r '.defaults.run.shell' <<<"${job}") 852 | if [[ "${job_default_shell}" == 'null' ]]; then 853 | job_default_shell="${default_shell}" 854 | fi 855 | for step in $(jq -c '.steps[]' <<<"${job}"); do 856 | prepare='' 857 | eval "$(jq -r 'if .run then @sh "RUN=\(.run) shell=\(.shell)" else @sh "RUN=\(.with.run) prepare=\(.with.prepare) shell=\(.with.shell)" end' <<<"${step}")" 858 | if [[ "${RUN}" == 'null' ]]; then 859 | _=$((n++)) 860 | continue 861 | fi 862 | if [[ "${shell}" == 'null' ]]; then 863 | if [[ -z "${prepare}" ]]; then 864 | shell="${job_default_shell}" 865 | elif grep -Eq '^ *chsh +-s +[^ ]+/bash' <<<"${prepare}"; then 866 | shell='bash' 867 | else 868 | shell='sh' 869 | fi 870 | fi 871 | shellcheck_for_gha "${RUN}" "${shell}" "${workflow_path} ${name}.steps[${n}].run" 872 | shellcheck_for_gha "${prepare:-null}" 'sh' "${workflow_path} ${name}.steps[${n}].run" 873 | _=$((n++)) 874 | done 875 | done 876 | done 877 | for action_path in ${actions[@]+"${actions[@]}"}; do 878 | runs=$(yq -c '.runs' "${action_path}") 879 | if [[ "$(jq -r '.using' <<<"${runs}")" != "composite" ]]; then 880 | continue 881 | fi 882 | n=0 883 | for step in $(jq -c '.steps[]' <<<"${runs}"); do 884 | prepare='' 885 | eval "$(jq -r 'if .run then @sh "RUN=\(.run) shell=\(.shell)" else @sh "RUN=\(.with.run) prepare=\(.with.prepare) shell=\(.with.shell)" end' <<<"${step}")" 886 | if [[ "${RUN}" == 'null' ]]; then 887 | _=$((n++)) 888 | continue 889 | fi 890 | if [[ "${shell}" == 'null' ]]; then 891 | if [[ -z "${prepare}" ]]; then 892 | error "\`shell: ..\` is required" 893 | continue 894 | elif grep -Eq '^ *chsh +-s +[^ ]+/bash' <<<"${prepare}"; then 895 | shell='bash' 896 | else 897 | shell='sh' 898 | fi 899 | fi 900 | shellcheck_for_gha "${RUN}" "${shell}" "${action_path} steps[${n}].run" 901 | shellcheck_for_gha "${prepare:-null}" 'sh' "${action_path} steps[${n}].run" 902 | _=$((n++)) 903 | done 904 | done 905 | fi 906 | fi 907 | fi 908 | printf '\n' 909 | check_alt '.sh extension' '*.bash extension' "$(ls_files '*.bash')" 910 | 911 | # License check 912 | # TODO: This check is still experimental and does not track all files that should be tracked. 
913 | if [[ -f tools/.tidy-check-license-headers ]]; then 914 | info "checking license headers (experimental)" 915 | failed_files='' 916 | for p in $(comm -12 <(eval $(|.cspell.json; printf >&2 "%s\n" "${0##*/}: trapped SIGINT"; exit 1' SIGINT 978 | printf '%s\n' "${config_new}" >|.cspell.json 979 | dependencies_words='' 980 | if [[ -n "${has_rust}" ]]; then 981 | dependencies_words=$(npx -y cspell stdin --no-progress --no-summary --words-only --unique <<<"${dependencies}" || true) 982 | fi 983 | all_words=$(ls_files | { grep -Fv "${project_dictionary}" || true; } | npx -y cspell --file-list stdin --no-progress --no-summary --words-only --unique || true) 984 | printf '%s\n' "${config_old}" >|.cspell.json 985 | trap -- 'printf >&2 "%s\n" "${0##*/}: trapped SIGINT"; exit 1' SIGINT 986 | cat >|.github/.cspell/rust-dependencies.txt <>.github/.cspell/rust-dependencies.txt <<<"${dependencies_words}"$'\n' 992 | fi 993 | if [[ -z "${CI:-}" ]]; then 994 | REMOVE_UNUSED_WORDS=1 995 | fi 996 | if [[ -z "${REMOVE_UNUSED_WORDS:-}" ]]; then 997 | check_diff .github/.cspell/rust-dependencies.txt 998 | fi 999 | if ! grep -Fq '.github/.cspell/rust-dependencies.txt linguist-generated' .gitattributes; then 1000 | error "you may want to mark .github/.cspell/rust-dependencies.txt linguist-generated" 1001 | fi 1002 | 1003 | info "running \`git ls-files | npx -y cspell --file-list stdin --no-progress --no-summary\`" 1004 | if ! ls_files | npx -y cspell --file-list stdin --no-progress --no-summary; then 1005 | error "spellcheck failed: please fix uses of below words or add to ${project_dictionary} if correct" 1006 | printf '=======================================\n' 1007 | { ls_files | npx -y cspell --file-list stdin --no-progress --no-summary --words-only || true; } | sed "s/'s$//g" | LC_ALL=C sort -f -u 1008 | printf '=======================================\n\n' 1009 | fi 1010 | 1011 | # Make sure the project-specific dictionary does not contain duplicated words. 1012 | for dictionary in .github/.cspell/*.txt; do 1013 | if [[ "${dictionary}" == "${project_dictionary}" ]]; then 1014 | continue 1015 | fi 1016 | case "${ostype}" in 1017 | # NetBSD uniq doesn't support -i flag. 1018 | netbsd) dup=$(sed '/^$/d; /^\/\//d' "${project_dictionary}" "${dictionary}" | LC_ALL=C sort -f | tr '[:upper:]' '[:lower:]' | LC_ALL=C uniq -d) ;; 1019 | *) dup=$(sed '/^$/d; /^\/\//d' "${project_dictionary}" "${dictionary}" | LC_ALL=C sort -f | LC_ALL=C uniq -d -i) ;; 1020 | esac 1021 | if [[ -n "${dup}" ]]; then 1022 | error "duplicated words in dictionaries; please remove the following words from ${project_dictionary}" 1023 | print_fenced "${dup}"$'\n' 1024 | fi 1025 | done 1026 | 1027 | # Make sure the project-specific dictionary does not contain unused words. 1028 | if [[ -n "${REMOVE_UNUSED_WORDS:-}" ]]; then 1029 | grep_args=() 1030 | for word in $(grep -Ev '^//' "${project_dictionary}" || true); do 1031 | if ! grep -Eqi "^${word}$" <<<"${all_words}"; then 1032 | grep_args+=(-e "^${word}$") 1033 | fi 1034 | done 1035 | if [[ ${#grep_args[@]} -gt 0 ]]; then 1036 | info "removing unused words from ${project_dictionary}" 1037 | res=$(grep -Ev "${grep_args[@]}" "${project_dictionary}" || true) 1038 | if [[ -n "${res}" ]]; then 1039 | printf '%s\n' "${res}" >|"${project_dictionary}" 1040 | else 1041 | printf '' >|"${project_dictionary}" 1042 | fi 1043 | fi 1044 | else 1045 | unused='' 1046 | for word in $(grep -Ev '^//' "${project_dictionary}" || true); do 1047 | if ! 
grep -Eqi "^${word}$" <<<"${all_words}"; then 1048 | unused+="${word}"$'\n' 1049 | fi 1050 | done 1051 | if [[ -n "${unused}" ]]; then 1052 | error "unused words in dictionaries; please remove the following words from ${project_dictionary} or run ${0##*/} locally" 1053 | print_fenced "${unused}" 1054 | fi 1055 | fi 1056 | fi 1057 | printf '\n' 1058 | fi 1059 | 1060 | if [[ -n "${should_fail:-}" ]]; then 1061 | exit 1 1062 | fi 1063 | --------------------------------------------------------------------------------