├── .clippy.toml ├── .cspell.json ├── .deny.toml ├── .editorconfig ├── .git-blame-ignore-revs ├── .gitattributes ├── .github ├── .cspell │ ├── project-dictionary.txt │ └── rust-dependencies.txt ├── dependabot.yml └── workflows │ ├── ci.yml │ └── release.yml ├── .gitignore ├── .markdownlint-cli2.yaml ├── .rustfmt.toml ├── .shellcheckrc ├── .taplo.toml ├── CHANGELOG.md ├── Cargo.toml ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── src ├── ast.rs ├── error.rs ├── iter.rs ├── lib.rs └── to_tokens.rs ├── tests ├── compiletest.rs ├── run-pass │ ├── const_trait_impl.rs │ ├── impl_trait_in_assoc_type.rs │ ├── min_specialization.rs │ └── specialization.rs ├── test.rs └── ui │ ├── invalid.rs │ ├── invalid.stderr │ ├── maybe.rs │ ├── maybe.stderr │ ├── visibility.rs │ └── visibility.stderr └── tools ├── .tidy-check-license-headers ├── publish.sh └── tidy.sh /.clippy.toml: -------------------------------------------------------------------------------- 1 | # Clippy configuration 2 | # https://doc.rust-lang.org/nightly/clippy/lint_configuration.html 3 | 4 | allow-private-module-inception = true 5 | avoid-breaking-exported-api = false 6 | disallowed-names = [] 7 | disallowed-macros = [ 8 | { path = "std::dbg", reason = "it is okay to use during development, but please do not include it in main branch" }, 9 | ] 10 | disallowed-methods = [ 11 | ] 12 | disallowed-types = [ 13 | ] 14 | -------------------------------------------------------------------------------- /.cspell.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2", 3 | "gitignoreRoot": ".", 4 | "useGitignore": true, 5 | "dictionaryDefinitions": [ 6 | { 7 | "name": "organization-dictionary", 8 | "path": "https://raw.githubusercontent.com/taiki-e/github-actions/HEAD/.github/.cspell/organization-dictionary.txt", 9 | "addWords": true 10 | }, 11 | { 12 | "name": "project-dictionary", 13 | "path": "./.github/.cspell/project-dictionary.txt", 14 | "addWords": true 
15 | }, 16 | { 17 | "name": "rust-dependencies", 18 | "path": "./.github/.cspell/rust-dependencies.txt", 19 | "addWords": true 20 | } 21 | ], 22 | "dictionaries": [ 23 | "organization-dictionary", 24 | "project-dictionary", 25 | "rust-dependencies" 26 | ], 27 | "ignoreRegExpList": [ 28 | // Copyright notice 29 | "Copyright .*", 30 | "SPDX-(File|Snippet)CopyrightText: .*", 31 | // GHA actions/workflows 32 | "uses: .+@[\\w_.-]+", 33 | // GHA context (repo name, owner name, etc.) 34 | "github.[\\w_.-]+ (=|!)= '[^']+'", 35 | // GH username 36 | "( |\\[)@[\\w_-]+", 37 | // Git config username 38 | "git config( --[^ ]+)? user.name .*", 39 | // Username in TODO|FIXME comment 40 | "(TODO|FIXME)\\([\\w_., -]+\\)", 41 | // Cargo.toml authors 42 | "authors *= *\\[[^\\]]*\\]", 43 | "\"[^\"]* <[\\w_.+-]+@[\\w.-]+>\"" 44 | ], 45 | "languageSettings": [ 46 | { 47 | "languageId": ["*"], 48 | "dictionaries": ["bash", "cpp-refined", "rust"] 49 | } 50 | ], 51 | "ignorePaths": [] 52 | } 53 | -------------------------------------------------------------------------------- /.deny.toml: -------------------------------------------------------------------------------- 1 | # https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html 2 | [advisories] 3 | yanked = "deny" 4 | git-fetch-with-cli = true 5 | ignore = [ 6 | ] 7 | 8 | # https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html 9 | [bans] 10 | multiple-versions = "warn" 11 | wildcards = "deny" 12 | allow-wildcard-paths = true 13 | build.executables = "deny" 14 | build.interpreted = "deny" 15 | build.include-dependencies = true 16 | build.include-workspace = false # covered by tools/tidy.sh 17 | build.include-archives = true 18 | build.allow-build-scripts = [ 19 | ] 20 | build.bypass = [ 21 | ] 22 | 23 | # https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html 24 | [licenses] 25 | unused-allowed-license = "deny" 26 | private.ignore = true 27 | allow = [ 28 | "Apache-2.0", 29 | "MIT", 30 | ] 31 | 32 | # 
https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html 33 | [sources] 34 | unknown-registry = "deny" 35 | unknown-git = "deny" 36 | allow-git = [ 37 | ] 38 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # EditorConfig configuration 2 | # https://editorconfig.org 3 | 4 | root = true 5 | 6 | [*] 7 | charset = utf-8 8 | end_of_line = lf 9 | indent_size = 4 10 | indent_style = space 11 | insert_final_newline = true 12 | trim_trailing_whitespace = true 13 | 14 | [*.{css,html,json,md,rb,sh,yml,yaml}] 15 | indent_size = 2 16 | 17 | [*.{js,yml,yaml}] 18 | quote_type = single 19 | 20 | [*.sh] 21 | # https://google.github.io/styleguide/shellguide.html#s5.3-pipelines 22 | binary_next_line = true 23 | # https://google.github.io/styleguide/shellguide.html#s5.5-case-statement 24 | switch_case_indent = true 25 | -------------------------------------------------------------------------------- /.git-blame-ignore-revs: -------------------------------------------------------------------------------- 1 | # Change indent size of shell script files to match scripts in CI config 2 | 43c584a7ecad2ea18d790d848f85ad10f283e819 3 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | * text=auto eol=lf 2 | tools/tidy.sh linguist-detectable=false 3 | .github/.cspell/rust-dependencies.txt linguist-generated 4 | -------------------------------------------------------------------------------- /.github/.cspell/project-dictionary.txt: -------------------------------------------------------------------------------- 1 | asyncness 2 | defaultness 3 | subtrait 4 | desugared 5 | forall 6 | -------------------------------------------------------------------------------- /.github/.cspell/rust-dependencies.txt: 
-------------------------------------------------------------------------------- 1 | // This file is @generated by tidy.sh. 2 | // It is not intended for manual editing. 3 | 4 | rustversion 5 | trybuild 6 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: cargo 4 | directory: / 5 | schedule: 6 | interval: daily 7 | commit-message: 8 | prefix: '' 9 | labels: [] 10 | - package-ecosystem: github-actions 11 | directory: / 12 | schedule: 13 | interval: daily 14 | commit-message: 15 | prefix: '' 16 | labels: [] 17 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | pull_request: 8 | push: 9 | branches: 10 | - main 11 | - dev 12 | schedule: 13 | - cron: '0 2 * * *' 14 | workflow_dispatch: 15 | 16 | env: 17 | CARGO_INCREMENTAL: 0 18 | CARGO_NET_GIT_FETCH_WITH_CLI: true 19 | CARGO_NET_RETRY: 10 20 | CARGO_TERM_COLOR: always 21 | RUST_BACKTRACE: 1 22 | RUSTDOCFLAGS: -D warnings 23 | RUSTFLAGS: -D warnings 24 | RUSTUP_MAX_RETRIES: 10 25 | 26 | defaults: 27 | run: 28 | shell: bash --noprofile --norc -CeEuxo pipefail {0} 29 | 30 | concurrency: 31 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} 32 | cancel-in-progress: true 33 | 34 | jobs: 35 | miri: 36 | uses: taiki-e/github-actions/.github/workflows/miri.yml@main 37 | msrv: 38 | uses: taiki-e/github-actions/.github/workflows/msrv.yml@main 39 | test: 40 | uses: taiki-e/github-actions/.github/workflows/test.yml@main 41 | tidy: 42 | uses: taiki-e/github-actions/.github/workflows/tidy.yml@main 43 | permissions: 44 | contents: read 45 | pull-requests: write # for gh pr edit --add-assignee 46 | 
repository-projects: read # for gh pr edit --add-assignee 47 | secrets: inherit 48 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | push: 8 | tags: 9 | - v[0-9]+.* 10 | 11 | defaults: 12 | run: 13 | shell: bash --noprofile --norc -CeEuxo pipefail {0} 14 | 15 | jobs: 16 | create-release: 17 | if: github.repository_owner == 'taiki-e' 18 | uses: taiki-e/github-actions/.github/workflows/create-release.yml@main 19 | permissions: 20 | contents: write 21 | secrets: inherit 22 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | Cargo.lock 3 | 4 | # For platform and editor specific settings, it is recommended to add to 5 | # a global .gitignore file. 
6 | # Refs: https://docs.github.com/en/github/using-git/ignoring-files#configuring-ignored-files-for-all-repositories-on-your-computer 7 | -------------------------------------------------------------------------------- /.markdownlint-cli2.yaml: -------------------------------------------------------------------------------- 1 | # https://github.com/DavidAnson/markdownlint/blob/HEAD/doc/Rules.md 2 | config: 3 | line-length: false # MD013 4 | no-duplicate-heading: false # MD024 5 | no-blanks-blockquote: false # MD028 (this warns valid GFM alerts usage) 6 | no-inline-html: false # MD033 7 | no-emphasis-as-heading: false # MD036 8 | 9 | # https://github.com/DavidAnson/markdownlint-cli2#markdownlint-cli2jsonc 10 | noBanner: true 11 | noProgress: true 12 | -------------------------------------------------------------------------------- /.rustfmt.toml: -------------------------------------------------------------------------------- 1 | # Rustfmt configuration 2 | # https://github.com/rust-lang/rustfmt/blob/HEAD/Configurations.md 3 | 4 | # Rustfmt cannot format long lines inside macros, but this option detects this. 5 | # This is unstable (tracking issue: https://github.com/rust-lang/rustfmt/issues/3391) 6 | error_on_line_overflow = true 7 | 8 | # Override the default formatting style. 9 | # See https://internals.rust-lang.org/t/running-rustfmt-on-rust-lang-rust-and-other-rust-lang-repositories/8732/81. 10 | use_small_heuristics = "Max" 11 | # This is unstable (tracking issue: https://github.com/rust-lang/rustfmt/issues/3370) 12 | overflow_delimited_expr = true 13 | # This is unstable (tracking issue: https://github.com/rust-lang/rustfmt/issues/4991). 14 | imports_granularity = "Crate" 15 | # This is unstable (tracking issue: https://github.com/rust-lang/rustfmt/issues/5083). 16 | group_imports = "StdExternalCrate" 17 | 18 | # Apply rustfmt to more places. 19 | # This is unstable (tracking issue: https://github.com/rust-lang/rustfmt/issues/3348). 
20 | format_code_in_doc_comments = true 21 | 22 | # Automatically fix deprecated style. 23 | use_field_init_shorthand = true 24 | use_try_shorthand = true 25 | 26 | # Set the default settings again to always apply the proper formatting without 27 | # being affected by the editor settings. 28 | edition = "2018" 29 | style_edition = "2024" 30 | hard_tabs = false 31 | newline_style = "Unix" 32 | tab_spaces = 4 33 | -------------------------------------------------------------------------------- /.shellcheckrc: -------------------------------------------------------------------------------- 1 | # ShellCheck configuration 2 | # https://github.com/koalaman/shellcheck/blob/HEAD/shellcheck.1.md#rc-files 3 | 4 | # See also: 5 | # https://github.com/koalaman/shellcheck/wiki/Optional 6 | # https://google.github.io/styleguide/shellguide.html 7 | 8 | # https://github.com/koalaman/shellcheck/wiki/SC2249 9 | # enable=add-default-case 10 | 11 | # https://github.com/koalaman/shellcheck/wiki/SC2244 12 | enable=avoid-nullary-conditions 13 | 14 | # https://github.com/koalaman/shellcheck/wiki/SC2312 15 | # enable=check-extra-masked-returns 16 | 17 | # https://github.com/koalaman/shellcheck/wiki/SC2310 18 | # https://github.com/koalaman/shellcheck/wiki/SC2311 19 | # enable=check-set-e-suppressed 20 | 21 | # enable=check-unassigned-uppercase 22 | 23 | # https://github.com/koalaman/shellcheck/wiki/SC2230 24 | enable=deprecate-which 25 | 26 | # https://github.com/koalaman/shellcheck/wiki/SC2248 27 | enable=quote-safe-variables 28 | 29 | # https://github.com/koalaman/shellcheck/wiki/SC2292 30 | # https://google.github.io/styleguide/shellguide.html#s6.3-tests 31 | enable=require-double-brackets 32 | 33 | # https://github.com/koalaman/shellcheck/wiki/SC2250 34 | # https://google.github.io/styleguide/shellguide.html#s5.6-variable-expansion 35 | enable=require-variable-braces 36 | -------------------------------------------------------------------------------- /.taplo.toml: 
-------------------------------------------------------------------------------- 1 | # Taplo configuration 2 | # https://taplo.tamasfe.dev/configuration/formatter-options.html 3 | 4 | [formatting] 5 | align_comments = false 6 | allowed_blank_lines = 1 7 | array_auto_collapse = false 8 | array_auto_expand = false 9 | indent_string = " " 10 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | All notable changes to this project will be documented in this file. 4 | 5 | This project adheres to [Semantic Versioning](https://semver.org). 6 | 7 | Releases may be yanked if there is a security bug, a soundness bug, or a regression. 8 | 9 | 12 | 13 | ## [Unreleased] 14 | 15 | ## [1.0.2] - 2024-05-30 16 | 17 | - Fix error with arbitrary self types. ([#42](https://github.com/taiki-e/easy-ext/pull/42), thanks @mbazero) 18 | 19 | ## [1.0.1] - 2022-09-29 20 | 21 | - Fix "patterns aren't allowed in functions without bodies" error when patterns are used in arguments. 22 | 23 | ## [1.0.0] - 2021-08-24 24 | 25 | - Remove deprecated old impl-level visibility syntax (`#[ext(pub)]`). ([#38](https://github.com/taiki-e/easy-ext/pull/38)) 26 | 27 | Use `pub impl` syntax instead: 28 | 29 | ```diff 30 | - #[ext(pub)] 31 | - impl Type { 32 | + #[ext] 33 | + pub impl Type { 34 | fn method(&self) {} 35 | } 36 | ``` 37 | 38 | ## [0.2.9] - 2021-07-03 39 | 40 | - Fix bug in parsing of where clause. ([#37](https://github.com/taiki-e/easy-ext/pull/37)) 41 | 42 | ## [0.2.8] - 2021-06-23 43 | 44 | **Note:** This release has been yanked because of a regression which was fixed in 0.2.9. 45 | 46 | - Support specifying visibility directly on `impl`.
([#31](https://github.com/taiki-e/easy-ext/pull/31)) 47 | 48 | ```rust 49 | #[ext(Ext)] 50 | pub impl Type { 51 | fn method(&self) {} 52 | } 53 | ``` 54 | 55 | ```text 56 | pub impl Type { 57 | ^^^ 58 | ``` 59 | 60 | The old impl-level visibility syntax (`#[ext(pub)]`) will still be supported, but it is deprecated and will be removed in the next major version. 61 | 62 | Migration: 63 | 64 | ```diff 65 | - #[ext(pub)] 66 | - impl Type { 67 | + #[ext] 68 | + pub impl Type { 69 | fn method(&self) {} 70 | } 71 | ``` 72 | 73 | - Improve compile time by removing all dependencies. ([#35](https://github.com/taiki-e/easy-ext/pull/35)) 74 | 75 | - Support type parameter defaults. ([#32](https://github.com/taiki-e/easy-ext/pull/32)) 76 | 77 | ## [0.2.7] - 2021-03-25 78 | 79 | - Support associated types. ([#26](https://github.com/taiki-e/easy-ext/pull/26)) 80 | 81 | ## [0.2.6] - 2021-01-19 82 | 83 | - Support specifying visibility at impl-level. ([#25](https://github.com/taiki-e/easy-ext/pull/25)) 84 | 85 | ## [0.2.5] - 2021-01-05 86 | 87 | - Exclude unneeded files from crates.io. 88 | 89 | ## [0.2.4] - 2020-12-29 90 | 91 | - Documentation improvements. 92 | 93 | ## [0.2.3] - 2020-08-24 94 | 95 | - [Documentation (`#[doc]` attributes) is now generated only for trait definitions.](https://github.com/taiki-e/easy-ext/pull/23) Previously it generated for both trait definition and trait implementation. See [#20](https://github.com/taiki-e/easy-ext/issues/20) for more details. 96 | 97 | ## [0.2.2] - 2020-07-22 98 | 99 | - Fix `unused_attributes` lint in generated code. ([#22](https://github.com/taiki-e/easy-ext/pull/22)) 100 | 101 | - Diagnostic improvements. 102 | 103 | ## [0.2.1] - 2020-07-11 104 | 105 | - Documentation improvements. 
106 | 107 | ## [0.2.0] - 2020-04-22 108 | 109 | - [`#[ext]` no longer adds type parameter, which is equivalent to `Self`, to the trait's generics.](https://github.com/taiki-e/easy-ext/pull/15) See [#11](https://github.com/taiki-e/easy-ext/issues/11) for more details. 110 | 111 | ## [0.1.8] - 2020-04-20 112 | 113 | - Documentation improvements. 114 | 115 | ## [0.1.7] - 2020-04-20 116 | 117 | - Supported unnamed extension trait. ([#9](https://github.com/taiki-e/easy-ext/pull/9)) 118 | 119 | ## [0.1.6] - 2019-10-12 120 | 121 | - Improved error messages related to visibility. ([#5](https://github.com/taiki-e/easy-ext/pull/5)) 122 | 123 | ## [0.1.5] - 2019-08-15 124 | 125 | - Updated `syn` and `quote` to 1.0. 126 | 127 | ## [0.1.4] - 2019-03-10 128 | 129 | - Updated minimum `syn` version to 0.15.29. 130 | 131 | ## [0.1.3] - 2019-02-21 132 | 133 | - Removed `inline` attributes on the trait method side. This avoids the `clippy::inline_fn_without_body` lint. 134 | 135 | ## [0.1.2] - 2019-02-21 136 | 137 | - Used `#[allow(patterns_in_fns_without_body)]` to the generated extension trait. 138 | 139 | - Fixed some bugs related to generics. 140 | 141 | ## [0.1.1] - 2019-02-21 142 | 143 | **Note:** This release has been yanked. 144 | 145 | - Fixed an error related to generics. 146 | 147 | ## [0.1.0] - 2019-02-20 148 | 149 | **Note:** This release has been yanked.
150 | 151 | Initial release 152 | 153 | [Unreleased]: https://github.com/taiki-e/easy-ext/compare/v1.0.2...HEAD 154 | [1.0.2]: https://github.com/taiki-e/easy-ext/compare/v1.0.1...v1.0.2 155 | [1.0.1]: https://github.com/taiki-e/easy-ext/compare/v1.0.0...v1.0.1 156 | [1.0.0]: https://github.com/taiki-e/easy-ext/compare/v0.2.9...v1.0.0 157 | [0.2.9]: https://github.com/taiki-e/easy-ext/compare/v0.2.8...v0.2.9 158 | [0.2.8]: https://github.com/taiki-e/easy-ext/compare/v0.2.7...v0.2.8 159 | [0.2.7]: https://github.com/taiki-e/easy-ext/compare/v0.2.6...v0.2.7 160 | [0.2.6]: https://github.com/taiki-e/easy-ext/compare/v0.2.5...v0.2.6 161 | [0.2.5]: https://github.com/taiki-e/easy-ext/compare/v0.2.4...v0.2.5 162 | [0.2.4]: https://github.com/taiki-e/easy-ext/compare/v0.2.3...v0.2.4 163 | [0.2.3]: https://github.com/taiki-e/easy-ext/compare/v0.2.2...v0.2.3 164 | [0.2.2]: https://github.com/taiki-e/easy-ext/compare/v0.2.1...v0.2.2 165 | [0.2.1]: https://github.com/taiki-e/easy-ext/compare/v0.2.0...v0.2.1 166 | [0.2.0]: https://github.com/taiki-e/easy-ext/compare/v0.1.8...v0.2.0 167 | [0.1.8]: https://github.com/taiki-e/easy-ext/compare/v0.1.7...v0.1.8 168 | [0.1.7]: https://github.com/taiki-e/easy-ext/compare/v0.1.6...v0.1.7 169 | [0.1.6]: https://github.com/taiki-e/easy-ext/compare/v0.1.5...v0.1.6 170 | [0.1.5]: https://github.com/taiki-e/easy-ext/compare/v0.1.4...v0.1.5 171 | [0.1.4]: https://github.com/taiki-e/easy-ext/compare/v0.1.3...v0.1.4 172 | [0.1.3]: https://github.com/taiki-e/easy-ext/compare/v0.1.2...v0.1.3 173 | [0.1.2]: https://github.com/taiki-e/easy-ext/compare/v0.1.1...v0.1.2 174 | [0.1.1]: https://github.com/taiki-e/easy-ext/compare/v0.1.0...v0.1.1 175 | [0.1.0]: https://github.com/taiki-e/easy-ext/releases/tag/v0.1.0 176 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "easy-ext" 3 | version = "1.0.2" 
#publish:version 4 | edition = "2018" 5 | rust-version = "1.31" 6 | license = "Apache-2.0 OR MIT" 7 | repository = "https://github.com/taiki-e/easy-ext" 8 | keywords = ["extension", "trait", "macros", "attribute"] 9 | categories = ["no-std", "no-std::no-alloc", "rust-patterns"] 10 | exclude = ["/.*", "/tools"] 11 | description = """ 12 | A lightweight attribute macro for easily writing extension trait pattern. 13 | """ 14 | 15 | [package.metadata.docs.rs] 16 | targets = ["x86_64-unknown-linux-gnu"] 17 | 18 | [lib] 19 | proc-macro = true 20 | 21 | [dev-dependencies] 22 | rustversion = "1" 23 | trybuild = { git = "https://github.com/taiki-e/trybuild.git", branch = "dev" } # adjust overwrite behavior 24 | 25 | [lints] 26 | workspace = true 27 | 28 | [workspace] 29 | 30 | # This table is shared by projects under github.com/taiki-e. 31 | # Except for unexpected_cfgs.check-cfg, it is not intended for manual editing. 32 | [workspace.lints.rust] 33 | deprecated_safe = "warn" 34 | improper_ctypes = "warn" 35 | improper_ctypes_definitions = "warn" 36 | non_ascii_idents = "warn" 37 | rust_2018_idioms = "warn" 38 | single_use_lifetimes = "warn" 39 | unexpected_cfgs = { level = "warn", check-cfg = [ 40 | ] } 41 | unnameable_types = "warn" 42 | unreachable_pub = "warn" 43 | # unsafe_op_in_unsafe_fn = "warn" # Set at crate-level instead since https://github.com/rust-lang/rust/pull/100081 merged in Rust 1.65 is not available on MSRV 44 | [workspace.lints.clippy] 45 | all = "warn" # Downgrade deny-by-default lints 46 | pedantic = "warn" 47 | as_ptr_cast_mut = "warn" 48 | as_underscore = "warn" 49 | default_union_representation = "warn" 50 | inline_asm_x86_att_syntax = "warn" 51 | trailing_empty_array = "warn" 52 | transmute_undefined_repr = "warn" 53 | undocumented_unsafe_blocks = "warn" 54 | unused_trait_names = "warn" 55 | # Suppress buggy or noisy clippy lints 56 | bool_assert_comparison = { level = "allow", priority = 1 } 57 | borrow_as_ptr = { level = "allow", priority = 1 } # 
https://github.com/rust-lang/rust-clippy/issues/8286 58 | cast_lossless = { level = "allow", priority = 1 } # https://godbolt.org/z/Pv6vbGG6E 59 | declare_interior_mutable_const = { level = "allow", priority = 1 } # https://github.com/rust-lang/rust-clippy/issues/7665 60 | doc_markdown = { level = "allow", priority = 1 } 61 | float_cmp = { level = "allow", priority = 1 } # https://github.com/rust-lang/rust-clippy/issues/7725 62 | incompatible_msrv = { level = "allow", priority = 1 } # buggy: doesn't consider cfg, https://github.com/rust-lang/rust-clippy/issues/12280, https://github.com/rust-lang/rust-clippy/issues/12257#issuecomment-2093667187 63 | lint_groups_priority = { level = "allow", priority = 1 } # https://github.com/rust-lang/rust-clippy/issues/12920 64 | manual_assert = { level = "allow", priority = 1 } 65 | manual_range_contains = { level = "allow", priority = 1 } # https://github.com/rust-lang/rust-clippy/issues/6455#issuecomment-1225966395 66 | missing_errors_doc = { level = "allow", priority = 1 } 67 | module_name_repetitions = { level = "allow", priority = 1 } # buggy: https://github.com/rust-lang/rust-clippy/issues?q=is%3Aissue+is%3Aopen+module_name_repetitions 68 | naive_bytecount = { level = "allow", priority = 1 } 69 | nonminimal_bool = { level = "allow", priority = 1 } # buggy: https://github.com/rust-lang/rust-clippy/issues?q=is%3Aissue+is%3Aopen+nonminimal_bool 70 | range_plus_one = { level = "allow", priority = 1 } # buggy: https://github.com/rust-lang/rust-clippy/issues?q=is%3Aissue+is%3Aopen+range_plus_one 71 | similar_names = { level = "allow", priority = 1 } 72 | single_match = { level = "allow", priority = 1 } 73 | single_match_else = { level = "allow", priority = 1 } 74 | struct_excessive_bools = { level = "allow", priority = 1 } 75 | struct_field_names = { level = "allow", priority = 1 } 76 | too_many_arguments = { level = "allow", priority = 1 } 77 | too_many_lines = { level = "allow", priority = 1 } 78 | type_complexity = { level = 
"allow", priority = 1 } 79 | unreadable_literal = { level = "allow", priority = 1 } 80 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 
35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. 
In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 
176 | 177 | END OF TERMS AND CONDITIONS 178 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | Permission is hereby granted, free of charge, to any 2 | person obtaining a copy of this software and associated 3 | documentation files (the "Software"), to deal in the 4 | Software without restriction, including without 5 | limitation the rights to use, copy, modify, merge, 6 | publish, distribute, sublicense, and/or sell copies of 7 | the Software, and to permit persons to whom the Software 8 | is furnished to do so, subject to the following 9 | conditions: 10 | 11 | The above copyright notice and this permission notice 12 | shall be included in all copies or substantial portions 13 | of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 16 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 17 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 18 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT 19 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 20 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 21 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 22 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 23 | DEALINGS IN THE SOFTWARE. 
24 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # easy-ext 2 | 3 | [![crates.io](https://img.shields.io/crates/v/easy-ext?style=flat-square&logo=rust)](https://crates.io/crates/easy-ext) 4 | [![docs.rs](https://img.shields.io/badge/docs.rs-easy--ext-blue?style=flat-square&logo=docs.rs)](https://docs.rs/easy-ext) 5 | [![license](https://img.shields.io/badge/license-Apache--2.0_OR_MIT-blue?style=flat-square)](#license) 6 | [![msrv](https://img.shields.io/badge/msrv-1.31-blue?style=flat-square&logo=rust)](https://www.rust-lang.org) 7 | [![github actions](https://img.shields.io/github/actions/workflow/status/taiki-e/easy-ext/ci.yml?branch=main&style=flat-square&logo=github)](https://github.com/taiki-e/easy-ext/actions) 8 | 9 | 10 | 11 | A lightweight attribute macro for easily writing [extension trait pattern][rfc0445]. 12 | 13 | ```toml 14 | [dependencies] 15 | easy-ext = "1" 16 | ``` 17 | 18 | ## Examples 19 | 20 | ```rust 21 | use easy_ext::ext; 22 | 23 | #[ext(ResultExt)] 24 | pub impl Result { 25 | fn err_into(self) -> Result 26 | where 27 | E: Into, 28 | { 29 | self.map_err(Into::into) 30 | } 31 | } 32 | ``` 33 | 34 | Code like this will be generated: 35 | 36 | ```rust 37 | pub trait ResultExt { 38 | fn err_into(self) -> Result 39 | where 40 | E: Into; 41 | } 42 | 43 | impl ResultExt for Result { 44 | fn err_into(self) -> Result 45 | where 46 | E: Into, 47 | { 48 | self.map_err(Into::into) 49 | } 50 | } 51 | ``` 52 | 53 | You can elide the trait name. 54 | 55 | ```rust 56 | use easy_ext::ext; 57 | 58 | #[ext] 59 | impl Result { 60 | fn err_into(self) -> Result 61 | where 62 | E: Into, 63 | { 64 | self.map_err(Into::into) 65 | } 66 | } 67 | ``` 68 | 69 | Note that in this case, `#[ext]` assigns a random name, so you cannot 70 | import/export the generated trait. 
71 | 72 | ### Visibility 73 | 74 | There are two ways to specify visibility. 75 | 76 | #### Impl-level visibility 77 | 78 | The first way is to specify visibility at the impl level. For example: 79 | 80 | ```rust 81 | use easy_ext::ext; 82 | 83 | // unnamed 84 | #[ext] 85 | pub impl str { 86 | fn foo(&self) {} 87 | } 88 | 89 | // named 90 | #[ext(StrExt)] 91 | pub impl str { 92 | fn bar(&self) {} 93 | } 94 | ``` 95 | 96 | #### Associated-item-level visibility 97 | 98 | Another way is to specify visibility at the associated item level. 99 | 100 | For example, if the method is `pub` then the trait will also be `pub`: 101 | 102 | ```rust 103 | use easy_ext::ext; 104 | 105 | #[ext(ResultExt)] // generate `pub trait ResultExt` 106 | impl Result { 107 | pub fn err_into(self) -> Result 108 | where 109 | E: Into, 110 | { 111 | self.map_err(Into::into) 112 | } 113 | } 114 | ``` 115 | 116 | This is useful when migrate from an inherent impl to an extension trait. 117 | 118 | Note that the visibility of all the associated items in the `impl` must be identical. 119 | 120 | Note that you cannot specify impl-level visibility and associated-item-level visibility at the same time. 121 | 122 | ### [Supertraits](https://doc.rust-lang.org/reference/items/traits.html#supertraits) 123 | 124 | If you want the extension trait to be a subtrait of another trait, 125 | add `Self: SubTrait` bound to the `where` clause. 
126 | 127 | ```rust 128 | use easy_ext::ext; 129 | 130 | #[ext(Ext)] 131 | impl T 132 | where 133 | Self: Default, 134 | { 135 | fn method(&self) {} 136 | } 137 | ``` 138 | 139 | ### Supported items 140 | 141 | #### [Associated functions (methods)](https://doc.rust-lang.org/reference/items/associated-items.html#associated-functions-and-methods) 142 | 143 | ```rust 144 | use easy_ext::ext; 145 | 146 | #[ext] 147 | impl T { 148 | fn method(&self) {} 149 | } 150 | ``` 151 | 152 | #### [Associated constants](https://doc.rust-lang.org/reference/items/associated-items.html#associated-constants) 153 | 154 | ```rust 155 | use easy_ext::ext; 156 | 157 | #[ext] 158 | impl T { 159 | const MSG: &'static str = "Hello!"; 160 | } 161 | ``` 162 | 163 | #### [Associated types](https://doc.rust-lang.org/reference/items/associated-items.html#associated-types) 164 | 165 | ```rust 166 | use easy_ext::ext; 167 | 168 | #[ext] 169 | impl str { 170 | type Owned = String; 171 | 172 | fn method(&self) -> Self::Owned { 173 | self.to_owned() 174 | } 175 | } 176 | ``` 177 | 178 | [rfc0445]: https://rust-lang.github.io/rfcs/0445-extension-trait-conventions.html 179 | 180 | 181 | 182 | ## License 183 | 184 | Licensed under either of [Apache License, Version 2.0](LICENSE-APACHE) or 185 | [MIT license](LICENSE-MIT) at your option. 186 | 187 | Unless you explicitly state otherwise, any contribution intentionally submitted 188 | for inclusion in the work by you, as defined in the Apache-2.0 license, shall 189 | be dual licensed as above, without any additional terms or conditions. 
190 | -------------------------------------------------------------------------------- /src/ast.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 OR MIT 2 | 3 | // Based on: 4 | // - https://github.com/dtolnay/syn/blob/1.0.70/src/item.rs 5 | // - https://github.com/dtolnay/syn/blob/1.0.70/src/generics.rs 6 | 7 | use std::fmt; 8 | 9 | use proc_macro::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree}; 10 | 11 | #[derive(Clone)] 12 | pub(crate) struct Lifetime { 13 | pub(crate) apostrophe: Span, 14 | pub(crate) ident: Ident, 15 | } 16 | 17 | #[derive(Clone, Default)] 18 | pub(crate) struct Generics { 19 | pub(crate) lt_token: Option, 20 | pub(crate) params: Vec<(GenericParam, Option)>, 21 | pub(crate) gt_token: Option, 22 | pub(crate) where_clause: Option, 23 | } 24 | 25 | impl Generics { 26 | pub(crate) fn make_where_clause(&mut self) -> &mut WhereClause { 27 | self.where_clause.get_or_insert_with(|| WhereClause { 28 | where_token: Ident::new("where", Span::call_site()), 29 | predicates: vec![], 30 | }) 31 | } 32 | 33 | pub(crate) fn impl_generics(&self) -> ImplGenerics<'_> { 34 | ImplGenerics(self) 35 | } 36 | 37 | pub(crate) fn ty_generics(&self) -> TypeGenerics<'_> { 38 | TypeGenerics(self) 39 | } 40 | } 41 | 42 | #[derive(Clone)] 43 | pub(crate) enum GenericParam { 44 | /// A generic type parameter: `T: Into`. 45 | Type(TypeParam), 46 | /// A lifetime definition: `'a: 'b + 'c + 'd`. 47 | Lifetime(LifetimeDef), 48 | /// A const generic parameter: `const LENGTH: usize`. 
49 | Const(ConstParam), 50 | } 51 | 52 | #[derive(Clone)] 53 | pub(crate) struct TypeParam { 54 | pub(crate) attrs: Vec, 55 | pub(crate) ident: Ident, 56 | pub(crate) colon_token: Option, 57 | pub(crate) bounds: Vec<(TypeParamBound, Option)>, 58 | pub(crate) eq_token: Option, 59 | pub(crate) default: Option, 60 | } 61 | 62 | #[derive(Clone)] 63 | pub(crate) struct TypeParamBound { 64 | pub(crate) tokens: TokenStream, 65 | pub(crate) is_maybe: bool, 66 | } 67 | 68 | impl TypeParamBound { 69 | fn new(tokens: Vec, is_maybe: bool) -> Self { 70 | Self { tokens: tokens.into_iter().collect(), is_maybe } 71 | } 72 | } 73 | 74 | #[derive(Clone)] 75 | pub(crate) struct LifetimeDef { 76 | pub(crate) attrs: Vec, 77 | pub(crate) lifetime: Lifetime, 78 | pub(crate) colon_token: Option, 79 | pub(crate) bounds: TokenStream, 80 | } 81 | 82 | #[derive(Clone)] 83 | pub(crate) struct BoundLifetimes { 84 | pub(crate) for_token: Ident, 85 | pub(crate) lt_token: Punct, 86 | pub(crate) lifetimes: Vec<(LifetimeDef, Option)>, 87 | pub(crate) gt_token: Punct, 88 | } 89 | 90 | #[derive(Clone)] 91 | pub(crate) struct ConstParam { 92 | pub(crate) attrs: Vec, 93 | pub(crate) const_token: Ident, 94 | pub(crate) ident: Ident, 95 | pub(crate) colon_token: Punct, 96 | pub(crate) ty: TokenStream, 97 | pub(crate) eq_token: Option, 98 | pub(crate) default: Option, 99 | } 100 | 101 | pub(crate) struct ImplGenerics<'a>(&'a Generics); 102 | pub(crate) struct TypeGenerics<'a>(&'a Generics); 103 | 104 | #[derive(Clone)] 105 | pub(crate) struct WhereClause { 106 | pub(crate) where_token: Ident, 107 | pub(crate) predicates: Vec<(WherePredicate, Option)>, 108 | } 109 | 110 | #[derive(Clone)] 111 | pub(crate) enum WherePredicate { 112 | Type(PredicateType), 113 | Lifetime(PredicateLifetime), 114 | } 115 | 116 | #[derive(Clone)] 117 | pub(crate) struct PredicateType { 118 | pub(crate) lifetimes: Option, 119 | pub(crate) bounded_ty: TokenStream, 120 | pub(crate) colon_token: Punct, 121 | pub(crate) bounds: 
Vec<(TypeParamBound, Option)>, 122 | } 123 | 124 | #[derive(Clone)] 125 | pub(crate) struct PredicateLifetime { 126 | pub(crate) lifetime: Lifetime, 127 | pub(crate) colon_token: Punct, 128 | pub(crate) bounds: Vec<(Lifetime, Option)>, 129 | } 130 | 131 | // Outer attribute 132 | #[derive(Clone)] 133 | pub(crate) struct Attribute { 134 | // `#` 135 | pub(crate) pound_token: Punct, 136 | // `[...]` 137 | pub(crate) tokens: Group, 138 | pub(crate) kind: AttributeKind, 139 | } 140 | 141 | #[derive(Clone, Copy, PartialEq)] 142 | pub(crate) enum AttributeKind { 143 | // #[doc ...] 144 | Doc, 145 | // #[inline ...] 146 | Inline, 147 | Other, 148 | } 149 | 150 | impl Attribute { 151 | pub(crate) fn new(tokens: Vec) -> Self { 152 | Self { 153 | pound_token: Punct::new('#', Spacing::Alone), 154 | tokens: Group::new(Delimiter::Bracket, tokens.into_iter().collect()), 155 | kind: AttributeKind::Other, 156 | } 157 | } 158 | } 159 | 160 | #[derive(Clone)] 161 | pub(crate) enum Visibility { 162 | // `pub`. 
163 | Public(Ident), 164 | //`pub(self)`, `pub(super)`, `pub(crate)`, or `pub(in some::module)` 165 | Restricted(Ident, Group), 166 | Inherited, 167 | } 168 | 169 | impl Visibility { 170 | pub(crate) fn is_inherited(&self) -> bool { 171 | match self { 172 | Visibility::Inherited => true, 173 | _ => false, 174 | } 175 | } 176 | } 177 | 178 | impl PartialEq for Visibility { 179 | fn eq(&self, other: &Self) -> bool { 180 | match (self, other) { 181 | (Visibility::Public(_), Visibility::Public(_)) 182 | | (Visibility::Inherited, Visibility::Inherited) => true, 183 | (Visibility::Restricted(_, x), Visibility::Restricted(_, y)) => { 184 | x.stream().to_string() == y.stream().to_string() 185 | } 186 | _ => false, 187 | } 188 | } 189 | } 190 | 191 | impl fmt::Display for Visibility { 192 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 193 | match self { 194 | Visibility::Public(_) => f.write_str("pub"), 195 | Visibility::Inherited => Ok(()), 196 | Visibility::Restricted(_, g) => write!(f, "pub{}", g), 197 | } 198 | } 199 | } 200 | 201 | pub(crate) struct ItemImpl { 202 | pub(crate) attrs: Vec, 203 | pub(crate) vis: Visibility, 204 | pub(crate) defaultness: Option, 205 | pub(crate) unsafety: Option, 206 | pub(crate) impl_token: Ident, 207 | pub(crate) generics: Generics, 208 | pub(crate) const_token: Option, 209 | pub(crate) trait_: Option<(Ident, TokenStream, Ident)>, 210 | pub(crate) self_ty: Vec, 211 | pub(crate) brace_token: Span, 212 | pub(crate) items: Vec, 213 | } 214 | 215 | pub(crate) enum ImplItem { 216 | Const(ImplItemConst), 217 | Method(ImplItemMethod), 218 | Type(ImplItemType), 219 | } 220 | 221 | pub(crate) struct ImplItemConst { 222 | pub(crate) attrs: Vec, 223 | pub(crate) vis: Visibility, 224 | pub(crate) defaultness: Option, 225 | pub(crate) const_token: Ident, 226 | pub(crate) ident: Ident, 227 | pub(crate) colon_token: Punct, 228 | pub(crate) ty: TokenStream, 229 | pub(crate) eq_token: Punct, 230 | pub(crate) expr: Vec, 231 | pub(crate) 
semi_token: Punct, 232 | } 233 | 234 | pub(crate) struct ImplItemMethod { 235 | pub(crate) attrs: Vec, 236 | pub(crate) vis: Visibility, 237 | pub(crate) defaultness: Option, 238 | pub(crate) sig: Signature, 239 | pub(crate) body: Group, 240 | } 241 | 242 | pub(crate) struct ImplItemType { 243 | pub(crate) attrs: Vec, 244 | pub(crate) vis: Visibility, 245 | pub(crate) defaultness: Option, 246 | pub(crate) type_token: Ident, 247 | pub(crate) ident: Ident, 248 | pub(crate) generics: Generics, 249 | pub(crate) eq_token: Punct, 250 | pub(crate) ty: Vec, 251 | pub(crate) semi_token: Punct, 252 | } 253 | 254 | #[derive(Clone)] 255 | pub(crate) struct Signature { 256 | // [const] [async] [unsafe] [extern []] fn 257 | pub(crate) before_ident: Vec, 258 | pub(crate) ident: Ident, 259 | pub(crate) generics: Generics, 260 | pub(crate) paren_token: Span, 261 | pub(crate) inputs: Vec, 262 | pub(crate) output: Option, 263 | } 264 | 265 | #[derive(Clone)] 266 | pub(crate) enum FnArg { 267 | Receiver(TokenStream, Option), 268 | Typed(TokenStream, Punct, TokenStream, Option), 269 | } 270 | 271 | pub(crate) struct ItemTrait { 272 | pub(crate) attrs: Vec, 273 | pub(crate) vis: Visibility, 274 | pub(crate) unsafety: Option, 275 | pub(crate) trait_token: Ident, 276 | pub(crate) ident: Ident, 277 | pub(crate) generics: Generics, 278 | pub(crate) brace_token: Span, 279 | pub(crate) items: Vec, 280 | } 281 | 282 | pub(crate) enum TraitItem { 283 | Const(TraitItemConst), 284 | Method(TraitItemMethod), 285 | Type(TraitItemType), 286 | } 287 | 288 | pub(crate) struct TraitItemConst { 289 | pub(crate) attrs: Vec, 290 | pub(crate) const_token: Ident, 291 | pub(crate) ident: Ident, 292 | pub(crate) colon_token: Punct, 293 | pub(crate) ty: TokenStream, 294 | pub(crate) semi_token: Punct, 295 | } 296 | 297 | pub(crate) struct TraitItemMethod { 298 | pub(crate) attrs: Vec, 299 | pub(crate) sig: Signature, 300 | pub(crate) semi_token: Punct, 301 | } 302 | 303 | pub(crate) struct TraitItemType { 304 
| pub(crate) attrs: Vec, 305 | pub(crate) type_token: Ident, 306 | pub(crate) ident: Ident, 307 | pub(crate) generics: Generics, 308 | pub(crate) semi_token: Punct, 309 | } 310 | 311 | pub(crate) mod parsing { 312 | use std::iter::FromIterator; 313 | 314 | use proc_macro::{Delimiter, Punct, Spacing, TokenStream, TokenTree}; 315 | 316 | use super::{ 317 | Attribute, AttributeKind, BoundLifetimes, ConstParam, FnArg, GenericParam, Generics, 318 | ImplItem, ImplItemConst, ImplItemMethod, ImplItemType, ItemImpl, Lifetime, LifetimeDef, 319 | PredicateLifetime, PredicateType, Signature, TypeParam, TypeParamBound, Visibility, 320 | WhereClause, WherePredicate, 321 | }; 322 | use crate::{error::Result, iter::TokenIter, to_tokens::ToTokens}; 323 | 324 | fn parse_until_punct(input: &mut TokenIter, ch: char) -> Result<(Vec, Punct)> { 325 | let mut buf = vec![]; 326 | loop { 327 | let tt = input.next(); 328 | match tt { 329 | Some(TokenTree::Punct(ref p)) 330 | if p.as_char() == ch && p.spacing() == Spacing::Alone => 331 | { 332 | if let Some(TokenTree::Punct(p)) = tt { 333 | return Ok((buf, p)); 334 | } 335 | unreachable!(); 336 | } 337 | None => { 338 | // TODO: pass scope span 339 | bail!(TokenStream::new(), "expected `{}`", ch); 340 | } 341 | Some(tt) => buf.push(tt), 342 | } 343 | } 344 | } 345 | 346 | fn append_tokens_until( 347 | input: &mut TokenIter, 348 | buf: &mut Vec, 349 | visit_first_angle_bracket: bool, 350 | f: fn(Option<&TokenTree>) -> bool, 351 | ) -> Result<()> { 352 | let mut angle_bracket: i32 = 0 - visit_first_angle_bracket as i32; 353 | loop { 354 | let tt = input.peek(); 355 | match tt { 356 | Some(TokenTree::Punct(p)) if p.as_char() == '<' => { 357 | angle_bracket += 1; 358 | } 359 | Some(TokenTree::Punct(p)) if p.as_char() == '>' => { 360 | match buf.last() { 361 | Some(TokenTree::Punct(p)) 362 | if p.as_char() == '-' && p.spacing() == Spacing::Joint => 363 | { 364 | // `->` 365 | // It's so confusing with `>`, so do not visit it. 
366 | buf.push(input.next().unwrap()); 367 | continue; 368 | } 369 | _ => {} 370 | } 371 | angle_bracket -= 1; 372 | if angle_bracket >= 0 { 373 | buf.push(input.next().unwrap()); 374 | continue; 375 | } 376 | } 377 | Some(_) | None => {} 378 | } 379 | if angle_bracket <= 0 && f(tt) { 380 | return Ok(()); 381 | } 382 | buf.push(input.next().ok_or_else(|| { 383 | // TODO: pass scope span 384 | format_err!(TokenStream::new(), "unexpected end of input") 385 | })?); 386 | } 387 | } 388 | 389 | fn parse_attrs(input: &mut TokenIter) -> Result> { 390 | let mut attrs = vec![]; 391 | while input.peek_t(&'#') { 392 | let pound_token = input.parse_punct('#')?; 393 | let tokens = input.parse_group(Delimiter::Bracket)?; 394 | let mut kind = AttributeKind::Other; 395 | let mut iter = TokenIter::new(tokens.stream()); 396 | if let Some(TokenTree::Ident(i)) = iter.next() { 397 | match iter.next() { 398 | // ignore #[path ...] 399 | Some(TokenTree::Punct(ref p)) if p.as_char() == ':' => {} 400 | _ => match &*i.to_string() { 401 | "doc" => kind = AttributeKind::Doc, 402 | "inline" => kind = AttributeKind::Inline, 403 | _ => {} 404 | }, 405 | } 406 | } 407 | 408 | let attr = Attribute { pound_token, tokens, kind }; 409 | attrs.push(attr); 410 | } 411 | Ok(attrs) 412 | } 413 | 414 | fn parse_generics(input: &mut TokenIter) -> Result { 415 | if !input.peek_t(&'<') { 416 | return Ok(Generics::default()); 417 | } 418 | 419 | let lt_token = input.parse_punct('<')?; 420 | 421 | let mut params = vec![]; 422 | loop { 423 | if input.peek_t(&'>') { 424 | break; 425 | } 426 | 427 | let attrs = parse_attrs(input)?; 428 | let value = if input.peek_lifetime() { 429 | GenericParam::Lifetime(LifetimeDef { attrs, ..parse_lifetime_def(input)? }) 430 | } else if input.peek_t(&"const") { 431 | GenericParam::Const(ConstParam { attrs, ..parse_const_param(input)? 
}) 432 | } else if input.peek_t(&"_") { 433 | GenericParam::Type(TypeParam { 434 | attrs, 435 | ident: input.parse_ident()?, 436 | colon_token: None, 437 | bounds: vec![], 438 | eq_token: None, 439 | default: None, 440 | }) 441 | } else if input.peek_ident().is_some() { 442 | GenericParam::Type(TypeParam { attrs, ..parse_type_param(input)? }) 443 | } else { 444 | bail!(input.next(), "expected one of: lifetime, identifier, `const`, `_`"); 445 | }; 446 | 447 | if input.peek_t(&'>') { 448 | params.push((value, None)); 449 | break; 450 | } 451 | let punct = input.parse_punct(',')?; 452 | params.push((value, Some(punct))); 453 | } 454 | 455 | let gt_token = input.parse_punct('>')?; 456 | 457 | Ok(Generics { 458 | lt_token: Some(lt_token), 459 | params, 460 | gt_token: Some(gt_token), 461 | where_clause: None, 462 | }) 463 | } 464 | 465 | fn parse_lifetime(input: &mut TokenIter) -> Result { 466 | let tt = input.next(); 467 | match &tt { 468 | Some(TokenTree::Punct(p)) if p.as_char() == '\'' && p.spacing() == Spacing::Joint => { 469 | match input.next() { 470 | Some(TokenTree::Ident(ident)) => Ok(Lifetime { apostrophe: p.span(), ident }), 471 | Some(tt2) => { 472 | bail!(TokenStream::from_iter(vec![tt.unwrap(), tt2]), "expected lifetime") 473 | } 474 | None => bail!(p, "expected lifetime"), 475 | } 476 | } 477 | // TODO: pass scope span if tt is None 478 | tt => bail!(tt, "expected lifetime"), 479 | } 480 | } 481 | 482 | fn parse_lifetime_def(input: &mut TokenIter) -> Result { 483 | let attrs = parse_attrs(input)?; 484 | let lifetime = parse_lifetime(input)?; 485 | let colon_token = input.parse_punct_opt(':'); 486 | 487 | let mut bounds = TokenStream::new(); 488 | if colon_token.is_some() { 489 | loop { 490 | if input.peek_t(&',') || input.peek_t(&'>') { 491 | break; 492 | } 493 | let value = parse_lifetime(input)?; 494 | value.to_tokens(&mut bounds); 495 | if !input.peek_t(&'+') { 496 | break; 497 | } 498 | let punct = input.parse_punct('+')?; 499 | punct.to_tokens(&mut 
bounds); 500 | } 501 | } 502 | 503 | Ok(LifetimeDef { attrs, lifetime, colon_token, bounds }) 504 | } 505 | 506 | fn parse_bound_lifetimes(input: &mut TokenIter) -> Result { 507 | Ok(BoundLifetimes { 508 | for_token: input.parse_kw("for")?, 509 | lt_token: input.parse_punct('<')?, 510 | lifetimes: { 511 | let mut lifetimes = vec![]; 512 | while !input.peek_t(&'>') { 513 | let lifetime = parse_lifetime_def(input)?; 514 | if input.peek_t(&'>') { 515 | lifetimes.push((lifetime, None)); 516 | break; 517 | } 518 | let punct = input.parse_punct(',')?; 519 | lifetimes.push((lifetime, Some(punct))); 520 | } 521 | lifetimes 522 | }, 523 | gt_token: input.parse_punct('>')?, 524 | }) 525 | } 526 | 527 | fn parse_type_param(input: &mut TokenIter) -> Result { 528 | let attrs = parse_attrs(input)?; 529 | let ident = input.parse_ident()?; 530 | let colon_token = input.parse_punct_opt(':'); 531 | 532 | let mut bounds = vec![]; 533 | if colon_token.is_some() { 534 | loop { 535 | if input.peek_t(&',') || input.peek_t(&'>') || input.peek_t(&'=') { 536 | break; 537 | } 538 | 539 | let is_maybe = input.peek_t(&'?') && !input.peek2_t(&"const"); 540 | let mut value = vec![]; 541 | append_tokens_until(input, &mut value, false, |next| match next { 542 | Some(TokenTree::Punct(p)) 543 | if p.as_char() == ',' 544 | || p.as_char() == '>' 545 | || p.as_char() == '=' 546 | || p.as_char() == '+' => 547 | { 548 | true 549 | } 550 | None => true, 551 | _ => false, 552 | })?; 553 | if !input.peek_t(&'+') { 554 | bounds.push((TypeParamBound::new(value, is_maybe), None)); 555 | break; 556 | } 557 | let punct = input.parse_punct('+')?; 558 | bounds.push((TypeParamBound::new(value, is_maybe), Some(punct))); 559 | } 560 | } 561 | 562 | let mut default = None; 563 | let eq_token = input.parse_punct_opt('='); 564 | if eq_token.is_some() { 565 | default = Some({ 566 | let mut ty = vec![]; 567 | append_tokens_until(input, &mut ty, false, |next| match next { 568 | Some(TokenTree::Punct(p)) if p.as_char() == 
'>' || p.as_char() == ',' => true, 569 | None => true, 570 | _ => false, 571 | })?; 572 | ty.into_iter().collect() 573 | }); 574 | } 575 | 576 | Ok(TypeParam { attrs, ident, colon_token, bounds, eq_token, default }) 577 | } 578 | 579 | fn const_argument(input: &mut TokenIter) -> Result { 580 | let tt = input.next(); 581 | match &tt { 582 | Some(TokenTree::Literal(_)) | Some(TokenTree::Ident(_)) => Ok(tt.unwrap()), 583 | Some(TokenTree::Group(g)) if g.delimiter() == Delimiter::Brace => Ok(tt.unwrap()), 584 | // TODO: pass scope span if tt is None 585 | _ => bail!(tt, "expected one of: literal, ident, `{`"), 586 | } 587 | } 588 | 589 | fn parse_const_param(input: &mut TokenIter) -> Result { 590 | let attrs = parse_attrs(input)?; 591 | let const_token = input.parse_kw("const")?; 592 | let ident = input.parse_ident()?; 593 | let colon_token = input.parse_punct(':')?; 594 | 595 | let mut ty = vec![]; 596 | append_tokens_until(input, &mut ty, false, |next| match next { 597 | Some(TokenTree::Punct(p)) 598 | if p.as_char() == '>' 599 | || p.as_char() == '=' && p.spacing() == Spacing::Alone 600 | || p.as_char() == ',' && p.spacing() == Spacing::Alone => 601 | { 602 | true 603 | } 604 | None => true, 605 | _ => false, 606 | })?; 607 | let mut default = None; 608 | let eq_token = if input.peek_t(&'=') { 609 | let eq_token = input.parse_punct('=')?; 610 | default = Some(std::iter::once(const_argument(input)?).collect()); 611 | Some(eq_token) 612 | } else { 613 | None 614 | }; 615 | 616 | Ok(ConstParam { 617 | attrs, 618 | const_token, 619 | ident, 620 | colon_token, 621 | ty: ty.into_iter().collect(), 622 | eq_token, 623 | default, 624 | }) 625 | } 626 | 627 | fn parse_where_clause(input: &mut TokenIter) -> Result { 628 | let where_token = input.parse_kw("where")?; 629 | let mut predicates = vec![]; 630 | loop { 631 | if input.is_empty() 632 | || input.peek_t(&Delimiter::Brace) 633 | || input.peek_t(&',') 634 | || input.peek_t(&';') 635 | || input.peek_t(&':') && 
!input.peek2_t(&':') 636 | || input.peek_t(&'=') 637 | { 638 | break; 639 | } 640 | let value = parse_where_predicate(input)?; 641 | if !input.peek_t(&',') { 642 | predicates.push((value, None)); 643 | break; 644 | } 645 | let punct = input.parse_punct(',')?; 646 | predicates.push((value, Some(punct))); 647 | } 648 | Ok(WhereClause { where_token, predicates }) 649 | } 650 | 651 | fn parse_where_predicate(input: &mut TokenIter) -> Result { 652 | if input.peek_lifetime() && input.peek3_t(&':') { 653 | Ok(WherePredicate::Lifetime(PredicateLifetime { 654 | lifetime: parse_lifetime(input)?, 655 | colon_token: input.parse_punct(':')?, 656 | bounds: { 657 | let mut bounds = vec![]; 658 | loop { 659 | if input.is_empty() 660 | || input.peek_t(&Delimiter::Brace) 661 | || input.peek_t(&',') 662 | || input.peek_t(&';') 663 | || input.peek_t(&':') 664 | || input.peek_t(&'=') 665 | { 666 | break; 667 | } 668 | let value = parse_lifetime(input)?; 669 | if !input.peek_t(&'+') { 670 | bounds.push((value, None)); 671 | break; 672 | } 673 | let punct = input.parse_punct('+')?; 674 | bounds.push((value, Some(punct))); 675 | } 676 | bounds 677 | }, 678 | })) 679 | } else { 680 | Ok(WherePredicate::Type(PredicateType { 681 | lifetimes: { 682 | if input.peek_t(&"for") { Some(parse_bound_lifetimes(input)?) 
} else { None } 683 | }, 684 | bounded_ty: { 685 | let mut ty = vec![]; 686 | append_tokens_until(input, &mut ty, false, |next| match next { 687 | Some(TokenTree::Group(g)) if g.delimiter() == Delimiter::Brace => true, 688 | Some(TokenTree::Punct(p)) 689 | if p.as_char() == ',' 690 | || p.as_char() == '=' && p.spacing() == Spacing::Alone 691 | || p.as_char() == ':' && p.spacing() == Spacing::Alone => 692 | { 693 | true 694 | } 695 | _ => false, 696 | })?; 697 | ty.into_iter().collect() 698 | }, 699 | colon_token: input.parse_punct(':')?, 700 | bounds: { 701 | let mut bounds = vec![]; 702 | loop { 703 | if input.is_empty() 704 | || input.peek_t(&Delimiter::Brace) 705 | || input.peek_t(&',') 706 | || input.peek_t(&';') 707 | || input.peek_t(&':') && !input.peek2_t(&':') 708 | || input.peek_t(&'=') 709 | { 710 | break; 711 | } 712 | 713 | let is_maybe = input.peek_t(&'?') && !input.peek2_t(&"const"); 714 | let mut value = vec![]; 715 | append_tokens_until(input, &mut value, false, |next| match next { 716 | Some(TokenTree::Group(g)) if g.delimiter() == Delimiter::Brace => true, 717 | Some(TokenTree::Punct(p)) 718 | if p.as_char() == ',' 719 | || p.as_char() == '>' 720 | || p.as_char() == '=' 721 | || p.as_char() == '+' => 722 | { 723 | true 724 | } 725 | None => true, 726 | _ => false, 727 | })?; 728 | if !input.peek_t(&'+') { 729 | bounds.push((TypeParamBound::new(value, is_maybe), None)); 730 | break; 731 | } 732 | let punct = input.parse_punct('+')?; 733 | bounds.push((TypeParamBound::new(value, is_maybe), Some(punct))); 734 | } 735 | bounds 736 | }, 737 | })) 738 | } 739 | } 740 | 741 | pub(crate) fn parse_visibility(input: &mut TokenIter) -> Result { 742 | if input.peek_t(&"pub") { 743 | let pub_token = input.parse_ident()?; 744 | match input.peek() { 745 | Some(TokenTree::Group(g)) if g.delimiter() == Delimiter::Parenthesis {} => { 746 | let g = input.parse_group(Delimiter::Parenthesis)?; 747 | Ok(Visibility::Restricted(pub_token, g)) 748 | } 749 | _ => 
Ok(Visibility::Public(pub_token)), 750 | } 751 | } else { 752 | Ok(Visibility::Inherited) 753 | } 754 | } 755 | 756 | pub(crate) fn parse_inputs(input: TokenStream) -> Result> { 757 | let input = &mut TokenIter::new(input); 758 | let mut inputs = vec![]; 759 | 760 | loop { 761 | let mut pat = vec![]; 762 | append_tokens_until(input, &mut pat, false, |next| match next { 763 | Some(TokenTree::Punct(p)) if p.as_char() == ',' || p.as_char() == ':' => true, 764 | None => true, 765 | _ => false, 766 | })?; 767 | if !input.peek_t(&':') { 768 | if input.peek_t(&',') { 769 | inputs.push(FnArg::Receiver( 770 | pat.into_iter().collect(), 771 | Some(input.next().unwrap()), 772 | )); 773 | continue; 774 | } 775 | assert!(input.next().is_none()); 776 | inputs.push(FnArg::Receiver(pat.into_iter().collect(), None)); 777 | break; 778 | } 779 | let colon = input.parse_punct(':')?; 780 | let mut ty = vec![]; 781 | append_tokens_until(input, &mut ty, false, |next| match next { 782 | Some(TokenTree::Punct(p)) if p.as_char() == ',' => true, 783 | None => true, 784 | _ => false, 785 | })?; 786 | if input.peek_t(&',') { 787 | inputs.push(FnArg::Typed( 788 | pat.into_iter().collect(), 789 | colon, 790 | ty.into_iter().collect(), 791 | Some(input.next().unwrap()), 792 | )); 793 | continue; 794 | } 795 | assert!(input.next().is_none()); 796 | inputs.push(FnArg::Typed( 797 | pat.into_iter().collect(), 798 | colon, 799 | ty.into_iter().collect(), 800 | None, 801 | )); 802 | break; 803 | } 804 | 805 | Ok(inputs) 806 | } 807 | 808 | pub(crate) fn parse_impl(input: &mut TokenIter) -> Result { 809 | let attrs = parse_attrs(input)?; 810 | let vis: Visibility = parse_visibility(input)?; 811 | let defaultness = input.parse_kw_opt("default"); 812 | let unsafety = input.parse_kw_opt("unsafe"); 813 | let impl_token = input.parse_kw("impl")?; 814 | 815 | let has_generics = input.peek_t(&'<') 816 | && (input.peek2_t(&'>') 817 | || input.peek2_t(&'#') 818 | || input.peek2_ident().is_some() 819 | && 
(input.peek3_t(&':') 820 | || input.peek3_t(&',') 821 | || input.peek3_t(&'>') 822 | || input.peek3_t(&'=')) 823 | || input.peek2_lifetime() 824 | && (input.peek4_t(&':') 825 | || input.peek4_t(&',') 826 | || input.peek4_t(&'>') 827 | || input.peek4_t(&'=')) 828 | || input.peek2_t(&"const")); 829 | let mut generics: Generics = 830 | if has_generics { parse_generics(input)? } else { Generics::default() }; 831 | 832 | let const_token = input.parse_kw_opt("const"); 833 | 834 | let mut self_ty = vec![]; 835 | append_tokens_until(input, &mut self_ty, false, |next| match next { 836 | Some(TokenTree::Group(g)) if g.delimiter() == Delimiter::Brace => true, 837 | Some(TokenTree::Ident(i)) if i.to_string() == "where" => true, 838 | _ => false, 839 | })?; 840 | 841 | if input.peek_t(&"where") { 842 | generics.where_clause = Some(parse_where_clause(input)?); 843 | } 844 | 845 | let g = input.parse_group(Delimiter::Brace)?; 846 | let brace_token = g.span(); 847 | let content = &mut TokenIter::new(g.stream()); 848 | 849 | let mut items = vec![]; 850 | while !content.is_empty() { 851 | items.push(parse_impl_item(content)?); 852 | } 853 | 854 | Ok(ItemImpl { 855 | attrs, 856 | vis, 857 | defaultness, 858 | unsafety, 859 | impl_token, 860 | generics, 861 | const_token, 862 | trait_: None, 863 | self_ty, 864 | brace_token, 865 | items, 866 | }) 867 | } 868 | 869 | fn parse_impl_item(input: &mut TokenIter) -> Result { 870 | let attrs = parse_attrs(input)?; 871 | let vis = parse_visibility(input)?; 872 | 873 | let defaultness = if input.peek_t(&"default") && !input.peek2_t(&'!') { 874 | Some(input.parse_kw("default")?) 
875 | } else { 876 | None 877 | }; 878 | 879 | if peek_signature(input) { 880 | let sig = parse_signature(input)?; 881 | let body = input.parse_group(Delimiter::Brace)?; 882 | Ok(ImplItem::Method(ImplItemMethod { attrs, vis, defaultness, sig, body })) 883 | } else if input.peek_t(&"const") { 884 | let const_token = input.parse_kw("const")?; 885 | let ident = input.parse_ident()?; 886 | let colon_token = input.parse_punct(':')?; 887 | 888 | let mut ty = vec![]; 889 | append_tokens_until(input, &mut ty, false, |next| match next { 890 | Some(TokenTree::Punct(p)) 891 | if p.as_char() == '=' && p.spacing() == Spacing::Alone 892 | || p.as_char() == ';' && p.spacing() == Spacing::Alone => 893 | { 894 | true 895 | } 896 | _ => false, 897 | })?; 898 | let eq_token = input.parse_punct('=')?; 899 | 900 | let (expr, semi_token) = parse_until_punct(input, ';')?; 901 | 902 | Ok(ImplItem::Const(ImplItemConst { 903 | attrs, 904 | vis, 905 | defaultness, 906 | const_token, 907 | ident, 908 | colon_token, 909 | ty: ty.into_iter().collect(), 910 | eq_token, 911 | expr, 912 | semi_token, 913 | })) 914 | } else if input.peek_t(&"type") { 915 | let type_token = input.parse_kw("type")?; 916 | let ident = input.parse_ident()?; 917 | let mut generics = parse_generics(input)?; 918 | 919 | if input.peek_t(&"where") { 920 | generics.where_clause = Some(parse_where_clause(input)?); 921 | } 922 | 923 | let eq_token = input.parse_punct('=')?; 924 | 925 | let (ty, semi_token) = parse_until_punct(input, ';')?; 926 | 927 | Ok(ImplItem::Type(ImplItemType { 928 | attrs, 929 | vis, 930 | defaultness, 931 | type_token, 932 | ident, 933 | generics, 934 | eq_token, 935 | ty, 936 | semi_token, 937 | })) 938 | } else { 939 | bail!(input.next(), "expected one of: `default`, `fn`, `const`, `type`") 940 | } 941 | } 942 | 943 | fn peek_signature(input: &TokenIter) -> bool { 944 | let fork = &mut input.clone(); 945 | fork.parse_kw_opt("const"); 946 | fork.parse_kw_opt("async"); 947 | 
fork.parse_kw_opt("unsafe"); 948 | if fork.peek_t(&"extern") { 949 | let _extern_token = fork.parse_kw("extern"); 950 | fork.parse_literal_opt(); 951 | } 952 | fork.peek_t(&"fn") 953 | } 954 | 955 | fn parse_signature(input: &mut TokenIter) -> Result { 956 | let mut before_ident = vec![]; 957 | loop { 958 | let tt = input.tt()?; 959 | match &tt { 960 | TokenTree::Ident(i) if i.to_string() == "fn" => { 961 | before_ident.push(tt); 962 | break; 963 | } 964 | TokenTree::Group(g) if g.delimiter() == Delimiter::None => { 965 | let mut iter = g.stream().into_iter(); 966 | if let Some(TokenTree::Ident(i)) = iter.next() { 967 | if iter.next().is_none() && i.to_string() == "fn" { 968 | before_ident.push(tt); 969 | break; 970 | } 971 | } 972 | before_ident.push(tt); 973 | } 974 | _ => before_ident.push(tt), 975 | } 976 | } 977 | 978 | let ident = input.parse_ident()?; 979 | let mut generics = parse_generics(input)?; 980 | 981 | let inputs = input.parse_group(Delimiter::Parenthesis)?; 982 | let paren_token = inputs.span(); 983 | let inputs = parse_inputs(inputs.stream())?; 984 | 985 | let output = if input.peek_punct('-').map_or(false, |p| p.spacing() == Spacing::Joint) 986 | && input.peek2_t(&'>') 987 | { 988 | let arrow1 = input.tt()?; 989 | let arrow2 = input.tt()?; 990 | let mut tokens = vec![arrow1, arrow2]; 991 | append_tokens_until(input, &mut tokens, false, |next| match next { 992 | Some(TokenTree::Group(g)) if g.delimiter() == Delimiter::Brace => true, 993 | Some(TokenTree::Ident(i)) if i.to_string() == "where" => true, 994 | None => true, 995 | _ => false, 996 | })?; 997 | Some(tokens.into_iter().collect()) 998 | } else { 999 | None 1000 | }; 1001 | 1002 | if input.peek_t(&"where") { 1003 | generics.where_clause = Some(parse_where_clause(input)?); 1004 | } 1005 | 1006 | Ok(Signature { before_ident, ident, generics, paren_token, inputs, output }) 1007 | } 1008 | } 1009 | 1010 | pub(crate) mod printing { 1011 | use proc_macro::{Delimiter, Group, Punct, Spacing, Span, 
TokenStream}; 1012 | 1013 | use super::{ 1014 | Attribute, BoundLifetimes, ConstParam, FnArg, GenericParam, Generics, ImplGenerics, 1015 | ImplItem, ImplItemConst, ImplItemMethod, ImplItemType, ItemImpl, ItemTrait, Lifetime, 1016 | LifetimeDef, PredicateLifetime, PredicateType, Signature, TraitItem, TraitItemConst, 1017 | TraitItemMethod, TraitItemType, TypeGenerics, TypeParam, TypeParamBound, Visibility, 1018 | WhereClause, WherePredicate, 1019 | }; 1020 | use crate::to_tokens::ToTokens; 1021 | 1022 | fn tokens_or_default(p: Option<&Punct>, ch: char, tokens: &mut TokenStream) { 1023 | match p { 1024 | Some(p) => p.to_tokens(tokens), 1025 | None => Punct::new(ch, Spacing::Alone).to_tokens(tokens), 1026 | } 1027 | } 1028 | 1029 | impl ToTokens for Generics { 1030 | fn to_tokens(&self, tokens: &mut TokenStream) { 1031 | if self.params.is_empty() { 1032 | return; 1033 | } 1034 | 1035 | tokens_or_default(self.lt_token.as_ref(), '<', tokens); 1036 | 1037 | // Print lifetimes before types and consts, regardless of their 1038 | // order in self.params. 1039 | // 1040 | // TODO: ordering rules for const parameters vs type parameters have 1041 | // not been settled yet. 
https://github.com/rust-lang/rust/issues/44580 1042 | let mut trailing_or_empty = true; 1043 | for (param, p) in &self.params { 1044 | if let GenericParam::Lifetime(_) = param { 1045 | param.to_tokens(tokens); 1046 | p.to_tokens(tokens); 1047 | trailing_or_empty = p.is_some(); 1048 | } 1049 | } 1050 | for (param, p) in &self.params { 1051 | match param { 1052 | GenericParam::Type(_) | GenericParam::Const(_) => { 1053 | if !trailing_or_empty { 1054 | Punct::new(',', Spacing::Alone).to_tokens(tokens); 1055 | trailing_or_empty = true; 1056 | } 1057 | param.to_tokens(tokens); 1058 | p.to_tokens(tokens); 1059 | } 1060 | GenericParam::Lifetime(_) => {} 1061 | } 1062 | } 1063 | 1064 | tokens_or_default(self.gt_token.as_ref(), '>', tokens); 1065 | } 1066 | } 1067 | 1068 | impl ToTokens for GenericParam { 1069 | fn to_tokens(&self, tokens: &mut TokenStream) { 1070 | match self { 1071 | GenericParam::Const(p) => p.to_tokens(tokens), 1072 | GenericParam::Lifetime(l) => l.to_tokens(tokens), 1073 | GenericParam::Type(t) => t.to_tokens(tokens), 1074 | } 1075 | } 1076 | } 1077 | 1078 | impl ToTokens for BoundLifetimes { 1079 | fn to_tokens(&self, tokens: &mut TokenStream) { 1080 | self.for_token.to_tokens(tokens); 1081 | self.lt_token.to_tokens(tokens); 1082 | self.lifetimes.to_tokens(tokens); 1083 | self.gt_token.to_tokens(tokens); 1084 | } 1085 | } 1086 | 1087 | impl ToTokens for Lifetime { 1088 | fn to_tokens(&self, tokens: &mut TokenStream) { 1089 | let mut apostrophe = Punct::new('\'', Spacing::Joint); 1090 | apostrophe.set_span(self.apostrophe); 1091 | apostrophe.to_tokens(tokens); 1092 | self.ident.to_tokens(tokens); 1093 | } 1094 | } 1095 | 1096 | impl ToTokens for LifetimeDef { 1097 | fn to_tokens(&self, tokens: &mut TokenStream) { 1098 | self.attrs.to_tokens(tokens); 1099 | self.lifetime.to_tokens(tokens); 1100 | if !self.bounds.is_empty() { 1101 | tokens_or_default(self.colon_token.as_ref(), ':', tokens); 1102 | self.bounds.to_tokens(tokens); 1103 | } 1104 | } 1105 | } 
1106 | 1107 | impl ToTokens for TypeParam { 1108 | fn to_tokens(&self, tokens: &mut TokenStream) { 1109 | self.attrs.to_tokens(tokens); 1110 | self.ident.to_tokens(tokens); 1111 | if !self.bounds.is_empty() { 1112 | tokens_or_default(self.colon_token.as_ref(), ':', tokens); 1113 | for (bound, punct) in &self.bounds { 1114 | bound.to_tokens(tokens); 1115 | punct.to_tokens(tokens); 1116 | } 1117 | } 1118 | if let Some(default) = &self.default { 1119 | tokens_or_default(self.eq_token.as_ref(), '=', tokens); 1120 | default.to_tokens(tokens); 1121 | } 1122 | } 1123 | } 1124 | 1125 | impl ToTokens for TypeParamBound { 1126 | fn to_tokens(&self, tokens: &mut TokenStream) { 1127 | self.tokens.to_tokens(tokens); 1128 | } 1129 | } 1130 | 1131 | impl ToTokens for ConstParam { 1132 | fn to_tokens(&self, tokens: &mut TokenStream) { 1133 | self.attrs.to_tokens(tokens); 1134 | self.const_token.to_tokens(tokens); 1135 | self.ident.to_tokens(tokens); 1136 | self.colon_token.to_tokens(tokens); 1137 | self.ty.to_tokens(tokens); 1138 | if let Some(default) = &self.default { 1139 | tokens_or_default(self.eq_token.as_ref(), '=', tokens); 1140 | default.to_tokens(tokens); 1141 | } 1142 | } 1143 | } 1144 | 1145 | impl ToTokens for ImplGenerics<'_> { 1146 | fn to_tokens(&self, tokens: &mut TokenStream) { 1147 | if self.0.params.is_empty() { 1148 | return; 1149 | } 1150 | 1151 | tokens_or_default(self.0.lt_token.as_ref(), '<', tokens); 1152 | 1153 | // Print lifetimes before types and consts, regardless of their 1154 | // order in self.params. 1155 | // 1156 | // TODO: ordering rules for const parameters vs type parameters have 1157 | // not been settled yet. 
https://github.com/rust-lang/rust/issues/44580 1158 | let mut trailing_or_empty = true; 1159 | for (param, p) in &self.0.params { 1160 | if let GenericParam::Lifetime(_) = param { 1161 | param.to_tokens(tokens); 1162 | p.to_tokens(tokens); 1163 | trailing_or_empty = p.is_some(); 1164 | } 1165 | } 1166 | for (param, p) in &self.0.params { 1167 | if let GenericParam::Lifetime(_) = param { 1168 | continue; 1169 | } 1170 | if !trailing_or_empty { 1171 | Punct::new(',', Spacing::Alone).to_tokens(tokens); 1172 | trailing_or_empty = true; 1173 | } 1174 | match param { 1175 | GenericParam::Lifetime(_) => unreachable!(), 1176 | GenericParam::Type(param) => { 1177 | // Leave off the type parameter defaults 1178 | param.attrs.to_tokens(tokens); 1179 | param.ident.to_tokens(tokens); 1180 | if !param.bounds.is_empty() { 1181 | tokens_or_default(param.colon_token.as_ref(), ':', tokens); 1182 | param.bounds.to_tokens(tokens); 1183 | } 1184 | } 1185 | GenericParam::Const(param) => { 1186 | // Leave off the const parameter defaults 1187 | param.attrs.to_tokens(tokens); 1188 | param.const_token.to_tokens(tokens); 1189 | param.ident.to_tokens(tokens); 1190 | param.colon_token.to_tokens(tokens); 1191 | param.ty.to_tokens(tokens); 1192 | } 1193 | } 1194 | p.to_tokens(tokens); 1195 | } 1196 | 1197 | tokens_or_default(self.0.gt_token.as_ref(), '>', tokens); 1198 | } 1199 | } 1200 | 1201 | impl ToTokens for TypeGenerics<'_> { 1202 | fn to_tokens(&self, tokens: &mut TokenStream) { 1203 | if self.0.params.is_empty() { 1204 | return; 1205 | } 1206 | 1207 | tokens_or_default(self.0.lt_token.as_ref(), '<', tokens); 1208 | 1209 | // Print lifetimes before types and consts, regardless of their 1210 | // order in self.params. 1211 | // 1212 | // TODO: ordering rules for const parameters vs type parameters have 1213 | // not been settled yet. 
https://github.com/rust-lang/rust/issues/44580 1214 | let mut trailing_or_empty = true; 1215 | for (param, p) in &self.0.params { 1216 | if let GenericParam::Lifetime(def) = param { 1217 | // Leave off the lifetime bounds and attributes 1218 | def.lifetime.to_tokens(tokens); 1219 | p.to_tokens(tokens); 1220 | trailing_or_empty = p.is_some(); 1221 | } 1222 | } 1223 | for (param, p) in &self.0.params { 1224 | if let GenericParam::Lifetime(_) = param { 1225 | continue; 1226 | } 1227 | if !trailing_or_empty { 1228 | Punct::new(',', Spacing::Alone).to_tokens(tokens); 1229 | trailing_or_empty = true; 1230 | } 1231 | match param { 1232 | GenericParam::Lifetime(_) => unreachable!(), 1233 | GenericParam::Type(param) => { 1234 | // Leave off the type parameter defaults 1235 | param.ident.to_tokens(tokens); 1236 | } 1237 | GenericParam::Const(param) => { 1238 | // Leave off the const parameter defaults 1239 | param.ident.to_tokens(tokens); 1240 | } 1241 | } 1242 | p.to_tokens(tokens); 1243 | } 1244 | 1245 | tokens_or_default(self.0.gt_token.as_ref(), '>', tokens); 1246 | } 1247 | } 1248 | 1249 | impl ToTokens for WhereClause { 1250 | fn to_tokens(&self, tokens: &mut TokenStream) { 1251 | if !self.predicates.is_empty() { 1252 | self.where_token.to_tokens(tokens); 1253 | self.predicates.to_tokens(tokens); 1254 | } 1255 | } 1256 | } 1257 | 1258 | impl ToTokens for WherePredicate { 1259 | fn to_tokens(&self, tokens: &mut TokenStream) { 1260 | match self { 1261 | WherePredicate::Lifetime(l) => l.to_tokens(tokens), 1262 | WherePredicate::Type(t) => t.to_tokens(tokens), 1263 | } 1264 | } 1265 | } 1266 | 1267 | impl ToTokens for PredicateType { 1268 | fn to_tokens(&self, tokens: &mut TokenStream) { 1269 | self.lifetimes.to_tokens(tokens); 1270 | self.bounded_ty.to_tokens(tokens); 1271 | self.colon_token.to_tokens(tokens); 1272 | self.bounds.to_tokens(tokens); 1273 | } 1274 | } 1275 | 1276 | impl ToTokens for PredicateLifetime { 1277 | fn to_tokens(&self, tokens: &mut TokenStream) { 
1278 | self.lifetime.to_tokens(tokens); 1279 | self.colon_token.to_tokens(tokens); 1280 | self.bounds.to_tokens(tokens); 1281 | } 1282 | } 1283 | 1284 | impl ToTokens for Visibility { 1285 | fn to_tokens(&self, tokens: &mut TokenStream) { 1286 | match self { 1287 | Visibility::Public(i) => i.to_tokens(tokens), 1288 | Visibility::Restricted(i, g) => { 1289 | i.to_tokens(tokens); 1290 | g.to_tokens(tokens); 1291 | } 1292 | Visibility::Inherited => {} 1293 | } 1294 | } 1295 | } 1296 | 1297 | impl ToTokens for Attribute { 1298 | fn to_tokens(&self, tokens: &mut TokenStream) { 1299 | self.pound_token.to_tokens(tokens); 1300 | self.tokens.to_tokens(tokens); 1301 | } 1302 | } 1303 | 1304 | fn group( 1305 | span: Span, 1306 | delimiter: Delimiter, 1307 | tokens: &mut TokenStream, 1308 | f: &dyn Fn(&mut TokenStream), 1309 | ) { 1310 | let mut inner = TokenStream::new(); 1311 | f(&mut inner); 1312 | let mut g = Group::new(delimiter, inner); 1313 | g.set_span(span); 1314 | g.to_tokens(tokens); 1315 | } 1316 | 1317 | impl ToTokens for ItemTrait { 1318 | fn to_tokens(&self, tokens: &mut TokenStream) { 1319 | self.attrs.to_tokens(tokens); 1320 | self.vis.to_tokens(tokens); 1321 | self.unsafety.to_tokens(tokens); 1322 | self.trait_token.to_tokens(tokens); 1323 | self.ident.to_tokens(tokens); 1324 | self.generics.to_tokens(tokens); 1325 | self.generics.where_clause.to_tokens(tokens); 1326 | group(self.brace_token, Delimiter::Brace, tokens, &|tokens| { 1327 | self.items.to_tokens(tokens); 1328 | }); 1329 | } 1330 | } 1331 | 1332 | impl ToTokens for ItemImpl { 1333 | fn to_tokens(&self, tokens: &mut TokenStream) { 1334 | self.attrs.to_tokens(tokens); 1335 | self.defaultness.to_tokens(tokens); 1336 | self.unsafety.to_tokens(tokens); 1337 | self.impl_token.to_tokens(tokens); 1338 | self.generics.impl_generics().to_tokens(tokens); 1339 | self.const_token.to_tokens(tokens); 1340 | if let Some((path, generics, for_)) = &self.trait_ { 1341 | path.to_tokens(tokens); 1342 | 
generics.to_tokens(tokens); 1343 | for_.to_tokens(tokens); 1344 | } 1345 | self.self_ty.to_tokens(tokens); 1346 | self.generics.where_clause.to_tokens(tokens); 1347 | group(self.brace_token, Delimiter::Brace, tokens, &|tokens| { 1348 | self.items.to_tokens(tokens); 1349 | }); 1350 | } 1351 | } 1352 | 1353 | impl ToTokens for TraitItem { 1354 | fn to_tokens(&self, tokens: &mut TokenStream) { 1355 | match self { 1356 | TraitItem::Const(i) => i.to_tokens(tokens), 1357 | TraitItem::Method(i) => i.to_tokens(tokens), 1358 | TraitItem::Type(i) => i.to_tokens(tokens), 1359 | } 1360 | } 1361 | } 1362 | 1363 | impl ToTokens for TraitItemConst { 1364 | fn to_tokens(&self, tokens: &mut TokenStream) { 1365 | self.attrs.to_tokens(tokens); 1366 | self.const_token.to_tokens(tokens); 1367 | self.ident.to_tokens(tokens); 1368 | self.colon_token.to_tokens(tokens); 1369 | self.ty.to_tokens(tokens); 1370 | self.semi_token.to_tokens(tokens); 1371 | } 1372 | } 1373 | 1374 | impl ToTokens for TraitItemMethod { 1375 | fn to_tokens(&self, tokens: &mut TokenStream) { 1376 | self.attrs.to_tokens(tokens); 1377 | self.sig.to_tokens(tokens); 1378 | self.semi_token.to_tokens(tokens); 1379 | } 1380 | } 1381 | 1382 | impl ToTokens for TraitItemType { 1383 | fn to_tokens(&self, tokens: &mut TokenStream) { 1384 | self.attrs.to_tokens(tokens); 1385 | self.type_token.to_tokens(tokens); 1386 | self.ident.to_tokens(tokens); 1387 | self.generics.to_tokens(tokens); 1388 | self.generics.where_clause.to_tokens(tokens); 1389 | self.semi_token.to_tokens(tokens); 1390 | } 1391 | } 1392 | 1393 | impl ToTokens for ImplItem { 1394 | fn to_tokens(&self, tokens: &mut TokenStream) { 1395 | match self { 1396 | ImplItem::Const(i) => i.to_tokens(tokens), 1397 | ImplItem::Method(i) => i.to_tokens(tokens), 1398 | ImplItem::Type(i) => i.to_tokens(tokens), 1399 | } 1400 | } 1401 | } 1402 | 1403 | impl ToTokens for ImplItemConst { 1404 | fn to_tokens(&self, tokens: &mut TokenStream) { 1405 | self.attrs.to_tokens(tokens); 
1406 | self.vis.to_tokens(tokens); 1407 | self.defaultness.to_tokens(tokens); 1408 | self.const_token.to_tokens(tokens); 1409 | self.ident.to_tokens(tokens); 1410 | self.colon_token.to_tokens(tokens); 1411 | self.ty.to_tokens(tokens); 1412 | self.eq_token.to_tokens(tokens); 1413 | self.expr.to_tokens(tokens); 1414 | self.semi_token.to_tokens(tokens); 1415 | } 1416 | } 1417 | 1418 | impl ToTokens for ImplItemMethod { 1419 | fn to_tokens(&self, tokens: &mut TokenStream) { 1420 | self.attrs.to_tokens(tokens); 1421 | self.vis.to_tokens(tokens); 1422 | self.defaultness.to_tokens(tokens); 1423 | self.sig.to_tokens(tokens); 1424 | self.body.to_tokens(tokens); 1425 | } 1426 | } 1427 | 1428 | impl ToTokens for ImplItemType { 1429 | fn to_tokens(&self, tokens: &mut TokenStream) { 1430 | self.attrs.to_tokens(tokens); 1431 | self.vis.to_tokens(tokens); 1432 | self.defaultness.to_tokens(tokens); 1433 | self.type_token.to_tokens(tokens); 1434 | self.ident.to_tokens(tokens); 1435 | self.generics.to_tokens(tokens); 1436 | self.generics.where_clause.to_tokens(tokens); 1437 | self.eq_token.to_tokens(tokens); 1438 | self.ty.to_tokens(tokens); 1439 | self.semi_token.to_tokens(tokens); 1440 | } 1441 | } 1442 | 1443 | impl ToTokens for Signature { 1444 | fn to_tokens(&self, tokens: &mut TokenStream) { 1445 | self.before_ident.to_tokens(tokens); 1446 | self.ident.to_tokens(tokens); 1447 | self.generics.to_tokens(tokens); 1448 | group(self.paren_token, Delimiter::Parenthesis, tokens, &|tokens| { 1449 | for arg in &self.inputs { 1450 | arg.to_tokens(tokens); 1451 | } 1452 | }); 1453 | self.output.to_tokens(tokens); 1454 | self.generics.where_clause.to_tokens(tokens); 1455 | } 1456 | } 1457 | 1458 | impl ToTokens for FnArg { 1459 | fn to_tokens(&self, tokens: &mut TokenStream) { 1460 | match self { 1461 | FnArg::Receiver(pat, p) => { 1462 | pat.to_tokens(tokens); 1463 | p.to_tokens(tokens); 1464 | } 1465 | FnArg::Typed(pat, colon, ty, p) => { 1466 | pat.to_tokens(tokens); 1467 | 
colon.to_tokens(tokens); 1468 | ty.to_tokens(tokens); 1469 | p.to_tokens(tokens); 1470 | } 1471 | } 1472 | } 1473 | } 1474 | } 1475 | -------------------------------------------------------------------------------- /src/error.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 OR MIT 2 | 3 | use std::iter::{self, FromIterator}; 4 | 5 | use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree}; 6 | 7 | use crate::to_tokens::ToTokens; 8 | 9 | macro_rules! format_err { 10 | ($span:expr, $msg:expr $(,)*) => { 11 | crate::error::Error::new(&$span, String::from($msg)) 12 | }; 13 | ($span:expr, $($tt:tt)*) => { 14 | format_err!($span, format!($($tt)*)) 15 | }; 16 | } 17 | 18 | macro_rules! bail { 19 | ($($tt:tt)*) => { 20 | return Err(format_err!($($tt)*)) 21 | }; 22 | } 23 | 24 | pub(crate) type Result = std::result::Result; 25 | 26 | #[derive(Debug)] 27 | pub(crate) struct Error { 28 | start_span: Span, 29 | end_span: Span, 30 | msg: String, 31 | } 32 | 33 | impl Error { 34 | pub(crate) fn new(tokens: &dyn ToTokens, msg: String) -> Self { 35 | let mut iter = tokens.to_token_stream().into_iter(); 36 | // `Span` on stable Rust has a limitation that only points to the first 37 | // token, not the whole tokens. We can work around this limitation by 38 | // using the first/last span of the tokens like `syn::Error::new_spanned` does. 
39 | let start_span = iter.next().map_or_else(Span::call_site, |t| t.span()); 40 | let end_span = iter.last().map_or(start_span, |t| t.span()); 41 | 42 | Self { start_span, end_span, msg } 43 | } 44 | 45 | // Based on https://github.com/dtolnay/syn/blob/1.0.39/src/error.rs#L210-L237 46 | pub(crate) fn into_compile_error(self) -> TokenStream { 47 | // compile_error!($msg) 48 | TokenStream::from_iter(vec![ 49 | TokenTree::Ident(Ident::new("compile_error", self.start_span)), 50 | TokenTree::Punct({ 51 | let mut punct = Punct::new('!', Spacing::Alone); 52 | punct.set_span(self.start_span); 53 | punct 54 | }), 55 | TokenTree::Group({ 56 | let mut group = Group::new(Delimiter::Brace, { 57 | iter::once(TokenTree::Literal({ 58 | let mut string = Literal::string(&self.msg); 59 | string.set_span(self.end_span); 60 | string 61 | })) 62 | .collect() 63 | }); 64 | group.set_span(self.end_span); 65 | group 66 | }), 67 | ]) 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /src/iter.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 OR MIT 2 | 3 | use proc_macro::{ 4 | Delimiter, Group, Ident, Literal, Punct, Spacing, TokenStream, TokenTree, token_stream, 5 | }; 6 | 7 | use crate::error::Result; 8 | 9 | #[derive(Clone)] 10 | pub(crate) struct TokenIter { 11 | stack: Vec, 12 | peeked: Option, 13 | peeked2: Option, 14 | peeked3: Option, 15 | peeked4: Option, 16 | } 17 | 18 | impl TokenIter { 19 | pub(crate) fn new(tokens: TokenStream) -> Self { 20 | Self { 21 | stack: vec![tokens.into_iter()], 22 | peeked: None, 23 | peeked2: None, 24 | peeked3: None, 25 | peeked4: None, 26 | } 27 | } 28 | 29 | #[allow(clippy::wrong_self_convention)] 30 | pub(crate) fn is_empty(&mut self) -> bool { 31 | self.peek().is_none() 32 | } 33 | 34 | pub(crate) fn peek(&mut self) -> Option<&TokenTree> { 35 | self.peeked = self.next(); 36 | self.peeked.as_ref() 37 | } 38 | 39 | 
pub(crate) fn peek_t(&mut self, token: &dyn Token) -> bool { 40 | match self.peek() { 41 | Some(tt) => token.match_token(tt), 42 | None => false, 43 | } 44 | } 45 | 46 | pub(crate) fn peek2(&mut self) -> Option<&TokenTree> { 47 | let peeked = self.next(); 48 | let peeked2 = self.next(); 49 | self.peeked = peeked; 50 | self.peeked2 = peeked2; 51 | self.peeked2.as_ref() 52 | } 53 | 54 | pub(crate) fn peek2_t(&mut self, token: &dyn Token) -> bool { 55 | match self.peek2() { 56 | Some(tt) => token.match_token(tt), 57 | None => false, 58 | } 59 | } 60 | 61 | pub(crate) fn peek3(&mut self) -> Option<&TokenTree> { 62 | let peeked = self.next(); 63 | let peeked2 = self.next(); 64 | let peeked3 = self.next(); 65 | self.peeked = peeked; 66 | self.peeked2 = peeked2; 67 | self.peeked3 = peeked3; 68 | self.peeked3.as_ref() 69 | } 70 | 71 | pub(crate) fn peek3_t(&mut self, token: &dyn Token) -> bool { 72 | match self.peek3() { 73 | Some(tt) => token.match_token(tt), 74 | None => false, 75 | } 76 | } 77 | 78 | pub(crate) fn peek4(&mut self) -> Option<&TokenTree> { 79 | let peeked = self.next(); 80 | let peeked2 = self.next(); 81 | let peeked3 = self.next(); 82 | let peeked4 = self.next(); 83 | self.peeked = peeked; 84 | self.peeked2 = peeked2; 85 | self.peeked3 = peeked3; 86 | self.peeked4 = peeked4; 87 | self.peeked4.as_ref() 88 | } 89 | 90 | pub(crate) fn peek4_t(&mut self, token: &dyn Token) -> bool { 91 | match self.peek4() { 92 | Some(tt) => token.match_token(tt), 93 | None => false, 94 | } 95 | } 96 | 97 | pub(crate) fn peek_ident(&mut self) -> Option<&Ident> { 98 | match self.peek() { 99 | Some(TokenTree::Ident(i)) => Some(i), 100 | _ => None, 101 | } 102 | } 103 | 104 | pub(crate) fn peek2_ident(&mut self) -> Option<&Ident> { 105 | match self.peek2() { 106 | Some(TokenTree::Ident(i)) => Some(i), 107 | _ => None, 108 | } 109 | } 110 | 111 | pub(crate) fn peek3_ident(&mut self) -> Option<&Ident> { 112 | match self.peek3() { 113 | Some(TokenTree::Ident(i)) => Some(i), 114 | 
_ => None, 115 | } 116 | } 117 | 118 | pub(crate) fn parse_ident(&mut self) -> Result { 119 | match self.next() { 120 | Some(TokenTree::Ident(i)) => Ok(i), 121 | // TODO: pass scope span if tt is None 122 | tt => bail!(tt, "expected identifier"), 123 | } 124 | } 125 | 126 | pub(crate) fn parse_ident_opt(&mut self) -> Option { 127 | self.peek_ident()?; 128 | Some(self.parse_ident().unwrap()) 129 | } 130 | 131 | pub(crate) fn parse_kw(&mut self, kw: &str) -> Result { 132 | let tt = self.next(); 133 | match &tt { 134 | Some(TokenTree::Ident(i)) if i.to_string() == kw => { 135 | if let Some(TokenTree::Ident(i)) = tt { Ok(i) } else { unreachable!() } 136 | } 137 | // TODO: pass scope span if tt is None 138 | tt => bail!(tt, "expected `{}`", kw), 139 | } 140 | } 141 | 142 | pub(crate) fn parse_kw_opt(&mut self, kw: &str) -> Option { 143 | if self.peek_t(&kw) { Some(self.parse_ident().unwrap()) } else { None } 144 | } 145 | 146 | pub(crate) fn peek_punct(&mut self, ch: char) -> Option<&Punct> { 147 | match self.peek() { 148 | Some(TokenTree::Punct(p)) if p.as_char() == ch => Some(p), 149 | _ => None, 150 | } 151 | } 152 | 153 | pub(crate) fn peek2_punct(&mut self, ch: char) -> Option<&Punct> { 154 | match self.peek2() { 155 | Some(TokenTree::Punct(p)) if p.as_char() == ch => Some(p), 156 | _ => None, 157 | } 158 | } 159 | 160 | pub(crate) fn parse_punct(&mut self, ch: char) -> Result { 161 | let tt = self.next(); 162 | match &tt { 163 | Some(TokenTree::Punct(p)) if p.as_char() == ch => { 164 | if let Some(TokenTree::Punct(p)) = tt { Ok(p) } else { unreachable!() } 165 | } 166 | // TODO: pass scope span if tt is None 167 | tt => bail!(tt, "expected `{}`", ch), 168 | } 169 | } 170 | 171 | pub(crate) fn parse_punct_opt(&mut self, ch: char) -> Option { 172 | self.peek_punct(ch)?; 173 | Some(self.parse_punct(ch).unwrap()) 174 | } 175 | 176 | pub(crate) fn peek_lifetime(&mut self) -> bool { 177 | self.peek_punct('\'').map_or(false, |p| p.spacing() == Spacing::Joint) 178 | && 
self.peek2_ident().is_some() 179 | } 180 | 181 | pub(crate) fn peek2_lifetime(&mut self) -> bool { 182 | self.peek2_punct('\'').map_or(false, |p| p.spacing() == Spacing::Joint) 183 | && self.peek3_ident().is_some() 184 | } 185 | 186 | pub(crate) fn parse_group(&mut self, delimiter: Delimiter) -> Result { 187 | let tt = self.next(); 188 | match &tt { 189 | Some(TokenTree::Group(g)) if g.delimiter() == delimiter => { 190 | if let Some(TokenTree::Group(g)) = tt { Ok(g) } else { unreachable!() } 191 | } 192 | tt => { 193 | let d = match delimiter { 194 | Delimiter::Brace => "`{`", 195 | Delimiter::Bracket => "`[`", 196 | Delimiter::Parenthesis => "`(`", 197 | Delimiter::None => "none-delimited group", 198 | }; 199 | // TODO: pass scope span if tt is None 200 | bail!(tt, "expected {}", d) 201 | } 202 | } 203 | } 204 | 205 | pub(crate) fn peek_literal(&mut self) -> Option<&Literal> { 206 | match self.peek() { 207 | Some(TokenTree::Literal(l)) => Some(l), 208 | _ => None, 209 | } 210 | } 211 | 212 | pub(crate) fn parse_literal(&mut self) -> Result { 213 | match self.next() { 214 | Some(TokenTree::Literal(l)) => Ok(l), 215 | // TODO: pass scope span if tt is None 216 | tt => bail!(tt, "expected literal"), 217 | } 218 | } 219 | 220 | pub(crate) fn parse_literal_opt(&mut self) -> Option { 221 | self.peek_literal()?; 222 | Some(self.parse_literal().unwrap()) 223 | } 224 | 225 | pub(crate) fn tt(&mut self) -> Result { 226 | self.next().ok_or_else(|| { 227 | // TODO: pass scope span 228 | format_err!(TokenStream::new(), "unexpected end of input") 229 | }) 230 | } 231 | } 232 | 233 | // Based on https://github.com/dtolnay/proc-macro-hack/blob/0.5.19/src/iter.rs 234 | impl Iterator for TokenIter { 235 | type Item = TokenTree; 236 | 237 | fn next(&mut self) -> Option { 238 | if let Some(tt) = self.peeked.take() { 239 | return Some(tt); 240 | } 241 | if let Some(tt) = self.peeked2.take() { 242 | return Some(tt); 243 | } 244 | if let Some(tt) = self.peeked3.take() { 245 | return 
Some(tt); 246 | } 247 | if let Some(tt) = self.peeked4.take() { 248 | return Some(tt); 249 | } 250 | loop { 251 | let top = self.stack.last_mut()?; 252 | match top.next() { 253 | None => drop(self.stack.pop()), 254 | Some(TokenTree::Group(ref group)) if group.delimiter() == Delimiter::None => { 255 | self.stack.push(group.stream().into_iter()); 256 | } 257 | Some(tt) => return Some(tt), 258 | } 259 | } 260 | } 261 | } 262 | 263 | pub(crate) trait Token { 264 | fn match_token(&self, tt: &TokenTree) -> bool; 265 | } 266 | 267 | impl Token for char { 268 | fn match_token(&self, tt: &TokenTree) -> bool { 269 | match tt { 270 | TokenTree::Punct(p) => p.as_char() == *self, 271 | _ => false, 272 | } 273 | } 274 | } 275 | 276 | impl Token for &str { 277 | fn match_token(&self, tt: &TokenTree) -> bool { 278 | match tt { 279 | TokenTree::Ident(i) => i.to_string() == *self, 280 | _ => false, 281 | } 282 | } 283 | } 284 | 285 | impl Token for Delimiter { 286 | fn match_token(&self, tt: &TokenTree) -> bool { 287 | match tt { 288 | TokenTree::Group(g) => g.delimiter() == *self, 289 | _ => false, 290 | } 291 | } 292 | } 293 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 OR MIT 2 | 3 | /*! 4 | 6 | 7 | 8 | A lightweight attribute macro for easily writing [extension trait pattern][rfc0445]. 
9 | 10 | ```toml 11 | [dependencies] 12 | easy-ext = "1" 13 | ``` 14 | 15 | ## Examples 16 | 17 | ``` 18 | use easy_ext::ext; 19 | 20 | #[ext(ResultExt)] 21 | pub impl<T, E> Result<T, E> { 22 | fn err_into<U>(self) -> Result<T, U> 23 | where 24 | E: Into<U>, 25 | { 26 | self.map_err(Into::into) 27 | } 28 | } 29 | ``` 30 | 31 | Code like this will be generated: 32 | 33 | ``` 34 | pub trait ResultExt<T, E> { 35 | fn err_into<U>(self) -> Result<T, U> 36 | where 37 | E: Into<U>; 38 | } 39 | 40 | impl<T, E> ResultExt<T, E> for Result<T, E> { 41 | fn err_into<U>(self) -> Result<T, U> 42 | where 43 | E: Into<U>, 44 | { 45 | self.map_err(Into::into) 46 | } 47 | } 48 | ``` 49 | 50 | You can elide the trait name. 51 | 52 | ``` 53 | use easy_ext::ext; 54 | 55 | #[ext] 56 | impl<T, E> Result<T, E> { 57 | fn err_into<U>(self) -> Result<T, U> 58 | where 59 | E: Into<U>, 60 | { 61 | self.map_err(Into::into) 62 | } 63 | } 64 | ``` 65 | 66 | Note that in this case, `#[ext]` assigns a random name, so you cannot 67 | import/export the generated trait. 68 | 69 | ### Visibility 70 | 71 | There are two ways to specify visibility. 72 | 73 | #### Impl-level visibility 74 | 75 | The first way is to specify visibility at the impl level. For example: 76 | 77 | ``` 78 | use easy_ext::ext; 79 | 80 | // unnamed 81 | #[ext] 82 | pub impl str { 83 | fn foo(&self) {} 84 | } 85 | 86 | // named 87 | #[ext(StrExt)] 88 | pub impl str { 89 | fn bar(&self) {} 90 | } 91 | ``` 92 | 93 | #### Associated-item-level visibility 94 | 95 | Another way is to specify visibility at the associated item level. 96 | 97 | For example, if the method is `pub` then the trait will also be `pub`: 98 | 99 | ``` 100 | use easy_ext::ext; 101 | 102 | #[ext(ResultExt)] // generate `pub trait ResultExt` 103 | impl<T, E> Result<T, E> { 104 | pub fn err_into<U>(self) -> Result<T, U> 105 | where 106 | E: Into<U>, 107 | { 108 | self.map_err(Into::into) 109 | } 110 | } 111 | ``` 112 | 113 | This is useful when migrating from an inherent impl to an extension trait. 
114 | 115 | Note that the visibility of all the associated items in the `impl` must be identical. 116 | 117 | Note that you cannot specify impl-level visibility and associated-item-level visibility at the same time. 118 | 119 | ### [Supertraits](https://doc.rust-lang.org/reference/items/traits.html#supertraits) 120 | 121 | If you want the extension trait to be a subtrait of another trait, 122 | add `Self: SubTrait` bound to the `where` clause. 123 | 124 | ``` 125 | use easy_ext::ext; 126 | 127 | #[ext(Ext)] 128 | impl<T> T 129 | where 130 | Self: Default, 131 | { 132 | fn method(&self) {} 133 | } 134 | ``` 135 | 136 | ### Supported items 137 | 138 | #### [Associated functions (methods)](https://doc.rust-lang.org/reference/items/associated-items.html#associated-functions-and-methods) 139 | 140 | ``` 141 | use easy_ext::ext; 142 | 143 | #[ext] 144 | impl<T> T { 145 | fn method(&self) {} 146 | } 147 | ``` 148 | 149 | #### [Associated constants](https://doc.rust-lang.org/reference/items/associated-items.html#associated-constants) 150 | 151 | ``` 152 | use easy_ext::ext; 153 | 154 | #[ext] 155 | impl<T> T { 156 | const MSG: &'static str = "Hello!"; 157 | } 158 | ``` 159 | 160 | #### [Associated types](https://doc.rust-lang.org/reference/items/associated-items.html#associated-types) 161 | 162 | ``` 163 | use easy_ext::ext; 164 | 165 | #[ext] 166 | impl str { 167 | type Owned = String; 168 | 169 | fn method(&self) -> Self::Owned { 170 | self.to_owned() 171 | } 172 | } 173 | ``` 174 | 175 | [rfc0445]: https://rust-lang.github.io/rfcs/0445-extension-trait-conventions.html 176 | 177 | 178 | */ 179 | 180 | #![doc(test( 181 | no_crate_inject, 182 | attr( 183 | deny(warnings, rust_2018_idioms, single_use_lifetimes), 184 | allow(dead_code, unused_variables) 185 | ) 186 | ))] 187 | #![forbid(unsafe_code)] 188 | 189 | // older compilers require explicit `extern crate`. 
190 | #[allow(unused_extern_crates)] 191 | extern crate proc_macro; 192 | 193 | #[macro_use] 194 | mod error; 195 | 196 | mod ast; 197 | mod iter; 198 | mod to_tokens; 199 | 200 | use std::{collections::hash_map::DefaultHasher, hash::Hasher, iter::FromIterator, mem}; 201 | 202 | use proc_macro::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree}; 203 | 204 | use crate::{ 205 | ast::{ 206 | Attribute, AttributeKind, FnArg, GenericParam, Generics, ImplItem, ItemImpl, ItemTrait, 207 | PredicateType, Signature, TraitItem, TraitItemConst, TraitItemMethod, TraitItemType, 208 | TypeParam, Visibility, WherePredicate, parsing, 209 | }, 210 | error::{Error, Result}, 211 | iter::TokenIter, 212 | to_tokens::ToTokens, 213 | }; 214 | 215 | /// A lightweight attribute macro for easily writing [extension trait pattern][rfc0445]. 216 | /// 217 | /// See the [crate-level documentation](crate) for details. 218 | /// 219 | /// [rfc0445]: https://rust-lang.github.io/rfcs/0445-extension-trait-conventions.html 220 | #[proc_macro_attribute] 221 | pub fn ext(args: TokenStream, input: TokenStream) -> TokenStream { 222 | expand(args, input).unwrap_or_else(Error::into_compile_error) 223 | } 224 | 225 | fn expand(args: TokenStream, input: TokenStream) -> Result { 226 | let trait_name = match parse_args(args)? 
{ 227 | None => Ident::new(&format!("__ExtTrait{}", hash(&input)), Span::call_site()), 228 | Some(trait_name) => trait_name, 229 | }; 230 | 231 | let mut item: ItemImpl = parsing::parse_impl(&mut TokenIter::new(input))?; 232 | 233 | let mut tokens = trait_from_impl(&mut item, trait_name)?.to_token_stream(); 234 | item.to_tokens(&mut tokens); 235 | Ok(tokens) 236 | } 237 | 238 | fn parse_args(input: TokenStream) -> Result> { 239 | let input = &mut TokenIter::new(input); 240 | let vis = ast::parsing::parse_visibility(input)?; 241 | if !vis.is_inherited() { 242 | bail!(vis, "use `{} impl` instead", vis); 243 | } 244 | let trait_name = input.parse_ident_opt(); 245 | if !input.is_empty() { 246 | let tt = input.next().unwrap(); 247 | bail!(tt, "unexpected token: `{}`", tt); 248 | } 249 | Ok(trait_name) 250 | } 251 | 252 | fn determine_trait_generics<'a>( 253 | generics: &mut Generics, 254 | self_ty: &'a [TokenTree], 255 | ) -> Option<&'a Ident> { 256 | if self_ty.len() != 1 { 257 | return None; 258 | } 259 | if let TokenTree::Ident(self_ty) = &self_ty[0] { 260 | let i = generics.params.iter().position(|(param, _)| { 261 | if let GenericParam::Type(param) = param { 262 | param.ident.to_string() == self_ty.to_string() 263 | } else { 264 | false 265 | } 266 | }); 267 | if let Some(i) = i { 268 | let mut params = mem::replace(&mut generics.params, vec![]); 269 | let (param, _) = params.remove(i); 270 | generics.params = params; 271 | 272 | if let GenericParam::Type(TypeParam { 273 | colon_token: Some(colon_token), bounds, .. 
274 | }) = param 275 | { 276 | let bounds = bounds.into_iter().filter(|(b, _)| !b.is_maybe).collect::>(); 277 | if !bounds.is_empty() { 278 | let where_clause = generics.make_where_clause(); 279 | if let Some((_, p)) = where_clause.predicates.last_mut() { 280 | p.get_or_insert_with(|| Punct::new(',', Spacing::Alone)); 281 | } 282 | where_clause.predicates.push(( 283 | WherePredicate::Type(PredicateType { 284 | lifetimes: None, 285 | bounded_ty: std::iter::once(TokenTree::Ident(Ident::new( 286 | "Self", 287 | self_ty.span(), 288 | ))) 289 | .collect(), 290 | colon_token, 291 | bounds, 292 | }), 293 | None, 294 | )); 295 | } 296 | } 297 | 298 | return Some(self_ty); 299 | } 300 | } 301 | None 302 | } 303 | 304 | fn trait_from_impl(item: &mut ItemImpl, trait_name: Ident) -> Result { 305 | /// Replace `self_ty` with `Self`. 306 | struct ReplaceParam { 307 | self_ty: String, 308 | // Restrict the scope for removing `?Trait` bounds, because `?Trait` 309 | // bounds are only permitted at the point where a type parameter is 310 | // declared. 
311 | remove_maybe: bool, 312 | } 313 | 314 | impl ReplaceParam { 315 | fn visit_token_stream(&self, tokens: &mut TokenStream) -> bool { 316 | let mut out: Vec = vec![]; 317 | let mut modified = false; 318 | let iter = tokens.clone().into_iter(); 319 | for tt in iter { 320 | match tt { 321 | TokenTree::Ident(ident) => { 322 | if ident.to_string() == self.self_ty { 323 | modified = true; 324 | let self_ = Ident::new("Self", ident.span()); 325 | out.push(self_.into()); 326 | } else { 327 | out.push(TokenTree::Ident(ident)); 328 | } 329 | } 330 | TokenTree::Group(group) => { 331 | let mut content = group.stream(); 332 | modified |= self.visit_token_stream(&mut content); 333 | let mut new = Group::new(group.delimiter(), content); 334 | new.set_span(group.span()); 335 | out.push(TokenTree::Group(new)); 336 | } 337 | other => out.push(other), 338 | } 339 | } 340 | if modified { 341 | *tokens = TokenStream::from_iter(out); 342 | } 343 | modified 344 | } 345 | 346 | // Everything below is simply traversing the syntax tree. 
347 | 348 | fn visit_trait_item_mut(&self, node: &mut TraitItem) { 349 | match node { 350 | TraitItem::Const(node) => { 351 | self.visit_token_stream(&mut node.ty); 352 | } 353 | TraitItem::Method(node) => { 354 | self.visit_signature_mut(&mut node.sig); 355 | } 356 | TraitItem::Type(node) => { 357 | self.visit_generics_mut(&mut node.generics); 358 | } 359 | } 360 | } 361 | 362 | fn visit_signature_mut(&self, node: &mut Signature) { 363 | self.visit_generics_mut(&mut node.generics); 364 | for arg in &mut node.inputs { 365 | self.visit_fn_arg_mut(arg); 366 | } 367 | if let Some(ty) = &mut node.output { 368 | self.visit_token_stream(ty); 369 | } 370 | } 371 | 372 | fn visit_fn_arg_mut(&self, node: &mut FnArg) { 373 | match node { 374 | FnArg::Receiver(pat, _) => { 375 | self.visit_token_stream(pat); 376 | } 377 | FnArg::Typed(pat, _, ty, _) => { 378 | self.visit_token_stream(pat); 379 | self.visit_token_stream(ty); 380 | } 381 | } 382 | } 383 | 384 | fn visit_generics_mut(&self, generics: &mut Generics) { 385 | for (param, _) in &mut generics.params { 386 | match param { 387 | GenericParam::Type(param) => { 388 | for (bound, _) in &mut param.bounds { 389 | self.visit_token_stream(&mut bound.tokens); 390 | } 391 | } 392 | GenericParam::Const(_) | GenericParam::Lifetime(_) => {} 393 | } 394 | } 395 | if let Some(where_clause) = &mut generics.where_clause { 396 | let predicates = Vec::with_capacity(where_clause.predicates.len()); 397 | for (mut predicate, p) in mem::replace(&mut where_clause.predicates, predicates) { 398 | match &mut predicate { 399 | WherePredicate::Type(pred) => { 400 | if self.remove_maybe { 401 | let mut iter = pred.bounded_ty.clone().into_iter(); 402 | if let Some(TokenTree::Ident(i)) = iter.next() { 403 | if iter.next().is_none() && self.self_ty == i.to_string() { 404 | let bounds = mem::replace(&mut pred.bounds, vec![]) 405 | .into_iter() 406 | .filter(|(b, _)| !b.is_maybe) 407 | .collect::>(); 408 | if !bounds.is_empty() { 409 | 
self.visit_token_stream(&mut pred.bounded_ty); 410 | pred.bounds = bounds; 411 | for (bound, _) in &mut pred.bounds { 412 | self.visit_token_stream(&mut bound.tokens); 413 | } 414 | where_clause.predicates.push((predicate, p)); 415 | } 416 | continue; 417 | } 418 | } 419 | } 420 | 421 | self.visit_token_stream(&mut pred.bounded_ty); 422 | for (bound, _) in &mut pred.bounds { 423 | self.visit_token_stream(&mut bound.tokens); 424 | } 425 | } 426 | WherePredicate::Lifetime(_) => {} 427 | } 428 | where_clause.predicates.push((predicate, p)); 429 | } 430 | } 431 | } 432 | } 433 | 434 | let mut generics = item.generics.clone(); 435 | let mut visitor = determine_trait_generics(&mut generics, &item.self_ty) 436 | .map(|self_ty| ReplaceParam { self_ty: self_ty.to_string(), remove_maybe: false }); 437 | 438 | if let Some(visitor) = &mut visitor { 439 | visitor.remove_maybe = true; 440 | visitor.visit_generics_mut(&mut generics); 441 | visitor.remove_maybe = false; 442 | } 443 | let ty_generics = generics.ty_generics(); 444 | item.trait_ = Some(( 445 | trait_name.clone(), 446 | ty_generics.to_token_stream(), 447 | Ident::new("for", Span::call_site()), 448 | )); 449 | 450 | // impl-level visibility 451 | let impl_vis = if item.vis.is_inherited() { None } else { Some(item.vis.clone()) }; 452 | // assoc-item-level visibility 453 | let mut assoc_vis = None; 454 | let mut items = Vec::with_capacity(item.items.len()); 455 | item.items.iter_mut().try_for_each(|item| { 456 | trait_item_from_impl_item(item, &mut assoc_vis, impl_vis.as_ref()).map(|mut item| { 457 | if let Some(visitor) = &mut visitor { 458 | visitor.visit_trait_item_mut(&mut item); 459 | } 460 | items.push(item); 461 | }) 462 | })?; 463 | 464 | let mut attrs = item.attrs.clone(); 465 | find_remove(&mut item.attrs, AttributeKind::Doc); // https://github.com/taiki-e/easy-ext/issues/20 466 | attrs.push(Attribute::new(vec![ 467 | TokenTree::Ident(Ident::new("allow", Span::call_site())), 468 | TokenTree::Group(Group::new( 
469 | Delimiter::Parenthesis, 470 | std::iter::once(TokenTree::Ident(Ident::new( 471 | "patterns_in_fns_without_body", 472 | Span::call_site(), 473 | ))) 474 | .collect(), 475 | )), 476 | ])); // mut self 477 | 478 | Ok(ItemTrait { 479 | attrs, 480 | // priority: impl-level visibility > assoc-item-level visibility > inherited visibility 481 | vis: impl_vis.unwrap_or_else(|| assoc_vis.unwrap_or(Visibility::Inherited)), 482 | unsafety: item.unsafety.clone(), 483 | trait_token: Ident::new("trait", item.impl_token.span()), 484 | ident: trait_name, 485 | generics, 486 | brace_token: item.brace_token, 487 | items, 488 | }) 489 | } 490 | 491 | fn trait_item_from_impl_item( 492 | impl_item: &mut ImplItem, 493 | prev_vis: &mut Option, 494 | impl_vis: Option<&Visibility>, 495 | ) -> Result { 496 | fn check_visibility( 497 | current: Visibility, 498 | prev: &mut Option, 499 | impl_vis: Option<&Visibility>, 500 | span: &dyn ToTokens, 501 | ) -> Result<()> { 502 | if impl_vis.is_some() { 503 | if current.is_inherited() { 504 | return Ok(()); 505 | } 506 | bail!(current, "all associated items must have inherited visibility"); 507 | } 508 | match prev { 509 | None => *prev = Some(current), 510 | Some(prev) if *prev == current => {} 511 | Some(prev) => { 512 | if prev.is_inherited() { 513 | bail!(current, "all associated items must have inherited visibility"); 514 | } 515 | bail!( 516 | if current.is_inherited() { span } else { ¤t }, 517 | "all associated items must have a visibility of `{}`", 518 | prev, 519 | ); 520 | } 521 | } 522 | Ok(()) 523 | } 524 | 525 | match impl_item { 526 | ImplItem::Const(impl_const) => { 527 | let vis = mem::replace(&mut impl_const.vis, Visibility::Inherited); 528 | check_visibility(vis, prev_vis, impl_vis, &impl_const.ident)?; 529 | 530 | let attrs = impl_const.attrs.clone(); 531 | find_remove(&mut impl_const.attrs, AttributeKind::Doc); // https://github.com/taiki-e/easy-ext/issues/20 532 | Ok(TraitItem::Const(TraitItemConst { 533 | attrs, 534 | 
const_token: impl_const.const_token.clone(), 535 | ident: impl_const.ident.clone(), 536 | colon_token: impl_const.colon_token.clone(), 537 | ty: impl_const.ty.clone(), 538 | semi_token: impl_const.semi_token.clone(), 539 | })) 540 | } 541 | ImplItem::Type(impl_type) => { 542 | let vis = mem::replace(&mut impl_type.vis, Visibility::Inherited); 543 | check_visibility(vis, prev_vis, impl_vis, &impl_type.ident)?; 544 | 545 | let attrs = impl_type.attrs.clone(); 546 | find_remove(&mut impl_type.attrs, AttributeKind::Doc); // https://github.com/taiki-e/easy-ext/issues/20 547 | Ok(TraitItem::Type(TraitItemType { 548 | attrs, 549 | type_token: impl_type.type_token.clone(), 550 | ident: impl_type.ident.clone(), 551 | generics: impl_type.generics.clone(), 552 | semi_token: impl_type.semi_token.clone(), 553 | })) 554 | } 555 | ImplItem::Method(impl_method) => { 556 | let vis = mem::replace(&mut impl_method.vis, Visibility::Inherited); 557 | check_visibility(vis, prev_vis, impl_vis, &impl_method.sig.ident)?; 558 | 559 | let mut attrs = impl_method.attrs.clone(); 560 | find_remove(&mut impl_method.attrs, AttributeKind::Doc); // https://github.com/taiki-e/easy-ext/issues/20 561 | find_remove(&mut attrs, AttributeKind::Inline); // `#[inline]` is ignored on function prototypes 562 | Ok(TraitItem::Method(TraitItemMethod { 563 | attrs, 564 | sig: { 565 | let mut sig = impl_method.sig.clone(); 566 | for arg in &mut sig.inputs { 567 | if let FnArg::Typed(pat, ..) 
= arg { 568 | if pat.to_string() != "self" { 569 | *pat = std::iter::once(TokenTree::Ident(Ident::new( 570 | "_", 571 | pat.clone().into_iter().next().unwrap().span(), 572 | ))) 573 | .collect(); 574 | } 575 | } 576 | } 577 | sig 578 | }, 579 | semi_token: { 580 | let mut punct = Punct::new(';', Spacing::Alone); 581 | punct.set_span(impl_method.body.span()); 582 | punct 583 | }, 584 | })) 585 | } 586 | } 587 | } 588 | 589 | fn find_remove(attrs: &mut Vec, kind: AttributeKind) { 590 | while let Some(i) = attrs.iter().position(|attr| attr.kind == kind) { 591 | attrs.remove(i); 592 | } 593 | } 594 | 595 | /// Returns the hash value of the input AST. 596 | fn hash(input: &TokenStream) -> u64 { 597 | let mut hasher = DefaultHasher::new(); 598 | hasher.write(input.to_string().as_bytes()); 599 | hasher.finish() 600 | } 601 | -------------------------------------------------------------------------------- /src/to_tokens.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 OR MIT 2 | 3 | use std::iter; 4 | 5 | use proc_macro::{Group, Ident, Punct, TokenStream, TokenTree}; 6 | 7 | pub(crate) trait ToTokens { 8 | fn to_tokens(&self, tokens: &mut TokenStream); 9 | 10 | fn to_token_stream(&self) -> TokenStream { 11 | let mut tokens = TokenStream::new(); 12 | self.to_tokens(&mut tokens); 13 | tokens 14 | } 15 | } 16 | 17 | impl ToTokens for Ident { 18 | fn to_tokens(&self, tokens: &mut TokenStream) { 19 | tokens.extend(iter::once(TokenTree::Ident(self.clone()))); 20 | } 21 | } 22 | 23 | impl ToTokens for Punct { 24 | fn to_tokens(&self, tokens: &mut TokenStream) { 25 | tokens.extend(iter::once(TokenTree::Punct(self.clone()))); 26 | } 27 | } 28 | 29 | impl ToTokens for Group { 30 | fn to_tokens(&self, tokens: &mut TokenStream) { 31 | tokens.extend(iter::once(TokenTree::Group(self.clone()))); 32 | } 33 | } 34 | 35 | impl ToTokens for TokenTree { 36 | fn to_tokens(&self, tokens: &mut TokenStream) { 37 | 
tokens.extend(iter::once(self.clone())); 38 | } 39 | } 40 | 41 | impl ToTokens for TokenStream { 42 | fn to_tokens(&self, tokens: &mut TokenStream) { 43 | tokens.extend(self.clone()); 44 | } 45 | } 46 | 47 | impl ToTokens for Option { 48 | fn to_tokens(&self, tokens: &mut TokenStream) { 49 | if let Some(t) = self { 50 | T::to_tokens(t, tokens); 51 | } 52 | } 53 | } 54 | 55 | impl ToTokens for &T { 56 | fn to_tokens(&self, tokens: &mut TokenStream) { 57 | T::to_tokens(self, tokens); 58 | } 59 | } 60 | 61 | impl ToTokens for [T] { 62 | fn to_tokens(&self, tokens: &mut TokenStream) { 63 | for t in self { 64 | T::to_tokens(t, tokens); 65 | } 66 | } 67 | } 68 | 69 | impl ToTokens for [(T, Option)] { 70 | fn to_tokens(&self, tokens: &mut TokenStream) { 71 | for (t, p) in self { 72 | T::to_tokens(t, tokens); 73 | p.to_tokens(tokens); 74 | } 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /tests/compiletest.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 OR MIT 2 | 3 | #![cfg(not(miri))] 4 | 5 | #[rustversion::attr(not(nightly), ignore)] 6 | #[test] 7 | fn ui() { 8 | let t = trybuild::TestCases::new(); 9 | t.compile_fail("tests/ui/**/*.rs"); 10 | t.pass("tests/run-pass/**/*.rs"); 11 | } 12 | -------------------------------------------------------------------------------- /tests/run-pass/const_trait_impl.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 OR MIT 2 | 3 | #![feature(const_trait_impl)] 4 | 5 | /* 6 | TODO: update for https://github.com/rust-lang/rust/pull/100982 7 | 8 | // https://github.com/rust-lang/rust/issues/67792 9 | // https://github.com/rust-lang/rust/blob/1.63.0/src/test/ui/rfc-2632-const-trait-impl/call-const-trait-method-pass.rs 10 | 11 | use easy_ext::ext; 12 | 13 | #[ext(Ext)] 14 | impl const i32 { 15 | fn plus(self, rhs: Self) -> Self { 16 | 
self + rhs 17 | } 18 | } 19 | 20 | pub const fn add_i32(a: i32, b: i32) -> i32 { 21 | a.plus(b) 22 | } 23 | 24 | const ADD_I32: i32 = 1i32.plus(2i32); 25 | */ 26 | 27 | fn main() {} 28 | -------------------------------------------------------------------------------- /tests/run-pass/impl_trait_in_assoc_type.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 OR MIT 2 | 3 | #![feature(impl_trait_in_assoc_type)] 4 | 5 | use easy_ext::ext; 6 | 7 | #[ext(E1)] 8 | impl I 9 | where 10 | I: Iterator, 11 | { 12 | type Assoc = impl Iterator; 13 | fn assoc(self) -> Self::Assoc { 14 | self 15 | } 16 | } 17 | 18 | fn main() {} 19 | -------------------------------------------------------------------------------- /tests/run-pass/min_specialization.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 OR MIT 2 | 3 | #![feature(min_specialization)] 4 | 5 | // See also run-pass/specialization.rs. 6 | 7 | // https://github.com/rust-lang/rust/blob/1.84.0/tests/ui/specialization/min_specialization/implcit-well-formed-bounds.rs 8 | pub mod implicit_well_formed_bounds { 9 | use easy_ext::ext; 10 | 11 | struct OrdOnly(T); 12 | 13 | #[ext(SpecTrait)] 14 | impl T { 15 | default fn f() {} 16 | } 17 | 18 | impl SpecTrait<()> for OrdOnly { 19 | fn f() {} 20 | } 21 | 22 | impl SpecTrait> for () { 23 | fn f() {} 24 | } 25 | 26 | impl SpecTrait<(OrdOnly, OrdOnly)> for &[OrdOnly] { 27 | fn f() {} 28 | } 29 | } 30 | 31 | // https://github.com/rust-lang/rust/blob/1.84.0/tests/ui/specialization/min_specialization/spec-iter.rs 32 | pub mod spec_iter { 33 | use easy_ext::ext; 34 | 35 | #[ext(SpecFromIter)] 36 | impl<'a, T: 'a, I: Iterator> I { 37 | default fn f(&self) {} 38 | } 39 | 40 | // See also spec_iter module in ui/min_specialization.rs. 
41 | impl<'a, T> SpecFromIter<'a, T> for std::slice::Iter<'a, T> { 42 | fn f(&self) {} 43 | } 44 | } 45 | 46 | // https://github.com/rust-lang/rust/blob/1.84.0/tests/ui/specialization/min_specialization/spec-reference.rs 47 | pub mod spec_reference { 48 | use easy_ext::ext; 49 | 50 | #[ext(MySpecTrait)] 51 | impl T { 52 | default fn f() {} 53 | } 54 | 55 | impl<'a, T: ?Sized> MySpecTrait for &'a T { 56 | fn f() {} 57 | } 58 | } 59 | 60 | fn main() {} 61 | -------------------------------------------------------------------------------- /tests/run-pass/specialization.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 OR MIT 2 | 3 | #![allow(incomplete_features)] 4 | #![feature(specialization)] 5 | 6 | // See also run-pass/min_specialization.rs. 7 | 8 | pub mod default_impl { 9 | // I don't feel `default impl` is the good feature to combine with ext trait, but test anyway. 10 | 11 | // https://github.com/rust-lang/rust/blob/1.84.0/tests/ui/specialization/defaultimpl/auxiliary/go_trait.rs 12 | pub mod go_trait { 13 | use easy_ext::ext; 14 | 15 | pub trait Go { 16 | fn go(&self, arg: isize); 17 | } 18 | 19 | pub fn go(this: &G, arg: isize) { 20 | this.go(arg) 21 | } 22 | 23 | pub fn go_mut(this: &mut G, arg: isize) { 24 | this.go_mut(arg) 25 | } 26 | 27 | pub fn go_once(this: G, arg: isize) { 28 | this.go_once(arg) 29 | } 30 | 31 | #[ext(GoMut)] 32 | pub default impl G 33 | where 34 | G: Go, 35 | { 36 | fn go_mut(&mut self, arg: isize) { 37 | go(&*self, arg) 38 | } 39 | } 40 | 41 | #[ext(GoOnce)] 42 | pub default impl G 43 | where 44 | G: GoMut, 45 | { 46 | fn go_once(mut self, arg: isize) { 47 | go_mut(&mut self, arg) 48 | } 49 | } 50 | } 51 | } 52 | 53 | fn main() {} 54 | -------------------------------------------------------------------------------- /tests/test.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 OR 
MIT 2 | 3 | #![allow( 4 | dead_code, 5 | unreachable_pub, 6 | clippy::items_after_statements, 7 | clippy::missing_safety_doc, 8 | clippy::needless_pass_by_value, 9 | clippy::no_effect_underscore_binding, 10 | clippy::undocumented_unsafe_blocks 11 | )] 12 | 13 | use std::{pin::Pin, rc::Rc}; 14 | 15 | use easy_ext::ext; 16 | 17 | #[test] 18 | fn simple() { 19 | #[ext] 20 | impl str { 21 | fn foo(&self, pat: &str) -> String { 22 | self.replace(pat, "_") 23 | } 24 | } 25 | 26 | assert_eq!("--".foo("-"), "__"); 27 | } 28 | 29 | #[test] 30 | fn params() { 31 | #[ext] 32 | impl Result { 33 | fn err_into(self) -> Result 34 | where 35 | E: Into, 36 | { 37 | self.map_err(Into::into) 38 | } 39 | } 40 | 41 | let err: Result<(), _> = Err(1_u32); 42 | assert_eq!(err.err_into::().unwrap_err(), 1_u64); 43 | } 44 | 45 | #[test] 46 | fn lifetime() { 47 | #[ext(OptionExt)] 48 | impl<'a, T> &'a mut Option { 49 | fn into_ref(self) -> Option<&'a T> { 50 | self.as_ref() 51 | } 52 | } 53 | 54 | let _: Option<&u8> = Some(1).into_ref(); 55 | } 56 | 57 | mod bar { 58 | use easy_ext::ext; 59 | 60 | // assoc-item-level visibility + named 61 | #[ext(E1)] 62 | impl str { 63 | pub const FOO1: &'static str = "_"; 64 | 65 | pub fn foo1(&self, pat: &str) -> String { 66 | self.replace(pat, Self::FOO1) 67 | } 68 | } 69 | 70 | // assoc-item-level visibility + unnamed 71 | #[ext] 72 | impl str { 73 | pub const FOO2: &'static str = "_"; 74 | 75 | pub fn foo2(&self, pat: &str) -> String { 76 | self.replace(pat, Self::FOO2) 77 | } 78 | } 79 | 80 | // impl-level visibility + named 81 | #[ext(E2)] 82 | pub impl str { 83 | const FOO3: &'static str = "_"; 84 | 85 | fn foo3(&self, pat: &str) -> String { 86 | self.replace(pat, Self::FOO3) 87 | } 88 | } 89 | 90 | // impl-level visibility + unnamed 91 | #[ext] 92 | pub impl str { 93 | const FOO4: &'static str = "_"; 94 | 95 | fn foo4(&self, pat: &str) -> String { 96 | self.replace(pat, Self::FOO4) 97 | } 98 | } 99 | 100 | pub(super) mod baz { 101 | use 
easy_ext::ext; 102 | 103 | #[ext(E4)] 104 | impl str { 105 | pub(super) fn bar(&self, pat: &str) -> String { 106 | self.replace(pat, "_") 107 | } 108 | } 109 | 110 | #[ext(E5)] 111 | impl str { 112 | pub fn baz(&self, pat: &str) -> String { 113 | self.replace(pat, "_") 114 | } 115 | 116 | pub fn baz2(&self, pat: &str) -> String { 117 | self.replace(pat, "-") 118 | } 119 | } 120 | 121 | #[ext(E6)] 122 | pub(super) impl str { 123 | fn bar2(&self, pat: &str) -> String { 124 | self.replace(pat, "_") 125 | } 126 | } 127 | 128 | #[ext(E7)] 129 | pub(crate) impl str { 130 | fn baz3(&self, pat: &str) -> String { 131 | self.replace(pat, "_") 132 | } 133 | 134 | fn baz4(&self, pat: &str) -> String { 135 | self.replace(pat, "-") 136 | } 137 | } 138 | } 139 | } 140 | 141 | #[test] 142 | fn visibility() { 143 | use self::bar::{ 144 | E1, E2, 145 | baz::{E5, E7}, 146 | }; 147 | 148 | assert_eq!("..".foo1("."), "__"); 149 | assert_eq!("..".foo3("."), "__"); 150 | assert_eq!("..".baz("."), "__"); 151 | assert_eq!("..".baz2("."), "--"); 152 | assert_eq!("..".baz3("."), "__"); 153 | assert_eq!("..".baz4("."), "--"); 154 | } 155 | 156 | #[test] 157 | fn generics() { 158 | #[ext(IterExt)] 159 | impl I { 160 | fn next2(self) -> Option { 161 | self.into_iter().next() 162 | } 163 | } 164 | 165 | assert_eq!(vec![1, 2, 3].next2(), Some(1_u8)); 166 | } 167 | 168 | #[test] 169 | fn trait_generics() { 170 | #[derive(Debug, PartialEq, Eq)] 171 | struct A {} 172 | 173 | impl Iterator for A { 174 | type Item = (); 175 | fn next(&mut self) -> Option { 176 | None 177 | } 178 | } 179 | 180 | #[ext(ConstInit)] 181 | impl A { 182 | const INIT1: Self = Self {}; 183 | const INIT2: A = A {}; 184 | } 185 | 186 | #[ext(Ext1)] 187 | impl I { 188 | const CONST1: Self = Self::INIT1; 189 | const CONST2: I = I::INIT1; 190 | type Item2 = Self::Item; 191 | type Item3 = I::Item; 192 | fn method1(mut self) -> Option { 193 | self.next() 194 | } 195 | fn method2(mut self) -> Option { 196 | self.next() 197 | } 198 | 
fn method3(mut self) -> Option { 199 | self.next() 200 | } 201 | fn method4(mut self) -> Option<::Item3> { 202 | self.next() 203 | } 204 | } 205 | 206 | fn a(mut x: T) { 207 | let y = T::CONST1; 208 | let _ = T::CONST2; 209 | assert_eq!(x, y); 210 | assert!(x.next().is_none()); 211 | } 212 | 213 | assert_eq!(A {}.method1(), None); 214 | assert_eq!(A {}.method2(), None); 215 | 216 | a(A::INIT1); 217 | a(A::INIT2); 218 | 219 | #[ext(Ext2)] 220 | impl I { 221 | const CONST3: I = { 222 | fn a() {} 223 | I::INIT1 224 | }; 225 | type Item4 = I::Item; 226 | fn method5(self, _: I::Item) -> (Option, ::Item4) { 227 | fn a() {} 228 | unimplemented!() 229 | } 230 | } 231 | } 232 | 233 | #[test] 234 | fn type_parameter_defaults() { 235 | #[ext(Ext)] 236 | impl () {} 237 | impl Ext for u8 {} 238 | 239 | // The code above is equivalent to the code below. 240 | 241 | trait Trait {} 242 | impl Trait for () {} 243 | impl Trait for u8 {} 244 | } 245 | 246 | // See also ui/maybe.rs 247 | #[test] 248 | fn maybe() { 249 | #[ext] 250 | impl T { 251 | fn f(&self) {} 252 | } 253 | 254 | #[ext] 255 | impl T 256 | where 257 | T: ?Sized, 258 | { 259 | fn f(&self) {} 260 | } 261 | 262 | #[ext] 263 | impl T { 264 | fn f(&self) {} 265 | } 266 | 267 | #[ext] 268 | impl T 269 | where 270 | T: Send + ?Sized + Sync, 271 | { 272 | fn f(&self) {} 273 | } 274 | 275 | #[ext] 276 | impl T 277 | where 278 | T: Iterator, 279 | T: ?Sized, 280 | T: Send, 281 | { 282 | fn f(&self) {} 283 | } 284 | } 285 | 286 | #[test] 287 | fn inline() { 288 | #[ext] 289 | impl str { 290 | #[inline] 291 | fn auto(&self) {} 292 | #[inline(always)] 293 | fn always(&self) {} 294 | #[inline(never)] 295 | fn never(&self) {} 296 | } 297 | } 298 | 299 | #[test] 300 | fn assoc_ty() { 301 | #[ext(StrExt)] 302 | impl str { 303 | type Assoc = String; 304 | 305 | fn owned(&self) -> Self::Assoc { 306 | self.to_owned() 307 | } 308 | } 309 | 310 | let s: ::Assoc = "?".owned(); 311 | assert_eq!(s, "?"); 312 | 313 | #[ext(TryIterator)] 314 | 
impl>, T, E> I { 315 | type Ok = T; 316 | type Error = E; 317 | 318 | fn try_next(&mut self) -> Result, Self::Error> { 319 | self.next().transpose() 320 | } 321 | } 322 | 323 | let mut iter = vec![Ok(1), Err(1)].into_iter(); 324 | assert_eq!(iter.try_next(), Ok(Some(1))); 325 | assert_eq!(iter.try_next(), Err(1)); 326 | assert_eq!(iter.try_next(), Ok(None)); 327 | } 328 | 329 | #[allow(clippy::let_underscore_future)] 330 | #[test] 331 | fn syntax() { 332 | #[ext(E1)] 333 | unsafe impl str { 334 | fn normal(&self) {} 335 | unsafe fn unsafety(&self) {} 336 | extern "C" fn abi1() {} 337 | extern "C" fn abi2() {} 338 | unsafe extern "C" fn unsafe_abi1() {} 339 | unsafe extern "C" fn unsafe_abi2() {} 340 | async fn asyncness(&self) {} 341 | async unsafe fn unsafe_asyncness(&self) {} 342 | } 343 | 344 | "a".normal(); 345 | unsafe { "?".unsafety() } 346 | str::abi1(); 347 | unsafe { str::unsafe_abi1() } 348 | let _ = async { 349 | "a".asyncness().await; 350 | unsafe { "b".unsafe_asyncness().await } 351 | }; 352 | 353 | struct S {} 354 | unsafe impl E1 for S { 355 | fn normal(&self) {} 356 | unsafe fn unsafety(&self) {} 357 | extern "C" fn abi1() {} 358 | extern "C" fn abi2() {} 359 | unsafe extern "C" fn unsafe_abi1() {} 360 | unsafe extern "C" fn unsafe_abi2() {} 361 | async fn asyncness(&self) {} 362 | async unsafe fn unsafe_asyncness(&self) {} 363 | } 364 | } 365 | 366 | // test for angle bracket 367 | #[test] 368 | fn angle_bracket() { 369 | #[ext] 370 | impl fn() -> () { 371 | const FUNC: fn() -> fn() -> fn() -> () = Self::func; 372 | type Func = fn() -> fn() -> (); 373 | fn func() -> fn() -> fn() -> () { 374 | || || {} 375 | } 376 | } 377 | 378 | #[ext(E1)] 379 | impl T 380 | where 381 | Self::Assoc3: Sized, 382 | T::Assoc3: Sized, 383 | Self: E2, 384 | T: E2, 385 | { 386 | const ASSOC1: ::Assoc1 = ::assoc1; 387 | type Assoc1 = fn() -> ::Assoc2; 388 | type Assoc2 = (); 389 | fn assoc1() -> ::Assoc2 390 | where 391 | ::Assoc1: Sized, 392 | ::Assoc1: Sized, 393 | 
Self::Assoc1: Sized, 394 | T::Assoc3: Sized, 395 | Self: E2, 396 | T: E2, 397 | { 398 | } 399 | } 400 | 401 | struct A {} 402 | #[ext(E2)] 403 | impl A { 404 | const ASSOC1: ::Assoc3 = ::assoc2; 405 | type Assoc3 = fn() -> ::Assoc4; 406 | type Assoc4 = (); 407 | fn assoc2() -> ::Assoc4 408 | where 409 | ::Assoc3: Sized, 410 | Self::Assoc3: Sized, 411 | { 412 | } 413 | } 414 | 415 | #[ext] 416 | impl fn() -> T, E> Result 417 | where 418 | E: FnOnce() -> Result, 419 | &'static dyn Fn() -> T: Fn() -> T + 'static, 420 | fn() -> fn() -> T: Fn() -> fn() -> T, 421 | { 422 | fn where_clause fn() -> T, F>(self, _f: F) -> Self 423 | where 424 | F: FnOnce() -> Result, 425 | &'static dyn Fn() -> T: Fn() -> T + 'static, 426 | fn() -> fn() -> T: Fn() -> fn() -> T, 427 | { 428 | unimplemented!() 429 | } 430 | } 431 | } 432 | 433 | #[test] 434 | fn min_const_generics() { 435 | struct S1([T; CAP]); 436 | #[ext(E1)] 437 | impl S1 { 438 | const CAPACITY: usize = CAP; 439 | fn f() -> S1 { 440 | unimplemented!() 441 | } 442 | } 443 | 444 | struct S2; 445 | impl E1<(), { CAP }> for S2<{ CAP }> { 446 | const CAPACITY: usize = CAP; 447 | fn f() -> S1<(), { C }> { 448 | S1([(); C]) 449 | } 450 | } 451 | 452 | let _: [(); 2] = >::f::<2>().0; 453 | 454 | struct S3(T); 455 | 456 | #[ext(E2)] 457 | impl str { 458 | fn method1(&self) -> S1 ()>, 1> { 459 | S1([Some(|| {})]) 460 | } 461 | #[allow(unused_braces)] 462 | fn method2(&self) -> S1 ()>, { 1 }> { 463 | S1([Some(|| {})]) 464 | } 465 | fn method3(&self) -> S3 (), 'a'> { 466 | S3(|| {}) 467 | } 468 | #[allow(unused_braces)] 469 | fn method4(&self) -> S3 (), { 'a' }> { 470 | S3(|| {}) 471 | } 472 | } 473 | } 474 | 475 | mod const_generics_defaults { 476 | // https://github.com/rust-lang/rust/tree/1.80.0/tests/ui/const-generics/defaults 477 | 478 | use easy_ext::ext; 479 | 480 | #[ext(Ext)] 481 | impl () {} 482 | impl Ext for u8 {} 483 | 484 | // The code above is equivalent to the code below. 
485 | 486 | trait Trait {} 487 | impl Trait for () {} 488 | impl Trait for u8 {} 489 | 490 | // https://github.com/rust-lang/rust/blob/1.84.0/tests/ui/const-generics/defaults/const-param-as-default-value.rs 491 | #[ext(Ext2)] 492 | impl () {} 493 | } 494 | 495 | mod generic_associated_types { 496 | // https://github.com/rust-lang/rust/blob/1.84.0/tests/ui/generic-associated-types/collections.rs 497 | 498 | use easy_ext::ext; 499 | 500 | trait CollectionFamily { 501 | type Member: Collection; 502 | } 503 | 504 | struct VecFamily; 505 | 506 | impl CollectionFamily for VecFamily { 507 | type Member = Vec; 508 | } 509 | 510 | #[ext(Collection)] 511 | impl Vec { 512 | // TODO: handle where clause in GAT: https://github.com/rust-lang/rust/pull/90076 513 | // type Iter<'iter> = std::slice::Iter<'iter, T> 514 | // where 515 | // T: 'iter, 516 | // Self: 'iter; 517 | type Family = VecFamily; 518 | type Sibling = <>::Family as CollectionFamily>::Member; 519 | 520 | fn empty() -> Self { 521 | vec![] 522 | } 523 | 524 | fn add(&mut self, value: T) { 525 | self.push(value); 526 | } 527 | 528 | // TODO: handle where clause in GAT: https://github.com/rust-lang/rust/pull/90076 529 | // fn iterate<'iter>(&'iter self) -> Self::Iter<'iter> { 530 | // self.iter() 531 | // } 532 | } 533 | } 534 | 535 | mod associated_type_bounds { 536 | // https://github.com/rust-lang/rust/blob/1.84.0/tests/ui/associated-type-bounds/fn-where.rs 537 | mod fn_where { 538 | use easy_ext::ext; 539 | 540 | use super::fn_aux::*; 541 | 542 | #[ext(E1)] 543 | impl T { 544 | fn where_bound(beta: B) -> usize 545 | where 546 | B: Beta, 547 | { 548 | desugared_bound(beta) 549 | } 550 | 551 | fn where_bound_region(beta: B) -> usize 552 | where 553 | B: Beta, 554 | { 555 | desugared_bound_region(beta) 556 | } 557 | 558 | fn where_bound_multi(beta: B) -> usize 559 | where 560 | B: Copy + Beta, 561 | { 562 | desugared_bound_multi(beta) 563 | } 564 | 565 | fn where_bound_region_specific<'a, B>(gamma: &'a B::Gamma) -> 
usize 566 | where 567 | B: Beta>, 568 | { 569 | desugared_bound_region_specific::(gamma) 570 | } 571 | 572 | fn where_bound_region_forall(beta: B) -> usize 573 | where 574 | B: Beta Epsilon<'a>>, 575 | { 576 | desugared_bound_region_forall(beta) 577 | } 578 | 579 | fn where_bound_region_forall2(beta: B) -> usize 580 | where 581 | B: Beta Epsilon<'a, Zeta: Eta>>, 582 | { 583 | desugared_bound_region_forall2(beta) 584 | } 585 | 586 | fn where_constraint_region_forall(beta: B) -> usize 587 | where 588 | for<'a> &'a B: Beta, 589 | { 590 | desugared_constraint_region_forall(beta) 591 | } 592 | 593 | fn where_bound_nested(beta: B) -> usize 594 | where 595 | B: Beta>, 596 | { 597 | desugared_bound_nested(beta) 598 | } 599 | } 600 | } 601 | 602 | // https://github.com/rust-lang/rust/blob/1.84.0/tests/ui/associated-type-bounds/auxiliary/fn-aux.rs 603 | mod fn_aux { 604 | // Traits: 605 | 606 | pub trait Alpha { 607 | fn alpha(self) -> usize; 608 | } 609 | 610 | pub trait Beta { 611 | type Gamma; 612 | fn gamma(self) -> Self::Gamma; 613 | } 614 | 615 | pub trait Delta { 616 | fn delta(self) -> usize; 617 | } 618 | 619 | pub trait Epsilon<'a> { 620 | type Zeta; 621 | fn zeta(&'a self) -> Self::Zeta; 622 | 623 | fn epsilon(&'a self) -> usize; 624 | } 625 | 626 | pub trait Eta { 627 | fn eta(self) -> usize; 628 | } 629 | 630 | // Assertions: 631 | 632 | pub fn assert_alpha(x: T) -> usize { 633 | x.alpha() 634 | } 635 | pub fn assert_static(_: T) -> usize { 636 | 24 637 | } 638 | pub fn assert_delta(x: T) -> usize { 639 | x.delta() 640 | } 641 | pub fn assert_epsilon_specific<'a, T: 'a + Epsilon<'a>>(x: &'a T) -> usize { 642 | x.epsilon() 643 | } 644 | pub fn assert_epsilon_forall Epsilon<'a>>() {} 645 | pub fn assert_forall_epsilon_zeta_satisfies_eta(x: T) -> usize 646 | where 647 | T: for<'a> Epsilon<'a>, 648 | for<'a> >::Zeta: Eta, 649 | { 650 | x.epsilon() + x.zeta().eta() 651 | } 652 | 653 | // Implementations and types: 654 | 655 | #[derive(Copy, Clone)] 656 | pub struct 
BetaType; 657 | 658 | #[derive(Copy, Clone)] 659 | pub struct GammaType; 660 | 661 | #[derive(Copy, Clone)] 662 | pub struct ZetaType; 663 | 664 | impl Beta for BetaType { 665 | type Gamma = GammaType; 666 | fn gamma(self) -> Self::Gamma { 667 | GammaType 668 | } 669 | } 670 | 671 | impl Beta for &BetaType { 672 | type Gamma = GammaType; 673 | fn gamma(self) -> Self::Gamma { 674 | GammaType 675 | } 676 | } 677 | 678 | impl Beta for GammaType { 679 | type Gamma = Self; 680 | fn gamma(self) -> Self::Gamma { 681 | self 682 | } 683 | } 684 | 685 | impl Alpha for GammaType { 686 | fn alpha(self) -> usize { 687 | 42 688 | } 689 | } 690 | 691 | impl Delta for GammaType { 692 | fn delta(self) -> usize { 693 | 1337 694 | } 695 | } 696 | 697 | impl<'a> Epsilon<'a> for GammaType { 698 | type Zeta = ZetaType; 699 | fn zeta(&'a self) -> Self::Zeta { 700 | ZetaType 701 | } 702 | 703 | fn epsilon(&'a self) -> usize { 704 | 7331 705 | } 706 | } 707 | 708 | impl Eta for ZetaType { 709 | fn eta(self) -> usize { 710 | 7 711 | } 712 | } 713 | 714 | // Desugared forms to check against: 715 | 716 | pub fn desugared_bound(beta: B) -> usize 717 | where 718 | B: Beta, 719 | B::Gamma: Alpha, 720 | { 721 | let gamma: B::Gamma = beta.gamma(); 722 | assert_alpha::(gamma) 723 | } 724 | 725 | pub fn desugared_bound_region(beta: B) -> usize 726 | where 727 | B: Beta, 728 | B::Gamma: 'static, 729 | { 730 | assert_static::(beta.gamma()) 731 | } 732 | 733 | pub fn desugared_bound_multi(beta: B) -> usize 734 | where 735 | B: Copy + Beta, 736 | B::Gamma: Alpha + 'static + Delta, 737 | { 738 | assert_alpha::(beta.gamma()) 739 | + assert_static::(beta.gamma()) 740 | + assert_delta::(beta.gamma()) 741 | } 742 | 743 | pub fn desugared_bound_region_specific<'a, B>(gamma: &'a B::Gamma) -> usize 744 | where 745 | B: Beta, 746 | B::Gamma: 'a + Epsilon<'a>, 747 | { 748 | assert_epsilon_specific::(gamma) 749 | } 750 | 751 | pub fn desugared_bound_region_forall(beta: B) -> usize 752 | where 753 | B: Beta, 754 | 
B::Gamma: Copy + for<'a> Epsilon<'a>, 755 | { 756 | assert_epsilon_forall::(); 757 | let g1: B::Gamma = beta.gamma(); 758 | let g2: B::Gamma = g1; 759 | assert_epsilon_specific::(&g1) + assert_epsilon_specific::(&g2) 760 | } 761 | 762 | pub fn desugared_bound_region_forall2(beta: B) -> usize 763 | where 764 | B: Beta, 765 | B::Gamma: Copy + for<'a> Epsilon<'a>, 766 | for<'a> >::Zeta: Eta, 767 | { 768 | let gamma = beta.gamma(); 769 | assert_forall_epsilon_zeta_satisfies_eta::(gamma) 770 | } 771 | 772 | pub fn desugared_constraint_region_forall(beta: B) -> usize 773 | where 774 | for<'a> &'a B: Beta, 775 | for<'a> <&'a B as Beta>::Gamma: Alpha, 776 | { 777 | let g1 = beta.gamma(); 778 | let g2 = beta.gamma(); 779 | assert_alpha(g1) + assert_alpha(g2) 780 | } 781 | 782 | pub fn desugared_bound_nested(beta: B) -> usize 783 | where 784 | B: Beta, 785 | B::Gamma: Copy + Alpha + Beta, 786 | ::Gamma: Delta, 787 | { 788 | let go = beta.gamma(); 789 | let gi = go.gamma(); 790 | go.alpha() + gi.delta() 791 | } 792 | 793 | pub fn desugared() { 794 | let beta = BetaType; 795 | let gamma = beta.gamma(); 796 | 797 | assert_eq!(42, desugared_bound(beta)); 798 | assert_eq!(24, desugared_bound_region(beta)); 799 | assert_eq!(42 + 24 + 1337, desugared_bound_multi(beta)); 800 | assert_eq!(7331, desugared_bound_region_specific::(&gamma)); 801 | assert_eq!(7331 * 2, desugared_bound_region_forall(beta)); 802 | assert_eq!(42 + 1337, desugared_bound_nested(beta)); 803 | } 804 | } 805 | } 806 | 807 | #[test] 808 | fn macros() { 809 | macro_rules! 
m { 810 | ( 811 | $impl:ident $path:path [$($generics:tt)*] $where:ident {$( 812 | [$vis:vis, $($fn_sig:ident)*] 813 | )*} 814 | ) => { 815 | $( 816 | #[ext] 817 | $impl Result { 818 | $vis $($fn_sig)* (self) -> Result 819 | where 820 | E: Into, 821 | { 822 | unimplemented!() 823 | } 824 | } 825 | )* 826 | }; 827 | } 828 | 829 | m!(impl Result [T,E] where { 830 | [, fn a] 831 | [pub, fn b] 832 | [pub, unsafe fn c] 833 | [pub(crate), fn d] 834 | }); 835 | } 836 | 837 | // https://github.com/taiki-e/easy-ext/issues/36 838 | #[test] 839 | fn where_clause() { 840 | pub trait Trait {} 841 | 842 | #[rustfmt::skip] 843 | #[ext] 844 | pub impl Vec 845 | where 846 | Self: Trait> 847 | { 848 | } 849 | } 850 | 851 | #[allow(clippy::needless_pub_self)] // This is intentional 852 | pub mod visibility { 853 | use easy_ext::ext; 854 | 855 | pub struct Pub; 856 | #[ext] 857 | impl str { 858 | pub const ASSOC: u8 = 1; 859 | pub type Assoc = u8; 860 | pub fn assoc() {} 861 | } 862 | 863 | pub struct PubCrate; 864 | #[ext] 865 | impl PubCrate { 866 | pub(crate) const ASSOC: u8 = 1; 867 | pub(crate) type Assoc = u8; 868 | pub(crate) fn assoc() {} 869 | } 870 | 871 | pub struct PubSelf; 872 | #[ext] 873 | impl PubSelf { 874 | pub(self) const ASSOC: u8 = 1; 875 | pub(self) type Assoc = u8; 876 | pub(self) fn assoc() {} 877 | } 878 | 879 | pub mod m { 880 | use easy_ext::ext; 881 | 882 | pub struct PubSuper; 883 | #[ext] 884 | impl PubSuper { 885 | pub(super) const ASSOC: u8 = 1; 886 | pub(super) type Assoc = u8; 887 | pub(super) fn assoc() {} 888 | } 889 | 890 | pub struct PubIn; 891 | #[ext] 892 | impl PubIn { 893 | pub(in super::m) const ASSOC: u8 = 1; 894 | pub(in super::m) type Assoc = u8; 895 | pub(in super::m) fn assoc() {} 896 | } 897 | } 898 | } 899 | 900 | #[test] 901 | fn arg_pat() { 902 | #[ext] 903 | impl str { 904 | fn f((x, y): (u8, u8)) { 905 | let _x = x; 906 | let _y = y; 907 | } 908 | } 909 | } 910 | 911 | #[test] 912 | fn arbitrary_self_types() { 913 | #[ext] 914 | 
#[allow(clippy::needless_arbitrary_self_type)] 915 | impl String { 916 | fn recv(self: Self) {} 917 | fn recv_ref(self: &Self) {} 918 | fn recv_mut(self: &mut Self) {} 919 | fn recv_rc(self: Rc) {} 920 | fn recv_rc_ref(self: &Rc) {} 921 | fn recv_rc_mut(self: &mut Rc) {} 922 | fn recv_pin_box(self: Pin>) {} 923 | } 924 | 925 | String::default().recv(); 926 | String::default().recv_ref(); 927 | String::default().recv_mut(); 928 | Rc::new(String::default()).recv_rc(); 929 | Rc::new(String::default()).recv_rc_ref(); 930 | Rc::new(String::default()).recv_rc_mut(); 931 | Box::pin(String::default()).recv_pin_box(); 932 | } 933 | -------------------------------------------------------------------------------- /tests/ui/invalid.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 OR MIT 2 | 3 | mod basic { 4 | use easy_ext::ext; 5 | 6 | #[ext(NoValueConst)] 7 | impl str { 8 | const ASSOC: u8; //~ ERROR expected `=` 9 | } 10 | #[ext(NoValueTy)] 11 | impl str { 12 | type Assoc; //~ ERROR expected `=` 13 | } 14 | #[ext(NoValueFn)] 15 | impl str { 16 | fn assoc(); //~ ERROR expected `{` 17 | } 18 | 19 | #[ext(Macro)] 20 | impl str { 21 | mac!(); //~ ERROR expected one of: `default`, `fn`, `const`, `type` 22 | } 23 | 24 | #[rustfmt::skip] 25 | #[ext(ExtraArg,)] //~ ERROR unexpected token 26 | impl str {} 27 | 28 | #[ext(pub OldVisSyntax1)] //~ ERROR use `pub impl` instead 29 | impl str {} 30 | 31 | #[ext(pub(crate) OldVisSyntax2)] //~ ERROR use `pub(crate) impl` instead 32 | impl str {} 33 | } 34 | 35 | mod visibility { 36 | use easy_ext::ext; 37 | 38 | #[ext(AssocLevel1)] 39 | impl str { 40 | pub const ASSOC1: u8 = 1; 41 | const ASSOC2: u8 = 2; //~ ERROR all associated items must have a visibility of `pub` 42 | } 43 | 44 | #[ext(AssocLevel2)] 45 | impl str { 46 | fn assoc1(&self) {} 47 | 48 | pub fn assoc2(&self) {} //~ ERROR all associated items must have inherited visibility 49 | } 50 | 51 | 
#[ext(AssocLevel3)] 52 | impl str { 53 | pub(crate) type Assoc1 = (); 54 | pub type Assoc2 = (); //~ ERROR all associated items must have a visibility of `pub(crate)` 55 | } 56 | 57 | #[ext(ImplLevel1)] 58 | pub impl str { 59 | fn assoc1(&self) {} 60 | 61 | pub fn assoc2(&self) {} //~ ERROR all associated items must have inherited visibility 62 | } 63 | } 64 | 65 | fn main() {} 66 | -------------------------------------------------------------------------------- /tests/ui/invalid.stderr: -------------------------------------------------------------------------------- 1 | error: expected `=` 2 | --> tests/ui/invalid.rs:8:24 3 | | 4 | 8 | const ASSOC: u8; //~ ERROR expected `=` 5 | | ^ 6 | 7 | error: expected `=` 8 | --> tests/ui/invalid.rs:12:19 9 | | 10 | 12 | type Assoc; //~ ERROR expected `=` 11 | | ^ 12 | 13 | error: expected `{` 14 | --> tests/ui/invalid.rs:16:19 15 | | 16 | 16 | fn assoc(); //~ ERROR expected `{` 17 | | ^ 18 | 19 | error: expected one of: `default`, `fn`, `const`, `type` 20 | --> tests/ui/invalid.rs:21:9 21 | | 22 | 21 | mac!(); //~ ERROR expected one of: `default`, `fn`, `const`, `type` 23 | | ^^^ 24 | 25 | error: unexpected token: `,` 26 | --> tests/ui/invalid.rs:25:19 27 | | 28 | 25 | #[ext(ExtraArg,)] //~ ERROR unexpected token 29 | | ^ 30 | 31 | error: use `pub impl` instead 32 | --> tests/ui/invalid.rs:28:11 33 | | 34 | 28 | #[ext(pub OldVisSyntax1)] //~ ERROR use `pub impl` instead 35 | | ^^^ 36 | 37 | error: use `pub(crate) impl` instead 38 | --> tests/ui/invalid.rs:31:11 39 | | 40 | 31 | #[ext(pub(crate) OldVisSyntax2)] //~ ERROR use `pub(crate) impl` instead 41 | | ^^^^^^^^^^ 42 | 43 | error: all associated items must have a visibility of `pub` 44 | --> tests/ui/invalid.rs:41:15 45 | | 46 | 41 | const ASSOC2: u8 = 2; //~ ERROR all associated items must have a visibility of `pub` 47 | | ^^^^^^ 48 | 49 | error: all associated items must have inherited visibility 50 | --> tests/ui/invalid.rs:48:9 51 | | 52 | 48 | pub fn assoc2(&self) {} 
//~ ERROR all associated items must have inherited visibility 53 | | ^^^ 54 | 55 | error: all associated items must have a visibility of `pub(crate)` 56 | --> tests/ui/invalid.rs:54:9 57 | | 58 | 54 | pub type Assoc2 = (); //~ ERROR all associated items must have a visibility of `pub(crate)` 59 | | ^^^ 60 | 61 | error: all associated items must have inherited visibility 62 | --> tests/ui/invalid.rs:61:9 63 | | 64 | 61 | pub fn assoc2(&self) {} //~ ERROR all associated items must have inherited visibility 65 | | ^^^ 66 | -------------------------------------------------------------------------------- /tests/ui/maybe.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 OR MIT 2 | 3 | // See also `maybe` test in test.rs 4 | 5 | use easy_ext::ext; 6 | 7 | #[ext(E1)] 8 | impl T // Ok 9 | { 10 | fn f(&self) {} 11 | } 12 | 13 | #[ext(E2)] 14 | impl T 15 | where 16 | T: ?Sized, // Ok 17 | { 18 | fn f(&self) {} 19 | } 20 | 21 | #[ext(E3)] 22 | impl T 23 | where 24 | Self: ?Sized, 25 | { 26 | //~^^ ERROR `?Trait` bounds are only permitted at the point where a type parameter is declared 27 | fn f(&self) {} 28 | } 29 | 30 | // The following is a case where #[ext] is not used. The behavior should match in both cases. 
31 | 32 | trait T1 33 | where 34 | Self: ?Sized, 35 | { 36 | //~^^ ERROR `?Trait` bounds are only permitted at the point where a type parameter is declared 37 | fn f(&self); 38 | } 39 | 40 | trait T2 { 41 | fn f(&self); 42 | } 43 | impl T2 for T 44 | where 45 | Self: ?Sized, 46 | { 47 | //~^^ ERROR `?Trait` bounds are only permitted at the point where a type parameter is declared 48 | fn f(&self) {} 49 | } 50 | 51 | trait T3 { 52 | fn f(&self); 53 | } 54 | impl T3 for T // Ok 55 | { 56 | fn f(&self) {} 57 | } 58 | 59 | trait T4 { 60 | fn f(&self); 61 | } 62 | impl T4 for T 63 | where 64 | T: ?Sized, // Ok 65 | { 66 | fn f(&self) {} 67 | } 68 | 69 | trait T5 { 70 | fn f(&self); 71 | } 72 | impl T5 for T { 73 | fn f(&self) 74 | where 75 | T: ?Sized, 76 | { 77 | //~^^ ERROR `?Trait` bounds are only permitted at the point where a type parameter is declared 78 | } 79 | } 80 | 81 | fn main() {} 82 | -------------------------------------------------------------------------------- /tests/ui/maybe.stderr: -------------------------------------------------------------------------------- 1 | error[E0658]: `?Trait` bounds are only permitted at the point where a type parameter is declared 2 | --> tests/ui/maybe.rs:34:11 3 | | 4 | 34 | Self: ?Sized, 5 | | ^^^^^^ 6 | | 7 | = help: add `#![feature(more_maybe_bounds)]` to the crate attributes to enable 8 | 9 | error[E0658]: `?Trait` bounds are only permitted at the point where a type parameter is declared 10 | --> tests/ui/maybe.rs:45:11 11 | | 12 | 45 | Self: ?Sized, 13 | | ^^^^^^ 14 | | 15 | = help: add `#![feature(more_maybe_bounds)]` to the crate attributes to enable 16 | 17 | error[E0658]: `?Trait` bounds are only permitted at the point where a type parameter is declared 18 | --> tests/ui/maybe.rs:75:12 19 | | 20 | 75 | T: ?Sized, 21 | | ^^^^^^ 22 | | 23 | = help: add `#![feature(more_maybe_bounds)]` to the crate attributes to enable 24 | 25 | error[E0658]: `?Trait` bounds are only permitted at the point where a type parameter 
is declared 26 | --> tests/ui/maybe.rs:24:11 27 | | 28 | 24 | Self: ?Sized, 29 | | ^^^^^^ 30 | | 31 | = help: add `#![feature(more_maybe_bounds)]` to the crate attributes to enable 32 | -------------------------------------------------------------------------------- /tests/ui/visibility.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 OR MIT 2 | 3 | mod foo { 4 | use easy_ext::ext; 5 | 6 | #[ext(StrExt1)] 7 | impl str { 8 | fn method1(&self, pat: &str) -> String { 9 | self.replace(pat, "_") 10 | } 11 | } 12 | 13 | #[ext(StrExt2)] 14 | pub(self) impl str { 15 | fn method2(&self, pat: &str) -> String { 16 | self.replace(pat, "_") 17 | } 18 | } 19 | 20 | pub mod bar { 21 | use easy_ext::ext; 22 | 23 | #[ext(StrExt3)] 24 | pub(super) impl str { 25 | fn method3(&self, pat: &str) -> String { 26 | self.replace(pat, "_") 27 | } 28 | } 29 | } 30 | 31 | #[allow(unused_imports)] 32 | use bar::StrExt3; 33 | } 34 | 35 | fn main() { 36 | #[rustfmt::skip] 37 | use foo::StrExt1; //~ ERROR trait `StrExt1` is private [E0603] 38 | #[rustfmt::skip] 39 | use foo::StrExt2; //~ ERROR trait `StrExt2` is private [E0603] 40 | #[rustfmt::skip] 41 | use foo::bar::StrExt3; //~ ERROR trait `StrExt3` is private [E0603] 42 | } 43 | -------------------------------------------------------------------------------- /tests/ui/visibility.stderr: -------------------------------------------------------------------------------- 1 | error[E0603]: trait `StrExt1` is private 2 | --> tests/ui/visibility.rs:37:14 3 | | 4 | 37 | use foo::StrExt1; //~ ERROR trait `StrExt1` is private [E0603] 5 | | ^^^^^^^ private trait 6 | | 7 | note: the trait `StrExt1` is defined here 8 | --> tests/ui/visibility.rs:7:5 9 | | 10 | 7 | impl str { 11 | | ^^^^^^^^ 12 | 13 | error[E0603]: trait `StrExt2` is private 14 | --> tests/ui/visibility.rs:39:14 15 | | 16 | 39 | use foo::StrExt2; //~ ERROR trait `StrExt2` is private [E0603] 17 | | ^^^^^^^ private 
trait 18 | | 19 | note: the trait `StrExt2` is defined here 20 | --> tests/ui/visibility.rs:14:5 21 | | 22 | 14 | pub(self) impl str { 23 | | ^^^^^^^^^^^^^^^^^^ 24 | 25 | error[E0603]: trait `StrExt3` is private 26 | --> tests/ui/visibility.rs:41:19 27 | | 28 | 41 | use foo::bar::StrExt3; //~ ERROR trait `StrExt3` is private [E0603] 29 | | ^^^^^^^ private trait 30 | | 31 | note: the trait `StrExt3` is defined here 32 | --> tests/ui/visibility.rs:24:9 33 | | 34 | 24 | pub(super) impl str { 35 | | ^^^^^^^^^^^^^^^^^^^ 36 | -------------------------------------------------------------------------------- /tools/.tidy-check-license-headers: -------------------------------------------------------------------------------- 1 | git ls-files 2 | -------------------------------------------------------------------------------- /tools/publish.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # SPDX-License-Identifier: Apache-2.0 OR MIT 3 | set -CeEuo pipefail 4 | IFS=$'\n\t' 5 | trap -- 's=$?; printf >&2 "%s\n" "${0##*/}:${LINENO}: \`${BASH_COMMAND}\` exit with ${s}"; exit ${s}' ERR 6 | cd -- "$(dirname -- "$0")"/.. 7 | 8 | # Publish a new release. 9 | # 10 | # USAGE: 11 | # ./tools/publish.sh 12 | # 13 | # Note: This script requires the following tools: 14 | # - parse-changelog 15 | 16 | retry() { 17 | for i in {1..10}; do 18 | if "$@"; then 19 | return 0 20 | else 21 | sleep "${i}" 22 | fi 23 | done 24 | "$@" 25 | } 26 | bail() { 27 | printf >&2 'error: %s\n' "$*" 28 | exit 1 29 | } 30 | 31 | version="${1:?}" 32 | version="${version#v}" 33 | tag_prefix="v" 34 | tag="${tag_prefix}${version}" 35 | changelog="CHANGELOG.md" 36 | if [[ ! 
"${version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[0-9A-Za-z\.-]+)?(\+[0-9A-Za-z\.-]+)?$ ]]; then 37 | bail "invalid version format '${version}'" 38 | fi 39 | if [[ $# -gt 1 ]]; then 40 | bail "invalid argument '$2'" 41 | fi 42 | if { sed --help 2>&1 || true; } | grep -Eq -e '-i extension'; then 43 | in_place=(-i '') 44 | else 45 | in_place=(-i) 46 | fi 47 | 48 | # Make sure there is no uncommitted change. 49 | git diff --exit-code 50 | git diff --exit-code --staged 51 | 52 | # Make sure the same release has not been created in the past. 53 | if gh release view "${tag}" &>/dev/null; then 54 | bail "tag '${tag}' has already been created and pushed" 55 | fi 56 | 57 | # Make sure that the release was created from an allowed branch. 58 | if ! git branch | grep -Eq '\* main$'; then 59 | bail "current branch is not 'main'" 60 | fi 61 | if ! git remote -v | grep -F origin | grep -Eq 'github\.com[:/]taiki-e/'; then 62 | bail "cannot publish a new release from fork repository" 63 | fi 64 | 65 | release_date=$(date -u '+%Y-%m-%d') 66 | tags=$(git --no-pager tag | { grep -E "^${tag_prefix}[0-9]+" || true; }) 67 | if [[ -n "${tags}" ]]; then 68 | # Make sure the same release does not exist in changelog. 69 | if grep -Eq "^## \\[${version//./\\.}\\]" "${changelog}"; then 70 | bail "release ${version} already exist in ${changelog}" 71 | fi 72 | if grep -Eq "^\\[${version//./\\.}\\]: " "${changelog}"; then 73 | bail "link to ${version} already exist in ${changelog}" 74 | fi 75 | # Update changelog. 
76 | remote_url=$(grep -E '^\[Unreleased\]: https://' "${changelog}" | sed -E 's/^\[Unreleased\]: //; s/\.\.\.HEAD$//') 77 | prev_tag="${remote_url#*/compare/}" 78 | remote_url="${remote_url%/compare/*}" 79 | sed -E "${in_place[@]}" \ 80 | -e "s/^## \\[Unreleased\\]/## [Unreleased]\\n\\n## [${version}] - ${release_date}/" \ 81 | -e "s#^\[Unreleased\]: https://.*#[Unreleased]: ${remote_url}/compare/${tag}...HEAD\\n[${version}]: ${remote_url}/compare/${prev_tag}...${tag}#" "${changelog}" 82 | if ! grep -Eq "^## \\[${version//./\\.}\\] - ${release_date}$" "${changelog}"; then 83 | bail "failed to update ${changelog}" 84 | fi 85 | if ! grep -Eq "^\\[${version//./\\.}\\]: " "${changelog}"; then 86 | bail "failed to update ${changelog}" 87 | fi 88 | else 89 | # Make sure the release exists in changelog. 90 | if ! grep -Eq "^## \\[${version//./\\.}\\] - ${release_date}$" "${changelog}"; then 91 | bail "release ${version} does not exist in ${changelog} or has wrong release date" 92 | fi 93 | if ! grep -Eq "^\\[${version//./\\.}\\]: " "${changelog}"; then 94 | bail "link to ${version} does not exist in ${changelog}" 95 | fi 96 | fi 97 | 98 | # Make sure that a valid release note for this version exists. 
99 | # https://github.com/taiki-e/parse-changelog 100 | changes=$(parse-changelog "${changelog}" "${version}") 101 | if [[ -z "${changes}" ]]; then 102 | bail "changelog for ${version} has no body" 103 | fi 104 | printf '============== CHANGELOG ==============\n' 105 | printf '%s\n' "${changes}" 106 | printf '=======================================\n' 107 | 108 | metadata=$(cargo metadata --format-version=1 --no-deps) 109 | prev_version='' 110 | docs=() 111 | for readme in $(git ls-files '*README.md'); do 112 | docs+=("${readme}") 113 | lib="$(dirname -- "${readme}")/src/lib.rs" 114 | if [[ -f "${lib}" ]]; then 115 | docs+=("${lib}") 116 | fi 117 | done 118 | changed_paths=("${changelog}" "${docs[@]}") 119 | # Publishing is unrestricted if null, and forbidden if an empty array. 120 | for pkg in $(jq -c '. as $metadata | .workspace_members[] as $id | $metadata.packages[] | select(.id == $id and .publish != [])' <<<"${metadata}"); do 121 | eval "$(jq -r '@sh "NAME=\(.name) ACTUAL_VERSION=\(.version) manifest_path=\(.manifest_path)"' <<<"${pkg}")" 122 | if [[ -z "${prev_version}" ]]; then 123 | prev_version="${ACTUAL_VERSION}" 124 | fi 125 | # Make sure that the version number of all publishable workspace members matches. 126 | if [[ "${ACTUAL_VERSION}" != "${prev_version}" ]]; then 127 | bail "publishable workspace members must be version '${prev_version}', but package '${NAME}' is version '${ACTUAL_VERSION}'" 128 | fi 129 | 130 | changed_paths+=("${manifest_path}") 131 | # Update version in Cargo.toml. 132 | if ! grep -Eq "^version = \"${prev_version}\" #publish:version" "${manifest_path}"; then 133 | bail "not found '#publish:version' in version in ${manifest_path}" 134 | fi 135 | sed -E "${in_place[@]}" "s/^version = \"${prev_version}\" #publish:version/version = \"${version}\" #publish:version/g" "${manifest_path}" 136 | # Update '=' requirement in Cargo.toml. 
137 | for manifest in $(git ls-files '*Cargo.toml'); do 138 | if grep -Eq "^${NAME} = \\{ version = \"=${prev_version}\"" "${manifest}"; then 139 | sed -E "${in_place[@]}" "s/^${NAME} = \\{ version = \"=${prev_version}\"/${NAME} = { version = \"=${version}\"/g" "${manifest}" 140 | fi 141 | done 142 | # Update version in readme and lib.rs. 143 | for path in "${docs[@]}"; do 144 | # TODO: handle pre-release 145 | if [[ "${version}" == "0.0."* ]]; then 146 | # 0.0.x -> 0.0.y 147 | if grep -Eq "^${NAME} = \"${prev_version}\"" "${path}"; then 148 | sed -E "${in_place[@]}" "s/^${NAME} = \"${prev_version}\"/${NAME} = \"${version}\"/g" "${path}" 149 | fi 150 | if grep -Eq "^${NAME} = \\{ version = \"${prev_version}\"" "${path}"; then 151 | sed -E "${in_place[@]}" "s/^${NAME} = \\{ version = \"${prev_version}\"/${NAME} = { version = \"${version}\"/g" "${path}" 152 | fi 153 | elif [[ "${version}" == "0."* ]]; then 154 | prev_major_minor="${prev_version%.*}" 155 | major_minor="${version%.*}" 156 | if [[ "${prev_major_minor}" != "${major_minor}" ]]; then 157 | # 0.x -> 0.y 158 | # 0.x.* -> 0.y 159 | if grep -Eq "^${NAME} = \"${prev_major_minor}(\\.[0-9]+)?\"" "${path}"; then 160 | sed -E "${in_place[@]}" "s/^${NAME} = \"${prev_major_minor}(\\.[0-9]+)?\"/${NAME} = \"${major_minor}\"/g" "${path}" 161 | fi 162 | if grep -Eq "^${NAME} = \\{ version = \"${prev_major_minor}(\\.[0-9]+)?\"" "${path}"; then 163 | sed -E "${in_place[@]}" "s/^${NAME} = \\{ version = \"${prev_major_minor}(\\.[0-9]+)?\"/${NAME} = { version = \"${major_minor}\"/g" "${path}" 164 | fi 165 | fi 166 | else 167 | prev_major="${prev_version%%.*}" 168 | major="${version%%.*}" 169 | if [[ "${prev_major}" != "${major}" ]]; then 170 | # x -> y 171 | # x.* -> y 172 | # x.*.* -> y 173 | if grep -Eq "^${NAME} = \"${prev_major}(\\.[0-9]+(\\.[0-9]+)?)?\"" "${path}"; then 174 | sed -E "${in_place[@]}" "s/^${NAME} = \"${prev_major}(\\.[0-9]+(\\.[0-9]+)?)?\"/${NAME} = \"${major}\"/g" "${path}" 175 | fi 176 | if grep -Eq 
"^${NAME} = \\{ version = \"${prev_major}(\\.[0-9]+(\\.[0-9]+)?)?\"" "${path}"; then 177 | sed -E "${in_place[@]}" "s/^${NAME} = \\{ version = \"${prev_major}(\\.[0-9]+(\\.[0-9]+)?)?\"/${NAME} = { version = \"${major}\"/g" "${path}" 178 | fi 179 | fi 180 | fi 181 | done 182 | done 183 | 184 | if [[ -n "${tags}" ]]; then 185 | # Create a release commit. 186 | ( 187 | set -x 188 | git add "${changed_paths[@]}" 189 | git commit -m "Release ${version}" 190 | ) 191 | fi 192 | 193 | set -x 194 | 195 | git tag "${tag}" 196 | retry git push origin refs/heads/main 197 | retry git push origin refs/tags/"${tag}" 198 | -------------------------------------------------------------------------------- /tools/tidy.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # SPDX-License-Identifier: Apache-2.0 OR MIT 3 | # shellcheck disable=SC2046 4 | set -CeEuo pipefail 5 | IFS=$'\n\t' 6 | trap -- 's=$?; printf >&2 "%s\n" "${0##*/}:${LINENO}: \`${BASH_COMMAND}\` exit with ${s}"; exit ${s}' ERR 7 | trap -- 'printf >&2 "%s\n" "${0##*/}: trapped SIGINT"; exit 1' SIGINT 8 | cd -- "$(dirname -- "$0")"/.. 9 | 10 | # USAGE: 11 | # ./tools/tidy.sh 12 | # 13 | # Note: This script requires the following tools: 14 | # - git 1.8+ 15 | # - jq 1.6+ 16 | # - npm (node 18+) 17 | # - python 3.6+ and pipx 18 | # - shfmt 19 | # - shellcheck 20 | # - cargo, rustfmt (if Rust code exists) 21 | # - clang-format (if C/C++/Protobuf code exists) 22 | # - parse-dockerfile (if Dockerfile exists) 23 | # 24 | # This script is shared by projects under github.com/taiki-e, so there may also 25 | # be checks for files not included in this repository, but they will be skipped 26 | # if the corresponding files do not exist. 27 | # It is not intended for manual editing. 
28 | 29 | retry() { 30 | for i in {1..10}; do 31 | if "$@"; then 32 | return 0 33 | else 34 | sleep "${i}" 35 | fi 36 | done 37 | "$@" 38 | } 39 | error() { 40 | if [[ -n "${GITHUB_ACTIONS:-}" ]]; then 41 | printf '::error::%s\n' "$*" 42 | else 43 | printf >&2 'error: %s\n' "$*" 44 | fi 45 | should_fail=1 46 | } 47 | warn() { 48 | if [[ -n "${GITHUB_ACTIONS:-}" ]]; then 49 | printf '::warning::%s\n' "$*" 50 | else 51 | printf >&2 'warning: %s\n' "$*" 52 | fi 53 | } 54 | info() { 55 | printf >&2 'info: %s\n' "$*" 56 | } 57 | print_fenced() { 58 | printf '=======================================\n' 59 | printf '%s' "$*" 60 | printf '=======================================\n\n' 61 | } 62 | check_diff() { 63 | if [[ -n "${GITHUB_ACTIONS:-}" ]]; then 64 | if ! git -c color.ui=always --no-pager diff --exit-code "$@"; then 65 | should_fail=1 66 | fi 67 | elif [[ -n "${CI:-}" ]]; then 68 | if ! git --no-pager diff --exit-code "$@"; then 69 | should_fail=1 70 | fi 71 | else 72 | local res 73 | res=$(git --no-pager diff --exit-code --name-only "$@" || true) 74 | if [[ -n "${res}" ]]; then 75 | warn "please commit changes made by formatter/generator if exists on the following files" 76 | print_fenced "${res}"$'\n' 77 | should_fail=1 78 | fi 79 | fi 80 | } 81 | check_config() { 82 | if [[ ! -e "$1" ]]; then 83 | error "could not found $1 in the repository root${2:-}" 84 | fi 85 | } 86 | check_install() { 87 | for tool in "$@"; do 88 | if ! 
type -P "${tool}" >/dev/null; then 89 | if [[ "${tool}" == 'python3' ]]; then 90 | if type -P python >/dev/null; then 91 | continue 92 | fi 93 | fi 94 | error "'${tool}' is required to run this check" 95 | return 1 96 | fi 97 | done 98 | } 99 | check_unused() { 100 | local kind="$1" 101 | shift 102 | local res 103 | res=$(ls_files "$@") 104 | if [[ -n "${res}" ]]; then 105 | error "the following files are unused because there is no ${kind}; consider removing them" 106 | print_fenced "${res}"$'\n' 107 | fi 108 | } 109 | check_alt() { 110 | local recommended=$1 111 | local not_recommended=$2 112 | if [[ -n "$3" ]]; then 113 | error "please use ${recommended} instead of ${not_recommended} for consistency" 114 | print_fenced "$3"$'\n' 115 | fi 116 | } 117 | check_hidden() { 118 | local res 119 | for file in "$@"; do 120 | check_alt ".${file}" "${file}" "$(comm -23 <(ls_files "*${file}") <(ls_files "*.${file}"))" 121 | done 122 | } 123 | sed_rhs_escape() { 124 | sed 's/\\/\\\\/g; s/\&/\\\&/g; s/\//\\\//g' <<<"$1" 125 | } 126 | 127 | if [[ $# -gt 0 ]]; then 128 | cat </dev/null; then 137 | py_suffix=3 138 | fi 139 | yq() { 140 | pipx run yq "$@" 141 | } 142 | tomlq() { 143 | pipx run --spec yq tomlq "$@" 144 | } 145 | case "$(uname -s)" in 146 | Linux) 147 | if [[ "$(uname -o)" == 'Android' ]]; then 148 | ostype=android 149 | else 150 | ostype=linux 151 | fi 152 | ;; 153 | Darwin) ostype=macos ;; 154 | FreeBSD) ostype=freebsd ;; 155 | NetBSD) ostype=netbsd ;; 156 | OpenBSD) ostype=openbsd ;; 157 | DragonFly) ostype=dragonfly ;; 158 | SunOS) 159 | if [[ "$(/usr/bin/uname -o)" == 'illumos' ]]; then 160 | ostype=illumos 161 | else 162 | ostype=solaris 163 | # Solaris /usr/bin/* are not POSIX-compliant (e.g., grep has no -q, -E, -F), 164 | # and POSIX-compliant commands are in /usr/xpg{4,6,7}/bin. 
165 | # https://docs.oracle.com/cd/E88353_01/html/E37853/xpg-7.html 166 | if [[ "${PATH}" != *'/usr/xpg4/bin'* ]]; then 167 | export PATH="/usr/xpg4/bin:${PATH}" 168 | fi 169 | # GNU/BSD grep/sed is required to run some checks, but most checks are okay with other POSIX grep/sed. 170 | # Solaris /usr/xpg4/bin/grep has -q, -E, -F, but no -o (non-POSIX). 171 | # Solaris /usr/xpg4/bin/sed has no -E (POSIX.1-2024) yet. 172 | for tool in sed grep; do 173 | if type -P "g${tool}" >/dev/null; then 174 | eval "${tool}() { g${tool} \"\$@\"; }" 175 | fi 176 | done 177 | fi 178 | ;; 179 | MINGW* | MSYS* | CYGWIN* | Windows_NT) 180 | ostype=windows 181 | if type -P jq >/dev/null; then 182 | # https://github.com/jqlang/jq/issues/1854 183 | _tmp=$(jq -r .a <<<'{}') 184 | if [[ "${_tmp}" != 'null' ]]; then 185 | _tmp=$(jq -b -r .a 2>/dev/null <<<'{}' || true) 186 | if [[ "${_tmp}" == 'null' ]]; then 187 | jq() { command jq -b "$@"; } 188 | else 189 | jq() { command jq "$@" | tr -d '\r'; } 190 | fi 191 | yq() { 192 | pipx run yq "$@" | tr -d '\r' 193 | } 194 | tomlq() { 195 | pipx run --spec yq tomlq "$@" | tr -d '\r' 196 | } 197 | fi 198 | fi 199 | ;; 200 | *) error "unrecognized os type '$(uname -s)' for \`\$(uname -s)\`" ;; 201 | esac 202 | 203 | check_install git 204 | exclude_from_ls_files=() 205 | # - `find` lists symlinks. `! ( -name -prune )` (.i.e., ignore ) are manually listed from .gitignore. 206 | # - `git submodule status` lists submodules. Use sed to remove the first character indicates status ( |+|-). 207 | # - `git ls-files --deleted` lists removed files. 208 | while IFS=$'\n' read -r line; do exclude_from_ls_files+=("${line}"); done < <({ 209 | find . \! \( -name .git -prune \) \! \( -name target -prune \) \! 
\( -name tmp -prune \) -type l | cut -c3- 210 | git submodule status | sed 's/^.//' | cut -d' ' -f2 211 | git ls-files --deleted 212 | } | LC_ALL=C sort -u) 213 | exclude_from_ls_files_no_symlink=() 214 | while IFS=$'\n' read -r line; do exclude_from_ls_files_no_symlink+=("${line}"); done < <({ 215 | git submodule status | sed 's/^.//' | cut -d' ' -f2 216 | git ls-files --deleted 217 | } | LC_ALL=C sort -u) 218 | ls_files() { 219 | if [[ "${1:-}" == '--include-symlink' ]]; then 220 | shift 221 | comm -23 <(git ls-files "$@" | LC_ALL=C sort) <(printf '%s\n' ${exclude_from_ls_files_no_symlink[@]+"${exclude_from_ls_files_no_symlink[@]}"}) 222 | else 223 | comm -23 <(git ls-files "$@" | LC_ALL=C sort) <(printf '%s\n' ${exclude_from_ls_files[@]+"${exclude_from_ls_files[@]}"}) 224 | fi 225 | } 226 | 227 | # Rust (if exists) 228 | if [[ -n "$(ls_files '*.rs')" ]]; then 229 | info "checking Rust code style" 230 | check_config .rustfmt.toml "; consider adding with reference to https://github.com/taiki-e/cargo-hack/blob/HEAD/.rustfmt.toml" 231 | check_config .clippy.toml "; consider adding with reference to https://github.com/taiki-e/cargo-hack/blob/HEAD/.clippy.toml" 232 | if check_install cargo jq python3 pipx; then 233 | # `cargo fmt` cannot recognize files not included in the current workspace and modules 234 | # defined inside macros, so run rustfmt directly. 235 | # We need to use nightly rustfmt because we use the unstable formatting options of rustfmt. 236 | rustc_version=$(rustc -vV | grep -E '^release:' | cut -d' ' -f2) 237 | if [[ "${rustc_version}" =~ nightly|dev ]] || ! 
type -P rustup >/dev/null; then 238 | if type -P rustup >/dev/null; then 239 | retry rustup component add rustfmt &>/dev/null 240 | fi 241 | info "running \`rustfmt \$(git ls-files '*.rs')\`" 242 | rustfmt $(ls_files '*.rs') 243 | else 244 | if type -P rustup >/dev/null; then 245 | retry rustup component add rustfmt --toolchain nightly &>/dev/null 246 | fi 247 | info "running \`rustfmt +nightly \$(git ls-files '*.rs')\`" 248 | rustfmt +nightly $(ls_files '*.rs') 249 | fi 250 | check_diff $(ls_files '*.rs') 251 | cast_without_turbofish=$(grep -Fn '.cast()' $(ls_files '*.rs') || true) 252 | if [[ -n "${cast_without_turbofish}" ]]; then 253 | error "please replace \`.cast()\` with \`.cast::()\`:" 254 | printf '%s\n' "${cast_without_turbofish}" 255 | fi 256 | # Make sure that public Rust crates don't contain executables and binaries. 257 | executables='' 258 | binaries='' 259 | metadata=$(cargo metadata --format-version=1 --no-deps) 260 | root_manifest='' 261 | if [[ -f Cargo.toml ]]; then 262 | root_manifest=$(cargo locate-project --message-format=plain --manifest-path Cargo.toml) 263 | fi 264 | exclude='' 265 | has_public_crate='' 266 | has_root_crate='' 267 | for pkg in $(jq -c '. as $metadata | .workspace_members[] as $id | $metadata.packages[] | select(.id == $id)' <<<"${metadata}"); do 268 | eval "$(jq -r '@sh "publish=\(.publish) manifest_path=\(.manifest_path)"' <<<"${pkg}")" 269 | if [[ "$(tomlq -c '.lints' "${manifest_path}")" == 'null' ]]; then 270 | error "no [lints] table in ${manifest_path} please add '[lints]' with 'workspace = true'" 271 | fi 272 | # Publishing is unrestricted if null, and forbidden if an empty array. 273 | if [[ -z "${publish}" ]]; then 274 | continue 275 | fi 276 | has_public_crate=1 277 | if [[ "${manifest_path}" == "${root_manifest}" ]]; then 278 | has_root_crate=1 279 | exclude=$(tomlq -r '.package.exclude[]' "${manifest_path}") 280 | if ! 
grep -Eq '^/\.\*$' <<<"${exclude}"; then 281 | error "top-level Cargo.toml of non-virtual workspace should have 'exclude' field with \"/.*\"" 282 | fi 283 | if [[ -e tools ]] && ! grep -Eq '^/tools$' <<<"${exclude}"; then 284 | error "top-level Cargo.toml of non-virtual workspace should have 'exclude' field with \"/tools\" if it exists" 285 | fi 286 | if [[ -e target-specs ]] && ! grep -Eq '^/target-specs$' <<<"${exclude}"; then 287 | error "top-level Cargo.toml of non-virtual workspace should have 'exclude' field with \"/target-specs\" if it exists" 288 | fi 289 | fi 290 | done 291 | if [[ -n "${has_public_crate}" ]]; then 292 | check_config .deny.toml "; consider adding with reference to https://github.com/taiki-e/cargo-hack/blob/HEAD/.deny.toml" 293 | info "checking public crates don't contain executables and binaries" 294 | for p in $(ls_files --include-symlink); do 295 | # Skip directories. 296 | if [[ -d "${p}" ]]; then 297 | continue 298 | fi 299 | # Top-level hidden files/directories and tools/* are excluded from crates.io (ensured by the above check). 300 | # TODO: fully respect exclude field in Cargo.toml. 301 | case "${p}" in 302 | .* | tools/* | target-specs/*) continue ;; 303 | */*) ;; 304 | *) 305 | # If there is no crate at root, executables at the repository root directory if always okay. 306 | if [[ -z "${has_root_crate}" ]]; then 307 | continue 308 | fi 309 | ;; 310 | esac 311 | if [[ -x "${p}" ]]; then 312 | executables+="${p}"$'\n' 313 | fi 314 | # Use `diff` instead of `file` because `file` treats an empty file as a binary. 
315 | # https://unix.stackexchange.com/questions/275516/is-there-a-convenient-way-to-classify-files-as-binary-or-text#answer-402870 316 | if { diff .gitattributes "${p}" || true; } | grep -Eq '^Binary file'; then 317 | binaries+="${p}"$'\n' 318 | fi 319 | done 320 | if [[ -n "${executables}" ]]; then 321 | error "file-permissions-check failed: executables are only allowed to be present in directories that are excluded from crates.io" 322 | print_fenced "${executables}" 323 | fi 324 | if [[ -n "${binaries}" ]]; then 325 | error "file-permissions-check failed: binaries are only allowed to be present in directories that are excluded from crates.io" 326 | print_fenced "${binaries}" 327 | fi 328 | fi 329 | fi 330 | # Sync markdown to rustdoc. 331 | first=1 332 | for markdown in $(ls_files '*.md'); do 333 | markers=$(grep -En '^' "${markdown}" || true) 334 | # BSD wc's -l emits spaces before number. 335 | if [[ ! "$(LC_ALL=C wc -l <<<"${markers}")" =~ ^\ *2$ ]]; then 336 | if [[ -n "${markers}" ]]; then 337 | error "inconsistent '' marker found in ${markdown}" 338 | printf '%s\n' "${markers}" 339 | fi 340 | continue 341 | fi 342 | start_marker=$(head -n1 <<<"${markers}") 343 | end_marker=$(head -n2 <<<"${markers}" | tail -n1) 344 | if [[ "${start_marker}" == *"tidy:sync-markdown-to-rustdoc:end"* ]] || [[ "${end_marker}" == *"tidy:sync-markdown-to-rustdoc:start"* ]]; then 345 | error "inconsistent '' marker found in ${markdown}" 346 | printf '%s\n' "${markers}" 347 | continue 348 | fi 349 | if [[ -n "${first}" ]]; then 350 | first='' 351 | info "syncing markdown to rustdoc" 352 | fi 353 | lib="${start_marker#*:<\!-- tidy:sync-markdown-to-rustdoc:start:}" 354 | if [[ "${start_marker}" == "${lib}" ]]; then 355 | error "missing path in '' marker in ${markdown}" 356 | printf '%s\n' "${markers}" 357 | continue 358 | fi 359 | lib="${lib% -->}" 360 | lib="$(dirname -- "${markdown}")/${lib}" 361 | markers=$(grep -En '^' "${lib}" || true) 362 | # BSD wc's -l emits spaces before 
number. 363 | if [[ ! "$(LC_ALL=C wc -l <<<"${markers}")" =~ ^\ *2$ ]]; then 364 | if [[ -n "${markers}" ]]; then 365 | error "inconsistent '' marker found in ${lib}" 366 | printf '%s\n' "${markers}" 367 | else 368 | error "missing '' marker in ${lib}" 369 | fi 370 | continue 371 | fi 372 | start_marker=$(head -n1 <<<"${markers}") 373 | end_marker=$(head -n2 <<<"${markers}" | tail -n1) 374 | if [[ "${start_marker}" == *"tidy:sync-markdown-to-rustdoc:end"* ]] || [[ "${end_marker}" == *"tidy:sync-markdown-to-rustdoc:start"* ]]; then 375 | error "inconsistent '' marker found in ${lib}" 376 | printf '%s\n' "${markers}" 377 | continue 378 | fi 379 | new=''$'\a' 380 | empty_line_re='^ *$' 381 | gfm_alert_re='^> {0,4}\[!.*\] *$' 382 | rust_code_block_re='^ *```(rust|rs) *$' 383 | code_block_attr='' 384 | in_alert='' 385 | first_line=1 386 | ignore='' 387 | while IFS='' read -rd$'\a' line; do 388 | if [[ -n "${ignore}" ]]; then 389 | if [[ "${line}" == ''* ]]; then 390 | ignore='' 391 | fi 392 | continue 393 | fi 394 | if [[ -n "${first_line}" ]]; then 395 | # Ignore start marker. 396 | first_line='' 397 | continue 398 | elif [[ -n "${in_alert}" ]]; then 399 | if [[ "${line}" =~ ${empty_line_re} ]]; then 400 | in_alert='' 401 | new+=$'\a'""$'\a' 402 | fi 403 | elif [[ "${line}" =~ ${gfm_alert_re} ]]; then 404 | alert="${line#*[\!}" 405 | alert="${alert%%]*}" 406 | alert=$(tr '[:lower:]' '[:upper:]' <<<"${alert%%]*}") 407 | alert_lower=$(tr '[:upper:]' '[:lower:]' <<<"${alert}") 408 | case "${alert}" in 409 | NOTE | TIP | IMPORTANT) alert_sign='ⓘ' ;; 410 | WARNING | CAUTION) alert_sign='⚠' ;; 411 | *) 412 | error "unknown alert type '${alert}' found; please use one of the types listed in " 413 | new+="${line}"$'\a' 414 | continue 415 | ;; 416 | esac 417 | in_alert=1 418 | new+="
"$'\a\a' 419 | new+="> **${alert_sign} ${alert:0:1}${alert_lower:1}**"$'\a>\a' 420 | continue 421 | fi 422 | if [[ "${line}" =~ ${rust_code_block_re} ]]; then 423 | code_block_attr="${code_block_attr#<\!-- tidy:sync-markdown-to-rustdoc:code-block:}" 424 | code_block_attr="${code_block_attr%% -->*}" 425 | new+="${line/\`\`\`*/\`\`\`}${code_block_attr}"$'\a' 426 | code_block_attr='' 427 | continue 428 | fi 429 | if [[ -n "${code_block_attr}" ]]; then 430 | error "'${code_block_attr}' ignored because there is no subsequent Rust code block" 431 | code_block_attr='' 432 | fi 433 | if [[ "${line}" == ''* ]]; then 434 | code_block_attr="${line}" 435 | continue 436 | fi 437 | if [[ "${line}" == ''* ]]; then 438 | if [[ "${new}" == *$'\a\a' ]]; then 439 | new="${new%$'\a'}" 440 | fi 441 | ignore=1 442 | continue 443 | fi 444 | new+="${line}"$'\a' 445 | done < <(tr '\n' '\a' <"${markdown}" | grep -Eo '.*') 446 | new+='' 447 | new=$(tr '\n' '\a' <"${lib}" | sed "s/.*/$(sed_rhs_escape "${new}")/" | tr '\a' '\n') 448 | printf '%s\n' "${new}" >|"${lib}" 449 | check_diff "${lib}" 450 | done 451 | printf '\n' 452 | else 453 | check_unused "Rust code" '*.cargo*' '*clippy.toml' '*deny.toml' '*rustfmt.toml' '*Cargo.toml' '*Cargo.lock' 454 | fi 455 | check_hidden clippy.toml deny.toml rustfmt.toml 456 | 457 | # C/C++/Protobuf (if exists) 458 | clang_format_ext=('*.c' '*.h' '*.cpp' '*.hpp' '*.proto') 459 | if [[ -n "$(ls_files "${clang_format_ext[@]}")" ]]; then 460 | info "checking C/C++/Protobuf code style" 461 | check_config .clang-format 462 | if check_install clang-format; then 463 | IFS=' ' 464 | info "running \`clang-format -i \$(git ls-files ${clang_format_ext[*]})\`" 465 | IFS=$'\n\t' 466 | clang-format -i $(ls_files "${clang_format_ext[@]}") 467 | check_diff $(ls_files "${clang_format_ext[@]}") 468 | fi 469 | printf '\n' 470 | else 471 | check_unused "C/C++/Protobuf code" '*.clang-format*' 472 | fi 473 | check_alt '.clang-format' '_clang-format' "$(ls_files '*_clang-format')" 
474 | # https://gcc.gnu.org/onlinedocs/gcc/Overall-Options.html 475 | check_alt '.cpp extension' 'other extensions' "$(ls_files '*.cc' '*.cp' '*.cxx' '*.C' '*.CPP' '*.c++')" 476 | check_alt '.hpp extension' 'other extensions' "$(ls_files '*.hh' '*.hp' '*.hxx' '*.H' '*.HPP' '*.h++')" 477 | 478 | # YAML/HTML/CSS/JavaScript/JSON (if exists) 479 | prettier_ext=('*.css' '*.html' '*.js' '*.json' '*.yml' '*.yaml') 480 | if [[ -n "$(ls_files "${prettier_ext[@]}")" ]]; then 481 | info "checking YAML/HTML/CSS/JavaScript/JSON code style" 482 | check_config .editorconfig 483 | if check_install npm; then 484 | IFS=' ' 485 | info "running \`npx -y prettier -l -w \$(git ls-files ${prettier_ext[*]})\`" 486 | IFS=$'\n\t' 487 | npx -y prettier -l -w $(ls_files "${prettier_ext[@]}") 488 | check_diff $(ls_files "${prettier_ext[@]}") 489 | fi 490 | printf '\n' 491 | else 492 | check_unused "YAML/HTML/CSS/JavaScript/JSON file" '*.prettierignore' 493 | fi 494 | # https://prettier.io/docs/en/configuration 495 | check_alt '.editorconfig' 'other configs' "$(ls_files '*.prettierrc*' '*prettier.config.*')" 496 | check_alt '.yml extension' '.yaml extension' "$(ls_files '*.yaml' | { grep -Fv '.markdownlint-cli2.yaml' || true; })" 497 | 498 | # TOML (if exists) 499 | if [[ -n "$(ls_files '*.toml' | { grep -Fv '.taplo.toml' || true; })" ]]; then 500 | info "checking TOML style" 501 | check_config .taplo.toml 502 | if check_install npm; then 503 | info "running \`npx -y @taplo/cli fmt \$(git ls-files '*.toml')\`" 504 | RUST_LOG=warn npx -y @taplo/cli fmt $(ls_files '*.toml') 505 | check_diff $(ls_files '*.toml') 506 | fi 507 | printf '\n' 508 | else 509 | check_unused "TOML file" '*taplo.toml' 510 | fi 511 | check_hidden taplo.toml 512 | 513 | # Markdown (if exists) 514 | if [[ -n "$(ls_files '*.md')" ]]; then 515 | info "checking markdown style" 516 | check_config .markdownlint-cli2.yaml 517 | if check_install npm; then 518 | info "running \`npx -y markdownlint-cli2 \$(git ls-files '*.md')\`" 519 
| if ! npx -y markdownlint-cli2 $(ls_files '*.md'); then 520 | error "check failed; please resolve the above markdownlint error(s)" 521 | fi 522 | fi 523 | printf '\n' 524 | else 525 | check_unused "markdown file" '*.markdownlint-cli2.yaml' 526 | fi 527 | # https://github.com/DavidAnson/markdownlint-cli2#configuration 528 | check_alt '.markdownlint-cli2.yaml' 'other configs' "$(ls_files '*.markdownlint-cli2.jsonc' '*.markdownlint-cli2.cjs' '*.markdownlint-cli2.mjs' '*.markdownlint.*')" 529 | check_alt '.md extension' '*.markdown extension' "$(ls_files '*.markdown')" 530 | 531 | # Shell scripts 532 | info "checking shell scripts" 533 | shell_files=() 534 | docker_files=() 535 | bash_files=() 536 | grep_ere_files=() 537 | sed_ere_files=() 538 | for p in $(ls_files '*.sh' '*Dockerfile*'); do 539 | case "${p}" in 540 | tests/fixtures/* | */tests/fixtures/* | *.json) continue ;; 541 | esac 542 | case "${p##*/}" in 543 | *.sh) 544 | shell_files+=("${p}") 545 | re='^#!/.*bash' 546 | if [[ "$(head -1 "${p}")" =~ ${re} ]]; then 547 | bash_files+=("${p}") 548 | fi 549 | ;; 550 | *Dockerfile*) 551 | docker_files+=("${p}") 552 | bash_files+=("${p}") # TODO 553 | ;; 554 | esac 555 | if grep -Eq '(^|[^0-9A-Za-z\."'\''-])(grep) -[A-Za-z]*E[^\)]' "${p}"; then 556 | grep_ere_files+=("${p}") 557 | fi 558 | if grep -Eq '(^|[^0-9A-Za-z\."'\''-])(sed) -[A-Za-z]*E[^\)]' "${p}"; then 559 | sed_ere_files+=("${p}") 560 | fi 561 | done 562 | workflows=() 563 | actions=() 564 | if [[ -d .github/workflows ]]; then 565 | for p in .github/workflows/*.yml; do 566 | workflows+=("${p}") 567 | bash_files+=("${p}") # TODO 568 | done 569 | fi 570 | if [[ -n "$(ls_files '*action.yml')" ]]; then 571 | for p in $(ls_files '*action.yml'); do 572 | if [[ "${p##*/}" == 'action.yml' ]]; then 573 | actions+=("${p}") 574 | if ! 
grep -Fq 'shell: sh' "${p}"; then 575 | bash_files+=("${p}") 576 | fi 577 | fi 578 | done 579 | fi 580 | # correctness 581 | res=$({ grep -En '(\[\[ .* ]]|(^|[^\$])\(\(.*\)\))( +#| *$)' "${bash_files[@]}" || true; } | { grep -Ev '^[^ ]+: *(#|//)' || true; } | LC_ALL=C sort) 582 | if [[ -n "${res}" ]]; then 583 | error "bare [[ ]] and (( )) may not work as intended: see https://github.com/koalaman/shellcheck/issues/2360 for more" 584 | print_fenced "${res}"$'\n' 585 | fi 586 | # TODO: chmod|chown 587 | res=$({ grep -En '(^|[^0-9A-Za-z\."'\''-])(basename|cat|cd|cp|dirname|ln|ls|mkdir|mv|pushd|rm|rmdir|tee|touch|kill|trap)( +-[0-9A-Za-z]+)* +[^<>\|-]' "${bash_files[@]}" || true; } | { grep -Ev '^[^ ]+: *(#|//)' || true; } | LC_ALL=C sort) 588 | if [[ -n "${res}" ]]; then 589 | error "use \`--\` before path(s): see https://github.com/koalaman/shellcheck/issues/2707 / https://github.com/koalaman/shellcheck/issues/2612 / https://github.com/koalaman/shellcheck/issues/2305 / https://github.com/koalaman/shellcheck/issues/2157 / https://github.com/koalaman/shellcheck/issues/2121 / https://github.com/koalaman/shellcheck/issues/314 for more" 590 | print_fenced "${res}"$'\n' 591 | fi 592 | res=$({ grep -En '(^|[^0-9A-Za-z\."'\''-])(LINES|RANDOM|PWD)=' "${bash_files[@]}" || true; } | { grep -Ev '^[^ ]+: *(#|//)' || true; } | LC_ALL=C sort) 593 | if [[ -n "${res}" ]]; then 594 | error "do not modify these built-in bash variables: see https://github.com/koalaman/shellcheck/issues/2160 / https://github.com/koalaman/shellcheck/issues/2559 for more" 595 | print_fenced "${res}"$'\n' 596 | fi 597 | # perf 598 | res=$({ grep -En '(^|[^\\])\$\((cat) ' "${bash_files[@]}" || true; } | { grep -Ev '^[^ ]+: *(#|//)' || true; } | LC_ALL=C sort) 599 | if [[ -n "${res}" ]]; then 600 | error "use faster \`\$(' "${bash_files[@]}" || true; } | { grep -Ev '^[^ ]+: *(#|//)' || true; } | LC_ALL=C sort) 609 | if [[ -n "${res}" ]]; then 610 | error "\`type -P\` doesn't output to stderr; use \`>\` 
instead of \`&>\`" 611 | print_fenced "${res}"$'\n' 612 | fi 613 | # TODO: multi-line case 614 | res=$({ grep -En '(^|[^0-9A-Za-z\."'\''-])(echo|printf )[^;)]* \|[^\|]' "${bash_files[@]}" || true; } | { grep -Ev '^[^ ]+: *(#|//)' || true; } | LC_ALL=C sort) 615 | if [[ -n "${res}" ]]; then 616 | error "use faster \`<<<...\` instead of \`echo ... |\`/\`printf ... |\`: see https://github.com/koalaman/shellcheck/issues/2593 for more" 617 | print_fenced "${res}"$'\n' 618 | fi 619 | # style 620 | if [[ ${#grep_ere_files[@]} -gt 0 ]]; then 621 | # We intentionally do not check for occurrences in any other order (e.g., -iE, -i -E) here. 622 | # This enforces the style and makes it easier to search. 623 | res=$({ grep -En '(^|[^0-9A-Za-z\."'\''-])(grep) +([^-]|-[^EFP-]|--[^hv])' "${grep_ere_files[@]}" || true; } | { grep -Ev '^[^ ]+: *(#|//)' || true; } | LC_ALL=C sort) 624 | if [[ -n "${res}" ]]; then 625 | error "please always use ERE (grep -E) instead of BRE for code consistency within a file" 626 | print_fenced "${res}"$'\n' 627 | fi 628 | fi 629 | if [[ ${#sed_ere_files[@]} -gt 0 ]]; then 630 | res=$({ grep -En '(^|[^0-9A-Za-z\."'\''-])(sed) +([^-]|-[^E-]|--[^hv])' "${sed_ere_files[@]}" || true; } | { grep -Ev '^[^ ]+: *(#|//)' || true; } | LC_ALL=C sort) 631 | if [[ -n "${res}" ]]; then 632 | error "please always use ERE (sed -E) instead of BRE for code consistency within a file" 633 | print_fenced "${res}"$'\n' 634 | fi 635 | fi 636 | if check_install shfmt; then 637 | check_config .editorconfig 638 | info "running \`shfmt -w \$(git ls-files '*.sh')\`" 639 | if ! shfmt -w "${shell_files[@]}"; then 640 | error "check failed; please resolve the shfmt error(s)" 641 | fi 642 | check_diff "${shell_files[@]}" 643 | fi 644 | if [[ "${ostype}" == 'solaris' ]] && [[ -n "${CI:-}" ]] && ! 
type -P shellcheck >/dev/null; then 645 | warn "this check is skipped on Solaris due to no haskell/shellcheck in upstream package manager" 646 | elif check_install shellcheck; then 647 | check_config .shellcheckrc 648 | info "running \`shellcheck \$(git ls-files '*.sh')\`" 649 | if ! shellcheck "${shell_files[@]}"; then 650 | error "check failed; please resolve the above shellcheck error(s)" 651 | fi 652 | # Check scripts in dockerfile. 653 | if [[ ${#docker_files[@]} -gt 0 ]]; then 654 | # Exclude SC2096 due to the way the temporary script is created. 655 | shellcheck_exclude=SC2096 656 | info "running \`shellcheck --exclude ${shellcheck_exclude}\` for scripts in \`\$(git ls-files '*Dockerfile*')\`" 657 | if check_install jq python3 parse-dockerfile; then 658 | shellcheck_for_dockerfile() { 659 | local text=$1 660 | local shell=$2 661 | local display_path=$3 662 | if [[ "${text}" == 'null' ]]; then 663 | return 664 | fi 665 | text="#!${shell}"$'\n'"${text}" 666 | case "${ostype}" in 667 | windows) text=${text//$'\r'/} ;; # Parse error on git bash/msys2 bash. 668 | esac 669 | local color=auto 670 | if [[ -t 1 ]] || [[ -n "${GITHUB_ACTIONS:-}" ]]; then 671 | color=always 672 | fi 673 | # We don't use <(printf '%s\n' "${text}") here because: 674 | # Windows: failed to found fd created by <() ("/proc/*/fd/* (git bash/msys2 bash) /dev/fd/* (cygwin bash): openBinaryFile: does not exist (No such file or directory)" error) 675 | # DragonFly BSD: hang 676 | # Others: false negative 677 | trap -- 'rm -- ./tools/.tidy-tmp; printf >&2 "%s\n" "${0##*/}: trapped SIGINT"; exit 1' SIGINT 678 | printf '%s\n' "${text}" >|./tools/.tidy-tmp 679 | if ! 
shellcheck --color="${color}" --exclude "${shellcheck_exclude}" ./tools/.tidy-tmp | sed "s/\.\/tools\/\.tidy-tmp/$(sed_rhs_escape "${display_path}")/g"; then 680 | error "check failed; please resolve the above shellcheck error(s)" 681 | fi 682 | rm -- ./tools/.tidy-tmp 683 | trap -- 'printf >&2 "%s\n" "${0##*/}: trapped SIGINT"; exit 1' SIGINT 684 | } 685 | for dockerfile_path in ${docker_files[@]+"${docker_files[@]}"}; do 686 | dockerfile=$(parse-dockerfile "${dockerfile_path}") 687 | normal_shell='' 688 | for instruction in $(jq -c '.instructions[]' <<<"${dockerfile}"); do 689 | instruction_kind=$(jq -r '.kind' <<<"${instruction}") 690 | case "${instruction_kind}" in 691 | FROM) 692 | # https://docs.docker.com/reference/dockerfile/#from 693 | # > Each FROM instruction clears any state created by previous instructions. 694 | normal_shell='' 695 | continue 696 | ;; 697 | ADD | ARG | CMD | COPY | ENTRYPOINT | ENV | EXPOSE | HEALTHCHECK | LABEL) ;; 698 | # https://docs.docker.com/reference/build-checks/maintainer-deprecated/ 699 | MAINTAINER) error "MAINTAINER instruction is deprecated in favor of using label" ;; 700 | RUN) ;; 701 | SHELL) 702 | normal_shell='' 703 | for argument in $(jq -c '.arguments[]' <<<"${instruction}"); do 704 | value=$(jq -r '.value' <<<"${argument}") 705 | if [[ -z "${normal_shell}" ]]; then 706 | case "${value}" in 707 | cmd | cmd.exe | powershell | powershell.exe) 708 | # not unix shell 709 | normal_shell="${value}" 710 | break 711 | ;; 712 | esac 713 | else 714 | normal_shell+=' ' 715 | fi 716 | normal_shell+="${value}" 717 | done 718 | ;; 719 | STOPSIGNAL | USER | VOLUME | WORKDIR) ;; 720 | *) error "unknown instruction ${instruction_kind}" ;; 721 | esac 722 | arguments='' 723 | # only shell-form RUN/ENTRYPOINT/CMD is run in a shell 724 | case "${instruction_kind}" in 725 | RUN) 726 | if [[ "$(jq -r '.arguments.shell' <<<"${instruction}")" == 'null' ]]; then 727 | continue 728 | fi 729 | arguments=$(jq -r '.arguments.shell.value' 
<<<"${instruction}") 730 | if [[ -z "${arguments}" ]]; then 731 | if [[ "$(jq -r '.here_docs[0]' <<<"${instruction}")" == 'null' ]]; then 732 | error "empty RUN is useless (${dockerfile_path})" 733 | continue 734 | fi 735 | if [[ "$(jq -r '.here_docs[1]' <<<"${instruction}")" != 'null' ]]; then 736 | # TODO: 737 | error "multi here-docs without command is not yet supported (${dockerfile_path})" 738 | fi 739 | arguments=$(jq -r '.here_docs[0].value' <<<"${instruction}") 740 | if [[ "${arguments}" == '#!'* ]]; then 741 | # TODO: 742 | error "here-docs with shebang is not yet supported (${dockerfile_path})" 743 | continue 744 | fi 745 | else 746 | if [[ "$(jq -r '.here_docs[0]' <<<"${instruction}")" != 'null' ]]; then 747 | # TODO: 748 | error "sh/bash command with here-docs is not yet checked (${dockerfile_path})" 749 | fi 750 | fi 751 | ;; 752 | ENTRYPOINT | CMD) 753 | if [[ "$(jq -r '.arguments.shell' <<<"${instruction}")" == 'null' ]]; then 754 | continue 755 | fi 756 | arguments=$(jq -r '.arguments.shell.value' <<<"${instruction}") 757 | if [[ -z "${normal_shell}" ]] && [[ -n "${arguments}" ]]; then 758 | # https://docs.docker.com/reference/build-checks/json-args-recommended/ 759 | error "JSON arguments recommended for ENTRYPOINT/CMD to prevent unintended behavior related to OS signals" 760 | fi 761 | ;; 762 | HEALTHCHECK) 763 | if [[ "$(jq -r '.arguments.kind' <<<"${instruction}")" != "CMD" ]]; then 764 | continue 765 | fi 766 | if [[ "$(jq -r '.arguments.arguments.shell' <<<"${instruction}")" == 'null' ]]; then 767 | continue 768 | fi 769 | arguments=$(jq -r '.arguments.arguments.shell.value' <<<"${instruction}") 770 | ;; 771 | *) continue ;; 772 | esac 773 | case "${normal_shell}" in 774 | # not unix shell 775 | cmd | cmd.exe | powershell | powershell.exe) continue ;; 776 | # https://docs.docker.com/reference/dockerfile/#shell 777 | '') shell='/bin/sh -c' ;; 778 | *) shell="${normal_shell}" ;; 779 | esac 780 | shellcheck_for_dockerfile "${arguments}" 
"${shell}" "${dockerfile_path}" 781 | done 782 | done 783 | fi 784 | fi 785 | # Check scripts in YAML. 786 | if [[ ${#workflows[@]} -gt 0 ]] || [[ ${#actions[@]} -gt 0 ]]; then 787 | # Exclude SC2096 due to the way the temporary script is created. 788 | shellcheck_exclude=SC2086,SC2096,SC2129 789 | info "running \`shellcheck --exclude ${shellcheck_exclude}\` for scripts in .github/workflows/*.yml and **/action.yml" 790 | if check_install jq python3 pipx; then 791 | shellcheck_for_gha() { 792 | local text=$1 793 | local shell=$2 794 | local display_path=$3 795 | if [[ "${text}" == 'null' ]]; then 796 | return 797 | fi 798 | case "${shell}" in 799 | bash* | sh*) ;; 800 | *) return ;; 801 | esac 802 | text="#!/usr/bin/env ${shell%' {0}'}"$'\n'"${text}" 803 | # Use python because sed doesn't support .*?. 804 | text=$( 805 | "python${py_suffix}" - <&2 "%s\n" "${0##*/}: trapped SIGINT"; exit 1' SIGINT 823 | printf '%s\n' "${text}" >|./tools/.tidy-tmp 824 | if ! shellcheck --color="${color}" --exclude "${shellcheck_exclude}" ./tools/.tidy-tmp | sed "s/\.\/tools\/\.tidy-tmp/$(sed_rhs_escape "${display_path}")/g"; then 825 | error "check failed; please resolve the above shellcheck error(s)" 826 | fi 827 | rm -- ./tools/.tidy-tmp 828 | trap -- 'printf >&2 "%s\n" "${0##*/}: trapped SIGINT"; exit 1' SIGINT 829 | } 830 | for workflow_path in ${workflows[@]+"${workflows[@]}"}; do 831 | workflow=$(yq -c '.' "${workflow_path}") 832 | # The top-level permissions must be weak as they are referenced by all jobs. 
833 | permissions=$(jq -c '.permissions' <<<"${workflow}") 834 | case "${permissions}" in 835 | '{"contents":"read"}' | '{"contents":"none"}') ;; 836 | null) error "${workflow_path}: top level permissions not found; it must be 'contents: read' or weaker permissions" ;; 837 | *) error "${workflow_path}: only 'contents: read' and weaker permissions are allowed at top level, but found '${permissions}'; if you want to use stronger permissions, please set job-level permissions" ;; 838 | esac 839 | default_shell=$(jq -r -c '.defaults.run.shell' <<<"${workflow}") 840 | # github's default is https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#defaultsrunshell 841 | re='^bash --noprofile --norc -CeEux?o pipefail \{0}$' 842 | if [[ ! "${default_shell}" =~ ${re} ]]; then 843 | error "${workflow_path}: defaults.run.shell should be 'bash --noprofile --norc -CeEuxo pipefail {0}' or 'bash --noprofile --norc -CeEuo pipefail {0}'" 844 | continue 845 | fi 846 | # .steps == null means the job is the caller of reusable workflow 847 | for job in $(jq -c '.jobs | to_entries[] | select(.value.steps)' <<<"${workflow}"); do 848 | name=$(jq -r '.key' <<<"${job}") 849 | job=$(jq -r '.value' <<<"${job}") 850 | n=0 851 | job_default_shell=$(jq -r '.defaults.run.shell' <<<"${job}") 852 | if [[ "${job_default_shell}" == 'null' ]]; then 853 | job_default_shell="${default_shell}" 854 | fi 855 | for step in $(jq -c '.steps[]' <<<"${job}"); do 856 | prepare='' 857 | eval "$(jq -r 'if .run then @sh "RUN=\(.run) shell=\(.shell)" else @sh "RUN=\(.with.run) prepare=\(.with.prepare) shell=\(.with.shell)" end' <<<"${step}")" 858 | if [[ "${RUN}" == 'null' ]]; then 859 | _=$((n++)) 860 | continue 861 | fi 862 | if [[ "${shell}" == 'null' ]]; then 863 | if [[ -z "${prepare}" ]]; then 864 | shell="${job_default_shell}" 865 | elif grep -Eq '^ *chsh +-s +[^ ]+/bash' <<<"${prepare}"; then 866 | shell='bash' 867 | else 868 | shell='sh' 869 | fi 870 | fi 871 | shellcheck_for_gha 
"${RUN}" "${shell}" "${workflow_path} ${name}.steps[${n}].run" 872 | shellcheck_for_gha "${prepare:-null}" 'sh' "${workflow_path} ${name}.steps[${n}].run" 873 | _=$((n++)) 874 | done 875 | done 876 | done 877 | for action_path in ${actions[@]+"${actions[@]}"}; do 878 | runs=$(yq -c '.runs' "${action_path}") 879 | if [[ "$(jq -r '.using' <<<"${runs}")" != "composite" ]]; then 880 | continue 881 | fi 882 | n=0 883 | for step in $(jq -c '.steps[]' <<<"${runs}"); do 884 | prepare='' 885 | eval "$(jq -r 'if .run then @sh "RUN=\(.run) shell=\(.shell)" else @sh "RUN=\(.with.run) prepare=\(.with.prepare) shell=\(.with.shell)" end' <<<"${step}")" 886 | if [[ "${RUN}" == 'null' ]]; then 887 | _=$((n++)) 888 | continue 889 | fi 890 | if [[ "${shell}" == 'null' ]]; then 891 | if [[ -z "${prepare}" ]]; then 892 | error "\`shell: ..\` is required" 893 | continue 894 | elif grep -Eq '^ *chsh +-s +[^ ]+/bash' <<<"${prepare}"; then 895 | shell='bash' 896 | else 897 | shell='sh' 898 | fi 899 | fi 900 | shellcheck_for_gha "${RUN}" "${shell}" "${action_path} steps[${n}].run" 901 | shellcheck_for_gha "${prepare:-null}" 'sh' "${action_path} steps[${n}].run" 902 | _=$((n++)) 903 | done 904 | done 905 | fi 906 | fi 907 | fi 908 | printf '\n' 909 | check_alt '.sh extension' '*.bash extension' "$(ls_files '*.bash')" 910 | 911 | # License check 912 | # TODO: This check is still experimental and does not track all files that should be tracked. 
913 | if [[ -f tools/.tidy-check-license-headers ]]; then 914 | info "checking license headers (experimental)" 915 | failed_files='' 916 | for p in $(comm -12 <(eval $(|.cspell.json; printf >&2 "%s\n" "${0##*/}: trapped SIGINT"; exit 1' SIGINT 978 | printf '%s\n' "${config_new}" >|.cspell.json 979 | dependencies_words='' 980 | if [[ -n "${has_rust}" ]]; then 981 | dependencies_words=$(npx -y cspell stdin --no-progress --no-summary --words-only --unique <<<"${dependencies}" || true) 982 | fi 983 | all_words=$(ls_files | { grep -Fv "${project_dictionary}" || true; } | npx -y cspell --file-list stdin --no-progress --no-summary --words-only --unique || true) 984 | printf '%s\n' "${config_old}" >|.cspell.json 985 | trap -- 'printf >&2 "%s\n" "${0##*/}: trapped SIGINT"; exit 1' SIGINT 986 | cat >|.github/.cspell/rust-dependencies.txt <>.github/.cspell/rust-dependencies.txt <<<"${dependencies_words}"$'\n' 992 | fi 993 | if [[ -z "${CI:-}" ]]; then 994 | REMOVE_UNUSED_WORDS=1 995 | fi 996 | if [[ -z "${REMOVE_UNUSED_WORDS:-}" ]]; then 997 | check_diff .github/.cspell/rust-dependencies.txt 998 | fi 999 | if ! grep -Fq '.github/.cspell/rust-dependencies.txt linguist-generated' .gitattributes; then 1000 | error "you may want to mark .github/.cspell/rust-dependencies.txt linguist-generated" 1001 | fi 1002 | 1003 | info "running \`git ls-files | npx -y cspell --file-list stdin --no-progress --no-summary\`" 1004 | if ! ls_files | npx -y cspell --file-list stdin --no-progress --no-summary; then 1005 | error "spellcheck failed: please fix uses of below words or add to ${project_dictionary} if correct" 1006 | printf '=======================================\n' 1007 | { ls_files | npx -y cspell --file-list stdin --no-progress --no-summary --words-only || true; } | sed "s/'s$//g" | LC_ALL=C sort -f -u 1008 | printf '=======================================\n\n' 1009 | fi 1010 | 1011 | # Make sure the project-specific dictionary does not contain duplicated words. 
1012 | for dictionary in .github/.cspell/*.txt; do 1013 | if [[ "${dictionary}" == "${project_dictionary}" ]]; then 1014 | continue 1015 | fi 1016 | case "${ostype}" in 1017 | # NetBSD uniq doesn't support -i flag. 1018 | netbsd) dup=$(sed '/^$/d; /^\/\//d' "${project_dictionary}" "${dictionary}" | LC_ALL=C sort -f | tr '[:upper:]' '[:lower:]' | LC_ALL=C uniq -d) ;; 1019 | *) dup=$(sed '/^$/d; /^\/\//d' "${project_dictionary}" "${dictionary}" | LC_ALL=C sort -f | LC_ALL=C uniq -d -i) ;; 1020 | esac 1021 | if [[ -n "${dup}" ]]; then 1022 | error "duplicated words in dictionaries; please remove the following words from ${project_dictionary}" 1023 | print_fenced "${dup}"$'\n' 1024 | fi 1025 | done 1026 | 1027 | # Make sure the project-specific dictionary does not contain unused words. 1028 | if [[ -n "${REMOVE_UNUSED_WORDS:-}" ]]; then 1029 | grep_args=() 1030 | for word in $(grep -Ev '^//' "${project_dictionary}" || true); do 1031 | if ! grep -Eqi "^${word}$" <<<"${all_words}"; then 1032 | grep_args+=(-e "^${word}$") 1033 | fi 1034 | done 1035 | if [[ ${#grep_args[@]} -gt 0 ]]; then 1036 | info "removing unused words from ${project_dictionary}" 1037 | res=$(grep -Ev "${grep_args[@]}" "${project_dictionary}" || true) 1038 | if [[ -n "${res}" ]]; then 1039 | printf '%s\n' "${res}" >|"${project_dictionary}" 1040 | else 1041 | printf '' >|"${project_dictionary}" 1042 | fi 1043 | fi 1044 | else 1045 | unused='' 1046 | for word in $(grep -Ev '^//' "${project_dictionary}" || true); do 1047 | if ! 
grep -Eqi "^${word}$" <<<"${all_words}"; then 1048 | unused+="${word}"$'\n' 1049 | fi 1050 | done 1051 | if [[ -n "${unused}" ]]; then 1052 | error "unused words in dictionaries; please remove the following words from ${project_dictionary} or run ${0##*/} locally" 1053 | print_fenced "${unused}" 1054 | fi 1055 | fi 1056 | fi 1057 | printf '\n' 1058 | fi 1059 | 1060 | if [[ -n "${should_fail:-}" ]]; then 1061 | exit 1 1062 | fi 1063 | --------------------------------------------------------------------------------